From ec966b91477957dc98f394db951fba0fbfcbc491 Mon Sep 17 00:00:00 2001
From: Tolu Aina <7848930+toluaina@users.noreply.github.com>
Date: Sun, 29 Jan 2023 19:13:41 +0100
Subject: [PATCH] fix insert on through tables

---
 .pre-commit-config.yaml            |   2 +-
 examples/bootstrap.sh              |   1 +
 examples/through/README            |   5 +
 examples/through/data.py           |  67 ++++++++++++
 examples/through/schema.json       |  35 +++++++
 examples/through/schema.py         |  88 ++++++++++++++++
 pgsync/base.py                     |  32 +++++-
 pgsync/sync.py                     |  53 +++++++---
 requirements/base.in               |   4 +-
 requirements/dev.txt               |  26 ++---
 requirements/prod.txt              |  16 +--
 requirements/test.txt              |  22 ++--
 tests/conftest.py                  |  35 +++++++
 tests/test_base.py                 |   4 +
 tests/test_sync_nested_children.py | 157 ++++++++++++++++++++++++++++-
 15 files changed, 495 insertions(+), 52 deletions(-)
 create mode 100644 examples/through/README
 create mode 100644 examples/through/data.py
 create mode 100644 examples/through/schema.json
 create mode 100644 examples/through/schema.py

diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml
index 62bedb95..bbb73d01 100644
--- a/.pre-commit-config.yaml
+++ b/.pre-commit-config.yaml
@@ -6,7 +6,7 @@ repos:
       - id: black
   - repo: https://github.com/pycqa/isort
-    rev: 5.11.4
+    rev: 5.12.0
     hooks:
       - id: isort
         args: ["--profile", "black", "--filter-files"]
\ No newline at end of file
diff --git a/examples/bootstrap.sh b/examples/bootstrap.sh
index 053c0789..0871e21d 100755
--- a/examples/bootstrap.sh
+++ b/examples/bootstrap.sh
@@ -2,6 +2,7 @@ set -u
 
 # create database prior to running this bootstrap
 source .pythonpath
+source .env
 
 if [ $# -eq 0 ]; then
     echo "No arguments supplied"
diff --git a/examples/through/README b/examples/through/README
new file mode 100644
index 00000000..a6f87a45
--- /dev/null
+++ b/examples/through/README
@@ -0,0 +1,5 @@
+DELETE FROM customer_group;
+
+INSERT INTO customer_group (customer_id, group_id) VALUES (1, 2);
+
+INSERT INTO customer_group (customer_id, group_id) VALUES (1, 3);
\ No newline at end of file
diff --git a/examples/through/data.py b/examples/through/data.py
new file mode 100644
index 00000000..988f104d
--- /dev/null
+++ b/examples/through/data.py
@@ -0,0 +1,67 @@
+import datetime
+import random
+from typing import Dict, List
+
+import click
+from faker import Faker
+from schema import Customer, CustomerGroup, Group
+from sqlalchemy.orm import sessionmaker
+
+from pgsync.base import pg_engine, subtransactions
+from pgsync.constants import DEFAULT_SCHEMA
+from pgsync.helper import teardown
+from pgsync.utils import config_loader, get_config
+
+
+@click.command()
+@click.option(
+    "--config",
+    "-c",
+    help="Schema config",
+    type=click.Path(exists=True),
+)
+def main(config):
+
+    config: str = get_config(config)
+    teardown(drop_db=False, config=config)
+
+    for document in config_loader(config):
+
+        database: str = document.get("database", document["index"])
+        with pg_engine(database) as engine:
+            schema: str = document.get("schema", DEFAULT_SCHEMA)
+            connection = engine.connect().execution_options(
+                schema_translate_map={None: schema}
+            )
+            Session = sessionmaker(bind=connection, autoflush=True)
+            session = Session()
+
+            customers = [
+                Customer(name="CustomerA"),
+                Customer(name="CustomerB"),
+                Customer(name="CustomerC"),
+            ]
+            with subtransactions(session):
+                session.add_all(customers)
+
+            groups = [
+                Group(group_name="GroupA"),
+                Group(group_name="GroupB"),
+                Group(group_name="GroupC"),
+            ]
+            with subtransactions(session):
+                session.add_all(groups)
+
+            customers_groups = [
+                CustomerGroup(customer=customers[0], group=groups[0]),
+                CustomerGroup(customer=customers[1], group=groups[1]),
+                CustomerGroup(customer=customers[2], group=groups[2]),
+            ]
+            with subtransactions(session):
+                session.add_all(customers_groups)
+
+            session.commit()
+
+
+if __name__ == "__main__":
+    main()
diff --git a/examples/through/schema.json b/examples/through/schema.json
new file mode 100644
index 00000000..bbce0797
--- /dev/null
+++ b/examples/through/schema.json
@@ -0,0 +1,35 @@
+[
+    {
+        "database": "through",
+        "index": "through",
+        "nodes":
+        {
+            "table": "customer",
+            "columns":
+            [
+                "id",
+                "name"
+            ],
+            "children":
+            [
+                {
+                    "table": "group",
+                    "columns":
+                    [
+                        "id",
+                        "group_name"
+                    ],
+                    "relationship":
+                    {
+                        "variant": "object",
+                        "type": "one_to_many",
+                        "through_tables":
+                        [
+                            "customer_group"
+                        ]
+                    }
+                }
+            ]
+        }
+    }
+]
\ No newline at end of file
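
With the schema above and the rows seeded by data.py, the README's two INSERTs into customer_group should surface in the synced index roughly as follows. This is an illustrative sketch, not captured output: the field layout follows the "object"/"one_to_many" relationship, and the exact _meta contents are inferred from the tests at the end of this patch.

    # Hypothetical document for customer id=1 ("CustomerA") after
    # INSERT INTO customer_group (customer_id, group_id) VALUES (1, 2), (1, 3):
    {
        "_index": "through",
        "_id": "1",
        "_source": {
            "id": 1,
            "name": "CustomerA",
            "group": [
                {"id": 2, "group_name": "GroupB"},
                {"id": 3, "group_name": "GroupC"},
            ],
            "_meta": {
                "group": {"id": [2, 3]},
                "customer_group": {"id": [...]},  # ids of the through rows
            },
        },
    }
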
diff --git a/examples/through/schema.py b/examples/through/schema.py
new file mode 100644
index 00000000..ae5d8319
--- /dev/null
+++ b/examples/through/schema.py
@@ -0,0 +1,88 @@
+import click
+import sqlalchemy as sa
+from sqlalchemy.ext.declarative import declarative_base
+from sqlalchemy.schema import UniqueConstraint
+
+from pgsync.base import create_database, create_schema, pg_engine
+from pgsync.constants import DEFAULT_SCHEMA
+from pgsync.helper import teardown
+from pgsync.utils import config_loader, get_config
+
+Base = declarative_base()
+
+
+class Customer(Base):
+    __tablename__ = "customer"
+    __table_args__ = (UniqueConstraint("name"),)
+    id = sa.Column(sa.Integer, primary_key=True, autoincrement=True)
+    name = sa.Column(sa.String, nullable=False)
+
+
+class Group(Base):
+    __tablename__ = "group"
+    __table_args__ = (
+        UniqueConstraint(
+            "group_name",
+        ),
+    )
+    id = sa.Column(sa.Integer, primary_key=True, autoincrement=True)
+    group_name = sa.Column(sa.String, nullable=False)
+
+
+class CustomerGroup(Base):
+    __tablename__ = "customer_group"
+    __table_args__ = (
+        UniqueConstraint(
+            "customer_id",
+            "group_id",
+        ),
+    )
+    id = sa.Column(sa.Integer, primary_key=True, autoincrement=True)
+    customer_id = sa.Column(
+        sa.Integer,
+        sa.ForeignKey(Customer.id, ondelete="CASCADE"),
+    )
+    customer = sa.orm.relationship(
+        Customer,
+        backref=sa.orm.backref("customers"),
+    )
+    group_id = sa.Column(
+        sa.Integer,
+        sa.ForeignKey(Group.id, ondelete="CASCADE"),
+    )
+    group = sa.orm.relationship(
+        Group,
+        backref=sa.orm.backref("groups"),
+    )
+
+
+def setup(config: str) -> None:
+    for document in config_loader(config):
+        database: str = document.get("database", document["index"])
+        schema: str = document.get("schema", DEFAULT_SCHEMA)
+        create_database(database)
+        create_schema(database, schema)
+        with pg_engine(database) as engine:
+            engine = engine.connect().execution_options(
+                schema_translate_map={None: schema}
+            )
+            Base.metadata.drop_all(engine)
+            Base.metadata.create_all(engine)
+
+
+@click.command()
+@click.option(
+    "--config",
+    "-c",
+    help="Schema config",
+    type=click.Path(exists=True),
+)
+def main(config):
+
+    config: str = get_config(config)
+    teardown(config=config)
+    setup(config)
+
+
+if __name__ == "__main__":
+    main()
diff --git a/pgsync/base.py b/pgsync/base.py
index 7e24f9f8..9c67822c 100644
--- a/pgsync/base.py
+++ b/pgsync/base.py
@@ -78,6 +78,36 @@ def data(self) -> dict:
             return self.old
         return self.new
 
+    def foreign_key_constraint(self, model) -> dict:
+        """For example:
+        {
+            'public.customer': {  # referred table (fully qualified name)
+                'local': 'customer_id',
+                'remote': 'id',
+                'value': 1
+            },
+            'public.group': {  # referred table (fully qualified name)
+                'local': 'group_id',
+                'remote': 'id',
+                'value': 1
+            }
+        }
+        """
+        constraints: dict = {}
+        for foreign_key in model.foreign_keys:
+            referred_table: str = str(foreign_key.constraint.referred_table)
+            constraints.setdefault(referred_table, {})
+            if foreign_key.constraint.column_keys:
+                if foreign_key.constraint.column_keys[0] in self.data:
+                    constraints[referred_table] = {
+                        "local": foreign_key.constraint.column_keys[0],
+                        "remote": foreign_key.column.name,
+                        "value": self.data[
+                            foreign_key.constraint.column_keys[0]
+                        ],
+                    }
+        return constraints
+
 
 class TupleIdentifierType(sa.types.UserDefinedType):
     cache_ok: bool = True
@@ -453,7 +483,7 @@ def logical_slot_peek_changes(
         upto_nchanges: Optional[int] = None,
         limit: Optional[int] = None,
         offset: Optional[int] = None,
-    ) -> List[sa.engine.row.LegacyRow]:
+    ) -> List[sa.engine.row.Row]:
         """Peek a logical replication slot without consuming changes.
 
         SELECT * FROM PG_LOGICAL_SLOT_PEEK_CHANGES('testdb', NULL, 1)
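
To make the new Payload.foreign_key_constraint helper concrete: for an INSERT on the customer_group through table, it walks the model's foreign keys and, per referred table, records the local column, the remote column, and the payload's value for that column — but only when the local column is actually present in the payload data. A minimal sketch; the Payload keyword arguments and the model lookup are assumptions for illustration:

    from pgsync.base import Payload  # the class extended above

    # Payload for: INSERT INTO customer_group (customer_id, group_id) VALUES (1, 2)
    payload = Payload(
        tg_op="INSERT",
        table="customer_group",
        new={"id": 7, "customer_id": 1, "group_id": 2},
    )

    # "model" is the sqlalchemy Table for customer_group; the lookup is elided.
    constraints = payload.foreign_key_constraint(model)

    # Per the docstring above, constraints should now be:
    # {
    #     "public.customer": {"local": "customer_id", "remote": "id", "value": 1},
    #     "public.group": {"local": "group_id", "remote": "id", "value": 2},
    # }
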
diff --git a/pgsync/sync.py b/pgsync/sync.py
index 22abbb2a..3988590c 100644
--- a/pgsync/sync.py
+++ b/pgsync/sync.py
@@ -512,6 +512,23 @@ def _root_foreign_key_resolver(
 
         return filters
 
+    def _through_node_resolver(
+        self, node: Node, payload: Payload, filters: list
+    ) -> list:
+        """Handle the case where the node is a through table with a
+        direct reference to the root.
+        """
+        foreign_key_constraint = payload.foreign_key_constraint(node.model)
+        if self.tree.root.name in foreign_key_constraint:
+            filters.append(
+                {
+                    foreign_key_constraint[self.tree.root.name][
+                        "remote"
+                    ]: foreign_key_constraint[self.tree.root.name]["value"]
+                }
+            )
+        return filters
+
     def _insert_op(
         self, node: Node, filters: dict, payloads: List[Payload]
     ) -> dict:
@@ -540,28 +557,36 @@ def _insert_op(
                     )
                     raise
 
-        # set the parent as the new entity that has changed
-        foreign_keys = self.query_builder._get_foreign_keys(
-            node.parent,
-            node,
-        )
+        try:
+            foreign_keys = self.query_builder.get_foreign_keys(
+                node.parent,
+                node,
+            )
+        except ForeignKeyError:
+            foreign_keys = self.query_builder._get_foreign_keys(
+                node.parent,
+                node,
+            )
 
         _filters: list = []
         for payload in payloads:
-            for i, key in enumerate(foreign_keys[node.name]):
-                if key == foreign_keys[node.parent.name][i]:
-                    filters[node.parent.table].append(
-                        {
-                            foreign_keys[node.parent.name][
-                                i
-                            ]: payload.data[key]
-                        }
-                    )
+            for node_key in foreign_keys[node.name]:
+                for parent_key in foreign_keys[node.parent.name]:
+                    if node_key == parent_key:
+                        filters[node.parent.table].append(
+                            {parent_key: payload.data[node_key]}
+                        )
 
             _filters = self._root_foreign_key_resolver(
                 node, payload, foreign_keys, _filters
             )
 
+            # through table with a direct reference to the root
+            if not _filters:
+                _filters = self._through_node_resolver(
+                    node, payload, _filters
+                )
+
         if _filters:
             filters[self.tree.root.table].extend(_filters)
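
Tracing the _insert_op change through the example schema makes the fix clear: an INSERT on customer_group matches no parent/child key pair, so _filters stays empty and, before this patch, the root document was never re-synced. The new fallback derives a root filter from the through table's own foreign key. A standalone sketch of that logic, with literal values standing in for the real node/payload objects (and assuming, as the code does, that the root's name matches the fully qualified key):

    filters: list = []
    root_name = "public.customer"  # self.tree.root.name in the real code

    # What payload.foreign_key_constraint(node.model) returns for
    # INSERT INTO customer_group (customer_id, group_id) VALUES (1, 2):
    foreign_key_constraint = {
        "public.customer": {"local": "customer_id", "remote": "id", "value": 1},
        "public.group": {"local": "group_id", "remote": "id", "value": 2},
    }

    if root_name in foreign_key_constraint:
        filters.append(
            {
                foreign_key_constraint[root_name]["remote"]: (
                    foreign_key_constraint[root_name]["value"]
                )
            }
        )

    assert filters == [{"id": 1}]  # re-sync the root (customer) row with id=1
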
diff --git a/requirements/base.in b/requirements/base.in
index 3d155d00..a2e8109b 100644
--- a/requirements/base.in
+++ b/requirements/base.in
@@ -11,7 +11,9 @@ faker
 psycopg2-binary
 redis
 requests-aws4auth
-sqlalchemy
+
+# pin sqlalchemy to latest 1.* until 2.0 support
+sqlalchemy==1.4.*
 sqlparse
 # pin these libs because latest flake8 does not allow newer versions of importlib-metadata https://github.com/PyCQA/flake8/issues/1522
diff --git a/requirements/dev.txt b/requirements/dev.txt
index 733932d4..d71177a3 100644
--- a/requirements/dev.txt
+++ b/requirements/dev.txt
@@ -10,9 +10,9 @@ attrs==22.2.0
     # via pytest
 black==22.12.0
     # via -r requirements/base.in
-boto3==1.26.54
+boto3==1.26.59
     # via -r requirements/base.in
-botocore==1.29.54
+botocore==1.29.59
     # via
     #   boto3
     #   s3transfer
@@ -36,7 +36,7 @@ click==8.1.3
     # via
     #   -r requirements/base.in
     #   black
     #   pip-tools
-coverage[toml]==7.0.5
+coverage[toml]==7.1.0
     # via
     #   -r requirements/dev.in
     #   pytest-cov
@@ -52,7 +52,7 @@ environs==9.5.0
     # via -r requirements/base.in
 exceptiongroup==1.1.0
     # via pytest
-faker==16.6.0
+faker==16.6.1
     # via -r requirements/base.in
 filelock==3.9.0
     # via virtualenv
@@ -65,7 +65,7 @@ flake8==5.0.4
     #   flake8-print
 flake8-debugger==4.1.2
     # via -r requirements/test.in
-flake8-docstrings==1.6.0
+flake8-docstrings==1.7.0
     # via -r requirements/test.in
 flake8-isort==6.0.0
     # via -r requirements/test.in
@@ -75,9 +75,9 @@ flake8-todo==0.7
     # via -r requirements/test.in
 freezegun==1.2.2
     # via -r requirements/test.in
-greenlet==2.0.1
+greenlet==2.0.2
     # via sqlalchemy
-identify==2.5.13
+identify==2.5.16
     # via pre-commit
 idna==3.4
     # via requests
@@ -85,7 +85,7 @@ importlib-metadata==4.2.0
     # via -r requirements/base.in
 iniconfig==2.0.0
     # via pytest
-isort==5.11.4
+isort==5.12.0
     # via flake8-isort
 jmespath==1.0.1
     # via
@@ -103,7 +103,7 @@ nodeenv==1.7.0
     # via pre-commit
 opensearch-dsl==2.0.1
     # via -r requirements/base.in
-opensearch-py==2.0.1
+opensearch-py==2.1.1
     # via opensearch-dsl
 packaging==23.0
     # via
@@ -111,7 +111,7 @@ packaging==23.0
     #   marshmallow
     #   pytest
     #   pytest-sugar
-pathspec==0.10.3
+pathspec==0.11.0
     # via black
 pip-tools==6.12.1
     # via -r requirements/dev.in
@@ -121,7 +121,7 @@ platformdirs==2.6.2
     # via
    #   black
     #   virtualenv
 pluggy==1.0.0
     # via pytest
-pre-commit==2.21.0
+pre-commit==3.0.1
     # via -r requirements/dev.in
 psycopg2-binary==2.9.5
     # via -r requirements/base.in
@@ -168,7 +168,7 @@ requests==2.28.2
     # via
     #   opensearch-py
     #   requests-aws4auth
-requests-aws4auth==1.2.0
+requests-aws4auth==1.2.1
     # via -r requirements/base.in
 s3transfer==0.6.0
     # via boto3
@@ -206,7 +206,7 @@ virtualenv==20.16.2
     #   pre-commit
 wheel==0.38.4
     # via pip-tools
-zipp==3.11.0
+zipp==3.12.0
     # via importlib-metadata
 
 # The following packages are considered to be unsafe in a requirements file:
diff --git a/requirements/prod.txt b/requirements/prod.txt
index cadb96c8..a562f4b1 100644
--- a/requirements/prod.txt
+++ b/requirements/prod.txt
@@ -8,9 +8,9 @@ async-timeout==4.0.2
     # via redis
 black==22.12.0
     # via -r requirements/base.in
-boto3==1.26.54
+boto3==1.26.59
     # via -r requirements/base.in
-botocore==1.29.54
+botocore==1.29.59
     # via
     #   boto3
     #   s3transfer
@@ -39,11 +39,11 @@ elasticsearch-dsl==7.4.0
     # via -r requirements/base.in
 environs==9.5.0
     # via -r requirements/base.in
-faker==16.6.0
+faker==16.6.1
     # via -r requirements/base.in
 filelock==3.9.0
     # via virtualenv
-greenlet==2.0.1
+greenlet==2.0.2
     # via sqlalchemy
 idna==3.4
     # via requests
@@ -61,11 +61,11 @@ newrelic==8.5.0
     # via -r requirements/prod.in
 opensearch-dsl==2.0.1
     # via -r requirements/base.in
-opensearch-py==2.0.1
+opensearch-py==2.1.1
     # via opensearch-dsl
 packaging==23.0
     # via marshmallow
-pathspec==0.10.3
+pathspec==0.11.0
     # via black
 platformdirs==2.6.2
     # via
@@ -87,7 +87,7 @@ requests==2.28.2
     # via
     #   opensearch-py
     #   requests-aws4auth
-requests-aws4auth==1.2.0
+requests-aws4auth==1.2.1
     # via -r requirements/base.in
 s3transfer==0.6.0
     # via boto3
@@ -113,5 +113,5 @@ urllib3==1.26.14
     #   requests
 virtualenv==20.16.2
     # via -r requirements/base.in
-zipp==3.11.0
+zipp==3.12.0
     # via importlib-metadata
diff --git a/requirements/test.txt b/requirements/test.txt
index c940a31c..48be7976 100644
--- a/requirements/test.txt
+++ b/requirements/test.txt
@@ -10,9 +10,9 @@ attrs==22.2.0
     # via pytest
 black==22.12.0
     # via -r requirements/base.in
-boto3==1.26.54
+boto3==1.26.59
     # via -r requirements/base.in
-botocore==1.29.54
+botocore==1.29.59
     # via
     #   boto3
     #   s3transfer
@@ -31,7 +31,7 @@ click==8.1.3
     # via
     #   -r requirements/base.in
     #   black
-coverage[toml]==7.0.5
+coverage[toml]==7.1.0
     # via pytest-cov
 distlib==0.3.6
     # via virtualenv
@@ -45,7 +45,7 @@ environs==9.5.0
     # via -r requirements/base.in
 exceptiongroup==1.1.0
     # via pytest
-faker==16.6.0
+faker==16.6.1
     # via -r requirements/base.in
 filelock==3.9.0
     # via virtualenv
@@ -58,7 +58,7 @@ flake8==5.0.4
     #   flake8-print
 flake8-debugger==4.1.2
     # via -r requirements/test.in
-flake8-docstrings==1.6.0
+flake8-docstrings==1.7.0
     # via -r requirements/test.in
 flake8-isort==6.0.0
     # via -r requirements/test.in
@@ -68,7 +68,7 @@ flake8-todo==0.7
     # via -r requirements/test.in
 freezegun==1.2.2
     # via -r requirements/test.in
-greenlet==2.0.1
+greenlet==2.0.2
     # via sqlalchemy
 idna==3.4
     # via requests
@@ -76,7 +76,7 @@ importlib-metadata==4.2.0
     # via -r requirements/base.in
 iniconfig==2.0.0
     # via pytest
-isort==5.11.4
+isort==5.12.0
     # via flake8-isort
 jmespath==1.0.1
     # via
@@ -92,14 +92,14 @@ mypy-extensions==0.4.3
     # via black
 opensearch-dsl==2.0.1
     # via -r requirements/base.in
-opensearch-py==2.0.1
+opensearch-py==2.1.1
     # via opensearch-dsl
 packaging==23.0
     # via
     #   marshmallow
     #   pytest
     #   pytest-sugar
-pathspec==0.10.3
+pathspec==0.11.0
     # via black
 platformdirs==2.6.2
     # via
@@ -148,7 +148,7 @@ requests==2.28.2
     # via
     #   opensearch-py
     #   requests-aws4auth
-requests-aws4auth==1.2.0
+requests-aws4auth==1.2.1
     # via -r requirements/base.in
 s3transfer==0.6.0
     # via boto3
@@ -181,5 +181,5 @@ urllib3==1.26.14
     #   requests
 virtualenv==20.16.2
     # via -r requirements/base.in
-zipp==3.11.0
+zipp==3.12.0
     # via importlib-metadata
diff --git a/tests/conftest.py b/tests/conftest.py
index 09d100b5..9e7b12f9 100644
--- a/tests/conftest.py
+++ b/tests/conftest.py
@@ -354,6 +354,35 @@ class Rating(base):
     return Rating
 
 
+@pytest.fixture(scope="session")
+def group_cls(base):
+    class Group(base):
+        __tablename__ = "group"
+        __table_args__ = (UniqueConstraint("group_name"),)
+        id = sa.Column(sa.Integer, primary_key=True)
+        group_name = sa.Column(sa.String, nullable=False)
+
+    return Group
+
+
+@pytest.fixture(scope="session")
+def book_group_cls(base, book_cls, group_cls):
+    class BookGroup(base):
+        __tablename__ = "book_group"
+        __table_args__ = (UniqueConstraint("book_isbn", "group_id"),)
+        id = sa.Column(sa.Integer, primary_key=True)
+        book_isbn = sa.Column(sa.String, sa.ForeignKey(book_cls.isbn))
+        book = sa.orm.relationship(
+            book_cls, backref=sa.orm.backref("book_book_group_books")
+        )
+        group_id = sa.Column(sa.Integer, sa.ForeignKey(group_cls.id))
+        group = sa.orm.relationship(
+            group_cls, backref=sa.orm.backref("groups"), cascade="all,delete"
+        )
+
+    return BookGroup
+
+
 @pytest.fixture(scope="session")
 def model_mapping(
     city_cls,
@@ -373,6 +402,8 @@ def model_mapping(
     user_cls,
     contact_cls,
     contact_item_cls,
+    book_group_cls,
+    group_cls,
 ):
     return {
         "cities": city_cls,
@@ -392,6 +423,8 @@ def model_mapping(
         "users": user_cls,
         "contacts": contact_cls,
         "contact_items": contact_item_cls,
+        "book_groups": book_group_cls,
+        "groups": group_cls,
     }
 
 
@@ -432,6 +465,8 @@ def dataset(
     book_language_cls,
     book_shelf_cls,
     rating_cls,
+    book_group_cls,
+    group_cls,
 ):
 
     eu_continent = continent_cls(name="Europe")
diff --git a/tests/test_base.py b/tests/test_base.py
index ea79e42e..582ac1b0 100644
--- a/tests/test_base.py
+++ b/tests/test_base.py
@@ -90,6 +90,7 @@ def test_tables(self, connection):
         tables = [
             "continent",
             "country",
+            "group",
             "publisher",
             "book",
             "city",
@@ -101,6 +102,7 @@ def test_tables(self, connection):
             "shelf",
             "author",
             "book_author",
+            "book_group",
             "rating",
             "contact",
             "contact_item",
@@ -193,6 +195,7 @@ def test_truncate_schema(
             "author",
             "book",
             "book_author",
+            "book_group",
             "book_language",
             "book_shelf",
             "book_subject",
@@ -201,6 +204,7 @@ def test_truncate_schema(
             "contact_item",
             "continent",
             "country",
+            "group",
             "language",
             "publisher",
             "rating",
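
The new fixtures mirror the customer/group example at test scope: book_group is an association (through) table joining book and group, and registering both classes in model_mapping makes them part of the standard test schema. Linking a book to a group in a test is then a single association row — a sketch assuming the fixtures above and an existing book with isbn="abc":

    group = group_cls(id=1, group_name="GroupA")
    session.add(group)
    session.commit()

    # The through row is all that ties the book to the group.
    session.add(book_group_cls(book_isbn="abc", group_id=1))
    session.commit()
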
diff --git a/tests/test_sync_nested_children.py b/tests/test_sync_nested_children.py
index 72a986d4..8d9a81dc 100644
--- a/tests/test_sync_nested_children.py
+++ b/tests/test_sync_nested_children.py
@@ -33,6 +33,8 @@ def data(
     subject_cls,
     book_shelf_cls,
     shelf_cls,
+    book_group_cls,
+    group_cls,
 ):
 
     session = sync.session
@@ -267,6 +269,8 @@ def data(
             subject_cls.__table__.name,
             book_shelf_cls.__table__.name,
             shelf_cls.__table__.name,
+            book_group_cls.__table__.name,
+            group_cls.__table__.name,
         ]
     )
 
@@ -1128,9 +1132,155 @@ def poll_db():
         assert_resync_empty(sync, nodes)
         sync.search_client.close()
 
-    def test_insert_through_child_noop(self, sync, data):
-        # insert a new through child with noop
-        pass
+    def test_insert_through_child_op2(
+        self, book_cls, group_cls, book_group_cls, data
+    ):
+        # insert a new through child with op
+        nodes = {
+            "table": "book",
+            "columns": ["isbn", "title"],
+            "children": [
+                {
+                    "table": "group",
+                    "columns": ["id", "group_name"],
+                    "relationship": {
+                        "variant": "object",
+                        "type": "one_to_many",
+                        "through_tables": ["book_group"],
+                    },
+                }
+            ],
+        }
+        document = {
+            "index": "testdb",
+            "database": "testdb",
+            "nodes": nodes,
+        }
+
+        sync = Sync(document)
+        sync.tree.build(nodes)
+        session = sync.session
+
+        with subtransactions(session):
+            session.execute(book_group_cls.__table__.delete())
+            session.execute(
+                group_cls.__table__.insert().values(id=1, group_name="GroupA")
+            )
+            session.execute(
+                group_cls.__table__.insert().values(id=2, group_name="GroupB")
+            )
+
+        docs = [sort_list(doc) for doc in sync.sync()]
+        assert docs == [
+            {
+                "_id": "abc",
+                "_index": "testdb",
+                "_source": {
+                    "isbn": "abc",
+                    "group": None,
+                    "title": "The Tiger Club",
+                    "_meta": {},
+                },
+            },
+            {
+                "_id": "def",
+                "_index": "testdb",
+                "_source": {
+                    "isbn": "def",
+                    "group": None,
+                    "title": "The Lion Club",
+                    "_meta": {},
+                },
+            },
+            {
+                "_id": "ghi",
+                "_index": "testdb",
+                "_source": {
+                    "isbn": "ghi",
+                    "group": None,
+                    "title": "The Rabbit Club",
+                    "_meta": {},
+                },
+            },
+        ]
+        sync.checkpoint = sync.txid_current
+
+        def pull():
+            txmin = sync.checkpoint
+            txmax = sync.txid_current
+            sync.logical_slot_changes(txmin=txmin, txmax=txmax)
+
+        def poll_redis():
+            return []
+
+        def poll_db():
+            with subtransactions(session):
+                session.execute(
+                    book_group_cls.__table__.insert().values(
+                        book_isbn="abc", group_id=1
+                    )
+                )
+                session.execute(
+                    book_group_cls.__table__.insert().values(
+                        book_isbn="abc", group_id=2
+                    )
+                )
+
+        with mock.patch("pgsync.sync.Sync.poll_redis", side_effect=poll_redis):
+            with mock.patch("pgsync.sync.Sync.poll_db", side_effect=poll_db):
+                with mock.patch("pgsync.sync.Sync.pull", side_effect=pull):
+                    with mock.patch(
+                        "pgsync.sync.Sync.truncate_slots",
+                        side_effect=noop,
+                    ):
+                        with mock.patch(
+                            "pgsync.sync.Sync.status",
+                            side_effect=noop,
+                        ):
+                            sync.receive(NTHREADS_POLLDB)
+                            sync.search_client.refresh("testdb")
+
+        docs = [sort_list(doc) for doc in sync.sync()]
+        # only book "abc" gains groups; "def" and "ghi" remain unchanged
+        assert docs == [
"_index": "testdb", + "_source": { + "isbn": "abc", + "group": [ + {"id": 1, "group_name": "GroupA"}, + {"id": 2, "group_name": "GroupB"}, + ], + "title": "The Tiger Club", + "_meta": { + "group": {"id": [1, 2]}, + "book_group": {"id": [1, 2]}, + }, + }, + }, + { + "_id": "def", + "_index": "testdb", + "_source": { + "isbn": "def", + "group": None, + "title": "The Lion Club", + "_meta": {}, + }, + }, + { + "_id": "ghi", + "_index": "testdb", + "_source": { + "isbn": "ghi", + "group": None, + "title": "The Rabbit Club", + "_meta": {}, + }, + }, + ] + sync.search_client.close() def test_update_through_child_noop(self, sync, data): # update a new through child with noop @@ -1392,6 +1542,7 @@ def test_insert_through_child_op( assert doc[key] == expected[i][key] assert_resync_empty(sync, nodes) + sync.search_client.close() def test_update_through_child_op(