refactor: remove serialization_max_chunk_size for cluster tests (#4316)

This commit is contained in:
Borys 2024-12-16 13:42:56 +02:00 committed by GitHub
parent af04b558db
commit 8237d8fa81
No known key found for this signature in database
GPG key ID: B5690EEEBB952194

View file

@@ -1433,7 +1433,7 @@ async def test_migration_with_key_ttl(df_factory):
     assert await nodes[1].client.execute_command("stick k_sticky") == 0

-@dfly_args({"proactor_threads": 4, "cluster_mode": "yes", "serialization_max_chunk_size": 0})
+@dfly_args({"proactor_threads": 4, "cluster_mode": "yes"})
 async def test_network_disconnect_during_migration(df_factory):
     instances = [
         df_factory.create(port=next(next_port), admin_port=next(next_port)) for i in range(2)
@@ -1472,7 +1472,7 @@ async def test_network_disconnect_during_migration(df_factory):
         await proxy.start()

-    await wait_for_status(nodes[0].admin_client, nodes[1].id, "FINISHED", 60)
+    await wait_for_status(nodes[0].admin_client, nodes[1].id, "FINISHED", 300)
     nodes[0].migrations = []
     nodes[0].slots = []
     nodes[1].slots = [(0, 16383)]
@@ -1970,7 +1970,7 @@ async def test_cluster_migration_cancel(df_factory: DflyInstanceFactory):
         assert str(i) == await nodes[1].client.get(f"{{key50}}:{i}")

-@dfly_args({"proactor_threads": 2, "cluster_mode": "yes", "serialization_max_chunk_size": 0})
+@dfly_args({"proactor_threads": 2, "cluster_mode": "yes"})
 @pytest.mark.asyncio
 async def test_cluster_migration_huge_container(df_factory: DflyInstanceFactory):
     instances = [
@@ -2003,7 +2003,7 @@ async def test_cluster_migration_huge_container(df_factory: DflyInstanceFactory)
     await push_config(json.dumps(generate_config(nodes)), [node.admin_client for node in nodes])

     logging.debug("Waiting for migration to finish")
-    await wait_for_status(nodes[0].admin_client, nodes[1].id, "FINISHED")
+    await wait_for_status(nodes[0].admin_client, nodes[1].id, "FINISHED", 30)

     target_data = await StaticSeeder.capture(nodes[1].client)
     assert source_data == target_data
@@ -2433,7 +2433,7 @@ async def test_cluster_memory_consumption_migration(df_factory: DflyInstanceFact
 @pytest.mark.asyncio
-@dfly_args({"proactor_threads": 4, "cluster_mode": "yes", "serialization_max_chunk_size": 0})
+@dfly_args({"proactor_threads": 4, "cluster_mode": "yes"})
 async def test_migration_timeout_on_sync(df_factory: DflyInstanceFactory, df_seeder_factory):
     # Timeout set to 3 seconds because we must first saturate the socket before we get the timeout
     instances = [