test: add test migration in cache mode (#4413)

This commit is contained in:
Borys 2025-01-07 13:15:59 +02:00 committed by GitHub
parent a520ce0195
commit aaf33d5a1f
No known key found for this signature in database
GPG key ID: B5690EEEBB952194

View file

@ -1496,14 +1496,18 @@ async def test_network_disconnect_during_migration(df_factory):
@pytest.mark.parametrize( @pytest.mark.parametrize(
"node_count, segments, keys, huge_values", "node_count, segments, keys, huge_values, cache_mode",
[ [
pytest.param(3, 16, 20_000, 10), pytest.param(3, 16, 20_000, 10, "false"),
pytest.param(3, 16, 20_000, 10, "true"),
# 1mb effectively disables breakdown of huge values. # 1mb effectively disables breakdown of huge values.
# TODO: add a test that mixes huge and small values, see # TODO: add a test that mixes huge and small values, see
# https://github.com/dragonflydb/dragonfly/pull/4144/files/11e5e387d31bcf1bc53dfbb28cf3bcaf094d77fa#r1850130930 # https://github.com/dragonflydb/dragonfly/pull/4144/files/11e5e387d31bcf1bc53dfbb28cf3bcaf094d77fa#r1850130930
pytest.param(3, 16, 20_000, 1_000_000), pytest.param(3, 16, 20_000, 1_000_000, "true"),
pytest.param(5, 20, 30_000, 1_000_000, marks=[pytest.mark.slow, pytest.mark.opt_only]), pytest.param(3, 16, 20_000, 1_000_000, "false"),
pytest.param(
5, 20, 30_000, 1_000_000, "false", marks=[pytest.mark.slow, pytest.mark.opt_only]
),
], ],
) )
@dfly_args({"proactor_threads": 4, "cluster_mode": "yes"}) @dfly_args({"proactor_threads": 4, "cluster_mode": "yes"})
@ -1514,6 +1518,7 @@ async def test_cluster_fuzzymigration(
segments: int, segments: int,
keys: int, keys: int,
huge_values: int, huge_values: int,
cache_mode: str,
): ):
instances = [ instances = [
df_factory.create( df_factory.create(
@ -1522,6 +1527,7 @@ async def test_cluster_fuzzymigration(
vmodule="outgoing_slot_migration=9,cluster_family=9,incoming_slot_migration=9,streamer=9", vmodule="outgoing_slot_migration=9,cluster_family=9,incoming_slot_migration=9,streamer=9",
serialization_max_chunk_size=huge_values, serialization_max_chunk_size=huge_values,
replication_stream_output_limit=10, replication_stream_output_limit=10,
cache_mode=cache_mode,
) )
for i in range(node_count) for i in range(node_count)
] ]