diff --git a/tests/dragonfly/cluster_test.py b/tests/dragonfly/cluster_test.py
index 1e41a002f..07ee39ef3 100644
--- a/tests/dragonfly/cluster_test.py
+++ b/tests/dragonfly/cluster_test.py
@@ -1496,14 +1496,18 @@ async def test_network_disconnect_during_migration(df_factory):
 
 
 @pytest.mark.parametrize(
-    "node_count, segments, keys, huge_values",
+    "node_count, segments, keys, huge_values, cache_mode",
     [
-        pytest.param(3, 16, 20_000, 10),
+        pytest.param(3, 16, 20_000, 10, "false"),
+        pytest.param(3, 16, 20_000, 10, "true"),
         # 1mb effectively disables breakdown of huge values.
         # TODO: add a test that mixes huge and small values, see
         # https://github.com/dragonflydb/dragonfly/pull/4144/files/11e5e387d31bcf1bc53dfbb28cf3bcaf094d77fa#r1850130930
-        pytest.param(3, 16, 20_000, 1_000_000),
-        pytest.param(5, 20, 30_000, 1_000_000, marks=[pytest.mark.slow, pytest.mark.opt_only]),
+        pytest.param(3, 16, 20_000, 1_000_000, "true"),
+        pytest.param(3, 16, 20_000, 1_000_000, "false"),
+        pytest.param(
+            5, 20, 30_000, 1_000_000, "false", marks=[pytest.mark.slow, pytest.mark.opt_only]
+        ),
     ],
 )
 @dfly_args({"proactor_threads": 4, "cluster_mode": "yes"})
@@ -1514,6 +1518,7 @@ async def test_cluster_fuzzymigration(
     segments: int,
     keys: int,
     huge_values: int,
+    cache_mode: str,
 ):
     instances = [
         df_factory.create(
@@ -1522,6 +1527,7 @@
             vmodule="outgoing_slot_migration=9,cluster_family=9,incoming_slot_migration=9,streamer=9",
             serialization_max_chunk_size=huge_values,
             replication_stream_output_limit=10,
+            cache_mode=cache_mode,
         )
         for i in range(node_count)
     ]
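Note (reviewer aside, not part of the patch): df_factory.create() presumably forwards keyword arguments such as cache_mode to the launched Dragonfly instance as command-line flags, so the fuzzer now exercises slot migration with caching both enabled and disabled. Below is a minimal, self-contained sketch of how pytest expands a matrix like the one above, assuming plain pytest semantics; the test name test_matrix_example is hypothetical and not from this patch.

import pytest


# Hypothetical illustration: pytest turns every pytest.param tuple into an
# independent test invocation, so adding the "cache_mode" dimension doubles
# both the small-value (10) and huge-value (1_000_000) cases.
@pytest.mark.parametrize(
    "huge_values, cache_mode",
    [
        pytest.param(10, "false"),
        pytest.param(10, "true"),
        pytest.param(1_000_000, "true"),
        pytest.param(1_000_000, "false"),
    ],
)
def test_matrix_example(huge_values: int, cache_mode: str):
    # Each (huge_values, cache_mode) pair runs as its own test case.
    assert cache_mode in ("false", "true")
    assert huge_values > 0

With the updated matrix in the patch itself, pytest -k test_cluster_fuzzymigration would collect five cases: four fast ones and one marked slow/opt_only.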