fix(server): debug populate consumes less memory (#4384)

* fix(server): debug populate consumes less memory

Signed-off-by: adi_holden <adi@dragonflydb.io>
Author: adiholden, 2025-01-01 10:49:17 +02:00, committed by GitHub
commit 810af83074 (parent 2abe2c0ac2)
3 changed files with 35 additions and 32 deletions

@@ -17,7 +17,7 @@ from .instance import DflyInstance, DflyInstanceFactory
         ("ZSET", 250_000, 100, 100),
         ("LIST", 300_000, 100, 100),
         ("STRING", 3_500_000, 1000, 1),
-        ("STREAM", 260_000, 100, 100),
+        ("STREAM", 280_000, 100, 100),
     ],
 )
 # We limit to 5gb just in case to sanity check the gh runner. Otherwise, if we ask for too much
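For context, each tuple in the parametrize list above is (type, keys, val_size, elements), and the test turns it into a DEBUG POPULATE call. A minimal sketch of that mapping, assuming the same argument order as the populate command visible in the last hunk of this commit (the real test helper may differ):

    # Hypothetical sketch: how ("STREAM", 280_000, 100, 100) could map onto
    # Dragonfly's DEBUG POPULATE; runs inside the async test body.
    await client.execute_command(
        f"debug populate {keys} prefix {val_size} TYPE {type} RAND ELEMENTS {elements}"
    )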
@@ -69,6 +69,7 @@ async def test_rss_used_mem_gap(df_factory, type, keys, val_size, elements):
     await client.execute_command("DFLY", "LOAD", f"{dbfilename}-summary.dfs")
     await check_memory()
+    await client.execute_command("FLUSHALL")
 @pytest.mark.asyncio
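The check_memory() helper called above is defined elsewhere in the test module and is not part of this diff; conceptually it asserts that the OS-reported RSS stays close to the logical used memory. A rough sketch under that assumption (the field names are standard INFO MEMORY fields; the 1.1 ratio is illustrative, not the test's actual threshold):

    # Assumed shape of the helper; the real check_memory() may differ.
    async def check_memory():
        info = await client.info("memory")
        # The gap between OS RSS and logical used memory should stay small.
        assert info["used_memory_rss"] < info["used_memory"] * 1.1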

@@ -569,12 +569,7 @@ async def test_tiered_entries_throttle(async_client: aioredis.Redis):
 @dfly_args({"serialization_max_chunk_size": 4096, "proactor_threads": 1})
 @pytest.mark.parametrize(
     "cont_type",
-    [
-        ("HASH"),
-        ("SET"),
-        ("ZSET"),
-        ("LIST"),
-    ],
+    [("HASH"), ("SET"), ("ZSET"), ("LIST"), ("STREAM")],
 )
 @pytest.mark.slow
 async def test_big_value_serialization_memory_limit(df_factory, cont_type):
@@ -590,17 +585,16 @@ async def test_big_value_serialization_memory_limit(df_factory, cont_type):
     await client.execute_command(
         f"debug populate 1 prefix {element_size} TYPE {cont_type} RAND ELEMENTS {elements}"
     )
+    await asyncio.sleep(1)
     info = await client.info("ALL")
-    # rss double's because of DEBUG POPULATE
-    assert info["used_memory_peak_rss"] > (one_gb * 2)
+    assert info["used_memory_peak_rss"] < (one_gb * 1.2)
     # if we execute SAVE below without big value serialization we trigger the assertion below.
     # note the peak would reach (one_gb * 3) without it.
     await client.execute_command("SAVE")
     info = await client.info("ALL")
-    upper_limit = 2_250_000_000  # 2.25 GB
-    assert info["used_memory_peak_rss"] < upper_limit
+    assert info["used_memory_peak_rss"] < (one_gb * 1.3)
     await client.execute_command("FLUSHALL")
     await client.close()
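Taken together, the tightened bounds can be sanity-checked by hand against a locally running Dragonfly. The sketch below mirrors the test flow; the port, element size, and element count are assumptions chosen to build roughly 1 GiB in a single key, as the test does:

    import asyncio

    from redis import asyncio as aioredis


    async def main():
        # Assumes Dragonfly was started with the flags from @dfly_args above:
        # --serialization_max_chunk_size=4096 --proactor_threads=1
        client = aioredis.Redis(port=6379, decode_responses=True)
        one_gb = 1024**3
        # One LIST key of ~1 GiB: 4 elements of 256 MiB each (sizes assumed;
        # the real test derives element_size and elements elsewhere).
        await client.execute_command(
            "debug populate 1 prefix 268435456 TYPE LIST RAND ELEMENTS 4"
        )
        await asyncio.sleep(1)  # let RSS settle, as the updated test does
        info = await client.info("ALL")
        assert info["used_memory_peak_rss"] < one_gb * 1.2
        await client.execute_command("SAVE")
        info = await client.info("ALL")
        assert info["used_memory_peak_rss"] < one_gb * 1.3
        await client.execute_command("FLUSHALL")
        await client.close()


    asyncio.run(main())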