mirror of
https://github.com/dragonflydb/dragonfly.git
synced 2025-05-11 10:25:47 +02:00
feat(rdb_load): add support for loading huge streams (#3855)
* chore: remove the nested vector `LoadTrace::arr` from RdbLoad
* feat(rdb_load): add support for loading huge streams
This commit is contained in:
parent
07e0b9db4b
commit
4dbed3f8dd
4 changed files with 188 additions and 147 deletions
|
@ -644,4 +644,24 @@ TEST_F(RdbTest, LoadHugeList) {
|
|||
ASSERT_EQ(100000, CheckedInt({"llen", "test:1"}));
|
||||
}
|
||||
|
||||
// Tests loading a huge stream, where the stream is loaded in multiple partial
|
||||
// reads.
|
||||
TEST_F(RdbTest, LoadHugeStream) {
|
||||
// Add a huge stream (test:0) with 2000 entries, and 4 1k elements per entry
|
||||
// (note must be more than 512*4kb elements to test partial reads).
|
||||
for (int i = 0; i != 2000; i++) {
|
||||
Run({"debug", "populate", "1", "test", "2000", "rand", "type", "stream", "elements", "4"});
|
||||
}
|
||||
ASSERT_EQ(2000, CheckedInt({"xlen", "test:0"}));
|
||||
|
||||
RespExpr resp = Run({"save", "df"});
|
||||
ASSERT_EQ(resp, "OK");
|
||||
|
||||
auto save_info = service_->server_family().GetLastSaveInfo();
|
||||
resp = Run({"dfly", "load", save_info.file_name});
|
||||
ASSERT_EQ(resp, "OK");
|
||||
|
||||
ASSERT_EQ(2000, CheckedInt({"xlen", "test:0"}));
|
||||
}
|
||||
|
||||
} // namespace dfly
|
||||
|
|
Loading…
Add table
Add a link
Reference in a new issue