fix(search_family): Fix index loading in the FT.SEARCH and FT.AGGREGATE commands (#3955)

fixes dragonflydb#3782, dragonflydb#3659

Signed-off-by: Stsiapan Bahrytsevich <stefan@dragonflydb.io>
Stepan Bagritsevich 2024-10-27 21:14:02 +01:00 committed by GitHub
parent 851e43211e
commit 5dcad859b0
8 changed files with 270 additions and 98 deletions


@@ -629,6 +629,15 @@ vector<pair<string, SortableValue>> FieldIndices::ExtractStoredValues(DocId doc)
   return out;
 }
 
+absl::flat_hash_set<std::string_view> FieldIndices::GetSortIndiciesFields() const {
+  absl::flat_hash_set<std::string_view> fields_idents;
+  fields_idents.reserve(sort_indices_.size());
+  for (const auto& [ident, _] : sort_indices_) {
+    fields_idents.insert(ident);
+  }
+  return fields_idents;
+}
+
 SearchAlgorithm::SearchAlgorithm() = default;
 SearchAlgorithm::~SearchAlgorithm() = default;
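
Note: `GetSortIndiciesFields()` is consumed by `ShardDocIndex::SearchForAggregator` further down; identifiers that already live in a sort index are served via `ExtractStoredValues` and skipped when loading from the document. A minimal standalone sketch of the underlying pattern, with std containers standing in for the absl ones (not Dragonfly code):

```cpp
// Sketch only: collect a map's keys into a set of string_views. The views
// alias the map's key storage, so the set must not outlive the map -- the
// same lifetime rule applies to GetSortIndiciesFields() and sort_indices_.
#include <cassert>
#include <map>
#include <string>
#include <string_view>
#include <unordered_set>

int main() {
  std::map<std::string, int> sort_indices{{"price", 0}, {"year", 1}};

  std::unordered_set<std::string_view> idents;
  idents.reserve(sort_indices.size());
  for (const auto& [ident, index] : sort_indices)
    idents.insert(ident);  // view into the map's key, no copy

  assert(idents.count("price") == 1 && idents.count("year") == 1);
}
```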


@@ -90,6 +90,8 @@ class FieldIndices {
   // Extract values stored in sort indices
   std::vector<std::pair<std::string, SortableValue>> ExtractStoredValues(DocId doc) const;
 
+  absl::flat_hash_set<std::string_view> GetSortIndiciesFields() const;
+
  private:
   void CreateIndices(PMR_NS::memory_resource* mr);
   void CreateSortIndices(PMR_NS::memory_resource* mr);


@@ -79,8 +79,8 @@ search::SortableValue ExtractSortableValueFromJson(const search::Schema& schema,
 
 }  // namespace
 
-SearchDocData BaseAccessor::Serialize(const search::Schema& schema,
-                                      const FieldsList& fields) const {
+SearchDocData BaseAccessor::Serialize(
+    const search::Schema& schema, absl::Span<const SearchField<std::string_view>> fields) const {
   SearchDocData out{};
   for (const auto& [fident, fname] : fields) {
     out[fname] = ExtractSortableValue(schema, fident, absl::StrJoin(GetStrings(fident), ","));
@@ -248,14 +248,14 @@ JsonAccessor::JsonPathContainer* JsonAccessor::GetPath(std::string_view field) c
 }
 
 SearchDocData JsonAccessor::Serialize(const search::Schema& schema) const {
-  FieldsList fields{};
+  SearchFieldsList fields{};
   for (const auto& [fname, fident] : schema.field_names)
     fields.emplace_back(fident, fname);
   return Serialize(schema, fields);
 }
 
-SearchDocData JsonAccessor::Serialize(const search::Schema& schema,
-                                      const FieldsList& fields) const {
+SearchDocData JsonAccessor::Serialize(
+    const search::Schema& schema, absl::Span<const SearchField<std::string_view>> fields) const {
   SearchDocData out{};
   for (const auto& [ident, name] : fields) {
     if (auto* path = GetPath(ident); path) {


@@ -28,7 +28,8 @@ struct BaseAccessor : public search::DocumentAccessor {
   virtual SearchDocData Serialize(const search::Schema& schema) const = 0;
 
   // Serialize selected fields
-  virtual SearchDocData Serialize(const search::Schema& schema, const FieldsList& fields) const;
+  virtual SearchDocData Serialize(const search::Schema& schema,
+                                  absl::Span<const SearchField<std::string_view>> fields) const;
 
   /*
   Serialize the whole type, the default implementation is to serialize all fields.
@@ -78,7 +79,8 @@ struct JsonAccessor : public BaseAccessor {
   VectorInfo GetVector(std::string_view field) const override;
 
   // The JsonAccessor works with structured types and not plain strings, so an overload is needed
-  SearchDocData Serialize(const search::Schema& schema, const FieldsList& fields) const override;
+  SearchDocData Serialize(const search::Schema& schema,
+                          absl::Span<const SearchField<std::string_view>> fields) const override;
 
   SearchDocData Serialize(const search::Schema& schema) const override;
   SearchDocData SerializeDocument(const search::Schema& schema) const override;


@@ -62,7 +62,7 @@ bool SerializedSearchDoc::operator>=(const SerializedSearchDoc& other) const {
 
 bool SearchParams::ShouldReturnField(std::string_view field) const {
   auto cb = [field](const auto& entry) { return entry.first == field; };
-  return !return_fields.fields || any_of(return_fields->begin(), return_fields->end(), cb);
+  return !return_fields || any_of(return_fields->begin(), return_fields->end(), cb);
 }
 
 string_view SearchFieldTypeToString(search::SchemaField::FieldType type) {
@@ -211,6 +211,17 @@ bool ShardDocIndex::Matches(string_view key, unsigned obj_code) const {
   return base_->Matches(key, obj_code);
 }
 
+SearchFieldsList ToSV(const std::optional<OwnedSearchFieldsList>& fields) {
+  SearchFieldsList sv_fields;
+  if (fields) {
+    sv_fields.reserve(fields->size());
+    for (const auto& [fident, fname] : fields.value()) {
+      sv_fields.emplace_back(fident, fname);
+    }
+  }
+  return sv_fields;
+}
+
 SearchResult ShardDocIndex::Search(const OpArgs& op_args, const SearchParams& params,
                                    search::SearchAlgorithm* search_algo) const {
   auto& db_slice = op_args.GetDbSlice();
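
`ToSV` borrows `string_view` pairs from the owned list inside `SearchParams`, so the owned list must outlive the views; here `params` outlives the whole `Search()` call, so the borrow is safe. A standalone sketch of that borrowing (not Dragonfly code; `Owned`/`Views` are illustrative stand-ins for the aliases above):

```cpp
// Sketch only: build string_view pairs over an optional list of owned
// strings. The views are valid only while the owned list is alive.
#include <cassert>
#include <optional>
#include <string>
#include <string_view>
#include <utility>
#include <vector>

using Owned = std::vector<std::pair<std::string, std::string>>;
using Views = std::vector<std::pair<std::string_view, std::string_view>>;

Views ToSV(const std::optional<Owned>& fields) {
  Views sv_fields;
  if (fields) {
    sv_fields.reserve(fields->size());
    for (const auto& [ident, name] : *fields)
      sv_fields.emplace_back(ident, name);  // views into the owned strings
  }
  return sv_fields;
}

int main() {
  std::optional<Owned> fields = Owned{{"title", "headline"}};
  Views views = ToSV(fields);
  assert(views.size() == 1 && views[0].second == "headline");
}
```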
@@ -219,6 +230,9 @@ SearchResult ShardDocIndex::Search(const OpArgs& op_args, const SearchParams& pa
   if (!search_results.error.empty())
     return SearchResult{facade::ErrorReply{std::move(search_results.error)}};
 
+  SearchFieldsList fields_to_load =
+      ToSV(params.ShouldReturnAllFields() ? params.load_fields : params.return_fields);
+
   vector<SerializedSearchDoc> out;
   out.reserve(search_results.ids.size());
@@ -235,15 +249,19 @@ SearchResult ShardDocIndex::Search(const OpArgs& op_args, const SearchParams& pa
     auto accessor = GetAccessor(op_args.db_cntx, (*it)->second);
 
     SearchDocData doc_data;
-    if (params.return_fields.ShouldReturnAllFields()) {
+    if (params.ShouldReturnAllFields()) {
       /*
-      In this case we need to load the whole document.
+      In this case we need to load the whole document plus any LOAD fields.
       For JSON indexes it would be {"$", <the whole document as string>}
       */
       doc_data = accessor->SerializeDocument(base_->schema);
+
+      SearchDocData loaded_fields = accessor->Serialize(base_->schema, fields_to_load);
+      doc_data.insert(std::make_move_iterator(loaded_fields.begin()),
+                      std::make_move_iterator(loaded_fields.end()));
     } else {
-      /* Load only selected fields */
-      doc_data = accessor->Serialize(base_->schema, params.return_fields.GetFields());
+      /* Load only specific fields */
+      doc_data = accessor->Serialize(base_->schema, fields_to_load);
     }
 
     auto score = search_results.scores.empty() ? monostate{} : std::move(search_results.scores[i]);
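
The merge into `doc_data` uses `std::make_move_iterator` so the loaded values are moved rather than copied, and map `insert` keeps the already present entries (so the `"$"` document entry wins on a key clash). A standalone sketch of the idiom, with a plain `std::map` standing in for `SearchDocData`:

```cpp
// Sketch only: move entries from one map into another. Existing keys in the
// destination are kept; insert() skips duplicates instead of overwriting.
#include <cassert>
#include <iterator>
#include <map>
#include <string>

int main() {
  std::map<std::string, std::string> doc_data{{"$", R"({"a":"1"})"}};
  std::map<std::string, std::string> loaded{{"b", "one"}};

  doc_data.insert(std::make_move_iterator(loaded.begin()),
                  std::make_move_iterator(loaded.end()));

  assert(doc_data.size() == 2 && doc_data.at("b") == "one");
}
```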
@@ -263,6 +281,9 @@ vector<SearchDocData> ShardDocIndex::SearchForAggregator(
   if (!search_results.error.empty())
     return {};
 
+  SearchFieldsList fields_to_load =
+      GetFieldsToLoad(params.load_fields, indices_->GetSortIndiciesFields());
+
   vector<absl::flat_hash_map<string, search::SortableValue>> out;
   for (DocId doc : search_results.ids) {
     auto key = key_index_.Get(doc);
@@ -274,14 +295,7 @@ vector<SearchDocData> ShardDocIndex::SearchForAggregator(
     auto accessor = GetAccessor(op_args.db_cntx, (*it)->second);
     auto extracted = indices_->ExtractStoredValues(doc);
 
-    SearchDocData loaded;
-    if (params.load_fields.ShouldReturnAllFields()) {
-      // Load all fields
-      loaded = accessor->Serialize(base_->schema);
-    } else {
-      // Load only selected fields
-      loaded = accessor->Serialize(base_->schema, params.load_fields.GetFields());
-    }
+    SearchDocData loaded = accessor->Serialize(base_->schema, fields_to_load);
 
     out.emplace_back(make_move_iterator(extracted.begin()), make_move_iterator(extracted.end()));
     out.back().insert(make_move_iterator(loaded.begin()), make_move_iterator(loaded.end()));
@@ -290,6 +304,30 @@ vector<SearchDocData> ShardDocIndex::SearchForAggregator(
 
   return out;
 }
 
+SearchFieldsList ShardDocIndex::GetFieldsToLoad(
+    const std::optional<OwnedSearchFieldsList>& load_fields,
+    const absl::flat_hash_set<std::string_view>& skip_fields) const {
+  // identifier to short name
+  absl::flat_hash_map<std::string_view, std::string_view> unique_fields;
+  unique_fields.reserve(base_->schema.field_names.size());
+
+  for (const auto& [fname, fident] : base_->schema.field_names) {
+    if (!skip_fields.contains(fident)) {
+      unique_fields[fident] = fname;
+    }
+  }
+
+  if (load_fields) {
+    for (const auto& [fident, fname] : load_fields.value()) {
+      if (!skip_fields.contains(fident)) {
+        unique_fields[fident] = fname;
+      }
+    }
+  }
+
+  return {unique_fields.begin(), unique_fields.end()};
+}
+
 DocIndexInfo ShardDocIndex::GetInfo() const {
   return {*base_, key_index_.Size()};
 }
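
The merge logic above reads as: start from every schema field, overlay the user's LOAD list (a LOAD alias wins over the schema short name because it is inserted last), then drop anything in `skip_fields`. A standalone sketch under those assumptions, with std containers in place of the absl ones:

```cpp
// Sketch only: union of schema fields and LOAD fields, minus skip_fields.
#include <cassert>
#include <optional>
#include <string>
#include <unordered_map>
#include <unordered_set>
#include <utility>
#include <vector>

using Field = std::pair<std::string, std::string>;  // identifier, short name

std::vector<Field> GetFieldsToLoad(
    const std::unordered_map<std::string, std::string>& schema_fields,  // name -> ident
    const std::optional<std::vector<Field>>& load_fields,
    const std::unordered_set<std::string>& skip_fields) {
  std::unordered_map<std::string, std::string> unique;  // ident -> name
  for (const auto& [name, ident] : schema_fields)
    if (!skip_fields.count(ident))
      unique[ident] = name;

  if (load_fields)
    for (const auto& [ident, name] : *load_fields)
      if (!skip_fields.count(ident))
        unique[ident] = name;  // LOAD alias overrides the schema short name

  return {unique.begin(), unique.end()};
}

int main() {
  auto out = GetFieldsToLoad({{"a", "a"}, {"b", "b"}},
                             std::vector<Field>{{"b", "alias_b"}}, {"a"});
  assert(out.size() == 1 && out[0].first == "b" && out[0].second == "alias_b");
}
```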


@@ -52,36 +52,10 @@ struct SearchResult {
   std::optional<facade::ErrorReply> error;
 };
 
-using FieldsList = std::vector<std::pair<std::string /*identifier*/, std::string /*short name*/>>;
+template <typename T> using SearchField = std::pair<T /*identifier*/, T /*short name*/>;
 
-struct SelectedFields {
-  /*
-  1. If not set -> return all fields
-  2. If set but empty -> no fields should be returned
-  3. If set and not empty -> return only these fields
-  */
-  std::optional<FieldsList> fields;
-
-  bool ShouldReturnAllFields() const {
-    return !fields.has_value();
-  }
-
-  bool ShouldReturnNoFields() const {
-    return fields && fields->empty();
-  }
-
-  FieldsList* operator->() {
-    return &fields.value();
-  }
-
-  const FieldsList* operator->() const {
-    return &fields.value();
-  }
-
-  const FieldsList& GetFields() const {
-    return fields.value();
-  }
-};
+using SearchFieldsList = std::vector<SearchField<std::string_view>>;
+using OwnedSearchFieldsList = std::vector<SearchField<std::string>>;
 
 struct SearchParams {
   // Parameters for "LIMIT offset total": select total amount documents with a specific offset from
@@ -89,13 +63,29 @@ struct SearchParams {
   size_t limit_offset = 0;
   size_t limit_total = 10;
 
-  // Set but empty means no fields should be returned
-  SelectedFields return_fields;
+  /*
+  1. If not set -> return all fields
+  2. If set but empty -> no fields should be returned
+  3. If set and not empty -> return only these fields
+  */
+  std::optional<OwnedSearchFieldsList> return_fields;
+
+  /*
+  Fields that should also be loaded from the document.
+  Only one of load_fields and return_fields should be set.
+  */
+  std::optional<OwnedSearchFieldsList> load_fields;
 
   std::optional<search::SortOption> sort_option;
   search::QueryParams query_params;
 
+  bool ShouldReturnAllFields() const {
+    return !return_fields.has_value();
+  }
+
   bool IdsOnly() const {
-    return return_fields.ShouldReturnNoFields();
+    return return_fields && return_fields->empty();
   }
 
   bool ShouldReturnField(std::string_view field) const;
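
The struct now encodes three states in one `std::optional`, replacing the removed `SelectedFields` wrapper. A standalone sketch of the semantics (`Field` is an illustrative stand-in for the owned identifier/short-name pair):

```cpp
// Sketch only: the three states of std::optional<OwnedSearchFieldsList>.
#include <cassert>
#include <optional>
#include <string>
#include <utility>
#include <vector>

using Field = std::pair<std::string, std::string>;  // identifier, short name

bool ShouldReturnAllFields(const std::optional<std::vector<Field>>& f) {
  return !f.has_value();  // 1. not set -> return all fields
}

bool IdsOnly(const std::optional<std::vector<Field>>& f) {
  return f && f->empty();  // 2. set but empty -> no fields (NOCONTENT)
}

int main() {
  std::optional<std::vector<Field>> fields;
  assert(ShouldReturnAllFields(fields) && !IdsOnly(fields));

  fields.emplace();  // NOCONTENT: set but empty
  assert(!ShouldReturnAllFields(fields) && IdsOnly(fields));

  fields->emplace_back("title", "title");  // 3. RETURN 1 title
  assert(!ShouldReturnAllFields(fields) && !IdsOnly(fields));
}
```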
@@ -105,7 +95,7 @@ struct AggregateParams {
   std::string_view index, query;
   search::QueryParams params;
 
-  SelectedFields load_fields;
+  std::optional<OwnedSearchFieldsList> load_fields;
   std::vector<aggregate::PipelineStep> steps;
 };
@@ -179,6 +169,11 @@ class ShardDocIndex {
   io::Result<StringVec, facade::ErrorReply> GetTagVals(std::string_view field) const;
 
  private:
+  // Returns the union of the already indexed fields and load_fields, excluding skip_fields.
+  // load_fields must not be destroyed while the result of this function is in use.
+  SearchFieldsList GetFieldsToLoad(const std::optional<OwnedSearchFieldsList>& load_fields,
+                                   const absl::flat_hash_set<std::string_view>& skip_fields) const;
+
   // Clears internal data. Traverses all matching documents and assigns ids.
   void Rebuild(const OpArgs& op_args, PMR_NS::memory_resource* mr);


@@ -183,6 +183,39 @@ optional<search::Schema> ParseSchemaOrReply(DocIndex::DataType type, CmdArgParse
 #pragma GCC diagnostic pop
 #endif
 
+std::string_view ParseField(CmdArgParser* parser) {
+  std::string_view field = parser->Next();
+  if (!field.empty() && field.front() == '@') {
+    field.remove_prefix(1);  // remove leading @ if exists
+  }
+  return field;
+}
+
+std::string_view ParseFieldWithAtSign(CmdArgParser* parser) {
+  std::string_view field = parser->Next();
+  if (!field.empty() && field.front() == '@') {
+    field.remove_prefix(1);  // remove leading @
+  } else {
+    // Temporary warning until we can throw an error
+    LOG(WARNING) << "bad arguments: Field name '" << field << "' should start with '@'. '@" << field
+                 << "' is expected";
+  }
+  return field;
+}
+
+void ParseLoadFields(CmdArgParser* parser, std::optional<OwnedSearchFieldsList>* load_fields) {
+  size_t num_fields = parser->Next<size_t>();
+  if (!load_fields->has_value()) {
+    load_fields->emplace();
+  }
+  while (num_fields--) {
+    string_view field = ParseField(parser);
+    string_view alias = parser->Check("AS") ? parser->Next() : field;
+    load_fields->value().emplace_back(field, alias);
+  }
+}
+
 search::QueryParams ParseQueryParams(CmdArgParser* parser) {
   search::QueryParams params;
   size_t num_args = parser->Next<size_t>();
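
Note the `!field.empty()` guard in both helpers: the previous versions (removed further down) called `field.front()` on a possibly empty token, which is undefined behavior for an empty `string_view`. A standalone sketch of the '@' handling (`StripLeadingAt` is an illustrative name, not Dragonfly code):

```cpp
// Sketch only: strip an optional leading '@', safely handling empty tokens.
#include <cassert>
#include <string_view>

std::string_view StripLeadingAt(std::string_view field) {
  if (!field.empty() && field.front() == '@')
    field.remove_prefix(1);
  return field;
}

int main() {
  assert(StripLeadingAt("@title") == "title");
  assert(StripLeadingAt("title") == "title");  // '@' is optional for LOAD
  assert(StripLeadingAt("") == "");            // empty token is now safe
}
```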
@@ -201,17 +234,30 @@ optional<SearchParams> ParseSearchParamsOrReply(CmdArgParser parser, ConnectionC
     if (parser.Check("LIMIT")) {
       params.limit_offset = parser.Next<size_t>();
       params.limit_total = parser.Next<size_t>();
+    } else if (parser.Check("LOAD")) {
+      if (params.return_fields) {
+        cntx->SendError("LOAD cannot be applied after RETURN");
+        return std::nullopt;
+      }
+
+      ParseLoadFields(&parser, &params.load_fields);
     } else if (parser.Check("RETURN")) {
+      if (params.load_fields) {
+        cntx->SendError("RETURN cannot be applied after LOAD");
+        return std::nullopt;
+      }
+
       // RETURN {num} [{ident} AS {name}...]
       size_t num_fields = parser.Next<size_t>();
-      params.return_fields.fields.emplace();
+      params.return_fields.emplace();
+
       while (params.return_fields->size() < num_fields) {
         string_view ident = parser.Next();
         string_view alias = parser.Check("AS") ? parser.Next() : ident;
         params.return_fields->emplace_back(ident, alias);
       }
     } else if (parser.Check("NOCONTENT")) {  // NOCONTENT
-      params.return_fields.fields.emplace();
+      params.load_fields.emplace();
+      params.return_fields.emplace();
     } else if (parser.Check("PARAMS")) {  // [PARAMS num(ignored) name(ignored) knn_vector]
       params.query_params = ParseQueryParams(&parser);
     } else if (parser.Check("SORTBY")) {
@@ -230,26 +276,6 @@ optional<SearchParams> ParseSearchParamsOrReply(CmdArgParser parser, ConnectionC
   return params;
 }
 
-std::string_view ParseField(CmdArgParser* parser) {
-  std::string_view field = parser->Next();
-  if (field.front() == '@') {
-    field.remove_prefix(1);  // remove leading @ if exists
-  }
-  return field;
-}
-
-std::string_view ParseFieldWithAtSign(CmdArgParser* parser) {
-  std::string_view field = parser->Next();
-  if (field.front() != '@') {
-    // Temporary warning until we can throw an error
-    LOG(WARNING) << "bad arguments: Field name '" << field << "' should start with '@'. '@" << field
-                 << "' is expected";
-  } else {
-    field.remove_prefix(1);  // remove leading @
-  }
-  return field;
-}
-
 optional<AggregateParams> ParseAggregatorParamsOrReply(CmdArgParser parser,
                                                        ConnectionContext* cntx) {
   AggregateParams params;
@@ -258,16 +284,7 @@ optional<AggregateParams> ParseAggregatorParamsOrReply(CmdArgParser parser,
 
   // Parse LOAD count field [field ...]
   // LOAD options are at the beginning of the query, so we need to parse them first
   while (parser.HasNext() && parser.Check("LOAD")) {
-    size_t num_fields = parser.Next<size_t>();
-    if (!params.load_fields.fields) {
-      params.load_fields.fields.emplace();
-    }
-
-    while (num_fields--) {
-      string_view field = ParseField(&parser);
-      string_view alias = parser.Check("AS") ? parser.Next() : field;
-      params.load_fields->emplace_back(field, alias);
-    }
+    ParseLoadFields(&parser, &params.load_fields);
   }
 
   while (parser.HasNext()) {


@@ -137,6 +137,49 @@ template <typename... Matchers> auto IsUnordArrayWithSize(Matchers... matchers)
   return IsUnordArrayWithSizeMatcher(std::make_tuple(matchers...));
 }
 
+template <typename Expected, size_t... Is>
+void BuildKvMatchers(std::vector<Matcher<std::pair<std::string, RespExpr>>>& kv_matchers,
+                     const Expected& expected, std::index_sequence<Is...>) {
+  std::initializer_list<int>{
+      (kv_matchers.emplace_back(Pair(std::get<Is * 2>(expected), std::get<Is * 2 + 1>(expected))),
+       0)...};
+}
+
+MATCHER_P(IsMapWithSizeMatcher, expected, "") {
+  if (arg.type != RespExpr::ARRAY) {
+    *result_listener << "Wrong response type: " << arg.type;
+    return false;
+  }
+
+  constexpr size_t expected_size = std::tuple_size<decltype(expected)>::value;
+  constexpr size_t expected_pairs_number = expected_size / 2;
+
+  auto result = arg.GetVec();
+  if (result.size() != expected_size + 1 || result.size() % 2 != 1) {
+    *result_listener << "Wrong resp array size: " << result.size();
+    return false;
+  }
+
+  if (result[0].GetInt() != expected_pairs_number) {
+    *result_listener << "Wrong pairs count: " << result[0].GetInt().value_or(-1);
+    return false;
+  }
+
+  std::vector<std::pair<std::string, RespExpr>> received_pairs;
+  for (size_t i = 1; i < result.size(); i += 2) {
+    received_pairs.emplace_back(result[i].GetString(), result[i + 1]);
+  }
+
+  std::vector<Matcher<std::pair<std::string, RespExpr>>> kv_matchers;
+  BuildKvMatchers(kv_matchers, expected, std::make_index_sequence<expected_pairs_number>{});
+
+  return ExplainMatchResult(UnorderedElementsAreArray(kv_matchers), received_pairs,
+                            result_listener);
+}
+
+template <typename... Args> auto IsMapWithSize(Args... args) {
+  return IsMapWithSizeMatcher(std::make_tuple(args...));
+}
+
 TEST_F(SearchFamilyTest, CreateDropListIndex) {
   EXPECT_EQ(Run({"ft.create", "idx-1", "ON", "HASH", "PREFIX", "1", "prefix-1"}), "OK");
   EXPECT_EQ(Run({"ft.create", "idx-2", "ON", "JSON", "PREFIX", "1", "prefix-2"}), "OK");
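
`BuildKvMatchers` flattens a tuple `(k0, v0, k1, v1, ...)` into key/value pair matchers by expanding an `std::index_sequence`. A standalone sketch of that expansion; the test helper uses the C++14 `initializer_list` trick, while this sketch substitutes a C++17 fold expression for brevity:

```cpp
// Sketch only: fold a flat tuple into a vector of key/value pairs.
#include <cassert>
#include <string>
#include <tuple>
#include <utility>
#include <vector>

template <typename Tuple, size_t... Is>
std::vector<std::pair<std::string, std::string>> ToPairs(const Tuple& t,
                                                         std::index_sequence<Is...>) {
  std::vector<std::pair<std::string, std::string>> out;
  // Each Is contributes the element pair at positions (2*Is, 2*Is + 1).
  (out.emplace_back(std::get<Is * 2>(t), std::get<Is * 2 + 1>(t)), ...);
  return out;
}

int main() {
  auto pairs = ToPairs(std::make_tuple("name", "alice", "age", "30"),
                       std::make_index_sequence<2>{});
  assert(pairs.size() == 2);
  assert(pairs[0].first == "name" && pairs[1].second == "30");
}
```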
@@ -172,7 +215,7 @@ TEST_F(SearchFamilyTest, CreateDropDifferentDatabases) {
 
   // ft.search must work on the other database
   resp = Run({"ft.search", "idx-1", "*"});
-  EXPECT_THAT(resp, IsArray(IntArg(1), "doc-0", IsArray("name", "Name of 0")));
+  EXPECT_THAT(resp, IsMapWithSize("doc-0", IsMap("name", "Name of 0")));
 
   // ft.dropindex must work on the other database
   EXPECT_EQ(Run({"ft.dropindex", "idx-1"}), "OK");
@@ -401,12 +444,11 @@ TEST_F(SearchFamilyTest, JsonArrayValues) {
 
   // Test complicated RETURN expression
   auto res = Run(
       {"ft.search", "i1", "@name:bob", "return", "1", "max($.plays[*].score)", "as", "max-score"});
-  EXPECT_THAT(res.GetVec()[2], RespArray(ElementsAre("max-score", "15")));
+  EXPECT_THAT(res, IsMapWithSize("k2", IsMap("max-score", "15")));
 
   // Test that an invalid json path expression omits the field
   res = Run({"ft.search", "i1", "@name:alex", "return", "1", "::??INVALID??::", "as", "retval"});
-  EXPECT_EQ(res.GetVec()[1], "k1");
-  EXPECT_THAT(res.GetVec()[2], RespArray(ElementsAre()));
+  EXPECT_THAT(res, IsMapWithSize("k1", IsMap()));
 }
 
 #endif
@@ -558,9 +600,7 @@ TEST_F(SearchFamilyTest, TestReturn) {
                             "longB", "AS", "justB", "NUMERIC", "longC", "AS", "justC",
                             "NUMERIC", "vector", "VECTOR", "FLAT", "2", "DIM", "1"});
 
-  auto MatchEntry = [](string key, auto... fields) {
-    return RespArray(ElementsAre(IntArg(1), key, IsUnordArray(fields...)));
-  };
+  auto MatchEntry = [](string key, auto... fields) { return IsMapWithSize(key, IsMap(fields...)); };
 
   // Check all fields are returned
   auto resp = Run({"ft.search", "i1", "@justA:0"});
@@ -686,8 +726,8 @@ TEST_F(SearchFamilyTest, Unicode) {
 
   // Check the result is valid
   auto resp = Run({"ft.search", "i1", "λιβελλούλη"});
-  EXPECT_THAT(resp.GetVec()[2].GetVec(),
-              UnorderedElementsAre("visits", "100", "title", "πανίσχυρη ΛΙΒΕΛΛΟΎΛΗ Δίας"));
+  EXPECT_THAT(resp,
+              IsMapWithSize("d:4", IsMap("visits", "100", "title", "πανίσχυρη ΛΙΒΕΛΛΟΎΛΗ Δίας")));
 }
 
 TEST_F(SearchFamilyTest, UnicodeWords) {
@@ -798,7 +838,7 @@ TEST_F(SearchFamilyTest, DocsEditing) {
   EXPECT_EQ(resp, "OK");
 
   resp = Run({"FT.SEARCH", "index", "*"});
-  EXPECT_THAT(resp, IsArray(IntArg(1), "k1", IsArray("$", R"({"a":"1"})")));
+  EXPECT_THAT(resp, IsMapWithSize("k1", IsMap("$", R"({"a":"1"})")));
 
   // Test dump and restore
   resp = Run({"DUMP", "k1"});
@@ -811,18 +851,18 @@ TEST_F(SearchFamilyTest, DocsEditing) {
   EXPECT_EQ(resp, "OK");
 
   resp = Run({"FT.SEARCH", "index", "*"});
-  EXPECT_THAT(resp, IsArray(IntArg(1), "k1", IsArray("$", R"({"a":"1"})")));
+  EXPECT_THAT(resp, IsMapWithSize("k1", IsMap("$", R"({"a":"1"})")));
 
   // Test renaming a key
   EXPECT_EQ(Run({"RENAME", "k1", "new_k1"}), "OK");
   resp = Run({"FT.SEARCH", "index", "*"});
-  EXPECT_THAT(resp, IsArray(IntArg(1), "new_k1", IsArray("$", R"({"a":"1"})")));
+  EXPECT_THAT(resp, IsMapWithSize("new_k1", IsMap("$", R"({"a":"1"})")));
 
   EXPECT_EQ(Run({"RENAME", "new_k1", "k1"}), "OK");
   resp = Run({"FT.SEARCH", "index", "*"});
-  EXPECT_THAT(resp, IsArray(IntArg(1), "k1", IsArray("$", R"({"a":"1"})")));
+  EXPECT_THAT(resp, IsMapWithSize("k1", IsMap("$", R"({"a":"1"})")));
 }
 
 TEST_F(SearchFamilyTest, AggregateGroupBy) {
@@ -1044,4 +1084,73 @@ TEST_F(SearchFamilyTest, FlushSearchIndices) {
   EXPECT_THAT(resp, ErrArg("ERR Index already exists"));
 }
 
+TEST_F(SearchFamilyTest, SearchWithLoadOptionHard) {
+  // Test HASH
+  Run({"HSET", "h1", "a", "1", "b", "one", "first", "h1"});
+  Run({"HSET", "h2", "a", "2", "b", "two", "second", "h2"});
+  Run({"HSET", "h3", "a", "1", "b", "one", "third", "h3"});
+
+  auto resp = Run({"FT.CREATE", "i1", "ON", "HASH", "SCHEMA", "@a", "TAG"});
+  EXPECT_EQ(resp, "OK");
+
+  resp = Run({"FT.SEARCH", "i1", "*", "LOAD", "1", "@b"});
+  EXPECT_THAT(resp, IsMapWithSize("h1", IsMap("a", "1", "b", "one", "first", "h1"), "h2",
+                                  IsMap("a", "2", "b", "two", "second", "h2"), "h3",
+                                  IsMap("a", "1", "b", "one", "third", "h3")));
+
+  // Test JSON
+  Run({"JSON.SET", "j1", ".", R"({"a":"1","b":"one","first":"j1"})"});
+  Run({"JSON.SET", "j2", ".", R"({"a":"2","b":"two","second":"j2"})"});
+  Run({"JSON.SET", "j3", ".", R"({"a":"1","b":"one","third":"j3"})"});
+
+  resp = Run({"FT.CREATE", "i2", "ON", "JSON", "SCHEMA", "$.a", "AS", "a", "TAG"});
+  EXPECT_EQ(resp, "OK");
+
+  resp = Run({"FT.SEARCH", "i2", "*", "LOAD", "1", "$.b", "AS", "b"});
+  EXPECT_THAT(
+      resp, IsMapWithSize("j1", IsMap("b", "\"one\"", "$", R"({"a":"1","b":"one","first":"j1"})"),
+                          "j2", IsMap("b", "\"two\"", "$", R"({"a":"2","b":"two","second":"j2"})"),
+                          "j3", IsMap("b", "\"one\"", "$", R"({"a":"1","b":"one","third":"j3"})")));
+}
+
+TEST_F(SearchFamilyTest, AggregateWithLoadOptionHard) {
+  // Test HASH
+  Run({"HSET", "h1", "word", "item1", "foo", "10", "text", "first key"});
+  Run({"HSET", "h2", "word", "item2", "foo", "20", "text", "second key"});
+
+  auto resp = Run(
+      {"FT.CREATE", "i1", "ON", "HASH", "SCHEMA", "word", "TAG", "foo", "NUMERIC", "text", "TEXT"});
+  EXPECT_EQ(resp, "OK");
+
+  resp = Run({"FT.AGGREGATE", "i1", "*", "LOAD", "2", "foo", "text", "GROUPBY", "2", "@word",
+              "@text", "REDUCE", "SUM", "1", "@foo", "AS", "foo_total"});
+  EXPECT_THAT(resp,
+              IsUnordArrayWithSize(IsMap("foo_total", "20", "word", "item2", "text", "second key"),
+                                   IsMap("foo_total", "10", "word", "item1", "text", "first key")));
+
+  resp = Run({"FT.AGGREGATE", "i1", "*", "LOAD", "1", "@word", "GROUPBY", "1", "@word", "REDUCE",
+              "SUM", "1", "@foo", "AS", "foo_total"});
+  EXPECT_THAT(resp, IsUnordArrayWithSize(IsMap("foo_total", "20", "word", "item2"),
+                                         IsMap("foo_total", "10", "word", "item1")));
+
+  // Test JSON
+  Run({"JSON.SET", "j1", ".", R"({"word":"item1","foo":"10","text":"first key"})"});
+  Run({"JSON.SET", "j2", ".", R"({"word":"item2","foo":"20","text":"second key"})"});
+
+  resp = Run({"FT.CREATE", "i2", "ON", "JSON", "SCHEMA", "$.word", "AS", "word", "TAG", "$.foo",
+              "AS", "foo", "NUMERIC", "$.text", "AS", "text", "TEXT"});
+  EXPECT_EQ(resp, "OK");
+
+  resp = Run({"FT.AGGREGATE", "i2", "*", "LOAD", "2", "foo", "text", "GROUPBY", "2", "@word",
+              "@text", "REDUCE", "SUM", "1", "@foo", "AS", "foo_total"});
+  EXPECT_THAT(resp, IsUnordArrayWithSize(
+                        IsMap("foo_total", "20", "word", "\"item2\"", "text", "\"second key\""),
+                        IsMap("foo_total", "10", "word", "\"item1\"", "text", "\"first key\"")));
+
+  resp = Run({"FT.AGGREGATE", "i2", "*", "LOAD", "1", "@word", "GROUPBY", "1", "@word", "REDUCE",
+              "SUM", "1", "@foo", "AS", "foo_total"});
+  EXPECT_THAT(resp, IsUnordArrayWithSize(IsMap("foo_total", "20", "word", "\"item2\""),
+                                         IsMap("foo_total", "10", "word", "\"item1\"")));
+}
+
 }  // namespace dfly