def test_aggregations_sort_by_and_limit(client):
    """SORTBY (with and without direction wrappers, with MAX) and LIMIT
    behave as expected on FT.AGGREGATE results."""
    client.ft().create_index(
        (
            TextField("t1"),
            TextField("t2"),
        )
    )

    client.ft().client.hset("doc1", mapping={"t1": "a", "t2": "b"})
    client.ft().client.hset("doc2", mapping={"t1": "b", "t2": "a"})

    # sort_by with explicit SortDirection wrappers (Asc / Desc)
    request = aggregations.AggregateRequest("*").sort_by(
        aggregations.Asc("@t2"), aggregations.Desc("@t1")
    )
    rows = client.ft().aggregate(request).rows
    assert rows[0] == ["t2", "a", "t1", "b"]
    assert rows[1] == ["t2", "b", "t1", "a"]

    # sort_by with a bare field name defaults to ascending order
    request = aggregations.AggregateRequest("*").sort_by("@t1")
    rows = client.ft().aggregate(request).rows
    assert rows[0] == ["t1", "a"]
    assert rows[1] == ["t1", "b"]

    # sort_by with max caps the number of returned rows
    request = aggregations.AggregateRequest("*").sort_by("@t1", max=1)
    rows = client.ft().aggregate(request).rows
    assert len(rows) == 1

    # limit(offset, num) pages past the first row
    request = aggregations.AggregateRequest("*").sort_by("@t1").limit(1, 1)
    rows = client.ft().aggregate(request).rows
    assert len(rows) == 1
    assert rows[0] == ["t1", "b"]
def test_aggregations_apply(client):
    """APPLY computes a derived numeric expression per aggregation row."""
    client.ft().create_index(
        (
            TextField("PrimaryKey", sortable=True),
            NumericField("CreatedDateTimeUTC", sortable=True),
        )
    )

    fixtures = {
        "doc1": {"PrimaryKey": "9::362330", "CreatedDateTimeUTC": "637387878524969984"},
        "doc2": {"PrimaryKey": "9::362329", "CreatedDateTimeUTC": "637387875859270016"},
    }
    for key, fields in fixtures.items():
        client.ft().client.hset(key, mapping=fields)

    # Multiply the stored timestamp by 10 server-side via APPLY
    request = aggregations.AggregateRequest("*").apply(
        CreatedDateTimeUTC="@CreatedDateTimeUTC * 10"
    )
    rows = client.ft().aggregate(request).rows
    assert rows[0] == ["CreatedDateTimeUTC", "6373878785249699840"]
    assert rows[1] == ["CreatedDateTimeUTC", "6373878758592700416"]
def test_aggregations_filter(client):
    """FILTER restricts aggregation rows with boolean expressions over fields."""
    client.ft().create_index(
        (
            TextField("name", sortable=True),
            NumericField("age", sortable=True),
        )
    )

    client.ft().client.hset("doc1", mapping={"name": "bar", "age": "25"})
    client.ft().client.hset("doc2", mapping={"name": "foo", "age": "19"})

    # Compound filter: string equality AND numeric comparison
    request = aggregations.AggregateRequest("*").filter(
        "@name=='foo' && @age < 20"
    )
    result = client.ft().aggregate(request)
    assert len(result.rows) == 1
    assert result.rows[0] == ["name", "foo", "age", "19"]

    # Numeric-only filter combined with sort_by to fix the row order
    request = (
        aggregations.AggregateRequest("*").filter("@age > 15").sort_by("@age")
    )
    result = client.ft().aggregate(request)
    assert len(result.rows) == 2
    assert result.rows[0] == ["age", "19"]
    assert result.rows[1] == ["age", "25"]
def test_aggregations_load(client):
    """LOAD projects specific document fields (or all fields when called
    with no arguments) into the aggregation rows."""
    client.ft().create_index(
        (
            TextField("t1"),
            TextField("t2"),
        )
    )

    client.ft().client.hset("doc1", mapping={"t1": "hello", "t2": "world"})

    # Each single-field load yields only that field in the row
    for field, expected in (("t1", "hello"), ("t2", "world")):
        request = aggregations.AggregateRequest("*").load(field)
        assert client.ft().aggregate(request).rows[0] == [field, expected]

    # load() with no arguments loads every field (LOAD *)
    request = aggregations.AggregateRequest("*").load()
    assert client.ft().aggregate(request).rows[0] == [
        "t1", "hello", "t2", "world"
    ]
def test_profile(client):
    """FT.PROFILE returns both the result set and execution details for a
    Query and for an AggregateRequest."""
    client.ft().create_index((TextField("t"),))
    client.ft().client.hset("1", "t", "hello")
    client.ft().client.hset("2", "t", "world")

    # Profile a plain search Query: union of two term iterators
    query = Query("hello|world").no_content()
    result, details = client.ft().profile(query)
    assert details["Iterators profile"]["Counter"] == 2.0
    assert len(details["Iterators profile"]["Child iterators"]) == 2
    assert details["Iterators profile"]["Type"] == "UNION"
    assert details["Parsing time"] < 0.5
    assert len(result.docs) == 2  # check also the search result

    # Profile an AggregateRequest: wildcard scan feeding LOAD + APPLY
    request = (
        aggregations.AggregateRequest("*")
        .load("t")
        .apply(prefix="startswith(@t, 'hel')")
    )
    result, details = client.ft().profile(request)
    assert details["Iterators profile"]["Counter"] == 2.0
    assert details["Iterators profile"]["Type"] == "WILDCARD"
    assert details["Parsing time"] < 0.5
    assert len(result.rows) == 2  # check also the search result
def test_aggregations_groupby(client):
    """GROUPBY with each reducer (count, distinct, sum/min/max/avg, stddev,
    quantile, tolist, first_value, random_sample) over a 3-document corpus."""
    # Creating the index definition and schema
    client.ft().create_index(
        (
            NumericField("random_num"),
            TextField("title"),
            TextField("body"),
            TextField("parent"),
        )
    )

    # Indexing the documents (random_num values: 10, 3, 8)
    client.ft().add_document(
        "search",
        title="RediSearch",
        body="Redisearch impements a search engine on top of redis",
        parent="redis",
        random_num=10,
    )
    client.ft().add_document(
        "ai",
        title="RedisAI",
        body="RedisAI executes Deep Learning/Machine Learning models and managing their data.",  # noqa
        parent="redis",
        random_num=3,
    )
    client.ft().add_document(
        "json",
        title="RedisJson",
        body="RedisJSON implements ECMA-404 The JSON Data Interchange Standard as a native data type.",  # noqa
        parent="redis",
        random_num=8,
    )

    def first_row(reducer):
        # Run one GROUPBY @parent aggregation and return its first row.
        request = aggregations.AggregateRequest("redis").group_by(
            "@parent",
            reducer,
        )
        return client.ft().aggregate(request).rows[0]

    # Reducers whose row is [parent, "redis", <alias>, expected_value].
    # NOTE(review): stddev is passed "random_num" without the "@" prefix,
    # unlike the other reducers — reproduced as-is; server accepts it.
    simple_cases = [
        (reducers.count(), "3"),
        (reducers.count_distinct("@title"), "3"),
        (reducers.count_distinctish("@title"), "3"),
        (reducers.sum("@random_num"), "21"),  # 10+8+3
        (reducers.min("@random_num"), "3"),  # min(10,8,3)
        (reducers.max("@random_num"), "10"),  # max(10,8,3)
        (reducers.avg("@random_num"), "7"),  # (10+3+8)/3
        (reducers.stddev("random_num"), "3.60555127546"),
        (reducers.quantile("@random_num", 0.5), "8"),  # median of 3,8,10
        (reducers.tolist("@title"), ["RediSearch", "RedisAI", "RedisJson"]),
    ]
    for reducer, expected in simple_cases:
        row = first_row(reducer)
        assert row[1] == "redis"
        assert row[3] == expected

    # first_value with an alias: full row is fully deterministic
    row = first_row(reducers.first_value("@title").alias("first"))
    assert row == ["parent", "redis", "first", "RediSearch"]

    # random_sample: only the sample size and membership are deterministic
    row = first_row(reducers.random_sample("@title", 2).alias("random"))
    assert row[1] == "redis"
    assert row[2] == "random"
    assert len(row[3]) == 2
    assert row[3][0] in ["RediSearch", "RedisAI", "RedisJson"]