def testGroup(self):
    """Verify Group argument serialization independently of a full request."""
    # At least one reducer is mandatory: empty reducer lists are rejected.
    self.assertRaises(ValueError, a.Group, [], [])
    self.assertRaises(ValueError, a.Group, ['foo'], [])

    # Zero fields, single reducer
    grp = a.Group([], r.count())
    self.assertEqual(['GROUPBY', '0', 'REDUCE', 'COUNT', '0'],
                     grp.build_args())

    # A single field may be given as a bare string
    grp = a.Group('foo', r.count())
    self.assertEqual(['GROUPBY', '1', 'foo', 'REDUCE', 'COUNT', '0'],
                     grp.build_args())

    # Multiple fields, single reducer
    grp = a.Group(['foo', 'bar'], r.count())
    self.assertEqual(['GROUPBY', '2', 'foo', 'bar', 'REDUCE', 'COUNT', '0'],
                     grp.build_args())

    # Multiple fields, multiple reducers: one REDUCE clause per reducer
    grp = a.Group(['foo', 'bar'], [r.count(), r.count_distinct('@fld1')])
    self.assertEqual([
        'GROUPBY', '2', 'foo', 'bar',
        'REDUCE', 'COUNT', '0',
        'REDUCE', 'COUNT_DISTINCT', '1', '@fld1'
    ], grp.build_args())
def test_reducers(self):
    """Each reducer factory should expose the argument tuple it was built with."""
    cases = [
        ((), r.count()),
        (('f1', ), r.sum('f1')),
        (('f1', ), r.min('f1')),
        (('f1', ), r.max('f1')),
        (('f1', ), r.avg('f1')),
        (('f1', ), r.tolist('f1')),
        (('f1', ), r.count_distinct('f1')),
        (('f1', ), r.count_distinctish('f1')),
        # Numeric parameters are stringified
        (('f1', '0.95'), r.quantile('f1', 0.95)),
        (('f1', ), r.stddev('f1')),
        (('f1', ), r.first_value('f1')),
        # An Asc/Desc instance adds a BY <field> <dir> clause
        (('f1', 'BY', 'f2', 'ASC'), r.first_value('f1', a.Asc('f2'))),
        # Passing the Asc class itself defaults the BY field to f1
        (('f1', 'BY', 'f1', 'ASC'), r.first_value('f1', a.Asc)),
        (('f1', '50'), r.random_sample('f1', 50)),
    ]
    for expected, reducer in cases:
        self.assertEqual(expected, reducer.args)
def testAggRequest(self):
    """Exercise AggregateRequest serialization for group_by and sort_by."""
    # A bare request serializes to just the default '*' query.
    request = a.AggregateRequest()
    self.assertEqual(['*'], request.build_args())

    # group_by with a single reducer
    request = a.AggregateRequest().group_by('@foo', r.count())
    self.assertEqual(['*', 'GROUPBY', '1', '@foo', 'REDUCE', 'COUNT', '0'],
                     request.build_args())

    # group_by followed by sort_by on the grouping key
    request = (a.AggregateRequest()
               .group_by('@foo', r.count())
               .sort_by('@foo'))
    self.assertEqual([
        '*', 'GROUPBY', '1', '@foo', 'REDUCE', 'COUNT', '0',
        'SORTBY', '1', '@foo'
    ], request.build_args())

    # sort_by on a plain field name
    request = a.AggregateRequest().group_by('@foo', r.count()).sort_by('@date')
    self.assertEqual([
        '*', 'GROUPBY', '1', '@foo', 'REDUCE', 'COUNT', '0',
        'SORTBY', '1', '@date'
    ], request.build_args())

    # An explicit descending direction adds DESC and doubles the arg count
    request = a.AggregateRequest().group_by(
        '@foo', r.count()).sort_by(a.Desc('@date'))
    self.assertEqual([
        '*', 'GROUPBY', '1', '@foo', 'REDUCE', 'COUNT', '0',
        'SORTBY', '2', '@date', 'DESC'
    ], request.build_args())

    # Mixed sort directions
    request = a.AggregateRequest().group_by('@foo', r.count()).sort_by(
        a.Desc('@date'), a.Asc('@time'))
    self.assertEqual([
        '*', 'GROUPBY', '1', '@foo', 'REDUCE', 'COUNT', '0',
        'SORTBY', '4', '@date', 'DESC', '@time', 'ASC'
    ], request.build_args())

    # The max keyword appends a MAX clause
    request = a.AggregateRequest().group_by('@foo', r.count()).sort_by(
        a.Desc('@date'), a.Asc('@time'), max=10)
    self.assertEqual([
        '*', 'GROUPBY', '1', '@foo', 'REDUCE', 'COUNT', '0',
        'SORTBY', '4', '@date', 'DESC', '@time', 'ASC', 'MAX', '10'
    ], request.build_args())
def get_movie_group_by(field):
    """Group movies by *field*, counting movies per group.

    Returns a JSON-serializable dict with the total row count and one
    {field_name: value, "nb_of_movies": count} entry per group, sorted
    ascending by the grouping field (first 1000 groups).
    """
    request = aggregations.AggregateRequest("*").group_by(
        "@" + field,
        reducers.count().alias("nb_of_movies")
    ).sort_by(aggregations.Asc("@" + field)).limit(0, 1000)

    result = g.movieIdx.aggregate(request)

    # Each row is a flat [name, value, name, value] list of bytes;
    # decode the strings and coerce the count to int.
    rows = [
        {
            row[0].decode("utf-8"): row[1].decode("utf-8"),
            row[2].decode("utf-8"): int(row[3].decode("utf-8")),
        }
        for row in result.rows
    ]

    return {
        "totalResults": len(result.rows),
        "rows": rows,
    }
def testAggregations(self):
    """Integration test: index three documents and spot-check every reducer."""
    conn = self.redis()
    with conn as _conn:
        client = Client('myIndex', port=conn.port)
        client.redis.flushdb()

        # Index definition and schema
        client.create_index((NumericField('random_num'), TextField('title'),
                             TextField('body'), TextField('parent')))

        # Three documents all sharing parent='redis'
        client.add_document(
            'search',
            title='RediSearch',
            body='Redisearch impements a search engine on top of redis',
            parent='redis',
            random_num=10)
        client.add_document(
            'ai',
            title='RedisAI',
            body='RedisAI executes Deep Learning/Machine Learning models and managing their data.',
            parent='redis',
            random_num=3)
        client.add_document(
            'json',
            title='RedisJson',
            body='RedisJSON implements ECMA-404 The JSON Data Interchange Standard as a native data type.',
            parent='redis',
            random_num=8)

        req = aggregations.AggregateRequest('redis').group_by(
            "@parent",
            reducers.count(),
            reducers.count_distinct('@title'),
            reducers.count_distinctish('@title'),
            reducers.sum("@random_num"),
            reducers.min("@random_num"),
            reducers.max("@random_num"),
            reducers.avg("@random_num"),
            reducers.stddev("random_num"),
            reducers.quantile("@random_num", 0.5),
            reducers.tolist("@title"),
            reducers.first_value("@title"),
            reducers.random_sample("@title", 2),
        )

        row = client.aggregate(req).rows[0]
        # The row is a flat [name, value, ...] list; values sit at odd indices.
        self.assertEqual(len(row), 26)
        self.assertEqual(b'redis', row[1])            # group key
        self.assertEqual(b'3', row[3])                # count
        self.assertEqual(b'3', row[5])                # count_distinct
        self.assertEqual(b'3', row[7])                # count_distinctish
        self.assertEqual(b'21', row[9])               # sum
        self.assertEqual(b'3', row[11])               # min
        self.assertEqual(b'10', row[13])              # max
        self.assertEqual(b'7', row[15])               # avg
        self.assertEqual(b'3.60555127546', row[17])   # stddev
        self.assertEqual(b'10', row[19])              # quantile 0.5
        self.assertEqual([b'RediSearch', b'RedisAI', b'RedisJson'],
                         row[21])                     # tolist
        self.assertEqual(b'RediSearch', row[23])      # first_value
        self.assertEqual(2, len(row[25]))             # random_sample of 2
def agg_by(field):
    """Count records per value of *field*, returning rows sorted by count descending."""
    request = aggregation.AggregateRequest().group_by(
        field,
        reducers.count().alias('my_count')
    ).sort_by(aggregation.Desc('@my_count'))
    return client.aggregate(request).rows
def testAggRequest(self):
    """Serialization of AggregateRequest: group_by, alias, apply, filter, sort_by."""
    # A bare request serializes to just the default '*' query.
    req = a.AggregateRequest()
    self.assertEqual(['*'], req.build_args())

    # group_by with a single reducer
    req = a.AggregateRequest().group_by('@foo', r.count())
    self.assertEqual(['*', 'GROUPBY', '1', '@foo', 'REDUCE', 'COUNT', '0'],
                     req.build_args())

    # An aliased reducer appends an AS clause
    req = a.AggregateRequest().group_by('@foo', r.count().alias('foo_count'))
    self.assertEqual([
        '*', 'GROUPBY', '1', '@foo',
        'REDUCE', 'COUNT', '0', 'AS', 'foo_count'
    ], req.build_args())

    # group_by followed by sort_by on the grouping key
    req = (a.AggregateRequest()
           .group_by('@foo', r.count())
           .sort_by('@foo'))
    self.assertEqual([
        '*', 'GROUPBY', '1', '@foo', 'REDUCE', 'COUNT', '0',
        'SORTBY', '1', '@foo'
    ], req.build_args())

    # An apply step is emitted before the group_by it feeds
    req = (a.AggregateRequest()
           .apply(foo="@bar / 2")
           .group_by('@foo', r.count()))
    self.assertEqual([
        '*', 'APPLY', '@bar / 2', 'AS', 'foo',
        'GROUPBY', '1', '@foo', 'REDUCE', 'COUNT', '0'
    ], req.build_args())

    # filter appended after the group_by
    req = a.AggregateRequest().group_by('@foo',
                                        r.count()).filter("@foo=='bar'")
    self.assertEqual([
        '*', 'GROUPBY', '1', '@foo', 'REDUCE', 'COUNT', '0',
        'FILTER', "@foo=='bar'"
    ], req.build_args())

    # filter at an earlier pipeline position, before the group_by
    req = a.AggregateRequest().filter("@foo=='bar'").group_by(
        '@foo', r.count())
    self.assertEqual([
        '*', 'FILTER', "@foo=='bar'",
        'GROUPBY', '1', '@foo', 'REDUCE', 'COUNT', '0'
    ], req.build_args())

    # A list of expressions expands to one FILTER clause each
    req = a.AggregateRequest().filter(
        ["@foo=='bar'", "@foo2=='bar2'"]).group_by('@foo', r.count())
    self.assertEqual([
        '*', 'FILTER', "@foo=='bar'", 'FILTER', "@foo2=='bar2'",
        'GROUPBY', '1', '@foo', 'REDUCE', 'COUNT', '0'
    ], req.build_args())

    # sort_by on a plain field name
    req = a.AggregateRequest().group_by('@foo', r.count()).sort_by('@date')
    self.assertEqual([
        '*', 'GROUPBY', '1', '@foo', 'REDUCE', 'COUNT', '0',
        'SORTBY', '1', '@date'
    ], req.build_args())

    # An explicit descending direction adds DESC and doubles the arg count
    req = a.AggregateRequest().group_by('@foo',
                                        r.count()).sort_by(a.Desc('@date'))
    self.assertEqual([
        '*', 'GROUPBY', '1', '@foo', 'REDUCE', 'COUNT', '0',
        'SORTBY', '2', '@date', 'DESC'
    ], req.build_args())

    # Mixed sort directions
    req = a.AggregateRequest().group_by('@foo', r.count()).sort_by(
        a.Desc('@date'), a.Asc('@time'))
    self.assertEqual([
        '*', 'GROUPBY', '1', '@foo', 'REDUCE', 'COUNT', '0',
        'SORTBY', '4', '@date', 'DESC', '@time', 'ASC'
    ], req.build_args())

    # The max keyword appends a MAX clause
    req = a.AggregateRequest().group_by('@foo', r.count()).sort_by(
        a.Desc('@date'), a.Asc('@time'), max=10)
    self.assertEqual([
        '*', 'GROUPBY', '1', '@foo', 'REDUCE', 'COUNT', '0',
        'SORTBY', '4', '@date', 'DESC', '@time', 'ASC', 'MAX', '10'
    ], req.build_args())