def search(self, query=None, offset=None, limit=None, order_by=None):
    """Search the Elasticsearch index and return matching model instances.

    :param query: optional condition compiled via
        ``compile_condition('elasticsearch', ...)``; ``None`` matches all.
    :param offset: optional result offset, passed to the client as ``from_``.
    :param limit: optional maximum number of hits to return.
    :param order_by: optional ``(field, direction)`` pair, joined into the
        ``field:direction`` sort syntax Elasticsearch expects.
    :return: list of model instances wrapping each hit's ``_source``.
    """
    if query:
        q = {'query': compile_condition('elasticsearch', query)()}
    else:
        q = {'query': {'match_all': {}}}
    if limit is not None:
        # Mirror the limit in the request body as well as the client
        # parameters; some client versions honour only one of the two.
        q['from'] = 0
        q['size'] = limit
    self.create_index()
    params = {}
    if offset is not None:
        params['from_'] = offset
    if limit is not None:
        # BUG FIX: was `if limit:` — a limit of 0 set the body size but
        # not the request parameter, making the two disagree.
        params['size'] = limit
    if order_by:
        params['sort'] = [':'.join(order_by)]
    res = self.client.search(index=self.index_name,
                             doc_type=self.doc_type,
                             body=q, **params)
    # res['hits']['hits'] holds the raw documents; wrap each _source.
    return [
        self.model(self.request, self, o['_source'])
        for o in res['hits']['hits']
    ]
def search(self, query=None, offset=None, limit=None, order_by=None):
    """Query the ORM table, excluding soft-deleted rows, and wrap results.

    :param query: optional condition compiled via
        ``compile_condition('sqlalchemy', ...)``; ``None`` matches all.
    :param offset: optional row offset.
    :param limit: optional maximum number of rows.
    :param order_by: optional ``(column, direction)`` pair; direction must
        be ``'asc'`` or ``'desc'``, otherwise ``KeyError`` is raised.
    :return: list of model instances wrapping each ORM row.
    """
    # Soft-deleted rows (non-null ``deleted``) are always filtered out.
    not_deleted = self.orm_model.deleted.is_(None)
    base = self.session.query(self.orm_model)
    if query:
        condition = compile_condition('sqlalchemy', query)(self.orm_model)
        q = base.filter(sa.and_(not_deleted, condition))
    else:
        q = base.filter(not_deleted)
    if order_by is not None:
        column, direction = order_by[0], order_by[1]
        if direction not in ('asc', 'desc'):
            raise KeyError(direction)
        sort_col = getattr(self.orm_model, column)
        q = q.order_by(sort_col.desc() if direction == 'desc' else sort_col)
    if offset is not None:
        q = q.offset(offset)
    if limit is not None:
        q = q.limit(limit)
    return [self.model(self.request, self, record) for record in q.all()]
def search(self, query=None, offset=None, limit=None, order_by=None):
    """Search the in-memory DATA store for this type.

    :param query: optional condition compiled via
        ``compile_condition('native', ...)``; ``None`` matches all.
    :param offset: optional result offset.
    :param limit: optional maximum number of results.
    :param order_by: optional ``(column, direction)`` pair; results are
        sorted by ``data[column]`` and reversed when direction is 'desc'.
    :return: list of matching records with ``request`` attached.

    BUG FIX: ordering is now applied *before* offset/limit slicing.
    Previously the results were paginated first and sorted afterwards, so
    each page was internally sorted but pagination did not follow the
    requested order — inconsistent with the SQL and Elasticsearch
    backends, which both sort before paginating.
    """
    if query:
        match = compile_condition('native', query)
        res = [o for o in DATA[self.typekey].values() if match(o.data)]
    else:
        res = list(DATA[self.typekey].values())
    # Attach the current request to every record before returning.
    for r in res:
        r.request = self.request
    if order_by is not None:
        col, direction = order_by
        res = sorted(res, key=lambda x: x.data[col])
        if direction == 'desc':
            res.reverse()
    if offset is not None:
        res = res[offset:]
    if limit is not None:
        res = res[:limit]
    return res
def aggregate(self, query=None, group=None, order_by=None):
    """Run an aggregation query against the ORM table.

    :param query: optional condition compiled via
        ``compile_condition('sqlalchemy', ...)``; ``None`` matches all
        non-deleted rows.
    :param group: mapping of output key to either a plain column name
        (grouped and selected as-is) or a ``{'function': ..., 'field': ...}``
        dict.  Supported functions: ``count``/``sum``/``avg``/``min``/``max``
        (plain aggregates, not grouped on) and ``year``/``month``/``day``/
        ``date`` (date buckets, selected AND grouped on).
    :param order_by: optional ``(column, direction)`` pair; direction must
        be ``'asc'`` or ``'desc'``, otherwise ``KeyError`` is raised.
        Aggregate output keys may be used as the sort column.
    :return: list of plain dicts with datetime/Decimal/UUID values
        converted to JSON-friendly representations.
    :raises ValueError: on an unknown aggregate function name.
    """
    # Plain SQL aggregate functions: selected, but NOT grouped on.
    plain_aggs = {
        'count': func.count,
        'sum': func.sum,
        'avg': func.avg,
        'min': func.min,
        'max': func.max,
    }
    # Date-part extractions: selected AND grouped on.
    date_parts = {'year': 'YEAR', 'month': 'MONTH', 'day': 'DAY'}

    group_bys = []
    group_bys_map = {}
    fields = []
    if group:
        for k, v in group.items():
            if isinstance(v, str):
                # Bare column name: group by the column itself.
                c = getattr(self.orm_model, v)
                fields.append(c)
                group_bys.append(c)
            elif isinstance(v, dict):
                ff = v['function']
                c = getattr(self.orm_model, v['field'])
                if ff in plain_aggs:
                    op = plain_aggs[ff](c).label(k)
                elif ff in date_parts:
                    op = func.date_part(date_parts[ff], c).label(k)
                    group_bys.append(op)
                elif ff == 'date':
                    op = func.to_char(c, 'YYYY-MM-DD').label(k)
                    group_bys.append(op)
                else:
                    raise ValueError('Unknown function %s' % ff)
                fields.append(op)
                group_bys_map[k] = op
    else:
        fields = [self.orm_model]

    not_deleted = self.orm_model.deleted.is_(None)
    if query:
        filterquery = compile_condition('sqlalchemy', query)(self.orm_model)
        q = self.session.query(*fields).filter(
            sa.and_(not_deleted, filterquery))
    else:
        q = self.session.query(*fields).filter(not_deleted)

    if order_by is not None:
        col, direction = order_by[0], order_by[1]
        if direction not in ['asc', 'desc']:
            raise KeyError(direction)
        # Aggregate labels take precedence over raw model columns.
        if col in group_bys_map:
            colattr = group_bys_map[col]
        else:
            colattr = getattr(self.orm_model, col)
        q = q.order_by(colattr.desc() if direction == 'desc' else colattr)

    if group_bys:
        q = q.group_by(*group_bys)

    results = []
    for row in q.all():
        # NOTE(review): assumes keyed-tuple rows; when ``group`` is absent
        # the query selects model instances, which have no ``_asdict`` —
        # confirm callers always pass ``group``.
        row_dict = row._asdict()
        for key, value in row_dict.items():
            if isinstance(value, datetime):
                row_dict[key] = value.isoformat()
            elif isinstance(value, Decimal):
                row_dict[key] = float(value)
            elif isinstance(value, uuid.UUID):
                row_dict[key] = value.hex
        results.append(row_dict)
    return results
def aggregate(self, query=None, group=None, order_by=None):
    """Run an aggregation against Elasticsearch and parse the buckets.

    :param query: optional condition compiled via
        ``compile_condition('elasticsearch', ...)``; ``None`` matches all.
    :param group: mapping of output key to either a plain field name
        (terms grouping) or a ``{'function': ..., 'field': ...}`` dict.
    :param order_by: optional ``(field, direction)`` pair forwarded as the
        ``sort`` parameter.
    :raises ValueError: on an unknown aggregate function name.
    """
    if query:
        q = {'query': compile_condition('elasticsearch', query)()}
    else:
        q = {'query': {'match_all': {}}}
    # Only the aggregation results are wanted, not the hits themselves.
    q['size'] = 0
    self.create_index()
    params = {}
    if order_by:
        params['sort'] = [':'.join(order_by)]
    if group:
        # Simple metric aggregations, keyed by function name.
        metrics = ('count', 'sum', 'avg')
        # Calendar buckets, each rendered with a unit-appropriate format.
        calendar = {
            'year': {'interval': 'year', 'format': 'yyyy'},
            'month': {'interval': 'month', 'format': 'MM'},
            'day': {'interval': 'day', 'format': 'dd'},
        }
        # Fixed-width time buckets, all sharing a minute-resolution format.
        fixed = {
            'interval-1m': '1m',
            'interval-15m': '15m',
            'interval-30m': '30m',
            'interval-1h': '1h',
        }
        aggs = Aggregate()
        for k, v in group.items():
            if isinstance(v, str):
                aggs.add_group(k, v)
            elif isinstance(v, dict):
                ff = v['function']
                f = v['field']
                if ff in metrics:
                    aggs.add(k, ff, f)
                elif ff in calendar:
                    aggs.add_group(k, f, type='date_histogram',
                                   opts=dict(calendar[ff]))
                elif ff in fixed:
                    aggs.add_group(k, f, type='date_histogram',
                                   opts={
                                       'interval': fixed[ff],
                                       'format': "yyyy-MM-dd'T'HH:mm"
                                   })
                else:
                    raise ValueError('Unknown function %s' % ff)
        aggs.finalize()
        q['aggs'] = aggs.json()
    # NOTE(review): assumes ``group`` was provided — ``aggs`` is
    # otherwise undefined when parsing the response below.
    res = self.client.search(index=self.index_name,
                             doc_type=self.doc_type,
                             body=q, **params)
    data = aggs.parse(res['aggregations'])
    return list(data)