def query_person_contacts():
    return (
        DBSession.query(
            Person.id.label('person_id'),
            func.array_to_string(
                func.array_agg(
                    case([(Contact.contact_type == 'phone', Contact.contact)])
                ),
                ', '
            ).label('phone'),
            func.array_to_string(
                func.array_agg(
                    case([(Contact.contact_type == 'email', Contact.contact)])
                ),
                ', '
            ).label('email'),
            func.array_to_string(
                func.array_agg(
                    case([(Contact.contact_type == 'skype', Contact.contact)])
                ),
                ', '
            ).label('skype'),
        )
        .join(Contact, Person.contacts)
        .group_by(Person.id)
    )
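# A minimal usage sketch for the example above (Person, Contact and DBSession
# are assumed from that snippet). The CASE-inside-array_agg pattern pivots
# each contact type into its own comma-separated column, roughly
#   array_to_string(array_agg(CASE WHEN contact_type = 'phone'
#                                  THEN contact END), ', ')
# per person; contacts of other types become NULL and array_to_string skips
# NULL elements.
def print_person_contacts():
    for row in query_person_contacts():
        print(row.person_id, row.phone, row.email, row.skype)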
def search():
    ''' Generic search page '''
    form = SearchForm()
    if request.method == 'GET':
        return render_template('search.html', form=form)
    if request.method == 'POST':
        query = User.query  #pylint: disable=no-member
        if form.country.data and form.country.data != 'ZZ':
            query = query.filter(User.country == form.country.data)
        if form.locales.data:
            query = query.join(User.languages).filter(UserLanguage.locale.in_(
                form.locales.data))
        if form.expertise_domain_names.data:
            query = query.join(User.expertise_domains).filter(UserExpertiseDomain.name.in_(
                form.expertise_domain_names.data))
        if form.fulltext.data:
            query = query.filter(func.to_tsvector(func.array_to_string(array([
                User.first_name,
                User.last_name,
                User.organization,
                User.position,
                User.projects]), ' ')).op('@@')(func.plainto_tsquery(form.fulltext.data)))
        # TODO ordering by relevance
        return render_template('search-results.html',
                               title='Expertise search',
                               form=form,
                               results=query.limit(20).all())
def emails(env):
    emails = (
        session.query(*Email.columns())
        .order_by(Email.gm_thrid, Email.date.desc())
    )
    if 'label' in env.request.args:
        label_id = env.request.args['label']
        label = session.query(Label).filter(Label.id == label_id).one()
        emails = emails.filter(Email.labels.has_key(label_id))
    elif 'email' in env.request.args:
        email = env.request.args['email']
        emails = emails.filter(
            func.array_to_string(Email.from_ + Email.to, ',')
            .contains('<%s>' % email)
        )
        label = None
    else:
        env.abort(404)

    groups = groupby(emails, lambda v: v.gm_thrid)
    groups = [(k, list(v)) for k, v in groups]
    counts = {k: len(v) for k, v in groups}
    emails = (Email.model(v[0]) for k, v in groups)
    emails = sorted(emails, key=lambda v: v.date, reverse=True)
    return env.render('emails.tpl', {
        'emails': emails,
        'counts': counts,
        'label': label,
        'labels': get_labels()
    })
def service_info_column(crop):
    """return optionally cropped service.info column"""
    if crop:
        return func.array_to_string(
            func.string_to_array(Service.info, ' ',
                                 type_=postgresql.ARRAY(db.String))[1:int(crop)],
            ' ')
    return Service.info
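# Hedged usage sketch for service_info_column(); `db`, `Service` and a
# `Service.id` column are assumed from (or in the spirit of) the snippet
# above. With crop=3 only the first three space-separated words of
# Service.info are selected (Postgres array slices are 1-based, hence
# the [1:crop] slice).
def list_cropped_service_infos(crop=3):
    return db.session.query(Service.id, service_info_column(crop)).all()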
def query_person_passports():
    return (
        DBSession.query(
            Person.id.label('person_id'),
            func.array_to_string(
                func.array_agg(
                    case([(Passport.passport_type == 'citizen', Passport.num)])
                ),
                ', '
            ).label('citizen'),
            func.array_to_string(
                func.array_agg(
                    case([(Passport.passport_type == 'foreign', Passport.num)])
                ),
                ', '
            ).label('foreign'),
        )
        .join(Passport, Person.passports)
        .group_by(Person.id)
    )
def _search_in_fields_query(cls, term, fields=None, array_fields=None,
                            apply_to_query=None):
    """
    Perform the actual search in specified fields.

    Search is performed using SQL ILIKE, and all array fields are joined
    using a comma (','). All clauses are OR'ed.

    E.g. ``Searchable._search_in_fields('aba', ['title', 'description'], ['keywords'])``
    gives the following SQL:

    .. sourcecode:: sql

        SELECT * FROM whatever
        WHERE title ILIKE 'aba'
            OR description ILIKE 'aba'
            OR array_to_string(keywords, ",") ILIKE 'aba';

    ``array_to_string`` is ``sqlalchemy.func.array_to_string``.

    ``apply_to_query`` can be used for chaining multiple logical searches.

    .. sourcecode:: python

        first_clause = Model._search_in_fields_query('term', ['whatever'])
        results = Model._search_in_fields_query(
            'term2', ['whatever2'], apply_to_query=first_clause).all()

    :param term: term to search for.
    :param fields: usual text fields to perform search in.
    :param array_fields: array fields to perform search in.
    :param apply_to_query: chaining helper.
    :return: list of results.
    """
    if fields is None:
        fields = []
    if array_fields is None:
        array_fields = []
    if apply_to_query is None:
        apply_to_query = cls.query

    filters = [
        getattr(cls, field).ilike('%' + term + '%')
        for field in fields
    ] + [
        func.array_to_string(getattr(cls, field), ',').ilike('%' + term + '%')
        for field in array_fields
    ]
    return apply_to_query.filter(or_(*filters))
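# A minimal sketch of calling the helper above, mirroring the usage shown in
# its docstring. It assumes a hypothetical Article model that mixes in this
# Searchable class and has a `title` String column plus a `tags`
# postgresql.ARRAY(String) column; these names are illustrative only.
def find_articles(term):
    return Article._search_in_fields_query(
        term, fields=['title'], array_fields=['tags']).all()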
def __init__(self, column_name):
    column = ColumnClause(column_name)
    # Postgres allows the condition "<sth> <op> ALL (<array>)" that
    # is true iff for all elements of array "<sth> <op> <element>".
    # This works for (in)equality but, as the order of the operands
    # is preserved, it doesn't work for regexp matching, where the
    # syntax is "<text> ~ <pattern>". Our regexp operates on a
    # per-character basis, so we can work around it by concatenating
    # the items of the array (using array_to_string) and matching the
    # regexp on the result.
    empty = literal_column("''")
    super(FilenameListConstraint, self).__init__(
        and_(
            func.array_to_string(column, empty).op("~")(
                literal_column("'^[A-Za-z0-9_.%-]*$'")),
            empty != all_(column),
            literal_column("'.'") != all_(column),
            literal_column("'..'") != all_(column)))
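# Hedged illustration of the check the constraint above ends up expressing
# for a column named "filenames" (assuming the parent class renders its
# single and_() argument as a CHECK constraint):
#
#   array_to_string(filenames, '') ~ '^[A-Za-z0-9_.%-]*$'
#   AND '' != ALL (filenames)
#   AND '.' != ALL (filenames)
#   AND '..' != ALL (filenames)
#
# i.e. every filename consists of allowed characters only and none of them
# is empty, "." or "..".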
def emails(env):
    emails = (
        session.query(Email.gm_thrid)
        .order_by(Email.gm_thrid, Email.date.desc())
    )
    if 'label' in env.request.args:
        label_id = env.request.args['label']
        label = session.query(Label).filter(Label.id == label_id).one()
        emails = emails.filter(Email.labels.has_key(label_id))
    elif 'email' in env.request.args:
        email = env.request.args['email']
        emails = emails.filter(
            func.array_to_string(Email.from_ + Email.to, ',')
            .contains('<%s>' % email)
        )
        label = None
    else:
        env.abort(404)

    threads = list(
        session.query(
            Email.gm_thrid,
            func.count('*').label('count'),
            func.max(Email.uid).label('uid')
        )
        .filter(Email.gm_thrid.in_([m.gm_thrid for m in emails]))
        .group_by(Email.gm_thrid)
    )
    emails = (
        session.query(Email)
        .filter(Email.uid.in_([m.uid for m in threads]))
        .order_by(Email.date.desc())
    )
    counts = {t.gm_thrid: t.count for t in threads}
    return env.render('emails.tpl', {
        'emails': emails,
        'counts': counts,
        'label': label,
        'labels': get_labels()
    })
def visit_reduction(self, expr):
    if getattr(expr, '_unique', False):
        raise NotImplementedError

    input = self._expr_to_sqlalchemy[expr.input]

    # TODO: MEDIAN is not supported
    if isinstance(expr, (Max, GroupedMax)):
        f = func.max
    elif isinstance(expr, (Min, GroupedMin)):
        f = func.min
    elif isinstance(expr, (Count, GroupedCount)):
        f = func.count
    elif isinstance(expr, (Sum, GroupedSum)):
        f = func.sum
    elif isinstance(expr, (Var, GroupedVar)) and expr._ddof in (0, 1):
        f = func.var_pop if expr._ddof == 0 else func.var_samp
    elif isinstance(expr, (Std, GroupedStd)) and expr._ddof in (0, 1):
        f = func.stddev_pop if expr._ddof == 0 else func.stddev_samp
    elif isinstance(expr, (Mean, GroupedMean)):
        f = func.avg
    elif isinstance(expr, (NUnique, GroupedNUnique)):
        f = lambda *x: func.count(distinct(*x))
    elif isinstance(expr, (Cat, GroupedCat)):
        f = lambda x: func.array_to_string(
            func.array_agg(x), self._expr_to_sqlalchemy[expr._sep])
    else:
        raise NotImplementedError

    if isinstance(expr, (Count, GroupedCount)) and \
            isinstance(expr.input, CollectionExpr):
        reduced = f()
    elif isinstance(expr, (NUnique, GroupedNUnique)):
        if len(expr.inputs) > 1:
            raise NotImplementedError
        reduced = f(*(self._expr_to_sqlalchemy[i] for i in expr.inputs))
    else:
        reduced = f(input)

    self._add(expr, reduced)
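# Hedged illustration of the Cat/GroupedCat branch above: a string-concat
# reduction over a column `col` with separator ',' is lowered to the
# Postgres aggregate
#   array_to_string(array_agg(col), ',')
# in the same way the other reductions map onto max/min/avg/etc.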
def _query_session(app, staff, domain, type, is_active, context_label=None,
                   handler_context_label=None, leader_context_label=None,
                   owner=None, handler_filter_self=None, leader_filter_self=None,
                   tag=None, is_online=None, customer=None):
    q = Session.query.join('project').options(orm.undefer('project.*')) \
        .filter(Session.is_active == is_active,
                Session.project.has(app_name=app.name, domain=domain, type=type))
    filter_project_scopes = Session.project.has(
        func.x_scopes_match_ctxes(Project.scope_labels, staff.uid,
                                  staff.context_labels))
    leader_only_self = leader_filter_self == 'only'
    handler_only_self = handler_filter_self == 'only'
    if handler_only_self:
        # only sessions handled by this staff member
        q = q.filter(Session.handler_id == staff.id)
        if leader_only_self:
            # only projects led by this staff member
            q = q.filter(Session.project.has(leader_id=staff.id))
        elif leader_filter_self == 'exclude':
            # exclude projects led by this staff member
            q = q.filter(Session.project.has(Project.leader_id != staff.id))
    elif handler_filter_self == 'exclude':
        # exclude sessions handled by this staff member
        q = q.filter(Session.handler_id != staff.id)
        if leader_only_self:
            # only projects led by this staff member
            q = q.filter(Session.project.has(leader_id=staff.id))
        elif leader_filter_self == 'exclude':
            # exclude projects led by this staff member
            q = q.filter(Session.project.has(Project.leader_id != staff.id),
                         filter_project_scopes)
        else:
            # include projects led by this staff member
            q = q.filter(
                or_(Session.project.has(leader_id=staff.id),
                    filter_project_scopes))
    else:
        # include sessions handled by this staff member
        if leader_only_self:
            # only projects led by this staff member
            q = q.filter(Session.project.has(leader_id=staff.id))
        elif leader_filter_self == 'exclude':
            # exclude projects led by this staff member
            q = q.filter(Session.project.has(Project.leader_id != staff.id),
                         filter_project_scopes)
        else:
            # include projects led by this staff member
            q = q.filter(
                or_(Session.handler_id == staff.id,
                    Session.project.has(leader_id=staff.id),
                    filter_project_scopes))
    if context_label is not None:
        path, uids = context_label
        if len(uids) > 0:
            q = q.filter(
                Session.project.has(
                    func.x_scopes_match_targets(Project.scope_labels,
                                                [path + uid for uid in uids])))
        else:
            q = q.filter(
                Session.project.has(
                    func.x_scopes_match_target(Project.scope_labels, path)))
    if handler_context_label is not None:
        path, uids = handler_context_label
        q = q.filter(
            Session.handler.has(
                func.x_target_match_ctxes(path, Staff.uid, Staff.context_labels)))
        if len(uids) > 0 and not handler_only_self:
            q = q.filter(Session.handler.has(Staff.uid.in_(uids)))
    if leader_context_label is not None:
        path, uids = leader_context_label
        q = q.filter(
            Session.project.has(
                Project.leader.has(
                    func.x_target_match_ctxes(path, Staff.uid,
                                              Staff.context_labels))))
        if len(uids) > 0 and not leader_filter_self:
            q = q.filter(
                Session.handler.has(Project.leader.has(Staff.uid.in_(uids))))
    if owner:
        s = f'%{owner}%'
        q = q.filter(
            Session.project.has(
                Project.owner.has(
                    or_(Customer.name.like(s), Customer.mobile.like(s),
                        Customer.uid.like(s)))))
    if customer:
        s = f'%{customer}%'
        q = q.filter(
            Session.project.has(
                Project.customers.any(
                    or_(Customer.name.like(s), Customer.mobile.like(s),
                        Customer.uid.like(s)))))
    if tag:
        s = f'%{tag}%'
        q = q.filter(
            Session.project.has(
                func.array_to_string(Project.tags, '*', ',').like(s)))
    if is_online is not None:
        q = q.filter(Session.project.has(Project.is_online == is_online))
    return q
def emails(env):
    label = None
    emails = (
        session.query(Email.gm_thrid)
        .order_by(Email.gm_thrid, Email.date.desc())
    )
    if 'label' in env.request.args:
        label_id = env.request.args['label']
        label = session.query(Label).filter(Label.id == label_id).one()
        emails = emails.filter(Email.labels.has_key(label_id)).all()
    elif 'email' in env.request.args:
        email = env.request.args['email']
        emails = emails.filter(
            func.array_to_string(Email.from_ + Email.to, ',')
            .contains('<%s>' % email)
        ).all()
    elif 'subj' in env.request.args:
        subj = env.request.args['subj']
        subj = re.sub(r'([()\[\]{}_*|+?])', r'\\\1', subj)
        emails = emails.filter(
            Email.subject.op('SIMILAR TO')(r'(_{2,10}:)*\ ?' + subj)
        ).all()
    elif 'q' in env.request.args and env.request.args['q']:
        query = env.request.args['q']
        query = query.replace(' ', '\\ ')
        emails = session.execute(
            '''
            SELECT id, gm_thrid FROM emails_search
            WHERE document @@ to_tsquery('simple', :query)
            ORDER BY ts_rank(document, to_tsquery('simple', :query)) DESC
            ''',
            {'query': query}
        ).fetchall()
    else:
        env.abort(404)

    if len(emails):
        threads = list(
            session.query(
                Email.gm_thrid,
                func.count('*').label('count'),
                func.max(Email.uid).label('uid')
            )
            .filter(Email.gm_thrid.in_([m.gm_thrid for m in emails]))
            .group_by(Email.gm_thrid)
        )
        emails = (
            session.query(Email)
            .filter(Email.uid.in_([m.uid for m in threads]))
            .order_by(Email.date.desc())
        ).all()
        counts = {t.gm_thrid: t.count for t in threads}
    else:
        emails, counts = [], {}

    return env.render('emails.tpl', {
        'emails': emails,
        'emails_count': len(emails),
        'counts': counts,
        'label': label,
        'labels': get_labels()
    })
def array_vectorizer(column):
    return func.array_to_string(column, ' ')
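# Hedged usage sketch: array_vectorizer() is convenient for feeding a
# Postgres text[] column into full-text search. `session` and a
# `Document.keywords` ARRAY(Text) column are illustrative assumptions, not
# part of the original snippet.
def keyword_search(session, term):
    return session.query(Document).filter(
        func.to_tsvector('simple', array_vectorizer(Document.keywords))
        .op('@@')(func.plainto_tsquery('simple', term)))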
# Order by time:
q = q.unique_join(Posi_Poly_Ext).unique_join(Inference_Ext).unique_join(
    Grd_Ext)
q = q.order_by(Grd_Ext.starttime)
if MockRequest.get("startdate"):
    q = q.filter(Grd_Ext.starttime >= MockRequest.get("startdate"))
if MockRequest.get("enddate"):
    q = q.filter(Grd_Ext.starttime <= MockRequest.get("enddate"))
if MockRequest.get("eez_sov"):
    q = q.unique_join(Posi_Poly_Ext).unique_join(
        Eez_Ext, func.ST_Intersects(Posi_Poly_Ext.geometry, Eez_Ext.geometry))
    q = q.filter(
        or_((func.array_to_string(Eez_Ext.sovereigns, "||").ilike(f"%{sov}%")
             for sov in MockRequest.get("eez_sov"))))
if MockRequest.get("min_score"):
    q = q.unique_join(Posi_Poly_Ext).unique_join(Coincident_Ext)
    q = q.filter(Coincident_Ext.score >= MockRequest.get("min_score"))
if MockRequest.get("vessel_mmsi"):
    q = (q.unique_join(Posi_Poly_Ext).unique_join(
        Coincident_Ext).unique_join(Vessel_Ext))
    q = q.filter(
        or_((Vessel_Ext.mmsi == m
             for m in MockRequest.get("vessel_mmsi"))))
if MockRequest.get("vessel_flag"):
    q = (q.unique_join(Posi_Poly_Ext).unique_join(
        Coincident_Ext).unique_join(Vessel_Ext))
    q = q.filter(
        or_((Vessel_Ext.flag == m
             for m in MockRequest.get("vessel_flag"))))
def _ilike_in_str_array_sql_filter_(self, model_column, value, **kwargs):
    return func.array_to_string(model_column, '|').ilike(f'%{value}%')
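# Hedged equivalent written inline (Product.tags is an illustrative
# ARRAY(String) column and `session` an open SQLAlchemy session): matches
# any row whose tags, joined with '|', contain the given substring.
def products_tagged(session, value='red'):
    return session.query(Product).filter(
        func.array_to_string(Product.tags, '|').ilike(f'%{value}%')).all()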
def users_with_groups():
    result = db.session.query(
        User,
        func.array_to_string(func.array_agg(func.distinct(Group.name)),
                             ',').label('group_name'),
        func.count(Group.id)
    ).join('groups').group_by(User).order_by(desc(User.name)).all()
    setJSONFormat()
    return jsonify(result)
def bulletin_query(self, q):
    query = []
    tsv = q.get('tsv')
    if tsv:
        words = tsv.split(' ')
        qsearch = [Bulletin.search.ilike('%{}%'.format(word)) for word in words]
        query.extend(qsearch)

    # exclude filter
    extsv = q.get('extsv')
    if extsv:
        words = extsv.split(' ')
        for word in words:
            query.append(not_(Bulletin.search.ilike('%{}%'.format(word))))

    # ref
    ref = q.get('ref')
    if ref:
        search = ['%' + r + '%' for r in ref]
        # get search operator
        op = q.get('opref', False)
        if op:
            query.append(or_(func.array_to_string(Bulletin.ref, '').ilike(r)
                             for r in search))
        else:
            query.append(and_(func.array_to_string(Bulletin.ref, '').ilike(r)
                              for r in search))

    exref = q.get('exref')
    if exref:
        # get operator
        opexref = q.get('opexref')
        if opexref:
            subq = Bulletin.query.filter(
                and_(Bulletin.ref.any(x) for x in exref)).with_entities('id')
            query.append(~Bulletin.id.in_(subq))
        else:
            query.extend([~Bulletin.ref.any(ref) for ref in exref])

    labels = q.get('labels', [])
    if len(labels):
        ids = [item.get('id') for item in labels]
        # children search ?
        recursive = q.get('childlabels', None)
        if q.get('oplabels'):
            # or operator
            if recursive:
                # get ids of children // update ids
                result = Label.query.filter(Label.id.in_(ids)).all()
                direct = [label for label in result]
                all = direct + Label.get_children(direct)
                # remove dups
                all = list(set(all))
                ids = [label.id for label in all]
            query.append(Bulletin.labels.any(Label.id.in_(ids)))
        else:
            # and operator (modify children search logic)
            if recursive:
                direct = Label.query.filter(Label.id.in_(ids)).all()
                for label in direct:
                    children = Label.get_children([label])
                    # add original label + uniquify list
                    children = list(set([label] + children))
                    ids = [child.id for child in children]
                    query.append(Bulletin.labels.any(Label.id.in_(ids)))
            else:
                # non-recursive (apply and on all ids)
                query.extend([Bulletin.labels.any(Label.id == id) for id in ids])

    # Excluded labels
    exlabels = q.get('exlabels', [])
    if len(exlabels):
        ids = [item.get('id') for item in exlabels]
        query.append(~Bulletin.labels.any(Label.id.in_(ids)))

    vlabels = q.get('vlabels', [])
    if len(vlabels):
        ids = [item.get('id') for item in vlabels]
        # children search ?
        recursive = q.get('childverlabels', None)
        if q.get('opvlabels'):
            # or operator
            if recursive:
                # get ids of children // update ids
                result = Label.query.filter(Label.id.in_(ids)).all()
                direct = [label for label in result]
                all = direct + Label.get_children(direct)
                # remove dups
                all = list(set(all))
                ids = [label.id for label in all]
            query.append(Bulletin.ver_labels.any(Label.id.in_(ids)))
        else:
            # and operator (modify children search logic)
            if recursive:
                direct = Label.query.filter(Label.id.in_(ids)).all()
                for label in direct:
                    children = Label.get_children([label])
                    # add original label + uniquify list
                    children = list(set([label] + children))
                    ids = [child.id for child in children]
                    query.append(Bulletin.ver_labels.any(Label.id.in_(ids)))
            else:
                # non-recursive (apply and on all ids)
                query.extend([Bulletin.ver_labels.any(Label.id == id) for id in ids])

    # Excluded vlabels
    exvlabels = q.get('exvlabels', [])
    if len(exvlabels):
        ids = [item.get('id') for item in exvlabels]
        query.append(~Bulletin.ver_labels.any(Label.id.in_(ids)))

    sources = q.get('sources', [])
    if len(sources):
        ids = [item.get('id') for item in sources]
        # children search ?
        recursive = q.get('childsources', None)
        if q.get('opsources'):
            # or operator
            if recursive:
                # get ids of children // update ids
                result = Source.query.filter(Source.id.in_(ids)).all()
                direct = [source for source in result]
                all = direct + Source.get_children(direct)
                # remove dups
                all = list(set(all))
                ids = [source.id for source in all]
            query.append(Bulletin.sources.any(Source.id.in_(ids)))
        else:
            # and operator (modify children search logic)
            if recursive:
                direct = Source.query.filter(Source.id.in_(ids)).all()
                for source in direct:
                    children = Source.get_children([source])
                    # add original source + uniquify list
                    children = list(set([source] + children))
                    ids = [child.id for child in children]
                    query.append(Bulletin.sources.any(Source.id.in_(ids)))
            else:
                # non-recursive (apply and on all ids)
                query.extend([Bulletin.sources.any(Source.id == id) for id in ids])

    # Excluded sources
    exsources = q.get('exsources', [])
    if len(exsources):
        ids = [item.get('id') for item in exsources]
        query.append(~Bulletin.sources.any(Source.id.in_(ids)))

    locations = q.get('locations', [])
    if len(locations):
        ids = [item.get('id') for item in locations]
        children = Location.find_by_ids(ids)
        all_ids = [l.get('id') for l in children]
        if q.get('oplocations'):
            query.append(Bulletin.locations.any(Location.id.in_(all_ids)))
        else:
            for id in ids:
                children = Location.find_by_ids([id])
                all_ids = [l.get("id") for l in children]
                query.append(Bulletin.locations.any(Location.id.in_(all_ids)))

    # Excluded locations
    exlocations = q.get('exlocations', [])
    if len(exlocations):
        ids = [item.get('id') for item in exlocations]
        query.append(~Bulletin.locations.any(Location.id.in_(ids)))

    # event date
    edate = q.get('edate', None)
    edatewithin = q.get('edatewithin', None)
    if edate:
        if edatewithin in self.ACCEPTED_DATE_RANGES:
            diff = timedelta(days=int(edatewithin[:-1]))
            edate = parse(edate).date()
            query.append(Bulletin.events.any(
                (func.date(Event.from_date) >= edate - diff) &
                (func.date(Event.from_date) <= edate + diff)))
        else:
            query.append(Bulletin.events.any(func.date(Event.from_date) == edate))

    elocation = q.get('elocation')
    if elocation:
        id = elocation.get('id', -1)
        query.append(Bulletin.events.any(Event.location_id.in_([id])))

    etype = q.get('etype', None)
    if etype:
        id = etype.get('id', -1)
        query.append(Bulletin.events.any(Event.eventtype_id == id))

    # publish date
    pubdate = q.get('pubdate', None)
    pubdatewithin = q.get('pubdatewithin', None)
    if pubdate:
        if pubdatewithin in self.ACCEPTED_DATE_RANGES:
            diff = timedelta(days=int(pubdatewithin[:-1]))
            pubdate = parse(pubdate).date()
            query.append((func.date(Bulletin.publish_date) >= pubdate - diff) &
                         (func.date(Bulletin.publish_date) <= pubdate + diff))
        else:
            query.append(func.date(Bulletin.publish_date) == pubdate)

    # documentation date
    docdate = q.get('docdate', None)
    docdatewithin = q.get('docdatewithin', None)
    if docdate:
        if docdatewithin in self.ACCEPTED_DATE_RANGES:
            diff = timedelta(days=int(docdatewithin[:-1]))
            docdate = parse(docdate).date()
            query.append((func.date(Bulletin.documentation_date) >= docdate - diff) &
                         (func.date(Bulletin.documentation_date) <= docdate + diff))
        else:
            query.append(func.date(Bulletin.documentation_date) == docdate)

    # creation date
    created = q.get('created', None)
    created_within = q.get('createdwithin', None)
    if created:
        if created_within in self.ACCEPTED_DATE_RANGES:
            diff = timedelta(days=int(created_within[:-1]))
            created = parse(created).date()
            query.append((func.date(Bulletin.created_at) >= created - diff) &
                         (func.date(Bulletin.created_at) <= created + diff))
        else:
            query.append(func.date(Bulletin.created_at) == created)

    # modified date
    updated = q.get('updated', None)
    updated_within = q.get('updatedwithin', None)
    if updated:
        if updated_within in self.ACCEPTED_DATE_RANGES:
            diff = timedelta(days=int(updated_within[:-1]))
            updated = parse(updated).date()
            query.append((func.date(Bulletin.updated_at) >= updated - diff) &
                         (func.date(Bulletin.updated_at) <= updated + diff))
        else:
            query.append(func.date(Bulletin.updated_at) == updated)

    # assigned user(s)
    assigned = q.get('assigned', [])
    if assigned:
        query.append(Bulletin.assigned_to_id.in_(assigned))

    # unassigned
    unassigned = q.get('unassigned', None)
    if unassigned:
        query.append(Bulletin.assigned_to == None)

    # First peer reviewer
    fpr = q.get('reviewer', [])
    if fpr:
        query.append(Bulletin.first_peer_reviewer_id.in_(fpr))

    # workflow status
    status = q.get('status', None)
    if status:
        query.append(Bulletin.status == status)

    # review status
    review_action = q.get('reviewAction', None)
    if review_action:
        query.append(Bulletin.review_action == review_action)

    # Related to bulletin search
    rel_to_bulletin = q.get('rel_to_bulletin')
    if rel_to_bulletin:
        bulletin = Bulletin.query.get(int(rel_to_bulletin))
        if bulletin:
            ids = [b.get_other_id(bulletin.id) for b in bulletin.bulletin_relations]
            query.append(Bulletin.id.in_(ids))

    # Related to actor search
    rel_to_actor = q.get('rel_to_actor')
    if rel_to_actor:
        actor = Actor.query.get(int(rel_to_actor))
        if actor:
            ids = [b.bulletin_id for b in actor.bulletin_relations]
            query.append(Bulletin.id.in_(ids))

    # Related to incident search
    rel_to_incident = q.get('rel_to_incident')
    if rel_to_incident:
        incident = Incident.query.get(int(rel_to_incident))
        if incident:
            ids = [b.bulletin_id for b in incident.bulletin_relations]
            query.append(Bulletin.id.in_(ids))

    return query
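# Hedged usage sketch: bulletin_query() returns a list of SQLAlchemy filter
# clauses rather than a Query, so a caller would typically AND them together;
# the surrounding search class and request parsing are assumed.
def search_bulletins(self, q):
    return Bulletin.query.filter(*self.bulletin_query(q)).all()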