def get(self):
    """List projects with optional text search, status filter and sorting.

    Each serialized project dict is augmented with:
      - 'lastBuild': the project's most recent build (serialized), and
      - 'lastPassingBuild': the most recent passing build (serialized).
    """
    args = self.get_parser.parse_args()

    queryset = Project.query
    if args.query:
        # Case-insensitive substring match on either name or slug.
        queryset = queryset.filter(
            or_(
                func.lower(Project.name).contains(args.query.lower()),
                func.lower(Project.slug).contains(args.query.lower()),
            ),
        )
    if args.status:
        queryset = queryset.filter(
            Project.status == ProjectStatus[args.status]
        )
    if args.sort == 'name':
        queryset = queryset.order_by(Project.name.asc())
    elif args.sort == 'date':
        queryset = queryset.order_by(Project.date_created.asc())

    project_list = list(queryset)

    context = []
    if project_list:
        latest_build_results = get_latest_builds_query(project_list)
        latest_build_map = dict(
            zip([b.project_id for b in latest_build_results],
                self.serialize(latest_build_results))
        )

        passing_build_map = {}
        missing_passing_builds = set()
        for build in latest_build_results:
            if build.result == Result.passed:
                # BUGFIX: store the serialized build (consistent with
                # latest_build_map and with the fallback query below),
                # not the raw ORM object.
                passing_build_map[build.project_id] = \
                    latest_build_map.get(build.project_id)
            else:
                passing_build_map[build.project_id] = None
                missing_passing_builds.add(build.project_id)

        if missing_passing_builds:
            # The latest build wasn't passing for some projects; fetch their
            # latest *passing* builds separately.
            passing_build_results = get_latest_builds_query(
                project_list, result=Result.passed,
            )
            passing_build_map.update(dict(
                zip([b.project_id for b in passing_build_results],
                    self.serialize(passing_build_results))
            ))

        for project, data in zip(project_list, self.serialize(project_list)):
            # TODO(dcramer): build serializer is O(N) for stats
            data['lastBuild'] = latest_build_map.get(project.id)
            data['lastPassingBuild'] = passing_build_map.get(project.id)
            context.append(data)

    return self.respond(context)
def search(self, string, show_error):
    """
    Find custom slides whose title or text contains the search string
    (case-insensitively).

    :param string: The search string
    :param show_error: The error string to be shown.
    """
    pattern = '%' + string.lower() + '%'
    title_or_text = or_(func.lower(CustomSlide.title).like(pattern),
                        func.lower(CustomSlide.text).like(pattern))
    matches = self.plugin.db_manager.get_all_objects(
        CustomSlide, title_or_text, order_by_ref=CustomSlide.title)
    results = []
    for slide in matches:
        results.append([slide.id, slide.title])
    return results
def tag_search(text, count=5):
    """Search in the tag_search_items table (for location tags).

    Full-text matches `text` against the precomputed search terms, ranking
    exact (trimmed, case-insensitive) title matches first — parent titles,
    then the tag's own titles — and finally by ts_rank_cd relevance.
    Returns at most `count` results (all of them when count is None).
    """
    # Two aliases of the same table: QTag is the tag itself, QParent its parent.
    QTag = aliased(LocationTag)
    QParent = aliased(LocationTag)
    text = text.lower().strip()
    # '@@' is PostgreSQL full-text match against a plainto_tsquery of the input.
    query = meta.Session.query(TagSearchItem)\
        .join(QTag)\
        .outerjoin((QParent, QParent.id==QTag.parent_id))\
        .filter(TagSearchItem.terms.op('@@')(func.plainto_tsquery(text)))\
        .order_by(or_(func.lower(func.btrim(QParent.title)) == text,
                      func.lower(func.btrim(QParent.title_short)) == text).desc())\
        .order_by(or_(func.lower(func.btrim(QTag.title)) == text,
                      func.lower(func.btrim(QTag.title_short)) == text).desc())\
        .order_by(func.ts_rank_cd(TagSearchItem.terms, func.plainto_tsquery(text)))
    if count is not None:
        query = query.limit(count)
    return query.all()
def extract_users(session, comment_txt):
    """
    extract referenced users from a message

    replaces "@name" with "@NN" where NN is userid
    (because username might change, but id won't)

    returns new comment string and list of resolved users
    """
    mention_re = re.compile("@([a-zA-Z0-9_]+)", re.I)

    # Collect every distinct "@name" mention, then resolve them all against
    # the database in a single case-insensitive query.
    mentioned = {match.group(1) for match in mention_re.finditer(comment_txt)}
    users = (
        session.query(UserAccount)
        .filter(func.lower(UserAccount.username).in_(
            [name.lower() for name in mentioned]))
        .all()
    )
    by_lower_name = {account.username.lower(): account for account in users}

    def replace_mention(match):
        account = by_lower_name.get(match.group(1).lower())
        if account is None:
            # Unknown name: leave the original "@name" text untouched.
            return match.group(0)
        return u"@%d" % (account.id,)

    return mention_re.sub(replace_mention, comment_txt), users
def get_user_by_username(db_conn, username):
    """Fetch the single user row (id, name, email, is_superuser) whose name
    matches *username* case-insensitively; asserts exactly one row matched."""
    wanted = username.lower()
    stmt = (
        select(['id', 'name', 'email', 'is_superuser'])
        .select_from(user_tbl)
        .where(func.lower(user_tbl.c.name) == wanted)
    )
    result = db_conn.execute(stmt)
    assert result.rowcount == 1
    return result.fetchone()
def __init__(self, word):
    """Store a case-normalised form of *word*: plain strings are lowered
    eagerly, CaseInsensitiveWord values are unwrapped, anything else is
    wrapped in a SQL lower() expression."""
    if isinstance(word, basestring):
        self.word = word.lower()
        return
    if isinstance(word, CaseInsensitiveWord):
        self.word = word.word
        return
    # Column/clause value: defer lowering to the database.
    self.word = func.lower(word)
async def find_series(ser):
    """Look up a series row by case-insensitive title match; returns
    {'id', 'title'} or None (implicitly) when not found."""
    table = model.Series.__table__
    wanted = ser['title'].lower()
    async with engine.acquire() as conn:
        result = await conn.execute(
            select([table.c.id, table.c.title])
            .where(func.lower(table.c.title) == wanted))
        row = await result.fetchone()
    if row:
        return {'id': row[0], 'title': row[1]}
def handle_list(self, event, owner, status, milestone):
    """Respond with trac tickets filtered by status, owner and milestone."""
    session = ibid.databases.trac()
    # "open" is shorthand for the three non-closed trac states.
    status = status or 'open'
    if status.lower() == 'open':
        statuses = (u'new', u'assigned', u'reopened')
    else:
        statuses = (status.lower(),)
    query = session.query(Ticket).filter(Ticket.status.in_(statuses))
    if owner:
        if owner.lower() == 'my':
            # "my tickets": resolve to the requesting user's nick.
            owner = event.sender['nick']
        else:
            # Strip a possessive, e.g. "bob's" -> "bob".
            owner = owner.lower().replace("'s", '')
        query = query.filter(func.lower(Ticket.owner)==(owner.lower()))
    if milestone:
        query = query.filter_by(milestone=milestone)
    tickets = query.order_by(Ticket.id).all()
    if len(tickets) > 0:
        event.addresponse(u', '.join(['%s (%s): "%s"' % (ticket.id, ticket.owner, ticket.summary) for ticket in tickets]))
    else:
        event.addresponse(u"No tickets found")
    session.close()
async def find_synonym(name, what):
    """Resolve *name* (case-insensitively) within category *what* to the
    canonical name, or None (implicitly) when no synonym is recorded."""
    table = model.Synonym.__table__
    query = select([table.c.our_name]).where(
        and_(func.lower(table.c.other_name) == name.lower(),
             table.c.category == what))
    async with engine.acquire() as conn:
        result = await conn.execute(query)
        row = await result.fetchone()
    if row:
        return row[0]
def get(self, build_id):
    """Return the paginated list of test cases for one build, with optional
    case-insensitive name search, result filter and sorting."""
    build = Build.query.get(build_id)
    if build is None:
        return '', 404
    args = self.parser.parse_args()
    # Eager-load the joined Job so serialization doesn't issue per-row queries.
    test_list = TestCase.query.options(
        contains_eager('job')
    ).join(
        Job, TestCase.job_id == Job.id,
    ).filter(
        Job.build_id == build.id,
    )
    if args.query:
        test_list = test_list.filter(
            func.lower(TestCase.name).contains(args.query.lower()),
        )
    if args.result:
        test_list = test_list.filter(
            TestCase.result == Result[args.result],
        )
    if args.sort == 'duration':
        sort_by = TestCase.duration.desc()
    elif args.sort == 'name':
        sort_by = TestCase.name.asc()
    elif args.sort == 'retries':
        sort_by = TestCase.reruns.desc()
    # NOTE(review): sort_by is unbound when args.sort is none of the above —
    # presumably the parser restricts choices / supplies a default; confirm.
    test_list = test_list.order_by(sort_by)
    return self.paginate(test_list, max_per_page=None)
def get(self):
    """Paginated repository listing: optional case-insensitive URL substring
    search, backend/status filters, and url/date ordering."""
    args = self.get_parser.parse_args()

    repos = Repository.query
    if args.query:
        needle = args.query.lower()
        repos = repos.filter(func.lower(Repository.url).contains(needle))
    if args.backend:
        repos = repos.filter(
            Repository.backend == RepositoryBackend[args.backend])
    if args.status:
        repos = repos.filter(
            Repository.status == RepositoryStatus[args.status])

    if args.sort == 'url':
        repos = repos.order_by(Repository.url.asc())
    elif args.sort == 'date':
        repos = repos.order_by(Repository.date_created.asc())

    return self.paginate(repos)
def get(self, project_id):
    """Return the paginated plans for a project, with optional label search,
    status filter and name/date sorting."""
    project = Project.get(project_id)
    if project is None:
        return '', 404
    args = self.get_parser.parse_args()
    queryset = Plan.query.filter(
        Plan.project_id == project.id,
    )
    if args.query:
        # Case-insensitive substring match against the plan label.
        queryset = queryset.filter(
            func.lower(Plan.label).contains(args.query.lower()),
        )
    if args.status:
        queryset = queryset.filter(
            Plan.status == PlanStatus[args.status],
        )
    if args.sort == 'name':
        queryset = queryset.order_by(Plan.label.asc())
    elif args.sort == 'date':
        queryset = queryset.order_by(Plan.date_created.asc())
    return self.paginate(queryset)
def get_results(
    transcript_name, minimum_CG, maximum_CG, maximum_offtarget,
    scaffold, immunostimulatory
):
    """Return cached results (as JSON-able dicts) for a previously stored
    input set, or None when no exactly-matching InputData row exists.

    String parameters are matched case-insensitively; the numeric bounds
    must match exactly.
    """
    try:
        stored_input = db_session.query(InputData).filter(
            func.lower(InputData.transcript_name) == transcript_name.lower(),
            InputData.minimum_CG == minimum_CG,
            InputData.maximum_CG == maximum_CG,
            InputData.maximum_offtarget == maximum_offtarget,
            func.lower(InputData.scaffold) == scaffold.lower(),
            func.lower(InputData.immunostimulatory) == immunostimulatory.lower()
        ).outerjoin(InputData.results).one()
    except NoResultFound:
        # No stored run with these exact parameters.
        return None
    return [result.as_json() for result in stored_input.results]
def frames_by_scaffold(scaffold):
    """Return Backbone rows whose name matches *scaffold* case-insensitively,
    or every row when scaffold == 'all'."""
    query = db_session.query(Backbone)
    if scaffold != 'all':
        query = query.filter(func.lower(Backbone.name) == scaffold.lower())
    return query.all()
def get_by_name_query(table, name):
    """Build a query for a single row of *table* whose name matches *name*,
    ignoring case.

    Don't use this for Pokémon!  Use `pokemon()`, as it knows about forms.
    """
    return pokedex_session.query(table).filter(
        func.lower(table.name) == name.lower())
def search_users(name):
    ''' Search users in a team'''
    if not require.team.read():
        abort(403)
    team = cached_teams.get_team(name)
    form = SearchForm(request.form)
    # NOTE(review): `users` is never used below — this full-table query looks
    # like dead code; confirm and remove.
    users = db.session.query(User).all()
    if request.method == 'POST' and form.user.data:
        # Case-insensitive LIKE on either name or fullname.
        query = '%' + form.user.data.lower() + '%'
        founds = db.session.query(User)\
            .filter(or_(func.lower(User.name).like(query),
                        func.lower(User.fullname).like(query)))\
            .all()
        if not founds:
            msg = gettext('Ooops! We didn\'t find a user matching your query:')
            flash(msg)
            return render_template(
                '/team/search_users.html',
                founds = [],
                team = team,
                title = gettext('Search name of User'))
        else:
            for found in founds:
                user2team = User2Team.query\
                    .filter(User2Team.team_id==team.id)\
                    .filter(User2Team.user_id==found.id)\
                    .first()
                # Tuple-indexing trick: belong = 0 when no membership row,
                # 1 when the user is already in the team.
                found.belong = (1, 0)[user2team is None]
            return render_template(
                '/team/search_users.html',
                founds = founds,
                team = team,
                title = gettext('Search User'))
    # GET request (or empty form): render the empty search page.
    return render_template(
        '/team/search_users.html',
        founds = [],
        team = team,
        title = gettext('Search User'))
def search_teams(type):
    """Search teams by name (case-insensitive LIKE).

    type == 'public' searches public teams; anything else searches teams
    the current user belongs to.  Renders the search results template.
    """
    if not require.team.read():
        abort(403)
    # (removed two unused locals: a throwaway `title` string and a dead
    # full-table `db.session.query(Team).all()` call)
    form = SearchForm(request.form)
    if request.method == 'POST' and form.user.data:
        query = '%' + form.user.data.lower() + '%'
        if type == 'public':
            founds = db.session.query(Team)\
                .filter(func.lower(Team.name).like(query))\
                .filter(Team.public == True)\
                .all()
        else:
            founds = db.session.query(Team)\
                .join(User2Team)\
                .filter(func.lower(Team.name).like(query))\
                .filter(User2Team.user_id == current_user.id)\
                .all()
        if not founds:
            msg = gettext('Ooops! We didn\'t find a team matching your query:')
            flash(msg)
            return render_template(
                '/team/search_teams.html',
                founds=[],
                team_type=type,
                title=gettext('Search Team'))
        return render_template(
            '/team/search_teams.html',
            founds=founds,
            team_type=type,
            title=gettext('Search Team'))
    # BUGFIX: this branch passed `found=[]` (singular) while every other
    # branch passes `founds` — the template never received its expected
    # variable on a plain GET.
    return render_template(
        '/team/search_teams.html',
        founds=[],
        team_type=type,
        title=gettext('Search Team'))
def get_artist2scrobbles(user, min_count, max_uts=None):
    """Map lower-cased artist name -> scrobble count for *user*, keeping only
    artists with at least *min_count* scrobbles (optionally restricted to
    scrobbles at or before *max_uts*).  Missing artists default to 0.
    """
    where = Scrobble.user == user
    if max_uts is not None:
        where = where & (Scrobble.uts <= max_uts)
    # BUGFIX: group by the same lower-cased expression that is selected.
    # Grouping by the raw artist name produced one row per casing variant,
    # all sharing the same lowered key, so later rows silently overwrote
    # earlier ones in the resulting dict and min_count applied per-variant.
    return defaultdict(lambda: 0,
                       session.query(func.lower(Scrobble.artist),
                                     func.count(Scrobble.id)).
                       group_by(func.lower(Scrobble.artist)).
                       filter(where).
                       having(func.count(Scrobble.id) >= min_count))
def _after_create(target, conn, **kw):
    """Table-creation listener: build a functional index on
    indico_unaccent(lower(column)) — `column` comes from the enclosing
    scope.  Uses a GIN trigram index when the pg_trgm extension is
    available; checks are skipped entirely under TESTING."""
    assert target is column.table
    col_func = func.indico_unaccent(func.lower(column))
    index_kwargs = {}
    if not current_app.config['TESTING'] and has_extension(conn, 'pg_trgm'):
        # pg_trgm available: trigram GIN index makes LIKE/ILIKE searches fast.
        index_kwargs = {'postgresql_using': 'gin',
                        'postgresql_ops': {col_func.key: 'gin_trgm_ops'}}
    elif not current_app.config['TESTING']:
        # Python 2 print statement (this module predates py3).
        print 'Warning: pg_trgm extension is not available'
    Index(conv('ix_{}_{}_unaccent'.format(column.table.name, column.name)), col_func, **index_kwargs).create(conn)
def js_add_tag(self):
    """AJAX endpoint: create a LocationTag under the deepest existing parent
    of the submitted location path.  Returns {'success': ..., 'error': ...}
    with exactly one of the two filled in."""
    if hasattr(self, 'form_result'):
        json = {'success': '', 'error': ''}
        parent = None
        created = None
        location = self.form_result['location']
        newlocation = self.form_result['newlocation']
        for index, item in enumerate(newlocation):
            if item['title'] == '' and location[index] != '':
                # No new tag at this level — descend into the existing tag.
                try:
                    parent = meta.Session.query(LocationTag
                        ).filter(LocationTag.title == location[index]
                        ).filter(LocationTag.parent == parent
                        ).one()
                except:
                    # NOTE(review): bare except — hides NoResultFound /
                    # MultipleResultsFound and any other error; confirm intent.
                    break
            else:
                try:
                    # Validate the short title (URL-safe, no spaces).
                    InURLValidator.to_python(item['title_short'])
                    # Reject duplicates among siblings by either title
                    # (case-insensitive).
                    existing = meta.Session.query(LocationTag).filter(or_(func.lower(LocationTag.title_short) == item['title_short'].lower(),
                                                                          func.lower(LocationTag.title) == item['title'].lower()))\
                        .filter(LocationTag.parent == parent).first()
                    if existing is not None:
                        if existing.title.lower() == item['title'].lower():
                            json['error'] = _('The entry already exists')
                            break
                        else:
                            json['error'] = _('Choose a different short title')
                            break
                except:
                    # NOTE(review): bare except assumes only the validator can
                    # raise here; a DB error would be misreported as a
                    # short-title problem.
                    json['error'] = _('The short title must contain no spaces')
                    break
                created = LocationTag(item['title'], item['title_short'], u'', parent, confirmed=False)
                meta.Session.add(created)
                meta.Session.commit()
                break
        if created is not None:
            json['success'] = created.title
        return json
def _getLabelFromDatabase(self, name):
    """Find a Label by name or by any of its '|'-delimited alternateNames,
    case-insensitively; returns the first match or None."""
    if not name:
        return None
    name = name.lower().strip()
    # NOTE(review): the alternateNames clause is raw SQL built by string
    # concatenation.  Single quotes are doubled, which blocks quote-breaking
    # injection, but bound parameters would be safer and clearer.
    # The four branches match: exact value, first entry, middle entry, and
    # last entry of the '|'-separated alternateNames list.
    stmt = or_(func.lower(Label.name) == name,
               text("(lower(alternateNames) = '" + name.replace("'", "''") + "'" + ""
                    " OR alternateNames like '" + name.replace(
                   "'", "''") + "|%'" +
                    " OR alternateNames like '%|" + name.replace("'", "''") + "|%'" +
                    " OR alternateNames like '%|" + name.replace("'", "''") + "')"))
    return self.session.query(Label).filter(stmt).first()
def __call__(self, node, value):
    """Colander uniqueness validator: raise Invalid when some row of
    model_class already holds this value in the configured field
    (optionally case-insensitive)."""
    if self.case_sensitive:
        column, needle = self.field, value
    else:
        column, needle = func.lower(self.field), value.lower()
    duplicate = self.db.query(self.model_class).filter(column == needle).scalar()
    if duplicate is not None:
        message = (u'A %s already exists with that %s'
                   % (self.model_class.__name__.lower(), node.name))
        raise colander.Invalid(node, message)
def change_user(self, r, first_name, last_name, username):
    """Update the user's first/last name and, when the account has no
    username yet, claim *username* — rejecting it if already taken
    (case-insensitively).

    Raises ValidationFailedException when the requested username exists.
    """
    # Idiom fix: `is None` instead of `== None`.  Only an account without a
    # username may set one; the second clause skips the no-op case where the
    # "new" username is also None.
    if r.user.username is None and r.user.username != username:
        # attempting username change
        user_count = r.connection.execute(
            select([func.count(t_user.c.user_id)],
                   func.lower(t_user.c.username) == username.lower())
        ).fetchone()[0]
        if user_count:
            raise ValidationFailedException(dict(username='******'))
        r.user.username = username
    r.user.first_name = first_name
    r.user.last_name = last_name
    r.session.flush()
def system_utilisation_counts_by_group(grouping, systems):
    """Count systems per *grouping* value, bucketed by utilisation state:
    the open reservation's type when reserved, otherwise 'idle_<status>'
    derived from the (lower-cased) system status.  Every group starts with
    all six buckets at 0."""
    retval = defaultdict(lambda: dict((k, 0) for k in
            ['recipe', 'manual', 'idle_automated', 'idle_manual',
             'idle_broken', 'idle_removed']))
    # coalesce(): systems without an open reservation fall through to the
    # synthesised 'idle_<status>' label.
    query = systems.outerjoin(System.open_reservation)\
        .with_entities(grouping,
                       func.coalesce(Reservation.type,
                                     func.concat('idle_', func.lower(System.status))),
                       func.count(System.id))\
        .group_by(literal_column("1"), literal_column("2"))
    for group, state, count in query:
        retval[group][state] = count
    return retval
def calc_unique_username(session, base_username):
    """
    return a unique, valid username based on some existing name
    """
    # Scrub: keep only [a-zA-Z0-9_]; result must still be a valid username.
    base_username = re.sub("[^a-zA-Z0-9_]", "", base_username)
    assert UserAccount.USERNAME_PAT.match(base_username) is not None

    def taken(candidate):
        # Case-insensitive existence check against UserAccount.username.
        hit = (session.query(UserAccount.username)
               .filter(func.lower(UserAccount.username) == func.lower(candidate))
               .first())
        return hit is not None

    if not taken(base_username):
        return base_username
    # Append an increasing numeric suffix until a free name is found.
    suffix = 1
    while True:
        candidate = "%s%d" % (base_username, suffix)
        if not taken(candidate):
            return candidate
        suffix += 1
def autocomplete(request):
    """Tag-name autocompletion: distinct lower-cased tag names matching the
    'q' prefix, ordered by package-usage count (descending) then name; the
    'num' parameter caps the result size.  Responds as JSON with CORS
    headers."""
    context = contexts.Ctx(request)
    # Prefix pattern for ilike, e.g. 'foo%'.
    pattern = '{0}%'.format(request.params.get('q', ''))
    num = int(request.params.get('num', 0))
    query = DB.query(distinct(func.lower(Tag.name)).label('name'),
                     func.count(PackageTag.package_id).label('total'))
    query = query.join(PackageTag)
    query = query.filter(Tag.name.ilike(pattern))
    # NOTE(review): string-based order_by/group_by is deprecated in newer
    # SQLAlchemy; works on the pinned version, but text()/column refs would
    # be the modern spelling.
    query = query.order_by('total desc', 'name').group_by('name')
    if num:
        query = query.limit(num)
    data = [row[0] for row in query]
    headers = wsgihelpers.handle_cross_origin_resource_sharing(context)
    return wsgihelpers.respond_json(context, data, headers=headers)
def get_project(self, name):
    """Look up a project by its normalized name (underscores folded to
    hyphens, lower-cased, as stored in packages.normalized_name); returns a
    Project or None (implicitly) when absent."""
    normalized = func.lower(func.regexp_replace(name, "_", "-", "ig"))
    stmt = select([packages.c.name]).where(
        packages.c.normalized_name == normalized)
    with self.engine.connect() as conn:
        found = conn.execute(stmt).scalar()
    if found is not None:
        return Project(found)
def system_utilisation_counts(systems):
    """
    Similar to the above except returns counts of systems based on the
    current state, rather than historical data about particular systems.
    """
    # One bucket per utilisation state; systems with an open reservation
    # count under the reservation type, idle ones under 'idle_<status>'.
    retval = dict((k, 0) for k in ['recipe', 'manual', 'idle_automated',
            'idle_manual', 'idle_broken', 'idle_removed'])
    query = systems.outerjoin(System.open_reservation)\
        .with_entities(func.coalesce(Reservation.type,
                                     func.concat('idle_', func.lower(System.status))),
                       func.count(System.id))\
        .group_by(literal_column("1"))
    for state, count in query:
        retval[state] = count
    return retval
def validate(cls, data_type, sdvalue):
    """
    This method validates whether given SD value is present in the model
    or not. If exists returns SD description otherwise None
    """
    # Python 2 print statement — debug trace of the lookup.
    print "Data type:%s SD Value: %s" % (data_type, sdvalue)
    query = meta.Session.query(cls).filter(cls.data_type == data_type)
    # Case-insensitive match on the description column.
    query = query.filter(func.lower(cls.description) == sdvalue.lower())
    member = query.first()
    if member is None:
        return None
    else:
        # Prefer the short code when present; fall back to the description.
        if member.code is not None:
            return member.code
        else:
            return member.description
def post(self, server_type, server_id):
    """Create a new KeyWord for the server group, rejecting duplicate names
    (comparison is case-insensitive) with HTTP 400."""
    form = ns.payload
    server_group_id = get_group_id(server_type, server_id)
    duplicate = (KeyWords.query
                 .filter_by(server_group_id=server_group_id)
                 .filter(func.lower(KeyWords.name) == form["name"].lower())
                 .first())
    if duplicate is not None:
        abort(400, f"Key word {form['name']} already exists")
    new_keyword = KeyWords(
        server_group_id=server_group_id,
        name=form["name"],
        responses=form["responses"],
    )
    db.session.add(new_keyword)
    db.session.commit()
    return new_keyword
def _validate_unique_container_name(self, context, name):
    """Raise ContainerAlreadyExists when *name* clashes (case-insensitively)
    within the configured uniqueness scope ('project' or 'global'); a falsy
    or unrecognised scope disables the check."""
    scope = CONF.compute.unique_container_name_scope
    if not scope:
        return
    lowered = name.lower()
    matches = model_query(models.Container).filter(
        func.lower(models.Container.name) == lowered)
    if scope == 'project':
        clash_count = matches.filter_by(project_id=context.project_id).count()
    elif scope == 'global':
        clash_count = matches.count()
    else:
        return
    if clash_count > 0:
        raise exception.ContainerAlreadyExists(field='name', value=lowered)
def _getFromDatabaseByTitle(self, artist, title):
    """Find a Release for *artist* by exact (case-insensitive) title, by its
    '|'-delimited alternateNames, or by the cleaned form of the title;
    returns the first match or None."""
    if not title:
        return None
    title = title.lower().strip()
    cleanedTitle = createCleanedName(title)
    # NOTE(review): raw SQL assembled by concatenation; quotes are doubled so
    # quote-breaking is prevented, but bound parameters would be safer.
    # Each text() clause matches exact / first / middle / last entries of the
    # '|'-separated alternateNames list.
    stmt = or_(func.lower(Release.title) == title,
               text("(lower(alternateNames) = '" + title.replace("'", "''") + "'" + ""
                    " OR alternateNames like '" + title.replace(
                   "'", "''") + "|%'" +
                    " OR alternateNames like '%|" + title.replace("'", "''") + "|%'" +
                    " OR alternateNames like '%|" + title.replace("'", "''") + "')"),
               text("(alternateNames = '" + cleanedTitle + "'" + ""
                    " OR alternateNames like '" + cleanedTitle + "|%'" +
                    " OR alternateNames like '%|" + cleanedTitle + "|%'" +
                    " OR alternateNames like '%|" + cleanedTitle + "')")
               )
    return self.session.query(Release).filter(Release.artistId == artist.id).filter(stmt).first()
def get(self):
    """Paginated user listing: optional case-insensitive email search,
    is_admin filter, and email/date ordering."""
    args = self.get_parser.parse_args()
    users = User.query
    if args.query:
        users = users.filter(
            func.lower(User.email).contains(args.query.lower()))
    if args.is_admin is not None:
        users = users.filter(User.is_admin == args.is_admin)
    if args.sort == 'email':
        users = users.order_by(User.email.asc())
    elif args.sort == 'date':
        users = users.order_by(User.date_created.asc())
    return self.paginate(users)
async def do_overlay(request, projectversion_id, name):
    """Create an 'overlay' projectversion on top of an existing one: it
    inherits basemirror/architectures/etc., records the source version as a
    dependency, and triggers aptly repository initialisation."""
    if not name:
        return ErrorResponse(400, "No valid name for the projectversion received")
    if not is_name_valid(name):
        return ErrorResponse(400, "Invalid project name")
    db = request.cirrina.db_session
    projectversion = db.query(ProjectVersion).filter(ProjectVersion.id == projectversion_id).first()
    if not projectversion:
        return ErrorResponse(400, "Projectversion not found")
    # Overlay names must be unique (case-insensitively) within the project.
    overlay_projectversion = db.query(ProjectVersion).filter(func.lower(ProjectVersion.name) == name.lower(),
                                                             ProjectVersion.project_id == projectversion.project_id).first()
    if overlay_projectversion:
        return ErrorResponse(400, "Overlay already exists")
    overlay_projectversion = ProjectVersion(
        name=name,
        project=projectversion.project,
        # add the projectversion where the overlay is created from as a dependency
        dependencies=[projectversion],
        mirror_architectures=projectversion.mirror_architectures,
        basemirror=projectversion.basemirror,
        description=projectversion.description,
        dependency_policy=projectversion.dependency_policy,
        ci_builds_enabled=projectversion.ci_builds_enabled,
        projectversiontype="overlay",
        baseprojectversion_id=projectversion.id
    )
    db.add(overlay_projectversion)
    db.commit()
    basemirror = overlay_projectversion.basemirror
    # Kick off repository creation in aptly for the new overlay.
    await enqueue_aptly({"init_repository": [
        basemirror.project.name,
        basemirror.name,
        overlay_projectversion.project.name,
        overlay_projectversion.name,
        db2array(overlay_projectversion.mirror_architectures),
        []]})
    return OKResponse({"id": overlay_projectversion.id, "name": overlay_projectversion.name})
def get_process_hist(data, database_session):
    """Query SystemProcessStatusHist filtered by the optional keys of *data*
    (run_uuid — matched case-insensitively —, kind, namelist, limit); returns
    rows as dicts with placeholder err_code/err_message fields."""
    # NOTE(review): empty_value is never used below.
    empty_value = '-'
    query = database_session.query(SystemProcessStatusHist.name,
                                   SystemProcessStatusHist.nodetype,
                                   SystemProcessStatusHist.usetype,
                                   SystemProcessStatusHist.action_kind,
                                   SystemProcessStatusHist.action_status,
                                   SystemProcessStatusHist.action_message,
                                   SystemProcessStatusHist.register_dt)
    if 'run_uuid' in data:
        if data.get('run_uuid') is not None:
            query = query.filter(
                func.lower(SystemProcessStatusHist.run_uuid) == data.get(
                    'run_uuid').lower())
    if 'kind' in data:
        if data.get('kind') is not None:
            query = query.filter(
                SystemProcessStatusHist.kind == data.get('kind'))
    if 'namelist' in data:
        namelist = data.get('namelist')
        query = query.filter(SystemProcessStatusHist.name.in_(namelist))
    if 'limit' in data:
        limit_cnt = int(data.get('limit'))
        if limit_cnt > 0:
            query = query.limit(limit_cnt)
    # NOTE(review): order_by is applied after limit() — newer SQLAlchemy
    # rejects this combination; presumably intended the other way round.
    # Confirm against the pinned SQLAlchemy version.
    query = query.order_by(
        SystemProcessStatusHist.register_dt.desc()).all()
    field_list = [
        'name', 'nodetype', 'usetype', 'action_kind', 'action_status',
        'action_message', 'register_dt'
    ]
    result = BaseDAO.return_data(query=query, field_list=field_list)
    # Callers expect these placeholder error fields on every row.
    for res_raw in result:
        res_raw['err_code'] = '-'
        res_raw['err_message'] = '-'
    return result
def pokemon_form_query(name, form=None):
    """Returns a query that will look for the specified Pokémon form, or the
    default form of the named Pokémon.
    """
    q = pokedex_session.query(tables.PokemonForm)
    q = q.join(tables.PokemonForm.pokemon)
    q = q.join(tables.Pokemon.species)
    # Join the localized-name table and match case-insensitively.
    q = q.join(tables.PokemonSpecies.names_local) \
        .filter(func.lower(tables.PokemonSpecies.names_table.name) == name.lower())
    if form:
        # If a form has been specified, it must match
        q = q.filter(tables.PokemonForm.form_identifier == form)
    else:
        # If there's NOT a form, just get the default form
        q = q.filter(tables.Pokemon.is_default == True)
        q = q.filter(tables.PokemonForm.is_default == True)
    return q
def get_by_name_query(table, name, query=None):
    """Return a query finding a single row of *table* by name, ignoring
    case.

    Don't use this for Pokémon!  Use `pokemon_query()`, as it knows about
    forms.

    If query is given it is extended (joined with table.names_local);
    otherwise a fresh query on *table* is started.
    """
    base = pokedex_session.query(table) if query is None else query
    lowered = name.lower()
    return base.join(table.names_local).filter(
        func.lower(table.names_table.name) == lowered)
def find_player(user_info: UserInfo) -> Player:
    """Find the player associated with the user info.

    Args:
        user_info (UserInfo): the user info

    Raises:
        NotPartOfLeagueException: if the user is not part of the league

    Returns:
        Player: the matching player (email compared case-insensitively)
    """
    email = user_info.get('email')
    matches = (DB.session.query(Player)
               .filter(func.lower(Player.email) == email.lower())
               .all())
    if not matches:
        LOGGER.info(f"{email} is not part of league right now")
        raise NotPartOfLeagueException(
            "Sorry, looks like you are not in the league")
    return matches[0]
def search_artists():
    """Case-insensitive substring search over artist names; renders the
    results page with each match's id, name and upcoming-show count."""
    search_term = request.form.get('search_term')
    matches = Artist.query.filter(
        func.lower(Artist.name).contains(search_term.lower())).all()
    data = []
    for artist in matches:
        upcoming = Show.query.filter(
            Show.artist_id == artist.id,
            Show.start_time >= datetime.datetime.today()).all()
        data.append({
            'id': artist.id,
            'name': artist.name,
            'num_upcoming_shows': len(upcoming),
        })
    response = {'count': len(matches), 'data': data}
    return render_template('pages/search_artists.html',
                           results=response,
                           search_term=request.form.get('search_term', ''))
def autocomplete_formats(request):
    """Resource-format autocompletion: distinct lower-cased formats matching
    the 'q' prefix, ordered by usage count then name ('num' caps results).
    Responds as JSON with CORS headers; an empty pattern short-circuits
    to []."""
    context = contexts.Ctx(request)
    pattern = request.params.get('q', '')
    headers = wsgihelpers.handle_cross_origin_resource_sharing(context)
    if not pattern:
        return wsgihelpers.respond_json(context, [], headers=headers)
    # Prefix pattern for ilike, e.g. 'csv%'.
    pattern = '{0}%'.format(pattern)
    num = int(request.params.get('num', 0))
    query = DB.query(
        distinct(func.lower(Resource.format)).label('format'),
        func.count(Resource.id).label('count'))
    query = query.filter(Resource.format.ilike(pattern))
    # NOTE(review): ordered by *ascending* count here, while the sibling tag
    # autocomplete orders by 'total desc' — confirm which is intended.
    # String-based order_by/group_by is also deprecated in newer SQLAlchemy.
    query = query.order_by('count', 'format').group_by('format')
    if num:
        query = query.limit(num)
    data = [row[0] for row in query]
    return wsgihelpers.respond_json(context, data, headers=headers)
def pokemon_query(name, form=None):
    """Returns a query that will look for the named Pokémon.

    form, if given, is a form identifier.
    """
    query = pokedex_session.query(tables.Pokemon)
    query = query.join(tables.Pokemon.species)
    query = query.join(tables.PokemonSpecies.names_local)
    # Case-insensitive match on the localized species name.
    query = query.filter(
        func.lower(tables.PokemonSpecies.names_table.name) == name.lower())
    if form:
        # If a form has been specified, it must match
        query = query.join(tables.Pokemon.forms) \
            .filter(tables.PokemonForm.form_identifier == form)
    else:
        # If there's NOT a form, just make sure we get a default Pokémon
        query = query.filter(tables.Pokemon.is_default == True)
    return query
def get(self):
    """Paginated repository listing with optional case-insensitive URL
    search, backend filter, and url/date ordering."""
    args = self.get_parser.parse_args()
    query = Repository.query
    if args.query:
        query = query.filter(
            func.lower(Repository.url).contains(args.query.lower()))
    if args.backend:
        query = query.filter(
            Repository.backend == RepositoryBackend[args.backend])
    # Dispatch sort key -> column; unknown keys leave the order unspecified.
    sort_columns = {'url': Repository.url, 'date': Repository.date_created}
    column = sort_columns.get(args.sort)
    if column is not None:
        query = query.order_by(column.asc())
    return self.paginate(query)
def _multi_attrs_column_filter(instrumented_attributes, query, filter_string,
                               sort_dir_func):
    """Filter *query* so every whitespace-separated token of *filter_string*
    matches (case-insensitive LIKE) at least one of the given columns, then
    order by all of those columns via *sort_dir_func*.

    instrumented_attributes:[sqlalchemy.orm.attributes.InstrumentedAttribute]
    query:sqlalchemy.orm.query.Query
    filter_string:str space-separated value to filter the column on
    sort_dir_func:callable(sqlalchemy.orm.attributes.InstrumentedAttribute)
        the sqlalchemy sort expression desc or asc
    """
    for token in filter_string.lower().split():
        token_matches = [func.lower(attr).like("%%%s%%" % token)
                         for attr in instrumented_attributes]
        query = query.filter(expression.or_(*token_matches))
    ordering = [sort_dir_func(attr) for attr in instrumented_attributes]
    return query.order_by(*ordering)
def create_new_blog_post(
    data: BlogPostCreateSchema,
    session: Session = Depends(deps.get_database_session),
    _: models.Admin = Depends(deps.get_current_admin_user),
):
    """Admin-only endpoint: create a blog post, rejecting duplicate titles
    (case-insensitive) with HTTP 400; missing tags are created on the fly."""
    current_blog_post_with_same_title = (session.query(models.BlogPost).filter(
        func.lower(models.BlogPost.title) == data.title.lower()).one_or_none())
    if current_blog_post_with_same_title:
        raise HTTPException(400, "Blog post with same title already exists")
    blog_tags = []
    for tag in data.tags:
        # NOTE(review): Query.get() looks up by primary key, while the
        # fallback constructs BlogTag(name=tag) — presumably the tag's name
        # is its primary key; confirm against the BlogTag model.
        blog_tag = session.query(models.BlogTag).get(tag)
        if not blog_tag:
            blog_tag = models.BlogTag(name=tag)
        blog_tags.append(blog_tag)
    post: models.BlogPost = models.BlogPost(title=data.title,
                                            content=data.content,
                                            tags=blog_tags)
    session.add(post)
    session.commit()
    return post
def search_food_frequent(search_term, user_id):
    """
    Search the user's history for the search term, ordered by frequency.
    Food items that have been logged more often will appear first.
    """
    # Grouped by lower-cased name (plus the nutrient columns); mode() picks a
    # representative original casing for each group.  Top 5 by log count.
    foods = db.session.query(Food) \
        .with_entities(
            func.mode().within_group(Food.name),
            Food.quantity,
            Food.calories,
            Food.protein,
            func.count('*'),
            func.max(Food.date)
        ) \
        .filter_by(user_id=user_id) \
        .filter(not_(Food.name == '')) \
        .filter(Food.name.ilike('%{0}%'.format(search_term))) \
        .group_by(
            func.lower(Food.name),
            Food.quantity,
            Food.calories,
            Food.protein,
        ) \
        .order_by(func.count('*').desc()) \
        .limit(5) \
        .all()

    def cast_decimal(dec):
        # Decimal columns come back as decimal.Decimal; JSON wants float.
        if dec is None:
            return None
        return float(dec)

    def to_dict(f):
        # Row layout matches the with_entities() order above.
        return {
            'name': f[0],
            'quantity': f[1],
            'calories': cast_decimal(f[2]),
            'protein': cast_decimal(f[3]),
            'count': f[4]
        }
    return [to_dict(f) for f in foods]
def search_venues():
    """Case-insensitive substring search over venue names; renders the
    results page with each match's id, name and upcoming-show count."""
    search_term = request.form.get('search_term')
    matches = Venue.query.filter(
        func.lower(Venue.name).contains(search_term.lower())).all()
    data = []
    for venue in matches:
        upcoming_shows = Show.query.filter(
            Show.venue_id == venue.id,
            Show.start_time >= datetime.datetime.today()).all()
        data.append({
            'id': venue.id,
            'name': venue.name,
            'num_upcoming_shows': len(upcoming_shows),
        })
    response = {'count': len(matches), 'data': data}
    return render_template('pages/search_venues.html',
                           results=response,
                           search_term=request.form.get('search_term', ''))
def get_station(system_name, station_name, create=True):
    """Fetch (or, when create=True, lazily create) the Station in the named
    system; both names are matched case-insensitively.

    NOTE(review): uses `assert` for validation — these checks vanish under
    `python -O`; explicit raises would be safer.
    """
    system = System.query.filter(
        func.lower(System.name) == system_name.lower()).first()
    if not system:
        assert create, 'No such system {}'.format(system_name)
        system = System(name=system_name)
        db.session.add(system)
    station = Station.query.filter(
        Station.system_id == system.id,
        Station.name_lower == station_name.lower()).first()
    if not station:
        assert create, 'No such station {} in {} system'.format(
            station_name, system_name)
        # Reject transient player-status strings the data feed sometimes
        # reports in place of a real station name.
        assert not ('Confined to the' in station_name
                    or 'Doing activity' in station_name
                    or 'Hotel Room' in station_name
                    or 'Docks' in station_name), \
            '{} does not look like a proper station name'.format(station_name)
        station = Station(system=system, name=station_name,
                          name_lower=station_name.lower())
        db.session.add(station)
    return station
def search_venues():
    """Case-insensitive LIKE search on venue names; renders results with
    each venue's count of shows starting after now (UTC)."""
    search_term = request.form.get('search_term', '')
    pattern = "%{}%".format(search_term.lower())
    matches = Venue.query.filter(
        func.lower(Venue.name).like(pattern)).all()
    data = [{
        "id": venue.id,
        "name": venue.name,
        "num_upcoming_shows": sum([show.start_time > datetime.now(pytz.utc)
                                   for show in venue.venue_shows]),
    } for venue in matches]
    response = {"count": len(matches), "data": data}
    return render_template('pages/search_venues.html',
                           results=response,
                           search_term=request.form.get('search_term', ''))
def get(self, build_id):
    """Paginated Bazel targets for one build: optional case-insensitive name
    search, result filter, duration/name sorting (optionally reversed);
    messages per target are capped by max_messages_per_target."""
    build = Build.query.get(build_id)
    if build is None:
        return self.respond({}, status_code=404)
    args = self.parser.parse_args()
    # Eager-load the joined Job to avoid per-row queries at serialize time.
    target_list = BazelTarget.query.options(contains_eager('job')).join(
        Job,
        BazelTarget.job_id == Job.id,
    ).filter(Job.build_id == build.id, )
    if args.query:
        target_list = target_list.filter(
            func.lower(BazelTarget.name).contains(args.query.lower()),
        )
    if args.result:
        target_list = target_list.filter(
            BazelTarget.result == Result[args.result],
        )
    sort_col, sort_dir = None, None
    if args.sort == 'duration':
        sort_col, sort_dir = BazelTarget.duration, desc
    elif args.sort == 'name':
        sort_col, sort_dir = BazelTarget.name, asc
    # NOTE(review): if args.sort is neither value, sort_dir stays None and
    # both the reverse lookup and order_by below would raise — presumably the
    # parser restricts choices / supplies a default; confirm.
    if args.reverse:
        sort_dir = {asc: desc, desc: asc}[sort_dir]
    target_list = target_list.order_by(sort_dir(sort_col))
    return self.paginate(target_list,
                         max_per_page=None,
                         serializers={
                             BazelTarget: BazelTargetWithMessagesCrumbler(
                                 max_messages=args.max_messages_per_target)
                         })
def get(self, project_id):
    """Paginated listing of a project's plans: optional case-insensitive
    label search, status filter, and name/date ordering."""
    project = Project.get(project_id)
    if project is None:
        return '', 404
    args = self.get_parser.parse_args()
    plans = Plan.query.filter(Plan.project_id == project.id, )
    if args.query:
        plans = plans.filter(
            func.lower(Plan.label).contains(args.query.lower()))
    if args.status:
        plans = plans.filter(Plan.status == PlanStatus[args.status])
    if args.sort == 'name':
        plans = plans.order_by(Plan.label.asc())
    elif args.sort == 'date':
        plans = plans.order_by(Plan.date_created.asc())
    return self.paginate(plans)
def post(self, server_type, server_id):
    """Create a new Counter"""
    form = ns.payload
    server_group_id = get_group_id(server_type, server_id)
    # Reject duplicates within the server group (name is case-insensitive).
    if (Counter.query.filter_by(server_group_id=server_group_id).filter(
            func.lower(Counter.name) == form["name"].lower()).first()
            is not None):
        abort(400, f"Counter {form['name']} already exists")
    response = None
    if "response" in form:
        response = form["response"]
    if response is None:
        # response could still be None from the post data
        # Default template; the trailing '{}' is filled with the count later.
        response = f"Counter {form['name']}" + " is now at {}"
    counter = Counter(
        server_group_id=server_group_id,
        name=form["name"],
        count=form.get("count", 0),
        response=response,
    )
    db.session.add(counter)
    db.session.commit()
    return counter
class NewsletterSubscription(db.Model):
    """A single e-mail subscription to the newsletter.

    TODO: slugs are generated with SQLite's recommended native functions
    (hex(randomblob(16))); a collision is extremely unlikely but still
    possible. Replace with a DB-based collision-avoidant function for
    handling this. And yes, it is possible to make, no excuses.
    """

    # Surrogate primary key.
    id = db.Column(db.Integer, primary_key=True)
    # URL-safe public identifier; default is a SELECT that asks SQLite for
    # a random 32-hex-character string (lower(hex(randomblob(16)))).
    slug = db.Column(db.String, unique=True, nullable=False, default=select(
        [func.lower(func.hex(func.randomblob(16)))]))
    # Subscriber address; one subscription per e-mail.
    email = db.Column(db.String, unique=True, nullable=False)
    # Presumably flipped to True once the subscriber confirms — TODO confirm
    # the confirmation flow against the handlers that set this.
    confirmed = db.Column(db.Boolean, default=False)
    # NOTE(review): client-side default only; there is no onupdate=, so this
    # is NOT refreshed automatically on modification — confirm intent.
    modified_at = db.Column(db.DateTime(timezone=True), default=func.now())
    # Row creation timestamp, assigned by the database server.
    created_at = db.Column(db.DateTime(timezone=True), server_default=func.now())

    def __repr__(self):
        return '<NewsletterSubscription id: {} email: {} confirmed: {}>'.format(
            self.id, self.email, self.confirmed)
def __eq__(self, other):
    """Case-insensitive SQL equality: LOWER(column) == LOWER(other)."""
    own_side = func.lower(self.__clause_element__())
    other_side = func.lower(other)
    return own_side == other_side
async def get_projectversions2(request):
    """
    Returns a list of projectversions.

    ---
    description: Returns a list of projectversions.
    tags:
        - ProjectVersions
    parameters:
        - name: project_name
          in: path
          required: true
          type: string
        - name: basemirror_id
          in: query
          required: false
          type: integer
        - name: isbasemirror
          in: query
          required: false
          type: bool
        - name: q
          in: query
          required: false
          type: string
        - name: page
          in: query
          required: false
          type: integer
        - name: page_size
          in: query
          required: false
          type: integer
        - name: per_page
          in: query
          required: false
          type: integer
    produces:
        - text/json
    """
    db = request.cirrina.db_session
    # The path segment may be either a project name or a numeric project id.
    project_id = request.match_info["project_name"]
    basemirror_id = request.GET.getone("basemirror_id", None)
    # NOTE(review): the query parameter actually read here is "isbasemirror"
    # (no underscore); the docstring previously advertised "is_basemirror".
    is_basemirror = request.GET.getone("isbasemirror", False)
    filter_name = request.GET.getone("q", None)

    # Base query: real (non-mirror) projects, excluding deleted versions.
    query = db.query(ProjectVersion).join(Project).filter(
        Project.is_mirror.is_(False),
        ProjectVersion.is_deleted.is_(False))
    if project_id:
        # Match by lowercase name OR by numeric id when the segment parses.
        query = query.filter(or_(
            func.lower(Project.name) == project_id.lower(),
            Project.id == parse_int(project_id)))
    if filter_name:
        # Substring search on the version name; user input is escaped for LIKE.
        query = query.filter(
            ProjectVersion.name.ilike("%{}%".format(
                escape_for_like(filter_name))))
    if basemirror_id:
        query = query.filter(ProjectVersion.basemirror_id == basemirror_id)
    elif is_basemirror:
        # Only base mirrors whose mirroring has completed.
        query = query.filter(Project.is_basemirror.is_(True),
                             ProjectVersion.mirror_state == "ready")

    query = query.order_by(ProjectVersion.id.desc())
    # Total count is taken before pagination so clients can page correctly.
    nb_projectversions = query.count()
    query = paginate(request, query)
    projectversions = query.all()

    results = []
    for projectversion in projectversions:
        results.append(projectversion.data())

    data = {"total_result_count": nb_projectversions, "results": results}
    return OKResponse(data)
def simpleresults(page=1, showAll=None):
    """Render the search-results page for water samples.

    Parameters:
        page    -- 1-based page number for the paginator.
        showAll -- "all" disables pagination (one page with every result).

    Reads filter bounds (temperature, pH, turbidity, conductivity) and
    location filters from the request query string, fetches the latest
    sample per location, sorts by feature name (with a natural numeric
    suffix), and builds a temperature-range pie chart.
    """
    args = request.args.copy()
    minTemp = args.get('minTemp')
    maxTemp = args.get('maxTemp')
    district = args.get('dist')
    feature_system = args.get('fsys')
    location = args.get('loc')
    minPH = args.get('minPH')
    maxPH = args.get('maxPH')
    minTurb = args.get('minTurb')
    maxTurb = args.get('maxTurb')
    minCond = args.get('minCond')
    maxCond = args.get('maxCond')

    latestSampleIds = Location.latestSampleIdsAllLocations()
    # Each physical measurement may be NULL; such samples deliberately pass
    # every bound via the or_(col == None, ...) clauses.
    latestFilteredSamples = Sample.query.options(
        joinedload('location'), joinedload('image'),
        joinedload('phys')).filter(
            Physical_data.id == Sample.phys_id,
            or_(Physical_data.initialTemp == None,
                Physical_data.initialTemp >= minTemp),
            or_(Physical_data.initialTemp == None,
                Physical_data.initialTemp < maxTemp),
            or_(Physical_data.pH == None, Physical_data.pH >= minPH),
            or_(Physical_data.pH == None, Physical_data.pH < maxPH),
            or_(Physical_data.conductivity == None,
                Physical_data.conductivity >= minCond),
            or_(Physical_data.conductivity == None,
                Physical_data.conductivity < maxCond),
            or_(Physical_data.turbidity == None,
                Physical_data.turbidity >= minTurb),
            or_(Physical_data.turbidity == None,
                Physical_data.turbidity < maxTurb),
            Sample.location_id == Location.id,
            Sample.id.in_(latestSampleIds)).order_by(
                func.lower(Location.feature_name))

    if district is not None and district != "":
        latestFilteredSamples = latestFilteredSamples.filter(
            Location.district == district)
    if feature_system is not None and feature_system != "":
        latestFilteredSamples = latestFilteredSamples.filter(
            Location.feature_system == feature_system)
    if location is not None and location != "":
        latestFilteredSamples = latestFilteredSamples.filter(
            Location.location == location)

    if showAll == "all":
        resultsPerPage = latestFilteredSamples.count()
    else:
        resultsPerPage = app.config["RESULTS_PER_PAGE"]

    # Feature names have an optional numeric part, e.g the 17 in
    # 'Wairakei Terraces Feature 17', and results need to be sorted by the
    # non-numeric part then the numeric part (if present).
    # Due to MySQL's limited support for regular expression searching
    # (no group extraction), samples have to be sorted in memory.
    featureNameRegEx = re.compile(r'^(.*\D)(\d+)\s*$')
    sortedSamples = sorted(latestFilteredSamples,
                           key=lambda s: (_sort_key(s, featureNameRegEx)))

    # SQLAlchemy pagination only works on the returned Query object, so need
    # this custom implementation to work with sorted samples
    paginatedSamples = Paginator(sortedSamples, resultsPerPage, page)
    form = SearchForm()

    count = {'1-25': 0, '26-50': 0, '51-75': 0, '76-100': 0}
    for s in latestFilteredSamples:
        temp = s.phys.initialTemp
        if temp is None:
            # BUGFIX: NULL temperatures pass the SQL filter above but
            # previously crashed here ('>=' unsupported between NoneType
            # and int in Python 3); they are simply excluded from the chart.
            continue
        if 1 <= temp <= 25:
            count["1-25"] += 1
        if 26 <= temp <= 50:
            count["26-50"] += 1
        if 51 <= temp <= 75:
            count["51-75"] += 1
        if 76 <= temp <= 100:
            count["76-100"] += 1
    pieChart = [dict(range=k, count=v) for k, v in count.items()]

    locations = Location.query.with_entities(Location.district).group_by(
        Location.district)
    locations = [i[0] for i in locations if i[0] is not None]

    return render_template('simpleresults.html',
                           entries=paginatedSamples,
                           form=form,
                           minTemp=minTemp,
                           maxTemp=maxTemp,
                           pieChart=pieChart,
                           locations=locations)
class Test(db.Model, TypenameMixin, StatusPredicatesMixin, HasSubjectsMixin,
           UserDetailsMixin, TimespanMixin):
    """A single test run, belonging to a Session.

    Carries SCM provenance, status/counters, and eagerly-joined
    relationships to its first error and latest comment via correlated
    min/max subqueries.
    """

    id = db.Column(db.Integer, primary_key=True)
    # Ordinal of this test within its session — presumably assigned by the
    # session runner; confirm against the writer of this column.
    test_index = db.Column(db.Integer)
    test_info_id = db.Column(db.Integer, db.ForeignKey(
        'test_information.id', ondelete='CASCADE'), index=True)
    test_info = db.relationship('TestInformation', lazy='joined')
    test_variation_id = db.Column(db.Integer, db.ForeignKey(
        'test_variation.id', ondelete='CASCADE'), index=True)
    test_variation = db.relationship('TestVariation', lazy='joined')
    # Subjects are reached through the session's association table, ordered
    # by their ordinal within the session.
    subject_instances = db.relationship(
        'SubjectInstance', secondary=session_subject,
        primaryjoin='Test.session_id==session_subject.c.session_id',
        lazy='joined', order_by=session_subject.c.ordinal)
    # The owning user, resolved through the Session table (many-to-one).
    user = db.relationship('User', secondary=Session.__table__,
                           primaryjoin='Test.session_id==Session.id',
                           secondaryjoin='Session.user_id==User.id',
                           lazy='joined', uselist=False)
    metadatas = db.relationship('TestMetadata', lazy='dynamic')
    parameters = db.Column(JSONB)

    @rendered_field
    def variation(self):
        """Return the variation payload, or None when the test has none."""
        v = self.test_variation
        if v is None:
            return None
        return v.variation

    @rendered_field
    def session_display_id(self):
        # Prefer the session's logical id; fall back to its numeric id.
        return self.session.logical_id or self.session.id

    @rendered_field
    def is_session_abandoned(self):
        return self.session.is_abandoned()

    # SCM provenance of the code under test.
    scm = db.Column(db.String(5), default=None)
    scm_dirty = db.Column(db.Boolean, server_default='false')
    scm_revision = db.Column(db.String(40), default=None)
    scm_local_branch = db.Column(db.String(256), default=None, nullable=True)
    scm_remote_branch = db.Column(db.String(256), default=None, nullable=True)
    file_hash = db.Column(db.String(40), default=None)
    session_id = db.Column(db.Integer, db.ForeignKey(
        'session.id', ondelete='CASCADE'), index=True)
    logical_id = db.Column(db.String(256), index=True, unique=True)
    start_time = db.Column(db.Float, default=None, index=True)
    end_time = db.Column(db.Float, default=None, index=True)
    errors = db.relationship('Error')
    comments = db.relationship('Comment',
                               primaryjoin='Comment.test_id==Test.id')
    # The earliest error of this test, selected via a correlated
    # MIN(timestamp) subquery and eagerly joined.
    first_error_obj = db.relationship(
        lambda: Error,
        primaryjoin=lambda: and_(
            Test.id == Error.test_id,  # pylint: disable=undefined-variable
            Error.timestamp == select([func.min(Error.timestamp)]).where(
                Error.test_id == Test.id).correlate(Test.__table__)),
        uselist=False, lazy='joined')

    @rendered_field
    def first_error(self):
        """Render the first error (message + exception type), or None."""
        if self.first_error_obj is None:
            return None
        return render_api_object(self.first_error_obj,
                                 only_fields={'message', 'exception_type'})

    @rendered_field
    def first_error_id(self):
        if self.first_error_obj is None:
            return None
        return self.first_error_obj.id

    # The most recent comment, selected via a correlated MAX(timestamp)
    # subquery and eagerly joined.
    last_comment_obj = db.relationship(
        lambda: Comment,
        primaryjoin=lambda: and_(
            Test.id == Comment.test_id,
            Comment.timestamp == select([
                func.max(Comment.timestamp)
            ]).where(Comment.test_id == Test.id).correlate(Test.__table__)),
        uselist=False, lazy='joined')

    @rendered_field
    def last_comment(self):
        """Render the latest comment's text and author e-mail, or None."""
        comment = self.last_comment_obj
        if comment is None:
            return None
        return {'comment': comment.comment, 'user_email': comment.user.email}

    related_entities = db.relationship('Entity', secondary='test_entity')
    is_interactive = db.Column(db.Boolean, server_default='FALSE')
    status = db.Column(db.String(20), nullable=False, default=statuses.STARTED)
    skip_reason = db.Column(db.Text(), nullable=True)
    # Denormalized activity counters.
    num_errors = db.Column(db.Integer, default=0)
    num_failures = db.Column(db.Integer, default=0)
    num_comments = db.Column(db.Integer, default=0)
    num_warnings = db.Column(db.Integer, nullable=False, server_default="0")

    __table_args__ = (
        Index('ix_test_start_time', start_time.desc()),
        Index('ix_test_session_id_start_time', session_id, start_time),
        # Functional indexes on LOWER(status) support case-insensitive
        # status filtering combined with start-time ordering.
        Index('ix_test_status_lower_start_time', func.lower(status),
              start_time.desc()),
        Index('ix_test_start_time_status_lower', start_time.desc(),
              func.lower(status)),
        Index('ix_test_test_info_id_start_time', test_info_id,
              start_time.desc()),
        Index('ix_test_timespan', 'timespan', postgresql_using='gist'),
    )

    @rendered_field
    def duration(self):
        """Elapsed seconds, or None while either endpoint is unset."""
        if self.end_time is None or self.start_time is None:
            return None
        return self.end_time - self.start_time

    @rendered_field
    def info(self):
        """Expose the linked TestInformation's identifying attributes."""
        return {
            attr: getattr(self.test_info, attr)
            for attr in ('file_name', 'class_name', 'name')
        }
class Session(db.Model, TypenameMixin, StatusPredicatesMixin,
              HasSubjectsMixin, UserDetailsMixin, TimespanMixin):
    """A test-run session: owns tests, errors, comments and metadata.

    Sessions can be nested (parent/children via logical ids), track
    denormalized test/error counters, and support a keepalive + TTL
    mechanism for abandonment detection and delayed deletion.
    """

    id = db.Column(db.Integer, primary_key=True)
    logical_id = db.Column(db.String(256), unique=True, index=True)
    # Self-referential parent link, keyed on the parent's logical_id.
    parent_logical_id = db.Column(db.String(256),
                                  db.ForeignKey('session.logical_id'),
                                  default=None, index=True)
    children = db.relationship('Session',
                               backref=backref('parent',
                                               remote_side=[logical_id]))
    is_parent_session = db.Column(db.Boolean, server_default='FALSE')
    child_id = db.Column(db.String(20), default=None)
    start_time = db.Column(db.Float, default=get_current_time)
    end_time = db.Column(db.Float, default=None, index=True)
    hostname = db.Column(db.String(100))
    in_pdb = db.Column(db.Boolean, server_default='FALSE')
    infrastructure = db.Column(db.String(50), default=None)
    tests = db.relationship('Test',
                            backref=backref('session', lazy='joined'),
                            cascade='all, delete, delete-orphan')
    errors = db.relationship('Error',
                             backref=backref('session', lazy='joined'))
    comments = db.relationship('Comment',
                               primaryjoin='Comment.session_id==Session.id')
    metadata_items = db.relationship('SessionMetadata', lazy='dynamic',
                                     cascade='all, delete, delete-orphan')
    subject_instances = db.relationship(
        'SubjectInstance', secondary=session_subject,
        backref=backref('sessions', lazy='dynamic'),
        lazy='joined', order_by=session_subject.c.ordinal)
    labels = db.relationship('Label', secondary='session_label',
                             lazy='joined')

    # test counts (denormalized)
    total_num_tests = db.Column(db.Integer, default=None)
    num_failed_tests = db.Column(db.Integer, default=0)
    num_error_tests = db.Column(db.Integer, default=0)
    num_skipped_tests = db.Column(db.Integer, default=0)
    num_finished_tests = db.Column(db.Integer, default=0)
    num_interrupted_tests = db.Column(db.Integer, server_default="0")
    num_warnings = db.Column(db.Integer, nullable=False, server_default="0")
    num_test_warnings = db.Column(db.Integer, nullable=False,
                                  server_default="0")
    user_id = db.Column(db.Integer,
                        db.ForeignKey('user.id', ondelete='CASCADE'),
                        index=True, nullable=False)
    user = db.relationship('User', lazy='joined', foreign_keys=user_id)
    # The "real" user may differ from user — presumably for impersonation
    # or run-on-behalf-of; confirm against the session-creation code.
    real_user_id = db.Column(db.Integer,
                             db.ForeignKey('user.id', ondelete='CASCADE'),
                             nullable=True)
    real_user = db.relationship('User', lazy='joined',
                                foreign_keys=real_user_id)

    # status
    num_errors = db.Column(db.Integer, default=0)
    num_failures = db.Column(db.Integer, default=0)
    status = db.Column(db.String(20), nullable=False,
                       default=statuses.STARTED)

    # keepalive
    keepalive_interval = db.Column(db.Integer, nullable=True, default=None)
    next_keepalive = db.Column(db.Float, nullable=True, default=None,
                               index=True)

    # activity
    num_comments = db.Column(db.Integer, default=0)
    has_fatal_errors = db.Column(db.Boolean, default=False)
    delete_at = db.Column(db.Float, nullable=True)
    ttl_seconds = db.Column(db.Integer, nullable=True)

    __table_args__ = (
        Index('ix_session_start_time', start_time.desc()),
        # Functional indexes on LOWER(status) support case-insensitive
        # status filtering, alone and combined with start-time ordering.
        Index('ix_session_status_lower', func.lower(status)),
        Index('ix_session_start_time_status_lower', start_time.desc(),
              func.lower(status)),
        Index('ix_session_timespan', 'timespan', postgresql_using='gist'),
        # Partial index: only rows actually scheduled for deletion.
        Index('ix_session_delete_at', delete_at,
              postgresql_where=(delete_at != None)),
    )

    # The most recent comment, selected via a correlated MAX(timestamp)
    # subquery and eagerly joined.
    last_comment_obj = db.relationship(
        lambda: Comment,
        primaryjoin=lambda: and_(
            Session.id == Comment.session_id,  # pylint: disable=undefined-variable
            Comment.timestamp == select([func.max(Comment.timestamp)]).where(
                Comment.session_id == Session.id).correlate(Session.__table__)
        ),
        uselist=False, lazy='joined')

    @rendered_field
    def last_comment(self):
        """Render the latest comment's text and author e-mail, or None."""
        comment = self.last_comment_obj
        if comment is None:
            return None
        return {'comment': comment.comment, 'user_email': comment.user.email}

    @rendered_field
    def is_abandoned(self):
        """True when a keepalive was expected, is overdue, and the session
        never ended."""
        if self.next_keepalive is None:
            return False
        if self.next_keepalive > get_current_time():
            return False
        return self.end_time is None

    # rendered extras
    related_entities = db.relationship('Entity', secondary='session_entity')

    @rendered_field
    def real_email(self):
        """E-mail of the real (possibly impersonating) user, or None."""
        user = self.real_user
        if user is None:
            return None
        return user.email

    @rendered_field(name='labels')
    def label_names(self):
        return [l.name for l in self.labels]

    def update_keepalive(self):
        """Advance the keepalive deadline (and the TTL-based delete_at).

        No-op when keepalive_interval is unset. Also extends the session's
        timespan to cover the new deadline.
        """
        if self.keepalive_interval is not None:
            next_keepalive = flux.current_timeline.time(
            ) + self.keepalive_interval
            self.next_keepalive = next_keepalive
            self.extend_timespan_to(next_keepalive)
            if self.ttl_seconds is not None:
                self.delete_at = self.next_keepalive + self.ttl_seconds
def query_for_user(session, username):
    """Return the first User whose username matches case-insensitively,
    or None when no such user exists."""
    normalized = username.lower()
    matches = session.query(User).filter(
        func.lower(User.username) == normalized)
    return matches.first()
def project_list():
    """Render the list of enabled ISPs, ordered case-insensitively by name."""
    visible_isps = (
        ISP.query
        .filter_by(is_disabled=False)
        .order_by(asc(func.lower(ISP.name)))
    )
    return render_template('project_list.html', projects=visible_isps)
name = Column(String(name_length_limit), nullable=False, unique=True) sales = relationship("Sale") wins = relationship("Winner") @validates('name') def validate_name(self, key, name_): assert "@" not in name_, 'name may not include @ symbol' return name_ def __repr__(self): n_sales = len(self.sales) if self.sales is not None else 0 n_wins = len(self.wins) if self.wins is not None else 0 return f"<User(name={self.name}, {n_sales} sale(s), {n_wins} win(s))>" Index('user_name_idx', func.lower(User.name), unique=True) class Drawing(Base): __tablename__ = 'drawings' id = Column(Integer, Sequence('drawing_id_seq'), primary_key=True) date_started = Column(DateTime, nullable=False, server_default=utcnow()) date_drawn = Column(DateTime, nullable=True, default=None) sales = relationship("Sale") winners = relationship("Winner") def __repr__(self): n_sales = len(self.sales) if self.sales is not None else 0 n_winners = len(self.winners) if self.winners is not None else 0 return f"<Drawing(id={self.id}, date_started={self.date_started}, date_drawn={self.date_drawn}, " \ f"{n_sales} sale(s), {n_winners} winner(s)"