def fetch_legs(db, where):
    """Return leg rows with start/end place labels, ordered by start time.

    Start/end places are resolved through two outer-join chains
    (leg_ends -> places); a place's label falls back to its id when unset.
    """
    legs = db.metadata.tables["leg_modes"]
    ends_start = db.metadata.tables["leg_ends"]
    ends_end = ends_start.alias("legends1")
    place_start = db.metadata.tables["places"]
    place_end = place_start.alias("places1")
    columns = [
        legs.c.id,
        legs.c.time_start,
        legs.c.time_end,
        legs.c.activity,
        legs.c.line_type,
        legs.c.line_name,
        legs.c.km,
        legs.c.trip,
        func.coalesce(place_start.c.label,
                      cast(place_start.c.id, String)).label("startplace"),
        func.coalesce(place_end.c.label,
                      cast(place_end.c.id, String)).label("endplace"),
    ]
    joined = (legs
              .outerjoin(ends_start, legs.c.cluster_start == ends_start.c.id)
              .outerjoin(ends_end, legs.c.cluster_end == ends_end.c.id)
              .outerjoin(place_start, ends_start.c.place == place_start.c.id)
              .outerjoin(place_end, ends_end.c.place == place_end.c.id))
    stmt = select(columns, where, joined, order_by=legs.c.time_start)
    return db.engine.execute(stmt)
def find_day2scrobbles(user, artist):
    """Return an OrderedDict mapping day-number (uts // 86400) -> scrobble
    count for *user* and *artist*.

    Days with fewer than 4 scrobbles are zeroed, and leading/trailing runs
    of zero days are trimmed from both ends.
    """
    first_day = int(db.session.query(func.coalesce(func.min(Scrobble.uts), 0)).
                    filter(Scrobble.user == user, Scrobble.artist == artist).
                    scalar() / 86400)
    last_day = int(db.session.query(func.coalesce(func.max(Scrobble.uts), 0)).
                   filter(Scrobble.user == user, Scrobble.artist == artist).
                   scalar() / 86400)
    day2scrobbles = OrderedDict((day, 0)
                                for day in range(first_day, last_day + 1))
    for uts, in db.session.query(Scrobble.uts).\
            filter(Scrobble.user == user, Scrobble.artist == artist):
        day2scrobbles[int(uts / 86400)] += 1
    # Suppress noise: days with fewer than 4 scrobbles count as inactive.
    for day in day2scrobbles:
        if day2scrobbles[day] < 4:
            day2scrobbles[day] = 0
    # BUG FIX: the original deleted keys from the OrderedDict while
    # iterating over it, which raises RuntimeError on Python 3. Iterate
    # over a snapshot of the keys instead.
    for day in list(day2scrobbles):
        if day2scrobbles[day] != 0:
            break
        del day2scrobbles[day]
    for day in list(reversed(day2scrobbles)):
        if day2scrobbles[day] != 0:
            break
        del day2scrobbles[day]
    return day2scrobbles
def accumulator(self, column_name, new_row, agg_row, old_row=None):
    """Build the SQL expression for an incrementally-maintained average.

    Combines the existing aggregate (value * count) with the new row's
    contribution; when *old_row* is given its contribution is subtracted
    first (row-update case). Returns 0 when the combined count is 0.
    """
    new_count = new_row.count
    new_total = new_row.c[column_name] * new_row.count
    if old_row is not None:
        new_count = new_count - old_row.count
        new_total = (new_total - (old_row.c[column_name] * old_row.count))
    agg_count = func.coalesce(agg_row.count, 0)
    # BUG FIX: the original coalesce had no fallback value, so when no
    # aggregate row existed yet the NULL propagated through the sum and
    # the whole expression evaluated to NULL instead of new_total / count.
    agg_value = func.coalesce(agg_row.c[column_name], 0) * agg_count
    total_count = new_count + agg_count
    return case([(total_count == 0, 0)],
                else_=(agg_value + new_total) / total_count)
def get(self):
    # pylint: disable=singleton-comparison
    # Cannot use `is` in SQLAlchemy filters
    """Serve the list of org/event addresses as cached JSON."""
    key = "address:%s" % ["public", "all"][self.deep_visible()]
    cached = self.cache.get(key)
    if cached:
        self.write(cached)
        return
    # Base query: addresses that have at least one usable coordinate.
    address_list = self.orm.query(
        Address.address_id,
        func.coalesce(Address.latitude, Address.manual_latitude),
        func.coalesce(Address.longitude, Address.manual_longitude),
    ).filter(func.coalesce(
        Address.latitude, Address.manual_latitude,
        Address.longitude, Address.manual_longitude
    ) != None)
    org_list = (address_list
                .join((org_address,
                       Address.address_id == org_address.c.address_id))
                .join((Org, Org.org_id == org_address.c.org_id))
                .add_columns(Org.org_id, Org.name, literal("org")))
    event_list = (address_list
                  .join((event_address,
                         Address.address_id == event_address.c.address_id))
                  .join((Event, Event.event_id == event_address.c.event_id))
                  .add_columns(Event.event_id, Event.name, literal("event")))
    today = datetime.datetime.now().date()
    event_list = event_list.filter(Event.start_date >= today)
    if not (self.moderator and self.deep_visible()):
        org_list = org_list.filter(Org.public == True)
        event_list = event_list.filter(Event.public == True)
    address_list = org_list.union(event_list)
    keys = ["address_id", "latitude", "longitude",
            "entity_id", "name", "entity"]
    obj_list = [dict(zip(keys, result)) for result in address_list.all()]
    value = self.dump_json(obj_list)
    self.cache.set(key, value)
    self.write(value)
def set_order_to_last(self, context):
    """Set order of the line to maximum rank + 1."""
    max_rank = (self.query.session
                .query(func.coalesce(func.max(self._order_column), 0))
                .filter(context)
                .as_scalar())
    self._set_order(max_rank + 1)
def user_summary(users, projects=None, resources=None, after=None, before=None):
    """Build a query of (user_id, job_count, net_charges) for *users*.

    Optional filters narrow by projects, resources, and a time window
    (charges by Charge.datetime, jobs by start/end). Returns the
    unexecuted SQLAlchemy query, ordered by user id.
    """
    s = Session()
    jobs_q = s.query(Job.user_id.label("user_id"),
                     func.count(Job.id).label("job_count"))
    charges_q = s.query(Job.user_id.label("user_id"),
                        func.sum(Charge.amount).label("charge_sum"))
    charges_q = charges_q.join(Charge.job)
    refunds_q = s.query(Job.user_id.label("user_id"),
                        func.sum(Refund.amount).label("refund_sum"))
    refunds_q = refunds_q.join(Refund.charge, Charge.job)
    # BUG FIX: the filtered query was previously discarded --
    # Query.filter returns a new query, and the result was never
    # assigned back to jobs_q.
    jobs_q = jobs_q.filter(Job.user_id.in_(user.id for user in users))
    if projects:
        jobs_q = jobs_q.filter(
            Job.account_id.in_(project.id for project in projects))
        charges_ = Charge.allocation.has(
            Allocation.project_id.in_(project.id for project in projects))
        charges_q = charges_q.filter(charges_)
        refunds_q = refunds_q.filter(charges_)
    if resources:
        charges_ = Charge.allocation.has(
            Allocation.resource_id.in_(resource.id for resource in resources))
        jobs_q = jobs_q.filter(Job.charges.any(charges_))
        charges_q = charges_q.filter(charges_)
        refunds_q = refunds_q.filter(charges_)
    if after:
        jobs_q = jobs_q.filter(Job.end > after)
        charges_ = Charge.datetime >= after
        charges_q = charges_q.filter(charges_)
        refunds_q = refunds_q.filter(charges_)
    if before:
        jobs_q = jobs_q.filter(Job.start < before)
        charges_ = Charge.datetime < before
        charges_q = charges_q.filter(charges_)
        refunds_q = refunds_q.filter(charges_)
    jobs_q = jobs_q.group_by(Job.user_id).subquery()
    charges_q = charges_q.group_by(Job.user_id).subquery()
    refunds_q = refunds_q.group_by(Job.user_id).subquery()
    query = s.query(
        Job.user_id,
        func.coalesce(jobs_q.c.job_count, 0),
        (func.coalesce(charges_q.c.charge_sum, 0)
         - func.coalesce(refunds_q.c.refund_sum, 0)),
    )
    query = query.outerjoin(
        (jobs_q, Job.user_id == jobs_q.c.user_id),
        (charges_q, Job.user_id == charges_q.c.user_id),
        (refunds_q, Job.user_id == refunds_q.c.user_id),
    )
    query = query.filter(Job.user_id.in_(user.id for user in users))
    query = query.distinct().order_by(Job.user_id)
    return query
def update_after(self, object, result, session):
    """Store the latest (coalesced) end date for the parent on *result*
    and stage it for saving without validation."""
    join = self.get_parent_primary_keys(object)
    end_col = object._table.sa_table.c[self.end]
    latest_end = select(
        [func.max(func.coalesce(end_col, self.default_end))],
        and_(*join))
    setattr(result, self.field_name, latest_end)
    session.add_no_validate(result)
def _build_notification_before_days_filter(notification_before_days):
    """Build a filter matching occurrences that should be notified about,
    using the room's own notification window when set."""
    days_until = (cast(ReservationOccurrence.start_dt, Date)
                  - cast(func.now(), Date))
    threshold = func.coalesce(Room.notification_before_days,
                              notification_before_days)
    if datetime.now().hour >= settings.get('notification_hour', 6):
        # Notify of today and delayed occurrences (happening in N or less days)
        return days_until <= threshold
    # Notify only of delayed occurrences (happening in less than N days)
    return days_until < threshold
def _record_test_duration(step):
    """Persist the summed test-case duration for *step* as an ItemStat."""
    total_duration = db.session.query(
        func.coalesce(func.sum(TestCase.duration), 0)
    ).filter(
        TestCase.step_id == step.id,
    ).as_scalar()
    create_or_update(
        ItemStat,
        where={'item_id': step.id, 'name': 'test_duration'},
        values={'value': total_duration},
    )
def gen_invoice_no(organization, jurisdiction, invoice_dt):
    """
    Generates a sequential invoice number scoped by the given organization
    for the fiscal year of the given invoice datetime
    """
    fy_start_at, fy_end_at = get_fiscal_year(jurisdiction, invoice_dt)
    # First invoice of a fiscal year starts at 1.
    next_no = func.coalesce(func.max(Invoice.invoice_no + 1), 1)
    return (select([next_no])
            .where(Invoice.organization == organization)
            .where(Invoice.invoiced_at >= fy_start_at)
            .where(Invoice.invoiced_at < fy_end_at))
def get_protection_parent_cte(cls):
    """Recursive CTE mapping every category id to its effective
    protection parent (0 at the root for inheriting categories)."""
    cat_alias = db.aliased(cls)
    base_query = (select([cat_alias.id,
                          db.cast(literal(None),
                                  db.Integer).label('protection_parent')])
                  .where(cat_alias.parent_id.is_(None))
                  .cte(recursive=True))
    # Inheriting categories point at their parent's protection parent;
    # all others are their own protection parent.
    parent_expr = db.case(
        {ProtectionMode.inheriting.value:
            func.coalesce(base_query.c.protection_parent, 0)},
        else_=cat_alias.id,
        value=cat_alias.protection_mode)
    recursive_query = (select([cat_alias.id, parent_expr])
                       .where(cat_alias.parent_id == base_query.c.id))
    return base_query.union_all(recursive_query)
def _service_get_all_topic_subquery(context, session, topic, subq, label):
    """Return enabled, non-deleted services for *topic*, each paired with
    the (coalesced) subquery column named *label*, ordered by that column."""
    sort_value = getattr(subq.c, label)
    query = model_query(context, models.Service,
                        func.coalesce(sort_value, 0),
                        session=session, read_deleted="no")
    query = query.filter_by(topic=topic).filter_by(disabled=False)
    query = query.outerjoin((subq, models.Service.host == subq.c.host))
    return query.order_by(sort_value).all()
def get_protection_parent_cte(self):
    """Recursive CTE over this category's subtree mapping each descendant
    to its effective protection parent (self.id at the root)."""
    base_query = (select([Category.id,
                          db.cast(literal(None),
                                  db.Integer).label('protection_parent')])
                  .where(Category.id == self.id)
                  .cte(recursive=True))
    # Inheriting categories adopt the parent's protection parent; others
    # act as their own protection parent.
    parent_expr = db.case(
        {ProtectionMode.inheriting.value:
            func.coalesce(base_query.c.protection_parent, self.id)},
        else_=Category.id,
        value=Category.protection_mode)
    recursive_query = (select([Category.id, parent_expr])
                       .where(Category.parent_id == base_query.c.id))
    return base_query.union_all(recursive_query)
def get_transformations(self, measures):
    """Build the per-measure accumulator expressions plus the fact-count
    update, keyed by column name."""
    transformations = {}
    for name, measure in measures.items():
        transformations[measure.name] = (
            measure.agg.accumulator(measure.name, self.new_row, self.agg_row)
            .label(measure.name))
    # Update the fact count
    fact_count = self.agg.fact_count_column.name
    transformations[fact_count] = (
        (self.new_row.c[fact_count]
         + func.coalesce(self.agg_row.c[fact_count], 0))
        .label(fact_count))
    return transformations
def system_utilisation_counts_by_group(grouping, systems):
    """Count systems per *grouping* value and per utilisation state.

    Idle states are synthesised as 'idle_<status>' for systems without an
    open reservation.
    """
    states = ['recipe', 'manual', 'idle_automated', 'idle_manual',
              'idle_broken', 'idle_removed']
    retval = defaultdict(lambda: {state: 0 for state in states})
    state_expr = func.coalesce(
        Reservation.type,
        func.concat('idle_', func.lower(System.status)))
    query = (systems.outerjoin(System.open_reservation)
             .with_entities(grouping, state_expr, func.count(System.id))
             .group_by(literal_column("1"), literal_column("2")))
    for group, state, count in query:
        retval[group][state] = count
    return retval
def on_get(self, req, resp, phrase_id):
    """Render a single phrase with its grammar-section and theme id arrays
    (empty arrays when the phrase has none)."""
    grammar_sections = f.coalesce(
        select([f.array_agg(gs_phrase.c.section_id)]).
        where(gs_phrase.c.phrase_id == phrase_id).
        as_scalar(),
        []
    ).label("grammar_sections")
    themes = f.coalesce(
        select([f.array_agg(theme_phrase.c.theme_id)]).
        where(theme_phrase.c.phrase_id == phrase_id).
        as_scalar(),
        []
    ).label("themes")
    columns = list(phrase.c) + [grammar_sections, themes]
    sel = select(columns).where(phrase.c.id == phrase_id)
    resp.body = phrase_editor_view(self.db.execute(sel).fetchone())
def get_storage_used(self, user=None):
    """
    Get the storage used by model runs for a user or all users
    :param user: the user
    :return: a tuple of user id, model run status name, sum of storage in mb (if null returns 0)
    """
    with self.readonly_scope() as session:
        query = (session
                 .query(ModelRun.user_id,
                        ModelRunStatus.name,
                        func.coalesce(func.sum(ModelRun.storage_in_mb), 0))
                 .join(ModelRunStatus)
                 .group_by(ModelRun.user_id, ModelRunStatus.name))
        if user is not None:
            query = query.filter(ModelRun.user_id == user.id)
        return query.all()
def get_aggregated_stats(environment_id):
    """Aggregate per-instance-type lifetime (seconds) and instance count
    for an environment; still-running instances count up to 'now'."""
    session = db_session.get_session()
    now = timeutils.utcnow_ts()
    lifetime = func.sum(
        func.coalesce(models.Instance.destroyed, now)
        - models.Instance.created)
    query = (session.query(models.Instance.instance_type,
                           lifetime,
                           func.count())
             .filter(models.Instance.environment_id == environment_id))
    rows = query.group_by(models.Instance.instance_type).all()
    return [{'type': int(row[0]),
             'duration': int(row[1]),
             'count': int(row[2])}
            for row in rows]
def run(self, db):
    """Recompute today's BotSkill rows from aggregated match results,
    carrying forward yesterday's skill values."""
    session = db.session
    today = datetime.date.today()
    skills = (session.query(
                  BotIdentity.id,
                  func.coalesce(func.sum(MatchResult.delta_chips), 0),
                  func.coalesce(func.sum(MatchResult.hands), 0),
                  func.coalesce(func.count(MatchResult.id), 0))
              .outerjoin(MatchResult)
              .group_by(BotIdentity.id)
              .all())
    yesterday = today - datetime.timedelta(days=1)
    prior_skills = {row.bot: row.skill
                    for row in BotSkill.query.filter_by(date=yesterday).all()}
    # Replace any rows already written for today.
    BotSkill.query.filter_by(date=today).delete()
    session.bulk_save_objects([
        BotSkill(bot_id, today,
                 self.calc_winnings_per_hand(chips, hands),
                 prior_skills.get(bot_id, 0),
                 matches)
        for bot_id, chips, hands, matches in skills])
    session.commit()
def get_chart_data(self, project_id, query_date):
    """Return per-day flaky-test stats for the CHART_DATA_LIMIT days ending
    at *query_date*, newest first; days with no data report zeros."""
    # generate_series gives one row per calendar day so gaps still appear.
    calendar = db.session.query(
        func.generate_series(
            query_date - timedelta(days=CHART_DATA_LIMIT - 1),
            query_date,
            timedelta(days=1)
        ).label('day')
    ).subquery()
    historical_data = db.session.query(
        calendar.c.day,
        func.coalesce(func.sum(FlakyTestStat.flaky_runs), 0),
        func.coalesce(func.sum(FlakyTestStat.double_reruns), 0),
        func.coalesce(func.sum(FlakyTestStat.passing_runs), 0)
    ).outerjoin(
        FlakyTestStat,
        and_(calendar.c.day == FlakyTestStat.date,
             FlakyTestStat.project_id == project_id)
    ).order_by(calendar.c.day.desc()).group_by(calendar.c.day)
    return [{
        'date': str(day.date()),
        'flaky_runs': flaky_runs,
        'double_reruns': double_reruns,
        'passing_runs': passing_runs,
    } for day, flaky_runs, double_reruns, passing_runs in historical_data]
def system_utilisation_counts(systems):
    """
    Similar to the above except returns counts of systems based on the
    current state, rather than historical data about particular systems.
    """
    retval = {state: 0 for state in
              ['recipe', 'manual', 'idle_automated', 'idle_manual',
               'idle_broken', 'idle_removed']}
    # Systems without an open reservation fall into 'idle_<status>'.
    state_expr = func.coalesce(
        Reservation.type,
        func.concat('idle_', func.lower(System.status)))
    query = (systems.outerjoin(System.open_reservation)
             .with_entities(state_expr, func.count(System.id))
             .group_by(literal_column("1")))
    for state, count in query:
        retval[state] = count
    return retval
def get_protection_parent_cte(self):
    """Recursive CTE rooted at this category that yields, for each
    descendant, the id of the category it inherits protection from."""
    base = (select([Category.id,
                    db.cast(literal(None),
                            db.Integer).label('protection_parent')])
            .where(Category.id == self.id)
            .cte(recursive=True))
    # Inheriting children carry the parent's protection parent down;
    # non-inheriting children become their own protection parent.
    protection_parent = db.case(
        {ProtectionMode.inheriting.value:
            func.coalesce(base.c.protection_parent, self.id)},
        else_=Category.id,
        value=Category.protection_mode)
    recursive = (select([Category.id, protection_parent])
                 .where(Category.parent_id == base.c.id))
    return base.union_all(recursive)
def airport_departures(airport: str) -> Response:
    """Get a list of departures for a certain airport"""
    airport = airport.upper()
    dropoff = datetime.now(tz=UTC) - timedelta(hours=5)
    # A flight counts as departed if either gate-out or wheels-off is
    # recorded within the window.
    departed_recently = func.coalesce(
        flights.c.actual_out, flights.c.actual_off) > dropoff
    query = flights.select().where(
        and_(
            flights.c.origin == airport,
            flights.c.flight_number != "BLOCKED",
            departed_recently,
        )
    )
    result = flights_engine.execute(query)
    if result is None:
        abort(404)
    return jsonify([dict(row) for row in result])
def _set_orderby_desc(query, orderby, descending):
    """Set order by to query."""
    if orderby == 'fav_user_ids':
        # Order by number of favouriting users (NULL array counts as 0).
        n_favs = func.coalesce(
            func.array_length(Task.fav_user_ids, 1), 0).label('n_favs')
        query = query.add_column(n_favs)
        query = query.order_by(desc("n_favs") if descending else "n_favs")
    else:
        column = getattr(Task, orderby)
        query = query.order_by(column.desc() if descending
                               else column.asc())
    return query
def _set_orderby_desc(query, orderby, descending):
    """Set order by to query."""
    if orderby != 'fav_user_ids':
        column = getattr(Task, orderby)
        return query.order_by(column.desc() if descending else column.asc())
    # Favourite count: length of the fav_user_ids array, 0 when NULL.
    n_favs = func.coalesce(
        func.array_length(Task.fav_user_ids, 1), 0).label('n_favs')
    query = query.add_column(n_favs)
    if descending:
        return query.order_by(desc("n_favs"))
    return query.order_by("n_favs")
def aggregate_job_stat(job, name, func_=func.sum):
    """Aggregate ItemStat *name* across the job's non-replaced steps and
    store the result as the job's own ItemStat."""
    step_ids = db.session.query(JobStep.id).filter(
        JobStep.job_id == job.id,
        JobStep.replacement_id.is_(None),
    )
    value = db.session.query(
        func.coalesce(func_(ItemStat.value), 0),
    ).filter(
        ItemStat.item_id.in_(step_ids),
        ItemStat.name == name,
    ).as_scalar()
    try_create(ItemStat, where={
        'item_id': job.id,
        'name': name,
        'value': value,
    })
def get_aggregated_stats(environment_id):
    """Per-instance-type totals for an environment: summed lifetime in
    seconds (open-ended instances measured to 'now') and instance count."""
    session = db_session.get_session()
    now = timeutils.utcnow_ts()
    duration_expr = func.sum(
        func.coalesce(models.Instance.destroyed, now)
        - models.Instance.created)
    rows = (session.query(models.Instance.instance_type,
                          duration_expr,
                          func.count())
            .filter(models.Instance.environment_id == environment_id)
            .group_by(models.Instance.instance_type)
            .all())
    return [{'type': int(instance_type),
             'duration': int(duration),
             'count': int(count)}
            for instance_type, duration, count in rows]
def venues():
    """
    venue "index" page data

    this method is much bigger than I'd otherwise like; given the freedom
    to do so, I'd probably look for a way to reduce the dictionary that
    gets built at the end
    """
    venue_model = models['venue']
    show_model = models['show']
    # BUG FIX: counting func.coalesce(show.id, 0) made venues with no
    # shows report a count of 1, because COALESCE turned the outer-join
    # NULL into 0 and COUNT counts every non-NULL value. Counting the raw
    # show id yields 0 for show-less venues.
    results = db.session.query(
        venue_model.state.label('state'),
        venue_model.city.label('city'),
        venue_model.name.label('name'),
        venue_model.id.label('id'),
        func.count(show_model.id).label('upcoming_show_count')).outerjoin(
            show_model, venue_model.id == show_model.venue_id).group_by(
                venue_model.id).order_by(
                    venue_model.state,
                    venue_model.city,
                    venue_model.name,
                ).all()
    # Group the flat result rows into one dict per (city, state) area.
    last_city_state = ''
    data = []
    city_state = {}
    for venue in results:
        if '{}, {}'.format(venue.city, venue.state) != last_city_state:
            if 'venues' in city_state:
                data.append(city_state)
            city_state = {
                'city': venue.city,
                'state': venue.state,
                'venues': [],
            }
            last_city_state = '{}, {}'.format(venue.city, venue.state)
        city_state['venues'].append({
            'id': venue.id,
            'name': venue.name,
            'num_upcoming_shows': venue.upcoming_show_count,
        })
    if 'venues' in city_state:
        data.append(city_state)
    return render_template('pages/venues.html', areas=data)
def aggregate_build_stat(build, name, func_=func.sum):
    """Aggregate ItemStat *name* over all of the build's jobs and store
    the result as the build's own ItemStat."""
    job_ids = db.session.query(Job.id).filter(Job.build_id == build.id)
    value = db.session.query(
        func.coalesce(func_(ItemStat.value), 0),
    ).filter(
        ItemStat.item_id.in_(job_ids),
        ItemStat.name == name,
    ).as_scalar()
    try_create(ItemStat, where={
        'item_id': build.id,
        'name': name,
        'value': value,
    })
def update_after(self, object, result, session):
    # Assigns the next sequence value (max existing field value + 1,
    # scoped by this object's join-key values) to self.field_name on
    # *object*, then stages the object for saving without validation.
    # NOTE(review): get_join_tuples/base_level semantics are not visible
    # here -- assumed to produce (key_name, ...) tuples naming the
    # grouping columns; confirm upstream.
    relation_attr = self.target_to_base_path[0]
    join_tuples = self.get_join_tuples(relation_attr, object._table, self.base_level)
    target_table = self.database.tables[self.target_table].sa_table.alias()
    # First element of each join tuple is used as the key column name.
    join_keys = [key[0] for key in join_tuples]
    key_values = [target_table.c[key] == getattr(object, key) for key in join_keys]
    # The aggregate is wrapped in an extra aliased SELECT so it is
    # evaluated as a scalar subquery when the row is written.
    setattr(object, self.field_name,
            select([select([func.max(func.coalesce(target_table.c[self.field_name], 0)) + 1],
                           and_(*key_values)).alias()]))
    session.add_no_validate(object)
def aggregate_job_stat(job, name, func_=func.sum):
    """Roll up ItemStat *name* from the job's active (non-replaced) steps
    into a single ItemStat on the job itself."""
    active_steps = db.session.query(JobStep.id).filter(
        JobStep.job_id == job.id,
        JobStep.replacement_id.is_(None),
    )
    aggregated = db.session.query(
        func.coalesce(func_(ItemStat.value), 0),
    ).filter(
        ItemStat.item_id.in_(active_steps),
        ItemStat.name == name,
    ).as_scalar()
    try_create(ItemStat, where={
        'item_id': job.id,
        'name': name,
        'value': aggregated,
    })
def aggregate_stat_for_build(build: Build, name: str, func_=func.sum):
    """
    Aggregates a single stat for all jobs the given build.
    """
    job_ids = db.session.query(Job.id).filter(Job.build_id == build.id)
    value = db.session.query(
        func.coalesce(func_(ItemStat.value), 0),
    ).filter(
        ItemStat.item_id.in_(job_ids),
        ItemStat.name == name,
    ).as_scalar()
    create_or_update(
        model=ItemStat,
        where={
            'item_id': build.id,
            'name': name,
        },
        values={'value': value},
    )
def __call__(self, args, session):
    """Crawl one named collection, or every collection ordered by how
    stale its last crawl is (never-crawled first)."""
    def crawl_collection(collection):
        # Crawl each registered root path, then stamp the crawl time.
        for root in collection.root_paths:
            crawler.crawl_recursive(session, root.path, collection=collection)
        session.commit()
        collection.last_crawled = time.time()

    if args.collection is not None:
        target = (session.query(model.Collection)
                  .filter(model.Collection.name == args.collection)
                  .one())
        crawl_collection(target)
    else:
        pending = (session.query(model.Collection)
                   .order_by(func.coalesce(model.Collection.last_crawled,
                                           -1.0)))
        for collection in pending:
            crawl_collection(collection)
def child_rel_query(post_id, page=0, sort_by='top'):
    """Fetch one page (8 rows) of child relations for a post, sorted by
    vote total ('top') or by link time (anything else)."""
    start, stop = page * 8, (page + 1) * 8
    if sort_by == 'top':
        vote_totals = (db.session.query(
                           Vote.rel_id,
                           func.sum(Vote.value).label('votecount'))
                       .group_by(Vote.rel_id)
                       .subquery())
        ranked = (db.session.query(Relation, vote_totals.c.votecount)
                  .filter(Relation.parent_id == post_id)
                  .outerjoin(vote_totals,
                             Relation.id == vote_totals.c.rel_id)
                  .order_by(func.coalesce(vote_totals.c.votecount, 0).desc(),
                            Relation.id)
                  .slice(start, stop))
        return [row[0] for row in ranked]
    return (db.session.query(Relation)
            .order_by(Relation.time_linked.desc())
            .filter(Relation.parent_id == post_id)
            .slice(start, stop)
            .all())
def _list_q(self, board_slug):
    """Internal method for querying topic list.

    :param board_slug: The slug :type:`str` identifying a board.
    """
    anchor = datetime.datetime.now() - datetime.timedelta(days=7)
    # Open topics always show; closed ones only if bumped within a week.
    active_filter = or_(
        Topic.status == "open",
        and_(Topic.status != "open", TopicMeta.bumped_at >= anchor),
    )
    return (self.dbsession.query(Topic)
            .join(Topic.board, Topic.meta)
            .options(contains_eager(Topic.meta))
            .filter(and_(Board.slug == board_slug, active_filter))
            .order_by(desc(func.coalesce(TopicMeta.bumped_at,
                                         Topic.created_at))))
def child_rel_query(post_id, page=0, sort_by='top'):
    """Return the requested 8-row page of a post's child relations.

    'top' ranks by summed vote value (missing votes count as 0, ties
    broken by id); any other sort returns newest-linked first.
    """
    window = slice(page * 8, (page + 1) * 8)
    if sort_by != 'top':
        return (db.session.query(Relation)
                .order_by(Relation.time_linked.desc())
                .filter(Relation.parent_id == post_id)
                .slice(window.start, window.stop)
                .all())
    counts = (db.session.query(Vote.rel_id,
                               func.sum(Vote.value).label('votecount'))
              .group_by(Vote.rel_id)
              .subquery())
    rows = (db.session.query(Relation, counts.c.votecount)
            .filter(Relation.parent_id == post_id)
            .outerjoin(counts, Relation.id == counts.c.rel_id)
            .order_by(func.coalesce(counts.c.votecount, 0).desc(),
                      Relation.id)
            .slice(window.start, window.stop))
    return [relation for relation, _votecount in rows]
def detail_item():
    """Render the per-item debt breakdown for an event.

    Query params: event_id (required); category and/or person_id narrow
    the listing. BUG FIX: with neither filter supplied, 'details' and
    'flag' were never assigned and the view raised UnboundLocalError;
    now all items for the event are listed in that case.
    """
    event_id = request.args.get('event_id')
    category = request.args.get('category')
    person_id = request.args.get('person_id')
    # Per-item average cost and summed shares, used to pro-rate debts.
    sub_debt = db.session.query(
        func.avg(OrmItem.cost).label("costs"),
        func.sum(OrmDebt.sum).label("sums"),
        OrmItem.id.label("id")). \
        join(OrmItem, OrmItem.id == OrmDebt.item_id). \
        join(OrmCheck, OrmCheck.id == OrmItem.check_id). \
        filter(OrmCheck.event_id == event_id). \
        group_by(OrmItem.id).subquery()
    query = \
        db.session.query(OrmCheck.description, OrmItem.name,
                         OrmItem.category,
                         (sub_debt.c.costs / sub_debt.c.sums *
                          func.coalesce(OrmDebt.sum, 0)).label("sum"),
                         OrmUser.name.label("pname"), OrmUser.surname). \
        join(OrmDebt, OrmDebt.item_id == OrmItem.id). \
        join(OrmCheck, OrmCheck.id == OrmItem.check_id). \
        join(OrmEvent, OrmEvent.id == OrmCheck.event_id). \
        join(OrmParticipant,
             and_(OrmParticipant.c.event_id == OrmEvent.id,
                  OrmParticipant.c.person_id == OrmDebt.person_id)). \
        join(OrmUser, OrmParticipant.c.person_id == OrmUser.id)
    if category and person_id:
        details = query.filter(
            and_(OrmItem.category == category,
                 OrmCheck.event_id == event_id,
                 OrmDebt.person_id == person_id,
                 sub_debt.c.id == OrmItem.id)).all()
        flag = None
    elif category:
        details = query.filter(
            and_(OrmItem.category == category,
                 OrmCheck.event_id == event_id,
                 sub_debt.c.id == OrmItem.id)).all()
        flag = "category"
    elif person_id:
        details = query.filter(
            and_(OrmCheck.event_id == event_id,
                 OrmDebt.person_id == person_id,
                 sub_debt.c.id == OrmItem.id)).all()
        flag = "person"
    else:
        # BUG FIX: previously unhandled, leaving details/flag unbound.
        details = query.filter(
            and_(OrmCheck.event_id == event_id,
                 sub_debt.c.id == OrmItem.id)).all()
        flag = None
    return render_template('item_table.html', details=details,
                           event_id=event_id, flag=flag)
def airport_enroute(airport: str) -> Response:
    """Get a list of flights enroute to a certain airport"""
    airport = airport.upper()
    past_dropoff = datetime.now(tz=UTC) - timedelta(hours=5)
    future_dropoff = datetime.now(tz=UTC) + timedelta(hours=6)
    # Enroute = not yet gated-in, landed, or cancelled.
    still_enroute = func.coalesce(
        flights.c.actual_in, flights.c.actual_on,
        flights.c.cancelled) == None
    query = flights.select().where(
        and_(
            flights.c.destination == airport,
            flights.c.flight_number != "BLOCKED",
            still_enroute,
            flights.c.estimated_on.between(past_dropoff, future_dropoff),
        )
    )
    result = flights_engine.execute(query)
    if result is None:
        abort(404)
    return jsonify([dict(row) for row in result])
def list_recent(self, _limit=100):
    """Query recent topics regardless of the board."""
    # Closed topics only count as recent if bumped within the delta.
    recently_bumped = and_(
        Topic.status != "open",
        TopicMeta.bumped_at >= func.now() - TOPIC_RECENT_DELTA,
    )
    query = (self.dbsession.query(Topic)
             .join(Topic.meta)
             .options(contains_eager(Topic.meta), joinedload(Topic.board))
             .filter(and_(or_(Topic.status == "open", recently_bumped)))
             .order_by(desc(func.coalesce(TopicMeta.bumped_at,
                                          Topic.created_at)))
             .limit(_limit))
    return list(query)
def get_transformations(self, measures):
    # Builds the SET-clause expressions for maintaining the aggregate on
    # row UPDATEs: each measure is re-accumulated from the OLD and NEW
    # row images, and the fact count is adjusted by their difference.
    transformations = {}
    # Alias the cube's selectable as the trigger's OLD row image and
    # rewrite the aggregation query against it; both are kept on self for
    # later use when the trigger SQL is emitted.
    self.trigger_old = TriggerRow(self.cube.selectable, 'OLD')
    self.trigger_oldquery = self.sql_query.replace_selectable(
        self.cube.selectable, self.trigger_old).alias()
    self.old_row = AccumulatorRow(self.trigger_oldquery, self.agg)
    for name, expr in measures.items():
        measure = measures[name]
        # Accumulator receives the OLD row so it can subtract the old
        # contribution before adding the new one.
        transformations[expr.name] = (measure.agg.accumulator(
            expr.name, self.new_row, self.agg_row, self.old_row)
            .label(expr.name))
    # Update the fact count: (new - old) plus the existing aggregate
    # count, coalesced to 0 when no aggregate row exists yet.
    transformations[self.agg.fact_count_column.name] = ((
        (self.new_row.c[self.agg.fact_count_column.name] -
         self.old_row.c[self.agg.fact_count_column.name]) +
        func.coalesce(self.agg_row.c[self.agg.fact_count_column.name], 0))
        .label(self.agg.fact_count_column.name))
    return transformations
def __getSpotsStatus():
    """
    Get spots' status, per level, per vehicleType
    Returns:
        Row: (levelId, vehicleType, totalNumberOfSpots, reservedSpots)
    """
    # All spots per (level, vehicle type).
    totals = (db.session.query(
                  ParkingSpot.levelId,
                  ParkingSpot.vehicleTypeId,
                  VehicleType.name,
                  func.count(ParkingSpot.id).label('allSpots'))
              .join(VehicleType,
                    ParkingSpot.vehicleTypeId == VehicleType.id)
              .group_by(ParkingSpot.levelId, ParkingSpot.vehicleTypeId,
                        VehicleType.name)
              .subquery())
    # Currently-occupied spots: checked in but not yet checked out.
    occupied = (db.session.query(
                    ParkingSpot.vehicleTypeId,
                    ParkingSpot.levelId,
                    func.count(Reservation.id).label('reservedSpots'))
                .join(ParkingSpot,
                      Reservation.parkingSpotId == ParkingSpot.id)
                .filter(and_(Reservation.checkinTimestamp != None,
                             Reservation.checkoutTimestamp == None))
                .group_by(ParkingSpot.levelId, ParkingSpot.vehicleTypeId)
                .subquery())
    return (db.session.query(totals.c.levelId,
                             totals.c.name,
                             totals.c.allSpots,
                             func.coalesce(occupied.c.reservedSpots, 0))
            .outerjoin(occupied,
                       (totals.c.levelId == occupied.c.levelId) &
                       (totals.c.vehicleTypeId == occupied.c.vehicleTypeId))
            .all())
def select(self):
    """Booking-list report query for this banking account.

    Returns a query over bookings on the account (splitted bookings
    excluded, restricted to the selected accounting year) with
    display-labelled columns; allotment numbers per member are
    aggregated into one string.
    """
    from sqlalchemy.sql.expression import false
    return (self.db.query(
        Booking.id.label('#'),
        Booking.booking_day.label('Datum'),
        # Booking text, purpose and recipient concatenated for display.
        (to_string(Booking.booking_text).concat('<br />').concat(
            to_string(Booking.purpose)).concat('<br />').concat(
                to_string(Booking.recipient)).label('Buchungsinfo')),
        Booking.value.label('Betrag'),
        # Member surname plus aggregated allotment numbers ('n/a' when
        # none are assigned).
        (to_string(Member.lastname).concat(' (').concat(
            func.coalesce(string_agg(Allotment.number),
                          'n/a')).concat(')').label('Zugeordnet')),
        BookingKind.shorttitle.label('Typ'),
    ).select_from(Booking).outerjoin(Member).outerjoin(
        Allotment, Allotment.member_id == Member.id).outerjoin(
            BookingKind).group_by(
                Booking.id, Member.lastname,
                BookingKind.shorttitle).filter(
                    Booking.banking_account == self.context).filter(
                        Booking.is_splitted == false()).filter(
                            Booking.accounting_year == get_selected_year()))
class Idea(Base):
    """An idea posted by a user; rows whose ``target`` is set act as
    comments on another idea (self-referential relation)."""
    __tablename__ = 'ideas'
    idea_id = Column(Integer, primary_key=True)
    # Self-referential FK: a non-NULL target makes this row a comment.
    target_id = Column(Integer, ForeignKey('ideas.idea_id'))
    comments = relation('Idea', cascade="delete",
                        backref=backref('target', remote_side=idea_id))
    author_id = Column(Integer, ForeignKey('users.user_id'))
    author = relation(User, cascade="delete", backref='ideas')
    title = Column(UnicodeText)
    text = Column(UnicodeText)
    hits = Column(Integer, default=0)
    misses = Column(Integer, default=0)
    tags = relation(Tag, secondary=ideas_tags, backref='ideas')
    voted_users = relation(User, secondary=voted_users, lazy='dynamic',
                           backref='voted_ideas')
    # Vote statistics computed in SQL as column properties; COALESCE
    # supplies 0 when the percentage expression is NULL.
    # NOTE(review): hits/misses are Integer columns, so the division may
    # truncate on some backends -- confirm intended precision.
    hit_percentage = func.coalesce(hits / (hits + misses) * 100, 0)
    hit_percentage = column_property(hit_percentage.label('hit_percentage'))
    total_votes = column_property((hits + misses).label('total_votes'))
    vote_differential = column_property(
        (hits - misses).label('vote_differential'))

    @classmethod
    def get_by_id(cls, idea_id):
        """Return the Idea with the given primary key, or None."""
        return DBSession.query(cls).filter(cls.idea_id == idea_id).first()

    @classmethod
    def get_by_tagname(cls, tag_name):
        """Return a query of Ideas carrying the named tag."""
        return DBSession.query(Idea).filter(Idea.tags.any(name=tag_name))

    @classmethod
    def ideas_bunch(cls, order_by, how_many=10):
        """Return up to *how_many* top-level ideas (no comments), ordered
        by the given criterion."""
        q = DBSession.query(cls).join('author').filter(cls.target == None)
        return q.order_by(order_by)[:how_many]

    def user_voted(self, username):
        """Return True if *username* has already voted on this idea."""
        return bool(self.voted_users.filter_by(username=username).first())
def posts(self, parent_id=None):
    """
    Returns an iterable query of posts whose content comes from a source
    that belongs to this discussion. The result is a list of posts sorted
    by their youngest descendent in descending order.
    """
    lower_post = aliased(Post, name="lower_post")
    lower_content = aliased(Content, name="lower_content")
    upper_post = aliased(Post, name="upper_post")
    upper_content = aliased(Content, name="upper_content")
    # Correlated scalar subquery: the newest creation date among a post's
    # descendants, matched via the materialized-path 'ancestry' column
    # (descendants have ancestry LIKE '<parent ancestry><parent id>,%'),
    # falling back to the post's own content date when it has none.
    latest_update = select([
        func.coalesce(
            func.max(lower_content.creation_date),
            upper_content.creation_date
        )
    ], lower_post.content_id == lower_content.id).where(
        lower_post.ancestry.like(
            upper_post.ancestry + cast(upper_post.id, String) + ',%'
        )
    ).label("latest_update")
    query = self.db.query(
        upper_post,
    ).join(
        upper_content,
    ).filter(
        upper_post.parent_id == parent_id
    ).order_by(
        desc(latest_update)
    )
    # Top-level listing: additionally restrict content to this
    # discussion's sources.
    if not parent_id:
        query = query.join(
            Source
        ).filter(
            Source.discussion_id == self.id,
            upper_content.source_id == Source.id,
        )
    return query
def period_list(self):
    """Paginated list of the session user's period entries, newest first
    (entry date, falling back to modification date)."""
    current_page = int(self.request.params.get("page", 1))
    session_person = (self.request.dbsession.query(Person)
                      .filter(Person.id ==
                              self.request.session['person_id'])
                      .one())
    entries = (self.request.dbsession.query(Period)
               .filter(Period.person == session_person)
               .order_by(func.coalesce(Period.date,
                                       Period.modified_date).desc()))
    page = SqlalchemyOrmPage(entries, page=current_page, items_per_page=30)
    return dict(
        entries=entries,
        page=page,
        period_intensity_choices={**period_intensity_choices, 1: ''},
        cervical_fluid_choices={**cervical_fluid_choices, 1: ''},
        lh_surge_choices={1: '', 2: 'Negative', 3: 'Positive'},
    )
def record_test_stats(job_id: UUID):
    """Recompute and persist the job's test count, failure count, and
    total duration as ItemStat rows, then flush the session."""
    total_tests = db.session.query(func.count(TestCase.id)).filter(
        TestCase.job_id == job_id).as_scalar()
    failed_tests = db.session.query(func.count(TestCase.id)).filter(
        TestCase.job_id == job_id,
        TestCase.result == Result.failed).as_scalar()
    total_duration = db.session.query(
        func.coalesce(func.sum(TestCase.duration), 0)).filter(
        TestCase.job_id == job_id).as_scalar()
    create_or_update(
        ItemStat,
        where={"item_id": job_id, "name": "tests.count"},
        values={"value": total_tests},
    )
    create_or_update(
        ItemStat,
        where={"item_id": job_id, "name": "tests.failures"},
        values={"value": failed_tests},
    )
    create_or_update(
        ItemStat,
        where={"item_id": job_id, "name": "tests.duration"},
        values={"value": total_duration},
    )
    db.session.flush()
def targetGetProductSales(itemCode, date):
    """Return the number of units of *itemCode* sold on *date* (the
    datetime prefix match) as a positive integer.

    Sale amounts are stored negated in inventory_transactions, hence the
    sign flip on return.
    """
    con = targetDbEng.connect()
    # BUG FIX: the connection was leaked if execute() raised; close it in
    # a finally block. Also reuse one MetaData for both table reflections.
    try:
        meta = MetaData(targetDbEng)
        sale_items = Table('sale_items', meta, autoload=True)
        inventory_transactions = Table('inventory_transactions', meta,
                                       autoload=True)
        stm = select([
            func.coalesce(func.sum(inventory_transactions.c.amount), 0)
        ]).select_from(
            sale_items.join(
                inventory_transactions,
                sale_items.c.inventory_transaction ==
                inventory_transactions.c.id)
        ).where(inventory_transactions.c.item_code == itemCode).where(
            inventory_transactions.c.creation_datetime.like(date + '%'))
        num_sales = con.execute(stm).fetchone()
    finally:
        con.close()
    return -int(num_sales[0])
def upsertReponseContent(self, job, response):
    """Store a fetched page's content on *job* and mark it complete,
    retrying the whole transaction on transient database errors.

    NOTE(review): the loop retries forever on OperationalError /
    InvalidRequestError -- presumably transient lock conflicts; confirm
    an external bound exists.
    """
    while 1:
        try:
            # If we have already fetched the page, push what we have back
            # into the history table.
            last = None
            if job.content:
                last = self.pushBackHistory(job, response)
            job.title = response['title']
            job.content = response['contents']
            job.mimetype = response['mimeType']
            # Update the tsv_content column if we have data for it.
            if response['contents']:
                job.tsv_content = func.to_tsvector(func.coalesce(response['contents']))
            if "text" in job.mimetype:
                job.is_text = True
            else:
                job.is_text = False
            job.state = 'complete'
            # Disabled for space-reasons.
            # if 'rawcontent' in response:
            #     job.raw_content = response['rawcontent']
            job.fetchtime = datetime.datetime.now()
            self.db.get_session().commit()
            self.log.info("Marked plain job with id %s, url %s as complete!", job.id, job.url)
            break
        except sqlalchemy.exc.OperationalError:
            # Roll back and retry the whole update.
            self.db.get_session().rollback()
        except sqlalchemy.exc.InvalidRequestError:
            self.db.get_session().rollback()
def airport_enroute(airport: str) -> Response:
    """Get a list of flights enroute to a certain airport"""
    airport = airport.upper()
    # Window anchored to the latest recorded wheels-off time.
    window_start = select([
        func.datetime(func.max(flights.c.actual_off), text(f"'-5 hours'"))
    ])
    window_end = select([
        func.datetime(func.max(flights.c.actual_off), text(f"'+6 hours'"))
    ])
    # Enroute = not yet gated-in, landed, or cancelled.
    still_enroute = func.coalesce(
        flights.c.actual_in, flights.c.actual_on,
        flights.c.cancelled) == None
    query = flights.select().where(and_(
        flights.c.destination == airport,
        flights.c.flight_number != "BLOCKED",
        still_enroute,
        flights.c.estimated_on.between(window_start, window_end),
    ))
    result = flights_engine.execute(query)
    if result is None:
        abort(404)
    return jsonify([dict(row) for row in result])
def query_books(session, tags, categories):
    """Return an iterator over books carrying every tag in *tags* and at
    least one category from each tree in *categories*.

    Books are ordered newest-first by the first non-null of finished_at,
    started_at, acquired_at.
    """
    recency = desc(func.coalesce(Book.finished_at, Book.started_at,
                                 Book.acquired_at))
    all_books = session.query(Book).order_by(recency).all()

    required_tags = set(tags)
    category_trees = [
        set(Category.tree_names(session, category))
        for category in categories
    ]

    def keep(book):
        # Every requested tag must be present on the book.
        book_tags = {tag.name for tag in book.tags}
        if required_tags and not required_tags <= book_tags:
            return False
        # Each requested category tree must intersect the book's categories.
        book_categories = {c.name for c in book.categories}
        return all(not tree.isdisjoint(book_categories)
                   for tree in category_trees)

    return filter(keep, all_books)
def coinflip():
    """Play one round: charge a 30-ticket fee, credit a random payout.

    Returns:
        ({"amount": int}, 200) on success,
        ({"error": ...}, 400) when the balance is under 30 tickets,
        ({"error": ...}, 401) when no user is logged in.
    """
    # BUG fix: the original fell through and implicitly returned None for
    # unauthenticated callers, which Flask turns into a server error.
    if "user_id" not in session:
        return {"error": "Insufficient balance"}, 401

    user_id = session["user_id"]
    # Current balance = sum of all transaction amounts (0 when none).
    ticket_balance = (db.session.query(
        func.coalesce(func.sum(Transaction.ticket_amount), 0))
        .filter(Transaction.user_id == user_id).scalar())
    if ticket_balance < 30:
        return {"error": "Insufficient balance"}, 400

    amount = return_balance()
    # NOTE(review): activity is labelled "skiball" inside coinflip() —
    # looks copy-pasted; confirm the intended activity name.
    transaction = Transaction(user_id=user_id,
                              ticket_amount=amount,
                              activity="skiball")
    fee = Transaction(user_id=user_id, ticket_amount=-30, activity="skiball")
    db.session.add(fee)
    db.session.add(transaction)
    db.session.commit()
    return {"amount": amount}
def do_commands(self, arg):
    """Maintain commands and aliases.

    Dispatches on flags of *arg*: --add alias cmd (upsert an alias,
    optionally with pipe_to), --list (enabled aliases), --results
    (command results with truncated output), --save guid (write the
    first non-null of stdout/stderr/exception to a file named after
    the guid).
    """
    if arg.add:
        alias_name = arg.add[0]
        command_text = arg.add[1]
        # Upsert: reuse an existing row for this alias, else create one.
        ca = self.db.query(CommandAlias).filter(
            CommandAlias.alias == alias_name).first()
        if not ca:
            ca = CommandAlias(alias=alias_name)
        ca.cmd = command_text
        if arg.pipe_to:
            ca.pipe_to = arg.pipe_to
        self.db.add(ca)
        self.db.commit()
    if arg.list:
        q = self.db.query(CommandAlias.alias, CommandAlias.cmd,
                          CommandAlias.pipe_to).filter(
            CommandAlias.enabled)
        self.print_table(q)
    if arg.results:
        # Truncate each stream to 50 chars so the table stays readable.
        q = self.db.query(
            Command.guid,
            Command.host_address,
            Command.host_port,
            Command.username,
            Command.cmd,
            Command.exit_status,
            ("STDOUT: " + func.substr(Command.stdout, 0, 50) + os.linesep +
             "STDERR: " + func.substr(Command.stderr, 0, 50) + os.linesep +
             "EXC: " + func.substr(Command.exception, 0, 50)).label('output'),
            Command.updated)
        self.print_table(q)
    if arg.save:
        r = self.db.query(
            func.coalesce(Command.stdout, Command.stderr,
                          Command.exception).label('output')).filter(
            Command.guid == arg.save[0]).scalar()
        with open(arg.save[0], 'wt') as f:
            # BUG fix: r is None when the guid is unknown or all three
            # columns are NULL; writing None raised TypeError. Also dropped
            # the redundant f.close() — `with` already closes the file.
            f.write(r or '')
def _run_directory_match_update(self):
    """Link RPM files to on-disk files reached through directory symlinks.

    Builds a SELECT that rewrites each RPM file path through a resolved
    directory symlink (ResolvedSymlinks rows with target_type "D"),
    matches the rewritten path against FileDetail, filters out pairs that
    are already linked and targets that are themselves symlinks, then
    bulk-INSERTs the remaining (rpm_detail_id, file_detail_id) pairs into
    RpmFileDetailLink.

    Returns:
        int: Number of link rows inserted (DBAPI rowcount).
    """
    rd: RpmDetail = aliased(RpmDetail)
    fd: FileDetail = aliased(FileDetail)
    lk: RpmFileDetailLink = aliased(RpmFileDetailLink)

    query = State.get_db_session().query(
        rd.rpm_detail_id,
        fd.file_detail_id).join(
        ResolvedSymlinks,
        # Symlink must be a directory link on the same system, and its
        # file_location must be a strict prefix of the RPM file's path.
        (rd.system_id == ResolvedSymlinks.system_id) &
        (ResolvedSymlinks.target_type == "D") &
        (func.length(rd.file_location) > func.length(
            ResolvedSymlinks.file_location)) &
        (ResolvedSymlinks.file_location == func.substr(
            rd.file_location, 1,
            func.length(ResolvedSymlinks.file_location)))).join(
        fd,
        # Rewrite: resolved_location + (RPM path with the symlink prefix
        # stripped) must equal an actual file's location.
        (fd.system_id == ResolvedSymlinks.system_id) &
        (fd.file_location ==
         (ResolvedSymlinks.resolved_location + func.substr(
             rd.file_location,
             func.length(ResolvedSymlinks.file_location) + 1)))
    ).outerjoin(
        lk,
        (lk.file_detail_id == fd.file_detail_id) &
        (lk.rpm_detail_id == rd.rpm_detail_id)).filter(
        # Anti-join: keep only pairs with no existing link row; skip
        # targets whose file_type is "S" (symlinks themselves).
        (rd.system_id == self.system_id) &
        (lk.rpm_file_detail_link_id == None) &
        (func.coalesce(fd.file_type, "") != "S")
    ).distinct()

    # Single INSERT ... SELECT so the whole match runs inside the DB.
    insert_dml = insert(RpmFileDetailLink).from_select([
        rd.rpm_detail_id,
        fd.file_detail_id,
    ], query)

    result = State.get_db_session().execute(insert_dml)
    State.get_db_session().flush()
    State.get_db_session().commit()
    # Refresh planner statistics after the bulk insert.
    self.analyze_database()
    return result.rowcount
def airport_scheduled(airport: str) -> Response:
    """Get a list of scheduled flights from a certain airport.

    A flight is "scheduled" when its origin matches, it has not yet left
    (no actual_out/actual_off) or is a true-cancel, its scheduled_off sits
    inside a window anchored on the latest departure in the table
    (-5h .. +6h), and it is either uncancelled or a true-cancel whose
    cancellation falls after the window start.
    """
    airport = airport.upper()

    # Hoisted window bounds; the original repeated the same subquery three
    # times. (Also fixed: placeholder-free f-strings — F541.)
    window_start = select([
        func.datetime(func.max(flights.c.actual_off), text("'-5 hours'"))
    ])
    window_end = select([
        func.datetime(func.max(flights.c.actual_off), text("'+6 hours'"))
    ])

    result = flights_engine.execute(flights.select().where(
        and_(
            flights.c.origin == airport,
            flights.c.flight_number != "BLOCKED",
            or_(
                # Idiomatic IS NULL test instead of `== None` (E711).
                func.coalesce(flights.c.actual_out,
                              flights.c.actual_off).is_(None),
                # You can actually get true_cancel'ed flights with an actual_out/off. Weird?
                flights.c.true_cancel,
            ),
            flights.c.scheduled_off.between(window_start, window_end),
            or_(
                flights.c.cancelled.is_(None),
                and_(
                    flights.c.true_cancel,
                    flights.c.cancelled > window_start,
                ),
            ),
        )))
    if result is None:
        abort(404)
    return jsonify([dict(e) for e in result])
def get_busiest_airports() -> Response:
    """Get the busiest airports.

    Query params:
        limit: max rows to return (default 10).
        since: unix timestamp; only departures after this count (default 0).
        query: when present, switches to an airport-code search and returns
            distinct origins/destinations matching the substring.
    """
    # BUG fix: request.args values arrive as strings, so a supplied
    # ?limit=... was passed to LIMIT as str while the default was int 10.
    # type=int coerces it (and falls back to 10 on a non-numeric value).
    limit = request.args.get("limit", 10, type=int)
    since = datetime.fromtimestamp(int(request.args.get("since", 0)), tz=UTC)
    query = request.args.get("query")

    if query:
        # Airport-code search: union of matching origins and destinations.
        result = flights_engine.execute(
            union(
                select([flights.c.origin]).distinct().where(
                    flights.c.origin.like(f"%{query}%")),
                select([flights.c.destination]).distinct().where(
                    flights.c.destination.like(f"%{query}%")),
            ))
        if result is None:
            abort(404)
        return jsonify([row[0] for row in result])

    # Busiest = most departures since the cutoff, descending.
    return jsonify([
        row.origin for row in flights_engine.execute(
            select([flights.c.origin]).where(
                func.coalesce(flights.c.actual_off, flights.c.actual_out)
                > since).group_by(flights.c.origin).order_by(
                    func.count().desc()).limit(limit))
    ])
def record_test_stats(job_id: UUID):
    """Recompute and store aggregate test statistics for one job.

    Upserts three ItemStat rows — tests.count, tests.failures and
    tests.duration — each backed by a correlated scalar subquery over
    TestCase, then flushes the session.
    """
    # (name, scalar-subquery value) pairs, upserted in order below.
    stat_values = (
        ('tests.count',
         db.session.query(func.count(TestCase.id)).filter(
             TestCase.job_id == job_id,
         ).as_scalar()),
        ('tests.failures',
         db.session.query(func.count(TestCase.id)).filter(
             TestCase.job_id == job_id,
             TestCase.result == Result.failed,
         ).as_scalar()),
        ('tests.duration',
         db.session.query(func.coalesce(func.sum(TestCase.duration), 0)).filter(
             TestCase.job_id == job_id,
         ).as_scalar()),
    )

    for stat_name, stat_value in stat_values:
        create_or_update(
            ItemStat,
            where={
                'item_id': job_id,
                'name': stat_name,
            },
            values={
                'value': stat_value,
            },
        )

    db.session.flush()
def get_synthese_per_tax_level_stat(taxLevel):
    """Count synthese rows grouped by the requested taxonomic level.

    Args:
        taxLevel: One of "Règne", "Phylum", "Classe", "Ordre",
            "Groupe INPN 1", "Groupe INPN 2".

    Query params:
        selectedYearRange: "minYear,maxYear" — restricts rows to those whose
            [date_min, date_max] overlaps the range.

    Returns:
        list of (level_value_or_"Not defined", count) tuples.

    Raises:
        KeyError: On an unknown *taxLevel* (the original raised
            UnboundLocalError instead — either way an unhandled 500).
    """
    params = request.args

    # Map each display-level name to its VSynthese column; replaces six
    # copy-pasted query branches.
    level_columns = {
        "Règne": VSynthese.regne,
        "Phylum": VSynthese.phylum,
        "Classe": VSynthese.classe,
        "Ordre": VSynthese.ordre,
        "Groupe INPN 1": VSynthese.group1_inpn,
        "Groupe INPN 2": VSynthese.group2_inpn,
    }
    column = level_columns[taxLevel]

    q = (DB.session.query(
        func.coalesce(column, "Not defined"),
        func.count(VSynthese.id_synthese),
    ).group_by(column).order_by(column))

    if "selectedYearRange" in params:
        yearRange = params["selectedYearRange"].split(",")
        # Keep rows whose observation span overlaps [yearRange[0], yearRange[1]].
        q = q.filter(
            func.date_part("year", VSynthese.date_min) <= yearRange[1])
        q = q.filter(
            func.date_part("year", VSynthese.date_max) >= yearRange[0])
    return q.all()
def normalize_query_by_date(query, date_label, report_label, start_date=None, end_date=None, interval='1 month', granularity='1 day'):
    """Fill in missing date "buckets" for an aggregate query grouped by date.

    An aggregate report query like "sales per day" only returns days that
    have at least one row, leaving gaps. This wraps the query in a UNION
    with a zero-valued Postgres ``generate_series`` so every bucket in the
    range appears, e.g.::

        SELECT date_of_sale AS day, coalesce(sales_count, 0) AS sales_count
        FROM (
            <original grouped query>
            UNION
            SELECT generate_series(start, end, '1 day') AS day, 0
        ) AS union_query
        ORDER BY union_query.day;

    Args:
        query (sqlalchemy.orm.query.Query): Original query grouped by date;
            its date column must be labelled *date_label* and its aggregate
            column labelled *report_label*.
        date_label (str): Label of the "group by date" column.
        report_label (str): Label of the aggregate column.
        start_date (datetime): Start date of the query.
        end_date (datetime): End date of the query.
        interval (str): Length of time from either *start_date* or
            *end_date*, as a Postgres interval. Defaults to '1 month'.
        granularity (str): Size of each date bucket, as a Postgres
            interval. Defaults to '1 day'.

    Returns:
        sqlalchemy.orm.query.Query: A normalized aggregate date query.
    """
    # Zero-filled series covering every bucket in the requested range.
    series = generate_date_series(start_date, end_date, interval, granularity)
    padding = select([
        series.label(date_label),
        literal(0).label(report_label),
    ])

    # UNION the real data with the padding, then coalesce the aggregate so
    # padded buckets report 0 instead of NULL.
    combined = union(query, padding).alias()
    normalized = select(
        [
            text(date_label),
            func.coalesce(text(report_label), literal(0)).label(report_label),
        ],
        from_obj=combined,
    ).order_by(date_label)
    return normalized
def score_expr(cls):
    """SQL expression for this argument's score: sum of its vote values,
    coalesced to 0 when it has no votes."""
    vote_total = func.coalesce(func.sum(ArgumentVote.value), 0)
    return select([vote_total]).where(ArgumentVote.argument_id == cls.id)