async def poll_db(loop, db, hub, period):
    """Poll the database every `period` seconds and broadcast object statuses.

    Builds a query that joins each object to its most recent status row and
    classifies it as "online" / "slow" / "offline" by how long ago the status
    was recorded, then loops forever broadcasting the result set to `hub`.

    :param loop: asyncio event loop (used for timing and sleeping)
    :param db: async engine/pool providing ``acquire()``
    :param hub: broadcast hub with a ``broadcast(list_of_dicts)`` method
    :param period: target seconds between polls (query time is subtracted)
    """
    o = objects.c
    os = objects_status.c
    # CTE: latest status timestamp per object, capped at the largest row limit.
    recent_status = (select(
        [os.object_id, func.max(os.timestamp).label("ts")])
        .group_by(os.object_id)
        .order_by(os.object_id.desc())
        .limit(ROW_LIMITS[-1])
        .cte("recent_status"))
    rs = recent_status.c
    sel = (select([
        o.description, os.object_id, os.status, os.x, os.y,
        case([
            # NOTE(review): extract("seconds", interval) yields only the
            # seconds *component* of the interval, not total elapsed seconds —
            # gaps over a minute wrap around. "epoch" may be intended; confirm.
            (func.extract("seconds", func.now() - rs.ts) <= 1.5, "online"),
            (func.extract("seconds", func.now() - rs.ts) <= 5, "slow"),
        ], else_="offline").label("online")
    ]).where((o.id == os.object_id)
             & (os.object_id == rs.object_id)
             & (os.timestamp == rs.ts)).order_by(os.object_id.desc()))
    while True:
        ts = loop.time()
        async with db.acquire() as conn:
            result_proxy = await conn.execute(sel)
            res = await result_proxy.fetchall()
        hub.broadcast([dict(i) for i in res])
        # Sleep only for the remainder of the period (query time deducted).
        td = loop.time() - ts
        # NOTE(review): the `loop=` argument to asyncio.sleep was removed in
        # Python 3.10 — confirm the supported interpreter version.
        await asyncio.sleep(max(period - td, 0), loop=loop)
def events(self):
    """Return calendar events (interaction follow-ups and contact birthdays)
    within the requested date range, as JSON.

    The range is read from the ``start`` / ``end`` request params; either
    bound defaults to today when absent or unparsable. Users without the
    all-permissions grant only see rows their permissions allow.
    """
    has_all_permissions = self.request.has_permission(ALL_PERMISSIONS)
    converter = DateConverter()
    # DateConverter returns None on missing/invalid input -> default to today.
    start_date = converter.to_python(self.request.params.get('start')) or datetime.today().date()
    end_date = converter.to_python(self.request.params.get('end')) or datetime.today().date()
    if has_all_permissions:
        interactions = Interaction.query()
        contacts = ContactPerson.query()
    else:
        user = self.request.authenticated_user
        interactions = Interaction.query_with_permissions(user)
        contacts = ContactPerson.query_with_permissions(user)
    interactions = interactions.filter(
        func.date(Interaction.followup_date) >= start_date,
        func.date(Interaction.followup_date) <= end_date,
        Interaction.status != 'Deleted'
    )
    # NOTE(review): month-only comparison breaks when the range crosses a
    # year boundary (Dec..Jan compares 12 >= start and <= 1) — confirm
    # callers never request such ranges.
    contacts = contacts.filter(
        func.extract('month', ContactPerson.birth_date) >= start_date.month,
        func.extract('month', ContactPerson.birth_date) <= end_date.month
    )
    return Event.to_json(interactions, contacts)
def get_data_over_year(id_area, timeinterval="year"):
    """Aggregate Synthese observation counts for one area, grouped by a
    date part of ``date_min``.

    :param id_area: area id used to restrict rows via CorAreaSynthese
    :param timeinterval: date part passed to SQL ``extract()`` (default "year")
    :return: JSON array of per-interval counts, or ({"Error": ...}, 400)
    """
    try:
        query = (
            DB.session.query(
                # Labeled "year" regardless of the actual interval requested.
                func.extract(timeinterval, Synthese.date_min).label("year"),
                func.count(distinct(Synthese.id_synthese)).label("count_occtax"),
                func.count(distinct(Synthese.cd_nom)).label("count_taxa"),
                func.count(distinct(Synthese.date_min)).label("count_date"),
                func.count(distinct(Synthese.id_dataset)).label("count_dataset"),
            )
            .filter(Synthese.id_synthese == CorAreaSynthese.id_synthese)
            .filter(Synthese.cd_nom == Taxref.cd_nom)
            .filter(CorAreaSynthese.id_area == id_area)
            .group_by(func.extract(timeinterval, Synthese.date_min))
            .order_by(func.extract(timeinterval, Synthese.date_min))
        )
        results = query.all()
        return jsonify(results)
    except Exception as e:
        # Broad catch is deliberate: log and surface the error as a 400.
        error = "<get_data_over_year> ERROR: {}".format(e)
        current_app.logger.error(error)
        return {"Error": error}, 400
def get_billing_data_per_resource_month_center(year, value_to_match, project_id, output_type):
    """Total cost per resource type for one project, restricted to `year`
    and to rows whose `output_type` date part equals `value_to_match`.

    Returns an unevaluated SQLAlchemy query.
    """
    query = (
        db_session.query(Usage.resource_type, func.sum(Usage.cost))
        .filter(func.extract('year', Usage.usage_date) == year)
        .filter(func.extract(output_type, Usage.usage_date) == value_to_match)
        .filter(Usage.project_id == project_id)
        .group_by(Usage.resource_type)
    )
    return query
def get_billing_data_per_year(year, output_type):
    """Year-wide billing aggregates.

    For 'day' output: one row per usage date as (unix timestamp, total cost,
    usage value, unit). For any other `output_type`: (date part, total cost)
    rows grouped by that part. Returns an unevaluated query.
    """
    if output_type != 'day':
        part = func.extract(output_type, Usage.usage_date)
        return (db_session.query(part, func.sum(Usage.cost))
                .filter(func.extract('year', Usage.usage_date) == year)
                .group_by(part))
    day_key = func.unix_timestamp(Usage.usage_date)
    return (db_session.query(day_key, func.sum(Usage.cost),
                             Usage.usage_value, Usage.measurement_unit)
            .filter(func.extract('year', Usage.usage_date) == year)
            .group_by(day_key))
def get_billing_data_per_year_month(year, value_to_match, output_type):
    """Total cost per project for `year`, optionally narrowed to the
    `output_type` date part equal to `value_to_match`.

    Returns an unevaluated SQLAlchemy query.
    """
    # NOTE(review): comparing `year` to `value_to_match` looks like a sentinel
    # for "whole-year request" (caller passes the year twice) — confirm this
    # is what callers rely on rather than an accidental comparison.
    if year == value_to_match:
        billing_data = db_session.query(Usage.project_id, func.sum(Usage.cost)). \
            filter(func.extract('year', Usage.usage_date) == year).group_by(Usage.project_id)
    else:
        billing_data = db_session.query(Usage.project_id, func.sum(Usage.cost)). \
            filter(func.extract('year', Usage.usage_date) == year,
                   func.extract(output_type, Usage.usage_date) == value_to_match).group_by(Usage.project_id)
    return billing_data
def read_hour(self, date_time, url):
    """Return the distinct hours (as scalar values) for which measurements
    exist on the given date prefix for one sensor URL."""
    hour_of = func.extract('hour', Measurement.datetime)
    rows = (db.session.query(hour_of)
            .distinct(hour_of)
            .filter(db.and_(Measurement.datetime.startswith(date_time),
                            Measurement.sensor_url == url))
            .all())
    return [row[0] for row in rows]
def get_balance(year, month):
    """Compute the monthly balance summary (cash flow, income, expense,
    start/end balance) for the given year/month and return it serialized
    through BalanceSchema.

    :param year: four-digit year
    :param month: month number 1-12
    """
    # Last calendar day of the requested month.
    (_, day) = calendar.monthrange(year, month)
    start_date = datetime.date(year, month, 1)
    end_date = datetime.date(year, month, day)
    balance_schema = BalanceSchema()
    # Single-row aggregate over the month: total flow plus income/expense
    # split out via CASE on the record type.
    amounts = db.session.query(
        func.sum(Record.amount).label("cash_flow"),
        func.sum(
            case([(Record.record_type == Record.RECORD_TYPE_INCOME, Record.amount)], else_=0)
        ).label('income'),
        func.sum(
            case([(Record.record_type == Record.RECORD_TYPE_EXPENSE, Record.amount)], else_=0)
        ).label('expense'),
        func.date_trunc('month', Record.date).label("date"),
    ).filter(
        func.extract('year', Record.date) == year,
        func.extract('month', Record.date) == month,
    ).group_by(
        func.date_trunc('month', Record.date)
    ).first()
    # Running balances: everything strictly before the month start vs.
    # everything up to and including the month end.
    current_balance = db.session.query(
        func.sum(
            case([(Record.date < start_date, Record.amount)], else_=0)
        ).label('start_balance'),
        func.sum(Record.amount).label("end_balance")
    ).filter(
        Record.date <= end_date
    ).first()
    # `amounts` is None when the month has no records; zero the flow fields.
    # NOTE(review): `.dump(...).data` is marshmallow 2.x API — marshmallow 3
    # returns the dict directly. Confirm the pinned version.
    if amounts:
        balance = balance_schema.dump({
            'cash_flow': amounts.cash_flow,
            'income': amounts.income,
            'expense': amounts.expense,
            'date': amounts.date,
            'start_balance': current_balance.start_balance,
            'end_balance': current_balance.end_balance,
        }).data
    else:
        balance = balance_schema.dump({
            'cash_flow': 0,
            'income': 0,
            'expense': 0,
            'date': end_date,
            'start_balance': current_balance.start_balance,
            'end_balance': current_balance.end_balance,
        }).data
    return balance
def get_monthly_earnings():
    """Earnings grouped by calendar month.

    Returns rows of (year, month, total revenue, order count), where revenue
    is the sum of order quantity times product price.
    """
    year_part = func.extract('year', Order.date)
    month_part = func.extract('month', Order.date)
    rows = (
        db.session.query(
            year_part,
            month_part,
            func.sum(Order.quantity * Product.price),
            func.count(),
        )
        .join(Product)
        .group_by(year_part, month_part)
        .all()
    )
    return rows
def query_rain_rate_15_rfr_test(self, session): """A query to get the maximum annual 15 minute duration rainfall amounts. This method uses the rainfall_rate variable which is a daily maximum. This method returns the maximum annual 15 minute duration rainfall amounts. ----------------------------------------- Returns: query (sqlalchemy query): sqlalchemy query object containing hdd values """ # get annual max 15 minute rainfall rate rainfall_rate = func.max(Obs.datum*0.1).label("rainfall_rate") print(self.yr_interval) query = (session.query(rainfall_rate, self.time_min, self.time_max, self.lat, self.lon, self.station_id, (func.count(Obs.datum)/(self.total_days/self.yr_interval)).label('completeness'), History.freq) .select_from(Obs) .join(Variable, Obs.vars_id == Variable.id) .join(History, Obs.history_id == History.id) .filter(and_(Obs.time >= self.start_time, Obs.time < self.end_time)) .filter(Variable.standard_name == 'rainfall_rate') .filter(Variable.name == '127') .group_by(func.extract("year", Obs.time), History.lat, History.lon, History.station_id, History.freq)) return query
class UserBioModel(Model):
    """Read model over the user_bios table, exposing summary, birthday and
    a SQL-computed integer age."""
    from_ = user_bios_t
    fields = ('summary', 'birthday',
              Field('age',
                    # Age in whole years, computed server-side from birthday.
                    lambda rec: func.extract('year', func.age(rec['birthday'])),
                    Integer))
def get_taxa_simple_list(id_area):
    """Return a simple per-taxon summary (counts, last year seen, threatened
    flag) for all taxa observed in an area.

    :param id_area: area id used to restrict Synthese rows via CorAreaSynthese
    :return: (JSON {"count", "data"}, 200) or ({"Error": ...}, 400)
    """
    try:
        query_area = (
            DB.session.query(
                Taxref.cd_ref.label("id"),
                Taxref.cd_ref,
                # Keep only the first vernacular name (they are comma-separated).
                func.split_part(Taxref.nom_vern, ",", 1).label("nom_vern"),
                Taxref.lb_nom,
                Taxref.group2_inpn,
                # Missing threatened status counts as "not threatened".
                func.coalesce(TMaxThreatenedStatus.threatened, False).label(
                    "threatened"
                ),
                func.count(distinct(Synthese.id_synthese)).label("count_occtax"),
                func.count(distinct(Synthese.observers)).label("count_observer"),
                func.count(distinct(Synthese.date_min)).label("count_date"),
                func.count(distinct(Synthese.id_dataset)).label("count_dataset"),
                func.max(distinct(func.extract("year", Synthese.date_min))).label(
                    "last_year"
                ),
            )
            .select_from(Taxref)
            .outerjoin(
                TMaxThreatenedStatus, TMaxThreatenedStatus.cd_nom == Taxref.cd_nom
            )
            .filter(Synthese.id_synthese == CorAreaSynthese.id_synthese)
            .filter(Synthese.cd_nom == Taxref.cd_nom)
            .filter(CorAreaSynthese.id_area == id_area)
            .distinct()
            .group_by(
                func.coalesce(TMaxThreatenedStatus.threatened, False),
                Taxref.cd_ref,
                Taxref.nom_vern,
                Taxref.lb_nom,
                Taxref.group1_inpn,
                Taxref.group2_inpn,
            )
            # Threatened taxa first, then by observation count, then taxonomy.
            .order_by(
                func.coalesce(TMaxThreatenedStatus.threatened, False).desc(),
                func.count(distinct(Synthese.id_synthese)).desc(),
                Taxref.group1_inpn,
                Taxref.group2_inpn,
            )
        )
        print("query_territory", query_area)
        result = query_area.all()
        data = []
        for r in result:
            data.append(r._asdict())
        return jsonify({"count": len(result), "data": data}), 200
    except Exception as e:
        # Broad catch is deliberate: log and surface the error as a 400.
        error = "<get_taxa_list> ERROR: {}".format(e)
        current_app.logger.error(error)
        return {"Error": error}, 400
def query_rain_rate_15(start_time, end_time, session): """A query to get the maximum annual 15 minute duration rainfall amounts. This method uses quarter hour observatons and extracts the maximum at a given station in a given year. ----------------------------------------- Returns: query (sqlalchemy query): sqlalchemy query object containing hdd values """ # get max 15 minute rainfall rate rainfall_rate = func.max(Obs.datum).label("rainfall_rate") days = total_days(start_time, end_time) years = total_years(start_time, end_time) completeness = (count(Obs) / (days / years)).label("completeness") query = (session.query( rainfall_rate, min_time(Obs), max_time(Obs), History.lat, History.lon, History.station_id, completeness).select_from(Obs).join( Variable, Obs.vars_id == Variable.id).join( History, Obs.history_id == History.id).filter( and_(Obs.time >= start_time, Obs.time < end_time)).filter( Variable.standard_name == 'rainfall_rate').filter( Variable.name == '127').group_by( func.extract("year", Obs.time), History.lat, History.lon, History.station_id, )) return query
def get_billing_data_per_year_per_center_days(year, project_ids, output_type=None):
    """Per-day billing rows for a set of projects in one year.

    Each row is (unix timestamp of usage date, total cost, usage value,
    measurement unit). Returns an unevaluated query.
    """
    day_key = func.unix_timestamp(Usage.usage_date)
    return (
        db_session.query(day_key, func.sum(Usage.cost),
                         Usage.usage_value, Usage.measurement_unit)
        .filter(func.extract('year', Usage.usage_date) == year,
                Usage.project_id.in_(project_ids))
        .group_by(day_key)
    )
def query_design_temp_wet(start_time, end_time, session, quantile=0.025):
    """A query to get the percentile of maximum daily wet_bulb_temperatures
    at a station in a given time frame.

    There is a mixed frequency of this variable in dbmsc, and so a temp patch
    to guess the frequency based on obs_count is used. As of Feb 28, 2019,
    Faron Anslow is working to include a filtering option in net_vars_id.
    This is a known issue.

    Args:
        start_time, end_time: half-open [start, end) observation window;
            the month filter is taken from start_time.month
        session (sqlalchemy Session): active database session
        quantile (float, optional): desired percentile as a fraction;
            default 0.025 (the 2.5th percentile of the descending ordering)
    Returns:
        query (sqlalchemy query): sqlalchemy query object for design value
    """
    percentile = (func.percentile_cont(quantile).within_group(
        Obs.datum.desc()).label("air_temperature"))
    days = days_in_month(start_time, end_time)
    hours = hours_in_month(start_time, end_time)
    daily_complete = (count(Obs) / days)
    hourly_complete = (count(Obs) / hours)
    month = start_time.month
    # NOTE: DBMSC has mix-matched frequency of sampling for wet bulb
    # measurements. Some are daily and some are hourly. If there are more
    # observations than days in 1 month it assumes hourly. This does not
    # account for undersampled hourly measurements, and assumes that if a
    # station is sampled hourly, it has at least more obs than days in
    # 1 month. Thus, extremely bad hourly data may be in this query,
    # however, if any, it is likely sparse.
    # Condition that separates daily from hourly data and guesses frequency.
    completeness = (case([(func.count(Obs.datum) <= days, daily_complete)],
                         else_=hourly_complete).label('completeness'))
    query = (
        session.query(percentile, min_time(Obs), max_time(Obs), History.lat,
                      History.lon, History.station_id,
                      completeness).select_from(Obs).join(
                          Variable, Obs.vars_id == Variable.id).join(
                              History, Obs.history_id == History.id).filter(
                                  and_(Obs.time >= start_time, Obs.time < end_time)).
        filter(func.extract("month", Obs.time) == month).filter(
            Variable.name == '79').filter(
                Variable.standard_name == 'wet_bulb_temperature').filter(
                    Obs.datum != 0.0)  # bad obs are sometimes 0.0
        .group_by(History.lat, History.lon, History.station_id))
    return query
def get_billing_data_per_resource_per_project(year, project_id, resource, output_type):
    """Billing aggregates for one resource type in one project for `year`.

    output_type == 'day'  -> per-date rows (unix timestamp, cost, value, unit)
    output_type == 'week' -> (project_id, week number, cost) rows
    otherwise             -> (date part, total cost) rows grouped by that part

    Returns an unevaluated SQLAlchemy query.
    """
    if output_type == 'day':
        billing_data = db_session.query(func.unix_timestamp(Usage.usage_date), func.sum(Usage.cost),
                                        Usage.usage_value, Usage.measurement_unit). \
            filter(func.extract('year', Usage.usage_date) == year, Usage.project_id == project_id,
                   Usage.resource_type == resource). \
            group_by(func.unix_timestamp(Usage.usage_date))
    elif output_type == 'week':
        # BUG FIX: previously grouped by func.month(...), which collapsed the
        # selected week numbers into month buckets. Group by the same week
        # expression that is selected (matches the sibling query helpers).
        billing_data = db_session.query(Usage.project_id, func.extract(output_type, Usage.usage_date),
                                        func.sum(Usage.cost)). \
            filter(func.extract('year', Usage.usage_date) == year, Usage.project_id == project_id,
                   Usage.resource_type == resource). \
            group_by(func.extract(output_type, Usage.usage_date))
    else:
        # BUG FIX: previously grouped by func.month(...) regardless of the
        # requested part ('quarter', 'year', ...), producing wrong buckets.
        billing_data = db_session.query(func.extract(output_type, Usage.usage_date), func.sum(Usage.cost)). \
            filter(func.extract('year', Usage.usage_date) == year, Usage.project_id == project_id,
                   Usage.resource_type == resource). \
            group_by(func.extract(output_type, Usage.usage_date))
    return billing_data
def get_billing_data_per_resource_all_project_per_day_quarter(year, value_to_match, project_ids, resource, output_type):
    """Quarter-restricted billing for one resource across several projects.

    All rows are limited to `year`, to projects in `project_ids`, to
    `resource`, and to the quarter `value_to_match`.

    output_type == 'day'  -> per-date rows (unix timestamp, cost, value, unit)
    output_type == 'week' -> (project_id, week number, cost) rows
    otherwise             -> (date part, total cost) rows grouped by that part

    Returns an unevaluated SQLAlchemy query.
    """
    if output_type == 'day':
        billing_data = db_session.query(func.unix_timestamp(Usage.usage_date), func.sum(Usage.cost),
                                        Usage.usage_value, Usage.measurement_unit). \
            filter(func.extract('year', Usage.usage_date) == year, Usage.project_id.in_(project_ids),
                   Usage.resource_type == resource,
                   func.extract('quarter', Usage.usage_date) == value_to_match). \
            group_by(func.unix_timestamp(Usage.usage_date))
    elif output_type == 'week':
        billing_data = db_session.query(Usage.project_id, func.extract(output_type, Usage.usage_date),
                                        func.sum(Usage.cost)). \
            filter(func.extract('year', Usage.usage_date) == year, Usage.project_id.in_(project_ids),
                   Usage.resource_type == resource,
                   func.extract('quarter', Usage.usage_date) == value_to_match). \
            group_by(func.extract(output_type, Usage.usage_date))
    else:
        # BUG FIX: was `elif output_type == 'month' or output_type == 'week'`.
        # The 'week' test was unreachable (handled above), and any other date
        # part left billing_data unbound, raising NameError at `return`.
        # A plain else matches the sibling helpers' day/week/else structure.
        billing_data = db_session.query(func.extract(output_type, Usage.usage_date), func.sum(Usage.cost)). \
            filter(func.extract('year', Usage.usage_date) == year, Usage.project_id.in_(project_ids),
                   Usage.resource_type == resource,
                   func.extract('quarter', Usage.usage_date) == value_to_match). \
            group_by(func.extract(output_type, Usage.usage_date))
    return billing_data
def news(): raterlist = db.session.execute( '''select rater.name, count(rating) as ratings from rater join rating on rater.user_id=rating.user_id group by rater.name''').fetchall() # restaurants restaurants = db.session().query( Restaurant ,Rating , Location).\ filter( func.extract('year',Rating.date) == '2015').\ filter( Location.business_id == Restaurant.business_id)\ .limit(5).all() restaurant, _, location = zip(*restaurants) # raters raters = db.session().query(Rater, func.min( Rating.mood), Location, Restaurant).filter( func.upper(Rater.name).like( 'Peter'.upper()) # since 0 is Staff in our description ).filter(Rating.business_id == Location.business_id).filter( Restaurant.business_id == Rating.business_id).group_by( Rater.user_id, Rating.mood, Location.location_id, Restaurant.business_id, Rating.date).order_by(asc(Rating.mood), func.DATE(Rating.date)).limit(20).all() # rest_spec rest_spec = raters[len(raters) - 1][3] other_critics = db.session.query( Restaurant, Rating.date, Rating.mood).filter( Restaurant.business_id == rest_spec.business_id).group_by( Restaurant.business_id, Rating.id, Rating.date).order_by(Rating.date).limit(10).all() Best_cate = db.session.execute(''' select rating,food_cate.cate_type from rating join ( select unnest(restaurant.food_type) as cate_type, restaurant.business_id as b_id from restaurant ) as food_cate on rating.business_id = food_cate.b_id where food_cate.cate_type='{}' group by ( food_cate.cate_type , rating.mood , rating.* ) having ( rating.mood >= avg(rating.mood) ) order by rating.mood desc '''.format("Active Life")).fetchall() os.system("clear") return render_template('news.html', restaurant=restaurant[0], location=location[0], johns=raters[0], others=other_critics)
def query_all_temp(start_time, end_time, session):
    """A query to get the 10th percentile of daily air_temperatures at a
    station in a given time frame, restricted to start_time's month.

    Args:
        start_time, end_time: half-open [start, end) observation window;
            the month filter is taken from start_time.month
        session (sqlalchemy Session): active database session
    Returns:
        query (sqlalchemy query): sqlalchemy query object for design values
    """
    days = days_in_month(start_time, end_time)
    # 10th percentile over ascending datum values.
    percentile = (func.percentile_cont(.10).within_group(
        Obs.datum.asc()).label("air_temperature"))
    completeness = (count(Obs) / days).label("completeness")
    month = start_time.month
    query = (
        session.query(
            percentile,
            completeness,
            func.min(Obs.datum),
            func.max(Obs.datum),
            func.min(Obs.time),
            func.max(Obs.time),
            History.lat,
            History.lon,
            History.station_id,
            Variable.name,
        ).select_from(Obs).join(Variable, Obs.vars_id == Variable.id).join(
            History, Obs.history_id == History.id).filter(
                and_(Obs.time >= start_time, Obs.time < end_time)).filter(
                    func.extract("month", Obs.time) == month).filter(
                        # air temperature variables '1', '2' and '3'
                        (Variable.name == '1')
                        | (Variable.name == '2')
                        | (Variable.name == '3')).filter(
                            (Variable.standard_name == 'air_temperature'))
        #.filter(Obs.datum != 0.0)  # bad obs are sometimes 0.0
        .group_by(
            History.lat,
            History.lon,
            History.station_id,
            Variable.name,
            Variable.standard_name,
        ))
    return query
def all_statistics():
    """Render billing statistics: last calendar month totals and all-time totals.

    The previous month is located by stepping back (current day-of-month + 1)
    days from now, which always lands inside the previous month.
    """
    last_month_ref = datetime.now() - timedelta(days=datetime.now().day + 1)
    lm_year = last_month_ref.year
    lm_month = last_month_ref.month
    # Sums are COALESCEd to 0 so empty months yield 0 rather than NULL.
    lm_bill = db.session.query(func.coalesce(
        func.sum(Interaction.bill), 0)).filter(func.extract('year', Interaction.date) == lm_year).filter(
        func.extract('month', Interaction.date) == lm_month).first()
    lm_paid = db.session.query(func.coalesce(
        func.sum(Interaction.paid), 0)).filter(func.extract('year', Interaction.date) == lm_year).filter(
        func.extract('month', Interaction.date) == lm_month).first()
    try:
        lm_debt = lm_paid[0] - lm_bill[0]
    except (TypeError, IndexError):
        # Narrowed from a bare except: keep the best-effort zero fallback but
        # only for the failures subtraction/indexing can actually produce.
        lm_debt = 0
    h_bill = db.session.query(func.coalesce(func.sum(Interaction.bill), 0)).first()
    h_paid = db.session.query(func.coalesce(func.sum(Interaction.paid), 0)).first()
    try:
        h_debt = h_paid[0] - h_bill[0]
    except (TypeError, IndexError):  # narrowed from a bare except
        h_debt = 0
    return render_template('all_statistics.html', title='Statystyki',
                           lm_month=lm_month, lm_year=lm_year, lm_debt=lm_debt,
                           lm_bill=lm_bill[0], h_debt=h_debt, h_bill=h_bill[0])
def is_albums_bad(self):
    """Find the first low-rated album whose songs are all well-sampled and
    all badly rated, and return it with its songs; None if no album qualifies.

    An album qualifies when every song has been rated at least 3 times,
    played at least once, last played before the library-wide average
    played_at time, and rated at most 0.50; the album itself must have a
    rating below 0.10.
    """
    # Candidate albums: worst-rated first, ties broken by most-rated.
    albums = Album.query.filter(
        Album.rating < 0.10
    ).order_by(
        Album.rating, Album.count_rated.desc()
    ).all()
    # Library-wide average of played_at as a datetime.
    # NOTE(review): if no song has a played_at, AVG is NULL and int(None)
    # raises TypeError — confirm callers only run this with play history.
    avg_played = Song.query.with_entities(
        func.avg(func.extract('epoch', Song.played_at))
    ).scalar()
    avg_played = datetime.fromtimestamp(int(avg_played))
    app.logger.info('AVG played {}'.format(avg_played))
    for album in albums:
        # Skip (and clean up) albums with no songs.
        if self._delete_empty_album(album):
            continue
        # check if all songs on album has been rated (req to be sure it is bad)
        all_rated = True
        all_played = True
        all_played_after_avg = True
        all_badly_rated = True
        for song in album.songs:
            if song.count_rated < 3:
                all_rated = False
                app.logger.info('Not all songs rated 3x in {}'.format(album.name))
                break
            if song.count_played < 1:
                all_played = False
                app.logger.info('Not all songs played 1x in {}'.format(album.name))
                break
            if song.played_at > avg_played:
                all_played_after_avg = False
                app.logger.info('Not all songs played earlier than avg {}'.format(album.name))
                break
            if song.rating > 0.50:
                all_badly_rated = False
                app.logger.info('Not all songs are badly rated {}'.format(album.name))
                break
        if all_rated and all_played and all_played_after_avg and all_badly_rated:
            app.logger.info('bad album: {} {}'.format(album.artist.name, album.name))
            return {
                'album': album,
                'songs': album.songs,
            }
    return
def read_measurement_today(self, date_time, type, url):
    """Hourly averages of humidity or temperature for one sensor on one day.

    Rows matching the `date_time` prefix and `url` are grouped by hour; the
    averaged column is humidity when `type == 'humidity'`, else temperature.
    Returns the list of hourly average values.
    """
    column = Measurement.humidity if type == 'humidity' else Measurement.temperature
    rows = (db.session.query(func.avg(column), Measurement.datetime)
            .filter(db.and_(Measurement.datetime.startswith(date_time),
                            Measurement.sensor_url == url))
            .group_by(func.extract('hour', Measurement.datetime))
            .all())
    return [row[0] for row in rows]
def get_billing_data_per_year_per_center(year, project_ids, output_type):
    """Billing aggregates for a set of projects over one year, grouped by
    the `output_type` date part. Returns an unevaluated query.
    """
    if output_type == 'week':
        # NOTE(review): Usage.project_id is selected but absent from GROUP BY;
        # strict SQL modes (e.g. MySQL ONLY_FULL_GROUP_BY) reject this and the
        # returned project_id is otherwise arbitrary — confirm intended.
        billing_data = db_session.query(Usage.project_id, func.extract(output_type, Usage.usage_date),
                                        func.sum(Usage.cost)). \
            filter(func.extract('year', Usage.usage_date) == year,
                   Usage.project_id.in_(project_ids)). \
            group_by(func.extract(output_type, Usage.usage_date))
    else:
        billing_data = db_session.query(func.extract(output_type, Usage.usage_date), func.sum(Usage.cost)). \
            filter(func.extract('year', Usage.usage_date) == year,
                   Usage.project_id.in_(project_ids)). \
            group_by(func.extract(output_type, Usage.usage_date))
    return billing_data
def get_billing_data_per_resource_week_day_center(year, value_to_match, project_id, output_type):
    """Billing for one project restricted to week `value_to_match` of `year`.

    'day' output returns per-date rows; otherwise rows are
    (resource type, date part, cost). Returns an unevaluated query.
    """
    if output_type == 'day':
        # NOTE(review): sibling 'day' branches group by unix_timestamp(...),
        # while this one groups by extract('day', ...), which merges the same
        # day-of-month across different weeks/months — confirm intended.
        billing_data = db_session.query(func.unix_timestamp(Usage.usage_date), func.sum(Usage.cost),
                                        Usage.usage_value, Usage.measurement_unit). \
            filter(func.extract('year', Usage.usage_date) == year,
                   func.extract('week', Usage.usage_date) == value_to_match,
                   Usage.project_id == project_id). \
            group_by(func.extract(output_type, Usage.usage_date))
    else:
        billing_data = db_session.query(Usage.resource_type, func.extract(output_type, Usage.usage_date),
                                        func.sum(Usage.cost)). \
            filter(func.extract('year', Usage.usage_date) == year,
                   Usage.project_id == project_id,
                   func.extract('week', Usage.usage_date) == value_to_match).group_by(
            func.extract(output_type, Usage.usage_date), Usage.resource_type)
    return billing_data
def get_billing_data_per_year_month_week_day(year, value_to_match, output_type, project_ids):
    """Billing restricted to month `value_to_match` of `year`.

    'day' output returns per-date rows for the given projects; otherwise
    rows are (project_id, date part, cost). Returns an unevaluated query.
    """
    if output_type == 'day':
        billing_data = db_session.query(func.unix_timestamp(Usage.usage_date), func.sum(Usage.cost),
                                        Usage.usage_value, Usage.measurement_unit). \
            filter(func.extract('year', Usage.usage_date) == year,
                   Usage.project_id.in_(project_ids),
                   func.extract('month', Usage.usage_date) == value_to_match). \
            group_by(func.extract(output_type, Usage.usage_date))
    else:
        # NOTE(review): unlike the 'day' branch, this branch does not filter
        # by Usage.project_id.in_(project_ids) — it aggregates ALL projects.
        # Confirm whether the omission is intentional.
        billing_data = db_session.query(Usage.project_id, func.extract(output_type, Usage.usage_date),
                                        func.sum(Usage.cost)). \
            filter(func.extract('year', Usage.usage_date) == year,
                   func.extract('month', Usage.usage_date) == value_to_match).group_by(
            func.extract(output_type, Usage.usage_date), Usage.project_id)
    return billing_data
def get_billing_data_per_project_year(year, project_id, output_type):
    """Billing aggregates for a single project over one year.

    'day'  -> per-date rows (unix timestamp, cost, usage value, unit)
    'week' -> (project_id, week number, cost) rows
    other  -> (date part, cost) rows grouped by that part

    Returns an unevaluated SQLAlchemy query.
    """
    in_year = func.extract('year', Billing.usage_date) == year
    for_project = Billing.project_id == project_id
    if output_type == 'day':
        day_key = func.unix_timestamp(Billing.usage_date)
        return (db_session.query(day_key, func.sum(Billing.cost),
                                 Billing.usage_value, Billing.measurement_unit)
                .filter(in_year, for_project)
                .group_by(day_key))
    part = func.extract(output_type, Billing.usage_date)
    if output_type == 'week':
        return (db_session.query(Billing.project_id, part, func.sum(Billing.cost))
                .filter(in_year, for_project)
                .group_by(part))
    return (db_session.query(part, func.sum(Billing.cost))
            .filter(in_year, for_project)
            .group_by(part))
def query_design_temp_dry(start_time, end_time, session, quantile=0.025):
    """A query to get the percentile of maximum daily air_temperatures
    (it is assumed that dry bulb temperatures are identical to regular air
    temperatures) at a station in a given time frame. Daily maximum air
    temperatures are used.

    Args:
        start_time, end_time: half-open [start, end) observation window;
            the month filter is taken from start_time.month
        session (sqlalchemy Session): active database session
        quantile (float, optional): desired percentile as a fraction;
            default 0.025 (of the descending ordering)
    Returns:
        query (sqlalchemy query): sqlalchemy query object for design value
    """
    percentile = (func.percentile_cont(quantile).within_group(
        Obs.datum.desc()).label("air_temperature"))
    days = days_in_month(start_time, end_time)
    # Fraction of expected daily observations actually present.
    completeness = (count(Obs) / days).label("completeness")
    month = start_time.month
    # construct query table
    query = (
        session.query(percentile, min_time(Obs), max_time(Obs), History.lat,
                      History.lon, History.station_id,
                      completeness).select_from(Obs).join(
                          Variable, Obs.vars_id == Variable.id).join(
                              History, Obs.history_id == History.id).filter(
                                  and_(Obs.time >= start_time, Obs.time < end_time)).
        filter(func.extract("month", Obs.time) == month).filter(
            and_(Variable.standard_name == 'air_temperature',
                 Variable.cell_method == 'time: maximum')).filter(
                     Variable.name == '1').filter(
                         Obs.datum != 0.0)  # bad obs are sometimes 0.0
        .group_by(History.lat, History.lon, History.station_id))
    return query
def query_rain_rate_15(self, session): """A query to get the maximum annual 15 minute duration rainfall amounts. This method uses quarter hour observatons and extracts the maximum at a given station in a given year. ----------------------------------------- Returns: query (sqlalchemy query): sqlalchemy query object containing hdd values """ # get max 15 minute rainfall rate rainfall_rate = func.max(Obs.datum).label("rainfall_rate") query = ( session.query(rainfall_rate, self.time_min, self.time_max, self.lat, self.lon, self.station_id, (func.count(Obs.datum)/(self.total_days/self.yr_interval)/(24*4)).label('completeness'), History.freq) .select_from(Obs) .join(Variable, Obs.vars_id == Variable.id) .join(History, Obs.history_id == History.id) .filter(and_(Obs.time >= self.start_time, Obs.time < self.end_time)) .filter(Variable.standard_name == 'lwe_thickness_of_precipitation_amount') .filter(or_(Variable.name == '263', Variable.name == '264', Variable.name == '265', Variable.name == '266')) .group_by(func.extract("year", Obs.time), History.lat, History.lon, History.station_id, History.freq) ) return query
def get_billing_data_per_resource_per_project_per_week(year, value_to_match, project_id, resource, output_type):
    """Billing for one resource in one project, restricted to week
    `value_to_match` of `year`. Returns an unevaluated query.

    NOTE(review): only 'day' and 'week' are handled; any other output_type
    leaves billing_data unbound and `return` raises UnboundLocalError —
    confirm callers never pass other values.
    """
    if output_type == 'day':
        billing_data = db_session.query(func.unix_timestamp(Billing.usage_date), func.sum(Billing.cost),
                                        Billing.usage_value, Billing.measurement_unit). \
            filter(func.extract('year', Billing.usage_date) == year,
                   Billing.project_id == project_id,
                   Billing.resource_type == resource,
                   func.extract('week', Billing.usage_date) == value_to_match). \
            group_by(func.unix_timestamp(Billing.usage_date))
    elif output_type == 'week':
        billing_data = db_session.query(Billing.project_id, func.extract('week', Billing.usage_date),
                                        func.sum(Billing.cost)). \
            filter(func.extract('year', Billing.usage_date) == year,
                   Billing.project_id == project_id,
                   Billing.resource_type == resource,
                   func.extract('week', Billing.usage_date) == value_to_match). \
            group_by(func.extract('week', Billing.usage_date))
    return billing_data
def query_rain_rate_one_day_1_50(self, session): """A query to get the maximum annual 24hr duration rainfall amounts. ----------------------------------------- Returns: query (sqlalchemy query): sqlalchemy query object containing hdd values """ # get annual max 24hr duration rainfall rate rainfall_rate = func.max(Obs.datum*0.1).label("rainfall_rate") query = ( session.query(rainfall_rate, History.elevation, self.time_min, self.time_max, self.lat, self.lon, self.station_id, (func.count(Obs.datum)/(self.total_days/self.yr_interval)).label('completeness')) .select_from(Obs) .join(Variable, Obs.vars_id == Variable.id) .join(History, Obs.history_id == History.id) .filter(and_(Obs.time >= self.start_time, Obs.time < self.end_time)) .filter(and_(Variable.standard_name == 'rainfall_rate', Variable.name == '161')) .group_by(func.extract("year", Obs.time), History.elevation, History.lat, History.lon, History.station_id) ) return query
def stats(group_id):
    """Render aggregate statistics for a group's entries: works per creator,
    entries per month, note-length histogram and extremes.

    Redirects to the group detail page when the group has no entries.
    """
    g = Group.query.get_or_404(group_id)
    s = dict()
    Histogram = namedtuple('Histogram', ['data', 'maxval'])
    s['nworks'] = g.entries.count()
    if s['nworks'] == 0:
        return redirect(url_for('group_detail', group_id=group_id))
    # Works per creator.
    creators = (
        db.session.query(
            Entry.creator,
            func.count(Entry.id).label('nworks')
        )
        .filter(Entry.group_id == group_id)
        .group_by(Entry.creator)
    )
    s['ncreators'] = creators.count()
    s['nworks_top'] = creators.order_by(desc('nworks')).limit(20).all()
    # Histogram: how many creators have each works-count.
    creators = creators.subquery()
    nworks_data = db.session.query(
        creators.c.nworks,
        func.count(creators.c.nworks)
    ).group_by(creators.c.nworks).all()
    nworks_max = max(map(operator.itemgetter(1), nworks_data))
    s['nworks_hist'] = Histogram(nworks_data, nworks_max)
    # Entries added per calendar month.
    months = db.session.query(
        func.extract('year', Entry.date_added).label('year'),
        func.extract('month', Entry.date_added).label('month'),
    ).filter(Entry.group_id == group_id).subquery()
    months = db.session.query(
        months.c.year, months.c.month, func.count(months.c.year)
    ).group_by(months.c.year, months.c.month).all()
    # BUG FIX: this was a lazy map() object on Python 3; the max() call below
    # exhausted it, so the Histogram always rendered empty. Materialize it.
    months_data = [("{:.0f}-{:02.0f}".format(*m), m[2]) for m in months]
    months_max = max(map(operator.itemgetter(1), months_data))
    s['months_hist'] = Histogram(months_data, months_max)
    # Note lengths bucketed into 200-character bins.
    lengths_data = (
        db.session.query(
            func.length(Entry.notes) / 200 * 200,
            func.count(Entry.id)
        )
        .filter(Entry.group_id == group_id)
        .group_by(func.length(Entry.notes) / 200).all()
    )
    lengths_max = max(map(operator.itemgetter(1), lengths_data))
    s['lengths_hist'] = Histogram(lengths_data, lengths_max)
    # Longest / shortest notes and aggregate lengths.
    lengths = db.session.query(
        Entry.title, func.length(Entry.notes).label("length")
    ).filter(Entry.group_id == group_id).order_by(desc("length")).all()
    s['longest'] = lengths[:16]
    s['shortest'] = reversed(lengths[-16:])
    s['total_len'] = sum(map(operator.itemgetter(1), lengths))
    s['avg_len'] = 1.0 * s['total_len'] / s['nworks']
    return render_template('stats.html', group=g, stats=s)
def get_taxa_list(id_area):
    """Return the detailed per-taxon summary for an area: counts, year/month
    lists, reproduction flag, protection flag and red-list status, sorted
    with red-listed taxa first.

    :param id_area: area id used to restrict Synthese rows via CorAreaSynthese
    :return: (JSON {"count", "data"}, 200) or ({"Error": ...}, 400)
    """
    try:
        # Nomenclature id for the "reproduction" bio status
        # (type STATUT_BIO, code "3").
        reproduction_id = (
            (
                DB.session.query(TNomenclatures.id_nomenclature)
                .join(
                    BibNomenclaturesTypes,
                    TNomenclatures.id_type == BibNomenclaturesTypes.id_type,
                )
                .filter(
                    and_(
                        BibNomenclaturesTypes.mnemonique.like("STATUT_BIO"),
                        TNomenclatures.cd_nomenclature.like("3"),
                    )
                )
            )
            .first()
            .id_nomenclature
        )
        print("reproduction_id", reproduction_id)
        query_territory = (
            DB.session.query(
                Taxref.cd_ref.label("id"),
                LAreas.id_area,
                LAreas.area_code,
                Taxref.cd_ref,
                # Keep only the first vernacular name (comma-separated list).
                func.split_part(Taxref.nom_vern, ",", 1).label("nom_vern"),
                Taxref.nom_valide,
                Taxref.group1_inpn,
                Taxref.group2_inpn,
                func.count(distinct(Synthese.id_synthese)).label("count_occtax"),
                func.count(distinct(Synthese.observers)).label("count_observer"),
                func.count(distinct(Synthese.date_min)).label("count_date"),
                func.count(distinct(Synthese.id_dataset)).label("count_dataset"),
                func.max(distinct(func.extract("year", Synthese.date_min))).label(
                    "last_year"
                ),
                # Distinct observation years, most recent first.
                func.array_agg(
                    aggregate_order_by(
                        distinct(func.extract("year", Synthese.date_min)),
                        func.extract("year", Synthese.date_min).desc(),
                    )
                ).label("list_years"),
                # Distinct observation months, ascending.
                func.array_agg(
                    aggregate_order_by(
                        distinct(func.extract("month", Synthese.date_min)),
                        func.extract("month", Synthese.date_min).asc(),
                    )
                ).label("list_months"),
                # True if any observation carries the reproduction bio status.
                func.bool_or(
                    Synthese.id_nomenclature_bio_status == reproduction_id
                ).label("reproduction"),
                func.max(distinct(func.extract("year", Synthese.date_min)))
                .filter(Synthese.id_nomenclature_bio_status == reproduction_id)
                .label("last_year_reproduction"),
                func.array_agg(distinct(Synthese.id_nomenclature_bio_status)).label(
                    "bio_status_id"
                ),
                # Protected if at least one protection row exists for the taxon.
                case(
                    [(func.count(TaxrefProtectionEspeces.cd_nom) > 0, True)],
                    else_=False,
                ).label("protection"),
            )
            .select_from(CorAreaSynthese)
            .join(Synthese, Synthese.id_synthese == CorAreaSynthese.id_synthese)
            .join(Taxref, Synthese.cd_nom == Taxref.cd_nom)
            .join(LAreas, LAreas.id_area == CorAreaSynthese.id_area)
            .outerjoin(TaxrefLR, TaxrefLR.cd_nom == Taxref.cd_ref)
            .outerjoin(
                TaxrefProtectionEspeces, TaxrefProtectionEspeces.cd_nom == Taxref.cd_nom
            )
            .filter(LAreas.id_area == id_area)
            .group_by(
                LAreas.id_area,
                LAreas.area_code,
                Taxref.cd_ref,
                Taxref.nom_vern,
                Taxref.nom_valide,
                Taxref.group1_inpn,
                Taxref.group2_inpn,
            )
            .order_by(
                func.count(distinct(Synthese.id_synthese)).desc(),
                Taxref.group1_inpn,
                Taxref.group2_inpn,
                Taxref.nom_valide,
            )
        )
        print("query_territory", query_territory)
        result = query_territory.all()
        count = len(result)
        data = []
        for r in result:
            # NOTE(review): `dict` shadows the builtin inside this loop.
            dict = r._asdict()
            # Resolve each bio status id to its nomenclature record.
            bio_status = []
            for s in r.bio_status_id:
                bio_status.append(get_nomenclature(s))
            dict["bio_status"] = bio_status
            redlist = get_redlist_status(r.cd_ref)
            dict["redlist"] = redlist
            data.append(dict)
        # Red-listed taxa first, ordered by priority then threatened flag;
        # taxa without red-list data keep their query order at the end.
        redlistless_data = list(filter(redlist_list_is_null, data))
        print("redlistless_data", len(redlistless_data))
        redlist_data = list(filter(redlist_is_not_null, data))
        print("redlist_data", len(redlist_data))
        redlist_sorted_data = sorted(
            redlist_data,
            key=lambda k: (
                k["redlist"][0]["priority_order"],
                k["redlist"][0]["threatened"],
            ),
        )
        sorted_data = redlist_sorted_data + list(redlistless_data)
        return jsonify({"count": count, "data": sorted_data}), 200
    except Exception as e:
        # Broad catch is deliberate: log and surface the error as a 400.
        error = "<get_taxa_list> ERROR: {}".format(e)
        current_app.logger.error(error)
        return {"Error": error}, 400
def to_ts(col):
    """Convert a timestamp column expression to integer Unix seconds using
    SQL EXTRACT(EPOCH FROM ...)."""
    epoch_seconds = func.extract('EPOCH', col)
    return cast(epoch_seconds, Integer)
def get_billing_data_per_year_per_center_quarter(year, project_ids, quarter, output_type):
    """Billing for a set of projects restricted to one quarter of `year`.

    'day'  -> per-date rows (unix timestamp, cost, usage value, unit)
    'week' -> (project_id, week number, cost) rows
    other  -> (date part, cost) rows grouped by that part

    Returns an unevaluated SQLAlchemy query.
    """
    if output_type == 'day':
        billing_data = db_session.query(func.unix_timestamp(Usage.usage_date), func.sum(Usage.cost),
                                        Usage.usage_value, Usage.measurement_unit). \
            filter(func.extract('year', Usage.usage_date) == year,
                   Usage.project_id.in_(project_ids),
                   func.extract('quarter', Usage.usage_date) == quarter). \
            group_by(func.extract(output_type, Usage.usage_date))
    elif output_type == 'week':
        billing_data = db_session.query(Usage.project_id, func.extract(output_type, Usage.usage_date),
                                        func.sum(Usage.cost)). \
            filter(func.extract('year', Usage.usage_date) == year,
                   Usage.project_id.in_(project_ids),
                   func.extract('quarter', Usage.usage_date) == quarter). \
            group_by(func.extract(output_type, Usage.usage_date))
    else:
        billing_data = db_session.query(func.extract(output_type, Usage.usage_date), func.sum(Usage.cost)). \
            filter(func.extract('year', Usage.usage_date) == year,
                   Usage.project_id.in_(project_ids),
                   func.extract('quarter', Usage.usage_date) == quarter). \
            group_by(func.extract(output_type, Usage.usage_date))
    return billing_data