def _do_merge_shows_list(self, progress, **kwargs):
    """Sync the Show table against the site's show list.

    Walks the site list, updating each show and tracking which
    (has_forum, forum_id) pairs were seen; then corrects mega-show
    topic/post totals, and finally marks/deletes shows not seen this run.
    `progress` is a callback reporting the current step.
    """
    update_time = time.time()
    # Hidden shows are pre-seeded as "seen" so they are never flagged unseen.
    seen_forum_ids = {
        (s.has_forum, s.forum_id)
        for s in Show.select(Show.has_forum, Show.forum_id)
                     .where(Show.hidden)}

    for i, site_show in enumerate(get_site_show_list(**kwargs)):
        progress(step='main', current=i)
        seen_forum_ids.add((site_show.has_forum, site_show.forum_id))
        update_show_info(site_show)

    progress(step='wrapup')

    # patch up the mega-shows: subtract each child's topic/post counts from
    # its parent so the mega total only reflects its own activity
    # NOTE(review): megashow_children is not defined in this view — presumably
    # a module-level mapping {mega_forum_id: child_forum_ids}; confirm.
    for mega, children_ids in iteritems(megashow_children):
        with db.atomic():
            child_topics, child_posts = (
                Show.select(fn.sum(Show.forum_topics),
                            fn.sum(Show.forum_posts))
                    .where(Show.forum_id << list(children_ids))
                    .scalar(as_tuple=True))
            Show.update(
                forum_topics=Show.forum_topics - child_topics,
                forum_posts=Show.forum_posts - child_posts,
            ).where(Show.forum_id == mega).execute()

    # mark unseen shows as deleted; forum_id is only unique per has_forum
    # value, so the NOT IN check is done once per has_forum bucket
    unseen = []
    for has_forum in [True, False]:
        seen_ids = [forum_id for h, forum_id in seen_forum_ids
                    if h is has_forum]
        if seen_ids:
            unseen.extend(Show.select().where(
                ~(Show.forum_id << seen_ids),
                Show.has_forum == has_forum))

    now = datetime.datetime.fromtimestamp(update_time)
    thresh = datetime.timedelta(days=1)
    get_state = operator.attrgetter('state')
    for s in unseen:
        if s.deleted_at is None:
            # first time unseen: stamp it and wait out the grace period
            s.deleted_at = now
            s.save()
        elif (now - s.deleted_at) > thresh:
            # unseen for more than a day: log mod/tvdb context, then delete.
            # groupby is safe here because turf_set is ordered by Turf.state.
            mod_info = []
            bits = {k: ', '.join(t.mod.name for t in v)
                    for k, v in itertools.groupby(
                        s.turf_set.order_by(Turf.state), key=get_state)}
            for k, n in TURF_STATES.items():
                if k in bits:
                    mod_info.append('{}: {}'.format(n, bits[k]))
            if not mod_info:
                mod_info.append('no mods')
            tvdb_info = ', '.join(str(st.tvdb_id) for st in s.tvdb_ids)
            logger.info("Deleting {} ({}) ({})".format(
                s.name, '; '.join(mod_info), tvdb_info))
            s.delete_instance()
    Meta.set_value('forum_update_time', update_time)
def insert_media():
    """Aggregate news stats per (source, keyword) for the selected keyword
    groups and bulk-insert the rollup into media_keyword_group.

    Fixes: Python-2-only ``print`` statements replaced with ``print()``
    calls, and fragile backslash continuations replaced with implicit
    (parenthesized) continuation.
    """
    # NOTE(review): fn.Lower('itri') lowercases the literal string 'itri',
    # yielding a constant username column — confirm this is intentional.
    data = (webapp_news_main
            .select(webapp_news_main.source,
                    webapp_keyword.keyword,
                    fn.count(1),
                    fn.sum(webapp_news_main.share_count).alias('share_count'),
                    fn.sum(webapp_news_main.view_count).alias('view_count'),
                    fn.sum(webapp_news_main.positive).alias('positive'),
                    fn.sum(webapp_news_main.negative).alias('negative'),
                    fn.Lower('itri').alias('username'))
            .join(webapp_keyword_match,
                  on=(webapp_news_main.url == webapp_keyword_match.guid))
            .join(webapp_keyword,
                  on=(webapp_keyword_match.kwgrp == webapp_keyword.kwgrp))
            # hard-coded keyword-group ids of interest; exclude 'Match%' sources
            .where(webapp_keyword_match.kwgrp << (34, 3, 4, 5, 6, 42, 2, 8, 18, 19, 20, 9),
                   ~(webapp_news_main.source % 'Match%'))
            .group_by(webapp_news_main.source, webapp_keyword.keyword)
            .order_by(webapp_news_main.source, webapp_keyword.keyword))
    print(data)
    # INSERT ... SELECT straight from the aggregate query above
    query = (media_keyword_group
             .insert_from(
                 fields=[media_keyword_group.source,
                         media_keyword_group.keyword,
                         media_keyword_group.count,
                         media_keyword_group.share_count,
                         media_keyword_group.view_count,
                         media_keyword_group.positive,
                         media_keyword_group.negative,
                         media_keyword_group.username],
                 query=data)
             .execute())
    print(query)
def _calculate_retained_spp_avg(self, species_ids):
    """
    FIELD-1900: Used to auto-calc DR12/15 weights
    Separate function to store retained average calculation
    Take all retained baskets with counts and weights for species id param
    and get avg fish wt
    :param species_ids: int[], list of species_ids for a given species_complex
    :return: float (average weight per fish), or None when there is no
        current set or the query matches no baskets
    """
    # Implicitly returns None when no set is active.
    if self._current_set:
        faid = self._current_set.fishing_activity
        # average = total basket weight / total fish count, computed in SQL
        avg = Catches.select(
            (fn.sum(SpeciesCompositionBaskets.basket_weight_itq) /
             fn.sum(SpeciesCompositionBaskets.fish_number_itq))
        ).join(SpeciesCompositions).join(SpeciesCompositionItems).join(
            SpeciesCompositionBaskets
        ).where(
            (Catches.fishing_activity == faid) &  # haul
            (Catches.catch_disposition == 'R') &  # retained
            (SpeciesCompositionBaskets.basket_weight_itq.is_null(False)) &  # baskets with actual weights
            (SpeciesCompositionBaskets.fish_number_itq.is_null(False)) &  # baskets with actual counts
            (SpeciesCompositionItems.species.in_(species_ids))  # species associated with CC
        ).scalar()
        # DO NOT ROUND HERE
        # avg = ObserverDBUtil.round_up(avg)  # round here, precision will propagate to downstream vals
        self._logger.info(
            f"Retained avg for set {faid}, species_ids {species_ids} = {avg}"
        )
        return avg
def cache_namespace_repository_sizes(namespace_name):
    """Precompute and cache per-repository manifest sizes for a namespace.

    Inserts a RepositorySize row (repository_id, size_bytes) for every
    repository in `namespace_name` that has at least one live tag and does
    not already have a cached size; existing rows are left untouched.
    """
    namespace = user.get_user_or_org(namespace_name)
    now_ms = get_epoch_timestamp_ms()

    # Repositories with at least one live tag: not hidden, and either no
    # lifetime end (`>> None` is peewee's IS NULL) or an end in the future.
    subquery = (Tag.select(Tag.repository_id).where(
        Tag.hidden == False).where((Tag.lifetime_end_ms >> None) |
                                   (Tag.lifetime_end_ms > now_ms)).group_by(
                                       Tag.repository_id).having(
                                           fn.Count(Tag.name) > 0))

    # Total compressed layer size per repository of this namespace,
    # restricted to repositories that passed the live-tag subquery.
    namespace_repo_sizes = (Manifest.select(
        (Repository.id).alias("repository_id"),
        (Repository.name).alias("repository_name"),
        fn.sum(Manifest.layers_compressed_size).alias("repository_size"),
    ).join(Repository).join(
        subquery, on=(subquery.c.repository_id == Repository.id)).where(
            Repository.namespace_user == namespace.id).group_by(Repository.id))

    # LEFT OUTER join against RepositorySize and keep only repositories whose
    # cached row is NULL, so existing cache entries are never overwritten.
    insert_query = (namespace_repo_sizes.select(
        Repository.id,
        fn.sum(Manifest.layers_compressed_size)).join_from(
            Repository, RepositorySize, JOIN.LEFT_OUTER).where(
                RepositorySize.repository_id.is_null()))

    RepositorySize.insert_from(
        insert_query,
        fields=[RepositorySize.repository_id, RepositorySize.size_bytes],
    ).execute()
def users_rank():
    """Return all users ranked by their summed points, ascending, as JSON."""
    total_points = fn.sum(models.User.points)
    ranking_query = (models.User
                     .select(models.User.twitter_user_id,
                             total_points.alias('points'))
                     .group_by(models.User.twitter_user_id)
                     .order_by(total_points))
    ranking = [{"user_id": row.twitter_user_id, "points": row.points}
               for row in ranking_query]
    return jsonify({"data": ranking})
def _sum_species_comp_item_weight(self, item_id, note=None):
    """
    Sum underlying baskets and update species_comp_item.species_weight
    Used to refresh values after underlying basket updates, post-set

    Fix: the record was previously saved twice when both the weight and a
    note changed; now all changes are written with a single save().
    :param item_id: species comp DB ID (int)
    :param note: string saved to species_comp_item.notes if present
    :return: None
    """
    try:
        sc_item = SpeciesCompositionItems.get(
            SpeciesCompositionItems.species_comp_item == item_id)
    except SpeciesCompositionItems.DoesNotExist:
        self._logger.error(
            f"unable to find species comp item {item_id} in DB")
        return

    # Aggregate basket weights; scalar() is None when there are no baskets.
    new_wt = SpeciesCompositionBaskets.select(
        fn.sum(SpeciesCompositionBaskets.basket_weight_itq)).where(
        SpeciesCompositionBaskets.species_comp_item == item_id).scalar()

    dirty = False
    if sc_item.species_weight != new_wt:
        sc_item.species_weight = new_wt
        dirty = True
        self._logger.info(
            f"Species comp item species_weight updated to {new_wt}")
    if note:
        sc_item.notes = note
        dirty = True
    if dirty:
        sc_item.save()
def compute_moved_stock_overview(*, organisation_id, after, before):
    """Construct filter for date range, if at least one of `after` or `before`
    is given. Compute number of boxes, and contained items, moved by
    `organisation_id` that were served in that date range (default to all
    time). Group by ProductCategory.
    """
    date_filter = _build_range_filter(Box.last_modified_on, low=after, high=before)
    moved_boxes = (Box
                   .select(ProductCategory.name,
                           fn.sum(Box.items).alias("number_of_items"),
                           fn.Count(Box.id).alias("number_of_boxes"))
                   .join(Location)
                   .join(Base)
                   .switch(Box)
                   .join(Product)
                   .join(ProductCategory)
                   .where((Base.organisation == organisation_id)
                          & (date_filter)
                          & (Location.visible == 1)
                          & (Location.is_lost != 1)
                          & (Location.is_scrap != 1))
                   .group_by(ProductCategory.name))
    return [{
        "product_category_name": row.product.category.name,
        "number_of_boxes": row.number_of_boxes,
        "number_of_items": row.number_of_items,
    } for row in moved_boxes]
def loudest(ctx):
    """
    Loudest users.

    Retrieve the top 5 loudest users.
    """
    top_shouters = (Shout
                    .select(Shout.who, Shout.network, fn.sum(1).alias("count"))
                    .group_by(Shout.who, Shout.network)
                    .order_by(SQL("count DESC"))
                    .limit(5))
    rows = [(row.who, row.network, row.count) for row in top_shouters]

    if not rows:
        ctx.respond(ctx._("Nobody has shouted yet."))
        return

    formatted = ", ".join(
        "{who} from {network} ({count} shout{s})".format(
            who=who, network=network, count=count,
            s="s" if count != 1 else "")
        for who, network, count in rows)
    ctx.respond(ctx._("Loudest people: {loudest}.").format(loudest=formatted))
def contract_finish(body):
    """Roll a finished contract into its team's and user's statistics.

    Fix: WeekStone sum via ``.scalar()`` returns None when the contract has
    no week stones, which previously made ``hours += time_count`` raise
    TypeError; the sum is now coerced to 0.
    :param body: dict expected to contain "contract_id"
    """
    if "contract_id" not in body:
        return
    contract = Contract.select().where(Contract.id == body["contract_id"]).first()
    if not contract:
        return

    # Hours billed on hourly contracts: total of logged shot_times.
    time_count = 0
    if contract.hourly > 0:
        time_count = WeekStone.select(fn.sum(WeekStone.shot_times)).where(
            WeekStone.contract == contract).scalar() or 0

    ts = TeamStatistics.select().where(TeamStatistics.team == contract.team).first()
    if not ts:
        ts = TeamStatistics()
        ts.user = contract.team.user
        ts.team = contract.team
    # NOTE(review): assumes total_amount/hours default to 0 on a fresh row;
    # confirm the model defaults, else += on None would fail here too.
    ts.total_amount += contract.total_amount
    if contract.hourly > 0:
        ts.hours += time_count
    ts.save()

    us = UserStatistics.select().where(UserStatistics.user == contract.user).first()
    if not us:
        us = UserStatistics()
        us.user = contract.user
    us.total_amount += contract.total_amount
    if contract.hourly > 0:
        us.hours += time_count
    us.coop_success += 1
    # Number of teams this user has finished at least two contracts with.
    coop_two_count = Contract.select().where(
        Contract.user == contract.user,
        Contract.status == "finish").group_by(Contract.team).having(
        fn.count(Contract.team) >= 2).count()
    us.coop_two = coop_two_count
    us.save()
def datenvormonatladen(exportdaten):
    """Load last month's rain total and average temperature into exportdaten.

    Fix: when the archive has no rows for the previous month, SQL avg()
    yields None and round(None, 1) crashed; like datenmonatladen(), an
    empty string is now exported instead.
    :param exportdaten: dict updated in place and returned
    """
    # Berechnen des Timestamps fuer das Vormonat und aktueller Monat
    timestamp = datetime.datetime.now()
    monat = timestamp.month
    jahr = timestamp.year
    if monat == 1:
        vormonat = 12
        vorjahr = jahr - 1
    else:
        vormonat = monat - 1
        vorjahr = jahr
    timestampvormonat = datetime.datetime(vorjahr, vormonat, 1)
    timestampvormonat = timestampvormonat.timestamp()
    timestamp = datetime.datetime(jahr, monat, 1).timestamp()

    # Auslesen der Regenmenge und Temperaturdurchschnittes im Vormonat
    data = db_weewx.Archive.select(
        fn.sum(db_weewx.Archive.rain).alias("regenmenge"),
        fn.avg(db_weewx.Archive.out_temp).alias("durchschnittstemp"),
        db_weewx.Archive.us_units).where(
        db_weewx.Archive.date_time.between(timestampvormonat, timestamp)).namedtuples()[0]
    if data.us_units:
        regen_vormonat = mwu.regen_menge(data.regenmenge)
        temp = mwu.temperaturumrechner(data.durchschnittstemp)
    else:
        regen_vormonat = data.regenmenge
        temp = data.durchschnittstemp
    # Guard against an empty month (avg() -> None), consistent with
    # datenmonatladen() which also exports "" when no data exists.
    if isinstance(temp, (int, float)):
        temp_schnitt_vormonat = round(temp, 1)
    else:
        temp_schnitt_vormonat = ""
    exportdaten["tempSchnittVormonat"] = temp_schnitt_vormonat
    exportdaten["regenVormonat"] = regen_vormonat
    return exportdaten
def consulta():
    """Render per-user consumption totals for a date range taken from the
    submitted form; aborts with 406 on a missing or inverted range."""
    admin = auth.get_logged_in_user()
    if request.form["pasado"] == "":
        abort(406)
    from datetime import date
    anio, mes, dia = request.form["pasado"].split("-")
    pasado = date(int(anio), int(mes), int(dia))
    if request.form["futuro"] == "":
        futuro = datetime.datetime.now().date()
    else:
        anio, mes, dia = request.form["futuro"].split("-")
        futuro = date(int(anio), int(mes), int(dia))
    if pasado > futuro:
        abort(406)

    # Sum price*quantity per user over the requested window.
    consumo_semanal = (Consumo.select(
        Usuario.nombre,
        fn.sum(Consumo.precio * Consumo.cantidad).alias('total')
    ).join(Usuario).where(
        (Consumo.fecha >= pasado) &
        (Consumo.fecha <= futuro) &
        (Consumo.activo == True)
    ).group_by(Usuario.id).order_by(Usuario.nombre.desc()))

    arreglo_consumo = {}
    for fila in consumo_semanal:
        arreglo_consumo[str(fila.usuario.nombre)] = fila.total

    args = {
        'consumos': arreglo_consumo,
        'auth': admin,
        'pasado': str(pasado),
        'futuro': str(futuro),
    }
    return render_template("consultas.html", args=args,)
def datenmonatladen(exportdaten):
    """Load this month's rain total and average temperature into exportdaten.

    :param exportdaten: dict updated in place and returned
    """
    # Unix timestamp of the start of the current month.
    jetzt = datetime.datetime.now()
    monatsbeginn = datetime.datetime(jetzt.year, jetzt.month, 1).timestamp()

    # Rain this month, from the per-day rain aggregate table.
    regen_summe = db_weewx.ArchiveDayRain.select(fn.sum(
        db_weewx.ArchiveDayRain.sum)).where(
        db_weewx.ArchiveDayRain.date_time >= monatsbeginn).scalar()
    regen_monat = mwu.regen_menge(regen_summe)

    # Average temperature this month, converting US-unit rows per record.
    messwerte = db_weewx.Archive.select(db_weewx.Archive.us_units,
                                        db_weewx.Archive.out_temp).where(
                                            db_weewx.Archive.date_time >= monatsbeginn)
    temp_summe = 0
    anzahl = 0
    for messwert in messwerte:
        temp = (mwu.temperaturumrechner(messwert.out_temp)
                if messwert.us_units == 1 else messwert.out_temp)
        # Skip NULL/non-numeric readings so the average stays meaningful.
        if isinstance(temp, (int, float)):
            temp_summe += temp
            anzahl += 1
    temp_schnitt_monat = round(temp_summe / anzahl, 1) if anzahl != 0 else ""

    exportdaten["tempSchnittMonat"] = temp_schnitt_monat
    exportdaten["regenMonat"] = regen_monat
    return exportdaten
def loudest(ctx):
    """
    Loudest users.

    Retrieve the top 5 loudest users.
    """
    query = (Shout
             .select(Shout.who, Shout.network, fn.sum(1).alias("count"))
             .group_by(Shout.who, Shout.network)
             .order_by(SQL("count DESC"))
             .limit(5))
    entries = []
    for row in query:
        entries.append((row.who, row.network, row.count))

    if entries:
        parts = []
        for who, network, count in entries:
            parts.append("{who} from {network} ({count} shout{s})".format(
                who=who, network=network, count=count,
                s="s" if count != 1 else ""))
        ctx.respond(ctx._("Loudest people: {loudest}.").format(
            loudest=", ".join(parts)))
    else:
        ctx.respond(ctx._("Nobody has shouted yet."))
def _calculate_subsample_count(self):
    """
    FIELD-1471: get rounded avg weight of fish in same day as haul
    (using created_date of current haul)

    Fix: when trip/species/date could not be retrieved, execution previously
    fell through after logging and crashed on ``haulset_date[:10]``
    (None is not subscriptable); it now returns early.
    :return: sets float val _todays_avg_weight (exposed as todaysAvgWeight property)
    """
    trip_id = ObserverDBUtil.get_current_trip_id()
    species_list = self._observer_species.get_related_species(
        self._observer_species.currentSpeciesItemSpeciesID)
    haulset_date = ObserverDBUtil.get_current_haulset_createddate()
    if not haulset_date or not species_list or not trip_id:
        self.logger.error(
            f"haulset_date/species/trip list not retrieved, can't calculate subsample count"
        )
        return  # BUG FIX: bail out; cannot compute without all three inputs
    # NOTE(review): both self.logger and self._logger are used in this method —
    # confirm which attribute the class actually defines.
    self._logger.info(
        f"Daily subsample count calc'd, species {species_list} day {haulset_date[:10]} trip {trip_id}"
    )
    # Sum subsample fish counts for related species whose baskets were
    # created on the same calendar day (date compared on the ISO prefix).
    self.subsampleCount = Trips.select(
        fn.sum(SpeciesCompositionBaskets.fish_number_itq)
    ).join(FishingActivities).join(
        Catches, on=FishingActivities.fishing_activity == Catches.fishing_activity
    ).join(SpeciesCompositions).join(SpeciesCompositionItems).join(
        SpeciesCompositionBaskets).join(
        Species,
        on=SpeciesCompositionItems.species == Species.species  # no rel_model in model def...
    ).where((Trips.trip == trip_id) &
            (Species.species.in_(species_list)) &
            (fn.substr(SpeciesCompositionBaskets.created_date, 1, 10) ==
             fn.substr(haulset_date, 1, 10)) &
            (SpeciesCompositionBaskets.is_subsample == 1)).scalar()
def post(self, *args, **kwargs):
    """Report a user's balance: payments made, receipts, purchases, account
    offset and the resulting debtor/creditor status, written as JSON.

    Fixes: AttributeError when the user lookup failed (picture/account were
    read off ``False``); bare ``except`` narrowed to ``User.DoesNotExist``;
    fragile raw-SQL dict key replaced with ``.scalar()``; builtin names
    ``sum``/``dict`` no longer shadowed.
    """
    user_id = self.get_argument('user_id')
    try:
        user = User().select().where(User.id == user_id).get()
    except User.DoesNotExist:
        user = False
    pic = user.picture_address if user else None

    # fn.sum(...).scalar() returns the aggregate directly, or None when
    # there are no matching rows — coerce to 0.
    payment = Payment.select(fn.sum(Payment.amount)).where(
        Payment.payer_id == user_id, Payment.type == True).scalar() or 0
    recive = Payment.select(fn.sum(Payment.amount)).where(
        Payment.payer_id == user_id, Payment.type == False).scalar() or 0
    spent = Buy.select(fn.sum(Buy.amount)).where(
        Buy.payer_id == user_id).scalar() or 0

    # Reuse the user already fetched instead of querying a second time.
    account_user = user.account if user else False

    balance = int(payment) + int(spent) - int(recive) - int(account_user)
    if balance > 0:
        status = "بستانکار"
    elif balance < 0:
        status = "بدهکار"
    else:
        status = "تسویه"
    response = {
        'picture_address': pic,
        'payment': payment,
        'recive': recive,
        'spent': spent,
        'account_user': account_user,
        'sum': balance,
        'status': status
    }
    self.write(response)
def regenmenge_auslesen(timestamp):
    """Return the summed rain amount since `timestamp`, converting from US
    units when the archive row says so."""
    # NOTE(review): us_units is taken from an arbitrary row of the aggregate
    # result — assumes the archive does not mix unit systems; confirm.
    zeile = db_weewx.Archive.select(
        db_weewx.Archive.us_units,
        fn.sum(db_weewx.Archive.rain).alias("regen")).where(
        db_weewx.Archive.date_time >= timestamp).namedtuples()[0]
    if zeile.us_units == 1:
        return mwu.regen_menge(zeile.regen)
    return zeile.regen
def retainedHaulWeight(self):
    """
    assumes fishing_activity_id is loaded, catch if not???
    left joins, should always return a number, even if no retained
    :return: int (sum of retained catch weights for haul)
    """
    retained_total = fn.COALESCE(fn.sum(Catches.catch_weight), 0)
    query = (FishingActivities
             .select(retained_total)
             .join(Catches, JOIN.LEFT_OUTER)
             .where((Catches.fishing_activity == self._current_haul.fishing_activity)
                    & (Catches.catch_disposition == 'R')))
    return query.scalar()
def _get_species_item_dict(db_model):
    """
    Build a dict out of a peewee model
    Purpose is for storing peewee model <-> FramListModel
    :param db_model: peewee model to convert
    :return: dict with values translated as desired to be FramListModel friendly
    """
    species_dict = model_to_dict(db_model)
    # Example for keys that we want to rename from model-> dict:
    # tripdict['renamed'] = tripdict.pop('rename_me')

    # Populate "extra" keys if needed
    species_dict['common_name'] = db_model.species.common_name
    # TODO: reuse logic in ObserverSpecies.totalFishCounted property
    # FIELD-2040: display actual count (sum of fish_number_itq); scalar() is
    # None when the item has no baskets with counts
    species_dict['total_fish_counted'] = SpeciesCompositionBaskets.select(
        fn.sum(SpeciesCompositionBaskets.fish_number_itq)).where(
        SpeciesCompositionBaskets.species_comp_item ==
        db_model.species_comp_item).scalar()

    # Create temp column for weighed_and_tallied_count
    # (None values are treated as 0 before summing)
    weighed_count = db_model.species_number if db_model.species_number else 0
    tallied_count = db_model.total_tally if db_model.total_tally else 0
    weighed_and_tallied_count = weighed_count + tallied_count
    species_dict['weighed_and_tallied_count'] = weighed_and_tallied_count

    # Create temp column for avg_weight; None unless both weight and number
    # are present (conditional binds to the division, not to species_weight)
    avg_weight = db_model.species_weight / db_model.species_number if db_model.species_number and db_model.species_weight else None
    species_dict['avg_weight'] = avg_weight

    # This is inefficient, but required to get an accurate count of bios for now...
    # Get biospecimen item count from DB
    catch_id = db_model.species_composition.catch.catch
    species_id = db_model.species.species
    discard_reason = db_model.discard_reason
    logging.info(
        f'Getting biospecimen items catch id is {catch_id} and species_id is {species_id}'
    )
    # One query per biospecimen record (deliberate N+1 per the note above).
    bios_q = BioSpecimens.select().where(
        (BioSpecimens.catch == catch_id) &
        (BioSpecimens.species == species_id) &
        (BioSpecimens.discard_reason == discard_reason))
    total_bio_items_count = 0
    for b in bios_q:
        items_q = BioSpecimenItems.select().where(
            (BioSpecimenItems.bio_specimen == b.bio_specimen))
        total_bio_items_count += items_q.count()
    species_dict['bio_count'] = total_bio_items_count
    return species_dict
def compute_number_of_sales(*, organisation_id, after, before):
    """Construct filter for date range, if at least one of `after` or `before`
    is given. Compute number of sales performed by `organisation_id` in that
    date range (default to all time).

    Fix: coerce the aggregate to 0 — ``scalar()`` returns None when no
    Transactions are selected (the ``or 0`` had been lost into a comment).
    """
    date_filter = _build_range_filter(Transaction.created_on, low=after, high=before)
    return (Transaction.select(fn.sum(Transaction.count))
            .join(Beneficiary)
            .join(Base)
            .where((date_filter)
                   & (Base.organisation == organisation_id)
                   & (Transaction.tokens >= 0))
            .scalar()  # returns None if no Transactions selected
            or 0)
def post(self, *args, **kwargs):
    """Report a user's balance: payments made, receipts, purchases, account
    offset and the resulting debtor/creditor status, written as JSON.

    Fixes: AttributeError when the user lookup failed (picture/account were
    read off ``False``); bare ``except`` narrowed to ``User.DoesNotExist``;
    fragile raw-SQL dict key replaced with ``.scalar()``; builtin names
    ``sum``/``dict`` no longer shadowed.
    """
    user_id = self.get_argument('user_id')
    try:
        user = User().select().where(User.id == user_id).get()
    except User.DoesNotExist:
        user = False
    pic = user.picture_address if user else None

    # fn.sum(...).scalar() returns the aggregate directly, or None when
    # there are no matching rows — coerce to 0.
    payment = Payment.select(fn.sum(Payment.amount)).where(
        Payment.payer_id == user_id, Payment.type == True).scalar() or 0
    recive = Payment.select(fn.sum(Payment.amount)).where(
        Payment.payer_id == user_id, Payment.type == False).scalar() or 0
    spent = Buy.select(fn.sum(Buy.amount)).where(
        Buy.payer_id == user_id).scalar() or 0

    # Reuse the user already fetched instead of querying a second time.
    account_user = user.account if user else False

    balance = int(payment) + int(spent) - int(recive) - int(account_user)
    if balance > 0:
        status = "بستانکار"
    elif balance < 0:
        status = "بدهکار"
    else:
        status = "تسویه"
    response = {'picture_address': pic,
                'payment': payment,
                'recive': recive,
                'spent': spent,
                'account_user': account_user,
                'sum': balance,
                'status': status}
    self.write(response)
def compute_stock_overview(*, organisation_id):
    """Compute number of boxes, and number of contained items, managed by
    `organisation_id`.
    """
    stats = (Box
             .select(fn.sum(Box.items).alias("number_of_items"),
                     fn.Count(Box.id).alias("number_of_boxes"))
             .join(Location)
             .join(Base)
             .where((Base.organisation == organisation_id)
                    & (Location.visible == 1)
                    & (Location.is_lost != 1)
                    & (Location.is_scrap != 1)
                    & (Location.is_donated != 1))
             .get())
    return {
        "number_of_boxes": stats.number_of_boxes,
        "number_of_items": stats.number_of_items,
    }
def get_namespace_size(namespace_name):
    """Return the total compressed manifest size (bytes) of all repositories
    in the namespace that have at least one live tag; 0 when empty."""
    namespace = user.get_user_or_org(namespace_name)
    now_ms = get_epoch_timestamp_ms()
    # Repositories with at least one live tag: not hidden, and either no
    # lifetime end (>> None is an IS NULL test) or an end in the future.
    live_repos = (Tag
                  .select(Tag.repository_id)
                  .where(Tag.hidden == False)
                  .where((Tag.lifetime_end_ms >> None)
                         | (Tag.lifetime_end_ms > now_ms))
                  .group_by(Tag.repository_id)
                  .having(fn.Count(Tag.name) > 0))
    total_size = (Manifest
                  .select(fn.sum(Manifest.layers_compressed_size))
                  .join(Repository)
                  .join(live_repos,
                        on=(live_repos.c.repository_id == Repository.id))
                  .where(Repository.namespace_user == namespace.id)).scalar()
    return total_size or 0
def _use_tmp_personunion(query_union):
    """Materialize `query_union` into a temporary PersonUnion table, return the
    top row grouped by login_uuid ordered by summed val (descending).

    Fix: the temp table was previously leaked if insert/select raised; it is
    now dropped in a ``finally`` block.
    :param query_union: peewee select providing (val, login_password, login_uuid)
    :return: the first result row, or None when the union is empty
    """
    PersonUnion.create_table()
    try:
        PersonUnion.insert_from(
            query_union,
            [PersonUnion.val, PersonUnion.login_password, PersonUnion.login_uuid]
        ).execute()
        res = (PersonUnion
               .select(fn.sum(PersonUnion.val).alias('sum_val'),
                       PersonUnion.login_password,
                       PersonUnion.login_uuid)
               .group_by(PersonUnion.login_uuid)
               .order_by(SQL('sum_val').desc())
               .peek())
    finally:
        # Always clean up the temp table, even on failure.
        PersonUnion.drop_table()
    return res
def get_bank_transactions(bank_name=None):
    """Return per-bank transaction counts and amount totals, optionally
    restricted to `bank_name`; raises BankNotFoundError for an unknown bank."""
    # Unaliased fn.COUNT / fn.sum are exposed by peewee as .count / .sum.
    query = (Transaction
             .select(fn.COUNT(fn.Distinct(Transaction.id)),
                     fn.sum(Transaction.amount),
                     Bank.name)
             .join(Account, on=(Transaction.account_id == Account.id))
             .join(Bank)
             .group_by(Bank.name))
    if bank_name is not None:
        if Bank.get_or_none(name=bank_name) is None:
            raise BankNotFoundError(bank_name)
        query = query.where(Bank.name == bank_name)
    return [{
        "name": row.account.bank.name,
        "number of transactions": row.count,
        "total amount": row.sum,
    } for row in query]
def loudest(client, target, origin):
    """Message `target` with the top 5 loudest shouters, addressed to `origin`."""
    top = (Shout
           .select(Shout.who, Shout.network, fn.sum(1).alias("count"))
           .group_by(Shout.who, Shout.network)
           .order_by(SQL("count DESC"))
           .limit(5))
    shouters = [(row.who, row.network, row.count) for row in top]

    if not shouters:
        client.message(target, "{origin}: Nobody has shouted yet.".format(
            origin=origin
        ))
        return

    summary = ", ".join(
        "{who} from {network} ({count} shout{s})".format(
            who=who, network=network, count=count,
            s="s" if count != 1 else "")
        for who, network, count in shouters)
    client.message(target, "{origin}: Loudest people: {loudest}.".format(
        origin=origin, loudest=summary
    ))
def _sum_catch_weight(self, catch_id):
    """
    Sum underlying species_comp_item.species_weight values and update
    Catch.sample_weight
    Used to refresh values after underlying basket updates, post-set
    :param catch_id: catch DB id (int)
    :return: None
    """
    try:
        catch = Catches.get(Catches.catch == catch_id)
    except Catches.DoesNotExist:
        self._logger.error(f"Unable to find catch {catch_id} in DB")
        return
    # Aggregate item weights across the catch's species compositions.
    new_wt = (SpeciesCompositionItems
              .select(fn.sum(SpeciesCompositionItems.species_weight))
              .join(SpeciesCompositions)
              .where(SpeciesCompositions.catch == catch_id)
              .scalar())
    if catch.sample_weight == new_wt:
        return  # already up to date; avoid a pointless write
    catch.sample_weight = new_wt
    catch.save()
    self._logger.info(
        f"Catch {catch_id} sample_weight updated to {new_wt}")
def resolve_beneficiary_tokens(beneficiary_obj, info):
    """Return the beneficiary's total tokens across all transactions (0 when
    they have none); requires transaction:read permission."""
    authorize(permission="transaction:read")
    token_sum = (Transaction
                 .select(fn.sum(Transaction.tokens))
                 .where(Transaction.beneficiary == beneficiary_obj.id)
                 .scalar())
    # scalar() is None when the beneficiary has no transactions yet
    return token_sum or 0