def api_stat_site_cate_daily(date):
    """Return per-category daily aggregate stats for a site.

    Args:
        date: a 'YYYY-MM-DD' string (typically from the URL path).

    Returns:
        A Flask JSON response: ``{'code': 1, 'msg': 'invalid request'}`` for
        a malformed date, otherwise ``{'code': 0, ...}`` with one entry per
        category carrying summed dcount/gcount/ucount/income and the number
        of distinct rooms seen that day.
    """
    try:
        date = datetime.strptime(date, '%Y-%m-%d').date()
    except (ValueError, KeyError):
        # BUG FIX: the original `except ValueError or KeyError:` evaluates
        # the expression `ValueError or KeyError` to just `ValueError`, so
        # KeyError was silently never caught.  Use an exception tuple.
        return jsonify({'code': 1, 'msg': 'invalid request'})

    # Sum the hourly rows for the requested calendar day, grouped by category.
    query = (RoomHourlyStat
             .select(
                 RoomHourlyStat.cate,
                 fn.SUM(RoomHourlyStat.dcount).alias('dsum'),
                 fn.SUM(RoomHourlyStat.gcount).alias('gsum'),
                 fn.SUM(RoomHourlyStat.ucount).alias('usum'),
                 fn.SUM(RoomHourlyStat.income).alias('isum'),
                 fn.COUNT(fn.DISTINCT(RoomHourlyStat.room)).alias('rsum'))
             .join(Date, on=(RoomHourlyStat.date == Date.date_key))
             .where(Date.date == date)
             .group_by(RoomHourlyStat.cate))

    payload = [{
        'cate': row.cate.name,
        'income': row.isum,
        'ucount': row.usum,
        'gcount': row.gsum,
        'dcount': row.dsum,
        'rcount': row.rsum,
    } for row in query]
    return jsonify({'code': 0, 'msg': 'success', 'data': payload})
def get(self):
    """List every distinct event year, newest first.

    Returns a flask-restful (body, status, headers) triple; the Expires
    header is set cache_timeout seconds into the future.
    """
    distinct_years = (Event
                      .select(fn.DISTINCT(Event.year))
                      .order_by(Event.year.desc())
                      .namedtuples())
    years = [row.year for row in distinct_years]
    expires_at = formatdate(timeval=time() + cache_timeout, usegmt=True)
    return (years, 200, {'Expires': expires_at})
def confirm_pending_upgrades(upgrade_discipline):
    """
    Since upgrades are recognized the next race after they're earned, we
    don't have a good way of suppressing them if someone is upgraded on the
    OBRA website but don't race again.
    Work around that by creating a PendingUpgrade record that will mark it
    until they race again.
    """
    logger.info(
        'Checking for confirmed upgrades - upgrade_discipline={}'.format(
            upgrade_discipline))

    # Rebuild the markers for this discipline from scratch on every run.
    (PendingUpgrade.delete().where(
        PendingUpgrade.discipline == upgrade_discipline).execute())

    # Subquery: for each person, the id of their most recent result in this
    # discipline (FIRST_VALUE over a per-person window ordered newest-first,
    # restricted to races that actually have categories recorded).
    last_result = (Result.select().join(Race, src=Result).join(
        Event, src=Race).join(
            Person, src=Result).where(Race.categories.length() > 0).where(
                Event.discipline << DISCIPLINE_MAP[upgrade_discipline]).select(
                    fn.DISTINCT(
                        fn.FIRST_VALUE(Result.id).over(
                            partition_by=[Result.person_id],
                            order_by=[Race.date.desc(), Race.created.desc()],
                            start=Window.preceding())).alias('first_id')))

    # Riders whose latest result still has needs_upgrade set; races named
    # 'Junior' are excluded from confirmation.
    query = (Result.select(Result, Race, Event, Person, Points).join(
        Race, src=Result).join(Event, src=Race).join(Person, src=Result).join(
            Points, src=Result).where(Result.id << last_result).where(
                Points.needs_upgrade == True).where(
                    ~(Race.name.contains('Junior'))).order_by(
                        Points.sum_categories.asc(), Points.sum_value.desc()))

    for result in query.prefetch(Points):
        # Tentatively move the rider down one category number (an upgrade),
        # then check whether an upgrade confirmation record exists for it.
        result.points[0].sum_categories = [
            min(result.points[0].sum_categories) - 1
        ]
        confirm_category_change(result, ['UPGRADED'])
        if result.points[0].upgrade_confirmation_id:
            logger.debug('Confirmed pending upgrade for {}, {} to {}'.format(
                result.person.last_name, result.person.first_name,
                result.points[0].sum_categories[0]))
            # Upsert keyed on the result so reruns refresh the same marker
            # in place rather than duplicating it.
            (PendingUpgrade.insert(result_id=result.id,
                                   upgrade_confirmation_id=result.points[0].
                                   upgrade_confirmation_id,
                                   discipline=upgrade_discipline).on_conflict(
                                       conflict_target=[PendingUpgrade.result],
                                       preserve=[
                                           PendingUpgrade.upgrade_confirmation,
                                           PendingUpgrade.discipline
                                       ]).execute())
def get(self):
    """Return up to six riders whose most recent result needs an upgrade.

    Considers results since Jan 1 of last year, keeps only each person's
    latest result, and orders candidates by category then points.
    """
    # Look back to the start of the previous calendar year.
    cur_year = date.today().year
    start_date = date(cur_year - 1, 1, 1)

    # Subquery to find the most recent result for each person
    latest_cte = (Result.select().join(Race, src=Result).join(
        Event, src=Race).join(
            Person, src=Result).where(Race.date >= start_date).where(
                Race.categories.length() > 0).select(
                    fn.DISTINCT(
                        fn.FIRST_VALUE(Result.id).over(
                            partition_by=[Result.person_id],
                            order_by=[
                                Race.date.desc(), Race.created.desc()
                            ],
                            start=Window.preceding())).alias(
                                'result_id')).cte('latest_results'))

    # Collapse the CTE's ids into a JSON array and re-expand it with
    # JSON_EACH so the outer query can use a plain `IN (...)` test.
    # NOTE(review): JSON_GROUP_ARRAY/JSON_EACH are SQLite JSON1 functions —
    # this presumably assumes a SQLite backend; confirm.
    latest_results = Select(from_list=[
        fn.JSON_EACH(
            latest_cte.select_from(
                fn.JSON_GROUP_ARRAY(
                    latest_cte.c.result_id).python_value(
                        str)).scalar())
    ],
                            columns=[Entity('value')])

    # Latest results still flagged needs_upgrade, best candidates first,
    # capped at six rows.
    query = (Result.select(
        Result, Race, Event, Series, Person, Points, Rank,
        Quality).join(Race, src=Result).join(Event, src=Race).join(
            Series, src=Event, join_type=JOIN.LEFT_OUTER).join(
                Person, src=Result).join(Points, src=Result).join(
                    Rank, src=Result, join_type=JOIN.LEFT_OUTER).join(
                        Quality, src=Race, join_type=JOIN.LEFT_OUTER).where(
                            Result.id << latest_results).where(
                                Points.needs_upgrade == True).order_by(
                                    Points.sum_categories.asc(),
                                    Points.sum_value.desc()).limit(6))

    return ([
        marshal(r, result_with_person_and_race_with_event)
        for r in query.prefetch(Race, Event, Series, Person, Points, Rank,
                                Quality)
    ], 200, {
        'Expires': formatdate(timeval=time() + cache_timeout, usegmt=True)
    })
def image_list_annotated(session_name=None, page=0, items_per_page=10):
    """Return one page of annotated images for a session.

    Only images that appear in ImageAnnotation are included.  The returned
    dict holds the page of rows (each augmented with a 'url' field), the
    total number of pages, and the current page number.
    """
    annotated_ids = ImageAnnotation.select(
        fn.DISTINCT(ImageAnnotation.image_id))
    predicate = ((ImageStorage.session_name == session_name)
                 & (ImageStorage.id.in_(annotated_ids)))

    total = ImageStorage.select().where(predicate).count()

    rows = list(
        ImageStorage
        .select(ImageStorage.id, ImageStorage.session_name)
        .where(predicate)
        .order_by(ImageStorage.id)
        .paginate(page, items_per_page)
        .dicts())
    for row in rows:
        row['url'] = '/api/get_image_by_id/' + str(row['id'])

    app.logger.info(rows)
    return dict(result=rows,
                num_page=math.ceil(total / items_per_page),
                current_page=page)
def jsonb_keys(self, field):
    """Return the distinct object keys of a JSONB column (or sub-path).

    ``field`` may include lookup segments, which are resolved against the
    model attribute before querying.  Responds with ``{"keys": [...]}``;
    an empty list when the path does not hold JSON objects.
    """
    base_field, lookups = StatsQueryBuilder.parse_json_field(field)
    attr = getattr(self.model, base_field)
    for segment in lookups:
        attr = attr[segment]

    # Some peewee field types expose an explicit JSON form; fall back to
    # the attribute itself otherwise.
    json_lookup = attr.as_json() if hasattr(attr, "as_json") else attr

    query = self.model.select(
        fn.DISTINCT(fn.jsonb_object_keys(json_lookup)).alias('key'))
    try:
        keys = [row["key"] for row in query.dicts()]
    except (DataError, ProgrammingError):
        # Non-object values (or a bad path) raise here; report "no keys".
        keys = []
    return self.response({"keys": keys})
def user_summary():  # noqa: D103
    """Render per-organisation user counts vs ORCID-linked user counts.

    Query args: ``from_date``/``to_date`` (via DateRangeForm), ``sort``
    (1 = by counts, otherwise by name) and ``desc``.  When no date range is
    supplied, redirects to the same view bounded by the earliest and latest
    user creation dates.
    """
    form = DateRangeForm(request.args)
    sort = request.args.get("sort")
    if sort:
        try:
            sort = int(sort)
        except ValueError:
            # BUG FIX: was a bare `except:`, which also swallows
            # SystemExit/KeyboardInterrupt; int() on a str raises ValueError.
            sort = None
    desc = request.args.get("desc")
    if desc:
        try:
            desc = int(desc)
        except ValueError:
            # BUG FIX: narrowed from a bare `except:` (see above).
            desc = None

    if not (form.from_date.data and form.to_date.data):
        # No explicit range: default to the full span of user creation
        # dates and redirect so the range shows up in the URL.
        date_range = User.select(
            fn.MIN(User.created_at).alias('from_date'),
            fn.MAX(User.created_at).alias('to_date')).first()
        return redirect(
            url_for(
                "user_summary",
                from_date=date_range.from_date.date().isoformat(),
                to_date=date_range.to_date.date().isoformat(),
                sort=sort,
                desc=desc))

    # Users created inside the range, counted per organisation.
    # NOTE(review): the join condition `UserOrg.org_id == User.id` compares
    # an org id to a user id — looks suspicious; confirm against the schema.
    user_counts = (User.select(
        User.organisation.alias("org_id"),
        fn.COUNT(fn.DISTINCT(User.id)).alias("user_count")).where(
            User.created_at.between(form.from_date.data,
                                    form.to_date.data)).join(
                                        UserOrg,
                                        JOIN.LEFT_OUTER,
                                        on=(UserOrg.org_id == User.id)).group_by(
                                            User.organisation)).alias("user_counts")

    # Users who obtained an ORCID token inside the range, per organisation.
    linked_counts = (OrcidToken.select(
        OrcidToken.org.alias("org_id"),
        fn.COUNT(fn.DISTINCT(OrcidToken.user)).alias("linked_user_count")).where(
            OrcidToken.created_at.between(form.from_date.data,
                                          form.to_date.data)).group_by(
                                              OrcidToken.org).alias("linked_counts"))

    query = (Organisation.select(
        Organisation.name,
        fn.COALESCE(user_counts.c.user_count, 0).alias("user_count"),
        fn.COALESCE(linked_counts.c.linked_user_count,
                    0).alias("linked_user_count")).join(
                        user_counts,
                        on=(Organisation.id == user_counts.c.org_id)).join(
                            linked_counts,
                            JOIN.LEFT_OUTER,
                            on=(Organisation.id == linked_counts.c.org_id)))

    # sort == 1 orders by the computed counts; anything else by name.
    if sort == 1:
        order_fields = [SQL("user_count"), SQL("linked_user_count"), ]
    else:
        order_fields = [Organisation.name, ]
    if desc:
        order_fields = [f.desc() for f in order_fields]
    query = query.order_by(*order_fields)

    total_user_count = sum(r.user_count for r in query if r.user_count)
    total_linked_user_count = sum(
        r.linked_user_count for r in query if r.linked_user_count)

    # Column headers double as sort-toggle links (clicking the current sort
    # column flips the direction).
    headers = [(h,
                url_for(
                    "user_summary",
                    from_date=form.from_date.data,
                    to_date=form.to_date.data,
                    sort=i,
                    desc=1 if sort == i and not desc else 0))
               for i, h in enumerate(
                   ["Name", "Linked User Count / User Count (%)"])]
    return render_template(
        "user_summary.html",
        form=form,
        query=query,
        total_user_count=total_user_count,
        total_linked_user_count=total_linked_user_count,
        sort=sort,
        desc=desc,
        headers=headers)
def calculate_race_ranks(upgrade_discipline, incremental=False):
    """Assign a Quality score to each race and a Rank to each result.

    Races in ``upgrade_discipline`` that lack a Quality row are processed
    oldest-first; when ``incremental`` is False, all existing Rank and
    Quality rows for the discipline are deleted first.  The per-race
    algorithm is spelled out in the numbered note inside the loop.
    """
    # Delete all Rank and Quality data for this discipline and recalc from scratch
    category_filter = (Race.categories.length() != 0)
    if upgrade_discipline == 'cyclocross':
        # Cyclocross also admits races whose name matches '%single%' but not
        # '%person%', even when no categories were recorded.
        category_filter |= ((Race.name**'%single%') & ~(Race.name**'%person%'))

    if not incremental:
        (Rank.delete().where(Rank.result_id << (Result.select(Result.id).join(
            Race, src=Result).join(Event, src=Race).where(
                Event.discipline << DISCIPLINE_MAP[upgrade_discipline]))).
         execute())
        (Quality.delete().where(Quality.race_id << (
            Race.select(Race.id).join(Event, src=Race).where(
                Event.discipline << DISCIPLINE_MAP[upgrade_discipline]))).
         execute())

    prev_race = Race()
    # Races in this discipline with no Quality row yet, oldest first so each
    # race's ranks build on the ones before it.
    races = (Race.select(Race, Event).join(Event, src=Race).where(
        Race.id.not_in(
            Quality.select(fn.DISTINCT(Race.id)).join(Race, src=Quality).join(
                Event, src=Race).where(
                    Event.discipline << DISCIPLINE_MAP[upgrade_discipline]))
    ).where(Event.discipline << DISCIPLINE_MAP[upgrade_discipline]).where(
        category_filter).order_by(Race.date.asc(), Race.created.asc()))

    for race in races:
        """
        1. From top 10 finishers, get top 5 ranked riders; average ranks and multiply by 0.9
        2. Average all ranked finishers and multiply by 0.9
        3. If 2 is less than 1, and 2 is greater the lowest rank in the top 10, then use 2 as quality.value, else use 1
        4. Store (((Average all ranked finishers) - (quality.value)) * 2) / (race.results.count() - 1) as quality.points_per_place
        5. For each result, store quality.value + ((result.place - 1) * quality.points_per_place) as rank.value
        """
        logger.info('Processing Race: [{}]{}: [{}]{} on {}'.format(
            race.event.id, race.event.name, race.id, race.name, race.date))

        # Only actual finishers count: drop DNS/DNF placings.
        results = (race.results.select().where(~(Result.place.contains('dns')))
                   .where(~(Result.place.contains('dnf'))).order_by(
                       Result.id.asc()))
        finishers = results.count()
        if finishers <= 2:
            # Too small a field to rate; record a zero-value Quality so the
            # race is not reprocessed on the next run.
            logger.debug('Insufficient finishers: {}'.format(finishers))
            Quality.create(race=race, value=0, points_per_place=0)
            continue

        # Bulk cache everyone's ranks for this date so we don't have to re-query them all one by one
        if prev_race.date != race.date:
            people = get_ranks(upgrade_discipline, race.date)

        # Pad with five 600s (presumably the unranked-default value — confirm
        # against get_ranks) so fields with fewer than five ranked riders in
        # the top 10 still average over five values.
        ranks = [600] * 5
        ranks += [people[result.person_id] for result in results.limit(10)]
        min_rank = min(ranks)
        top_average = sum(sorted(ranks)[:5]) / 5

        ranks = [people[result.person_id] for result in results]
        all_average = sum(ranks) / len(ranks)

        # Step 3 above: use the field average only when it beats the top-5
        # average AND is worse than the best rank in the top 10.
        value = (all_average if all_average < top_average
                 and all_average > min_rank else top_average) * 0.9
        per_place = ((all_average - value) * 2) / (finishers - 1)

        logger.info('\tStart/Finishers: {}/{}'.format(race.starters, finishers))
        logger.info('\tAverage of top 5: {}'.format(top_average))
        logger.info('\tAverage of field: {}'.format(all_average))
        logger.info('\tBest top 10 rank: {}'.format(min_rank))
        logger.info('\tQuality value: {}'.format(value))
        logger.info('\tPoints per Place: {}'.format(per_place))

        Quality.create(race=race, value=value, points_per_place=per_place)

        insert_ranks = []
        for zplace, result in enumerate(results):
            # zplace is the zero-based place, so this is step 5 above.
            rank = value + (zplace * per_place)
            rank = rank if rank <= 590 else 590  # cap each rank at 590
            insert_ranks.append((result, rank))
        Rank.insert_many(insert_ranks,
                         fields=[Rank.result,
                                 Rank.value]).on_conflict_replace().execute()
        prev_race = race
def get(self):
    """Return, per discipline, every rider whose latest result needs an upgrade.

    Considers results since Jan 1 of last year, keeps only each person's
    most recent result per discipline, and orders by category then points.
    """
    # Look back to the start of the previous calendar year.
    cur_year = date.today().year
    start_date = date(cur_year - 1, 1, 1)
    disciplines = []
    for upgrade_discipline in DISCIPLINE_MAP.keys():
        # Subquery to find the most recent result for each person
        latest_cte = (Result.select().join(Race, src=Result).join(
            Event, src=Race).join(
                Person, src=Result).where(Race.date >= start_date).where(
                    Race.categories.length() > 0).where(
                        Event.discipline <<
                        DISCIPLINE_MAP[upgrade_discipline]).select(
                            fn.DISTINCT(
                                fn.FIRST_VALUE(Result.id).over(
                                    partition_by=[Result.person_id],
                                    order_by=[
                                        Race.date.desc(),
                                        Race.created.desc()
                                    ],
                                    start=Window.preceding())).
                            alias('result_id')).cte('latest_results'))

        # Collapse the CTE ids into a JSON array and re-expand with
        # JSON_EACH so the outer query can filter via `Result.id << ...`.
        # NOTE(review): JSON_GROUP_ARRAY/JSON_EACH are SQLite JSON1
        # functions — presumably a SQLite backend; confirm.
        latest_results = Select(from_list=[
            fn.JSON_EACH(
                latest_cte.select_from(
                    fn.JSON_GROUP_ARRAY(
                        latest_cte.c.result_id).python_value(
                            str)).scalar())
        ],
                                columns=[Entity('value')])

        # Latest results still flagged needs_upgrade, with pending-upgrade
        # markers, OBRA snapshots, and rank/quality data left-joined in.
        query = (Result.select(
            Result, Race, Event, Person, Points, PendingUpgrade,
            ObraPersonSnapshot, Rank,
            Quality).join(Race, src=Result).join(Event, src=Race).join(
                Person, src=Result).join(Points, src=Result).join(
                    PendingUpgrade, src=Result,
                    join_type=JOIN.LEFT_OUTER).join(
                        ObraPersonSnapshot, src=PendingUpgrade,
                        join_type=JOIN.LEFT_OUTER).join(
                            Rank, src=Result,
                            join_type=JOIN.LEFT_OUTER).join(
                                Quality, src=Race,
                                join_type=JOIN.LEFT_OUTER).
                 where(Result.id << latest_results).where(
                     Points.needs_upgrade == True).order_by(
                         Points.sum_categories.asc(),
                         Points.sum_value.desc()))

        disciplines.append({
            'name': upgrade_discipline,
            # e.g. 'road_race' -> 'Road' for display.
            'display': upgrade_discipline.split('_')[0].title(),
            'results': query.prefetch(Race, Event, Person, Points,
                                      PendingUpgrade, ObraPersonSnapshot,
                                      Rank, Quality),
        })

    return ([marshal(d, discipline_results) for d in disciplines], 200, {
        'Expires': formatdate(timeval=time() + cache_timeout, usegmt=True)
    })