def last_metrics(cls):
    """Aggregate call metrics for this model over the trailing 24 hours.

    Returns None when no rows fall in the window; otherwise a dict with
    'count', 'accepted', 'rejected', 'delay', 'abandoned' and the derived
    'requests' (accepted + rejected) totals.
    """
    # arrow removed plural shifting via .replace() in 1.0; .shift() is the
    # supported equivalent.
    last_24_hours = arrow.utcnow().shift(hours=-24).datetime
    objects = cls.select().where(cls.timestamp >= last_24_hours)
    count = objects.count()
    if count == 0:
        return
    last_1_hour = arrow.utcnow().shift(hours=-1).datetime
    # BUG FIX: the aggregates must be materialized with .scalar(); the
    # original bound SelectQuery objects, so 'accepted or 0' was always
    # truthy and 'accepted + rejected' built a query expression instead of
    # a number.
    accepted = cls.select(fn.Sum(cls.accepts)).where(
        cls.timestamp >= last_24_hours).scalar()
    rejected = cls.select(fn.Sum(cls.rejects)).where(
        cls.timestamp >= last_24_hours).scalar()
    delay = cls.select(fn.Avg(cls.delay)).where(
        cls.timestamp >= last_24_hours,
        cls.accepts >= 0,
        cls.delay >= 0).scalar()
    metrics = {
        'count': count,
        'accepted': accepted or 0,
        'rejected': rejected or 0,
        'delay': delay or 0.0,
        # Rows older than one hour that never registered an accept.
        'abandoned': objects.filter(
            cls.accepts == 0, cls.timestamp <= last_1_hour).count(),
    }
    metrics['requests'] = metrics['accepted'] + metrics['rejected']
    return metrics
def sLookUpBalance(self, email):
    """Return the integer point balance for *email*.

    Balance = deposits + received transfers - withdrawals - sent transfers.
    """
    credit_queries = (
        DepositPoint.select(fn.Sum(DepositPoint.val).alias('total'))
        .where(DepositPoint.user_email == email),
        TransferPoint.select(fn.Sum(TransferPoint.val).alias('total'))
        .where(TransferPoint.receiver_email == email),
    )
    debit_queries = (
        WithdrawPoint.select(fn.Sum(WithdrawPoint.val).alias('total'))
        .where(WithdrawPoint.user_email == email),
        TransferPoint.select(fn.Sum(TransferPoint.val).alias('total'))
        .where(TransferPoint.sender_email == email),
    )
    balance = 0
    # Each aggregate yields a single row whose .total is NULL (None) when
    # there are no matching records.
    for query in credit_queries:
        for row in query:
            if row.total is not None:
                balance += row.total
    for query in debit_queries:
        for row in query:
            if row.total is not None:
                balance -= row.total
    return int(balance)
def month_overview():
    """Print an income / expenses / left summary for one month."""
    month = get_one_month()
    year = get_one_year()
    month_filter = (Balance.date.month == int(month),
                    Balance.date.year == int(year))
    # Net amount for the month (income entries positive, expenses negative).
    msum = (Balance
            .select(fn.Sum(Balance.amount))
            .where(*month_filter)
            .scalar())
    msum_in = (Balance
               .select(fn.Sum(Balance.amount))
               .where(*month_filter, Balance.tag == 'income')
               .scalar())
    msum_out = msum_in - msum
    print('--- {}/{} ---'.format(month, year))
    print('Income = {}'.format(msum_in))
    print('Expenses = {}'.format(msum_out))
    print('Left = {}'.format(msum))
def topContactsData(start, end):
    """Return the top 10 contacts by total booking spend within [start, end]."""
    booked_in_range = (Booking.startTime >= start) & (Booking.startTime <= end)
    query = (Contact
             .select(Contact,
                     fn.Sum(Booking.finalPrice).alias('TotalSpend'),
                     fn.Sum(Booking.catereringTotal).alias('TotalCaterering'),
                     fn.Sum(Booking.roomTotal).alias('TotalRooms'),
                     fn.Sum(Booking.discountTotal).alias('TotalDiscounts'))
             .join(Booking,
                   on=Booking.contact_id == Contact.id,
                   join_type=JOIN.LEFT_OUTER)
             .where(booked_in_range)
             .group_by(Contact)
             .order_by(fn.Sum(Booking.finalPrice).desc())
             .limit(10))
    return query
def calc_hangman(games, user):
    """Populate HANGMAN_STATS from the *games* query for the given *user*.

    ``games`` is a peewee query over Games rows; ``user.id`` identifies
    wins via Games.winner.
    NOTE(review): ``.execute().next()`` is the Python 2 iterator protocol —
    confirm this codebase targets Python 2 (Python 3 would need ``next(...)``).
    """
    HANGMAN_STATS.games = games.count()
    HANGMAN_STATS.wins = games.where(Games.winner == user.id).count()
    # Aggregates fall back to the string '0' when the Sum is NULL/empty
    # (presumably these stats are rendered as text; verify at the caller).
    HANGMAN_STATS.total_guesses = games.select(
        fn.Sum(Games.guesses_correct + Games.guesses_incorrect).alias(
            'total')).execute().next().total or '0'
    HANGMAN_STATS.total_guesses_c = games.select(
        fn.Sum(Games.guesses_correct)).execute().next(
        ).guesses_correct or '0'
    HANGMAN_STATS.total_guesses_i = games.select(
        fn.Sum(Games.guesses_incorrect)).execute().next(
        ).guesses_incorrect or '0'
    # Win ratio rounded to 3 places; '0' when no games were played
    # (guards the division by HANGMAN_STATS.games).
    HANGMAN_STATS.wl = str(
        round(HANGMAN_STATS.wins / HANGMAN_STATS.games,
              3)) if HANGMAN_STATS.games > 0 else '0'
def get_transaction_list_details(transaction_list):
    """Return complete data set on a specified transaction.

    Builds one aggregate row per Files.transaction in *transaction_list*
    (joined to Transactions) and returns a dict keyed by the stringified
    upload id, with dates formatted for display.
    """
    query = (Files().select(
        Files.transaction.alias('upload_id'),
        fn.Max(Transactions.updated).alias('upload_date'),
        fn.Min(Files.mtime).alias('file_date_start'),
        fn.Max(Files.mtime).alias('file_date_end'),
        # Min is used only to pick a representative value per group for
        # columns that are constant within a transaction.
        fn.Min(Transactions.submitter).alias('uploaded_by_id'),
        fn.Sum(Files.size).alias('bundle_size'),
        fn.Count(Files.id).alias('file_count'),
        fn.Min(Transactions.updated).alias('upload_datetime'),
        fn.Min(Transactions.proposal).alias('proposal_id'),
        fn.Min(Transactions.instrument).alias('instrument_id')).join(
            Transactions).where(
                Files.transaction << transaction_list).group_by(
                    Files.transaction))
    return {
        str(r['upload_id']): {
            'upload_id': str(r['upload_id']),
            'upload_date': r['upload_date'].date().strftime('%Y-%m-%d'),
            'file_date_start':
                r['file_date_start'].date().strftime('%Y-%m-%d'),
            'file_date_end': r['file_date_end'].date().strftime('%Y-%m-%d'),
            'uploaded_by_id': int(r['uploaded_by_id']),
            'bundle_size': int(r['bundle_size']),
            'file_count': int(r['file_count']),
            'upload_datetime':
                r['upload_date'].strftime('%Y-%m-%d %H:%M:%S'),
            'proposal_id': r['proposal_id'],
            'instrument_id': r['instrument_id']
        } for r in query.dicts()
    }
def force_cache_repo_size(repo_id: int):
    """Recompute a repository's total compressed layer size and persist it.

    Sums Manifest.layers_compressed_size for *repo_id*, then writes the
    result to RepositorySize (update if a row exists, create otherwise).
    Returns the size in bytes (0 when nothing is found).
    """
    try:
        computed = (Manifest.select(
            fn.Sum(Manifest.layers_compressed_size).alias("size_bytes")).where(
                Manifest.repository == repo_id)).scalar()
    except Manifest.DoesNotExist:
        computed = 0
    size = computed if computed is not None else 0
    with db_transaction():
        repo_size_ref = get_repository_size(repo_id)
        try:
            if repo_size_ref is None:
                RepositorySize.create(repository_id=repo_id, size_bytes=size)
            else:
                (RepositorySize
                 .update(size_bytes=size)
                 .where(RepositorySize.repository_id == repo_id)
                 .execute())
        except IntegrityError:
            # Another worker may have created the row concurrently; if so
            # it already holds the correct value, so just log and return.
            logger.warning("RepositorySize for repo id %s already exists", repo_id)
            return size
    return size
def test_get_picks_sum(targets, sources, nodes, nodes_query):
    """A 'sum' pick should select the field wrapped in fn.Sum and execute once."""
    sources[0]['picks'] = {'field': 'sum'}
    targets.get(sources)
    expected_column = fn.Sum(nodes.field)
    nodes.select.assert_called_with(expected_column)
    assert nodes_query.join().execute.call_count == 1
def _count_subquery(rh_account_id):
    """Build the per-CVE aggregate subquery for one account.

    For each cve_id it counts affected systems and, via a Case over the
    left-joined CveAccountData, how many systems diverge from the
    account-level CVE status. Systems that are opted out, stale or deleted
    are excluded by the SystemPlatform join condition.
    """
    # pylint: disable=singleton-comparison
    return (SystemVulnerabilities.select(
        SystemVulnerabilities.cve_id.alias("cve_id_"),
        fn.Count(SystemVulnerabilities.id).alias("systems_affected_"),
        # 1 per row whose system-level status differs from the account-level
        # status, 0 otherwise; NULL CveAccountData rows compare as not-equal
        # per SQL semantics — presumably intended, verify against reports.
        fn.Sum(
            Case(None, ((SystemVulnerabilities.status_id != CveAccountData.status_id, 1), ),
                 0)).alias("systems_status_divergent_")
    ).join(
        SystemPlatform,
        on=((SystemVulnerabilities.system_id == SystemPlatform.id) &
            (SystemPlatform.rh_account_id == rh_account_id) &
            (SystemPlatform.opt_out == False) &  # noqa: E712
            (SystemPlatform.stale == False) &  # noqa: E712
            (SystemPlatform.when_deleted.is_null(True)))).join(
                CveAccountData,
                JOIN.LEFT_OUTER,
                on=((SystemVulnerabilities.cve_id == CveAccountData.cve_id) &
                    (CveAccountData.rh_account_id == rh_account_id))).
            where(SystemVulnerabilities.rh_account_id == rh_account_id).
            # Keep rows with no mitigation reason, unless mitigated by an
            # inactive non-rule-only Insights rule.
            where((SystemVulnerabilities.mitigation_reason.is_null(True)) |
                  (SystemVulnerabilities.rule_id << InsightsRule.select(
                      InsightsRule.id).where((InsightsRule.active == False) &
                                             (~InsightsRule.rule_only))))
            # Keep unmitigated rows, unless mitigated by an active
            # non-rule-only Insights rule.
            .where((SystemVulnerabilities.when_mitigated.is_null(True)) |
                   (SystemVulnerabilities.rule_id << InsightsRule.select(
                       InsightsRule.id).where(
                           (InsightsRule.active == True) &
                           (~InsightsRule.rule_only)))).group_by(
                               SystemVulnerabilities.cve_id))
def report():
    """Render the donations report: count, total and average per donor."""
    per_donor = (Donation
                 .select(Donation.donor,
                         fn.Count(Donation.value).alias('count'),
                         fn.Sum(Donation.value).alias('total'),
                         fn.Avg(Donation.value).alias('average'))
                 .group_by(Donation.donor))
    return render_template('report.jinja2', donations=per_donor)
def _get_transaction_info_blocks(transaction_list, option='details'):
    """Assemble per-transaction info blocks for the given transaction ids.

    Returns a dict with two maps: 'transactions' (id -> block with size,
    count, status, metadata and key/value pairs) and 'times'
    (updated-timestamp -> transaction id). *option* is forwarded to
    QueryBase._get_base_transaction_metadata.
    """
    transactions = (Transactions
                    .select(
                        Transactions,
                        fn.Sum(Files.size).alias('file_size_bytes'),
                        fn.Count(Files.id).alias('file_count')
                    )
                    .join(Files, JOIN.LEFT_OUTER)
                    .group_by(Transactions)
                    .where(Transactions.id << transaction_list))
    transaction_results = {'transactions': {}, 'times': {}}
    for trans in transactions:
        kv_list = {}
        entry = trans.to_hash()
        metadata = QueryBase._get_base_transaction_metadata(entry, option)
        transaction = {}
        kvs = QueryBase._get_transaction_key_values(trans.id)
        for key_value in kvs:
            kv_list.update({key_value['key']: key_value['value']})
        # Aggregates are NULL (None) when the LEFT OUTER join matched no
        # files; coerce to 0.
        transaction['file_size_bytes'] = int(
            trans.file_size_bytes) if trans.file_size_bytes is not None else 0
        transaction['file_count'] = int(
            trans.file_count) if trans.file_count is not None else 0
        # Hard-coded "verified / success" status block (step 6).
        transaction['status'] = {
            'trans_id': trans.id,
            'person_id': trans.submitter_id,
            'step': 6,
            'message': 'verified',
            'status': 'success'
        }
        transaction['metadata'] = metadata
        transaction['kv_pairs'] = kv_list
        transaction_results['transactions'][trans.id] = transaction
        transaction_results['times'][entry.get('updated')] = trans.id
    return transaction_results
def get_freetimes_total(self):
    """Return the sum of free_times over all members of this team (0 if none)."""
    total = (TeamMember
             .select(fn.Sum(TeamMember.free_times).alias("sum"))
             .where(TeamMember.team == self.id)
             .scalar())
    return total or 0
def project(name, region=None):
    """Render the project page with validation statistics.

    Counts features, validated features (>=1 and >=2 validations),
    corrected and skipped features, optionally narrowed to *region*, and
    builds a per-region breakdown when the project is regional.
    """
    project = Project.get(Project.name == name)
    desc = project.description.replace('\n', '<br>')
    cnt = Feature.select(Feature.id).where(Feature.project == project)
    val1 = Feature.select(Feature.id).where(Feature.project == project,
                                            Feature.validates_count > 0)
    val2 = Feature.select(Feature.id).where(Feature.project == project,
                                            Feature.validates_count >= 2)
    corrected = Feature.select(Feature.id).where(Feature.project == project,
                                                 Feature.audit.is_null(False),
                                                 Feature.audit != '')
    skipped = Feature.select(Feature.id).where(
        Feature.project == project, Feature.audit.contains('"skip": true'))
    # Narrow every count to the requested region, if any.
    if region is not None:
        val1 = val1.where(Feature.region == region)
        val2 = val2.where(Feature.region == region)
        cnt = cnt.where(Feature.region == region)
        corrected = corrected.where(Feature.region == region)
        skipped = skipped.where(Feature.region == region)
    # When validating only modified features, restrict to action == 'm'.
    if project.validate_modified:
        val1 = val1.where(Feature.action == 'm')
        val2 = val2.where(Feature.action == 'm')
        cnt = cnt.where(Feature.action == 'm')
    regions = []
    if project.regional:
        # Per-region (total, validated) tuples; fn.Min(validates_count, 1)
        # clamps each feature's contribution to 0/1 before summing.
        regions = Feature.select(
            Feature.region,
            fn.Count(),
            # fn.Sum(Case(None, [(Feature.validates_count >= 1, 1)], 0))).where(
            fn.Sum(fn.Min(Feature.validates_count, 1)
                   )).where(Feature.project == project).group_by(
                       Feature.region).order_by(Feature.region).tuples()
        if len(regions) == 1:
            regions = []
        else:
            # Prepend an "all regions" summary row.
            regions = [(None, cnt.count(), val1.count())] + list(regions)
    user = get_user()
    if user:
        has_skipped = Task.select().join(Feature).where(
            Task.user == user, Task.skipped == True,
            Feature.project == project).count() > 0
    else:
        has_skipped = False
    return render_template('project.html', project=project,
                           admin=is_admin(user, project), count=cnt.count(),
                           desc=desc, val1=val1.count(), val2=val2.count(),
                           corrected=corrected.count(),
                           skipped=skipped.count(), has_skipped=has_skipped,
                           region=region, regions=regions)
def sum_last_week(cls):
    """Return daily order totals for the past 7 days keyed by 'dd/mm/YYYY'."""
    day = fn.date(Pedido.dt_pedido)
    cutoff = datetime.now() - timedelta(days=7)
    rows = (Pedido
            .select(day.alias('date'), fn.Sum(Pedido.vr_pedido).alias("total"))
            .where(Pedido.dt_pedido >= cutoff)
            .group_by(day)
            .order_by(day)
            .execute())
    return {row.date.strftime("%d/%m/%Y"): row.total for row in rows}
def get_tag_sum(tags):
    """Return the total Balance.amount across all of the given *tags*.

    Fix: fn.Sum(...).scalar() yields None for a tag with no matching rows;
    the original then crashed with ``int + None``. Skip None results, the
    same guard get_month_year_sum() already applies.
    """
    tag_sum = 0
    for tag in tags:
        tsum = (Balance
                .select(fn.Sum(Balance.amount))
                .where(Balance.tag == tag)
                .scalar())
        if tsum is not None:
            tag_sum += tsum
    return tag_sum
def _get_transaction_entries(transaction_list):
    """Select TransSIP rows plus file size/count aggregates for the given ids."""
    # pylint: disable=no-member
    size_column = fn.Sum(Files.size).alias('file_size_bytes')
    count_column = fn.Count(Files.id).alias('file_count')
    transactions = (TransSIP
                    .select(TransSIP, size_column, count_column)
                    .join(Files, JOIN.LEFT_OUTER,
                          on=(TransSIP.id == Files.transaction))
                    .group_by(TransSIP)
                    .where(TransSIP.id << transaction_list))
    # pylint: enable=no-member
    return transactions
def left_in_exercise(cls, exercise: Exercise) -> int:
    """Percentage of active solutions for *exercise* already checked (DONE).

    Fixes two empty-set crashes: with zero submissions the division raised
    ZeroDivisionError, and fn.Sum over zero rows yields None (TypeError on
    ``None * 100``). Both cases now return 0.
    """
    one_if_is_checked = Case(Solution.state,
                             ((Solution.STATES.DONE.name, 1), ), 0)
    active_solutions = cls.state.in_(Solution.STATES.active_solutions())
    response = cls.filter(
        cls.exercise == exercise,
        active_solutions,
    ).select(
        fn.Count(cls.id).alias('submitted'),
        fn.Sum(one_if_is_checked).alias('checked'),
    ).dicts().get()
    submitted = response['submitted'] or 0
    checked = response['checked'] or 0
    if submitted == 0:
        return 0
    return int(checked * 100 / submitted)
def year_overview():
    """Print an income / expenses / left summary for one year."""
    year = get_one_year()
    # Net amount for the year (income entries positive, expenses negative).
    ysum = (Balance
            .select(fn.Sum(Balance.amount))
            .where(Balance.date.year == int(year))
            .scalar())
    ysum_in = (Balance
               .select(fn.Sum(Balance.amount))
               .where(Balance.date.year == int(year),
                      Balance.tag == 'income')
               .scalar())
    ysum_out = ysum_in - ysum
    print('--- {} ---'.format(year))
    print('Income = {}'.format(ysum_in))
    print('Expenses = {}'.format(ysum_out))
    print('Left = {}'.format(ysum))
def get_month_year_sum(years, months):
    """Total Balance.amount over every (year, month) combination.

    Months with no matching rows (scalar() returns None) are skipped.
    """
    total = 0
    for year in years:
        for month in months:
            partial = (Balance
                       .select(fn.Sum(Balance.amount))
                       .where(Balance.date.month == int(month),
                              Balance.date.year == int(year))
                       .scalar())
            if partial is not None:
                total += partial
    return total
def fetch_data_by_path(cls, project_access_key_id='', site_id=0):
    """Fetch paragraph counts and total_count sums grouped by path.

    Rows are restricted to subject_type 'paragraph' for the given project
    access key and site, ordered by path ascending.
    """
    grouping = (cls.project_id, cls.site_id, cls.path)
    return (cls
            .select(cls.project_id,
                    cls.site_id,
                    cls.path,
                    fn.Count(cls.subject_type).alias('paragraph_numbers'),
                    fn.Sum(cls.total_count).alias('total_count'))
            .where(
                # project_id holds the access key, not the database id
                cls.project_id == project_access_key_id,
                cls.site_id == site_id,
                cls.subject_type == 'paragraph')
            .group_by(*grouping)
            .order_by(cls.path.asc()))
def sLookUpBalanceForADay(self, email):
    """Return points *email* received via transfers in the last 24 hours."""
    now = datetime.datetime.now()
    # Same instant the original built field-by-field: "now" with the
    # microseconds dropped, minus one day.
    from_day = now.replace(microsecond=0) - datetime.timedelta(days=1)
    received = (TransferPoint
                .select(fn.Sum(TransferPoint.val).alias('total'))
                .where((TransferPoint.receiver_email == email) &
                       (TransferPoint.created_at >= from_day)))
    balance = 0
    for row in received:
        if row.total is not None:
            balance += row.total
    return int(balance)
def get_transaction_list_details(transaction_list): """Return complete data set on a specified transaction.""" # pylint: disable=no-member query = (Files().select( Files.transaction.alias('upload_id'), fn.Max(TransSIP.updated).alias('upload_date'), fn.Min(Files.mtime).alias('file_date_start'), fn.Max(Files.mtime).alias('file_date_end'), fn.Min(TransSIP.submitter).alias('uploaded_by_id'), fn.Sum(Files.size).alias('bundle_size'), fn.Count(Files.id).alias('file_count'), fn.Min(TransSIP.updated).alias('upload_datetime'), fn.Min(TransSIP.project).alias('project_id'), fn.Min(TransSIP.instrument).alias('instrument_id')).join( TransSIP, on=(TransSIP.id == Files.transaction )).where(Files.transaction << transaction_list).group_by( Files.transaction)) # pylint: enable=no-member return { str(r['upload_id']): { 'upload_id': str(r['upload_id']), 'upload_date': r['upload_date'].date().strftime('%Y-%m-%d'), 'file_date_start': SummarizeByDate.utc_to_local( r['file_date_start']).date().strftime('%Y-%m-%d'), 'file_date_end': SummarizeByDate.utc_to_local( r['file_date_end']).date().strftime('%Y-%m-%d'), 'uploaded_by_id': int(r['uploaded_by_id']), 'bundle_size': int(r['bundle_size']), 'file_count': int(r['file_count']), 'upload_datetime': SummarizeByDate.utc_to_local( r['upload_date']).strftime('%Y-%m-%d %H:%M:%S'), 'project_id': r['project_id'], 'instrument_id': r['instrument_id'] } for r in query.dicts() }
async def index_view(request):
    """Render the admin panel index with user/node/traffic statistics."""
    user = request['user']
    total_user = await User.objects.count(User.select())
    checkin_user = await User.objects.count(
        User.select().where(User.last_check_in_time > 0))
    # NOTE(review): scalar may be None on an empty User table — presumably
    # flow_auto_show tolerates that; confirm.
    traffic = await User.objects.scalar(User.select(fn.Sum(User.u + User.d)))
    online_user = await User.objects.count(
        User.select().where(User.t > 3600))
    total_node = await SS_Node.objects.count(SS_Node.select())
    analytics = {
        'total_user': total_user,
        'checkin_user': checkin_user,
        'traffic_usage': tools.flow_auto_show(traffic),
        'online_user': online_user,
        'total_node': total_node,
    }
    return render('admin_panel/index.html', request, user=user, **analytics)
def report():
    """Render the donor report: donation count and sum per donor.

    LEFT OUTER join keeps donors with no donations in the result.
    """
    per_donor = (Donor
                 .select(Donor,
                         fn.Count(Donation.value).alias('count'),
                         fn.Sum(Donation.value).alias('sum'))
                 .join(Donation, JOIN.LEFT_OUTER)
                 .group_by(Donor))
    current_user = session.get('username', '')
    return render_template(
        'report.jinja2',
        donations=per_donor,
        request=request,
        username=current_user,
    )
def status():
    """Render per-exercise submitted/checked counts for non-archived exercises."""
    # 1 for each checked solution, 0 otherwise, so Sum yields the count.
    checked_case = Case(Solution.is_checked, ((True, 1), ), 0)
    columns = [
        Exercise.id,
        Exercise.subject.alias('name'),
        fn.Count(Solution.id).alias('submitted'),
        fn.Sum(checked_case).alias('checked'),
    ]
    solutions = (
        Exercise
        .select(*columns)
        .join(Solution, 'LEFT OUTER', on=(Solution.exercise == Exercise.id))
        .where(Exercise.is_archived == False)  # NOQA: E712
        .group_by(Exercise.subject, Exercise.id)
        .order_by(Exercise.id))
    return render_template(
        'status.html',
        exercises=solutions,
    )
def _get_matching_users(username_prefix, robot_namespace=None, organization=None, limit=20,
                        exact_matches_only=False):
    """Yield up to *limit* users matching *username_prefix*.

    Optionally includes robots under *robot_namespace* and, when
    *organization* is given, annotates each result with org membership.

    Fix: is_org_member previously tested args[3] — the robot flag, which is
    never None — so it was always True. Membership is indicated by the
    fifth selected column, fn.Sum(Team.id), which is NULL (None) for users
    in none of the organization's teams.
    """
    user_search = prefix_search(User.username, username_prefix)
    if exact_matches_only:
        user_search = User.username == username_prefix
    # Plain (non-org, non-robot) users matching the prefix.
    direct_user_query = user_search & (User.organization == False) & (User.robot == False)
    if robot_namespace:
        # Also match robots named "<namespace>+<prefix>...".
        robot_prefix = format_robot_username(robot_namespace, username_prefix)
        robot_search = prefix_search(User.username, robot_prefix)
        direct_user_query = (robot_search & (User.robot == True)) | direct_user_query
    query = (User.select(User.id, User.username, User.email, User.robot).group_by(
        User.id, User.username, User.email, User.robot).where(direct_user_query))
    if organization:
        # Add a membership marker column: Sum(Team.id) is non-NULL only when
        # the user belongs to at least one team of the organization.
        query = (query.select(User.id, User.username, User.email, User.robot,
                              fn.Sum(Team.id)).join(
                                  TeamMember, JOIN.LEFT_OUTER).join(
                                      Team,
                                      JOIN.LEFT_OUTER,
                                      on=((Team.id == TeamMember.team) &
                                          (Team.organization == organization)),
                                  ).order_by(User.robot.desc()))

    class MatchingUserResult(object):
        def __init__(self, *args):
            self.id = args[0]
            self.username = args[1]
            self.email = args[2]
            self.robot = args[3]
            if organization:
                # args[4] is the fn.Sum(Team.id) column selected above.
                self.is_org_member = args[4] is not None
            else:
                self.is_org_member = None

    return (MatchingUserResult(*args) for args in query.tuples().limit(limit))
def status(cls):
    """Per-exercise counts of active submissions and checked (DONE) ones."""
    # 1 per solution in the DONE state, 0 otherwise, so Sum counts them.
    checked_case = Case(Solution.state,
                        ((Solution.STATES.DONE.name, 1), ), 0)
    columns = [
        Exercise.id,
        Exercise.subject.alias('name'),
        Exercise.is_archived.alias('is_archived'),
        fn.Count(Solution.id).alias('submitted'),
        fn.Sum(checked_case).alias('checked'),
    ]
    on_exercise = (Solution.exercise == Exercise.id)
    is_active = Solution.state.in_(Solution.STATES.active_solutions())
    return (Exercise
            .select(*columns)
            .join(Solution, 'LEFT OUTER', on=on_exercise)
            .where(is_active)
            .group_by(Exercise.subject, Exercise.id)
            .order_by(Exercise.id))
def get_repositories_action_sums(repository_ids):
    """
    Returns a map from repository ID to total actions within that repository in the last
    week.
    """
    if not repository_ids:
        return {}

    # Filter the join to recent entries only.
    last_week = datetime.now() - timedelta(weeks=1)
    rows = (RepositoryActionCount
            .select(RepositoryActionCount.repository,
                    fn.Sum(RepositoryActionCount.count))
            .where(RepositoryActionCount.repository << repository_ids)
            .where(RepositoryActionCount.date >= last_week)
            .group_by(RepositoryActionCount.repository)
            .tuples())
    return {repo_id: total for repo_id, total in rows}
def get_size_during_upload(repo_id: int):
    """Return the repository size plus bytes of in-flight blob uploads."""
    # TODO: Make this one trip to the db instead of 2?
    size = get_repository_size_and_cache(repo_id)
    upload_row = (BlobUpload
                  .select(fn.Sum(BlobUpload.byte_count).alias("size_bytes"))
                  .where(BlobUpload.repository_id == repo_id)).get()
    repo_size = size["repository_size"]
    if repo_size is None:
        repo_size = 0
    pending_bytes = upload_row.size_bytes
    if pending_bytes is None:
        pending_bytes = 0
    return repo_size + pending_bytes
def recalc_tags_for_area(self, area):
    """Recompute and store the most popular (non-ignored) hashtag for *area*."""
    ignore = self.common_ignore + [t.tag for t in area.location.ignore_list]
    hf_sum = fn.Sum(HashtagFrequency.count)
    ranked = (Hashtag
              .select(Hashtag, hf_sum.alias('sum'))
              .join(HashtagFrequency)
              .join(TagsOfAreaInHour)
              .where(TagsOfAreaInHour.area == area,
                     ~(Hashtag.name << ignore))
              .group_by(Hashtag)
              .order_by(hf_sum.desc()))
    if ranked.count() > 0:
        top = ranked.get()
        area.most_popular_tag_name = top.name
        area.most_popular_tag_count = top.sum
    else:
        # No qualifying tags: clear the cached values.
        area.most_popular_tag_name = None
        area.most_popular_tag_count = None
    area.save()
    self.logger.debug("Area {0} of {1} recalculated".format(area.id, area.location.name))