Example #1
    def __init__(self, group_strategy, timeseries_unit='day', date_filter_attributes=None):
        self.filterable_attributes = [DATE, CUSTOM_ATTRIBUTE, TRANSFER_ACCOUNT, USER]
        self.timeseries_unit = timeseries_unit
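        # date_filter_attributes is expected to map a model class to the date
        # column used for time-series bucketing (e.g. {User: User.created})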
        self.date_filter_attributes = date_filter_attributes
        self.metrics = []

        # Special-case query: used only to calculate the grouped per_user metrics
        if group_strategy:
            total_users_grouped_timeseries_query = db.session.query(
                    func.count(User.id).label('volume'),
                    func.date_trunc(self.timeseries_unit, self.date_filter_attributes[User]).label('date'),
                    group_strategy.group_by_column)\
                .group_by(func.date_trunc(self.timeseries_unit, self.date_filter_attributes[User]))
            self.total_users_grouped_timeseries = metric.Metric(
                metric_name='total_population_grouped',
                query=group_strategy.build_query_group_by_with_join(total_users_grouped_timeseries_query, User),
                object_model=User,
                stock_filters=[],
                timeseries_caching_combinatory_strategy=metrics_cache.SUM_OBJECTS,
                caching_combinatory_strategy=metrics_cache.QUERY_ALL,
                filterable_by=self.filterable_attributes,
                query_actions=[ADD_MISSING_DAYS_TO_TODAY, ACCUMULATE_TIMESERIES])

        # Special-case query: used only to calculate the ungrouped per_user metrics
        total_users_timeseries_query = db.session.query(
                func.count(User.id).label('volume'),
                func.date_trunc(self.timeseries_unit, self.date_filter_attributes[User]).label('date'))\
            .group_by(func.date_trunc(self.timeseries_unit, self.date_filter_attributes[User]))
        self.total_users_timeseries = metric.Metric(
            metric_name='total_population',
            query=total_users_timeseries_query,
            object_model=User,
            stock_filters=[],
            timeseries_caching_combinatory_strategy=metrics_cache.SUM_OBJECTS,
            caching_combinatory_strategy=metrics_cache.QUERY_ALL,
            filterable_by=self.filterable_attributes,
            query_actions=[ADD_MISSING_DAYS_TO_TODAY, ACCUMULATE_TIMESERIES])
Example #2
def get_cash_flows():
    date_range_filter_schema = DateRangeFilterSchema().load(request.args)
    if date_range_filter_schema.errors:
        return {'errors': date_range_filter_schema.errors}, 400

    cash_flow_schema = CashFlowSchema()

    amounts = db.session.query(
        func.sum(Record.amount).label("cash_flow"),
        func.sum(
            case([(Record.record_type == Record.RECORD_TYPE_INCOME, Record.amount)], else_=0)
        ).label('income'),
        func.sum(
            case([(Record.record_type == Record.RECORD_TYPE_EXPENSE, Record.amount)], else_=0)
        ).label('expense'),
        func.date_trunc('month', Record.date).label("date"),
    ).group_by(
        func.date_trunc('month', Record.date)
    ).order_by(
        func.date_trunc('month', Record.date)
    )

    if 'date_from' in date_range_filter_schema.data:
        amounts = amounts.filter(Record.date >= date_range_filter_schema.data['date_from'])

    if 'date_to' in date_range_filter_schema.data:
        amounts = amounts.filter(Record.date < date_range_filter_schema.data['date_to'])

    return {'objects': cash_flow_schema.dump(amounts, many=True).data}
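A minimal, self-contained variant of the monthly-bucket pattern above (hypothetical records table; assumes SQLAlchemy 1.4+ and the PostgreSQL dialect, since date_trunc is PostgreSQL-specific). Compiling the statement prints the generated SQL without needing a live database:

from sqlalchemy import Column, Date, Integer, MetaData, Numeric, Table, func, select
from sqlalchemy.dialects import postgresql

metadata = MetaData()
records = Table(
    "records", metadata,
    Column("id", Integer, primary_key=True),
    Column("amount", Numeric),
    Column("date", Date),
)

# Bucket rows by calendar month and sum the amounts per bucket.
month = func.date_trunc("month", records.c.date).label("month")
stmt = select(func.sum(records.c.amount), month).group_by(month).order_by(month)
print(stmt.compile(dialect=postgresql.dialect()))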
Example #3
File: __init__.py  Project: xxguo/leopard
    def _investments(self):
        today_investments = Investment.query.filter(
            cast(Investment.added_at, Date) == date.today(),
            Investment.status.in_(
                (get_enum('INVESTMENT_PENDING'),
                 get_enum('INVESTMENT_SUCCESSED'))
            )
        ).order_by("added_at desc").limit(10)

        history_investments = db_session.query(
            func.date_trunc('day', Investment.added_at),
            func.sum(Investment.amount)).group_by(
            func.date_trunc('day', Investment.added_at)
        ).order_by(func.date_trunc('day', Investment.added_at)).all()

        total_investments = db_session.query(
            func.sum(Investment.amount)).scalar()

        today_invest_amount = db_session.query(
            func.sum(Investment.amount)).filter(
            cast(Investment.added_at, Date) == date.today(),
            Investment.status.in_(
                (get_enum('INVESTMENT_PENDING'),
                 get_enum('INVESTMENT_SUCCESSED'))
            )
        ).scalar()
        if not today_invest_amount:
            today_invest_amount = 0

        app.jinja_env.globals['today_invest_amount'] = today_invest_amount
        app.jinja_env.globals['today_investments'] = today_investments
        app.jinja_env.globals['total_investments'] = total_investments
        app.jinja_env.globals['history_investments'] = history_investments
Example #4
def author(mit_id, conn):
    """
    Returns an author object for insertion into mongo summary collection.

    The format is as follows:
        {"_id": {"name": <name>, "mitid": <mitid>},
         "type": "author",
         "size": <num docs>,
         "downloads": <num downloads>,
         "countries": [
            {"country": <3 ltr code>, "downloads": <num downloads>},...
         ]
         "dates": [
            {"date": <YYYY-MM-DD>, "downloads": <num>},...
         ]}
    """

    requests_to_authors = requests.join(documents)\
                                  .join(documents_authors)\
                                  .join(authors)

    totals = select([
                authors.c.mit_id,
                authors.c.name,
                select([func.count()])
                    .select_from(documents_authors.join(authors))
                    .where(authors.c.mit_id==bindparam('mit_id'))
                    .label('size'),
                select([func.count()])
                    .select_from(requests_to_authors)
                    .where(authors.c.mit_id==bindparam('mit_id'))
                    .label('downloads')
                ])\
             .where(authors.c.mit_id==bindparam('mit_id'))
    countries = select([requests.c.country, func.count().label('downloads')])\
                .select_from(requests_to_authors)\
                .where(authors.c.mit_id==bindparam('mit_id'))\
                .group_by(requests.c.country)
    dates = select([
                func.date_trunc('day', requests.c.datetime).label('date'),
                func.count().label('downloads')])\
            .select_from(requests_to_authors)\
            .where(authors.c.mit_id==bindparam('mit_id'))\
            .group_by(func.date_trunc('day', requests.c.datetime))

    author_obj = {'type': 'author'}
    res = conn.execute(totals, mit_id=mit_id).first()
    author_obj['_id'] = {'name': res['name'], 'mitid': res['mit_id']}
    author_obj['size'] = res['size']
    author_obj['downloads'] = res['downloads']
    res = conn.execute(countries, mit_id=mit_id)
    for row in res:
        author_obj.setdefault('countries', [])\
            .append({'country': row['country'], 'downloads': row['downloads']})
    res = conn.execute(dates, mit_id=mit_id)
    for row in res:
        author_obj.setdefault('dates', [])\
            .append({'date': row['date'].strftime('%Y-%m-%d'),
                     'downloads': row['downloads']})
    return author_obj
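The bindparam('mit_id') pattern above lets one named parameter drive the outer query and both correlated subqueries, bound once per conn.execute(totals, mit_id=...) call. A stripped-down sketch of the same idea (hypothetical single-table schema; SQLAlchemy 1.x-style select([...]) as in the example):

from sqlalchemy import Column, MetaData, String, Table, bindparam, select

metadata = MetaData()
authors_t = Table(
    "authors", metadata,
    Column("mit_id", String, primary_key=True),
    Column("name", String),
)

# The named parameter is a placeholder; the value is supplied at execution
# time, e.g. conn.execute(stmt, mit_id="12345").first()
stmt = select([authors_t.c.name]).where(authors_t.c.mit_id == bindparam("mit_id"))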
Example #5
 def make_filter(cls, field, ftype, value):
     filter = None
     if ftype == 'IN':
         filter = field.in_([v for v in value if v])
     elif ftype == 'date_gt':
         filter = field >  value
     elif ftype == 'date_gte':
         filter = field >= value
     elif ftype == 'date_gt_now_less':
         qty, granularity = value.split(" ")
         filter = field > func.date_trunc(granularity, func.now() - cast(value, Interval()))
     elif ftype == 'date_lt_now_less':
         qty, granularity = value.split(" ")
         filter = field < func.date_trunc(granularity, func.now() - cast(value, Interval()))
     elif ftype == 'date_x_last_n':
         qty, granularity, count_current_period = value.split(" ")
         filter = (
             field > func.date_trunc(granularity, func.now() - cast("%s %s" % (qty, granularity), Interval())),
             field < func.date_trunc(granularity, func.now() - cast('0', Interval())),
         )
         if count_current_period == 'on':
             filter = filter[0]
     elif ftype == 'date_month_ne':
         filter = extract('month', field) != value
     elif ftype == 'date_month_gt':
         filter = extract('month', field) > value
     elif ftype == 'date_month_lt':
         filter = extract('month', field) < value
     elif ftype == 'date_month_eq':
         filter = extract('month', field) == value
     elif ftype == 'date_hour_ne':
         filter = extract('hour', field) != value
     elif ftype == 'date_hour_gt':
         filter = extract('hour', field) > value
     elif ftype == 'date_hour_lt':
         filter = extract('hour', field) < value
     elif ftype == 'date_hour_eq':
         filter = extract('hour', field) == value
     elif ftype == 'date_lt':
         filter = field <  value
     elif ftype == 'date_lte':
         filter = field <= value
     elif ftype == '=':
         filter = field == value
     elif ftype == '!=':
         filter = field != value
     elif ftype == '>':
         filter = field >  value
     elif ftype == '>=':
         filter = field >= value
     elif ftype == '<':
         filter = field <  value
     elif ftype == '<=':
         filter = field <= value
     elif ftype == 'like':
         filter = field.ilike(value)
     return filter
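For reference, a self-contained sketch of the rolling-window expression the 'date_lt_now_less' branch builds (hypothetical events table; PostgreSQL dialect assumed, since date_trunc and the interval cast are PostgreSQL constructs). Note that the branch casts the whole "<qty> <granularity>" string to an interval and uses only the granularity part for truncation:

from sqlalchemy import Column, DateTime, Interval, MetaData, Table, cast, func
from sqlalchemy.dialects import postgresql

events = Table("events", MetaData(), Column("created", DateTime))

value = "3 month"
qty, granularity = value.split(" ")
# "created is earlier than the start of the month three months ago"
expr = events.c.created < func.date_trunc(granularity, func.now() - cast(value, Interval()))
print(expr.compile(dialect=postgresql.dialect()))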
Example #6
def get_balance(year, month):
    (_, day) = calendar.monthrange(year, month)
    start_date = datetime.date(year, month, 1)
    end_date = datetime.date(year, month, day)
    balance_schema = BalanceSchema()

    amounts = db.session.query(
        func.sum(Record.amount).label("cash_flow"),
        func.sum(
            case([(Record.record_type == Record.RECORD_TYPE_INCOME, Record.amount)], else_=0)
        ).label('income'),
        func.sum(
            case([(Record.record_type == Record.RECORD_TYPE_EXPENSE, Record.amount)], else_=0)
        ).label('expense'),
        func.date_trunc('month', Record.date).label("date"),
    ).filter(
        func.extract('year', Record.date) == year,
        func.extract('month', Record.date) == month,
    ).group_by(
        func.date_trunc('month', Record.date)
    ).first()

    current_balance = db.session.query(
        func.sum(
            case([(Record.date < start_date, Record.amount)], else_=0)
        ).label('start_balance'),
        func.sum(Record.amount).label("end_balance")
    ).filter(
        Record.date <= end_date
    ).first()

    if amounts:
        balance = balance_schema.dump({
            'cash_flow': amounts.cash_flow,
            'income': amounts.income,
            'expense': amounts.expense,
            'date': amounts.date,
            'start_balance': current_balance.start_balance,
            'end_balance': current_balance.end_balance,
        }).data
    else:
        balance = balance_schema.dump({
            'cash_flow': 0,
            'income': 0,
            'expense': 0,
            'date': end_date,
            'start_balance': current_balance.start_balance,
            'end_balance': current_balance.end_balance,
        }).data

    return balance
Example #7
def get_items_per_minute():
    items_per_minute = OrderItem.query.with_entities(func.date_trunc('minute', OrderItem.created_at),
                                                     func.count(OrderItem.id)).group_by(
        func.date_trunc('minute', OrderItem.created_at)).order_by(
        func.date_trunc('minute', OrderItem.created_at)) \
        .limit(15) \
        .all()
    data_items_per_minute = []
    labels_items_per_minute = []
    for item in items_per_minute:
        labels_items_per_minute.append(item[0].strftime("%H:%M:%S"))
        data_items_per_minute.append(item[1])
    legend_items_per_minute = 'Total received'
    return labels_items_per_minute, legend_items_per_minute, data_items_per_minute
Example #8
def get_orders_per_minute():
    orders_per_minute = CssOrder.query.with_entities(func.date_trunc('minute', CssOrder.created_at),
                                                     func.count(CssOrder.id)).group_by(
        func.date_trunc('minute', CssOrder.created_at)).order_by(
        func.date_trunc('minute', CssOrder.created_at)) \
        .limit(15) \
        .all()
    data_orders_per_minute = []
    labels_orders_per_minute = []
    for order in orders_per_minute:
        labels_orders_per_minute.append(order[0].strftime("%H:%M:%S"))
        data_orders_per_minute.append(order[1])
    legend_orders_per_minute = 'Total received'
    return labels_orders_per_minute, legend_orders_per_minute, data_orders_per_minute
Example #9
def get_orders_per_minute_complete():
    orders_per_minute_complete = CssOrder.query \
        .with_entities(func.date_trunc('minute', CssOrder.completed_at), func.count(CssOrder.id)) \
        .filter(CssOrder.status == CssConstants.ORDER_COMPLETE) \
        .group_by(func.date_trunc('minute', CssOrder.completed_at)) \
        .order_by(func.date_trunc('minute', CssOrder.completed_at)) \
        .limit(15) \
        .all()
    data_orders_per_minute_complete = []
    labels_orders_per_minute_complete = []
    for order in orders_per_minute_complete:
        labels_orders_per_minute_complete.append(order[0].strftime("%H:%M:%S"))
        data_orders_per_minute_complete.append(order[1])
    legend_orders_per_minute_complete = 'Completed'
    return labels_orders_per_minute_complete, legend_orders_per_minute_complete, data_orders_per_minute_complete
Example #10
def get_items_per_minute_complete():
    items_per_minute_complete = OrderItem.query \
        .with_entities(func.date_trunc('minute', OrderItem.completed_at), func.count(OrderItem.id)) \
        .filter(OrderItem.status == CssConstants.ORDER_COMPLETE) \
        .group_by(func.date_trunc('minute', OrderItem.completed_at)) \
        .order_by(func.date_trunc('minute', OrderItem.completed_at)) \
        .limit(15) \
        .all()
    data_items_per_minute_complete = []
    labels_items_per_minute_complete = []
    for item in items_per_minute_complete:
        labels_items_per_minute_complete.append(item[0].strftime("%H:%M:%S"))
        data_items_per_minute_complete.append(item[1])
    legend_items_per_minute_complete = 'Completed'
    return labels_items_per_minute_complete, legend_items_per_minute_complete, data_items_per_minute_complete
Example #11
def groupby_created(precision='month'):
    conn = sa.connect()
    # Truncate at the requested precision; the original hardcoded 'month' here,
    # silently ignoring the `precision` argument.
    month = func.date_trunc(precision, users.c.created).label('month')
    q = select([month,
                func.count(users.c.id)]).group_by('month').order_by('month')
    return [(dt.strftime('%b %Y'), num)
            for (dt, num) in conn.execute(q).fetchall()]
Example #12
def dlc(dlc_id, conn):
    requests_to_dlcs = requests.join(documents)\
                               .join(documents_dlcs)\
                               .join(dlcs)
    totals = select([
                dlcs.c.canonical_name,
                dlcs.c.display_name,
                select([func.count()])
                    .select_from(documents_dlcs.join(dlcs))
                    .where(dlcs.c.id==bindparam('dlc_id'))
                    .label('size'),
                select([func.count()])
                    .select_from(requests_to_dlcs)
                    .where(dlcs.c.id==bindparam('dlc_id'))
                    .label('downloads')
                ])\
            .where(dlcs.c.id==bindparam('dlc_id'))
    countries = select([requests.c.country, func.count().label('downloads')])\
                .select_from(requests_to_dlcs)\
                .where(dlcs.c.id==bindparam('dlc_id'))\
                .group_by(requests.c.country)
    dates = select([
                func.date_trunc('day', requests.c.datetime).label('date'),
                func.count().label('downloads')])\
            .select_from(requests_to_dlcs)\
            .where(dlcs.c.id==bindparam('dlc_id'))\
            .group_by(func.date_trunc('day', requests.c.datetime))
    dlc_obj = {'type': 'dlc'}
    res = conn.execute(totals, dlc_id=dlc_id).first()
    dlc_obj['_id'] = {'canonical': res['canonical_name'],
                      'display': res['display_name']}
    dlc_obj['size'] = res['size']
    dlc_obj['downloads'] = res['downloads']
    res = conn.execute(countries, dlc_id=dlc_id)
    for row in res:
        dlc_obj.setdefault('countries', [])\
            .append({'country': row['country'],
                     'downloads': row['downloads']})
    res = conn.execute(dates, dlc_id=dlc_id)
    for row in res:
        dlc_obj.setdefault('dates', [])\
            .append({'date': row['date'].strftime('%Y-%m-%d'),
                     'downloads': row['downloads']})
    return dlc_obj
Example #13
File: play.py  Project: swipswaps/playlog
async def count_for_period(conn, params):
    period = params.get('period')
    if not period:
        label_edge = 'year'
    else:
        label_edge = {
            'year': 'month',
            'month': 'day',
            'day': 'hour'
        }[period['kind']]
    label = func.date_trunc(label_edge, play.c.date).label('label')
    stmt = select([label, func.count().label('value')])
    if period:
        stmt = stmt.where(
            func.date_trunc(period['kind'], play.c.date) == period['value'])
    stmt = stmt.group_by(label).order_by(label)

    filter_kind = params.get('filter_kind')
    if filter_kind == 'artist':
        filter_column = artist.c.id
        from_clause = play.join(track).join(album).join(artist)
    elif filter_kind == 'album':
        filter_column = album.c.id
        from_clause = play.join(track).join(album)
    elif filter_kind == 'track':
        filter_column = track.c.id
        from_clause = play.join(track)
    else:
        filter_column = None
        from_clause = None
    if filter_column is not None:
        filter_value = params.get('filter_value')
        if not filter_value:
            raise ValidationError(
                {'filter_value': ['This field is required.']})
        stmt = stmt.where(filter_column == filter_value)
    if from_clause is not None:
        stmt = stmt.select_from(from_clause)

    result = await conn.execute(stmt)
    return await result.fetchall()
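A hedged usage sketch (hypothetical parameter values; assumes an aiopg.sa connection like the rest of the module). The function drills one level down: for a 'month' period it buckets plays per day, here restricted to a single artist:

async def plays_per_day_for_artist(conn):
    return await count_for_period(conn, {
        'period': {'kind': 'month', 'value': '2020-01-01'},
        'filter_kind': 'artist',
        'filter_value': 42,
    })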
Example #14
def handle(doc_id, conn):
    totals = select([
                documents.c.title,
                documents.c.handle,
                select([func.count()])
                    .select_from(requests)
                    .where(requests.c.document_id==bindparam('doc_id'))
                    .label('downloads')
                ])\
            .where(documents.c.id==bindparam('doc_id'))
    parents = select([authors.c.name, authors.c.mit_id])\
              .select_from(authors.join(documents_authors).join(documents))\
              .where(documents.c.id==bindparam('doc_id'))
    countries = select([requests.c.country, func.count().label('downloads')])\
                .where(requests.c.document_id==bindparam('doc_id'))\
                .group_by(requests.c.country)
    dates = select([
                func.date_trunc('day', requests.c.datetime).label('date'),
                func.count().label('downloads')])\
            .where(requests.c.document_id==bindparam('doc_id'))\
            .group_by(func.date_trunc('day', requests.c.datetime))
    handle_obj = {'type': 'handle'}
    res = conn.execute(totals, doc_id=doc_id).first()
    handle_obj['_id'] = res['handle']
    handle_obj['title'] = res['title']
    handle_obj['downloads'] = res['downloads']
    res = conn.execute(parents, doc_id=doc_id)
    for row in res:
        handle_obj.setdefault('parents', [])\
            .append({'mitid': row['mit_id'], 'name': row['name']})
    res = conn.execute(countries, doc_id=doc_id)
    for row in res:
        handle_obj.setdefault('countries', [])\
            .append({'country': row['country'],
                     'downloads': row['downloads']})
    res = conn.execute(dates, doc_id=doc_id)
    for row in res:
        handle_obj.setdefault('dates', [])\
            .append({'date': row['date'].strftime('%Y-%m-%d'),
                     'downloads': row['downloads']})
    return handle_obj
Example #15
def get_avg_time_to_completion():
    avg_wait_times_per_minute = OrderItem.query \
        .with_entities(func.date_trunc('minute', OrderItem.created_at),
                       func.avg(OrderItem.completed_at - OrderItem.created_at).label('average')) \
        .filter(OrderItem.status == CssConstants.ORDER_COMPLETE) \
        .group_by(func.date_trunc('minute', OrderItem.created_at)) \
        .order_by(func.date_trunc('minute', OrderItem.created_at)) \
        .limit(15) \
        .all()
    data_avg_wait_times = []
    labels_avg_wait_times = []
    for wait_time in avg_wait_times_per_minute:
        # Only the minutes component of the average duration is charted; the
        # original also computed seconds, hours and days but never used them.
        minutes = round((wait_time[1].seconds / 60) % 60, 2)

        labels_avg_wait_times.append(wait_time[0].strftime("%H:%M:%S"))
        data_avg_wait_times.append(minutes)
    legend_avg_wait_times = 'Avg. Time To Completion'
    return labels_avg_wait_times, legend_avg_wait_times, data_avg_wait_times
Example #16
def overall(conn):
    totals = select([
        select([func.count()]).select_from(requests).label('downloads'),
        select([func.count()]).select_from(documents).label('size')])
    countries = select([requests.c.country, func.count().label('downloads')])\
                    .group_by(requests.c.country)
    dates = select([func.date_trunc('day', requests.c.datetime).label('date'),
                    func.count().label('downloads')])\
                .group_by(func.date_trunc('day', requests.c.datetime))
    overall_obj = {'type': 'overall'}
    res = conn.execute(totals).first()
    overall_obj['downloads'] = res['downloads']
    overall_obj['size'] = res['size']
    res = conn.execute(countries)
    for row in res:
        overall_obj.setdefault('countries', [])\
            .append({'country': row['country'],
                     'downloads': row['downloads']})
    res = conn.execute(dates)
    for row in res:
        overall_obj.setdefault('dates', [])\
            .append({'date': row['date'].strftime('%Y-%m-%d'),
                     'downloads': row['downloads']})
    return overall_obj
Example #17
File: app.py  Project: SafeBlues/backend
    def Stats(self, request, context):
        with session_scope() as session:
            date_ = func.date_trunc("day", Report.time_received)

            stats = (session.query(date_, func.sum(StrandInReport.state)).join(
                StrandInReport,
                StrandInReport.report_id == Report.report_id).filter(
                    StrandInReport.strand_id == request.strand_id).group_by(
                        date_).all())

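            # NOTE: the aggregated `stats` rows are not wired into the response
            # below; the StatsRes fields returned here are placeholder values.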
            return sb_pb2.StatsRes(
                strand_id=1,
                times=[timestamp_from_datetime(datetime.datetime.now())],
                total_incubating_strands=[6],
                total_infected_strands=[3],
                total_removed_strands=[2],
            )
Example #18
def get_build_history(project, end_period, days=90):
    def date_to_key(dt):
        return int(dt.replace(
            minute=0, hour=0, second=0, microsecond=0
        ).strftime('%s'))

    query = db.session.query(
        Job.result,
        func.count('*').label('num'),
        func.date_trunc(literal('day'), Job.date_created).label('date')
    ).filter(
        Job.project_id == project.id,
        Job.date_created >= end_period - timedelta(days=days),
        Job.date_created < end_period,
        Job.status == Status.finished,
        Source.revision_sha != None,  # NOQA
        Source.patch_id == None,
    ).group_by(Job.result, 'date').order_by('date asc')

    # group results by day
    results = {}
    for n in xrange(days):
        results[date_to_key(end_period - timedelta(days=n))] = {
            'counts': {
                'passed': 0,
                'failed': 0,
                'aborted': 0,
                'unknown': 0,
            },
            'numBuilds': 0,
        }

    for result, num, date in query:
        this = results[date_to_key(date)]
        this['counts'][result.name] += num
        this['numBuilds'] += num

    return results
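One caveat worth noting: date_to_key relies on strftime('%s'), which is a platform extension (not part of the portable strftime set) and interprets naive datetimes in local time. A more portable day-key, assuming the project's datetimes are UTC, might look like:

import calendar

def date_to_key_utc(dt):
    # Truncate to midnight, then convert the UTC time tuple to an epoch integer.
    day = dt.replace(minute=0, hour=0, second=0, microsecond=0)
    return calendar.timegm(day.utctimetuple())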
Example #19
 def make_grouping(cls, logger, grouping_info):
     group_type, group_args, group_name = grouping_info
     grouping = None
     if group_type == "extract":
         subfield, field_name = group_args
         real_field = getattr(cls, field_name, None)
         if real_field:
             if subfield == 'ampm':
                 # 1 for afternoon hours (hour > 12), 0 otherwise
                 grouping = case(
                     whens=[(cast(extract('hour', real_field), Integer()) > 12, 1)],
                     else_=0,
                 ).label(group_name)
             else:
                 grouping = cast(extract(subfield, real_field), Integer()).label(group_name)
         else:
             logger.error("Invalid grouping %s (%s)", grouping_info, cls)
     elif group_type == "date_trunc":
         subfield, field_name = group_args
         real_field = getattr(cls, field_name, None)
         if real_field:
             grouping = func.date_trunc(subfield, real_field)
             grouping = grouping.label(group_name)
         else:
             logger.error("Invalid grouping %s (%s)", grouping_info, cls)
     elif group_type == "func":
         logger.error("Grouping by func not implemented yet")
     elif group_type == 'coalesce_trim':
         # trim(coalesce(field_name, ''))
         field_name = group_args.get('field', group_name)
         real_field = getattr(cls, field_name, None)
         if real_field:
             grouping = func.coalesce(real_field, group_args.get('coalesce_to', ''))
             if group_args.get('trim', True):
                 grouping = func.trim(grouping)
             grouping = grouping.label(group_name)
         else:
             logger.error("Invalid grouping %s (%s)", grouping_info, cls)
     else:
         logger.error("Unknown grouping type %s", group_type)
     return grouping
Example #20
    def __init__(self,
                 group_strategy,
                 timeseries_unit='day',
                 date_filter_attributes=None):
        self.filterable_attributes = [
            DATE, CUSTOM_ATTRIBUTE, TRANSFER_ACCOUNT, USER
        ]
        self.timeseries_unit = timeseries_unit
        self.date_filter_attributes = date_filter_attributes
        self.metrics = []

        # Timeseries Metrics
        if group_strategy:
            users_created_timeseries_query = group_strategy.build_query_group_by_with_join(db.session.query(func.count(User.id).label('volume'),
                    func.date_trunc(self.timeseries_unit, self.date_filter_attributes[User]).label('date'), group_strategy.group_by_column)\
                    .group_by(func.date_trunc(self.timeseries_unit, self.date_filter_attributes[User])), User)
            aggregated_users_created_query = group_strategy\
                .build_query_group_by_with_join(db.session.query(func.count(User.id).label('volume'), group_strategy.group_by_column), User)
        else:
            users_created_timeseries_query = db.session.query(func.count(User.id).label('volume'),
                    func.date_trunc(self.timeseries_unit, self.date_filter_attributes[User]).label('date'))\
                    .group_by(func.date_trunc(self.timeseries_unit, self.date_filter_attributes[User]))
            aggregated_users_created_query = None
        total_users_created_query = db.session.query(
            func.count(User.id).label('volume'))
        self.metrics.append(
            metric.Metric(
                metric_name='users_created',
                is_timeseries=True,
                query=users_created_timeseries_query,
                aggregated_query=aggregated_users_created_query,
                total_query=total_users_created_query,
                object_model=User,
                #stock_filters=[filters.beneficiary_filters], # NOTE: Do we still want this filter?
                stock_filters=[],
                query_caching_combinatory_strategy=metrics_cache.SUM_OBJECTS,
                aggregated_query_caching_combinatory_strategy=metrics_cache.SUM_OBJECTS,
                total_query_caching_combinatory_strategy=metrics_cache.TALLY,
                filterable_by=self.filterable_attributes,
                query_actions=[FORMAT_TIMESERIES],
                aggregated_query_actions=[FORMAT_AGGREGATE_METRICS],
                total_query_actions=[GET_FIRST],
            ))

        if group_strategy:
            active_users_timeseries_query = group_strategy.build_query_group_by_with_join(db.session.query(func.count(func.distinct(CreditTransfer.sender_user_id)).label('volume'),
                    func.date_trunc(self.timeseries_unit, self.date_filter_attributes[CreditTransfer]).label('date'), group_strategy.group_by_column)\
                    .group_by(func.date_trunc(self.timeseries_unit, self.date_filter_attributes[CreditTransfer])), CreditTransfer)
            aggregated_active_users_query = group_strategy.build_query_group_by_with_join(
                db.session.query(
                    func.count(func.distinct(
                        CreditTransfer.sender_user_id)).label('volume'),
                    group_strategy.group_by_column), CreditTransfer)
        else:
            active_users_timeseries_query = db.session.query(func.count(func.distinct(CreditTransfer.sender_user_id)).label('volume'),
                    func.date_trunc(self.timeseries_unit, self.date_filter_attributes[CreditTransfer]).label('date'))\
                    .group_by(func.date_trunc(self.timeseries_unit, self.date_filter_attributes[CreditTransfer]))
            aggregated_active_users_query = None
        total_active_users_query = db.session.query(
            func.count(func.distinct(
                CreditTransfer.sender_user_id)).label('volume'))
        self.metrics.append(
            metric.Metric(
                metric_name='active_users',
                is_timeseries=True,
                query=active_users_timeseries_query,
                aggregated_query=aggregated_active_users_query,
                total_query=total_active_users_query,
                object_model=CreditTransfer,
                #stock_filters=[filters.beneficiary_filters], # NOTE: Do we still want this filter?
                stock_filters=[],
                query_caching_combinatory_strategy=metrics_cache.SUM_OBJECTS,
                aggregated_query_caching_combinatory_strategy=metrics_cache.QUERY_ALL,
                total_query_caching_combinatory_strategy=metrics_cache.QUERY_ALL,
                filterable_by=self.filterable_attributes,
                query_actions=[FORMAT_TIMESERIES],
                aggregated_query_actions=[FORMAT_AGGREGATE_METRICS],
                total_query_actions=[GET_FIRST],
            ))

        if group_strategy:
            total_users_timeseries_query = group_strategy.build_query_group_by_with_join(db.session.query(func.count(User.id).label('volume'),
                    func.date_trunc(self.timeseries_unit, self.date_filter_attributes[User]).label('date'), group_strategy.group_by_column)\
                    .group_by(func.date_trunc(self.timeseries_unit, self.date_filter_attributes[User])), User)

        else:
            total_users_timeseries_query = db.session.query(func.count(User.id).label('volume'),
                    func.date_trunc(self.timeseries_unit, self.date_filter_attributes[User]).label('date'))\
                    .group_by(func.date_trunc(self.timeseries_unit, self.date_filter_attributes[User]))
        self.metrics.append(
            metric.Metric(
                metric_name='total_population_cumulative',
                is_timeseries=True,
                query=total_users_timeseries_query,
                total_query=total_users_created_query,
                aggregated_query=aggregated_active_users_query,
                object_model=User,
                stock_filters=[],
                query_caching_combinatory_strategy=metrics_cache.SUM_OBJECTS,
                aggregated_query_caching_combinatory_strategy=metrics_cache.SUM_OBJECTS,
                total_query_caching_combinatory_strategy=metrics_cache.TALLY,
                filterable_by=self.filterable_attributes,
                aggregated_query_actions=[FORMAT_AGGREGATE_METRICS],
                total_query_actions=[GET_FIRST],
                query_actions=[
                    ADD_MISSING_DAYS_TO_TODAY, ACCUMULATE_TIMESERIES,
                    FORMAT_TIMESERIES
                ]))
Example #21
File: main.py  Project: zehome/claritick
        except (QBException,), e:
            self.logger.error("Can't build query with bad parameters : %s", e)
            return

        # Prepare the query and the specific stuff depending on the 'whattodo'
        filter_nbs, grouping_nbs = self.prepare_filters_and_grouping()
        filter_nbs = self.generate_prefilters(filter_nbs)
        self.prepare_query()
        # Prepare and generate the filters and grouping
        self.generate_filters(filter_nbs)
        self.generate_groupings(grouping_nbs)
        # Decide of the correct granularity when displaying evolutions
        if self.granularity_criteria is None and self.query_parameters.get("whattodo", "count") in ('evolution', 'event_delta_evolution'):
            date_delta = self.end_date - self.start_date
            self.granularity = get_granularity(date_delta, self.MIN_POINTS, self.MAX_POINTS)
            self.granularity_criteria = func.date_trunc(self.granularity, self.date_criteria_field).label("Date")
        # A specific granularity has speen specified or decided, generate
        # a subquery with generate_series to avoid holes in the time
        if self.granularity_criteria is not None:
            self.subquery = self.session.query(
                func.generate_series(
                    func.date_trunc(self.granularity, cast(self.start_date, DateTime())),
                    func.date_trunc(self.granularity, cast(self.end_date, DateTime())),
                    cast('1 %s' % self.granularity, Interval()),
                ).label('Temps')
            ).subquery()
            self.select.insert(1, self.granularity_criteria)
            self.display_translations.insert(1, None)
            self.group_by.insert(0, self.granularity_criteria)
        ### Extras ###
        # The user specified a boundary
        extras = self.query_parameters.get("extras", {})
        if extras and 'boundary' in extras:
            boundary_max = extras.get('boundary', {}).get('max', None)
            classes_case = []
            if boundary_max and self.field_to_color is not None:
                classes_case.append((self.field_to_color > boundary_max, literal_column("'breaks_max_bound'", String)))
Example #22
    def get(self, repo: Repository):
        """
        Return various stats per-day for the given repository.
        """
        stat = request.args.get("stat")
        if not stat:
            return self.error({"stat": "invalid stat"})

        if stat not in STAT_CHOICES:
            return self.error({"stat": "invalid stat"})

        aggregate = request.args.get("aggregate", "time")
        if aggregate not in ("time", "build"):
            return self.error({"aggregate": "invalid aggregate"})

        branch = request.args.get("branch")
        since = request.args.get("since")

        if since:
            date_end = datetime.utcfromtimestamp(
                float(since)).replace(tzinfo=timezone.utc)
        else:
            date_end = timezone.now() + timedelta(days=1)

        date_end = date_end.replace(minute=0, second=0, microsecond=0)

        if aggregate == "time":
            resolution = request.args.get("resolution", "1d")
            points = int(
                request.args.get("points") or POINTS_DEFAULT[resolution])
            if resolution == "1h":
                grouper = func.date_trunc("hour", Build.date_created)
                decr_res = decr_hour
            elif resolution == "1d":
                grouper = func.date_trunc("day", Build.date_created)
                date_end = date_end.replace(hour=0)
                decr_res = decr_day
            elif resolution == "1w":
                grouper = func.date_trunc("week", Build.date_created)
                date_end = date_end.replace(hour=0)
                date_end -= timedelta(days=date_end.weekday())
                decr_res = decr_week
            elif resolution == "1m":
                grouper = func.date_trunc("month", Build.date_created)
                date_end = date_end.replace(hour=0, day=1)
                decr_res = decr_month
        elif aggregate == "build":
            grouper = Build.number
            points = int(request.args.get("points") or 100)

        queryset = build_queryset(repo.id, stat, grouper)

        if aggregate == "time":
            date_begin = date_end
            for _ in range(points):
                date_begin = decr_res(date_begin)
            queryset = queryset.filter(Build.date_created >= date_begin,
                                       Build.date_created < date_end)
        elif aggregate == "build":
            revision_shas = get_revisions(repo, branch, limit=points * 2)
            queryset = (queryset.join(
                Source, Source.id == Build.source_id).filter(
                    Source.revision_sha.in_(revision_shas)).order_by(
                        Build.number.desc()))

        queryset = queryset.limit(points)

        if aggregate == "time":
            results = {
                # HACK(dcramer): force (but dont convert) the timezone to be utc
                # while this isnt correct, we're not looking for correctness yet
                k.replace(tzinfo=timezone.utc): v
                for k, v in queryset
            }

            data = []
            cur_date = date_end
            for _ in range(points):
                cur_date = decr_res(cur_date)
                data.append({
                    "time":
                    int(float(cur_date.strftime("%s.%f")) * 1000),
                    "value": (int(float(results[cur_date]))
                              if results.get(cur_date) else
                              (0 if stat in ZERO_FILLERS else None)),
                })
        elif aggregate == "build":
            data = [{
                "build":
                k,
                "value": (int(float(v)) if v is not None else
                          (0 if stat in ZERO_FILLERS else None)),
            } for k, v in sorted(queryset, key=lambda x: -x[0])]

        return self.respond(data)
Example #23
File: fbg.py  Project: lawrencejberry/nrfis
    def __call__(
        self,
        session: Session = Depends(get_db),
        averaging_window: AveragingWindow = Query(
            None,
            alias="averaging-window",
            description=
            "Bucket and average samples within a particular time window.",
        ),
        start_time: datetime = Query(
            ...,
            alias="start-time",
            description=
            "ISO 8601 format string representing the start time of the range of data requested.",
            example="2020-02-01T17:28:14.723333",
        ),
        end_time: datetime = Query(
            ...,
            alias="end-time",
            description=
            "ISO 8601 format string representing the end time of the range of data requested.",
            example="2020-02-01T17:28:14.723333",
        ),
    ):
        if start_time > end_time:
            raise HTTPException(status_code=422,
                                detail="Start time is later than end time")

        if averaging_window is not None:
            window = func.date_trunc(
                averaging_window.value,
                self.package.values_table.timestamp).label("timestamp")

            raw_data = (session.query(
                window,
                *[
                    func.avg(getattr(self.package.values_table,
                                     field)).label(field)
                    for field in self.package.values_table.attrs()
                ],
            ).filter(window > start_time).filter(
                window < end_time).group_by(window).order_by(window).all())
        else:
            raw_data = (session.query(self.package.values_table).filter(
                self.package.values_table.timestamp > start_time).filter(
                    self.package.values_table.timestamp < end_time).all())

        if self.data_type == DataType.raw:
            return raw_data

        metadata = {
            row.uid: row
            for row in session.query(self.package.metadata_table).all()
        }

        selected_sensors = [
            uid for uid, sensor in metadata.items()
            if sensor.type == self.data_type.value
        ]

        return [{
            "timestamp": row.timestamp,
            **{(metadata[uid].name or uid): Calculations[str(self.package)][self.data_type](uid, row, metadata)
               for uid in selected_sensors},
        } for row in raw_data]
Example #24
    def __init__(self, group_strategy, timeseries_unit='day'):
        self.filterable_attributes = [DATE, CUSTOM_ATTRIBUTE, TRANSFER_ACCOUNT, USER]
        self.timeseries_unit = timeseries_unit
        self.metrics = []

        total_beneficiaries_query = db.session.query(User)
        self.metrics.append(metric.Metric(
            metric_name='total_beneficiaries',
            query=total_beneficiaries_query,
            object_model=User,
            stock_filters=[filters.beneficiary_filters],
            caching_combinatory_strategy=metrics_cache.COUNT,
            filterable_by=self.filterable_attributes))

        total_vendors_query = db.session.query(User)
        self.metrics.append(metric.Metric(
            metric_name='total_vendors',
            query=total_vendors_query,
            object_model=User,
            stock_filters=[filters.vendor_filters],
            caching_combinatory_strategy=metrics_cache.COUNT,
            filterable_by=self.filterable_attributes))

        # Timeseries Metrics
        if group_strategy:
            users_created_timeseries_query = group_strategy.build_query_group_by_with_join(
                db.session.query(func.count(User.id).label('volume'),
                                 func.date_trunc(self.timeseries_unit, User.created).label('date'),
                                 group_strategy.group_by_column)
                .group_by(func.date_trunc(self.timeseries_unit, User.created)), User)
            aggregated_users_created_query = group_strategy.build_query_group_by_with_join(
                db.session.query(func.count(User.id).label('volume'),
                                 group_strategy.group_by_column), User)
        else:
            users_created_timeseries_query = db.session.query(
                    func.count(User.id).label('volume'),
                    func.date_trunc(self.timeseries_unit, User.created).label('date'))\
                .group_by(func.date_trunc(self.timeseries_unit, User.created))
            aggregated_users_created_query = None
        total_users_created_query = db.session.query(func.count(User.id).label('volume'))
        self.metrics.append(metric.Metric(
            metric_name='users_created',
            is_timeseries=True,
            query=users_created_timeseries_query,
            aggregated_query=aggregated_users_created_query,
            total_query=total_users_created_query,
            object_model=User,
            #stock_filters=[filters.beneficiary_filters], # NOTE: Do we still want this filter?
            stock_filters=[],
            caching_combinatory_strategy=metrics_cache.QUERY_ALL,
            filterable_by=self.filterable_attributes,
            query_actions=[FORMAT_TIMESERIES],
            aggregated_query_actions=[FORMAT_AGGREGATE_METRICS],
            total_query_actions=[GET_FIRST],
        ))

        if group_strategy:
            active_users_timeseries_query = group_strategy.build_query_group_by_with_join(
                db.session.query(func.count(func.distinct(CreditTransfer.sender_user_id)).label('volume'),
                                 func.date_trunc(self.timeseries_unit, CreditTransfer.created).label('date'),
                                 group_strategy.group_by_column)
                .group_by(func.date_trunc(self.timeseries_unit, CreditTransfer.created)), CreditTransfer)
            aggregated_active_users_query = group_strategy.build_query_group_by_with_join(
                db.session.query(func.count(func.distinct(CreditTransfer.sender_user_id)).label('volume'),
                                 group_strategy.group_by_column), CreditTransfer)
        else:
            active_users_timeseries_query = db.session.query(
                    func.count(func.distinct(CreditTransfer.sender_user_id)).label('volume'),
                    func.date_trunc(self.timeseries_unit, CreditTransfer.created).label('date'))\
                .group_by(func.date_trunc(self.timeseries_unit, CreditTransfer.created))
            aggregated_active_users_query = None
        total_active_users_query = db.session.query(func.count(func.distinct(CreditTransfer.sender_user_id)).label('volume'))
        self.metrics.append(metric.Metric(
            metric_name='active_users',
            is_timeseries=True,
            query=active_users_timeseries_query,
            aggregated_query=aggregated_active_users_query,
            total_query=total_active_users_query,
            object_model=CreditTransfer,
            #stock_filters=[filters.beneficiary_filters], # NOTE: Do we still want this filter?
            stock_filters=[],
            caching_combinatory_strategy=metrics_cache.QUERY_ALL,
            filterable_by=self.filterable_attributes,
            query_actions=[FORMAT_TIMESERIES],
            aggregated_query_actions=[FORMAT_AGGREGATE_METRICS],
            total_query_actions=[GET_FIRST],
        ))
Example #25
# 'default' aggregations are used when no engine-specific entry matches
aggregations_by_engine = {
    "default": aggregations,
    "redshift+psycopg2": aggregations_redshift,
    "bigquery": aggregations_bigquery,
}


#######################
# Conversions are a callable on a column expression that yields a
# nonaggregated column expression
# for instance, quarter(sales_date) => func.date_trunc('quarter', MyTable.sales_date)
#######################

conversions = {
    "month": lambda fld: func.date_trunc("month", fld),
    "week": lambda fld: func.date_trunc("week", fld),
    "year": lambda fld: func.date_trunc("year", fld),
    "quarter": lambda fld: func.date_trunc("quarter", fld),
    "string": lambda fld: func.cast(fld, String()),
    "int": lambda fld: func.cast(fld, Integer()),
}
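Hedged usage sketch: each conversion is applied by calling the stored lambda on a column expression (the sales table here is hypothetical). The result is an ordinary SQLAlchemy column expression, usable in select lists and GROUP BY clauses:

from sqlalchemy import Column, Date, MetaData, Table

sales = Table("sales", MetaData(), Column("sales_date", Date))
# Equivalent to func.date_trunc('quarter', sales.c.sales_date)
quarter_expr = conversions["quarter"](sales.c.sales_date)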


conversions_redshift = {
    # age doesn't work on all databases
    "age": lambda fld: postgres_age(fld),
}

conversions_bigquery = {
    # BigQuery's date_trunc takes its arguments in the opposite order
    "month": lambda fld: func.date_trunc(fld, text("month")),
}
Example #26
    def get(self):
        """
        Return various stats per-day for the installation.
        """
        stat = request.args.get("stat")
        if not stat:
            return self.error("invalid stat")

        if stat not in STAT_CHOICES:
            return self.error("invalid stat")

        since = request.args.get("since")

        if since:
            date_end = timezone.fromtimestamp(float(since))
        else:
            date_end = timezone.now() + timedelta(days=1)

        if stat == "users.active":
            date_field = User.date_active
        elif stat == "users.created":
            date_field = User.date_created
        else:
            date_field = Build.date_created

        date_end = date_end.replace(minute=0, second=0, microsecond=0)

        resolution = request.args.get("resolution", "1d")
        points = int(
            request.args.get("points") or stats.POINTS_DEFAULT[resolution])
        if resolution == "1h":
            grouper = func.date_trunc("hour", date_field)
            decr_res = stats.decr_hour
        elif resolution == "1d":
            grouper = func.date_trunc("day", date_field)
            date_end = date_end.replace(hour=0)
            decr_res = stats.decr_day
        elif resolution == "1w":
            grouper = func.date_trunc("week", date_field)
            date_end = date_end.replace(hour=0)
            date_end -= timedelta(days=date_end.weekday())
            decr_res = stats.decr_week
        elif resolution == "1m":
            grouper = func.date_trunc("month", date_field)
            date_end = date_end.replace(hour=0, day=1)
            decr_res = stats.decr_month

        date_begin = date_end
        for _ in range(points):
            date_begin = decr_res(date_begin)

        if stat.startswith("users."):
            queryset = db.session.query(grouper.label("grouper"),
                                        func.count(
                                            User.id)).group_by("grouper")
        else:
            queryset = stats.build_queryset(stat, grouper)

        queryset = queryset.filter(date_field >= date_begin,
                                   date_field < date_end)

        queryset = queryset.limit(points)

        results = {
            # HACK(dcramer): force (but dont convert) the timezone to be utc
            # while this isnt correct, we're not looking for correctness yet
            k.replace(tzinfo=timezone.utc): v
            for k, v in queryset
        }

        data = []
        cur_date = date_end
        for _ in range(points):
            cur_date = decr_res(cur_date)
            data.append({
                "time":
                int(float(cur_date.strftime("%s.%f")) * 1000),
                "value":
                (int(float(results[cur_date])) if results.get(cur_date) else
                 (0 if stat in stats.ZERO_FILLERS else None)),
            })

        return self.respond(data)
Example #27
File: play.py  Project: swipswaps/playlog
async def get_biggest_day(conn):
    day = func.date_trunc('DAY', play.c.date).label('day')
    plays = func.count().label('plays')
    result = await conn.execute(
        select([day, plays]).group_by(day).order_by(plays.desc()).limit(1))
    return await result.fetchone()
Example #28
    def get(self, repo: Repository):
        """
        Return various stats per-day for the given repository.
        """
        stat = request.args.get('stat')
        if not stat:
            return self.error('missing stat')

        if stat not in STAT_CHOICES:
            return self.error('invalid stat')

        resolution = request.args.get('resolution', '1d')
        points = int(request.args.get('points') or POINTS_DEFAULT[resolution])
        since = request.args.get('since')

        if since:
            date_end = datetime.utcfromtimestamp(
                float(since)).replace(tzinfo=timezone.utc)
        else:
            date_end = timezone.now()

        date_end = date_end.replace(minute=0, second=0, microsecond=0)

        if resolution == '1h':
            grouper = func.date_trunc('hour', Build.date_created)
            decr_res = decr_hour
        elif resolution == '1d':
            grouper = func.date_trunc('day', Build.date_created)
            date_end = date_end.replace(hour=0)
            decr_res = decr_day
        elif resolution == '1w':
            grouper = func.date_trunc('week', Build.date_created)
            date_end = date_end.replace(hour=0)
            date_end -= timedelta(days=date_end.weekday())
            decr_res = decr_week
        elif resolution == '1m':
            grouper = func.date_trunc('month', Build.date_created)
            date_end = date_end.replace(hour=0, day=1)
            decr_res = decr_month

        date_begin = date_end
        for _ in range(points):
            date_begin = decr_res(date_begin)

        # TODO(dcramer): put minimum date bounds
        if stat in ('builds.aborted', 'builds.failed', 'builds.passed',
                    'builds.total', 'builds.duration'):
            if stat == 'builds.failed':
                filters = [Build.result == Result.failed]
            elif stat == 'builds.passed':
                filters = [Build.result == Result.passed]
            elif stat == 'builds.aborted':
                filters = [Build.result == Result.aborted]
            else:
                filters = [Build.status == Status.finished]

            if stat == 'builds.duration':
                value = func.avg((extract('epoch', Build.date_finished) -
                                  extract('epoch', Build.date_started)) * 1000)
                filters = [
                    Build.status == Status.finished,
                    Build.result == Result.passed
                ]
            else:
                value = func.count(Build.id)

            results = {
                # HACK(dcramer): force (but dont convert) the timezone to be utc
                # while this isnt correct, we're not looking for correctness yet
                k.replace(tzinfo=timezone.utc): v
                for k, v in db.session.query(
                    grouper.label('grouper'),
                    value.label('value'),
                ).filter(Build.repository_id == repo.id, Build.date_created >=
                         date_begin, Build.date_created < date_end, *
                         filters).group_by('grouper')
            }
        else:
            results = {
                # HACK(dcramer): force (but dont convert) the timezone to be utc
                # while this isnt correct, we're not looking for correctness yet
                k.replace(tzinfo=timezone.utc): v
                for k, v in db.session.query(
                    grouper.label('grouper'),
                    func.avg(ItemStat.value).label('value'),
                ).filter(
                    ItemStat.item_id == Build.id,
                    ItemStat.name == stat,
                    Build.repository_id == repo.id,
                    Build.result == Result.passed,
                    Build.date_created >= date_begin,
                    Build.date_created < date_end,
                ).group_by('grouper')
            }

        data = []
        cur_date = date_end
        for _ in range(points):
            cur_date = decr_res(cur_date)
            data.append({
                'time': int(float(cur_date.strftime('%s.%f')) * 1000),
                'value': int(float(results.get(cur_date) or 0)),
            })
        # data.reverse()

        return self.respond(data)
Example #29
def calculate_transfer_stats(total_time_series=False,
                             start_date=None,
                             end_date=None,
                             user_filter={}):
    date_filter = []
    filter_active = False
    if start_date is not None and end_date is not None:
        date_filter.append(CreditTransfer.created >= start_date)
        date_filter.append(CreditTransfer.created <= end_date)
        filter_active = True

    disbursement_filters = [
        CreditTransfer.transfer_status == TransferStatusEnum.COMPLETE,
        CreditTransfer.transfer_type == TransferTypeEnum.PAYMENT,
        CreditTransfer.transfer_subtype == TransferSubTypeEnum.DISBURSEMENT
    ]

    standard_payment_filters = [
        CreditTransfer.transfer_status == TransferStatusEnum.COMPLETE,
        CreditTransfer.transfer_type == TransferTypeEnum.PAYMENT,
        CreditTransfer.transfer_subtype == TransferSubTypeEnum.STANDARD
    ]

    exchanged_filters = [
        CreditTransfer.transfer_status == TransferStatusEnum.COMPLETE,
        CreditTransfer.transfer_type == TransferTypeEnum.EXCHANGE,
        CreditTransfer.token == g.active_organisation.token
    ]

    beneficiary_filters = [User.has_beneficiary_role == True]
    vendor_filters = [User.has_vendor_role == True]

    exhausted_balance_filters = [
        CreditTransfer.transfer_type == TransferTypeEnum.PAYMENT,
        TransferAccount._balance_wei == 0
    ]

    transfer_use_filters = [
        *standard_payment_filters,
        CreditTransfer.transfer_use.isnot(None),
    ]

    # Disable cache if any filters are being used
    disable_cache = False
    if user_filter or date_filter:
        disable_cache = True

    total_distributed = db.session.query(
        func.sum(CreditTransfer.transfer_amount).label('total'))
    total_distributed = apply_filters(total_distributed, user_filter,
                                      CreditTransfer)
    total_distributed = total_distributed.filter(*disbursement_filters).filter(
        *date_filter)
    total_distributed = metrics_cache.execute_with_partial_history_cache(
        'total_distributed',
        total_distributed,
        CreditTransfer,
        metrics_cache.SUM,
        disable_cache=disable_cache)

    total_spent = db.session.query(
        func.sum(CreditTransfer.transfer_amount).label('total'))
    total_spent = apply_filters(total_spent, user_filter, CreditTransfer)
    total_spent = total_spent.filter(*standard_payment_filters).filter(
        *date_filter)
    total_spent = metrics_cache.execute_with_partial_history_cache(
        'total_spent',
        total_spent,
        CreditTransfer,
        metrics_cache.SUM,
        disable_cache=disable_cache)

    total_exchanged = db.session.query(
        func.sum(CreditTransfer.transfer_amount).label('total'))
    total_exchanged = apply_filters(total_exchanged, user_filter,
                                    CreditTransfer)
    total_exchanged = total_exchanged.filter(*exchanged_filters).filter(
        *date_filter)
    total_exchanged = metrics_cache.execute_with_partial_history_cache(
        'total_exchanged',
        total_exchanged,
        CreditTransfer,
        metrics_cache.SUM,
        disable_cache=disable_cache)

    total_beneficiaries = db.session.query(User).filter(*beneficiary_filters)
    total_beneficiaries = metrics_cache.execute_with_partial_history_cache(
        'total_beneficiaries',
        total_beneficiaries,
        CreditTransfer,
        metrics_cache.COUNT,
        disable_cache=disable_cache)

    total_vendors = db.session.query(User).filter(*vendor_filters)
    total_vendors = metrics_cache.execute_with_partial_history_cache(
        'total_vendors',
        total_vendors,
        CreditTransfer,
        metrics_cache.COUNT,
        disable_cache=disable_cache)

    total_users = total_beneficiaries + total_vendors

    has_transferred_count = db.session.query(func.count(func.distinct(CreditTransfer.sender_user_id))
        .label('transfer_count')) \
        .filter(*standard_payment_filters) \
        .filter(*date_filter) \
        .first().transfer_count

    exhausted_balance_count = db.session.query(func.count(func.distinct(
        CreditTransfer.sender_transfer_account_id))
        .label('transfer_count')) \
        .join(CreditTransfer.sender_transfer_account) \
        .filter(*exhausted_balance_filters) \
        .filter(*date_filter) \
        .first().transfer_count

    daily_transaction_volume = db.session.query(
        func.sum(CreditTransfer.transfer_amount).label('volume'),
        func.date_trunc('day', CreditTransfer.created).label('date'))
    daily_transaction_volume = apply_filters(daily_transaction_volume,
                                             user_filter, CreditTransfer)
    daily_transaction_volume = daily_transaction_volume.group_by(func.date_trunc('day', CreditTransfer.created))\
        .filter(*standard_payment_filters) \
        .filter(*date_filter)
    daily_transaction_volume = metrics_cache.execute_with_partial_history_cache(
        'daily_transaction_volume',
        daily_transaction_volume,
        CreditTransfer,
        metrics_cache.SUM_OBJECTS,
        disable_cache=disable_cache)

    daily_disbursement_volume = db.session.query(
        func.sum(CreditTransfer.transfer_amount).label('volume'),
        func.date_trunc('day', CreditTransfer.created).label('date'))
    daily_disbursement_volume = apply_filters(daily_disbursement_volume,
                                              user_filter, CreditTransfer)
    daily_disbursement_volume = daily_disbursement_volume.group_by(func.date_trunc('day', CreditTransfer.created)) \
        .filter(*disbursement_filters) \
        .filter(*date_filter)
    daily_disbursement_volume = metrics_cache.execute_with_partial_history_cache(
        'daily_disbursement_volume',
        daily_disbursement_volume,
        CreditTransfer,
        metrics_cache.SUM_OBJECTS,
        disable_cache=disable_cache)

    transfer_use_breakdown = db.session.query(
        CreditTransfer.transfer_use.cast(JSONB),
        func.count(CreditTransfer.transfer_use))
    transfer_use_breakdown = apply_filters(transfer_use_breakdown, user_filter,
                                           CreditTransfer)
    transfer_use_breakdown = transfer_use_breakdown.filter(*transfer_use_filters) \
        .group_by(CreditTransfer.transfer_use.cast(JSONB)) \
        .all()

    try:
        last_day = daily_transaction_volume[0][1]
        last_day_volume = daily_transaction_volume[0][0]
        transaction_vol_list = [{
            'date': item[1].isoformat(),
            'volume': item[0]
        } for item in daily_transaction_volume]
    except IndexError:  # No transactions
        last_day = datetime.datetime.utcnow()
        last_day_volume = 0
        has_transferred_count = 0
        transaction_vol_list = [{
            'date': datetime.datetime.utcnow().isoformat(),
            'volume': 0
        }]

    try:
        disbursement_vol_list = [{
            'date': item[1].isoformat(),
            'volume': item[0]
        } for item in daily_disbursement_volume]
    except IndexError:
        disbursement_vol_list = [{
            'date': datetime.datetime.utcnow().isoformat(),
            'volume': 0
        }]

    try:
        master_wallet_balance = cached_funds_available()
    except Exception:  # a bare except would also swallow KeyboardInterrupt
        master_wallet_balance = 0

    data = {
        'total_distributed': total_distributed,
        'total_spent': total_spent,
        'total_exchanged': total_exchanged,
        'has_transferred_count': has_transferred_count,
        'zero_balance_count': exhausted_balance_count,
        'total_beneficiaries': total_beneficiaries,
        'total_users': total_users,
        'master_wallet_balance': master_wallet_balance,
        'daily_transaction_volume': transaction_vol_list,
        'daily_disbursement_volume': disbursement_vol_list,
        'transfer_use_breakdown': transfer_use_breakdown,
        'last_day_volume': {
            'date': last_day.isoformat(),
            'volume': last_day_volume
        },
        'filter_active': filter_active
    }
    return data
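
The daily-volume queries above all share one shape: sum an amount, truncate the timestamp to the bucket, and group by the truncated expression. Reduced to a minimal sketch (assuming the db session and CreditTransfer model used above):

from sqlalchemy import func

day = func.date_trunc('day', CreditTransfer.created).label('date')
volume_by_day = (db.session.query(
        func.sum(CreditTransfer.transfer_amount).label('volume'), day)
    .group_by(day)
    .order_by(day)
    .all())
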
Example #30
0
File: admin.py Project: theotix/ccvpn
def admin_graph(request):
    graph_name = request.matchdict['name']

    try:
        import pygal
    except ImportError:
        raise HTTPNotFound()

    def get(name, default=None, type=str):
        try:
            return type(request.GET.get(name, default))
        except ValueError:
            raise HTTPBadRequest()

    def graphround(f):
        """ round(), then int() if f is integer """
        f = round(f, 4)
        if f % 1 == 0:
            f = int(f)
        return f


    pygalopts = {
        'js': [
            request.static_url('ccvpn:static/pygal/svg.jquery.js'),
            request.static_url('ccvpn:static/pygal/pygal-tooltips.js')
        ]
    }

    period = get('period', 'm')
    if period == 'm':
        period_time = timedelta(days=30)
    elif period == 'y':
        period_time = timedelta(days=365)
    else:
        raise HTTPBadRequest()  # unknown period would leave period_time unbound

    if graph_name == 'users':
        period = get('period', 'm')

        chart = pygal.Line(fill=True, x_label_rotation=75, show_legend=False,
                           **pygalopts)
        chart.title = 'Users (%s)' % period
        chart.x_labels = []
        values = []
        gen = last_days(30) if period == 'm' else last_months(12)
        users = DBSession.query(User).all()

        for m in gen:
            filter_ = time_filter_future(period, m, lambda o: o.signup_date)
            users_filtered = filter(filter_, users)
            values.append(len(list(users_filtered)))
            chart.x_labels.append('%s/%s/%s' % (m.year, m.month, m.day))

        chart.add('Users', values)
        return Response(chart.render(), content_type='image/svg+xml')

    elif graph_name == 'income':
        currency = get('currency')

        if currency == 'eur':
            graph_methods = (methods.PaypalMethod, methods.StripeMethod)
        elif currency == 'btc':
            graph_methods = (methods.BitcoinMethod, )
        else:
            raise HTTPNotFound()

        chart = pygal.StackedBar(x_label_rotation=75, show_legend=True,
                                 **pygalopts)

        chart.title = 'Income (%s, %s)' % (currency, period)
        orders = DBSession.query(Order) \
            .filter(Order.start_date > datetime.now() - period_time) \
            .filter(Order.paid == True) \
            .filter(or_(*(Order.method == m.id for m in graph_methods))) \
            .all()

        # Prepare value dict
        values = {}
        for order in orders:
            t = order.method
            if t not in values:
                values[t] = []

        chart.x_labels = []
        gen = last_days(30) if period == 'm' else last_months(12)
        for m in gen:
            filter_ = time_filter(period, m, lambda o: o.start_date)
            orders_date = list(filter(filter_, orders))

            for method in values.keys():
                filter_ = lambda o: o.method == method
                orders_dd = list(filter(filter_, orders_date))
                sum_ = sum(o.paid_amount for o in orders_dd)
                values[method].append(graphround(sum_) or None)

            chart.x_labels.append('%s' % m)

        for method, v in values.items():
            label = request.payment_methods[method].name
            chart.add(label, v)
        return Response(chart.render(), content_type='image/svg+xml')
    elif graph_name == 'sessions':
        chart = pygal.StackedBar(x_label_rotation=75, show_legend=True,
                                 **pygalopts)
        chart.title = 'Sessions (%s)' % (period)

        cdate = func.date_trunc('day' if period == 'm' else 'month',
                                VPNSession.connect_date).label('cdate')
        counts = DBSession.query(VPNSession.gateway_id,
                                 VPNSession.gateway_version,
                                 func.count(VPNSession.id).label('count'),
                                 cdate) \
                          .group_by(VPNSession.gateway_id) \
                          .group_by(VPNSession.gateway_version) \
                          .group_by(cdate) \
                          .all()

        values = {}  # gw_key / date / count
        for item in counts:
            gw_key = str(item.gateway_id) + '/' + str(item.gateway_version)
            if gw_key not in values:
                values[gw_key] = {}
            values[gw_key][item.cdate.date()] = item.count

        chart.x_labels = []

        values2 = {}
        gen = last_days(30) if period == 'm' else last_months(12)
        for m in gen:
            for gw_key, dates in values.items():
                if gw_key not in values2:
                    values2[gw_key] = {}
                values2[gw_key][m] = dates.get(m)

            chart.x_labels.append('%s' % m)

        for gw_key, dates in values2.items():
            label = gw_key
            sorted_dates = sorted(dates.items())
            sorted_counts = map(lambda x: x[1], sorted_dates)
            chart.add(label, sorted_counts)
        return Response(chart.render(), content_type='image/svg+xml')
    else:
        raise HTTPNotFound()
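
Because date_trunc takes its unit as a plain string, the bucket size can be chosen at runtime, as the sessions graph does with 'day' if period == 'm' else 'month'. The same idea as a tiny helper (illustrative, not part of ccvpn):

from sqlalchemy import func

def bucket(column, period):
    """Daily buckets for the monthly view, monthly buckets for the yearly view."""
    return func.date_trunc('day' if period == 'm' else 'month', column)
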
Example #31
0
    def get(self, project_id):
        project = Project.get(project_id)
        if not project:
            return '', 404

        args = self.parser.parse_args()

        points = args.points or POINTS_DEFAULT[args.resolution]

        if args.from_date:
            date_end = datetime.fromtimestamp(args.from_date)
        else:
            date_end = datetime.now()

        date_end = date_end.replace(minute=0, second=0, microsecond=0)

        if args.resolution == '1h':
            grouper = func.date_trunc('hour', Build.date_created)
            decr_res = lambda x: x - timedelta(hours=1)
        elif args.resolution == '1d':
            grouper = func.date_trunc('day', Build.date_created)
            date_end = date_end.replace(hour=0)
            decr_res = lambda x: x - timedelta(days=1)
        elif args.resolution == '1w':
            grouper = func.date_trunc('week', Build.date_created)
            date_end = date_end.replace(hour=0)
            date_end -= timedelta(days=date_end.weekday())
            decr_res = decr_week
        elif args.resolution == '1m':
            grouper = func.date_trunc('month', Build.date_created)
            date_end = date_end.replace(hour=0, day=1)
            decr_res = decr_month

        if args.agg:
            value = getattr(func, args.agg)(ItemStat.value)
        else:
            value = func.avg(ItemStat.value)

        date_begin = date_end.replace()
        for _ in xrange(points):
            date_begin = decr_res(date_begin)

        # TODO(dcramer): put minimum date bounds
        results = dict(
            db.session.query(
                grouper.label('grouper'),
                value.label('value'),
            ).filter(
                ItemStat.item_id == Build.id,
                ItemStat.name == args.stat,
                Build.project_id == project.id,
                Build.date_created >= date_begin,
                Build.date_created < date_end,
            ).group_by('grouper'))

        data = []
        cur_date = date_end.replace()
        for _ in xrange(points):
            cur_date = decr_res(cur_date)
            data.append({
                'time': int(float(cur_date.strftime('%s.%f')) * 1000),
                'value': int(float(results.get(cur_date, 0))),
            })
        data.reverse()

        return self.respond(data, serialize=False)
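
This endpoint compares timestamps as naive UTC, which is why the variant at the top of this section has to bolt tzinfo=timezone.utc back onto the result keys. When buckets should follow a local calendar instead, Postgres's timezone() composes with date_trunc, as Example #32 below does; a minimal sketch of that composition:

from sqlalchemy import func

def local_day(column, tzname='Europe/Paris'):
    """Truncate a naive-UTC timestamp column to local-calendar days."""
    return func.date_trunc('day', func.timezone(tzname, func.timezone('UTC', column)))
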
Example #32
0
def query_work_day_stats(
    company_id,
    start_date=None,
    end_date=None,
    first=None,
    after=None,
    tzname="Europe/Paris",
):
    tz = gettz(tzname)
    if after:
        max_time, user_id_ = parse_datetime_plus_id_cursor(after)
        max_date = max_time.date()
        end_date = min(max_date, end_date) if end_date else max_date

    query = (Activity.query.join(Mission).join(
        Expenditure,
        and_(
            Activity.user_id == Expenditure.user_id,
            Activity.mission_id == Expenditure.mission_id,
        ),
        isouter=True,
    ).with_entities(
        Activity.id,
        Activity.user_id,
        Activity.mission_id,
        Mission.name,
        Activity.start_time,
        Activity.end_time,
        Activity.type,
        Expenditure.id.label("expenditure_id"),
        Expenditure.type.label("expenditure_type"),
        func.generate_series(
            func.date_trunc(
                "day",
                func.timezone(
                    tzname,
                    func.timezone("UTC", Activity.start_time),
                ),
            ),
            func.timezone(
                tzname,
                func.coalesce(
                    func.timezone("UTC", Activity.end_time),
                    func.now(),
                ),
            ),
            "1 day",
        ).label("day"),
    ).filter(
        Mission.company_id == company_id,
        ~Activity.is_dismissed,
        Activity.start_time != Activity.end_time,
    ))

    query = _apply_time_range_filters(
        query,
        to_datetime(start_date, tz_for_date=tz),
        to_datetime(end_date,
                    tz_for_date=tz,
                    convert_dates_to_end_of_day_times=True),
    )

    has_next_page = False
    if first:
        activity_first = max(first * 5, 200)
        query = query.order_by(desc("day"), desc(
            Activity.user_id)).limit(activity_first + 1)
        has_next_page = query.count() > activity_first

    query = query.subquery()

    query = (db.session.query(query).group_by(
        query.c.user_id, query.c.day, query.c.mission_id,
        query.c.name).with_entities(
            query.c.user_id.label("user_id"),
            query.c.day,
            func.timezone("UTC",
                          func.timezone(tzname,
                                        query.c.day)).label("utc_day_start"),
            query.c.mission_id.label("mission_id"),
            query.c.name.label("mission_name"),
            func.min(
                func.greatest(
                    query.c.start_time,
                    func.timezone("UTC", func.timezone(tzname, query.c.day)),
                )).label("start_time"),
            func.max(
                func.least(
                    func.timezone(
                        "UTC",
                        func.timezone(
                            tzname,
                            query.c.day + func.cast("1 day", Interval)),
                    ),
                    func.coalesce(query.c.end_time, func.now()),
                )).label("end_time"),
            func.bool_or(
                and_(
                    query.c.end_time.is_(None),
                    query.c.day == func.current_date(),
                )).label("is_running"),
            *[
                func.sum(
                    case(
                        [(
                            query.c.type == a_type.value,
                            extract(
                                "epoch",
                                func.least(
                                    func.timezone(
                                        "UTC",
                                        func.timezone(
                                            tzname,
                                            query.c.day +
                                            func.cast("1 day", Interval),
                                        ),
                                    ),
                                    func.coalesce(query.c.end_time,
                                                  func.now()),
                                ) - func.greatest(
                                    query.c.start_time,
                                    func.timezone(
                                        "UTC",
                                        func.timezone(tzname, query.c.day),
                                    ),
                                ),
                            ),
                        )],
                        else_=0,
                    )).label(f"{a_type.value}_duration")
                for a_type in ActivityType
            ],
            func.greatest(func.count(distinct(query.c.expenditure_id)),
                          1).label("n_exp_dups"),
            func.count(distinct(query.c.id)).label("n_act_dups"),
            *[
                func.sum(
                    case(
                        [(query.c.expenditure_type == e_type.value, 1)],
                        else_=0,
                    )).label(f"n_{e_type.value}_expenditures")
                for e_type in ExpenditureType
            ],
        ).subquery())

    query = (db.session.query(query).group_by(
        query.c.user_id, query.c.day).with_entities(
            query.c.user_id.label("user_id"),
            query.c.day,
            func.array_agg(distinct(
                query.c.mission_name)).label("mission_names"),
            func.min(query.c.start_time).label("start_time"),
            func.max(query.c.end_time).label("end_time"),
            func.bool_or(query.c.is_running).label("is_running"),
            *[
                func.sum(
                    getattr(query.c, f"{a_type.value}_duration") /
                    query.c.n_exp_dups).cast(Integer).label(
                        f"{a_type.value}_duration") for a_type in ActivityType
            ],
            *[
                func.sum(
                    getattr(query.c, f"n_{e_type.value}_expenditures") /
                    query.c.n_act_dups).cast(Integer).label(
                        f"n_{e_type.value}_expenditures")
                for e_type in ExpenditureType
            ],
        ).order_by(desc("day"), desc("user_id")).subquery())

    query = db.session.query(query).with_entities(
        *query.c,
        extract("epoch", query.c.end_time -
                query.c.start_time).label("service_duration"),
        reduce(
            lambda a, b: a + b,
            [
                getattr(query.c, f"{a_type.value}_duration")
                for a_type in ActivityType
            ],
        ).label("total_work_duration"),
    )

    results = query.all()
    if after:
        results = [
            r for r in results if r.day.date() < max_date or (
                r.day.date() == max_date and r.user_id < user_id_)
        ]

    if first:
        if has_next_page:
            # The last work day may be incomplete because we didn't fetch all the activities => remove it
            results = results[:-1]
        if len(results) > first:
            results = results[:first]
            has_next_page = True

    return results, has_next_page
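
The generate_series call in the first subquery is what fans a single activity out into one row per local calendar day it touches, so an activity crossing midnight is counted on both days; the later subqueries then clamp start and end times to each day's boundaries. The fan-out in isolation (same Activity columns and timezone conventions as above):

from sqlalchemy import func

tzname = 'Europe/Paris'
days = func.generate_series(
    func.date_trunc(
        'day', func.timezone(tzname, func.timezone('UTC', Activity.start_time))),
    func.timezone(
        tzname, func.coalesce(func.timezone('UTC', Activity.end_time), func.now())),
    '1 day',
).label('day')
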
Example #33
0
File: __init__.py Project: Kozea/pypet
    def setUp(self):
        engine = create_engine('postgresql://postgres@localhost/pypet')
        self.metadata = MetaData(bind=engine)

        self.store_table = Table('store', self.metadata,
                Column('store_id', types.Integer, primary_key=True),
                Column('store_name', types.String),
                Column('country_id', types.Integer,
                    ForeignKey('country.country_id')))

        self.country_table = Table('country', self.metadata,
                Column('country_id', types.Integer, primary_key=True),
                Column('country_name', types.String),
                Column('region_id', types.Integer,
                    ForeignKey('region.region_id')))

        self.region_table = Table('region', self.metadata,
                Column('region_id', types.Integer, primary_key=True),
                Column('region_name', types.String))

        self.product_table = Table('product', self.metadata,
                Column('product_id', types.Integer, primary_key=True),
                Column('product_name', types.String),
                Column('product_category_id', types.Integer,
                   ForeignKey('product_category.product_category_id')))

        self.product_category_table = Table('product_category', self.metadata,
                Column('product_category_id', types.Integer, primary_key=True),
                Column('product_category_name', types.String))

        self.facts_table = Table('facts_table', self.metadata,
                Column('store_id', types.Integer,
                    ForeignKey('store.store_id')),
                Column('date', types.Date),
                Column('product_id', types.Integer,
                    ForeignKey('product.product_id')),
                Column('price', types.Float),
                Column('qty', types.Integer))

        agg_name = ('agg_time_month_product_product_store_store'
                    '_Unit Price_Quantity')
        self.agg_by_month_table = Table(agg_name,
                self.metadata,
                Column('store_store', types.Integer,
                    ForeignKey('store.store_id')),
                Column('time_month', types.Date),
                Column('product_product', types.Integer,
                    ForeignKey('product.product_id')),
                Column('Unit Price', types.Float),
                Column('Quantity', types.Integer),
                Column('fact_count', types.Integer))
        agg_name = ('agg_time_year_store_country_product_product'
                    '_Unit Price_Quantity')

        self.agg_by_year_country_table = Table(agg_name,
                self.metadata,
                Column('store_country', types.Integer,
                    ForeignKey('country.country_id')),
                Column('time_year', types.Date),
                Column('product_product', types.Integer,
                    ForeignKey('product.product_id')),
                Column('Unit Price', types.Float),
                Column('Quantity', types.Integer),
                Column('fact_count', types.Integer))

        self.metadata.create_all()

        self.store_dim = Dimension('store', [
            Hierarchy('default', [
                Level('region', self.region_table.c.region_id,
                    self.region_table.c.region_name),
                Level('country', self.country_table.c.country_id,
                    self.country_table.c.country_name),
                Level('store', self.store_table.c.store_id,
                    self.store_table.c.store_name)])])

        self.product_dim = Dimension('product', [
            Hierarchy('default', [
                Level('category',
                    self.product_category_table.c.product_category_id,
                    self.product_category_table.c
                    .product_category_name),
                Level('product', self.product_table.c.product_id,
                    self.product_table.c.product_name)])])

        self.time_dim = TimeDimension('time', self.facts_table.c.date,
                ['year', 'month', 'day'])

        unit_price = Measure('Unit Price', self.facts_table.c.price,
                aggregates.avg)
        quantity = Measure('Quantity', self.facts_table.c.qty, aggregates.sum)
        price = ((unit_price.aggregate_with(None) *
                quantity.aggregate_with(None))
                .aggregate_with(aggregates.sum).label('Price'))

        self.cube = Cube(self.metadata, self.facts_table, [self.store_dim,
            self.product_dim, self.time_dim], [unit_price, quantity, price],
            fact_count_column=self.facts_table.c.qty)

        self.region_table.insert({'region_id': 1, 'region_name':
            'Europe'}).execute()

        self.country_table.insert({'region_id': 1, 'country_name':
            'France', 'country_id': 1}).execute()

        self.country_table.insert({'region_id': 1, 'country_name':
            'Germany', 'country_id': 2}).execute()

        self.region_table.insert({'region_id': 2, 'region_name':
            'America'}).execute()

        self.country_table.insert({'region_id': 2, 'country_name':
            'USA', 'country_id': 3}).execute()

        self.country_table.insert({'region_id': 2, 'country_name':
            'Canada', 'country_id': 4}).execute()

        self.store_table.insert({
            'store_id': 1,
            'store_name': 'ACME.fr',
            'country_id': 1}).execute()

        self.store_table.insert({
            'store_id': 2,
            'store_name': 'ACME.de',
            'country_id': 2}).execute()

        self.store_table.insert({
            'store_id': 3,
            'store_name': 'Food Mart.fr',
            'country_id': 1}).execute()

        self.store_table.insert({
            'store_id': 4,
            'store_name': 'Food Mart.de',
            'country_id': 2}).execute()

        self.store_table.insert({
            'store_id': 5,
            'store_name': 'ACME.us',
            'country_id': 3}).execute()

        self.store_table.insert({
            'store_id': 6,
            'store_name': 'Food Mart.us',
            'country_id': 3}).execute()

        self.store_table.insert({
            'store_id': 7,
            'store_name': 'ACME.ca',
            'country_id': 4}).execute()

        self.store_table.insert({
            'store_id': 8,
            'store_name': 'Food Mart.ca',
            'country_id': 4}).execute()

        self.product_category_table.insert({
            'product_category_id': 1,
            'product_category_name': 'Vegetables'}).execute()

        self.product_category_table.insert({
            'product_category_id': 2,
            'product_category_name': 'Shoes'}).execute()

        self.product_table.insert({
            'product_id': 1,
            'product_category_id': 1,
            'product_name': 'Carrots'}).execute()
        self.product_table.insert({
            'product_id': 2,
            'product_category_id': 1,
            'product_name': 'Bananas'}).execute()
        self.product_table.insert({
            'product_id': 3,
            'product_category_id': 2,
            'product_name': 'Red shoes'}).execute()
        self.product_table.insert({
            'product_id': 4,
            'product_category_id': 2,
            'product_name': 'Green shoes'}).execute()
        self.product_table.insert({
            'product_id': 5,
            'product_category_id': 2,
            'product_name': 'Blue shoes'}).execute()

        years = cycle([2009, 2010, 2011])
        months = cycle([1, 5, 8, 9, 11])
        days = cycle([3, 12, 21, 29])
        prices = iter(cycle([100, 500, 1000]))
        quantities = iter(cycle([1, 5, 1, 2, 3, 20, 8]))
        values = iter((date(*value) for value in izip(years, months, days)))
        for value in self.product_table.select().with_only_columns([
            self.product_table.c.product_id,
            self.store_table.c.store_id]).execute():
            self.facts_table.insert({
                'product_id': value.product_id,
                'store_id': value.store_id,
                'date': next(values),
                'qty': next(quantities),
                'price': next(prices)}).execute()
        results = (self.facts_table.select().with_only_columns([
                (func.sum(self.facts_table.c.price *
                    self.facts_table.c.qty) /
                    func.sum(self.facts_table.c.qty))
                    .label('Unit Price'),
                func.sum(self.facts_table.c.qty).label('Quantity'),
                func.sum(self.facts_table.c.qty).label('fact_count'),
                self.facts_table.c.product_id.label('product_product'),
                self.facts_table.c.store_id.label('store_store'),
                func.date_trunc('month',
                    self.facts_table.c.date).label('time_month')])
            .group_by(func.date_trunc('month', self.facts_table.c.date),
                self.facts_table.c.product_id,
                self.facts_table.c.store_id)
            .execute())
        for res in results:
            self.agg_by_month_table.insert().execute(dict(res))
        second_agg = (self.facts_table.select().with_only_columns([
            (func.sum(self.facts_table.c.price *
                    self.facts_table.c.qty) /
                    func.sum(self.facts_table.c.qty))
                    .label('Unit Price'),
            func.sum(self.facts_table.c.qty).label('Quantity'),
            func.sum(self.facts_table.c.qty).label('fact_count'),
            self.facts_table.c.product_id.label('product_product'),
            self.store_table.c.country_id.label('store_country'),
            func.date_trunc('year',
                self.facts_table.c.date).label('time_year')])
            .where(self.facts_table.c.store_id == self.store_table.c.store_id)
            .group_by(self.facts_table.c.product_id.label('product_product'),
            self.store_table.c.country_id.label('store_country'),
            func.date_trunc('year',
                self.facts_table.c.date).label('time_year'))
            .execute())
        for res in second_agg:
            self.agg_by_year_country_table.insert().execute(dict(res))
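
Both rollup tables key their time column on the output of date_trunc, so monthly rows land on the first of the month and yearly rows on January 1st. Any probe against them has to normalise its date the same way; a hedged usage sketch (not part of the original test suite):

from datetime import date

probe = date(2010, 5, 17)
bucket = probe.replace(day=1)  # date_trunc('month', ...) stores the first of the month
rows = (self.agg_by_month_table.select()
        .where(self.agg_by_month_table.c.time_month == bucket)
        .execute().fetchall())
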
Example #34
0
    def get(self, project_id):
        project = Project.get(project_id)
        if not project:
            return '', 404

        args = self.parser.parse_args()

        points = args.points or POINTS_DEFAULT[args.resolution]

        if args.from_date:
            date_end = datetime.fromtimestamp(args.from_date)
        else:
            date_end = datetime.now()

        date_end = date_end.replace(
            minute=0, second=0, microsecond=0)

        if args.resolution == '1h':
            grouper = func.date_trunc('hour', Build.date_created)
            decr_res = lambda x: x - timedelta(hours=1)
        elif args.resolution == '1d':
            grouper = func.date_trunc('day', Build.date_created)
            date_end = date_end.replace(hour=0)
            decr_res = lambda x: x - timedelta(days=1)
        elif args.resolution == '1w':
            grouper = func.date_trunc('week', Build.date_created)
            date_end = date_end.replace(hour=0)
            date_end -= timedelta(days=date_end.weekday())
            decr_res = decr_week
        elif args.resolution == '1m':
            grouper = func.date_trunc('month', Build.date_created)
            date_end = date_end.replace(hour=0, day=1)
            decr_res = decr_month

        if args.agg:
            value = getattr(func, args.agg)(ItemStat.value)
        else:
            value = func.avg(ItemStat.value)

        date_begin = date_end.replace()
        for _ in xrange(points):
            date_begin = decr_res(date_begin)

        # TODO(dcramer): put minimum date bounds
        results = dict(db.session.query(
            grouper.label('grouper'),
            value.label('value'),
        ).filter(
            ItemStat.item_id == Build.id,
            ItemStat.name == args.stat,
            Build.project_id == project.id,
            Build.date_created >= date_begin,
            Build.date_created < date_end,
        ).group_by('grouper'))

        data = []
        cur_date = date_end.replace()
        for _ in xrange(points):
            cur_date = decr_res(cur_date)
            data.append({
                'time': int(float(cur_date.strftime('%s.%f')) * 1000),
                'value': int(float(results.get(cur_date, 0))),
            })
        data.reverse()

        return self.respond(data, serialize=False)
Example #35
0
def calculate_transfer_stats(total_time_series=False):

    total_distributed = db.session.query(func.sum(models.CreditTransfer.transfer_amount).label('total'))\
        .filter(models.CreditTransfer.transfer_type == models.TransferTypeEnum.DISBURSEMENT).first().total

    total_spent = db.session.query(func.sum(models.CreditTransfer.transfer_amount).label('total'))\
        .filter(models.CreditTransfer.transfer_type == models.TransferTypeEnum.PAYMENT).first().total

    total_beneficiaries = db.session.query(
        models.User).filter(models.User.is_beneficiary == True).count()

    total_vendors = db.session.query(models.User)\
        .filter(models.User.is_vendor == True).count()

    total_users = total_beneficiaries + total_vendors

    has_transferred_count = db.session.query(func.count(func.distinct(models.CreditTransfer.sender_user_id))
        .label('transfer_count'))\
        .filter(models.CreditTransfer.transfer_type == models.TransferTypeEnum.PAYMENT).first().transfer_count

    # zero_balance_count = db.session.query(func.count(models.TransferAccount.id).label('zero_balance_count'))\
    #     .filter(models.TransferAccount.balance == 0).first().zero_balance_count

    exhausted_balance_count = db.session.query(func.count(func.distinct(models.CreditTransfer.sender_transfer_account_id))
        .label('transfer_count')) \
        .join(models.CreditTransfer.sender_transfer_account)\
        .filter(models.CreditTransfer.transfer_type == models.TransferTypeEnum.PAYMENT) \
        .filter(models.TransferAccount.balance == 0).first().transfer_count

    daily_transaction_volume = db.session.query(func.sum(models.CreditTransfer.transfer_amount).label('volume'),
                 func.date_trunc('day', models.CreditTransfer.created).label('date'))\
        .group_by(func.date_trunc('day', models.CreditTransfer.created))\
        .filter(models.CreditTransfer.transfer_type == models.TransferTypeEnum.PAYMENT).all()

    daily_disbursement_volume = db.session.query(func.sum(models.CreditTransfer.transfer_amount).label('volume'),
                                                func.date_trunc('day', models.CreditTransfer.created).label('date')) \
        .group_by(func.date_trunc('day', models.CreditTransfer.created)) \
        .filter(models.CreditTransfer.transfer_type == models.TransferTypeEnum.DISBURSEMENT).all()

    try:
        master_wallet_balance = master_wallet_funds_available()
    except BlockchainError:
        master_wallet_balance = 0

    try:
        last_day = daily_transaction_volume[0].date
        last_day_volume = daily_transaction_volume[0].volume
        transaction_vol_list = [{
            'date': item.date.isoformat(),
            'volume': item.volume
        } for item in daily_transaction_volume]
    except IndexError:  # No transactions
        last_day = datetime.datetime.utcnow()
        last_day_volume = 0
        has_transferred_count = 0
        transaction_vol_list = [{
            'date': datetime.datetime.utcnow().isoformat(),
            'volume': 0
        }]

    try:
        last_day_disbursement_volume = daily_disbursement_volume[0].volume
        disbursement_vol_list = [{
            'date': item.date.isoformat(),
            'volume': item.volume
        } for item in daily_disbursement_volume]
    except IndexError:
        last_day_disbursement_volume = 0
        disbursement_vol_list = [{
            'date': datetime.datetime.utcnow().isoformat(),
            'volume': 0
        }]

    data = {
        'total_distributed': total_distributed,
        'total_spent': total_spent,
        'has_transferred_count': has_transferred_count,
        'zero_balance_count': exhausted_balance_count,
        'total_beneficiaries': total_beneficiaries,
        'total_users': total_users,
        'master_wallet_balance': master_wallet_balance,
        'daily_transaction_volume': transaction_vol_list,
        'daily_disbursement_volume': disbursement_vol_list,
        'last_day_volume': {
            'date': last_day.isoformat(),
            'volume': last_day_volume
        }
    }

    return data
Example #36
0
 def date_of_posting(cls):
     return cast(func.date_trunc('day', cls.posted), DATE)
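
Out of context this reads as the SQL half of a hybrid attribute: truncate to the day, then cast back to DATE so comparisons behave like plain dates. A plausible reconstruction of the surrounding class (hypothetical model, not from the original project):

from sqlalchemy import Column, DateTime, Integer, DATE, cast, func
from sqlalchemy.ext.declarative import declarative_base
from sqlalchemy.ext.hybrid import hybrid_property

Base = declarative_base()

class Post(Base):
    __tablename__ = 'post'
    id = Column(Integer, primary_key=True)
    posted = Column(DateTime)

    @hybrid_property
    def date_of_posting(self):
        return self.posted.date()  # Python-side access on an instance

    @date_of_posting.expression
    def date_of_posting(cls):
        return cast(func.date_trunc('day', cls.posted), DATE)  # SQL-side expression

With such a class, a filter like Post.date_of_posting == some_date renders against the truncated column.
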
Example #37
0
def groupby_created(precision='month'):
    conn = sa.connect()
    month = func.date_trunc(precision, users.c.created).label('month')
    q = select([month, func.count(users.c.id)]).group_by('month').order_by('month')
    #return conn.execute(q).fetchall()
    return [(dt.strftime('%b %Y'), num) for (dt, num) in conn.execute(q).fetchall()]
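
The '%b %Y' label only suits monthly buckets; if other precisions are passed, the label format should follow along. A hedged variant (format strings are illustrative):

FORMATS = {'day': '%d %b %Y', 'month': '%b %Y', 'year': '%Y'}

def format_bucket(dt, precision='month'):
    return dt.strftime(FORMATS[precision])
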
Example #38
0
    def __init__(self, group_strategy, timeseries_unit = 'day', token=None, date_filter_attributes=None):
        self.filterable_attributes = [DATE, CUSTOM_ATTRIBUTE, TRANSFER_ACCOUNT, CREDIT_TRANSFER, USER]
        self.timeseries_unit = timeseries_unit
        self.date_filter_attributes = date_filter_attributes
        self.metrics = []

        total_amount_query = db.session.query(func.sum(CreditTransfer.transfer_amount).label('total'))
        self.metrics.append(metric.Metric(
            metric_name='total_distributed',
            query=total_amount_query,
            object_model=CreditTransfer,
            stock_filters=[filters.disbursement_filters],
            caching_combinatory_strategy=metrics_cache.SUM,
            filterable_by=self.filterable_attributes,
            bypass_user_filters=True,
        ))

        self.metrics.append(metric.Metric(
            metric_name='total_reclaimed',
            query=total_amount_query,
            object_model=CreditTransfer,
            stock_filters=[filters.reclamation_filters],
            caching_combinatory_strategy=metrics_cache.SUM,
            filterable_by=self.filterable_attributes,
            bypass_user_filters=True,
        ))

        self.metrics.append(metric.Metric(
            metric_name='total_withdrawn',
            query=total_amount_query,
            object_model=CreditTransfer,
            stock_filters=[filters.withdrawal_filters],
            caching_combinatory_strategy=metrics_cache.SUM,
            filterable_by=self.filterable_attributes,
            bypass_user_filters=True,
        ))

        # Timeseries Metrics
        if group_strategy:
            transaction_volume_timeseries_query = group_strategy.build_query_group_by_with_join(db.session.query(func.sum(CreditTransfer.transfer_amount).label('volume'),
                    func.date_trunc(self.timeseries_unit, self.date_filter_attributes[CreditTransfer]).label('date'), group_strategy.group_by_column).group_by(func.date_trunc(self.timeseries_unit, self.date_filter_attributes[CreditTransfer])), CreditTransfer)
            aggregated_transaction_volume_query = group_strategy.build_query_group_by_with_join(db.session.query(func.sum(CreditTransfer.transfer_amount).label('volume'), group_strategy.group_by_column), CreditTransfer)
        else:
            transaction_volume_timeseries_query = db.session.query(func.sum(CreditTransfer.transfer_amount).label('volume'),
                    func.date_trunc(self.timeseries_unit, self.date_filter_attributes[CreditTransfer]).label('date')).group_by(func.date_trunc(self.timeseries_unit, self.date_filter_attributes[CreditTransfer]))
            aggregated_transaction_volume_query = None
        total_transaction_volume_query = db.session.query(func.sum(CreditTransfer.transfer_amount).label('volume'))

        self.metrics.append(metric.Metric(
            metric_name='all_payments_volume',
            is_timeseries=True,
            query=transaction_volume_timeseries_query,
            aggregated_query=aggregated_transaction_volume_query,
            total_query=total_transaction_volume_query,
            object_model=CreditTransfer,
            timeseries_caching_combinatory_strategy=metrics_cache.SUM_OBJECTS,
            caching_combinatory_strategy=metrics_cache.QUERY_ALL,
            filterable_by=self.filterable_attributes,
            query_actions=[FORMAT_TIMESERIES],
            aggregated_query_actions=[FORMAT_AGGREGATE_METRICS],
            total_query_actions=[GET_FIRST],
            value_type=CURRENCY,
            token=token
        ))

        self.metrics.append(metric.Metric(
            metric_name='transfer_amount_per_user',
            is_timeseries=True,
            query=transaction_volume_timeseries_query,
            aggregated_query=aggregated_transaction_volume_query,
            total_query=total_transaction_volume_query,
            object_model=CreditTransfer,
            stock_filters=[filters.standard_payment_filters],
            timeseries_caching_combinatory_strategy=metrics_cache.SUM_OBJECTS,
            caching_combinatory_strategy=metrics_cache.QUERY_ALL,
            filterable_by=self.filterable_attributes,
            query_actions=[CALCULATE_TIMESERIES_PER_USER, FORMAT_TIMESERIES], # Add per user
            aggregated_query_actions=[CALCULATE_AGGREGATE_PER_USER, FORMAT_AGGREGATE_METRICS],
            total_query_actions=[GET_FIRST, CALCULATE_TOTAL_PER_USER],
            value_type=CURRENCY,
            token=token
        ))

        if group_strategy:
            transaction_count_timeseries_query = group_strategy.build_query_group_by_with_join(db.session.query(func.count(CreditTransfer.id).label('volume'),
                    func.date_trunc(self.timeseries_unit, self.date_filter_attributes[CreditTransfer]).label('date'), group_strategy.group_by_column).group_by(func.date_trunc(self.timeseries_unit, self.date_filter_attributes[CreditTransfer])), CreditTransfer)
            aggregated_transaction_count_query = group_strategy.build_query_group_by_with_join(db.session.query(func.count(CreditTransfer.id).label('volume'), group_strategy.group_by_column), CreditTransfer)
        else:
            transaction_count_timeseries_query = db.session.query(func.count(CreditTransfer.id).label('volume'),
                    func.date_trunc(self.timeseries_unit, self.date_filter_attributes[CreditTransfer]).label('date')).group_by(func.date_trunc(self.timeseries_unit, self.date_filter_attributes[CreditTransfer]))
            aggregated_transaction_count_query = None
        total_transaction_count_query = db.session.query(func.count(CreditTransfer.id).label('volume'))
        self.metrics.append(metric.Metric(
            metric_name='daily_transaction_count',
            is_timeseries=True,
            query=transaction_count_timeseries_query,
            aggregated_query=aggregated_transaction_count_query,
            total_query=total_transaction_count_query,
            object_model=CreditTransfer,
            stock_filters=[filters.standard_payment_filters],
            timeseries_caching_combinatory_strategy=metrics_cache.SUM_OBJECTS,
            caching_combinatory_strategy=metrics_cache.QUERY_ALL,
            filterable_by=self.filterable_attributes,
            query_actions=[FORMAT_TIMESERIES],
            aggregated_query_actions=[FORMAT_AGGREGATE_METRICS],
            total_query_actions=[GET_FIRST],
            value_type=COUNT
        ))
        
        self.metrics.append(metric.Metric(
            metric_name='trades_per_user',
            is_timeseries=True,
            query=transaction_count_timeseries_query,
            aggregated_query=aggregated_transaction_count_query,
            total_query=total_transaction_count_query,
            object_model=CreditTransfer,
            stock_filters=[filters.standard_payment_filters],
            timeseries_caching_combinatory_strategy=metrics_cache.SUM_OBJECTS,
            caching_combinatory_strategy=metrics_cache.QUERY_ALL,
            filterable_by=self.filterable_attributes,
            query_actions=[CALCULATE_TIMESERIES_PER_USER, FORMAT_TIMESERIES], # Add per user
            aggregated_query_actions=[CALCULATE_AGGREGATE_PER_USER, FORMAT_AGGREGATE_METRICS],
            total_query_actions=[GET_FIRST, CALCULATE_TOTAL_PER_USER],
            value_type=COUNT_AVERAGE,
        ))

        if group_strategy:
            active_users_timeseries_query = group_strategy.build_query_group_by_with_join(db.session.query(func.count(func.distinct(CreditTransfer.sender_user_id)).label('volume'),
                    func.date_trunc(self.timeseries_unit, self.date_filter_attributes[CreditTransfer]).label('date'), group_strategy.group_by_column).group_by(func.date_trunc(self.timeseries_unit, self.date_filter_attributes[CreditTransfer])), CreditTransfer)
            aggregated_active_users_query = group_strategy.build_query_group_by_with_join(db.session.query(func.count(func.distinct(CreditTransfer.sender_user_id)).label('volume'), group_strategy.group_by_column), CreditTransfer)
        else:
            active_users_timeseries_query = db.session.query(func.count(func.distinct(CreditTransfer.sender_user_id)).label('volume'),
                func.date_trunc(self.timeseries_unit, self.date_filter_attributes[CreditTransfer]).label('date')).group_by(func.date_trunc(self.timeseries_unit, self.date_filter_attributes[CreditTransfer]))
            aggregated_active_users_query = None
        total_transaction_volume_query = db.session.query(func.count(func.distinct(CreditTransfer.sender_user_id)).label('volume'))
        self.metrics.append(metric.Metric(
            metric_name='users_who_made_purchase',
            is_timeseries=True,
            query=active_users_timeseries_query,
            aggregated_query=aggregated_active_users_query,
            total_query=total_transaction_volume_query,
            object_model=CreditTransfer,
            #stock_filters=[filters.beneficiary_filters], # NOTE: Do we want this filter?
            stock_filters=[],
            timeseries_caching_combinatory_strategy=metrics_cache.SUM_OBJECTS,
            caching_combinatory_strategy=metrics_cache.QUERY_ALL,
            filterable_by=self.filterable_attributes,
            query_actions=[FORMAT_TIMESERIES],
            aggregated_query_actions=[FORMAT_AGGREGATE_METRICS],
            total_query_actions=[GET_FIRST],
            value_type=COUNT,
        ))
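
The grouped timeseries query above is rebuilt almost verbatim for the volume, count, and distinct-sender metrics, with only the aggregated column changing. A hedged refactor sketch of the shared shape, reusing the group_strategy API from this class:

from sqlalchemy import func

def timeseries_query(agg_column, unit, date_attr, group_strategy=None):
    """Build a (volume, date[, group]) query grouped by date_trunc(unit, date_attr)."""
    bucket = func.date_trunc(unit, date_attr).label('date')
    if group_strategy:
        q = db.session.query(agg_column.label('volume'), bucket,
                             group_strategy.group_by_column).group_by(bucket)
        return group_strategy.build_query_group_by_with_join(q, CreditTransfer)
    return db.session.query(agg_column.label('volume'), bucket).group_by(bucket)
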
Example #39
0
File: util.py Project: Kozea/pypet
 def partial_trunc(column):
     return func.date_trunc(time_slice, column)
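
Pinning the unit once keeps the rest of the query code unit-agnostic; the closure above is roughly functools.partial applied to func.date_trunc:

from functools import partial
from sqlalchemy import func

trunc_month = partial(func.date_trunc, 'month')
# trunc_month(facts_table.c.date) builds date_trunc('month', facts_table.date)
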
Example #40
0
def admin_graph(request):
    graph_name = request.matchdict['name']

    try:
        import pygal
    except ImportError:
        raise HTTPNotFound()

    def get(name, default=None, type=str):
        try:
            return type(request.GET.get(name, default))
        except ValueError:
            raise HTTPBadRequest()

    def graphround(f):
        """ round(), then int() if f is integer """
        f = round(f, 4)
        if f % 1 == 0:
            f = int(f)
        return f

    pygalopts = {
        'js': [
            request.static_url('ccvpn:static/pygal/svg.jquery.js'),
            request.static_url('ccvpn:static/pygal/pygal-tooltips.js')
        ]
    }

    period = get('period', 'm')
    if period == 'm':
        period_time = timedelta(days=30)
    elif period == 'y':
        period_time = timedelta(days=365)
    else:
        raise HTTPBadRequest()  # unknown period would leave period_time unbound

    if graph_name == 'users':
        period = get('period', 'm')

        chart = pygal.Line(fill=True,
                           x_label_rotation=75,
                           show_legend=False,
                           **pygalopts)
        chart.title = 'Users (%s)' % period
        chart.x_labels = []
        values = []
        gen = last_days(30) if period == 'm' else last_months(12)
        users = DBSession.query(User).all()

        for m in gen:
            filter_ = time_filter_future(period, m, lambda o: o.signup_date)
            users_filtered = filter(filter_, users)
            values.append(len(list(users_filtered)))
            chart.x_labels.append('%s/%s/%s' % (m.year, m.month, m.day))

        chart.add('Users', values)
        return Response(chart.render(), content_type='image/svg+xml')

    elif graph_name == 'income':
        currency = get('currency')

        if currency == 'eur':
            graph_methods = (methods.PaypalMethod, methods.StripeMethod)
        elif currency == 'btc':
            graph_methods = (methods.BitcoinMethod, )
        else:
            raise HTTPNotFound()

        chart = pygal.StackedBar(x_label_rotation=75,
                                 show_legend=True,
                                 **pygalopts)

        chart.title = 'Income (%s, %s)' % (currency, period)
        orders = DBSession.query(Order) \
            .filter(Order.start_date > datetime.now() - period_time) \
            .filter(Order.paid == True) \
            .filter(or_(*(Order.method == m.id for m in graph_methods))) \
            .all()

        # Prepare value dict
        values = {}
        for order in orders:
            t = order.method
            if t not in values:
                values[t] = []

        chart.x_labels = []
        gen = last_days(30) if period == 'm' else last_months(12)
        for m in gen:
            filter_ = time_filter(period, m, lambda o: o.start_date)
            orders_date = list(filter(filter_, orders))

            for method in values.keys():
                filter_ = lambda o: o.method == method
                orders_dd = list(filter(filter_, orders_date))
                sum_ = sum(o.paid_amount for o in orders_dd)
                values[method].append(graphround(sum_) or None)

            chart.x_labels.append('%s' % m)

        for method, v in values.items():
            label = request.payment_methods[method].name
            chart.add(label, v)
        return Response(chart.render(), content_type='image/svg+xml')
    elif graph_name == 'sessions':
        chart = pygal.StackedBar(x_label_rotation=75,
                                 show_legend=True,
                                 **pygalopts)
        chart.title = 'Sessions (%s)' % (period)

        cdate = func.date_trunc('day' if period == 'm' else 'month',
                                VPNSession.connect_date).label('cdate')
        counts = DBSession.query(VPNSession.gateway_id,
                                 VPNSession.gateway_version,
                                 func.count(VPNSession.id).label('count'),
                                 cdate) \
                          .group_by(VPNSession.gateway_id) \
                          .group_by(VPNSession.gateway_version) \
                          .group_by(cdate) \
                          .all()

        values = {}  # gw_key / date / count
        for item in counts:
            gw_key = str(item.gateway_id) + '/' + str(item.gateway_version)
            if gw_key not in values:
                values[gw_key] = {}
            values[gw_key][item.cdate.date()] = item.count

        chart.x_labels = []

        values2 = {}
        gen = last_days(30) if period == 'm' else last_months(12)
        for m in gen:
            for gw_key, dates in values.items():
                if gw_key not in values2:
                    values2[gw_key] = {}
                values2[gw_key][m] = dates.get(m)

            chart.x_labels.append('%s' % m)

        for gw_key, dates in values2.items():
            label = gw_key
            sorted_dates = sorted(dates.items())
            sorted_counts = map(lambda x: x[1], sorted_dates)
            chart.add(label, sorted_counts)
        return Response(chart.render(), content_type='image/svg+xml')
    else:
        raise HTTPNotFound()