def get_chart_data(self, project_id, query_date):
        """Return per-day run statistics for `project_id` as a list of dicts.

        Covers the CHART_DATA_LIMIT days ending at `query_date`, newest
        first; days with no FlakyTestStat rows report zero counts.
        """
        # generate_series yields one row per day in the window, so every day
        # appears even when the outer join below finds no stats for it.
        calendar = db.session.query(
            func.generate_series(
                query_date - timedelta(days=CHART_DATA_LIMIT - 1), query_date,
                timedelta(days=1)).label('day')).subquery()

        # Sum each day's counters; COALESCE turns the NULLs produced by the
        # outer join (days with no matching rows) into zeroes.
        historical_data = db.session.query(
            calendar.c.day, func.coalesce(func.sum(FlakyTestStat.flaky_runs),
                                          0),
            func.coalesce(func.sum(FlakyTestStat.double_reruns), 0),
            func.coalesce(func.sum(FlakyTestStat.passing_runs), 0)).outerjoin(
                FlakyTestStat,
                and_(calendar.c.day == FlakyTestStat.date,
                     FlakyTestStat.project_id == project_id)).order_by(
                         calendar.c.day.desc()).group_by(calendar.c.day)

        chart_data = []
        for d, flaky_runs, double_reruns, passing_runs in historical_data:
            chart_data.append({
                'date': str(d.date()),
                'flaky_runs': flaky_runs,
                'double_reruns': double_reruns,
                'passing_runs': passing_runs
            })

        return chart_data
Пример #2
0
def selectReportMonthDetails(userId, year, month, startDay, endDay):
    """DB query returning each day of the selected month with its work hours.

    :param userId: registered user ID
    :param year: year being reported
    :param month: month being reported
    :param startDay: first day of the month
    :param endDay: last day of the month
    """
    # Day offsets from today: generate_series over the integer differences
    # (date - CURRENT_DATE), producing one value per day of the month.
    subq1 = db.session.query(
        func.generate_series(
            func.date(startDay) - func.CURRENT_DATE(),
            func.date(endDay) - func.CURRENT_DATE()).label('i')).subquery()

    # Convert each offset back into a day-of-month number (1..31).
    subq2 = db.session.query(
        func.cast(func.date_part('day',
                                 func.CURRENT_DATE() + subq1.c.i),
                  Integer).label('day')).subquery()

    # Outer-join against the work records so days with no entry still appear
    # (with NULL rest_flg/workTime). workTime is the formatted total of
    # normal + overtime + holiday hours.
    monthDetails = db.session.query(
        subq2.c.day,
        __model.rest_flg,
        db.func.to_char(
            __model.normal_working_hours + __model.overtime_hours +
            __model.holiday_work_hours, '999D99').label('workTime'),
    ).outerjoin(
        __model,
        and_(subq2.c.day == __model.work_day, __model.user_id == userId,
             __model.work_year == year,
             __model.work_month == month)).order_by(subq2.c.day).all()

    return monthDetails
Пример #3
0
    def test_from_function(self):
        # A set-returning function wrapped in lateral() should render as
        # "LATERAL generate_series(...) AS anon_1" in the FROM clause.
        bookcases = self.tables.bookcases
        srf = lateral(func.generate_series(1, bookcases.c.bookcase_shelves))

        # ON true: the LATERAL correlation itself constrains the rows, so the
        # explicit join condition is just a constant.
        self.assert_compile(
            select([bookcases]).select_from(bookcases.join(srf, true())),
            "SELECT bookcases.bookcase_id, bookcases.bookcase_owner_id, "
            "bookcases.bookcase_shelves, bookcases.bookcase_width "
            "FROM bookcases JOIN "
            "LATERAL generate_series(:generate_series_1, "
            "bookcases.bookcase_shelves) AS anon_1 ON true")
Пример #4
0
    def test_from_function(self):
        # Verify that a set-returning function wrapped in lateral() compiles
        # to a "LATERAL generate_series(...) AS anon_1" FROM element.
        bookcases = self.tables.bookcases
        series = lateral(
            func.generate_series(1, bookcases.c.bookcase_shelves))

        expected_sql = (
            "SELECT bookcases.bookcase_id, bookcases.bookcase_owner_id, "
            "bookcases.bookcase_shelves, bookcases.bookcase_width "
            "FROM bookcases JOIN "
            "LATERAL generate_series(:generate_series_1, "
            "bookcases.bookcase_shelves) AS anon_1 ON true"
        )
        # ON true: the lateral correlation carries the row restriction.
        self.assert_compile(
            select([bookcases]).select_from(bookcases.join(series, true())),
            expected_sql,
        )
Пример #5
0
def counthistory(page):
    '''
    Count history for all shelters for the past 14 days.
    Used for chart showing counts over time.
    Supports pagination with page in path
    '''
    tz = Prefs['timezone']

    pagesize = 14  # days
    # Oldest day (inclusive) covered by the requested page.
    daysback = int(page) * pagesize + pagesize - 1

    # Window boundaries: page 0 ends today, page 1 ends 14 days ago, etc.
    today = pendulum.today(tz).subtract(days=(int(page) * pagesize))
    backthen = pendulum.today(tz).subtract(days=daysback)

    # Postgres generate_series emits one row per day in the window, so the
    # outer join below produces an entry even for days with no count.
    date_list = func.generate_series(cast(backthen.to_date_string(), Date),
                                     cast(today.to_date_string(), Date),
                                     '1 day').alias('gen_day')

    # NOTE(review): array_agg has no explicit ORDER BY here; the 'data' array
    # presumably follows the generated series order — confirm on the target DB.
    time_series = db.session.query(Shelter.name.label('label'), func.array_agg(Count.personcount).label('data'))\
        .join(date_list, true())\
        .outerjoin(Count, (Count.day == column('gen_day')) & (Count.shelter_id == Shelter.id))

    # Anonymous users, or users holding neither the admin nor visitor role,
    # only see shelters that are both visible and public.
    if 'user' not in g or set(['admin', 'visitor']).isdisjoint(
            set([role.name for role in g.user.roles])):
        time_series = time_series.filter(Shelter.visible == True,
                                         Shelter.public)
    else:
        time_series = time_series.filter(Shelter.visible == True)

    time_series = time_series\
        .group_by(Shelter.name)\
        .order_by(Shelter.name)

    # Iterating the pendulum period (today - backthen) yields one date per
    # day, matching the SQL date series above.
    results = {
        "dates": [d.to_date_string() for d in (today - backthen)],
        "shelters": [row._asdict() for row in time_series]
    }
    return jsonify(results)
Пример #6
0
    def get_chart_data(self, project_id, query_date):
        """Return per-day run statistics for `project_id` as a list of dicts.

        Covers the CHART_DATA_LIMIT days ending at `query_date`, newest
        first; days with no FlakyTestStat rows report zero counts.
        """
        window_start = query_date - timedelta(days=CHART_DATA_LIMIT - 1)

        # One generated row per day keeps empty days present in the join.
        day_series = db.session.query(
            func.generate_series(
                window_start, query_date, timedelta(days=1)
            ).label('day')
        ).subquery()

        # COALESCE maps the NULL sums of unmatched days to zero.
        rows = db.session.query(
            day_series.c.day,
            func.coalesce(func.sum(FlakyTestStat.flaky_runs), 0),
            func.coalesce(func.sum(FlakyTestStat.double_reruns), 0),
            func.coalesce(func.sum(FlakyTestStat.passing_runs), 0),
        ).outerjoin(
            FlakyTestStat,
            and_(
                day_series.c.day == FlakyTestStat.date,
                FlakyTestStat.project_id == project_id,
            ),
        ).group_by(day_series.c.day).order_by(day_series.c.day.desc())

        return [
            {
                'date': str(day.date()),
                'flaky_runs': flaky,
                'double_reruns': double,
                'passing_runs': passing,
            }
            for day, flaky, double, passing in rows
        ]
Пример #7
0
def generate_date_series(start_date=None, end_date=None, interval='1 month',
                         granularity='1 day'):
    """
    Build a ``generate_series`` date expression for grouping queries into
    date "buckets".

    The resulting query looks something like this::

        generate_series(DATE '2017-04-17', DATE '2017-05-07', '1 day')

    Args:
        start_date (datetime): Start date of the series.
        end_date (datetime): End date of the series.
        interval (str): Alternately, the length of time from either
            `start_date` or `end_date` expressed as a Postgres interval.
            Defaults to '1 month'.
        granularity (str): The granularity of each date "bucket" expressed
            as a Postgres interval. Defaults to '1 day'.

    Returns:
        sqlalchemy.orm.query.Query: A date series query.
    """
    # Normalize granularity into a Postgres interval string ('day' -> '1 day').
    if not granularity:
        granularity = '1 day'
    elif not _startswith_digit_re.match(granularity):
        granularity = '1 {}'.format(granularity)

    if start_date:
        # Series anchored at start_date; pick the end point by precedence:
        # explicit end_date, then start_date + interval, then "now".
        if end_date:
            series_end = end_date
        elif interval:
            series_end = text(
                'DATE :start_date_param_1 + INTERVAL :interval_param_1',
                bindparams=[
                    bindparam('start_date_param_1', start_date),
                    bindparam('interval_param_1', interval)])
        else:
            series_end = datetime.utcnow()
        return func.generate_series(start_date, series_end, granularity)

    # No start_date: anchor the series `interval` before end_date, with
    # end_date defaulting to "now" and interval defaulting to one month.
    end_date = end_date or datetime.utcnow()
    interval = interval or '1 month'
    series_start = text(
        'DATE :end_date_param_1 - INTERVAL :interval_param_1',
        bindparams=[
            bindparam('end_date_param_1', end_date),
            bindparam('interval_param_1', interval)])
    return func.generate_series(series_start, end_date, granularity)
Пример #8
0
def query_work_day_stats(
    company_id,
    start_date=None,
    end_date=None,
    first=None,
    after=None,
    tzname="Europe/Paris",
):
    """Aggregate a company's activities into per-user, per-day work stats.

    Pipeline: explode each activity into one row per calendar day it spans
    (generate_series), aggregate per (user, day, mission), then per
    (user, day), then add derived totals. Supports keyset pagination via
    `after` (a datetime+id cursor) and page size `first`.

    Returns a tuple ``(results, has_next_page)``.
    """
    tz = gettz(tzname)
    if after:
        # Cursor decodes to (max_time, user_id); clamp end_date to it so we
        # never fetch rows past the cursor's day.
        max_time, user_id_ = parse_datetime_plus_id_cursor(after)
        max_date = max_time.date()
        end_date = min(max_date, end_date) if end_date else max_date

    # Stage 1: raw activity rows joined with their expenditures, plus a
    # "day" column exploding each activity over every local-time calendar
    # day between its start and its end (or now, if still running).
    query = (Activity.query.join(Mission).join(
        Expenditure,
        and_(
            Activity.user_id == Expenditure.user_id,
            Activity.mission_id == Expenditure.mission_id,
        ),
        isouter=True,
    ).with_entities(
        Activity.id,
        Activity.user_id,
        Activity.mission_id,
        Mission.name,
        Activity.start_time,
        Activity.end_time,
        Activity.type,
        Expenditure.id.label("expenditure_id"),
        Expenditure.type.label("expenditure_type"),
        func.generate_series(
            func.date_trunc(
                "day",
                func.timezone(
                    tzname,
                    func.timezone("UTC", Activity.start_time),
                ),
            ),
            func.timezone(
                tzname,
                func.coalesce(
                    func.timezone("UTC", Activity.end_time),
                    func.now(),
                ),
            ),
            "1 day",
        ).label("day"),
    ).filter(
        Mission.company_id == company_id,
        ~Activity.is_dismissed,
        Activity.start_time != Activity.end_time,
    ))

    query = _apply_time_range_filters(
        query,
        to_datetime(start_date, tz_for_date=tz),
        to_datetime(end_date,
                    tz_for_date=tz,
                    convert_dates_to_end_of_day_times=True),
    )

    has_next_page = False
    if first:
        # Over-fetch activities (several per work day) and peek one extra
        # row to detect whether another page exists.
        activity_first = max(first * 5, 200)
        query = query.order_by(desc("day"), desc(
            Activity.user_id)).limit(activity_first + 1)
        has_next_page = query.count() > activity_first

    query = query.subquery()

    # Stage 2: aggregate per (user, day, mission). Start/end times are
    # clipped to the day's local-time boundaries; per-activity-type
    # durations are summed. n_exp_dups / n_act_dups count the row
    # duplication introduced by the activity x expenditure join so the
    # next stage can divide it back out.
    query = (db.session.query(query).group_by(
        query.c.user_id, query.c.day, query.c.mission_id,
        query.c.name).with_entities(
            query.c.user_id.label("user_id"),
            query.c.day,
            func.timezone("UTC",
                          func.timezone(tzname,
                                        query.c.day)).label("utc_day_start"),
            query.c.mission_id.label("mission_id"),
            query.c.name.label("mission_name"),
            func.min(
                func.greatest(
                    query.c.start_time,
                    func.timezone("UTC", func.timezone(tzname, query.c.day)),
                )).label("start_time"),
            func.max(
                func.least(
                    func.timezone(
                        "UTC",
                        func.timezone(
                            tzname,
                            query.c.day + func.cast("1 day", Interval)),
                    ),
                    func.coalesce(query.c.end_time, func.now()),
                )).label("end_time"),
            func.bool_or(
                and_(
                    query.c.end_time.is_(None),
                    query.c.day == func.current_date(),
                )).label("is_running"),
            *[
                func.sum(
                    case(
                        [(
                            query.c.type == a_type.value,
                            extract(
                                "epoch",
                                func.least(
                                    func.timezone(
                                        "UTC",
                                        func.timezone(
                                            tzname,
                                            query.c.day +
                                            func.cast("1 day", Interval),
                                        ),
                                    ),
                                    func.coalesce(query.c.end_time,
                                                  func.now()),
                                ) - func.greatest(
                                    query.c.start_time,
                                    func.timezone(
                                        "UTC",
                                        func.timezone(tzname, query.c.day),
                                    ),
                                ),
                            ),
                        )],
                        else_=0,
                    )).label(f"{a_type.value}_duration")
                for a_type in ActivityType
            ],
            func.greatest(func.count(distinct(query.c.expenditure_id)),
                          1).label("n_exp_dups"),
            func.count(distinct(query.c.id)).label("n_act_dups"),
            *[
                func.sum(
                    case(
                        [(query.c.expenditure_type == e_type.value, 1)],
                        else_=0,
                    )).label(f"n_{e_type.value}_expenditures")
                for e_type in ExpenditureType
            ],
        ).subquery())

    # Stage 3: collapse missions into per (user, day) rows. Durations and
    # expenditure counts are divided by the duplication factors recorded in
    # stage 2 to undo the join fan-out.
    query = (db.session.query(query).group_by(
        query.c.user_id, query.c.day).with_entities(
            query.c.user_id.label("user_id"),
            query.c.day,
            func.array_agg(distinct(
                query.c.mission_name)).label("mission_names"),
            func.min(query.c.start_time).label("start_time"),
            func.max(query.c.end_time).label("end_time"),
            func.bool_or(query.c.is_running).label("is_running"),
            *[
                func.sum(
                    getattr(query.c, f"{a_type.value}_duration") /
                    query.c.n_exp_dups).cast(Integer).label(
                        f"{a_type.value}_duration") for a_type in ActivityType
            ],
            *[
                func.sum(
                    getattr(query.c, f"n_{e_type.value}_expenditures") /
                    query.c.n_act_dups).cast(Integer).label(
                        f"n_{e_type.value}_expenditures")
                for e_type in ExpenditureType
            ],
        ).order_by(desc("day"), desc("user_id")).subquery())

    # Stage 4: derived totals — overall service span and the sum of all
    # activity-type durations.
    query = db.session.query(query).with_entities(
        *query.c,
        extract("epoch", query.c.end_time -
                query.c.start_time).label("service_duration"),
        reduce(
            lambda a, b: a + b,
            [
                getattr(query.c, f"{a_type.value}_duration")
                for a_type in ActivityType
            ],
        ).label("total_work_duration"),
    )

    results = query.all()
    if after:
        # Enforce the keyset cursor exactly: strictly before the cursor day,
        # or same day with a smaller user id.
        results = [
            r for r in results if r.day.date() < max_date or (
                r.day.date() == max_date and r.user_id < user_id_)
        ]

    if first:
        if has_next_page:
            # The last work day may be incomplete because we didn't fetch all the activities => remove it
            results = results[:-1]
        if len(results) > first:
            results = results[:first]
            has_next_page = True

    return results, has_next_page
Пример #9
0
 # Prepare the query and the specific stuff depending on the 'whattodo'
 filter_nbs, grouping_nbs = self.prepare_filters_and_grouping()
 filter_nbs = self.generate_prefilters(filter_nbs)
 self.prepare_query()
 # Prepare and generate the filters and grouping
 self.generate_filters(filter_nbs)
 self.generate_groupings(grouping_nbs)
 # Decide of the correct granularity when displaying evolutions
 if self.granularity_criteria is None and self.query_parameters.get("whattodo", "count") in ('evolution', 'event_delta_evolution'):
     date_delta = self.end_date - self.start_date
     self.granularity = get_granularity(date_delta, self.MIN_POINTS, self.MAX_POINTS)#'month'
     self.granularity_criteria = func.date_trunc(self.granularity, self.date_criteria_field).label("Date")
 # A specific granularity has speen specified or decided, generate
 # a subquery with generate_series to avoid holes in the time
 if self.granularity_criteria is not None:
     self.subquery = self.session.query(func.generate_series(func.date_trunc(self.granularity, cast(self.start_date, DateTime())), func.date_trunc(self.granularity, cast(self.end_date, DateTime())), cast('1 %s' % self.granularity, Interval())).label('Temps')).subquery()
     self.select.insert(1,self.granularity_criteria)
     self.display_translations.insert(1, None)
     self.group_by.insert(0,self.granularity_criteria)
 ### Extras ###
 # The user specified a boundary
 extras = self.query_parameters.get("extras", {})
 if extras and 'boundary' in self.extras:
     boundary_max = extras.get('boundary', {}).get('max', None)
     classes_case = []
     if boundary_max and self.field_to_color is not None:
         classes_case.append((self.field_to_color > boundary_max, literal_column("'breaks_max_bound'",String)))
     boundary_min = extras.get('boundary', {}).get('min', None)
     if boundary_min and self.field_to_color is not None:
         classes_case.append((self.field_to_color < boundary_min, literal_column("'breaks_min_bound'",String)))
     self.select.append(case(classes_case).label("_classes_"))