Example #1
 def get(self, record_id: UUID) -> Dict:
     """
     Return a specific BRA by its ID.
     """
     with session_scope() as sess:
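         # lag()/lead() label each row with the id of the previous and next BRA
         # record for the same massif, ordered by production date.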
         global_rec = sess.query(
             BraRecord,
             func.lag(BraRecord.br_id).over(
                 order_by=BraRecord.br_production_date,
                 partition_by=BraRecord.br_massif,
             ).label("previous_bra_id"),
             func.lead(BraRecord.br_id).over(
                 order_by=BraRecord.br_production_date,
                 partition_by=BraRecord.br_massif,
             ).label("next_bra_id"),
         ).subquery()
         result_filtered = (sess.query(global_rec).filter(
             global_rec.c.br_id == record_id).first())
         record_as_dict = result_filtered._asdict()
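         # Attach the related rows (massif, risks, snow and weather data) before
         # marshalling.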
         record_as_dict["massif"] = (sess.query(Massif).filter(
             Massif.m_id == result_filtered.br_massif).first())
         record_as_dict["risks"] = sess.query(Risk).filter(
             Risk.r_record_id == result_filtered.br_id)
         record_as_dict["snow_records"] = sess.query(SnowRecord).filter(
             SnowRecord.s_bra_record == result_filtered.br_id)
         record_as_dict["fresh_snow_records"] = sess.query(
             FreshSnowRecord).filter(
                 FreshSnowRecord.fsr_bra_record == result_filtered.br_id)
         record_as_dict["weather_forecasts"] = sess.query(
             WeatherForecast).filter(
                 WeatherForecast.wf_bra_record == result_filtered.br_id)
         record_as_dict["risk_forecasts"] = sess.query(RiskForecast).filter(
             RiskForecast.rf_bra_record == result_filtered.br_id)
         return marshal(record_as_dict, bra_model)
Example #2
    def count_datasets_through_time_query(self, start, end, period, time_field,
                                          expressions):
        raw_expressions = self._alchemify_expressions(expressions)

        start_times = select((
            func.generate_series(start, end, cast(period, INTERVAL)).label('start_time'),
        )).alias('start_times')

        time_range_select = (
            select((
                func.tstzrange(
                    start_times.c.start_time,
                    func.lead(start_times.c.start_time).over()
                ).label('time_period'),
            ))
        ).alias('all_time_ranges')

        # Exclude the trailing (end time to infinite) row. Is there a simpler way?
        time_ranges = (
            select((
                time_range_select,
            )).where(
                ~func.upper_inf(time_range_select.c.time_period)
            )
        ).alias('time_ranges')

        count_query = (
            select(
                (func.count('*'),)
            ).select_from(
                self._from_expression(DATASET, expressions)
            ).where(
                and_(
                    time_field.alchemy_expression.overlaps(time_ranges.c.time_period),
                    DATASET.c.archived == None,
                    *raw_expressions
                )
            )
        )

        return select((
            time_ranges.c.time_period,
            count_query.label('dataset_count')
        ))
Example #3
 def get(self) -> List[Dict]:
     """
     Return the last record for all massifs.
     """
     with session_scope() as sess:
         results = (sess.query(
             BraRecord,
             func.lag(BraRecord.br_id).over(
                 order_by=BraRecord.br_production_date,
                 partition_by=BraRecord.br_massif,
             ).label("previous_bra_id"),
             func.lead(BraRecord.br_id).over(
                 order_by=BraRecord.br_production_date,
                 partition_by=BraRecord.br_massif,
             ).label("next_bra_id"),
         ).filter(
             BraRecord.br_production_date.cast(Date)
             == select([func.max(BraRecord.br_production_date.cast(Date))])
         ).options(
             subqueryload(BraRecord.risks),
             subqueryload(BraRecord.snow_records),
             subqueryload(BraRecord.fresh_snow_records),
             subqueryload(BraRecord.weather_forecasts),
             subqueryload(BraRecord.risk_forecasts),
             subqueryload(BraRecord.massif),
         ))
         final = []
         for res in results.all():
             encoded_bra_record = marshal(res.BraRecord, bra_model)
             encoded_bra_record["previous_bra_id"] = (str(
                 res.previous_bra_id) if res.previous_bra_id else None)
             encoded_bra_record["next_bra_id"] = (str(res.next_bra_id) if
                                                  res.next_bra_id else None)
             final.append(encoded_bra_record)
         return final
Example #4
    def count_datasets_through_time(self, start, end, period, time_field, expressions):
        """
        :type period: str
        :type start: datetime.datetime
        :type end: datetime.datetime
        :type expressions: tuple[datacube.index.postgres._fields.PgExpression]
        :rtype: list[((datetime.datetime, datetime.datetime), int)]
        """

        raw_expressions = self._alchemify_expressions(expressions)

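        # Build the time buckets: generate_series() emits one start time per period,
        # lead() pairs each start with the following one, and tstzrange() turns the
        # pair into a half-open time range.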
        start_times = select((
            func.generate_series(start, end, cast(period, INTERVAL)).label('start_time'),
        )).alias('start_times')

        time_range_select = (
            select((
                func.tstzrange(
                    start_times.c.start_time,
                    func.lead(start_times.c.start_time).over()
                ).label('time_period'),
            ))
        ).alias('all_time_ranges')

        # Exclude the trailing (end time to infinite) row. Is there a simpler way?
        time_ranges = (
            select((
                time_range_select,
            )).where(
                ~func.upper_inf(time_range_select.c.time_period)
            )
        ).alias('time_ranges')

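        # Correlated scalar subquery: counts the non-archived datasets whose time
        # range overlaps each time period.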
        count_query = (
            select(
                (func.count('*'),)
            ).select_from(
                self._from_expression(DATASET, expressions)
            ).where(
                and_(
                    time_field.alchemy_expression.overlaps(time_ranges.c.time_period),
                    DATASET.c.archived == None,
                    *raw_expressions
                )
            )
        )

        results = self._connection.execute(select((
            time_ranges.c.time_period,
            count_query.label('dataset_count')
        )))

        for time_period, dataset_count in results:
            # if not time_period.upper_inf:
            yield Range(time_period.lower, time_period.upper), dataset_count
Example #5
def get_most_selected(db: Session):
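    # lead() over timestamp DESC (partitioned by player) fetches the value from the
    # previous snapshot, so the difference is the change in selected_by_percent
    # since that snapshot.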
    subquery = db.query(
        models.Player,
        func.round((models.Player.selected_by_percent - func.lead(models.Player.selected_by_percent, 1).over(
            order_by=models.Player.timestamp.desc(),
            partition_by=models.Player.id
        )), 2).label('selected_by_percent_change')
    ) \
        .subquery()

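    # Newest snapshots first; within a snapshot, the most-selected players first.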
    return db \
        .query(subquery) \
        .order_by(subquery.c.timestamp.desc(), subquery.c.selected_by_percent.desc()) \
        .all()
Example #6
 def get(self, massif_id: UUID) -> Dict:
     """
     Return the latest record for a massif, with all the associated metadata.
     """
     with session_scope() as sess:
         request = (sess.query(
             BraRecord,
             func.lag(BraRecord.br_id).over(
                 order_by=BraRecord.br_production_date,
                 partition_by=BraRecord.br_massif,
             ).label("previous_bra_id"),
             func.lead(BraRecord.br_id).over(
                 order_by=BraRecord.br_production_date,
                 partition_by=BraRecord.br_massif,
             ).label("next_bra_id")).filter(
                 BraRecord.br_massif == massif_id).order_by(
                     BraRecord.br_production_date.desc()).limit(1))
         result = request.first()
         json = marshal(result.BraRecord, bra_model)
         json["previous_bra_id"] = result.previous_bra_id
         json["next_bra_id"] = result.next_bra_id
         return json
Example #7
 def get(self, massif_id, bra_date) -> Dict:
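     """
     Return the BRA record for a massif at a given date, or a 404 error if none exists.
     """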
     parsed_date = date.fromisoformat(bra_date)
     with session_scope() as sess:
         global_rec = sess.query(
             BraRecord,
             func.lag(BraRecord.br_id).over(
                 order_by=BraRecord.br_production_date,
                 partition_by=BraRecord.br_massif,
             ).label("previous_bra_id"),
             func.lead(BraRecord.br_id).over(
                 order_by=BraRecord.br_production_date,
                 partition_by=BraRecord.br_massif,
             ).label("next_bra_id"),
         ).subquery()
         result_filtered = (sess.query(global_rec).filter(
             global_rec.c.br_production_date.cast(Date) == parsed_date
         ).filter(global_rec.c.br_massif == massif_id).first())
         if result_filtered is None:
             return bra_api.abort(
                 404,
                 f"Record for massif id {massif_id} for the date {bra_date} could not be found."
             )
         record_as_dict = result_filtered._asdict()
         record_as_dict["massif"] = (sess.query(Massif).filter(
             Massif.m_id == result_filtered.br_massif).first())
         record_as_dict["risks"] = sess.query(Risk).filter(
             Risk.r_record_id == result_filtered.br_id)
         record_as_dict["snow_records"] = sess.query(SnowRecord).filter(
             SnowRecord.s_bra_record == result_filtered.br_id)
         record_as_dict["fresh_snow_records"] = sess.query(
             FreshSnowRecord).filter(
                 FreshSnowRecord.fsr_bra_record == result_filtered.br_id)
         record_as_dict["weather_forecasts"] = sess.query(
             WeatherForecast).filter(
                 WeatherForecast.wf_bra_record == result_filtered.br_id)
         record_as_dict["risk_forecasts"] = sess.query(RiskForecast).filter(
             RiskForecast.rf_bra_record == result_filtered.br_id)
         return marshal(record_as_dict, bra_model)
Example #8
 def get(self, massif_id: UUID) -> List[Dict]:
     """
     Return a list of records for a massif, limited to the last 50 days by default.
     """
     args = massif_record_parser.parse_args()
     with session_scope() as sess:
         results = (sess.query(
             BraRecord,
             func.lag(BraRecord.br_id).over(
                 order_by=BraRecord.br_production_date,
                 partition_by=BraRecord.br_massif,
             ).label("previous_bra_id"),
             func.lead(BraRecord.br_id).over(
                 order_by=BraRecord.br_production_date,
                 partition_by=BraRecord.br_massif,
             ).label("next_bra_id"),
         ).filter(
             BraRecord.br_production_date > (
                 func.now() - func.cast(concat(args["limit"], "DAYS"), INTERVAL))
         ).filter(
             BraRecord.br_massif == massif_id
         ).order_by(
             BraRecord.br_production_date.desc()
         ).options(
             subqueryload(BraRecord.risks),
             subqueryload(BraRecord.snow_records),
             subqueryload(BraRecord.fresh_snow_records),
             subqueryload(BraRecord.weather_forecasts),
             subqueryload(BraRecord.risk_forecasts),
             subqueryload(BraRecord.massif),
         ).all())
         final = []
         for res in results:
             encoded_bra_record = marshal(res.BraRecord, bra_model)
             encoded_bra_record["previous_bra_id"] = (str(
                 res.previous_bra_id) if res.previous_bra_id else None)
             encoded_bra_record["next_bra_id"] = (str(res.next_bra_id) if
                                                  res.next_bra_id else None)
             final.append(encoded_bra_record)
         return final
Example #9
 def get_final_states_query(cls):
     """Return query that filters complete/failed states and includes a 'duration' column."""
     # We enumerate states for each request and get 'updated' field of the following state.
     # It allows to find out time between two states.
     states = db.session.query(
         cls.request_id,
         cls.updated,
         func.lead(cls.updated,
                   1).over(partition_by=cls.request_id,
                           order_by=cls.updated).label("next_updated"),
         func.row_number().over(partition_by=cls.request_id,
                                order_by=cls.updated).label("num"),
     ).subquery()
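     # Join each complete/failed request to its first ('init') state: 'duration' is
     # the elapsed time from the first state to the final one, and 'time_in_queue'
     # is the time spent in that first state.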
     return (
         db.session.query(
             cls.request_id,
             cls.state,
             cls.state_reason,
             cls.updated,
             func.extract(
                 "epoch",
                 cls.updated.cast(TIMESTAMP) -
                 states.c.updated.cast(TIMESTAMP),
             ).label("duration"),
             func.extract(
                 "epoch",
                 states.c.next_updated.cast(TIMESTAMP) -
                 states.c.updated.cast(TIMESTAMP),
             ).label("time_in_queue"),
         ).join(states, states.c.request_id == cls.request_id).filter(
             cls.state.in_([
                 RequestStateMapping.complete.value,
                 RequestStateMapping.failed.value
             ]))
         # We need only 'init' state information here to join it with the final state.
         .filter(states.c.num == 1))
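All of the examples above rely on the same building block: func.lag() / func.lead() attach a neighbouring row's value to each row inside a window defined by partition_by and order_by. The sketch below distils that pattern; the Reading model and its columns are illustrative only (not taken from any of the projects quoted above), and SQLAlchemy 1.4+ is assumed.

# Minimal sketch of the shared lag/lead pattern (illustrative model, assumes SQLAlchemy 1.4+).
from sqlalchemy import Column, Date, Integer, func
from sqlalchemy.orm import Session, declarative_base

Base = declarative_base()


class Reading(Base):
    __tablename__ = "reading"
    id = Column(Integer, primary_key=True)
    sensor_id = Column(Integer, nullable=False)
    taken_on = Column(Date, nullable=False)


def readings_with_neighbours(session: Session):
    # For every reading, also return the ids of the previous and next reading
    # taken by the same sensor, ordered by date.
    return session.query(
        Reading,
        func.lag(Reading.id).over(
            partition_by=Reading.sensor_id,
            order_by=Reading.taken_on,
        ).label("previous_id"),
        func.lead(Reading.id).over(
            partition_by=Reading.sensor_id,
            order_by=Reading.taken_on,
        ).label("next_id"),
    ).all()

Each result row then carries the mapped object plus the previous_id / next_id labels, which is how the examples above populate fields such as previous_bra_id and next_bra_id.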