Example #1
0
 def get(self, record_id: UUID) -> Dict:
     """
     Return a specific BRA by its ID.
     """
     with session_scope() as sess:
         global_rec = sess.query(
             BraRecord,
             func.lag(BraRecord.br_id).over(
                 order_by=BraRecord.br_production_date,
                 partition_by=BraRecord.br_massif,
             ).label("previous_bra_id"),
             func.lead(BraRecord.br_id).over(
                 order_by=BraRecord.br_production_date,
                 partition_by=BraRecord.br_massif,
             ).label("next_bra_id"),
         ).subquery()
         result_filtered = (sess.query(global_rec).filter(
             global_rec.c.br_id == record_id).first())
         record_as_dict = result_filtered._asdict()
         record_as_dict["massif"] = (sess.query(Massif).filter(
             Massif.m_id == result_filtered.br_massif).first())
         record_as_dict["risks"] = sess.query(Risk).filter(
             Risk.r_record_id == result_filtered.br_id)
         record_as_dict["snow_records"] = sess.query(SnowRecord).filter(
             SnowRecord.s_bra_record == result_filtered.br_id)
         record_as_dict["fresh_snow_records"] = sess.query(
             FreshSnowRecord).filter(
                 FreshSnowRecord.fsr_bra_record == result_filtered.br_id)
         record_as_dict["weather_forecasts"] = sess.query(
             WeatherForecast).filter(
                 WeatherForecast.wf_bra_record == result_filtered.br_id)
         record_as_dict["risk_forecasts"] = sess.query(RiskForecast).filter(
             RiskForecast.rf_bra_record == result_filtered.br_id)
         return marshal(record_as_dict, bra_model)
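
The lag()/lead() pattern used above (previous and next record id within a massif, ordered by production date) recurs in most of the examples below. A minimal, self-contained sketch of that technique, assuming a hypothetical Event model and an in-memory SQLite database (none of these names come from the project above):

# Hedged sketch of the lag()/lead() "previous/next id" pattern; the Event
# model, its columns, and the SQLite engine are illustrative assumptions.
from sqlalchemy import Column, Date, Integer, String, create_engine, func
from sqlalchemy.orm import Session, declarative_base

Base = declarative_base()


class Event(Base):
    __tablename__ = "event"
    id = Column(Integer, primary_key=True)
    group_name = Column(String)    # plays the role of br_massif
    produced_on = Column(Date)     # plays the role of br_production_date


engine = create_engine("sqlite://")  # window functions need SQLite >= 3.25
Base.metadata.create_all(engine)

with Session(engine) as sess:
    rows = sess.query(
        Event,
        func.lag(Event.id).over(
            partition_by=Event.group_name,
            order_by=Event.produced_on,
        ).label("previous_id"),
        func.lead(Event.id).over(
            partition_by=Event.group_name,
            order_by=Event.produced_on,
        ).label("next_id"),
    ).all()
    # each element of rows is an (Event, previous_id, next_id) row tuple
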
Example #2
0
 def get(self) -> List[Dict]:
     """
     Return the last record for all massifs.
     """
     with session_scope() as sess:
         results = (sess.query(
             BraRecord,
             func.lag(BraRecord.br_id).over(
                 order_by=BraRecord.br_production_date,
                 partition_by=BraRecord.br_massif,
             ).label("previous_bra_id"),
             func.lead(BraRecord.br_id).over(
                 order_by=BraRecord.br_production_date,
                 partition_by=BraRecord.br_massif,
             ).label("next_bra_id"),
          ).filter(BraRecord.br_production_date.cast(Date) == select(
              [func.max(BraRecord.br_production_date.cast(Date))]
          )).options(
              subqueryload(BraRecord.risks),
              subqueryload(BraRecord.snow_records),
              subqueryload(BraRecord.fresh_snow_records),
              subqueryload(BraRecord.weather_forecasts),
              subqueryload(BraRecord.risk_forecasts),
              subqueryload(BraRecord.massif),
          ))
         final = []
         for res in results.all():
             encoded_bra_record = marshal(res.BraRecord, bra_model)
             encoded_bra_record["previous_bra_id"] = (str(
                 res.previous_bra_id) if res.previous_bra_id else None)
             encoded_bra_record["next_bra_id"] = (str(res.next_bra_id) if
                                                  res.next_bra_id else None)
             final.append(encoded_bra_record)
         return final
Example #3
0
    def main(self):
        camera_id = 1
        timespec = '20220201'

        d_dayDate = datetime.strptime(timespec, '%Y%m%d').date()
        night = True

        createDate_local = func.datetime(
            IndiAllSkyDbImageTable.createDate, 'localtime',
            type_=DateTime).label('createDate_local')

        #timelapse_files_entries = db.session.query(
        #    IndiAllSkyDbImageTable,
        #    createDate_local,
        #)\
        #    .join(IndiAllSkyDbImageTable.camera)\
        #    .filter(IndiAllSkyDbCameraTable.id == camera_id)\
        #    .filter(IndiAllSkyDbImageTable.dayDate == d_dayDate)\
        #    .filter(IndiAllSkyDbImageTable.night == night)\
        #    .order_by(IndiAllSkyDbImageTable.createDate.asc())

        timelapse_files_entries = IndiAllSkyDbImageTable.query\
            .add_columns(createDate_local)\
            .join(IndiAllSkyDbCameraTable)\
            .filter(IndiAllSkyDbCameraTable.id == camera_id)\
            .filter(IndiAllSkyDbImageTable.dayDate == d_dayDate)\
            .filter(IndiAllSkyDbImageTable.night == night)\
            .order_by(IndiAllSkyDbImageTable.createDate.asc())

        now_minus_3h = datetime.now() - timedelta(hours=3)

        createDate_s = func.strftime('%s',
                                     IndiAllSkyDbImageTable.createDate,
                                     type_=Integer)
        image_lag_list = IndiAllSkyDbImageTable.query\
            .add_columns(
                IndiAllSkyDbImageTable.id,
                (createDate_s - func.lag(createDate_s).over(order_by=IndiAllSkyDbImageTable.createDate)).label('lag_diff'),
            )\
            .filter(IndiAllSkyDbImageTable.createDate > now_minus_3h)\
            .order_by(IndiAllSkyDbImageTable.createDate.desc())\
            .limit(50)

        start = time.time()

        #logger.warning('Entries: %d', timelapse_files_entries.count())
        logger.warning('Entries: %d', image_lag_list.count())

        elapsed_s = time.time() - start
        logger.info('SQL executed in %0.4f s', elapsed_s)

        #logger.info('SQL: %s', timelapse_files_entries)
        logger.info('SQL: %s', image_lag_list)
Example #4
def get_stats(project):
    """
    Collect the task state changes used to build a chart for the project.
    """

    total = DBSession.query(func.sum(ST_Area(Task.geometry))) \
        .filter(
            Task.cur_state.has(TaskState.state != TaskState.state_removed),
            Task.project_id == project.id
        ) \
        .scalar()

    subquery = DBSession.query(
        TaskState.state,
        TaskState.date,
        ST_Area(Task.geometry).label('area'),
        func.lag(TaskState.state).over(
            partition_by=(
                TaskState.task_id,
                TaskState.project_id
            ),
            order_by=TaskState.date
        ).label('prev_state')
    ).join(Task).filter(
        TaskState.project_id == project.id,
        TaskState.state != TaskState.state_ready) \
     .order_by(TaskState.date)

    tasks = subquery.all()
    log.debug('Number of tiles: %s', len(tasks))
    stats = [[project.created.isoformat(), 0, 0]]
    done = 0
    validated = 0

    # for every state change, update the aggregated 'done' and 'validated' areas
    for task in tasks:
        if task.state == TaskState.state_done:
            done += task.area
        if task.state == TaskState.state_invalidated:
            if task.prev_state == TaskState.state_done:
                done -= task.area
            elif task.prev_state == TaskState.state_validated:
                validated -= task.area
        if task.state == TaskState.state_validated:
            validated += task.area
            done -= task.area

        # append an entry with the change date and the current 'done' and
        # 'validated' area totals
        stats.append([task.date.isoformat(), done, validated])

    return {"total": total, "stats": stats}
Example #6
0
 def get(self, massif_id: UUID) -> Dict:
     """
     Return the latest record for a massif, with all the associated metadata.
     """
     with session_scope() as sess:
         request = (sess.query(
             BraRecord,
             func.lag(BraRecord.br_id).over(
                 order_by=BraRecord.br_production_date,
                 partition_by=BraRecord.br_massif,
             ).label("previous_bra_id"),
             func.lead(BraRecord.br_id).over(
                 order_by=BraRecord.br_production_date,
                 partition_by=BraRecord.br_massif,
             ).label("next_bra_id")).filter(
                 BraRecord.br_massif == massif_id).order_by(
                     BraRecord.br_production_date.desc()).limit(1))
         result = request.first()
         json = marshal(result.BraRecord, bra_model)
         json["previous_bra_id"] = result.previous_bra_id
         json["next_bra_id"] = result.next_bra_id
         return json
Example #7
0
 def get(self, massif_id, bra_date) -> Dict:
     """
     Return the BRA record for a massif at a given date, or a 404 if none exists.
     """
     parsed_date = date.fromisoformat(bra_date)
     with session_scope() as sess:
         global_rec = sess.query(
             BraRecord,
             func.lag(BraRecord.br_id).over(
                 order_by=BraRecord.br_production_date,
                 partition_by=BraRecord.br_massif,
             ).label("previous_bra_id"),
             func.lead(BraRecord.br_id).over(
                 order_by=BraRecord.br_production_date,
                 partition_by=BraRecord.br_massif,
             ).label("next_bra_id"),
         ).subquery()
         result_filtered = (sess.query(global_rec).filter(
             global_rec.c.br_production_date.cast(Date) == parsed_date
         ).filter(global_rec.c.br_massif == massif_id).first())
         if result_filtered is None:
             return bra_api.abort(
                 404,
                 f"Record for massif id {massif_id} for the date {bra_date} could not be found."
             )
         record_as_dict = result_filtered._asdict()
         record_as_dict["massif"] = (sess.query(Massif).filter(
             Massif.m_id == result_filtered.br_massif).first())
         record_as_dict["risks"] = sess.query(Risk).filter(
             Risk.r_record_id == result_filtered.br_id)
         record_as_dict["snow_records"] = sess.query(SnowRecord).filter(
             SnowRecord.s_bra_record == result_filtered.br_id)
         record_as_dict["fresh_snow_records"] = sess.query(
             FreshSnowRecord).filter(
                 FreshSnowRecord.fsr_bra_record == result_filtered.br_id)
         record_as_dict["weather_forecasts"] = sess.query(
             WeatherForecast).filter(
                 WeatherForecast.wf_bra_record == result_filtered.br_id)
         record_as_dict["risk_forecasts"] = sess.query(RiskForecast).filter(
             RiskForecast.rf_bra_record == result_filtered.br_id)
         return marshal(record_as_dict, bra_model)
Example #8
0
 def get(self, massif_id: UUID) -> List[Dict]:
     """
     Return a list of records for a massif, limited to the last 50 days by default.
     """
     args = massif_record_parser.parse_args()
     with session_scope() as sess:
         results = (sess.query(
             BraRecord,
             func.lag(BraRecord.br_id).over(
                 order_by=BraRecord.br_production_date,
                 partition_by=BraRecord.br_massif,
             ).label("previous_bra_id"),
             func.lead(BraRecord.br_id).over(
                 order_by=BraRecord.br_production_date,
                 partition_by=BraRecord.br_massif,
             ).label("next_bra_id"),
          ).filter(BraRecord.br_production_date > (
              func.now() -
              func.cast(concat(args["limit"], "DAYS"), INTERVAL))
          ).filter(BraRecord.br_massif == massif_id).order_by(
              BraRecord.br_production_date.desc()
          ).options(
              subqueryload(BraRecord.risks),
              subqueryload(BraRecord.snow_records),
              subqueryload(BraRecord.fresh_snow_records),
              subqueryload(BraRecord.weather_forecasts),
              subqueryload(BraRecord.risk_forecasts),
              subqueryload(BraRecord.massif),
          ).all())
         final = []
         for res in results:
             encoded_bra_record = marshal(res.BraRecord, bra_model)
             encoded_bra_record["previous_bra_id"] = (str(
                 res.previous_bra_id) if res.previous_bra_id else None)
             encoded_bra_record["next_bra_id"] = (str(res.next_bra_id) if
                                                  res.next_bra_id else None)
             final.append(encoded_bra_record)
         return final
Example #9
0
    def get_user_level_subscription_features(self):
        sub_data = self.bq_session.query(
            *[column.label(column.name) for column in self.sub_data.columns],
            self.sub_web_access(),
            func.lag(self.sub_data.c['end_time']).over(
                partition_by=self.sub_data.c['user_id'],
                order_by=self.sub_data.c['start_time']).label(
                    'previous_sub_end_time'),
            (func.safe_divide(
                (self.sub_data.c['amount'] -
                 self.sub_data.c['additional_amount']),
                self.sub_data.c['length'])).label('daily_price')).subquery()

        sub_prices = self.sub_prices_query()

        user_stats_query = self.bq_session.query(
            sub_data.c['user_id'].label('user_id'),
            func.sum(sub_data.c['amount']).label('total_amount'),
            func.avg(sub_data.c['amount']).label('average_amount'),
            func.count(func.distinct(
                sub_data.c['subscription_id'])).label('count_subs'),
            func.sum(case([(sub_data.c['is_paid'] == 1, 1.0)],
                          else_=0.0)).label('paid_subs'),
            func.sum(case([(sub_data.c['is_paid'] == 0, 1.0)],
                          else_=0.0)).label('free_subs'),
            func.date_diff(
                func.max(sub_data.c['created_at']).cast(DATE),
                func.min(sub_data.c['created_at']).cast(DATE),
                text('day')).label('number_of_days_since_the_first_paid_sub'),
            func.avg(
                func.date_diff(sub_data.c['start_time'].cast(DATE),
                               sub_data.c['previous_sub_end_time'].cast(DATE),
                               text('day'))).label('average_gap'),
            func.max(
                func.date_diff(sub_data.c['start_time'].cast(DATE),
                               sub_data.c['previous_sub_end_time'].cast(DATE),
                               text('day'))).label('max_gap'),
            func.sum(
                case([(sub_data.c['is_paid'] == 1, sub_data.c['length'])],
                     else_=0.0)).label('total_days_paid_sub'),
            func.sum(
                case([(sub_data.c['is_paid'] == 0, sub_data.c['length'])],
                     else_=0.0)).label('total_days_free_sub'),
            func.sum(case([(sub_data.c['is_upgraded'] == 1, 1.0)],
                          else_=0.0)).label('upgraded_subs'),
            func.sum(
                case([(sub_prices.c['average_price'] == 0, 0.0),
                      (1 - sub_data.c['daily_price'] /
                       sub_prices.c['average_price'] >= 0.2, 1.0)],
                     else_=0.0)).label('discount_subs')
        ).filter(
            sub_data.c['start_time'].cast(DATE) <= self.max_date.date()).join(
                sub_prices,
                and_(
                    sub_data.c['web_access_level'] ==
                    sub_prices.c['web_access_level'], *[
                        sub_data.c[column] == sub_prices.c[column]
                        for column in self.sub_desc_column_names
                    ])).group_by(sub_data.c['user_id']).subquery()

        return user_stats_query
Example #10
0
 def trajectDirection(self):
     """traject direction of previous location and current location"""
     azimuth = func.ST_Azimuth(func.lag(self.rawlocation).over(order_by=(self.device_info_serial, self.date_time,)), self.rawlocation)
     return func.round(cast(func.degrees(azimuth), Numeric()), 2).label('tdirection')
Example #11
-1
    def trajectSpeed(self):
        """traject speed by distance between previous and current location divided by current date_time - previous date_time

        round(CAST
        (
            ST_Length_Spheroid(
                ST_MakeLine(location, lag(location) over (order by device_info_serial, date_time)),
                'SPHEROID[\"WGS 84\",6378137,298.257223563]'
            )
        /
        EXTRACT(
            EPOCH FROM (date_time - lag(date_time) over (order by device_info_serial, date_time))
        )
        ) AS NUMERIC, 4)
        """
        order_by = (self.device_info_serial, self.date_time,)
        spheroid = 'SPHEROID["WGS 84",6378137,298.257223563]'
        line = func.ST_MakeLine(self.rawlocation, func.lag(self.rawlocation).over(order_by=order_by))
        distance = func.ST_Length_Spheroid(line, spheroid)
        duration = func.extract('epoch', self.date_time - func.lag(self.date_time).over(order_by=order_by))
        return func.round(cast(distance / duration, Numeric), 4).label('tspeed')
Example #12
def problem1():
    """
    Find all numbers that appear at least three times consecutively.
    """
    query = ses.query(Number.value).order_by(Number.id)
    number_list = [number.value for number in query]

    init = 0
    count = 0
    n_consecutive = 3
    target_numbers = list()
    for i in number_list:
        if i != init:
            init = i
            count = 1
        else:
            count += 1
        if count == n_consecutive:
            target_numbers.append(init)

    # print(number_list)
    # print(target_numbers)

    # pprint(query, engine)

    # lag() is a window function, so it needs an OVER clause to be valid SQL
    query = ses.query(
        Number.value,
        func.lag(Number.value, 1).over(order_by=Number.id).label("prev_value"),
    )
    pprint(query, engine)
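
The query above only surfaces the previous value next to each row; the usual window-function way to finish the problem is to compare each value with the two preceding rows. A hedged sketch, reusing the Number model, ses, engine, and pprint assumed above:

# Hedged sketch (not from the original file): select values equal to the two
# preceding values, i.e. values appearing at least three times consecutively.
prev1 = func.lag(Number.value, 1).over(order_by=Number.id).label("prev1")
prev2 = func.lag(Number.value, 2).over(order_by=Number.id).label("prev2")
windowed = ses.query(Number.value.label("value"), prev1, prev2).subquery()

consecutive = ses.query(windowed.c.value).filter(
    windowed.c.value == windowed.c.prev1,
    windowed.c.value == windowed.c.prev2,
).distinct()
pprint(consecutive, engine)
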