def build_count_subquery(counter_type):
    """Build a subquery with one row per device: (device_id, count).

    'count' is the summed value of all counters of the given type whose
    last_update falls within one day of the last packet processed by the
    'packet_analyzer' row processor; devices with no matching counters
    sum to 0 via coalesce.
    """
    summed_value = func.coalesce(func.sum(DeviceCounters.value), 0).label('count')

    query = db.session.query(DeviceCounters.device_id, summed_value)
    # Anchor "recent" to the timestamp of the last analyzed packet rather
    # than wall-clock time.
    query = query.join(RowProcessed, RowProcessed.analyzer == 'packet_analyzer')
    query = query.join(Packet, Packet.id == RowProcessed.last_row)
    query = query.filter(DeviceCounters.counter_type == counter_type)
    # make_interval(0, 0, 0, 1) is a one-day PostgreSQL interval.
    query = query.filter(
        DeviceCounters.last_update + func.make_interval(0, 0, 0, 1) > Packet.date)

    return query.group_by(DeviceCounters.device_id).subquery()
def expire_task():
    """Check whether any pastes have expired and delete them.

    A paste is expired when created_at plus its expiration_length_in_minutes
    (built as a PostgreSQL interval via make_interval) is in the past.
    For each expired paste the on-disk file is removed and the DB row is
    deleted; all deletions are committed in a single transaction.
    """
    # Take a single timestamp so the log line and the expiry comparison agree.
    now = datetime.now()
    print("running expiry task at {}".format(now))

    # make_interval(0, 0, 0, 0, 0, minutes) -> interval of N minutes.
    result = Pastes.query.filter(
        Pastes.created_at
        + func.make_interval(0, 0, 0, 0, 0, Pastes.expiration_length_in_minutes)
        < now)

    for row in result:
        # Delete the paste file; tolerate a file that is already gone so a
        # single missing file cannot abort the whole expiry run and strand
        # the remaining DB rows.
        try:
            os.remove(row.paste_path)
        except FileNotFoundError:
            pass
        # Delete the db entry.
        db.session.delete(row)

    # One commit for the whole batch.
    db.session.commit()
def _prepare_query(self, session: Session, final_query_fields):
    """Build the work-time aggregation query for issue statuses.

    Produces, per group of `final_query_fields`, the summed in-work-hours
    duration (WORK_TIME_DURATION) and the summed raw duration
    (STATUS_DURATION) of status intervals, excluding holidays that fall
    strictly inside each interval.

    Args:
        session: active SQLAlchemy session used to build the query.
        final_query_fields: names of subquery columns to select and
            group by in the outer query.

    Returns:
        An un-executed SQLAlchemy query object.
    """
    Status = self.Status
    Holiday = self.Holiday
    # Open intervals fall back to the end of the last work day.
    end_ts = coalesce(Status.end_ts, self.get_last_work_day_end_time(session))
    status_duration = end_ts - Status.start_ts
    status_duration_days = func.date_part('DAY', status_duration)
    holiday_period_count = func.count(Holiday.date)
    # Inner subquery: one row per status interval with its duration and
    # the number of holidays strictly inside it.
    sq = session.query(
        Status.issue_key,
        Status.status,
        Status.start_ts,
        end_ts.label('end_ts'),
        status_duration.label(self.STATUS_DURATION),
        status_duration_days.label(self.STATUS_DURATION_DAYS),
        holiday_period_count.label(self.HOLIDAYS_PERIOD_COUNT),
    ).outerjoin(
        # Leave only days strictly between start and end of a status interval
        Holiday,
        and_(
            end_ts > Holiday.date,
            Status.start_ts < Holiday.date,
        )).group_by(
        Status.issue_key,
        Status.status,
        Status.start_ts,
        end_ts,
        status_duration,
        status_duration_days,
    ).subquery()
    # NOTE: from here on the local names are rebound from Python-side
    # expressions to the materialized subquery columns.
    status_duration = sq.c[self.STATUS_DURATION]
    holiday_period_count = sq.c[self.HOLIDAYS_PERIOD_COUNT]
    start_date = cast(sq.c[StatusAttrs.start_ts], Date)
    end_date = cast(sq.c[StatusAttrs.end_ts], Date)
    # Calculate it differently because we need only days inside range
    status_duration_days = end_date - start_date
    # Worktime days end
    end_of_start_day = start_date + self.WORK_DAY_END
    start_of_end_day = end_date + self.WORK_DAY_START
    start_ts = sq.c[StatusAttrs.start_ts]
    end_ts = sq.c[StatusAttrs.end_ts]
    query_fields = [sq.c[field_name] for field_name in final_query_fields]
    """Fields for group_by and select"""
    q = session.query(
        func.sum(
            case([
                # If begin and end in a same calendar day, just return difference.
                (start_date == end_date, status_duration),
                (
                    start_date != end_date,
                    # calculate start day work time
                    case(
                        [
                            # If start of a status is less than the work_day_end, return difference. Otherwise just 0
                            (start_ts < end_of_start_day, end_of_start_day - start_ts),
                        ],
                        else_=timedelta(0),
                    ) +
                    # calculate full working hours for working days between start and end days
                    func.make_interval(
                        0, 0, 0, 0,
                        # Convert hours to pg interval (5th make_interval arg = hours).
                        cast((status_duration_days - sq.c[self.HOLIDAYS_PERIOD_COUNT]) * self.WORK_HOURS_PER_DAY, Integer)) +
                    # Calculate end day work time
                    case(
                        [
                            # If end of a status is greater than the work start of the day, return difference.
                            (end_ts > start_of_end_day, end_ts - start_of_end_day),
                        ],
                        else_=timedelta(0),
                    ),
                ),
            ])).label(self.WORK_TIME_DURATION),
        func.sum(sq.c[self.STATUS_DURATION]).label(self.STATUS_DURATION),
        *query_fields,
    ).group_by(*query_fields, )
    return q
def get_with(asset_id, asset_type, organization_id=None):
    """
    Gets an asset from database

    Request parameters:
    - asset_id: database id of the asset
    - asset_type: type of the requested asset, can be "device" or "gateway".
    - organization_id (optional): when given, asserts that received
      organization matches the asset's organization

    Returns:
    - Model object of requested asset

    Raises:
    - Error.BadRequest: asset_type is neither "device" nor "gateway".
    - Error.NotFound: no asset with the given id/type exists.
    - Error.Forbidden: asset belongs to a different organization.
    """
    if asset_type == "device":
        # Device branch: join policy data (LAF-401 parameters) and recent
        # counters; gateways below select NULL placeholders for the columns
        # they lack so both branches share one column layout.
        asset_query = db.session.query(
            Device.id,
            Device.organization_id,
            Device.dev_eui.label('hex_id'),
            expression.literal_column('\'Device\'').label('type'),
            Device.name,
            Device.app_name,
            DataCollector.name.label('data_collector'),
            PolicyItem.parameters.label('policy_parameters'),
            Device.connected.label('connected'),
            Device.last_activity,
            Device.activity_freq,
            Device.activity_freq_variance,
            Device.npackets_up,
            Device.npackets_down,
            Device.npackets_lost.label('packet_loss'),
            Device.max_rssi,
            Device.max_lsnr,
            Device.ngateways_connected_to,
            Device.payload_size,
            Device.last_packets_list
        ).filter(Device.id == asset_id).\
            join(DataCollector, Device.data_collector_id == DataCollector.id).\
            join(Policy, Policy.id == DataCollector.policy_id).\
            join(PolicyItem, and_(Policy.id == PolicyItem.policy_id, PolicyItem.alert_type_code == 'LAF-401')).\
            join(RowProcessed, RowProcessed.analyzer == 'packet_analyzer').\
            join(Packet, Packet.id == RowProcessed.last_row).\
            join(DeviceCounters, and_(
                # Outer join: keep the device even when it has no counters
                # updated within one day of the last analyzed packet.
                DeviceCounters.device_id == Device.id,
                DeviceCounters.counter_type.in_(dev_wanted_counters),
                DeviceCounters.last_update + func.make_interval(0,0,0,1) > Packet.date
                ), isouter=True).\
            group_by(Device.id, DataCollector.name, PolicyItem.parameters)
        # Append one aggregated column per wanted counter type.
        asset_query = add_counters_columns(asset_query, dev_wanted_counters, DeviceCounters)
        asset = asset_query.first()
    elif asset_type == "gateway":
        asset_query = db.session.query(
            Gateway.id,
            Gateway.organization_id,
            Gateway.gw_hex_id.label('hex_id'),
            expression.literal_column('\'Gateway\'').label('type'),
            Gateway.name,
            expression.null().label('app_name'),
            DataCollector.name.label('data_collector'),
            expression.null().label('policy_parameters'),
            Gateway.connected.label('connected'),
            Gateway.last_activity,
            Gateway.activity_freq,
            cast(expression.null(), Float).label('activity_freq_variance'),
            Gateway.npackets_up,
            Gateway.npackets_down,
            cast(expression.null(), Float).label('packet_loss'),
            cast(expression.null(), Float).label('max_rssi'),
            cast(expression.null(), Float).label('max_lsnr'),
            cast(expression.null(), Float).label('ngateways_connected_to'),
            cast(expression.null(), Float).label('payload_size'),
            expression.null().label('last_packets_list')
        ).filter(Gateway.id == asset_id).\
            join(DataCollector, Gateway.data_collector_id == DataCollector.id).\
            join(RowProcessed, RowProcessed.analyzer == 'packet_analyzer').\
            join(Packet, Packet.id == RowProcessed.last_row).\
            join(GatewayCounters, and_(
                # Same one-day recency window as the device branch.
                GatewayCounters.gateway_id == Gateway.id,
                GatewayCounters.counter_type.in_(gtw_wanted_counters),
                GatewayCounters.last_update + func.make_interval(0,0,0,1) > Packet.date
                ), isouter=True).\
            group_by(Gateway.id, DataCollector.name)
        asset_query = add_counters_columns(asset_query, gtw_wanted_counters, GatewayCounters)
        asset = asset_query.first()
    else:
        raise Error.BadRequest(
            f"Invalid asset_type: {asset_type}. Valid values are \'device\' or \'gateway\'"
        )
    if not asset:
        raise Error.NotFound(
            f"Asset with id {asset_id} and type {asset_type} not found")
    # Optional organization ownership check.
    if organization_id and asset.organization_id != organization_id:
        raise Error.Forbidden(
            "User's organization's different from asset organization")
    return asset
def list_all(organization_id, page=None, size=None,
             asset_type=None, asset_status=None, data_collector_ids=None,
             gateway_ids=None, device_ids=None,
             min_signal_strength=None, max_signal_strength=None,
             min_packet_loss=None, max_packet_loss=None):
    """ List assets of an organization and their resource usage information.

    Parameters:
        - asset_type: for filtering, count only this type of asset ("device" or "gateway").
        - asset_status: for filtering, count only assets with this status ("connected" or "disconnected").
        - data_collector_ids[]: for filtering, count only the assets belongs to specific data collectors.
        - gateway_ids[]: for filtering, count only the assets connected to ANY one of these gateways.
        - device_ids[]: for filtering, list only the assets related to ANY of these devices
        - min_signal_strength: for filtering, count only the assets with signal strength not lower than this value (dBm)
        - max_signal_strength: for filtering, count only the assets with signal strength not higher than this value (dBm)
        - min_packet_loss: for filtering, count only the assets with packet loss not lower than this value (percentage)
        - max_packet_loss: for filtering, count only the assets with packet loss not higher than this value (percentage)
    Returns:
        - Dict with the list of assets.
    """
    # Per-device latest session activity, used to pick each device's most
    # recent DeviceSession (and thereby its current dev_addr).
    last_dev_addrs = db.session.\
        query(DeviceSession.device_id, func.max(DeviceSession.last_activity).label('last_activity')).\
        group_by(DeviceSession.device_id).\
        subquery()

    # Build two queries, one for devices and one for gateways.
    # Both select the same column layout (gateways fill the device-only
    # columns with typed NULLs) so they can be UNIONed below.
    dev_query = db.session.query(
        Device.id.label('id'),
        Device.dev_eui.label('hex_id'),
        DeviceSession.dev_addr.label('dev_addr'),
        expression.literal_column('\'Device\'').label('type'),
        Device.name,
        Device.app_name,
        DataCollector.name.label('data_collector'),
        PolicyItem.parameters.label('policy_parameters'),
        Device.connected.label('connected'),
        Device.last_activity,
        Device.activity_freq,
        Device.activity_freq_variance,
        Device.npackets_up,
        Device.npackets_down,
        Device.npackets_lost.label('packet_loss'),
        Device.max_rssi,
        Device.max_lsnr,
        Device.payload_size,
        Device.ngateways_connected_to
    ).select_from(Device).\
        filter(Device.organization_id==organization_id).\
        filter(Device.pending_first_connection==False).\
        join(DataCollector, Device.data_collector_id == DataCollector.id).\
        join(Policy, Policy.id == DataCollector.policy_id).\
        join(PolicyItem, and_(Policy.id == PolicyItem.policy_id, PolicyItem.alert_type_code == 'LAF-401')).\
        join(last_dev_addrs, Device.id == last_dev_addrs.c.device_id).\
        join(DeviceSession, and_(DeviceSession.device_id == Device.id, DeviceSession.last_activity == last_dev_addrs.c.last_activity)).\
        join(RowProcessed, RowProcessed.analyzer == 'packet_analyzer').\
        join(Packet, Packet.id == RowProcessed.last_row).\
        join(DeviceCounters, and_(
            # Outer join: keep devices without counters updated within one
            # day of the last analyzed packet.
            DeviceCounters.device_id == Device.id,
            DeviceCounters.counter_type.in_(dev_wanted_counters),
            DeviceCounters.last_update + func.make_interval(0,0,0,1) > Packet.date
            ), isouter=True).\
        group_by(Device.id, DeviceSession.dev_addr, DataCollector.name, PolicyItem.parameters)

    gtw_query = db.session.query(
        distinct(Gateway.id).label('id'),
        Gateway.gw_hex_id.label('hex_id'),
        expression.null().label('dev_addr'),
        expression.literal_column('\'Gateway\'').label('type'),
        Gateway.name,
        expression.null().label('app_name'),
        DataCollector.name.label('data_collector'),
        expression.null().label('policy_parameters'),
        Gateway.connected.label('connected'),
        Gateway.last_activity,
        Gateway.activity_freq,
        cast(expression.null(), Float).label('activity_freq_variance'),
        Gateway.npackets_up,
        Gateway.npackets_down,
        cast(expression.null(), Float).label('packet_loss'),
        cast(expression.null(), Float).label('max_rssi'),
        cast(expression.null(), Float).label('max_lsnr'),
        cast(expression.null(), Float).label('payload_size'),
        cast(expression.null(), Float).label('ngateways_connected_to')
    ).select_from(Gateway).\
        filter(Gateway.organization_id == organization_id).\
        join(DataCollector, Gateway.data_collector_id == DataCollector.id).\
        join(RowProcessed, RowProcessed.analyzer == 'packet_analyzer').\
        join(Packet, Packet.id == RowProcessed.last_row).\
        join(GatewayCounters, and_(
            # Same one-day recency window as the device query.
            GatewayCounters.gateway_id == Gateway.id,
            GatewayCounters.counter_type.in_(gtw_wanted_counters),
            GatewayCounters.last_update + func.make_interval(0,0,0,1) > Packet.date
            ), isouter=True).\
        group_by(Gateway.id, DataCollector.name)

    # Add a column for every counter type to each query, using the wanted_counters lists
    dev_query = add_counters_columns(dev_query, dev_wanted_counters, DeviceCounters)
    gtw_query = add_counters_columns(gtw_query, gtw_wanted_counters, GatewayCounters)

    # Apply all optional request filters to both queries at once.
    queries = add_filters(dev_query=dev_query,
                          gtw_query=gtw_query,
                          asset_type=asset_type,
                          asset_status=asset_status,
                          data_collector_ids=data_collector_ids,
                          gateway_ids=gateway_ids,
                          device_ids=device_ids,
                          min_signal_strength=min_signal_strength,
                          max_signal_strength=max_signal_strength,
                          min_packet_loss=min_packet_loss,
                          max_packet_loss=max_packet_loss)
    dev_query = queries[0]
    gtw_query = queries[1]

    # Filter by device type if the parameter was given, else, make a union with queries.
    if asset_type is None:
        asset_query = dev_query.union(gtw_query)
    elif asset_type == "device":
        asset_query = dev_query
    elif asset_type == "gateway":
        asset_query = gtw_query
    else:
        raise Error.BadRequest("Invalid asset type parameter")

    asset_query = asset_query.order_by(text('type desc, connected desc, id'))
    # Paginate only when both page and size were provided.
    if page and size:
        return asset_query.paginate(page=page, per_page=size, error_out=False)
    else:
        return asset_query.all()
def default_addinterval(element, compiler, **kw):
    """Compile an add-interval element as datetime + make_interval(minutes).

    The element's two clauses are a datetime expression and a minute count;
    the minutes are passed as the sixth make_interval argument.
    """
    base_dt, minutes = element.clauses
    interval = func.make_interval(0, 0, 0, 0, 0, minutes)
    return compiler.process(base_dt + interval)