def get_issues_and_reminders(offset=None, limit=None, start=None, end=None, site_ids=None, include_count=None, search=None, event_id=None, include_expired=None):
    """
    Returns one or more row/s of issues and reminders.

    Args:
        offset (int) - pagination offset
        limit (int) - maximum number of rows returned
        start / end (datetime) - optional ts_posted window (both required)
        site_ids - accepted but currently unused  # NOTE(review): confirm intent
        include_count (bool) - when True, return [rows, count] instead of rows
        search (str) - substring match against the detail column
        event_id (int) - when given, return all unresolved rows for that event
        include_expired (bool) - when falsy, hide rows whose ts_expiration passed
    """
    print(get_process_status_log("get_issues_and_reminders", "start"))

    iar = IssuesAndReminders
    irp = IssuesRemindersSitePostings

    # Unresolved rows only; eager-load postings and their event to avoid N+1.
    base = iar.query.options(joinedload(iar.postings).joinedload(
        irp.event)).filter(iar.resolution == None)
    return_data = None

    if start and end:
        base = base.filter(iar.ts_posted.between(start, end))

    if not event_id:
        if search:
            base = base.filter(iar.detail.ilike("%" + search + "%"))

        if not include_expired:
            # Keep rows that never expire (NULL) or expire in the future.
            base = base.filter(
                DB.or_(iar.ts_expiration > datetime.now(),
                       iar.ts_expiration == None))

        issues_and_reminders = base.order_by(DB.desc(
            iar.ts_posted)).limit(limit).offset(offset).all()
        DB.session.commit()

        if include_count:
            count = get_issues_count(base)
            return_data = [issues_and_reminders, count]
        else:
            return_data = issues_and_reminders
    else:
        # BUGFIX: was ordered by iar.timestamp, which is not the column this
        # function uses anywhere else (ts_posted); looked like a copy-paste
        # from get_narratives, whose model does use `timestamp`.
        issues_and_reminders = base.order_by(DB.desc(
            iar.ts_posted)).filter(iar.event_id == event_id).all()
        DB.session.commit()
        return_data = issues_and_reminders

    print(get_process_status_log("get_issues_and_reminders", "end"))
    return return_data
def get_latest_messages(mobile_id, messages_per_convo=20):
    """
    Return the newest inbox and outbox messages for one mobile number.

    Builds two column-aligned SELECTs (inbox and outbox), UNIONs them and
    returns the latest `messages_per_convo` rows ordered by timestamp desc.

    Args:
        mobile_id (int) - mobile number id to fetch the conversation for
        messages_per_convo (int) - maximum rows returned (default 20)
    """
    query_start = datetime.now()

    siu = SmsInboxUsers
    # bindparam(...) supplies NULL placeholders for columns that only exist
    # on the other side of the union, keeping both SELECTs union-compatible.
    sms_inbox = DB.session.query(
        siu.inbox_id.label("convo_id"),
        siu.inbox_id,
        bindparam("outbox_id", None),
        siu.mobile_id,
        siu.sms_msg,
        siu.ts_sms.label("ts"),
        siu.ts_sms.label("ts_received"),
        bindparam("ts_written", None),
        bindparam("ts_sent", None),
        literal("inbox").label("source"),
        bindparam("send_status", None)
    ).options(raiseload("*")).filter(siu.mobile_id == mobile_id).order_by(DB.desc(siu.ts_sms))

    sou = SmsOutboxUsers
    sous = SmsOutboxUserStatus
    # NOTE: a never-used subquery on SmsOutboxUserTags (and the unused
    # SmsInboxUserTags alias) was removed here as dead code.

    sms_outbox = DB.session.query(
        sous.stat_id.label("convo_id"),
        bindparam("inbox_id", None),
        sous.outbox_id,
        sous.mobile_id,
        sou.sms_msg,
        sou.ts_written.label("ts"),
        bindparam("ts_received", None),
        sou.ts_written,
        sous.ts_sent,
        literal("outbox").label("source"),
        sous.send_status
    ).options(raiseload("*")).join(sou).filter(sous.mobile_id == mobile_id) \
        .order_by(DB.desc(sous.outbox_id))

    # "anon_1_ts" is the label SQLAlchemy assigns to the ts column of the
    # union's anonymous subquery.
    union = sms_inbox.union(sms_outbox).order_by(
        DB.desc(text("anon_1_ts"))).limit(messages_per_convo)

    query_end = datetime.now()
    print("")
    print("SCRIPT RUNTIME: GET LATEST MESSAGES",
          (query_end - query_start).total_seconds())
    print("")

    return union
def get_releases_of_an_event_args():
    """
    ARGS TYPE of Parameter filter — query-string variant of the previous
    function that gets releases of an event.

    Reads `event_id` from the request args (default 1, int) and returns the
    event's releases as JSON, ordered by event_id descending.
    Make sure you import "request" from "flask":
        from flask import request

    Example of putting parameter filters on the URL:
        page = request.args.get('page', default=1, type=int)
        filter = request.args.get('filter', default='*', type=str)

        /my-route?page=34             -> page: 34 filter: '*'
        /my-route                     -> page: 1  filter: '*'
        /my-route?page=10&filter=test -> page: 10 filter: 'test'
        /my-route?page=10&filter=10   -> page: 10 filter: '10'
    """
    event_id = request.args.get("event_id", default=1, type=int)

    query = MonitoringReleases.query.join(MonitoringEvents)
    query = query.order_by(DB.desc(MonitoringEvents.event_id))
    releases = query.filter(MonitoringEvents.event_id == event_id).all()

    schema = MonitoringReleasesSchema(many=True)
    return jsonify(schema.dump(releases).data)
def get_surficial_data(site_code=None, marker_id=None, data_id=None, mo_id=None, ts_order="asc", end_ts=None, start_ts=None, limit=None, anchor="marker_data"):
    """
    Returns surficial data of a site or marker specified.
    You can filter data more using start, end timestamps and a limit.

    Args:
        anchor (str): "marker_observations" anchors the query on marker
            observations; any other value (default "marker_data") anchors on
            marker data joined to observations.
            NOTE(review): the old docstring said 'marker_observation'
            (singular) but the code matches the plural only — confirm callers.
        limit (int): max rows; when exactly 1, a single row (or None) is
            returned instead of a list.

    Fixes:
        - previously raised UnboundLocalError when none of data_id, mo_id,
          marker_id, site_code was given
        - previously the site_code filter replaced (discarded) an earlier
          marker_id filter; the two now compose
    """
    if data_id:
        filtered_query = md.query.filter(md.data_id == data_id)
    elif mo_id:
        filtered_query = mo.query.filter(mo.mo_id == mo_id)
    else:
        if anchor == "marker_observations":
            base_query = mo.query
        else:
            base_query = md.query.join(mo)

        if ts_order == "asc":
            base_query = base_query.order_by(DB.asc(mo.ts))
        elif ts_order == "desc":
            base_query = base_query.order_by(DB.desc(mo.ts))

        # BUGFIX: start from the base query so filtered_query is always
        # bound, and chain the marker/site filters so both can apply.
        filtered_query = base_query
        if marker_id:
            filtered_query = filtered_query.filter(md.marker_id == marker_id)
        if site_code:
            filtered_query = filtered_query.join(Sites).filter(
                Sites.site_code == site_code)

    if end_ts:
        if not isinstance(end_ts, datetime):
            end_ts = datetime.strptime(end_ts, "%Y-%m-%d %H:%M:%S")
        filtered_query = filtered_query.filter(mo.ts <= end_ts)

    if start_ts:
        if not isinstance(start_ts, datetime):
            start_ts = datetime.strptime(start_ts, "%Y-%m-%d %H:%M:%S")
        filtered_query = filtered_query.filter(mo.ts >= start_ts)

    if limit:
        filtered_query = filtered_query.limit(limit)

    if limit == 1:
        filtered_marker_data = filtered_query.first()
    else:
        filtered_marker_data = filtered_query.all()

    return filtered_marker_data
def get_releases_of_an_event(event_id):
    """
    Sample implementation of joins with parameter.

    Returns the releases belonging to `event_id` as JSON, ordered by
    event_id descending.
    """
    releases = (MonitoringReleases.query
                .join(MonitoringEvents)
                .filter(MonitoringEvents.event_id == event_id)
                .order_by(DB.desc(MonitoringEvents.event_id))
                .all())
    data = MonitoringReleasesSchema(many=True).dump(releases).data
    return jsonify(data)
def get_all_events():
    """
    Sample implementation of querying all columns of a table (default).

    Returns every MonitoringEvents row whose status is 2 as JSON,
    newest event first.
    """
    status_filter = MonitoringEvents.status == 2
    newest_first = DB.desc(MonitoringEvents.event_id)
    events = MonitoringEvents.query.filter(status_filter) \
        .order_by(newest_first).all()
    return jsonify(MonitoringEventsSchema(many=True).dump(events).data)
def get_all_event_ids_only():
    """
    Sample implementation of querying select columns instead of
    returning all (*).

    Returns only event_id of events whose status is 2, newest first, as JSON.
    """
    id_query = DB.session.query(MonitoringEvents.event_id)
    events = id_query.filter(MonitoringEvents.status == 2) \
        .order_by(DB.desc(MonitoringEvents.event_id)).all()
    payload = MonitoringEventsSchema(many=True).dump(events).data
    return jsonify(payload)
def get_narratives(offset=None, limit=None, start=None, end=None, site_ids=None, include_count=None, search=None, event_id=None, raise_site=True):
    """
    Returns one or more row/s of narratives.

    Args:
        offset (int) - pagination offset
        limit (int) - maximum number of rows returned
        start / end (datetime) - optional timestamp window
        site_ids (list[int]) - restrict to these site ids
        include_count (bool) - when True, return [rows, count] instead of rows
        search (str) - substring match against the narrative column
        event_id (int) - when given, return all rows for that event (asc order)
        raise_site (bool) - when True, disallow lazy-loading the site relation
    """
    nar = Narratives
    base = nar.query

    if raise_site:
        base = base.options(DB.raiseload("site"))

    if start is not None or end is not None:
        base = base.filter(nar.timestamp.between(start, end))

    if not event_id:
        if site_ids:
            base = base.filter(nar.site_id.in_(site_ids))

        # BUGFIX: was `if search != ""`, which let the default None through
        # and crashed on `"%" + None`; filter only on a non-empty term.
        if search:
            base = base.filter(nar.narrative.ilike("%" + search + "%"))

        narratives = base.order_by(DB.desc(
            nar.timestamp)).limit(limit).offset(offset).all()
        DB.session.commit()

        if include_count:
            count = get_narrative_count(base)
            return [narratives, count]
        return narratives

    narratives = base.order_by(DB.asc(
        nar.timestamp)).filter(nar.event_id == event_id).all()
    DB.session.commit()
    return narratives
def get_all_events_wo_relationship():
    """
    Sample implementation of preventing lazy load on relationship.

    Returns "finished" events, newest first, serialized without the
    releases relation.
    """
    query = MonitoringEvents.query.options(
        DB.raiseload(MonitoringEvents.releases))
    query = query.filter(MonitoringEvents.status == "finished")
    events = query.order_by(DB.desc(MonitoringEvents.event_id)).all()

    schema = MonitoringEventsSchema(many=True, exclude=("releases", ))
    return jsonify(schema.dump(events).data)
def get_on_demand(timestamp):
    """
    Return on-demand monitoring rows whose request_ts equals `timestamp`,
    ordered by request_ts descending.
    """
    model = MonitoringOnDemand
    return (model.query
            .filter(model.request_ts == timestamp)
            .order_by(DB.desc(model.request_ts))
            .all())
def get_rainfall_alerts(site_id=None, latest_trigger_ts=None):
    """
    Query rainfall alerts.

    Not in use except for tech_info_maker (which was not yet imported).
    When both site_id and latest_trigger_ts are given, returns the matching
    alerts newest first; otherwise returns every rainfall alert row.
    """
    if not (site_id and latest_trigger_ts):
        return ra.query.all()

    return (ra.query
            .filter(ra.site_id == site_id, ra.ts == latest_trigger_ts)
            .order_by(DB.desc(ra.ts))
            .all())
def get_loggers(site_code=None, many=True):
    """
    Function that gets basic site data by site code.

    Args:
        site_code - when given, restrict to loggers of that site
        many (bool) - return all matches when True, the first match otherwise
    """
    query = (Loggers.query
             .order_by(DB.desc(Loggers.logger_id))
             .join(TSMSensors)
             .join(Sites))

    if site_code:
        query = query.filter(Sites.site_code == site_code)

    return query.all() if many else query.first()
def formulate_surficial_tech_info(surficial_alert_detail):
    """
    Build a human-readable tech-info string from surficial alert details.

    Args:
        surficial_alert_detail - iterable of alert detail rows, each with a
            marker relation, displacement (cm) and time_delta (hours)

    Returns:
        str: "; "-joined "Marker X: ..." sentences ("" for empty input).
    """
    tech_info = []
    for item in surficial_alert_detail:
        # Current marker name comes from the most recent marker history row.
        name = item.marker.marker_histories.order_by(DB.desc(
            mh.ts)).first().marker_names[0].marker_name
        disp = item.displacement
        timestamp = '{:.2f}'.format(item.time_delta)
        tech_info.append(
            f"Marker {name}: {disp} cm difference in {timestamp} hours")

    # PERF: join once after the loop; it was rebuilt on every iteration.
    return '; '.join(tech_info)
def find_narrative_event_id(timestamp, site_id):
    """
    Find the event_id of the event alert active at `timestamp` for a site.

    An event alert matches when the timestamp falls between ts_start and
    ts_end, or when ts_start has passed and ts_end is still open (NULL).
    Returns None when nothing matches.
    """
    events = MonitoringEvents
    alerts = MonitoringEventAlerts

    within_window = DB.and_(
        alerts.ts_start <= timestamp, timestamp <= alerts.ts_end)
    still_open = DB.and_(
        alerts.ts_start <= timestamp, alerts.ts_end == None)

    match = alerts.query.options(DB.joinedload("event", innerjoin=True),
                                 DB.raiseload("*")) \
        .order_by(DB.desc(alerts.event_alert_id)) \
        .join(events).filter(DB.or_(within_window, still_open)) \
        .filter(events.site_id == site_id) \
        .first()

    if match:
        return match.event.event_id
    return None
def get_subsurface_node_alerts(site_id, start_ts, latest_trigger_ts, alert_level):
    """
    Update: Returns a list of node alerts

    Collects node alerts for all TSM sensors of a site within a timestamp
    window, keeping only the latest alert per node.

    Args:
        site_id (int)
        start_ts / latest_trigger_ts (datetime) - inclusive ts window
        alert_level (int) - compared against disp_alert.
            NOTE(review): vel_alert is compared against a hard-coded 3,
            not alert_level — confirm this asymmetry is intended.

    Returns:
        list of node alert rows.
    """
    # NOTE: OPTIMIZE: Use TSMSensor instead of NodeAlerts OR use join() query
    tsm_sensors = tsma.query.filter(tsma.site_id == site_id).all()

    tsm_node_alerts = []
    for sensor in tsm_sensors:
        sensor_node_alerts = sensor.node_alerts.filter(
            DB.or_(na.disp_alert == alert_level, na.vel_alert == 3)) \
            .order_by(DB.desc(na.na_id)).filter(
                start_ts <= na.ts, na.ts <= latest_trigger_ts).all()

        # Deduplicate per node_id, keeping the first occurrence (the latest
        # alert, given the na_id desc ordering). Replaces the old
        # append-then-count loop, which had the same effect.
        seen_nodes = set()
        unique_alerts = []
        for alert in sensor_node_alerts:
            if alert.node_id not in seen_nodes:
                seen_nodes.add(alert.node_id)
                unique_alerts.append(alert)

        tsm_node_alerts.extend(unique_alerts)

    return tsm_node_alerts
def get_quick_inbox(inbox_limit=50, messages_per_convo=20):
    """
    Assemble the quick-inbox payload: the latest conversations plus all
    unsent messages.

    Args:
        inbox_limit (int) - maximum conversations fetched
        messages_per_convo (int) - messages fetched per conversation
    """
    started_at = datetime.now()

    vlm = ViewLatestMessagesMobileID
    inbox_mobile_ids = vlm.query \
        .outerjoin(UserMobiles, vlm.mobile_id == UserMobiles.mobile_id) \
        .outerjoin(Users) \
        .order_by(DB.desc(vlm.max_ts)) \
        .limit(inbox_limit).all()

    unsent_rows = get_unsent_messages()
    messages = {
        "inbox": get_messages_for_mobile_group(
            inbox_mobile_ids, messages_per_convo),
        "unsent": format_unsent_messages(unsent_rows)
    }

    finished_at = datetime.now()
    print("")
    print("SCRIPT RUNTIME", (finished_at - started_at).total_seconds())
    print("")

    return messages
def get_site_subsurface_columns(site_code, include_deactivated=False):
    """
    Returns one or more row/s of subsurface_columns.

    Edit: [190320] - no provisions for None site_code parameter.

    Args:
        site_code - matched as a substring of the logger name
        include_deactivated (bool) - include deactivated sensors when True
    """
    sensors = TSMSensors
    name_match = Loggers.logger_name.like("%" + str(site_code) + "%")

    query = sensors.query.join(Loggers).options(
        DB.joinedload("logger").joinedload("logger_model").raiseload("*"),
        DB.joinedload("logger").joinedload(
            "site", innerjoin=True).raiseload("*")
    ).order_by(
        DB.asc(Loggers.logger_name),
        DB.desc(sensors.date_activated)
    ).filter(name_match)

    if not include_deactivated:
        query = query.filter(sensors.date_deactivated.is_(None))

    return query.all()
def get_last_event_alert(event_id):
    """
    Return the most recent event alert (by ts_start) for the given event,
    or None when the event has no alerts.
    """
    mea = MonitoringEventAlerts
    query = mea.query.options(DB.raiseload("*"))
    query = query.filter(mea.event_id == event_id)
    return query.order_by(DB.desc(mea.ts_start)).first()
def get_last_site_event(site_id):
    """
    Return the most recent monitoring event (by event_start) for the given
    site, or None when the site has no events.
    """
    me = MonitoringEvents
    query = me.query.options(DB.raiseload("*"))
    query = query.filter(me.site_id == site_id)
    return query.order_by(DB.desc(me.event_start)).first()