def get_tag_by_type(tag_type, tag_details, tag_id):
    """
    Return the tag row matching the given parameters.

    Refactored per the old NOTE: tag types are now table-driven, so new
    types other than smsinbox/smsoutbox user tags can be added by
    extending TAG_TYPE_MAP below.

    Args:
        tag_type (str): One of the keys in TAG_TYPE_MAP
            (e.g. "smsinbox_user_tags", "smsoutbox_user_tags").
        tag_details (dict): Must contain the row-key column for the tag
            type ("inbox_id" or "outbox_id").
        tag_id (int): ID of the tag to look for.

    Returns:
        The matching tag row, or None if no row matches.

    Raises:
        ValueError: If tag_type is not a known tag type (previously this
            crashed with AttributeError on None.query).
        KeyError: If tag_details lacks the required row-key entry.
    """
    # Map each tag type to (model class, column identifying the tagged row).
    # Add new tag types here.
    tag_type_map = {
        "smsinbox_user_tags": (SmsInboxUserTags, "inbox_id"),
        "smsoutbox_user_tags": (SmsOutboxUserTags, "outbox_id"),
    }

    try:
        query_class, row_key = tag_type_map[tag_type]
    except KeyError:
        raise ValueError("Unknown tag_type: {}".format(tag_type))

    row = query_class.query.filter(DB.and_(
        getattr(query_class, row_key) == tag_details[row_key],
        query_class.tag_id == tag_id
    )).first()

    return row
def get_latest_alerts():
    """
    Return a JSON list of all sites annotated with their latest MOMs
    alert level ("moms_alert", 0 when a site has none), sorted with the
    highest alert levels first.
    """
    moms = MonitoringMoms
    instances = MomsInstances

    # Latest observance timestamp per moms instance (with its site).
    latest_per_instance = DB.session.query(
        DB.func.max(instances.site_id).label("site_id"),
        instances.instance_id,
        DB.func.max(moms.observance_ts).label("max_ts")
    ).join(moms).group_by(instances.instance_id).subquery("t2")

    # Highest op_trigger among each site's latest observations;
    # one row per site because of the group_by.
    max_alerts = DB.session.query(
        DB.func.max(moms.op_trigger), latest_per_instance.c.site_id
    ).join(instances).join(
        latest_per_instance,
        DB.and_(moms.observance_ts == latest_per_instance.c.max_ts,
                instances.instance_id == latest_per_instance.c.instance_id)
    ).group_by(latest_per_instance.c.site_id).all()

    # group_by site_id guarantees unique keys here, so this lookup
    # table is equivalent to scanning the result rows per site.
    alert_by_site = {site_id: trigger for trigger, site_id in max_alerts}

    sites_data = SitesSchema(many=True).dump(get_sites_data()).data
    for site in sites_data:
        site["moms_alert"] = alert_by_site.get(site["site_id"], 0)

    sites_data.sort(key=lambda entry: entry["moms_alert"], reverse=True)
    return jsonify(sites_data)
def delete_issues_reminders_site_postings(site_id, event_id):
    """
    Delete the IssuesRemindersSitePostings row for a site/event pair.

    Args:
        site_id (int): Site ID of the posting to delete.
        event_id (int): Event ID of the posting to delete.

    Note:
        Does not commit; the caller owns the session transaction.
    """
    irsp = IssuesRemindersSitePostings
    result = irsp.query.filter(
        DB.and_(irsp.site_id == site_id,
                irsp.event_id == event_id)).first()

    # BUG FIX: .first() returns None when no row matches, and
    # DB.session.delete(None) raises — only delete an actual row.
    if result is not None:
        DB.session.delete(result)
def get_earthquake_alerts(timestamp, site_id):
    """
    Fetch all earthquake alerts for a site whose joined earthquake
    event has the given timestamp.
    """
    alerts = EarthquakeAlerts
    events = EarthquakeEvents
    conditions = DB.and_(events.ts == timestamp, alerts.site_id == site_id)
    return alerts.query.join(events).filter(conditions).all()
def find_narrative_event_id(timestamp, site_id):
    """
    Find the event ID of the most recent event alert that covers the
    given timestamp for a site.

    Returns:
        The event_id of the matching event, or None when no event alert
        covers the timestamp.
    """
    events = MonitoringEvents
    event_alerts = MonitoringEventAlerts

    # An alert covers the timestamp either within [ts_start, ts_end],
    # or when the alert is still open (ts_end is NULL).
    within_window = DB.and_(event_alerts.ts_start <= timestamp,
                            timestamp <= event_alerts.ts_end)
    still_open = DB.and_(event_alerts.ts_start <= timestamp,
                         event_alerts.ts_end == None)  # noqa: E711 — SQLAlchemy IS NULL

    query = event_alerts.query.options(
        DB.joinedload("event", innerjoin=True), DB.raiseload("*"))
    query = query.order_by(DB.desc(event_alerts.event_alert_id))
    query = query.join(events)
    query = query.filter(DB.or_(within_window, still_open))
    query = query.filter(events.site_id == site_id)

    match = query.first()
    if match:
        return match.event.event_id
    return None
def get_account(username, password):
    """
    Look up a user account by username and password.

    The supplied plaintext password is SHA-512-hashed and compared
    against the stored hash.

    NOTE(review): this is unsalted SHA-512 — consider migrating to a
    salted KDF (bcrypt/scrypt/argon2) if the stored hashes can be
    re-generated.

    Returns:
        The matching UserAccounts row, or None on bad credentials.
    """
    hashed_password = hashlib.sha512(password.encode()).hexdigest()
    credentials_match = DB.and_(
        UserAccounts.username == username,
        UserAccounts.password == hashed_password)
    return UserAccounts.query.filter(credentials_match).first()
def insert_to_moms_instances_table(feature):
    """
    Return the moms_instances ID for a feature, inserting a new
    instance row when none exists yet for the site/feature combination.

    Flushes (but does not commit) the session when a row is inserted.

    Returns:
        int: instance_id of the existing or newly created instance.
    """
    feature_id = get_feature_id(feature.feature_type)
    same_instance = DB.and_(
        MomsInstances.site_id == feature.site_id,
        MomsInstances.feature_id == feature_id,
        MomsInstances.feature_name == feature.feature_name)
    existing = MomsInstances.query.options(
        DB.raiseload("*")).filter(same_instance).first()

    if existing is not None:
        # Instance already registered; reuse its ID.
        return existing.instance_id

    # No instance yet for this site/feature — create one and flush so
    # the auto-generated ID becomes available.
    new_instance = MomsInstances(
        site_id=feature.site_id,
        feature_id=feature_id,
        feature_name=feature.feature_name)
    DB.session.add(new_instance)
    DB.session.flush()
    return new_instance.instance_id
def delete_surficial_data(mo_id=None, site_id=None, ts=None, data_id=None):
    """
    Delete surficial marker data, selected by exactly one of three modes:
    a single data row (data_id), a whole observation (mo_id), or an
    observation located by (site_id, ts).

    Commits the session after deleting. Returns without committing when
    the targeted row does not exist.

    Args:
        mo_id (int, optional): Marker observation ID to delete entirely.
        site_id (int, optional): Used with ts to locate an observation.
        ts (optional): Used with site_id to locate an observation.
        data_id (int, optional): Single marker-data row to delete.
    """
    if data_id:
        row = md.query.filter_by(data_id=data_id).first()
        if row is None:
            return  # nothing to delete (previously raised AttributeError)
        obs = row.marker_observation
        obs_data = obs.marker_data
        DB.session.delete(row)
        # If this was the observation's only data row, remove the
        # now-empty parent observation too.
        if len(obs_data) == 1:
            mo.query.filter_by(mo_id=obs.mo_id).delete()
    elif mo_id:
        mo.query.filter_by(mo_id=mo_id).delete()
        md.query.filter_by(mo_id=mo_id).delete()
    elif site_id and ts:
        row = mo.query.filter(DB.and_(
            mo.site_id == site_id, mo.ts == ts)).first()
        if row is None:
            return  # nothing to delete (previously raised AttributeError)
        mo_id = row.mo_id
        # BUG FIX: model instances have no .delete() (that's a Query
        # method) — delete the row through the session instead.
        DB.session.delete(row)
        md.query.filter_by(mo_id=mo_id).delete()

    DB.session.commit()
def migrate_sms_user_tags(user_type):
    """
    Migrate legacy Gintags rows into SmsTags plus the per-direction
    user-tag tables (SmsInboxUserTags / SmsOutboxUserTags).

    Each distinct Gintags tag name gets one SmsTags row; every source
    row gets a corresponding inbox/outbox user-tag row. Inserts are
    flushed but not committed — the caller owns the transaction.

    Args:
        user_type (str): "inbox" or "outbox".

    Raises:
        ValueError: If user_type is neither "inbox" nor "outbox"
            (previously this fell through with `res` unbound → NameError).
    """
    if user_type == "inbox":
        source = "smsinbox_users"
        res = Gintags.query.join(
            SmsInboxUsers,
            Gintags.table_element_id == SmsInboxUsers.inbox_id).filter(
                Gintags.table_used == "smsinbox_users").all()
    elif user_type == "outbox":
        source = "smsoutbox_users"
        # Legacy data quirks: empty tag_id_fks, varchar tagger_eid_fk?!
        res = Gintags.query.join(
            SmsOutboxUsers,
            Gintags.table_element_id == SmsOutboxUsers.outbox_id).join(
                SmsOutboxUserStatus,
                SmsOutboxUsers.outbox_id == SmsOutboxUserStatus.outbox_id).filter(
                    DB.and_(
                        Gintags.table_used == "smsoutbox_users",
                        ~Gintags.tag_id_fk.in_([0, 121]),
                        Gintags.tagger_eid_fk != "",
                        SmsOutboxUserStatus.send_status > 0)).all()
    else:
        raise ValueError(
            "user_type must be 'inbox' or 'outbox', got: {}".format(user_type))

    tag_names = []  # cache of already-migrated tags: {"tag_name", "tag_id"}
    for row in res:
        tag = row.tag
        existing = check_if_sms_tag_exists(tag_names, tag.tag_name)
        if existing:
            tag_id = existing["tag_id"]
        else:
            # First time we see this tag name — create the SmsTags row
            # and flush so its auto-generated ID is available.
            new_tag = SmsTags(
                tag=tag.tag_name,
                source=source,
                description=tag.tag_description)
            DB.session.add(new_tag)
            DB.session.flush()
            tag_id = new_tag.tag_id
            tag_names.append({
                "tag_name": new_tag.tag,
                "tag_id": new_tag.tag_id
            })

        if user_type == "inbox":
            new_sms_tag = SmsInboxUserTags(
                inbox_id=row.table_element_id,
                tag_id=tag_id,
                user_id=row.tagger_eid_fk,
                ts=row.timestamp)
        else:
            new_sms_tag = SmsOutboxUserTags(
                outbox_id=row.table_element_id,
                tag_id=tag_id,
                user_id=row.tagger_eid_fk,
                ts=row.timestamp)
        DB.session.add(new_sms_tag)
        DB.session.flush()