def resource_name(self, db_session=None):
    """Return the display name of the linked resource.

    Falls back to the generic label "any resource" when this object is
    not bound to a specific resource id.
    """
    db_session = get_db_session(db_session)
    if not self.resource_id:
        return "any resource"
    resource = ResourceService.by_resource_id(
        self.resource_id, db_session=db_session)
    return resource.resource_name
def check_for_alert(cls, resource, *args, **kwargs):
    """Check for an open uptime alert on *resource*.

    When an active uptime event already exists, only log progress.
    Otherwise open a new event populated from the latest metric values
    and dispatch alert notifications.
    """
    db_session = get_db_session(kwargs.get("db_session"))
    request = get_current_request()
    event_type = "uptime_alert"
    metric = kwargs["metric"]

    open_events = EventService.for_resource(
        [resource.resource_id],
        event_type=Event.types[event_type],
        status=Event.statuses["active"],
    )
    if open_events.first():
        # an alert is already open for this resource - nothing to create
        log.info("ALERT: PROGRESS: %s %s" % (event_type, resource))
        return

    log.warning("ALERT: OPEN: %s %s" % (event_type, resource))
    new_event = Event(
        resource_id=resource.resource_id,
        event_type=Event.types[event_type],
        status=Event.statuses["active"],
        values={
            "status_code": metric["status_code"],
            "tries": metric["tries"],
            "response_time": metric["response_time"],
        },
    )
    db_session.add(new_event)
    new_event.send_alerts(request=request, resource=resource)
def get_paginator_by_app_ids(cls, app_ids=None, page=1, item_count=None,
                             items_per_page=50, order_by=None,
                             filter_settings=None, exclude_columns=None,
                             db_session=None):
    """Return a paginator of Log rows matching an elasticsearch search.

    The search backend supplies the matching ids and their relevance
    order; the rows are then hydrated from the database and re-sorted to
    match that order.

    :param app_ids: resource ids to search within
    :param page: page number to fetch
    :param item_count: ignored; the count comes from the search iterator
    :param filter_settings: search filters, also forwarded to the Page
    :return: ``paginate.Page`` with ``sa_items`` holding ordered Log rows
    """
    if not filter_settings:
        filter_settings = {}
    results, item_count = cls.get_search_iterator(app_ids, page,
                                                  items_per_page, order_by,
                                                  filter_settings)
    paginator = paginate.Page([], item_count=item_count,
                              items_per_page=items_per_page,
                              **filter_settings)
    ordered_ids = tuple(item['_source']['pg_id']
                        for item in results.get('hits', []))
    sorted_instance_list = []
    if ordered_ids:
        db_session = get_db_session(db_session)
        query = db_session.query(Log)
        query = query.filter(Log.log_id.in_(ordered_ids))
        query = query.order_by(sa.desc('timestamp'))
        sa_items = query.all()
        # resort by relevance score: build a lookup table once instead of
        # scanning the whole result list for every id (was O(n*m))
        items_by_id = {str(item.log_id): item for item in sa_items}
        for i_id in ordered_ids:
            item = items_by_id.get(str(i_id))
            if item is not None:
                sorted_instance_list.append(item)
    paginator.sa_items = sorted_instance_list
    return paginator
def by_id(cls, group_id, app_ids=None, db_session=None):
    """Fetch a single ReportGroup by primary key.

    When *app_ids* is given, the group must also belong to one of those
    resources; returns ``None`` when nothing matches.
    """
    db_session = get_db_session(db_session)
    query = db_session.query(ReportGroup)
    query = query.filter(ReportGroup.id == int(group_id))
    if app_ids:
        query = query.filter(ReportGroup.resource_id.in_(app_ids))
    return query.first()
def by_public_api_key(
    cls, api_key, db_session=None, from_cache=False, request=None
):
    """Look up an Application by its public API key.

    :param api_key: the public key to match against ``Application.public_key``
    :param from_cache: when True, go through the ``redis_min_1`` cache
        region; the cached instance is expunged from the session so it
        stays usable after the session is gone
    :param request: accepted but unused here — presumably kept for
        interface parity with sibling lookups; confirm before removing
    :return: the matching Application (owner eagerly loaded) or ``None``
    """
    db_session = get_db_session(db_session)
    cache_region = get_region("redis_min_1")

    def uncached(api_key):
        q = db_session.query(Application)
        q = q.filter(Application.public_key == api_key)
        # eager-load the owner so cached/detached copies carry it along
        q = q.options(sa.orm.eagerload(Application.owner))
        return q.first()

    if from_cache:

        # NOTE(review): the inner function's identity is presumably part of
        # the dogpile cache key — renaming it could orphan existing entries
        @cache_region.cache_on_arguments("ApplicationService.by_public_api_key")
        def cached(api_key):
            app = uncached(api_key)
            if app:
                # detach before caching so the object survives the session
                db_session.expunge(app)
            return app

        app = cached(api_key)
    else:
        app = uncached(api_key)
    return app
def send_alerts(self, request=None, resource=None, db_session=None):
    """Send this event's alert notifications to all applicable channels.

    Resolves the resource and request when not supplied, then notifies
    every alert channel of every user with 'view' permission, skipping
    channels that are unvalidated, muted, or bound to other resources.
    Integration failures are logged and do not abort remaining channels.
    """
    db_session = get_db_session(db_session)
    db_session.flush()
    if not resource:
        resource = Resource.by_resource_id(self.resource_id)
    if not request:
        request = get_current_request()
    if not resource:
        return
    users = set([p.user for p in resource.users_for_perm('view')])
    for user in users:
        for channel in user.alert_channels:
            # BUGFIX: the original compared the Resource object itself
            # against a list of resource ids, which could never match -
            # compare ids to ids instead
            matches_resource = not channel.resources or (
                resource.resource_id in
                [r.resource_id for r in channel.resources])
            if (not channel.channel_validated or not channel.send_alerts or
                    not matches_resource):
                continue
            try:
                channel.notify_alert(resource=resource, event=self,
                                     user=user, request=request)
            except IntegrationException as e:
                # best-effort delivery: log and continue with other channels
                log.warning('%s' % e)
def get_logs(cls, resource_ids=None, filter_settings=None, db_session=None):
    """Build a Log query filtered by resource ids and optional settings.

    :param resource_ids: resource ids to scope the query to; when empty,
        an empty list is returned instead of an unscoped query
    :param filter_settings: optional dict supporting ``start_date``,
        ``end_date``, ``log_level``, ``request_id`` and ``namespace``
    :return: a SQLAlchemy query ordered by timestamp descending,
        or ``[]`` when no resource ids were passed
    """
    # ensure we always have id's passed
    if not resource_ids:
        # raise Exception('No App ID passed')
        return []
    # BUGFIX: filter_settings defaults to None but was dereferenced with
    # .get() below, raising AttributeError - normalize to an empty dict
    if not filter_settings:
        filter_settings = {}
    db_session = get_db_session(db_session)
    q = db_session.query(Log)
    q = q.filter(Log.resource_id.in_(resource_ids))
    if filter_settings.get("start_date"):
        q = q.filter(Log.timestamp >= filter_settings.get("start_date"))
    if filter_settings.get("end_date"):
        q = q.filter(Log.timestamp <= filter_settings.get("end_date"))
    if filter_settings.get("log_level"):
        q = q.filter(
            Log.log_level == filter_settings.get("log_level").upper())
    if filter_settings.get("request_id"):
        request_id = filter_settings.get("request_id", "")
        # request ids are stored without dashes
        q = q.filter(Log.request_id == request_id.replace("-", ""))
    if filter_settings.get("namespace"):
        q = q.filter(Log.namespace == filter_settings.get("namespace"))
    q = q.order_by(sa.desc(Log.timestamp))
    return q
def check_for_groups_alert(cls, resource, event_type, *args, **kwargs):
    """Check for open alerts depending on group type.

    Tallies error and slow report occurrences across the passed report
    groups, then for each alert type whose threshold was crossed either
    logs progress on an already-open event or opens a new one and sends
    notifications.
    """
    db_session = get_db_session(kwargs.get("db_session"))
    request = get_current_request()
    report_groups = kwargs["report_groups"]
    occurence_dict = kwargs["occurence_dict"]

    # tally occurrences per report type; groups missing from the
    # occurrence dict count as a single occurrence
    error_reports = 0
    slow_reports = 0
    for group in report_groups:
        occurences = occurence_dict.get(group.id, 1)
        if group.get_report().report_type == ReportType.error:
            error_reports += occurences
        elif group.get_report().report_type == ReportType.slow:
            slow_reports += occurences

    log_msg = "LIMIT INFO: %s : %s error reports. %s slow_reports" % (
        resource,
        error_reports,
        slow_reports,
    )
    logging.warning(log_msg)
    threshold = 10
    # NOTE(review): this loop variable shadows the ``event_type``
    # parameter, so the passed-in argument is never used — confirm intent
    for event_type in ["error_report_alert", "slow_report_alert"]:
        # skip alert types whose threshold has not been crossed
        # (error uses strict <, slow uses <= — presumably intentional)
        if (
            error_reports < resource.error_report_threshold
            and event_type == "error_report_alert"
        ):
            continue
        elif (
            slow_reports <= resource.slow_report_threshold
            and event_type == "slow_report_alert"
        ):
            continue
        if event_type == "error_report_alert":
            amount = error_reports
            threshold = resource.error_report_threshold
        elif event_type == "slow_report_alert":
            amount = slow_reports
            threshold = resource.slow_report_threshold
        # look for an already-active event of this type on the resource
        event = EventService.for_resource(
            [resource.resource_id],
            event_type=Event.types[event_type],
            status=Event.statuses["active"],
        )
        if event.first():
            log.info("ALERT: PROGRESS: %s %s" % (event_type, resource))
        else:
            log.warning("ALERT: OPEN: %s %s" % (event_type, resource))
            new_event = Event(
                resource_id=resource.resource_id,
                event_type=Event.types[event_type],
                status=Event.statuses["active"],
                values={"reports": amount, "threshold": threshold},
            )
            db_session.add(new_event)
            new_event.send_alerts(request=request, resource=resource)
def by_app_ids(cls, app_ids=None, order_by=True, db_session=None):
    """Build a Report query, optionally scoped to apps and newest-first.

    :param app_ids: restrict results to these resource ids when truthy
    :param order_by: when truthy, order by descending report id
    :return: an unexecuted SQLAlchemy query
    """
    db_session = get_db_session(db_session)
    query = db_session.query(Report)
    if app_ids:
        query = query.filter(Report.resource_id.in_(app_ids))
    if order_by:
        query = query.order_by(sa.desc(Report.id))
    return query
def affected_users_count(cls, report_group, db_session=None):
    """Count distinct non-empty usernames among the group's reports."""
    db_session = get_db_session(db_session)
    query = (
        db_session.query(sa.func.count(Report.username))
        .filter(Report.group_id == report_group.id)
        .filter(Report.username != "")
        .filter(Report.username != None)
        .group_by(Report.username)
    )
    # group_by collapses each username to one row; count() counts rows
    return query.count()
def users_commenting(cls, report_group, exclude_user_id=None, db_session=None):
    """Query distinct users who commented on *report_group*.

    :param exclude_user_id: when set, omit that user (e.g. the viewer)
    :return: an unexecuted SQLAlchemy query of User rows
    """
    # BUGFIX: honor an explicitly passed db_session instead of always
    # deriving one from the report_group object (the parameter was ignored)
    db_session = get_db_session(db_session, report_group)
    query = db_session.query(User).distinct()
    query = query.filter(User.id == ReportComment.owner_id)
    query = query.filter(ReportComment.group_id == report_group.id)
    if exclude_user_id:
        query = query.filter(ReportComment.owner_id != exclude_user_id)
    return query
def by_http_referer(cls, referer_string, db_session=None):
    """Find the Application whose domain matches the referer's host.

    A leading ``www.`` prefix is stripped so both forms of a domain
    resolve to the same application.
    """
    db_session = get_db_session(db_session)
    parsed = urllib.parse.urlsplit(referer_string, allow_fragments=False)
    domain = parsed.netloc
    if domain and domain.startswith("www."):
        domain = domain[4:]
    query = db_session.query(Application).filter(
        Application.domain == domain)
    return query.first()
def by_app_id_and_integration_name(
        cls, resource_id, integration_name, db_session=None):
    """Fetch the single integration matching both resource and name."""
    db_session = get_db_session(db_session)
    return (
        db_session.query(cls)
        .filter(cls.integration_name == integration_name)
        .filter(cls.resource_id == resource_id)
        .first()
    )
def create_tag(cls, resource_id, tag_name, value, db_session=None):
    """Persist a new Tag and flush it to the database.

    Name and value are trimmed via ``cut_name``/``cut_value`` before
    storage; returns the new Tag instance.
    """
    db_session = get_db_session(db_session)
    tag = Tag(
        resource_id=resource_id,
        name=cls.cut_name(tag_name),
        value=cls.cut_value(value),
    )
    db_session.add(tag)
    db_session.flush()
    return tag
def latest_for_user(cls, user, db_session=None):
    """Return the 10 newest events visible to *user*.

    Collects every resource the user can 'view' and delegates to
    ``EventService.for_resource``, also including events that directly
    target the user.
    """
    registry = get_current_registry()
    viewable = UserService.resources_with_perms(
        user, ["view"], resource_types=registry.resource_types)
    viewable_ids = [res.resource_id for res in viewable]
    db_session = get_db_session(db_session)
    return EventService.for_resource(
        viewable_ids,
        or_target_user_id=user.id,
        limit=10,
        db_session=db_session,
    )
def last_updated(cls, since_when, exclude_status=None, db_session=None):
    """Query applications whose report groups changed since *since_when*.

    Builds a subquery of recently-updated report-group resource ids and
    filters applications by membership in it.
    """
    db_session = get_db_session(db_session)
    # subquery: resource ids of report groups updated since the cutoff
    updated_groups = ReportGroup.last_updated(
        since_when, exclude_status=exclude_status, db_session=db_session
    )
    updated_groups = updated_groups.from_self(ReportGroup.resource_id)
    updated_groups = updated_groups.group_by(ReportGroup.resource_id)

    query = db_session.query(Application)
    query = query.filter(Application.resource_id.in_(updated_groups))
    return query
def query_by_primary_key_and_namespace(cls, list_of_pairs, db_session=None):
    """Query logs matching any of the given primary-key/namespace pairs.

    :param list_of_pairs: dicts with ``pk`` and ``ns`` keys
    :return: query ordered by timestamp then log id, ascending
    """
    db_session = get_db_session(db_session)
    conditions = [
        sa.and_(Log.primary_key == pair['pk'], Log.namespace == pair['ns'])
        for pair in list_of_pairs
    ]
    query = db_session.query(Log).filter(sa.or_(*conditions))
    query = query.order_by(sa.asc(Log.timestamp), sa.asc(Log.log_id))
    return query
def get_paginator_by_app_ids(
    cls,
    app_ids=None,
    page=1,
    item_count=None,
    items_per_page=50,
    order_by=None,
    filter_settings=None,
    exclude_columns=None,
    db_session=None,
):
    """Paginate reports found via elasticsearch, hydrated from the DB.

    The search backend supplies report ids in relevance order; matching
    Report rows (with their groups eagerly joined) are loaded from the
    database, optionally re-ordered per ``filter_settings``, and finally
    re-sorted to match the search order.

    :param item_count: ignored; pagination is driven by the search result
    :return: ``paginate.Page`` with ``sa_items`` holding ordered Reports
    """
    if not filter_settings:
        filter_settings = {}
    results = cls.get_search_iterator(
        app_ids, page, items_per_page, order_by, filter_settings
    )
    ordered_ids = []
    if results:
        for item in results["top_groups"]["buckets"]:
            pg_id = item["top_reports_hits"]["hits"]["hits"][0]["_source"][
                "report_id"
            ]
            ordered_ids.append(pg_id)
    log.info(filter_settings)
    paginator = paginate.Page(
        ordered_ids, items_per_page=items_per_page, **filter_settings
    )
    sa_items = ()
    if paginator.items:
        db_session = get_db_session(db_session)
        # latest report detail
        query = db_session.query(Report)
        query = query.options(sa.orm.joinedload(Report.report_group))
        query = query.filter(Report.id.in_(paginator.items))
        if filter_settings.get("order_col"):
            order_col = filter_settings.get("order_col")
            if filter_settings.get("order_dir") == "dsc":
                sort_on = "desc"
            else:
                sort_on = "asc"
            if order_col == "when":
                order_col = "last_timestamp"
            query = query.order_by(
                getattr(sa, sort_on)(getattr(ReportGroup, order_col))
            )
        sa_items = query.all()
    # resort to the elasticsearch order; use a lookup table instead of the
    # original O(n*m) nested scan with a linear membership test
    reports_by_id = {str(report.id): report for report in sa_items}
    sorted_instance_list = []
    seen_ids = set()
    for i_id in ordered_ids:
        report = reports_by_id.get(i_id)
        if report is not None and i_id not in seen_ids:
            seen_ids.add(i_id)
            sorted_instance_list.append(report)
    paginator.sa_items = sorted_instance_list
    return paginator
def by_hash_and_resource(
    cls, resource_id, grouping_hash, since_when=None, db_session=None
):
    """Find the first unfixed report group for a resource/hash pair.

    :param since_when: when set, only consider groups first seen at or
        after this timestamp
    """
    db_session = get_db_session(db_session)
    query = (
        db_session.query(ReportGroup)
        .filter(ReportGroup.resource_id == resource_id)
        .filter(ReportGroup.grouping_hash == grouping_hash)
        # exclude groups already marked as fixed
        .filter(ReportGroup.fixed == False)
    )
    if since_when:
        query = query.filter(ReportGroup.first_timestamp >= since_when)
    return query.first()
def top_affected_users(cls, report_group, db_session=None):
    """Query the top 50 usernames by report count within the group.

    :return: query of (username, count) rows, most frequent first
    """
    db_session = get_db_session(db_session)
    count_label = sa.func.count(Report.username).label("count")
    query = (
        db_session.query(Report.username, count_label)
        .filter(Report.group_id == report_group.id)
        .filter(Report.username != None)
        .filter(Report.username != "")
        .group_by(Report.username)
        .order_by(sa.desc(count_label))
        .limit(50)
    )
    return query
def by_id_cached(cls, db_session=None):
    """Return a callable that looks up applications by id via redis cache.

    The returned function proxies ``cls.by_id`` through the
    ``redis_min_1`` cache region; cache hits return detached instances.

    NOTE(review): left byte-identical — the inner function's identity is
    presumably part of the dogpile cache key, so renaming it could orphan
    existing cache entries; confirm before restructuring.
    """
    db_session = get_db_session(db_session)
    cache_region = get_region("redis_min_1")

    @cache_region.cache_on_arguments("ApplicationService.by_id")
    def cached(*args, **kwargs):
        app = cls.by_id(*args, db_session=db_session, **kwargs)
        if app:
            # detach so the cached copy stays usable outside the session
            db_session.expunge(app)
        return app

    return cached
def by_api_key_cached(cls, db_session=None):
    """Return a callable that looks up applications by API key via cache.

    The returned function proxies ``cls.by_api_key`` through the
    ``redis_min_1`` cache region; cache hits return detached instances.

    NOTE(review): left byte-identical — the inner function's identity is
    presumably part of the dogpile cache key, so renaming it could orphan
    existing cache entries; confirm before restructuring.
    """
    db_session = get_db_session(db_session)
    cache_region = get_region('redis_min_1')

    @cache_region.cache_on_arguments('ApplicationService.by_api_key')
    def cached(*args, **kwargs):
        app = cls.by_api_key(*args, db_session=db_session, **kwargs)
        if app:
            # detach so the cached copy stays usable outside the session
            db_session.expunge(app)
        return app

    return cached
def run_postprocessing(self, report):
    """Alters report group priority based on postprocessing configuration.

    Evaluates each configured postprocess rule against the report's dict
    form and applies matching actions that have not fired before, then
    clamps the resulting priority to the 1..10 range.
    """
    request = get_current_request()
    get_db_session(None, self).flush()
    for action in self.application.postprocess_conf:
        # flush before each rule so it sees the latest persisted state
        get_db_session(None, self).flush()
        rule_obj = Rule(action.rule, REPORT_TYPE_MATRIX)
        report_dict = report.get_dict(request)
        # if was not processed yet
        if (rule_obj.match(report_dict) and
                action.pkey not in self.triggered_postprocesses_ids):
            action.postprocess(self)
            # this way sqla can track mutation of list
            # (reassignment, not in-place append, marks the column dirty)
            self.triggered_postprocesses_ids = (
                self.triggered_postprocesses_ids + [action.pkey])
            get_db_session(None, self).flush()
    # do not go out of bounds
    if self.priority < 1:
        self.priority = 1
    if self.priority > 10:
        self.priority = 10
def filtered_key_and_section(cls, pairs=None, db_session=None):
    """Query Config rows matching any of the supplied key/section pairs.

    :param pairs: dicts with ``key`` and ``section`` entries; when falsy,
        the unfiltered query is returned
    """
    db_session = get_db_session(db_session)
    query = db_session.query(Config)
    if pairs:
        conditions = [
            sa.and_(Config.key == pair['key'],
                    Config.section == pair['section'])
            for pair in pairs
        ]
        query = query.filter(sa.or_(*conditions))
    return query
def by_tag_id(cls, tag_id, db_session=None):
    """Fetch a Tag by id through the 10-minute redis cache region.

    The tag is expunged from the session before being cached so the
    detached copy stays usable after the session closes; cache misses
    return ``None``.

    NOTE(review): left byte-identical — the inner function's identity is
    presumably part of the dogpile cache key, so renaming it could orphan
    existing cache entries; confirm before restructuring.
    """
    db_session = get_db_session(db_session)
    registry = get_current_registry()

    @registry.cache_regions.redis_min_10.cache_on_arguments(
        namespace='TagService.by_tag_id')
    def cached(tag_id):
        tag = db_session.query(Tag).filter(Tag.id == tag_id).first()
        if tag:
            db_session.expunge(tag)
        return tag

    return cached(tag_id)
def by_resource_group_and_perm(cls, group_id, perm_name, resource_id,
                               db_session=None):
    """Fetch the permission row matching group, perm name and resource.

    :return: the first matching GroupResourcePermission, or ``None``
    """
    db_session = get_db_session(db_session)
    query = (
        db_session.query(GroupResourcePermission)
        .filter(GroupResourcePermission.group_id == group_id)
        .filter(GroupResourcePermission.resource_id == resource_id)
        .filter(GroupResourcePermission.perm_name == perm_name)
    )
    return query.first()
def get_paginator_by_app_ids(cls, app_ids=None, page=1, item_count=None,
                             items_per_page=50, order_by=None,
                             filter_settings=None, exclude_columns=None,
                             db_session=None):
    """Paginate reports found via elasticsearch, hydrated from the DB.

    The search backend supplies report ids ('pg_id') in relevance order;
    matching Report rows (with groups eagerly joined) are loaded from the
    database, optionally re-ordered per ``filter_settings``, and finally
    re-sorted to match the search order.

    :param item_count: ignored; pagination is driven by the search result
    :return: ``paginate.Page`` with ``sa_items`` holding ordered Reports
    """
    if not filter_settings:
        filter_settings = {}
    results = cls.get_search_iterator(app_ids, page, items_per_page,
                                      order_by, filter_settings)
    ordered_ids = []
    if results:
        for item in results['top_groups']['buckets']:
            pg_id = item['top_reports_hits']['hits']['hits'][0]['_source'][
                'pg_id']
            ordered_ids.append(pg_id)
    log.info(filter_settings)
    paginator = paginate.Page(ordered_ids,
                              items_per_page=items_per_page,
                              **filter_settings)
    sa_items = ()
    if paginator.items:
        db_session = get_db_session(db_session)
        # latest report detail
        query = db_session.query(Report)
        query = query.options(sa.orm.joinedload(Report.report_group))
        query = query.filter(Report.id.in_(paginator.items))
        if filter_settings.get('order_col'):
            order_col = filter_settings.get('order_col')
            if filter_settings.get('order_dir') == 'dsc':
                sort_on = 'desc'
            else:
                sort_on = 'asc'
            if order_col == 'when':
                order_col = 'last_timestamp'
            query = query.order_by(
                getattr(sa, sort_on)(getattr(ReportGroup, order_col)))
        sa_items = query.all()
    # resort to the elasticsearch order; use a lookup table instead of the
    # original O(n*m) nested scan with a linear membership test
    reports_by_id = {str(report.id): report for report in sa_items}
    sorted_instance_list = []
    seen_ids = set()
    for i_id in ordered_ids:
        report = reports_by_id.get(i_id)
        if report is not None and i_id not in seen_ids:
            seen_ids.add(i_id)
            sorted_instance_list.append(report)
    paginator.sa_items = sorted_instance_list
    return paginator
def by_query(cls, resource_id=None, plugin_name=None, section=None,
             db_session=None):
    """Query plugin configs filtered by any combination of criteria.

    Each of *resource_id*, *plugin_name* and *section* adds an equality
    filter when truthy; omitted criteria are ignored.
    """
    db_session = get_db_session(db_session)
    query = db_session.query(PluginConfig)
    criteria = (
        (PluginConfig.resource_id, resource_id),
        (PluginConfig.plugin_name, plugin_name),
        (PluginConfig.section, section),
    )
    for column, value in criteria:
        if value:
            query = query.filter(column == value)
    return query
def get_uptime_stats(cls, resource_id, db_session=None):
    """Query uptime metrics for *resource_id* over the last hour.

    Rows carry the interval start (labelled ``interval``), response
    time, tries, status code and location, newest interval first.
    """
    db_session = get_db_session(db_session)
    # truncate to the current minute so the window is stable within it
    now = datetime.utcnow().replace(microsecond=0, second=0)
    floor_func = UptimeMetric.start_interval
    since_when = now - timedelta(hours=1)
    query = (
        db_session.query(
            floor_func.label("interval"),
            UptimeMetric.response_time,
            UptimeMetric.tries,
            UptimeMetric.status_code,
            UptimeMetric.location,
        )
        .filter(UptimeMetric.resource_id == resource_id)
        .filter(UptimeMetric.start_interval >= since_when)
        .order_by(sa.desc(floor_func))
    )
    return query
def for_resource(
    cls,
    resource_ids,
    event_type=None,
    status=None,
    since_when=None,
    limit=20,
    event_id=None,
    target_uuid=None,
    order_by=None,
    or_target_user_id=None,
    db_session=None,
):
    """Query events for the given resources with optional filters.

    All filter arguments are AND-ed together; when *or_target_user_id*
    is supplied, events that directly target that user are included
    regardless of the other conditions. An empty *resource_ids* forces
    an impossible resource match so only user-targeted events remain.
    Results are newest-first unless *order_by* is set, capped at *limit*.
    """
    db_session = get_db_session(db_session)
    query = db_session.query(Event)
    query = query.options(sa.orm.joinedload(Event.resource))

    if resource_ids:
        and_cond = [Event.resource_id.in_(resource_ids)]
    else:
        # no resources: match nothing on the resource side
        and_cond = [Event.resource_id == -999]
    if event_type:
        and_cond.append(Event.event_type == event_type)
    if status:
        and_cond.append(Event.status == status)
    if since_when:
        and_cond.append(Event.start_date >= since_when)
    if event_id:
        and_cond.append(Event.id == event_id)
    if target_uuid:
        and_cond.append(Event.target_uuid == target_uuid)

    or_cond = []
    if or_target_user_id:
        or_cond.append(sa.or_(Event.target_user_id == or_target_user_id))

    query = query.filter(sa.or_(sa.and_(*and_cond), *or_cond))
    if not order_by:
        query = query.order_by(sa.desc(Event.start_date))
    if limit:
        query = query.limit(limit)
    return query