def setup(self):
    """
    Validates and creates integration between application and hipchat
    """
    resource = self.request.context.resource
    self.create_missing_channel(resource, 'hipchat')
    form = forms.IntegrationHipchatForm(
        MultiDict(self.request.safe_json_body or {}),
        csrf_context=self.request,
        **self.integration_config)
    if self.request.method == 'POST' and form.validate():
        integration_config = {
            'api_token': form.api_token.data,
            'rooms': form.rooms.data,
        }
        if not self.integration:
            # add new integration
            self.integration = HipchatIntegration(
                modified_date=datetime.utcnow(),
            )
            self.request.session.flash('Integration added')
            resource.integrations.append(self.integration)
        else:
            self.request.session.flash('Integration updated')
        self.integration.config = integration_config
        DBSession.flush()
        self.create_missing_channel(resource, 'hipchat')
        return integration_config
    elif self.request.method == 'POST':
        return HTTPUnprocessableEntity(body=form.errors_json)
    return self.integration_config
def auth_tokens_POST(request):
    """
    Creates a new authentication token for the given user
    """
    if request.matched_route.name == "users_self_property":
        user = request.user
    else:
        user = UserService.by_id(request.matchdict.get("user_id"))
        if not user:
            return HTTPNotFound()

    req_data = request.safe_json_body or {}
    if not req_data.get("expires"):
        req_data.pop("expires", None)
    form = forms.AuthTokenCreateForm(MultiDict(req_data), csrf_context=request)
    if not form.validate():
        return HTTPUnprocessableEntity(body=form.errors_json)
    token = AuthToken()
    form.populate_obj(token)
    if token.expires:
        interval = h.time_deltas.get(token.expires)["delta"]
        token.expires = datetime.datetime.utcnow() + interval
    user.auth_tokens.append(token)
    DBSession.flush()
    return token.get_dict()
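
# A minimal sketch of the expiry handling above, assuming `h.time_deltas`
# maps labels such as "1d" to dicts holding a timedelta under "delta"
# (the exact labels are an assumption, not confirmed by this snippet):
import datetime

time_deltas = {"1d": {"delta": datetime.timedelta(days=1)}}
expires_label = "1d"
expires_at = datetime.datetime.utcnow() + time_deltas[expires_label]["delta"]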
def user_resource_permission_create(request):
    """
    Set new permissions for user for a resource
    """
    resource = request.context.resource
    user_name = request.unsafe_json_body.get("user_name")
    user = UserService.by_user_name(user_name)
    if not user:
        user = UserService.by_email(user_name)
    if not user:
        return False

    for perm_name in request.unsafe_json_body.get("permissions", []):
        permission = UserResourcePermissionService.by_resource_user_and_perm(
            user.id, perm_name, resource.resource_id
        )
        if not permission:
            permission = UserResourcePermission(perm_name=perm_name,
                                                user_id=user.id)
            resource.user_permissions.append(permission)
    DBSession.flush()
    perms = [
        p.perm_name
        for p in ResourceService.perms_for_user(resource, user)
        if p.type == "user"
    ]
    result = {"user_name": user.user_name, "permissions": list(set(perms))}
    return result
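
# A minimal sketch of the JSON body this view expects, inferred from the keys
# it reads above; the permission names are hypothetical examples:
example_body = {
    "user_name": "some_user",
    "permissions": ["view", "edit"],
}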
def users_create(request):
    """
    Creates a new user
    """
    form = forms.UserCreateForm(MultiDict(request.safe_json_body or {}),
                                csrf_context=request)
    if form.validate():
        log.info("registering user")
        # probably not needed in the future since this requires root anyways
        # lets keep this here in case we lower view permission in the future
        # if request.registry.settings['appenlight.disable_registration']:
        #     return HTTPUnprocessableEntity(body={'error': 'Registration is currently disabled.'})
        user = User()
        # insert new user here
        DBSession.add(user)
        form.populate_obj(user)
        UserService.regenerate_security_code(user)
        UserService.set_password(user, user.user_password)
        user.status = 1 if form.status.data else 0
        request.session.flash(_("User created"))
        DBSession.flush()
        return user.get_dict(exclude_keys=[
            "security_code_date",
            "notes",
            "security_code",
            "user_password",
        ])
    else:
        return HTTPUnprocessableEntity(body=form.errors_json)
def setup(self):
    """
    Creates webhook integration
    """
    resource = self.request.context.resource
    self.create_missing_channel(resource, "webhooks")
    form = forms.IntegrationWebhooksForm(
        MultiDict(self.request.safe_json_body or {}),
        csrf_context=self.request,
        **self.integration_config)
    if self.request.method == "POST" and form.validate():
        integration_config = {
            "reports_webhook": form.reports_webhook.data,
            "alerts_webhook": form.alerts_webhook.data,
        }
        if not self.integration:
            # add new integration
            self.integration = WebhooksIntegration(
                modified_date=datetime.utcnow())
            self.request.session.flash("Integration added")
            resource.integrations.append(self.integration)
        else:
            self.request.session.flash("Integration updated")
        self.integration.config = integration_config
        DBSession.flush()
        self.create_missing_channel(resource, "webhooks")
        return integration_config
    elif self.request.method == "POST":
        return HTTPUnprocessableEntity(body=form.errors_json)
    return self.integration_config
def setup(self):
    """
    Validates and creates integration between application and slack
    """
    resource = self.request.context.resource
    self.create_missing_channel(resource, "slack")
    form = forms.IntegrationSlackForm(
        MultiDict(self.request.safe_json_body or {}),
        csrf_context=self.request,
        **self.integration_config)
    if self.request.method == "POST" and form.validate():
        integration_config = {"webhook_url": form.webhook_url.data}
        if not self.integration:
            # add new integration
            self.integration = SlackIntegration(
                modified_date=datetime.utcnow())
            self.request.session.flash("Integration added")
            resource.integrations.append(self.integration)
        else:
            self.request.session.flash("Integration updated")
        self.integration.config = integration_config
        DBSession.flush()
        self.create_missing_channel(resource, "slack")
        return integration_config
    elif self.request.method == "POST":
        return HTTPUnprocessableEntity(body=form.errors_json)
    return self.integration_config
def add_metrics(resource_id, request_params, dataset, proto_version):
    current_time = datetime.utcnow().replace(second=0, microsecond=0)
    try:
        resource = ApplicationService.by_id_cached()(resource_id)
        resource = DBSession.merge(resource, load=False)
        es_docs = []
        rows = []
        for metric in dataset:
            tags = dict(metric["tags"])
            server_n = tags.get("server_name", metric["server_name"]).lower()
            tags["server_name"] = server_n or "unknown"
            new_metric = Metric(
                timestamp=metric["timestamp"],
                resource_id=resource.resource_id,
                namespace=metric["namespace"],
                tags=tags,
            )
            rows.append(new_metric)
            es_docs.append(new_metric.es_doc())
        session = DBSession()
        session.bulk_save_objects(rows)
        session.flush()

        action = "METRICS"
        metrics_msg = "%s: %s, metrics: %s, proto:%s" % (
            action,
            str(resource),
            len(dataset),
            proto_version,
        )
        log.info(metrics_msg)

        mark_changed(session)
        redis_pipeline = Datastores.redis.pipeline(transaction=False)
        key = REDIS_KEYS["counters"]["metrics_per_minute"].format(current_time)
        redis_pipeline.incr(key, len(rows))
        redis_pipeline.expire(key, 3600 * 24)
        key = REDIS_KEYS["counters"]["events_per_minute_per_user"].format(
            resource.owner_user_id, current_time)
        redis_pipeline.incr(key, len(rows))
        redis_pipeline.expire(key, 3600)
        key = REDIS_KEYS["counters"]["metrics_per_hour_per_app"].format(
            resource_id, current_time.replace(minute=0))
        redis_pipeline.incr(key, len(rows))
        redis_pipeline.expire(key, 3600 * 24 * 7)
        redis_pipeline.sadd(
            REDIS_KEYS["apps_that_got_new_data_per_hour"].format(
                current_time.replace(minute=0)),
            resource_id,
        )
        redis_pipeline.execute()
        add_metrics_es(es_docs)
        return True
    except Exception as exc:
        print_traceback(log)
        if celery.conf["CELERY_EAGER_PROPAGATES_EXCEPTIONS"]:
            raise
        add_metrics.retry(exc=exc)
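
# A minimal sketch of one entry in `dataset`, inferred from the keys the task
# reads above; the namespace and tag values are hypothetical. Note that
# `dict(metric["tags"])` accepts either a dict or a list of key/value pairs:
example_metric = {
    "timestamp": "2024-01-01T12:00:00",
    "namespace": "system.cpu",
    "server_name": "web01",
    "tags": [("server_name", "web01"), ("usage", 42)],
}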
def alert_channels_rule_POST(request):
    """
    Creates new notification rule for specific alert channel
    """
    user = request.user
    alert_action = AlertChannelAction(owner_id=request.user.id, type="report")
    DBSession.add(alert_action)
    DBSession.flush()
    return alert_action.get_dict()
def applications_postprocess_POST(request):
    """
    Creates new postprocessing rules for applications
    """
    resource = request.context.resource
    conf = ApplicationPostprocessConf()
    conf.do = "postprocess"
    conf.new_value = "1"
    resource.postprocess_conf.append(conf)
    DBSession.flush()
    return conf.get_dict()
def default_application(default_user):
    from appenlight.models.application import Application

    transaction.begin()
    application = Application(
        resource_id=1, resource_name='testapp', api_key='xxxx')
    DBSession.add(application)
    default_user.resources.append(application)
    DBSession.flush()
    transaction.commit()
    return application
def set_default_values():
    row = PluginConfigService.by_query(plugin_name=PLUGIN_DEFINITION["name"],
                                       section="global").first()
    if not row:
        plugin = PluginConfig()
        plugin.config = {"uptime_regions_map": [], "json_config_version": 1}
        plugin.section = "global"
        plugin.plugin_name = PLUGIN_DEFINITION["name"]
        plugin.config["json_config_version"] = 1
        DBSession.add(plugin)
        DBSession.flush()
def assign_users(request):
    """
    Assigns specific report group to user for review - send email notification
    """
    report_group = request.context.report_group
    application = request.context.resource
    currently_assigned = [u.user_name for u in report_group.assigned_users]
    new_assigns = request.unsafe_json_body

    # first unassign old users
    for user_name in new_assigns["unassigned"]:
        if user_name in currently_assigned:
            user = UserService.by_user_name(user_name)
            report_group.assigned_users.remove(user)
            comment = ReportComment(owner_id=request.user.id,
                                    report_time=report_group.first_timestamp)
            comment.body = "Unassigned group from @%s" % user_name
            report_group.comments.append(comment)

    # assign new users
    for user_name in new_assigns["assigned"]:
        if user_name not in currently_assigned:
            user = UserService.by_user_name(user_name)
            if user in report_group.assigned_users:
                report_group.assigned_users.remove(user)
            DBSession.flush()
            assignment = ReportAssignment(
                owner_id=user.id,
                report_time=report_group.first_timestamp,
                group_id=report_group.id,
            )
            DBSession.add(assignment)

            comment = ReportComment(owner_id=request.user.id,
                                    report_time=report_group.first_timestamp)
            comment.body = "Assigned report_group to @%s" % user_name
            report_group.comments.append(comment)

            email_vars = {
                "user": user,
                "request": request,
                "application": application,
                "report_group": report_group,
                "email_title": "AppEnlight :: Assigned Report",
            }
            UserService.send_email(
                request,
                recipients=[user.email],
                variables=email_vars,
                template="/email_templates/assigned_report.jinja2",
            )
    return True
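
# A minimal sketch of the JSON body this view expects, inferred from the
# "assigned"/"unassigned" keys it reads above; user names are hypothetical:
example_body = {
    "assigned": ["reviewer_1"],
    "unassigned": ["previous_reviewer"],
}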
def charts_event_rules_POST(request):
    json_body = copy.deepcopy(request.unsafe_json_body)
    alert_action = AlertChannelAction(
        name=json_body.get("name"),
        owner_id=request.user.id,
        resource_id=request.context.resource.resource_id,
        other_id=request.context.chart.uuid,
        rule=json_body["rule"],
        config=json_body["mappings"],
        type="chart",
    )
    DBSession.add(alert_action)
    DBSession.flush()
    return alert_action.get_dict(extended_info=True)
def default_user():
    from appenlight.models.user import User
    from appenlight.models.auth_token import AuthToken

    transaction.begin()
    user = User(id=1,
                user_name='testuser',
                status=1,
                email='*****@*****.**')
    DBSession.add(user)
    token = AuthToken(token='1234')
    user.auth_tokens.append(token)
    DBSession.flush()
    transaction.commit()
    return user
def add_metrics(resource_id, request_params, dataset, proto_version):
    current_time = datetime.utcnow().replace(second=0, microsecond=0)
    try:
        resource = ApplicationService.by_id_cached()(resource_id)
        resource = DBSession.merge(resource, load=False)
        es_docs = []
        rows = []
        for metric in dataset:
            tags = dict(metric['tags'])
            server_n = tags.get('server_name', metric['server_name']).lower()
            tags['server_name'] = server_n or 'unknown'
            new_metric = Metric(timestamp=metric['timestamp'],
                                resource_id=resource.resource_id,
                                namespace=metric['namespace'],
                                tags=tags)
            rows.append(new_metric)
            es_docs.append(new_metric.es_doc())
        session = DBSession()
        session.bulk_save_objects(rows)
        session.flush()

        action = 'METRICS'
        metrics_msg = '%s: %s, metrics: %s, proto:%s' % (
            action, str(resource), len(dataset), proto_version)
        log.info(metrics_msg)

        mark_changed(session)
        redis_pipeline = Datastores.redis.pipeline(transaction=False)
        key = REDIS_KEYS['counters']['metrics_per_minute'].format(current_time)
        redis_pipeline.incr(key, len(rows))
        redis_pipeline.expire(key, 3600 * 24)
        key = REDIS_KEYS['counters']['events_per_minute_per_user'].format(
            resource.owner_user_id, current_time)
        redis_pipeline.incr(key, len(rows))
        redis_pipeline.expire(key, 3600)
        key = REDIS_KEYS['counters']['metrics_per_hour_per_app'].format(
            resource_id, current_time.replace(minute=0))
        redis_pipeline.incr(key, len(rows))
        redis_pipeline.expire(key, 3600 * 24 * 7)
        redis_pipeline.sadd(
            REDIS_KEYS['apps_that_got_new_data_per_hour'].format(
                current_time.replace(minute=0)), resource_id)
        redis_pipeline.execute()
        add_metrics_es(es_docs)
        return True
    except Exception as exc:
        print_traceback(log)
        add_metrics.retry(exc=exc)
def dashboard_POST(request):
    dashboard = Dashboard(
        resource_name=request.unsafe_json_body["resource_name"],
        layout_config=request.unsafe_json_body["layout_config"],
    )
    request.user.resources.append(dashboard)
    DBSession.flush()
    chart_ids = []
    for row in dashboard.layout_config:
        for col in row["columns"]:
            chart_ids.append(col["chartId"])
    for c_uuid in chart_ids:
        chart = DashboardChart(uuid=c_uuid, config=None)
        dashboard.charts.append(chart)
    request.session.flash("Dashboard created")
    return dashboard.get_dict()
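
# A minimal sketch of `layout_config`, inferred from the row/column iteration
# above; the chart UUIDs are hypothetical placeholders:
example_layout_config = [
    {"columns": [{"chartId": "11111111-1111-1111-1111-111111111111"},
                 {"chartId": "22222222-2222-2222-2222-222222222222"}]},
]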
def groups_create(request):
    """
    Creates a new group
    """
    form = forms.GroupCreateForm(MultiDict(request.safe_json_body or {}),
                                 csrf_context=request)
    if form.validate():
        log.info("registering group")
        group = Group()
        # insert new group here
        DBSession.add(group)
        form.populate_obj(group)
        request.session.flash(_("Group created"))
        DBSession.flush()
        return group.get_dict(include_perms=True)
    else:
        return HTTPUnprocessableEntity(body=form.errors_json)
def add_uptime_stats(params, metric):
    proto_version = parse_proto(params.get("protocol_version"))
    try:
        application = ApplicationService.by_id_cached()(metric["resource_id"])
        application = DBSession.merge(application, load=False)
        if not application:
            return
        start_interval = convert_date(metric["timestamp"])
        start_interval = start_interval.replace(second=0, microsecond=0)
        new_metric = UptimeMetric(
            start_interval=start_interval,
            response_time=metric["response_time"],
            status_code=metric["status_code"],
            is_ok=metric["is_ok"],
            location=metric.get("location", 1),
            tries=metric["tries"],
            resource_id=application.resource_id,
            owner_user_id=application.owner_user_id,
        )
        DBSession.add(new_metric)
        DBSession.flush()
        add_metrics_uptime([new_metric.es_doc()])
        if metric["is_ok"]:
            event_types = [Event.types["uptime_alert"]]
            statuses = [Event.statuses["active"]]
            # close active alerts older than 6 minutes
            events = EventService.by_type_and_status(
                event_types,
                statuses,
                older_than=(datetime.utcnow() - timedelta(minutes=6)),
                app_ids=[application.resource_id],
            )
            for event in events:
                event.close()
        else:
            UptimeMetricService.check_for_alert(application, metric=metric)
        action = "METRICS UPTIME"
        metrics_msg = "%s: %s, proto:%s" % (action, str(application),
                                            proto_version)
        log.info(metrics_msg)
        session = DBSession()
        mark_changed(session)
        return True
    except Exception as exc:
        print_traceback(log)
        add_uptime_stats.retry(exc=exc)
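
# A minimal sketch of the `metric` dict this task consumes, inferred from the
# keys read above; the concrete values are hypothetical. When "is_ok" is true,
# the task closes stale uptime_alert events instead of raising new alerts:
example_uptime_metric = {
    "resource_id": 1,
    "timestamp": "2024-01-01T12:00:00",
    "response_time": 0.25,
    "status_code": 200,
    "is_ok": True,
    "location": 1,
    "tries": 1,
}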
def user_resource_permission_delete(request):
    """
    Removes user permission from specific resource
    """
    resource = request.context.resource
    user = User.by_user_name(request.GET.get('user_name'))
    if not user:
        return False

    for perm_name in request.GET.getall('permissions'):
        permission = UserResourcePermission.by_resource_user_and_perm(
            user.id, perm_name, resource.resource_id)
        resource.user_permissions.remove(permission)
    DBSession.flush()
    perms = [
        p.perm_name for p in resource.perms_for_user(user)
        if p.type == 'user'
    ]
    result = {'user_name': user.user_name, 'permissions': list(set(perms))}
    return result
def alert_channels_POST(request):
    """
    Creates a new email alert channel for user, sends a validation email
    """
    user = request.user
    form = forms.EmailChannelCreateForm(MultiDict(request.unsafe_json_body),
                                        csrf_context=request)
    if not form.validate():
        return HTTPUnprocessableEntity(body=form.errors_json)

    email = form.email.data.strip()
    channel = EmailAlertChannel()
    channel.channel_name = "email"
    channel.channel_value = email
    security_code = generate_random_string(10)
    channel.channel_json_conf = {"security_code": security_code}
    user.alert_channels.append(channel)

    email_vars = {
        "user": user,
        "email": email,
        "request": request,
        "security_code": security_code,
        "email_title": "AppEnlight :: Please authorize your email",
    }
    UserService.send_email(
        request,
        recipients=[email],
        variables=email_vars,
        template="/email_templates/authorize_email.jinja2",
    )
    request.session.flash(_("Your alert channel was added to the system."))
    request.session.flash(
        _("You need to authorize your email channel, a message was "
          "sent containing necessary information."),
        "warning",
    )
    DBSession.flush()
    return channel.get_dict()
def application_create(request):
    """
    Creates new application instances
    """
    user = request.user
    form = forms.ApplicationCreateForm(MultiDict(request.unsafe_json_body),
                                       csrf_context=request)
    if form.validate():
        session = DBSession()
        resource = Application()
        DBSession.add(resource)
        form.populate_obj(resource)
        resource.api_key = resource.generate_api_key()
        user.resources.append(resource)
        request.session.flash(_('Application created'))
        DBSession.flush()
        mark_changed(session)
    else:
        return HTTPUnprocessableEntity(body=form.errors_json)

    return resource.get_dict()
def group_resource_permission_delete(request):
    """
    Removes group permission from specific resource
    """
    form = forms.ReactorForm(request.POST, csrf_context=request)
    form.validate()
    resource = request.context.resource
    group = GroupService.by_id(request.GET.get('group_id'))
    if not group:
        return False

    for perm_name in request.GET.getall('permissions'):
        permission = GroupResourcePermissionService.by_resource_group_and_perm(
            group.id, perm_name, resource.resource_id)
        resource.group_permissions.remove(permission)
    DBSession.flush()
    perm_tuples = resource.groups_for_perm(ANY_PERMISSION,
                                           limit_group_permissions=True,
                                           group_ids=[group.id])
    perms = [p.perm_name for p in perm_tuples if p.type == 'group']
    result = {'group': group.get_dict(), 'permissions': list(set(perms))}
    return result
def users_create(request):
    """
    Creates a new user
    """
    form = forms.UserCreateForm(MultiDict(request.safe_json_body or {}),
                                csrf_context=request)
    if form.validate():
        log.info('registering user')
        user = User()
        # insert new user here
        DBSession.add(user)
        form.populate_obj(user)
        user.regenerate_security_code()
        user.set_password(user.user_password)
        user.status = 1 if form.status.data else 0
        request.session.flash(_('User created'))
        DBSession.flush()
        return user.get_dict(exclude_keys=[
            'security_code_date', 'notes', 'security_code', 'user_password'
        ])
    else:
        return HTTPUnprocessableEntity(body=form.errors_json)
def user_resource_permission_delete(request):
    """
    Removes user permission from specific resource
    """
    resource = request.context.resource
    user = UserService.by_user_name(request.GET.get("user_name"))
    if not user:
        return False

    for perm_name in request.GET.getall("permissions"):
        permission = UserResourcePermissionService.by_resource_user_and_perm(
            user.id, perm_name, resource.resource_id
        )
        resource.user_permissions.remove(permission)
    DBSession.flush()
    perms = [
        p.perm_name
        for p in ResourceService.perms_for_user(resource, user)
        if p.type == "user"
    ]
    result = {"user_name": user.user_name, "permissions": list(set(perms))}
    return result
def group_resource_permission_create(request):
    """
    Set new permissions for group for a resource
    """
    resource = request.context.resource
    group = GroupService.by_id(request.unsafe_json_body.get("group_id"))
    if not group:
        return False

    for perm_name in request.unsafe_json_body.get("permissions", []):
        permission = GroupResourcePermissionService.by_resource_group_and_perm(
            group.id, perm_name, resource.resource_id
        )
        if not permission:
            permission = GroupResourcePermission(perm_name=perm_name,
                                                 group_id=group.id)
            resource.group_permissions.append(permission)
    DBSession.flush()
    perm_tuples = ResourceService.groups_for_perm(
        resource, ANY_PERMISSION, limit_group_permissions=True,
        group_ids=[group.id]
    )
    perms = [p.perm_name for p in perm_tuples if p.type == "group"]
    result = {"group": group.get_dict(), "permissions": list(set(perms))}
    return result
def post(request):
    schema = UptimeConfigSchema()
    json_body = request.unsafe_json_body
    plugin = PluginConfig()
    plugin.config = {}
    plugin.plugin_name = PLUGIN_DEFINITION["name"]
    plugin.owner_id = request.user.id
    if json_body["section"] == "global":
        # admin config
        plugin.config = json_body["config"]
        plugin.section = "global"
    else:
        # handle user uptime_url
        deserialized = schema.deserialize(json_body["config"])
        plugin.config = deserialized
        plugin.section = "resource"
    if request.context.resource:
        plugin.resource_id = request.context.resource.resource_id
    plugin.config["json_config_version"] = 1
    DBSession.add(plugin)
    DBSession.flush()
    return plugin
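
# A minimal sketch of the two payload shapes this view accepts, inferred from
# the branches above. The "uptime_url" key is an assumption based on the
# comment in the resource branch; the actual shape is whatever
# UptimeConfigSchema deserializes:
example_global_body = {"section": "global",
                       "config": {"uptime_regions_map": []}}
example_resource_body = {"section": "resource",
                         "config": {"uptime_url": "https://example.com/health"}}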
def register(request):
    """
    Render register page with form
    Also handles oAuth flow for registration
    """
    login_url = request.route_url("ziggurat.routes.sign_in")
    if request.query_string:
        query_string = "?%s" % request.query_string
    else:
        query_string = ""
    referrer = "%s%s" % (request.path, query_string)

    if referrer in [login_url, "/register", "/register?sign_in=1"]:
        referrer = "/"  # never use the login form itself as came_from
    sign_in_form = forms.SignInForm(
        came_from=request.params.get("came_from", referrer),
        csrf_context=request
    )

    # populate form from oAuth session data returned by authomatic
    social_data = request.session.get("zigg.social_auth")
    if request.method != "POST" and social_data:
        log.debug(social_data)
        user_name = social_data["user"].get("user_name", "").split("@")[0]
        form_data = {"user_name": user_name,
                     "email": social_data["user"].get("email")}
        form_data["user_password"] = str(uuid.uuid4())
        form = forms.UserRegisterForm(MultiDict(form_data),
                                      csrf_context=request)
        form.user_password.widget.hide_value = False
    else:
        form = forms.UserRegisterForm(request.POST, csrf_context=request)
    if request.method == "POST" and form.validate():
        log.info("registering user")
        # insert new user here
        if request.registry.settings["appenlight.disable_registration"]:
            request.session.flash(_("Registration is currently disabled."))
            return HTTPFound(location=request.route_url("/"))

        new_user = User()
        DBSession.add(new_user)
        form.populate_obj(new_user)
        UserService.regenerate_security_code(new_user)
        new_user.status = 1
        UserService.set_password(new_user, new_user.user_password)
        new_user.registration_ip = request.environ.get("REMOTE_ADDR")

        if social_data:
            handle_social_data(request, new_user, social_data)

        email_vars = {
            "user": new_user,
            "request": request,
            "email_title": "AppEnlight :: Start information",
        }
        UserService.send_email(
            request,
            recipients=[new_user.email],
            variables=email_vars,
            template="/email_templates/registered.jinja2",
        )
        request.session.flash(_("You have successfully registered."))
        DBSession.flush()
        headers = security.remember(request, new_user.id)
        return HTTPFound(location=request.route_url("/"), headers=headers)

    settings = request.registry.settings
    social_plugins = {}
    if settings.get("authomatic.pr.twitter.key", ""):
        social_plugins["twitter"] = True
    if settings.get("authomatic.pr.google.key", ""):
        social_plugins["google"] = True
    if settings.get("authomatic.pr.github.key", ""):
        social_plugins["github"] = True
    if settings.get("authomatic.pr.bitbucket.key", ""):
        social_plugins["bitbucket"] = True

    return {
        "form": form,
        "sign_in_form": sign_in_form,
        "social_plugins": social_plugins,
    }
def register(request):
    """
    Render register page with form
    Also handles oAuth flow for registration
    """
    login_url = request.route_url('ziggurat.routes.sign_in')
    if request.query_string:
        query_string = '?%s' % request.query_string
    else:
        query_string = ''
    referrer = '%s%s' % (request.path, query_string)

    if referrer in [login_url, '/register', '/register?sign_in=1']:
        referrer = '/'  # never use the login form itself as came_from
    sign_in_form = forms.SignInForm(
        came_from=request.params.get('came_from', referrer),
        csrf_context=request)

    # populate form from oAuth session data returned by authomatic
    social_data = request.session.get('zigg.social_auth')
    if request.method != 'POST' and social_data:
        log.debug(social_data)
        user_name = social_data['user'].get('user_name', '').split('@')[0]
        form_data = {
            'user_name': user_name,
            'email': social_data['user'].get('email')
        }
        form_data['user_password'] = str(uuid.uuid4())
        form = forms.UserRegisterForm(MultiDict(form_data),
                                      csrf_context=request)
        form.user_password.widget.hide_value = False
    else:
        form = forms.UserRegisterForm(request.POST, csrf_context=request)
    if request.method == 'POST' and form.validate():
        log.info('registering user')
        # insert new user here
        if request.registry.settings['appenlight.disable_registration']:
            request.session.flash(_('Registration is currently disabled.'))
            return HTTPFound(location=request.route_url('/'))

        new_user = User()
        DBSession.add(new_user)
        form.populate_obj(new_user)
        new_user.regenerate_security_code()
        new_user.status = 1
        new_user.set_password(new_user.user_password)
        new_user.registration_ip = request.environ.get('REMOTE_ADDR')

        if social_data:
            handle_social_data(request, new_user, social_data)

        email_vars = {
            'user': new_user,
            'request': request,
            'email_title': "AppEnlight :: Start information"
        }
        UserService.send_email(request,
                               recipients=[new_user.email],
                               variables=email_vars,
                               template='/email_templates/registered.jinja2')
        request.session.flash(_('You have successfully registered.'))
        DBSession.flush()
        headers = security.remember(request, new_user.id)
        return HTTPFound(location=request.route_url('/'), headers=headers)

    settings = request.registry.settings
    social_plugins = {}
    if settings.get('authomatic.pr.twitter.key', ''):
        social_plugins['twitter'] = True
    if settings.get('authomatic.pr.google.key', ''):
        social_plugins['google'] = True
    if settings.get('authomatic.pr.github.key', ''):
        social_plugins['github'] = True
    if settings.get('authomatic.pr.bitbucket.key', ''):
        social_plugins['bitbucket'] = True

    return {
        "form": form,
        "sign_in_form": sign_in_form,
        "social_plugins": social_plugins
    }
def add_logs(resource_id, request_params, dataset, **kwargs):
    proto_version = request_params.get("protocol_version")
    current_time = datetime.utcnow().replace(second=0, microsecond=0)

    try:
        es_docs = collections.defaultdict(list)
        resource = ApplicationService.by_id_cached()(resource_id)
        resource = DBSession.merge(resource, load=False)
        ns_pairs = []
        for entry in dataset:
            # gather pk and ns so we can remove older versions of row later
            if entry["primary_key"] is not None:
                ns_pairs.append({
                    "pk": entry["primary_key"],
                    "ns": entry["namespace"]
                })
            log_entry = Log()
            log_entry.set_data(entry, resource=resource)
            log_entry._skip_ft_index = True
            resource.logs.append(log_entry)
            DBSession.flush()
            # insert non pk rows first
            if entry["primary_key"] is None:
                es_docs[log_entry.partition_id].append(log_entry.es_doc())

        # 2nd pass to delete all log entries from db for same pk/ns pair
        if ns_pairs:
            ids_to_delete = []
            es_docs = collections.defaultdict(list)
            es_docs_to_delete = collections.defaultdict(list)
            found_pkey_logs = LogService.query_by_primary_key_and_namespace(
                list_of_pairs=ns_pairs)
            log_dict = {}
            for log_entry in found_pkey_logs:
                log_key = (log_entry.primary_key, log_entry.namespace)
                if log_key not in log_dict:
                    log_dict[log_key] = []
                log_dict[log_key].append(log_entry)

            for ns, entry_list in log_dict.items():
                entry_list = sorted(entry_list, key=lambda x: x.timestamp)
                # newest row needs to be indexed in es
                log_entry = entry_list[-1]
                # delete everything from pg and ES, leave the last row in pg
                for e in entry_list[:-1]:
                    ids_to_delete.append(e.log_id)
                    es_docs_to_delete[e.partition_id].append(e.delete_hash)

                es_docs_to_delete[log_entry.partition_id].append(
                    log_entry.delete_hash)

                es_docs[log_entry.partition_id].append(log_entry.es_doc())

            if ids_to_delete:
                query = DBSession.query(Log).filter(
                    Log.log_id.in_(ids_to_delete))
                query.delete(synchronize_session=False)
            if es_docs_to_delete:
                # batch this to avoid problems with default ES bulk limits
                for es_index in es_docs_to_delete.keys():
                    for batch in in_batches(es_docs_to_delete[es_index], 20):
                        query = {"query": {"terms": {"delete_hash": batch}}}
                        try:
                            Datastores.es.delete_by_query(
                                index=es_index,
                                doc_type="log",
                                body=query,
                                conflicts="proceed",
                            )
                        except elasticsearch.exceptions.NotFoundError as exc:
                            msg = "skipping index {}".format(es_index)
                            log.info(msg)

        total_logs = len(dataset)

        log_msg = "LOG_NEW: %s, entries: %s, proto:%s" % (
            str(resource),
            total_logs,
            proto_version,
        )
        log.info(log_msg)
        # mark_changed(session)
        redis_pipeline = Datastores.redis.pipeline(transaction=False)
        key = REDIS_KEYS["counters"]["logs_per_minute"].format(current_time)
        redis_pipeline.incr(key, total_logs)
        redis_pipeline.expire(key, 3600 * 24)
        key = REDIS_KEYS["counters"]["events_per_minute_per_user"].format(
            resource.owner_user_id, current_time)
        redis_pipeline.incr(key, total_logs)
        redis_pipeline.expire(key, 3600)
        key = REDIS_KEYS["counters"]["logs_per_hour_per_app"].format(
            resource_id, current_time.replace(minute=0))
        redis_pipeline.incr(key, total_logs)
        redis_pipeline.expire(key, 3600 * 24 * 7)
        redis_pipeline.sadd(
            REDIS_KEYS["apps_that_got_new_data_per_hour"].format(
                current_time.replace(minute=0)),
            resource_id,
        )
        redis_pipeline.execute()
        add_logs_es(es_docs)
        return True
    except Exception as exc:
        print_traceback(log)
        if celery.conf["CELERY_EAGER_PROPAGATES_EXCEPTIONS"]:
            raise
        add_logs.retry(exc=exc)
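
# A minimal sketch of the deduplication keys read from each `dataset` entry
# above: entries with a non-null "primary_key" replace earlier rows that share
# the same (primary_key, namespace) pair, while the remaining log fields are
# handled by Log.set_data(); the values here are hypothetical:
example_log_entry = {
    "primary_key": "task-1234",
    "namespace": "worker.jobs",
}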
def add_reports(resource_id, request_params, dataset, **kwargs):
    proto_version = parse_proto(request_params.get("protocol_version", ""))
    current_time = datetime.utcnow().replace(second=0, microsecond=0)
    try:
        # we will store solr docs here for single insert
        es_report_docs = {}
        es_report_group_docs = {}
        resource = ApplicationService.by_id(resource_id)

        tags = []
        es_slow_calls_docs = {}
        es_reports_stats_rows = {}
        for report_data in dataset:
            # build report details for later
            added_details = 0
            report = Report()
            report.set_data(report_data, resource, proto_version)
            report._skip_ft_index = True

            # find latest group in this months partition
            report_group = ReportGroupService.by_hash_and_resource(
                report.resource_id,
                report.grouping_hash,
                since_when=datetime.utcnow().date().replace(day=1),
            )
            occurences = report_data.get("occurences", 1)
            if not report_group:
                # total reports will be +1 moment later
                report_group = ReportGroup(
                    grouping_hash=report.grouping_hash,
                    occurences=0,
                    total_reports=0,
                    last_report=0,
                    priority=report.priority,
                    error=report.error,
                    first_timestamp=report.start_time,
                )
                report_group._skip_ft_index = True
                report_group.report_type = report.report_type
            report.report_group_time = report_group.first_timestamp

            add_sample = pick_sample(report_group.occurences,
                                     report_type=report_group.report_type)
            if add_sample:
                resource.report_groups.append(report_group)
                report_group.reports.append(report)
                added_details += 1
                DBSession.flush()
                if report.partition_id not in es_report_docs:
                    es_report_docs[report.partition_id] = []
                es_report_docs[report.partition_id].append(report.es_doc())
                tags.extend(list(report.tags.items()))
                slow_calls = report.add_slow_calls(report_data, report_group)
                DBSession.flush()
                for s_call in slow_calls:
                    if s_call.partition_id not in es_slow_calls_docs:
                        es_slow_calls_docs[s_call.partition_id] = []
                    es_slow_calls_docs[s_call.partition_id].append(
                        s_call.es_doc())
                # try generating new stat rows if needed
            else:
                # required for postprocessing to not fail later
                report.report_group = report_group

            stat_row = ReportService.generate_stat_rows(
                report, resource, report_group)
            if stat_row.partition_id not in es_reports_stats_rows:
                es_reports_stats_rows[stat_row.partition_id] = []
            es_reports_stats_rows[stat_row.partition_id].append(
                stat_row.es_doc())

            # see if we should mark 10th occurrence of report
            last_occurences_10 = int(math.floor(report_group.occurences / 10))
            curr_occurences_10 = int(
                math.floor((report_group.occurences + report.occurences) / 10))
            last_occurences_100 = int(
                math.floor(report_group.occurences / 100))
            curr_occurences_100 = int(
                math.floor((report_group.occurences + report.occurences) / 100))
            notify_occurences_10 = last_occurences_10 != curr_occurences_10
            notify_occurences_100 = last_occurences_100 != curr_occurences_100
            report_group.occurences = ReportGroup.occurences + occurences
            report_group.last_timestamp = report.start_time
            report_group.summed_duration = (ReportGroup.summed_duration +
                                            report.duration)
            summed_duration = ReportGroup.summed_duration + report.duration
            summed_occurences = ReportGroup.occurences + occurences
            report_group.average_duration = summed_duration / summed_occurences
            report_group.run_postprocessing(report)
            if added_details:
                report_group.total_reports = ReportGroup.total_reports + 1
                report_group.last_report = report.id
            report_group.set_notification_info(
                notify_10=notify_occurences_10,
                notify_100=notify_occurences_100)
            DBSession.flush()
            report_group.get_report().notify_channel(report_group)
            if report_group.partition_id not in es_report_group_docs:
                es_report_group_docs[report_group.partition_id] = []
            es_report_group_docs[report_group.partition_id].append(
                report_group.es_doc())

        action = "REPORT"
        log_msg = "%s: %s %s, client: %s, proto: %s" % (
            action,
            report_data.get("http_status", "unknown"),
            str(resource),
            report_data.get("client"),
            proto_version,
        )
        log.info(log_msg)
        total_reports = len(dataset)
        redis_pipeline = Datastores.redis.pipeline(transaction=False)
        key = REDIS_KEYS["counters"]["reports_per_minute"].format(current_time)
        redis_pipeline.incr(key, total_reports)
        redis_pipeline.expire(key, 3600 * 24)
        key = REDIS_KEYS["counters"]["events_per_minute_per_user"].format(
            resource.owner_user_id, current_time)
        redis_pipeline.incr(key, total_reports)
        redis_pipeline.expire(key, 3600)
        key = REDIS_KEYS["counters"]["reports_per_hour_per_app"].format(
            resource_id, current_time.replace(minute=0))
        redis_pipeline.incr(key, total_reports)
        redis_pipeline.expire(key, 3600 * 24 * 7)
        redis_pipeline.sadd(
            REDIS_KEYS["apps_that_got_new_data_per_hour"].format(
                current_time.replace(minute=0)),
            resource_id,
        )
        redis_pipeline.execute()
        add_reports_es(es_report_group_docs, es_report_docs)
        add_reports_slow_calls_es(es_slow_calls_docs)
        add_reports_stats_rows_es(es_reports_stats_rows)
        return True
    except Exception as exc:
        print_traceback(log)
        if celery.conf["CELERY_EAGER_PROPAGATES_EXCEPTIONS"]:
            raise
        add_reports.retry(exc=exc)
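
# A small worked example of the occurrence-notification math above: a
# notification flag flips whenever the running count crosses a multiple of 10
# (or 100). With 9 prior occurrences and 3 new ones, floor(9/10) == 0 while
# floor(12/10) == 1, so the "notify every 10th" flag fires; the 100s flag
# stays off because floor(9/100) == floor(12/100) == 0.
import math

prior, new = 9, 3
notify_10 = math.floor(prior / 10) != math.floor((prior + new) / 10)
notify_100 = math.floor(prior / 100) != math.floor((prior + new) / 100)
assert notify_10 is True and notify_100 is False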