def serialize(self, obj, attrs, user, **kwargs):
    """Serialize a saved Discover query into its API representation."""
    data = {
        "id": str(obj.id),
        "name": obj.name,
        "projects": [project.id for project in obj.projects.all()],
        "version": obj.version or obj.query.get("version", 1),
        "expired": False,
        "dateCreated": obj.date_created,
        "dateUpdated": obj.date_updated,
        "createdBy": serialize(obj.created_by, serializer=UserSerializer())
        if obj.created_by
        else None,
    }

    # Copy across only the whitelisted query keys that are actually set.
    for key in (
        "environment",
        "query",
        "fields",
        "widths",
        "conditions",
        "aggregations",
        "range",
        "start",
        "end",
        "orderby",
        "limit",
        "yAxis",
        "display",
        "topEvents",
    ):
        value = obj.query.get(key)
        if value is not None:
            data[key] = value

    # expire queries that are beyond the retention period
    if "start" in obj.query:
        start = parse_timestamp(obj.query["start"])
        end = parse_timestamp(obj.query["end"])
        if start and end:
            data["expired"], data["start"] = outside_retention_with_modified_start(
                start, end, obj.organization
            )

    if obj.query.get("all_projects"):
        data["projects"] = list(ALL_ACCESS_PROJECTS)

    return data
def normalize_crumb(cls, crumb):
    """Normalize a raw breadcrumb dict into its canonical form.

    Only keys that are present and meaningful are emitted; ``data`` values
    that are not strings are JSON-encoded.

    Raises:
        InterfaceValidationError: if no timestamp can be parsed.
    """
    ty = crumb.get('type') or 'default'
    ts = parse_timestamp(crumb.get('timestamp'))
    if ts is None:
        raise InterfaceValidationError('Unable to determine timestamp '
                                       'for crumb')

    rv = {
        'type': ty,
        'timestamp': to_timestamp(ts),
    }

    level = crumb.get('level')
    if level not in (None, 'info'):
        rv['level'] = level

    msg = crumb.get('message')
    if msg is not None:
        rv['message'] = trim(six.text_type(msg), 4096)

    category = crumb.get('category')
    if category is not None:
        rv['category'] = trim(six.text_type(category), 256)

    event_id = crumb.get('event_id')
    if event_id is not None:
        rv['event_id'] = event_id

    data = crumb.get('data')
    if data:
        if isinstance(data, dict):
            # Stringify non-string values on a copy so the caller's dict is
            # not mutated (previously json.dumps results were written back
            # into crumb['data']).
            data = {
                key: value if isinstance(value, six.string_types) else json.dumps(value)
                for key, value in six.iteritems(data)
            }
            rv['data'] = trim(data, 4096)
        # A non-mapping ``data`` previously crashed with AttributeError from
        # six.iteritems(); drop it instead of failing the whole crumb, in
        # line with the other normalize_crumb variants in this file.

    return rv
def normalize_crumb(cls, crumb):
    """Return the canonical representation of a single breadcrumb.

    Raises:
        InterfaceValidationError: if no timestamp can be parsed.
    """
    ts = parse_timestamp(crumb.get('timestamp'))
    if ts is None:
        raise InterfaceValidationError('Unable to determine timestamp '
                                       'for crumb')

    rv = {
        'type': crumb.get('type') or 'default',
        'timestamp': to_timestamp(ts),
    }

    # 'info' is the implicit default level, so it is not stored explicitly.
    level = crumb.get('level')
    if level is not None and level != 'info':
        rv['level'] = level

    # Stringify and trim the free-form text fields to their size limits.
    for field, limit in (('message', 4096), ('category', 256)):
        value = crumb.get(field)
        if value is not None:
            rv[field] = trim(six.text_type(value), limit)

    if crumb.get('event_id') is not None:
        rv['event_id'] = crumb['event_id']

    if 'data' in crumb:
        rv['data'] = trim(crumb['data'], 4096)

    return rv
def _process_signal(msg):
    # Emit the matching project signal (event_filtered / event_dropped) for a
    # single outcomes-consumer message, deduplicating so each event fires the
    # signal at most once, and record skip/sent metrics along the way.
    project_id = int(msg.get("project_id") or 0)
    if project_id == 0:
        metrics.incr("outcomes_consumer.skip_outcome", tags={"reason": "project_zero"})
        return  # no project. this is valid, so ignore silently.

    outcome = int(msg.get("outcome", -1))
    if outcome not in (Outcome.FILTERED, Outcome.RATE_LIMITED):
        metrics.incr("outcomes_consumer.skip_outcome", tags={"reason": "wrong_outcome_type"})
        return  # nothing to do here

    event_id = msg.get("event_id")
    if not event_id:
        metrics.incr("outcomes_consumer.skip_outcome", tags={"reason": "missing_event_id"})
        return

    # Dedup: skip if a signal was already sent for this (project, event).
    if is_signal_sent(project_id=project_id, event_id=event_id):
        metrics.incr("outcomes_consumer.skip_outcome", tags={"reason": "is_signal_sent"})
        return  # message already processed nothing left to do

    try:
        project = Project.objects.get_from_cache(id=project_id)
    except Project.DoesNotExist:
        metrics.incr("outcomes_consumer.skip_outcome", tags={"reason": "unknown_project"})
        logger.error("OutcomesConsumer could not find project with id: %s", project_id)
        return

    reason = msg.get("reason")
    remote_addr = msg.get("remote_addr")

    if outcome == Outcome.FILTERED:
        event_filtered.send_robust(ip=remote_addr, project=project, sender=OutcomesConsumerWorker)
    elif outcome == Outcome.RATE_LIMITED:
        event_dropped.send_robust(
            ip=remote_addr, project=project, reason_code=reason, sender=OutcomesConsumerWorker
        )

    # remember that we sent the signal just in case the processor dies before
    mark_signal_sent(project_id=project_id, event_id=event_id)

    timestamp = msg.get("timestamp")
    if timestamp is not None:
        # Lag between now and when the outcome was originally produced.
        delta = to_datetime(time.time()) - parse_timestamp(timestamp)
        metrics.timing("outcomes_consumer.timestamp_lag", delta.total_seconds())

    metrics.incr("outcomes_consumer.signal_sent", tags={"reason": reason, "outcome": outcome})
def _process_tsdb_batch(messages):
    """Apply TSDB increments for a batch of outcome messages.

    Messages already counted (tracked via a cache key per
    ``(project_id, event_id)``) are skipped; the ones incremented here are
    recorded afterwards so a restart does not double count.
    """
    tsdb_increments = []
    messages_to_process = []
    is_tsdb_incremented_requests = []

    for msg in messages:
        project_id = int(msg.get("project_id") or 0) or None
        event_id = msg.get("event_id")
        if not project_id or not event_id:
            continue

        to_increment = [
            (
                model,
                key,
                {
                    "timestamp": parse_timestamp(msg["timestamp"])
                    if msg.get("timestamp") is not None
                    else to_datetime(time.time())
                },
            )
            for model, key in tsdb_increments_from_outcome(
                org_id=int(msg.get("org_id") or 0) or None,
                project_id=project_id,
                key_id=int(msg.get("key_id") or 0) or None,
                outcome=int(msg.get("outcome", -1)),
                reason=msg.get("reason") or None,
            )
        ]
        if not to_increment:
            continue

        # Carry the ids along so the second loop marks the *matching*
        # message. (Previously it reused the stale `project_id`/`event_id`
        # loop variables left over from this loop's last iteration, marking
        # the wrong event as incremented.)
        messages_to_process.append((project_id, event_id, to_increment))
        is_tsdb_incremented_requests.append(_get_tsdb_cache_key(project_id, event_id))

    # NOTE(review): assumes cache.get_many returns a mapping (Django cache
    # semantics) — look results up by key instead of zipping with the mapping
    # directly, since iterating a dict yields keys and misses are omitted,
    # so positions would not line up. TODO confirm cache backend.
    is_tsdb_incremented_results = cache.get_many(is_tsdb_incremented_requests)

    mark_tsdb_incremented_requests = []
    for (project_id, event_id, to_increment), cache_key in zip(
        messages_to_process, is_tsdb_incremented_requests
    ):
        if is_tsdb_incremented_results.get(cache_key) is not None:
            continue  # already counted by a previous run

        tsdb_increments.extend(to_increment)
        mark_tsdb_incremented_requests.append((project_id, event_id))
        metrics.incr("outcomes_consumer.tsdb_incremented")

    metrics.timing("outcomes_consumer.tsdb_incr_multi_size", len(tsdb_increments))

    if tsdb_increments:
        tsdb.incr_multi(tsdb_increments)

    if mark_tsdb_incremented_requests:
        mark_tsdb_incremented_many(mark_tsdb_incremented_requests)
def normalize_crumb(cls, crumb):
    """Shallow-normalize a breadcrumb: coerce the timestamp and make sure
    every expected key exists (defaulting to None)."""
    normalized = dict(crumb)  # work on a copy, leave the input untouched

    parsed = parse_timestamp(normalized.get("timestamp"))
    normalized["timestamp"] = to_timestamp(parsed) if parsed else None

    for field in ("type", "level", "message", "category", "event_id", "data"):
        normalized.setdefault(field, None)

    return normalized
def normalize_crumb(cls, crumb):
    """Normalize a raw breadcrumb dict into its canonical form.

    Raises:
        InterfaceValidationError: if no timestamp can be parsed.
    """
    ty = crumb.get('type') or 'default'
    ts = parse_timestamp(crumb.get('timestamp'))
    if ts is None:
        raise InterfaceValidationError('Unable to determine timestamp '
                                       'for crumb')

    rv = {
        'type': ty,
        'timestamp': to_timestamp(ts),
    }

    level = crumb.get('level')
    if level not in (None, 'info'):
        rv['level'] = level

    msg = crumb.get('message')
    if msg is not None:
        rv['message'] = trim(six.text_type(msg), 4096)

    category = crumb.get('category')
    if category is not None:
        rv['category'] = trim(six.text_type(category), 256)

    event_id = crumb.get('event_id')
    if event_id is not None:
        rv['event_id'] = event_id

    data = crumb.get('data')
    if data:
        if isinstance(data, dict):
            # JSON-encode non-string values on a copy so the caller's dict
            # is not mutated (previously the json.dumps results were written
            # back into crumb['data'] in place).
            data = {
                key: value if isinstance(value, six.string_types) else json.dumps(value)
                for key, value in six.iteritems(data)
            }
            rv['data'] = trim(data, 4096)
        # TODO(dcramer): we dont want to discard the the rest of the
        # crumb, but it'd be nice if we could record an error when ``data``
        # is not a mapping; for now it is silently dropped.

    return rv
def normalize_crumb(cls, crumb):
    """Normalize a breadcrumb, emitting every canonical key (missing or
    invalid fields become defaults / None).

    Raises:
        InterfaceValidationError: if no timestamp can be parsed.
    """
    parsed = parse_timestamp(crumb.get('timestamp'))
    if parsed is None:
        raise InterfaceValidationError(
            'Unable to determine timestamp for crumb')

    # Fall back to 'info' for anything that is not a known level name.
    level = crumb.get('level')
    if not isinstance(level, six.string_types) or \
            (level not in LOG_LEVELS_MAP and level != 'critical'):
        level = 'info'

    def _text_field(key, limit):
        # Stringify-and-trim helper for the free-form text fields.
        value = crumb.get(key)
        return None if value is None else trim(six.text_type(value), limit)

    # TODO(dcramer): we dont want to discard the the rest of the crumb, but
    # it'd be nice if we could record an error when ``data`` is not a
    # mapping; for now anything else is dropped.
    data = crumb.get('data')
    data = trim(data, 4096) if isinstance(data, dict) else None

    return {
        'type': crumb.get('type') or 'default',
        'level': level,
        'timestamp': to_timestamp(parsed),
        'message': _text_field('message', 4096),
        'category': _text_field('category', 256),
        'event_id': crumb.get('event_id'),
        'data': data,
    }
def normalize_crumb(cls, crumb):
    """Normalize a breadcrumb into a dict with every canonical key present.

    Raises:
        InterfaceValidationError: if no timestamp can be parsed.
    """
    ty = crumb.get('type') or 'default'
    level = crumb.get('level') or 'info'

    ts = parse_timestamp(crumb.get('timestamp'))
    if ts is None:
        raise InterfaceValidationError('Unable to determine timestamp for crumb')
    ts = to_timestamp(ts)

    msg = crumb.get('message')
    if msg is not None:
        msg = trim(six.text_type(msg), 4096)

    category = crumb.get('category')
    if category is not None:
        category = trim(six.text_type(category), 256)

    event_id = crumb.get('event_id')

    data = crumb.get('data')
    if data:
        if isinstance(data, dict):
            # JSON-encode non-string values on a copy instead of writing the
            # json.dumps results back into the caller's dict in place.
            data = {
                key: value if isinstance(value, six.string_types) else json.dumps(value)
                for key, value in six.iteritems(data)
            }
            data = trim(data, 4096)
        else:
            # TODO(dcramer): we dont want to discard the the rest of the
            # crumb, but it'd be nice if we could record an error when
            # ``data`` is not a mapping.
            data = None

    return {
        'type': ty,
        'level': level,
        'timestamp': ts,
        'message': msg,
        'category': category,
        'event_id': event_id,
        'data': data
    }
def normalize_crumb(cls, crumb):
    """Lightly normalize a breadcrumb: parse the timestamp and ensure all
    expected keys are present (None when absent)."""
    result = dict(crumb)  # never mutate the caller's dict

    stamp = parse_timestamp(result.get('timestamp'))
    result['timestamp'] = to_timestamp(stamp) if stamp else None

    for field in ('type', 'level', 'message', 'category', 'event_id', 'data'):
        result.setdefault(field, None)

    return result
def normalize_crumb(cls, crumb, rust_renormalized):
    """Normalize a breadcrumb.

    When ``rust_renormalized`` is set the heavy lifting already happened
    upstream, so only the timestamp is coerced and missing keys are
    back-filled with None; otherwise the full Python normalization runs.

    Raises:
        InterfaceValidationError: if the full path cannot parse a timestamp.
    """
    if rust_renormalized:
        shallow = dict(crumb)
        parsed = parse_timestamp(shallow.get('timestamp'))
        shallow['timestamp'] = to_timestamp(parsed) if parsed else None
        for field in ('type', 'level', 'message', 'category', 'event_id', 'data'):
            shallow.setdefault(field, None)
        return shallow

    parsed = parse_timestamp(crumb.get('timestamp'))
    if parsed is None:
        raise InterfaceValidationError('Unable to determine timestamp for crumb')

    # Fall back to 'info' for anything that is not a known level name.
    level = crumb.get('level')
    if not isinstance(level, six.string_types) or \
            (level not in LOG_LEVELS_MAP and level != 'critical'):
        level = 'info'

    msg = crumb.get('message')
    if msg is not None:
        msg = trim(six.text_type(msg), 4096)

    category = crumb.get('category')
    if category is not None:
        category = trim(six.text_type(category), 256)

    # TODO(dcramer): we dont want to discard the the rest of the crumb, but
    # it'd be nice if we could record an error when ``data`` is not a
    # mapping; for now anything else is dropped.
    data = crumb.get('data')
    data = trim(data, 4096) if isinstance(data, dict) else None

    return {
        'type': crumb.get('type') or 'default',
        'level': level,
        'timestamp': to_timestamp(parsed),
        'message': msg,
        'category': category,
        'event_id': crumb.get('event_id'),
        'data': data,
    }
def _process_signal(msg):
    # Translate one outcomes-consumer message into the corresponding project
    # signal (event_saved / event_discarded / event_filtered / event_dropped)
    # and record skip/lag/sent metrics along the way.
    project_id = int(msg.get("project_id") or 0)
    if project_id == 0:
        metrics.incr("outcomes_consumer.skip_outcome", tags={"reason": "project_zero"})
        return  # no project. this is valid, so ignore silently.

    outcome = int(msg.get("outcome", -1))
    if outcome not in (Outcome.ACCEPTED, Outcome.FILTERED, Outcome.RATE_LIMITED):
        metrics.incr("outcomes_consumer.skip_outcome", tags={"reason": "wrong_outcome_type"})
        return  # nothing to do here

    event_id = msg.get("event_id")
    if not event_id:
        metrics.incr("outcomes_consumer.skip_outcome", tags={"reason": "missing_event_id"})
        return

    try:
        project = Project.objects.get_from_cache(id=project_id)
    except Project.DoesNotExist:
        metrics.incr("outcomes_consumer.skip_outcome", tags={"reason": "unknown_project"})
        logger.error("OutcomesConsumer could not find project with id: %s", project_id)
        return

    reason = msg.get("reason")
    remote_addr = msg.get("remote_addr")
    quantity = msg.get("quantity")

    # Wrap the raw category value in the DataCategory enum when provided.
    category = msg.get("category")
    if category is not None:
        category = DataCategory(category)

    if outcome == Outcome.ACCEPTED:
        event_saved.send_robust(
            project=project, category=category, quantity=quantity, sender=OutcomesConsumerWorker
        )
    # Discarded-hash filters get their own dedicated signal.
    elif outcome == Outcome.FILTERED and reason == FilterStatKeys.DISCARDED_HASH:
        event_discarded.send_robust(
            project=project, category=category, quantity=quantity, sender=OutcomesConsumerWorker
        )
    elif outcome == Outcome.FILTERED:
        event_filtered.send_robust(
            ip=remote_addr,
            project=project,
            category=category,
            quantity=quantity,
            sender=OutcomesConsumerWorker,
        )
    elif outcome == Outcome.RATE_LIMITED:
        event_dropped.send_robust(
            ip=remote_addr,
            project=project,
            reason_code=reason,
            category=category,
            quantity=quantity,
            sender=OutcomesConsumerWorker,
        )

    timestamp = msg.get("timestamp")
    if timestamp is not None:
        # Lag between now and when the outcome was originally produced.
        delta = to_datetime(time.time()) - parse_timestamp(timestamp)
        metrics.timing("outcomes_consumer.timestamp_lag", delta.total_seconds())

    metrics.incr("outcomes_consumer.signal_sent", tags={"reason": reason, "outcome": outcome})