Example #1
def submit_process(project, from_reprocessing, cache_key, event_id, start_time, data):
    if features.has('projects:kafka-ingest', project=project):
        # Projects opted into Kafka ingestion publish the full envelope to the
        # processing topic instead of enqueueing a Celery task.
        kafka.produce_sync(
            settings.KAFKA_PROCESS,
            value=json.dumps({
                'cache_key': cache_key,
                'start_time': start_time,
                'from_reprocessing': from_reprocessing,
                'data': data,
            }),
        )
    else:
        # Default path: hand off to the appropriate Celery task. The event body
        # is not passed along; workers load it from the cache via `cache_key`.
        task = process_event_from_reprocessing if from_reprocessing else process_event
        task.delay(cache_key=cache_key, start_time=start_time, event_id=event_id)
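The `kafka.produce_sync` helper used above is not shown in these examples. A minimal sketch of what such a synchronous produce wrapper could look like on top of confluent_kafka follows; the broker address, module layout, and error handling are assumptions, not the project's actual implementation.

from confluent_kafka import KafkaException, Producer

# Assumed broker address; real configuration would come from settings.
_producer = Producer({'bootstrap.servers': 'localhost:9092'})


def produce_sync(topic, value, key=None):
    """Publish one message and block until the broker acknowledges it."""
    errors = []

    def _delivery(err, msg):
        # Invoked by flush() once per outstanding message.
        if err is not None:
            errors.append(err)

    _producer.produce(topic, value=value, key=key, callback=_delivery)
    _producer.flush()

    if errors:
        raise KafkaException(errors[0])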
Example #2
def submit_save_event(project, cache_key, event_id, start_time, data):
    if features.has('projects:kafka-ingest', project=project):
        # Kafka path: publish the envelope to the save topic.
        kafka.produce_sync(
            settings.KAFKA_SAVE,
            value=json.dumps({
                'cache_key': cache_key,
                'start_time': start_time,
                'data': data,
            }),
        )
    else:
        # If the event is already cached, drop the in-memory copy so the large
        # payload is not serialized into the Celery message as well.
        if cache_key:
            data = None

        save_event.delay(
            cache_key=cache_key, data=data, start_time=start_time, event_id=event_id,
            project_id=project.id
        )
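The consumer side that drains these topics is not part of the examples. Assuming the same JSON envelope shown above, a worker loop over the save topic might be sketched roughly as below; the topic string, group id, and the `handle_save` dispatch function are hypothetical placeholders, not real project code.

import json

from confluent_kafka import Consumer

consumer = Consumer({
    'bootstrap.servers': 'localhost:9092',  # assumed broker address
    'group.id': 'ingest-save',              # assumed consumer group
    'auto.offset.reset': 'earliest',
})
# 'ingest-save-events' stands in for whatever settings.KAFKA_SAVE resolves to.
consumer.subscribe(['ingest-save-events'])

while True:
    msg = consumer.poll(1.0)
    if msg is None:
        continue
    if msg.error():
        # A real worker would log and possibly retry; here we simply skip.
        continue

    envelope = json.loads(msg.value())
    # Hypothetical handler; it receives the same fields submit_save_event publishes.
    handle_save(
        cache_key=envelope['cache_key'],
        start_time=envelope['start_time'],
        data=envelope['data'],
    )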
Example #3
    def insert_data_to_database(self,
                                data,
                                start_time=None,
                                from_reprocessing=False,
                                attachments=None):
        if start_time is None:
            start_time = time()

        # we might be passed some subclasses of dict that fail dumping
        if isinstance(data, CANONICAL_TYPES):
            data = dict(data.items())

        cache_timeout = 3600
        cache_key = cache_key_for_event(data)
        default_cache.set(cache_key, data, cache_timeout)

        # Attachments will be empty or None if the "event-attachments" feature
        # is turned off. For native crash reports it will still contain the
        # crash dump (e.g. minidump) so we can load it during processing.
        if attachments is not None:
            attachment_cache.set(cache_key, attachments, cache_timeout)

        # NOTE: Project is bound to the context in most cases in production, which
        # is enough for us to do `projects:kafka-ingest` testing.
        project = self.context and self.context.project

        if project and features.has('projects:kafka-ingest', project=project):
            kafka.produce_sync(
                settings.KAFKA_PREPROCESS,
                value=json.dumps({
                    'cache_key': cache_key,
                    'start_time': start_time,
                    'from_reprocessing': from_reprocessing,
                    'data': data,
                }),
            )
        else:
            task = (preprocess_event_from_reprocessing
                    if from_reprocessing else preprocess_event)
            task.delay(cache_key=cache_key,
                       start_time=start_time,
                       event_id=data['event_id'])
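`cache_key_for_event` and `default_cache` are referenced but never defined in these snippets. The key only has to be stable for a given event across the preprocess, process, and save hand-offs, so one plausible implementation (the exact format is an assumption, not the project's real helper) keys it on event id and project:

def cache_key_for_event(data):
    # Assumed key scheme; any format works as long as the same event payload
    # always maps to the same key in every ingestion step.
    return 'e:{}:{}'.format(data['event_id'], data['project'])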