Example #1
# Import path assumed; in Sentry the helper lives in the native-lang utils.
from sentry.lang.native.utils import is_minidump_event


def test_is_minidump():
    assert is_minidump_event(
        {"exception": {
            "values": [{
                "mechanism": {
                    "type": "minidump"
                }
            }]
        }})
    assert not is_minidump_event(
        {"exception": {
            "values": [{
                "mechanism": {
                    "type": "other"
                }
            }]
        }})
    assert not is_minidump_event(
        {"exception": {
            "values": [{
                "mechanism": {
                    "type": None
                }
            }]
        }})
    assert not is_minidump_event(
        {"exception": {
            "values": [{
                "mechanism": None
            }]
        }})
    assert not is_minidump_event({"exception": {"values": [None]}})
    assert not is_minidump_event({"exception": {"values": []}})
    assert not is_minidump_event({"exception": {"values": None}})
    assert not is_minidump_event({"exception": None})
Example #2
from typing import Set


def get_required_attachment_types(data) -> Set[str]:
    if is_minidump_event(data):
        return {MINIDUMP_ATTACHMENT_TYPE}
    elif is_applecrashreport_event(data):
        return {APPLECRASHREPORT_ATTACHMENT_TYPE}
    else:
        return set()
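
A hedged usage sketch: a caller could compare the required types against the attachment types actually present. The has_all_required_attachments name and the .type attribute are assumptions for illustration:

def has_all_required_attachments(data, attachments):
    # Hypothetical helper: True once every required attachment
    # type is present among the uploaded attachments.
    present = {attachment.type for attachment in attachments}
    return get_required_attachment_types(data) <= present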
Example #3
def get_symbolication_function(data):
    if is_minidump_event(data):
        return process_minidump
    elif is_applecrashreport_event(data):
        return process_applecrashreport
    elif is_native_event(data):
        return process_payload
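
Note the implicit None return for events that match no branch; callers must check for it. A sketch of such a caller (maybe_symbolicate is a hypothetical name, not from the source):

def maybe_symbolicate(data):
    func = get_symbolication_function(data)
    if func is None:
        return data  # not a native event: nothing to symbolicate
    return func(data)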
Example #4
def write_error(e, data, errors=None):
    # User-fixable but fatal errors are reported as processing
    # issues. We skip this for minidumps, as reprocessing is not
    # possible without persisting them.
    if e.is_user_fixable and e.is_fatal and not is_minidump_event(data):
        report_processing_issue(data,
                                scope="native",
                                object="dsym:%s" % e.image_uuid,
                                type=e.type,
                                data=e.get_data())

    # Currently this has little practical effect: once a processing
    # issue is reported, the event is only stored as a raw event and
    # no group is generated, so no user-facing event or error shows
    # up at all. We keep it anyway in case we decide not to report
    # some processing issues (e.g. optional DIFs).
    if e.is_user_fixable or e.is_sdk_failure:
        if errors is None:
            errors = data.setdefault("errors", [])
        errors.append(e.get_data())
    else:
        logger.debug("Failed to symbolicate with native backend",
                     exc_info=True)
Example #5
def get_event_enhancers(self, data):
    if is_minidump_event(data):
        return [process_minidump]
    elif is_applecrashreport_event(data):
        return [process_applecrashreport]
    elif is_native_event(data):
        return [process_payload]
Example #6
def capture_nodestore_stats(cache_key, project_id, event_id):
    set_current_project(project_id)

    from sentry.eventstore.compressor import deduplicate
    from sentry.eventstore.models import Event

    node_id = Event.generate_node_id(project_id, event_id)
    data = nodestore.get(node_id)

    if not data:
        metrics.incr("eventstore.compressor.error", tags={"reason": "no_data"})
        return

    old_event_size = _json_size(data)

    unprocessed_data = event_processing_store.get(
        _get_unprocessed_key(cache_key))
    event_processing_store.delete_by_key(_get_unprocessed_key(cache_key))

    tags = {
        "with_reprocessing": bool(unprocessed_data),
        "platform": data.get("platform") or "none",
        "is_minidump": is_minidump_event(data),
    }

    if unprocessed_data:
        metrics.incr("nodestore_stats.with_reprocessing")

        concatenated_size = _json_size(data, unprocessed_data)
        metrics.timing("events.size.concatenated",
                       concatenated_size,
                       tags=tags)
        metrics.timing("events.size.concatenated.ratio",
                       concatenated_size / old_event_size,
                       tags=tags)

        _data = dict(data)
        _data["__nodestore_reprocessing"] = unprocessed_data
        simple_concatenated_size = _json_size(_data)
        metrics.timing("events.size.simple_concatenated",
                       simple_concatenated_size,
                       tags=tags)
        metrics.timing(
            "events.size.simple_concatenated.ratio",
            simple_concatenated_size / old_event_size,
            tags=tags,
        )
    else:
        metrics.incr("nodestore_stats.without_reprocessing")

    new_data, extra_keys = deduplicate(dict(data))
    total_size = event_size = _json_size(new_data)

    for key, value in six.iteritems(extra_keys):
        if nodestore.get(key) is not None:
            metrics.incr("eventstore.compressor.hits", tags=tags)
            # no early `continue` here: the nodestore.set() below
            # should still run to bump the key's TTL
        else:
            metrics.incr("eventstore.compressor.misses", tags=tags)
            total_size += _json_size(value)

        # key is md5sum of content
        # do not store actual value to keep prod impact to a minimum
        nodestore.set(key, {})

    metrics.timing("events.size.deduplicated", event_size, tags=tags)
    metrics.timing("events.size.deduplicated.total_written",
                   total_size,
                   tags=tags)

    metrics.timing("events.size.deduplicated.ratio",
                   event_size / old_event_size,
                   tags=tags)
    metrics.timing("events.size.deduplicated.total_written.ratio",
                   total_size / old_event_size,
                   tags=tags)

    if total_size > old_event_size:
        nodestore_stats_logger.info(
            "events.size.deduplicated.details",
            extra={
                "project_id": project_id,
                "event_id": event_id,
                "total_size": total_size,
                "old_event_size": old_event_size,
            },
        )
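
The _json_size helper is referenced but not defined in this excerpt; a plausible sketch consistent with its one- and two-argument call sites (an assumption, not the actual implementation):

import json


def _json_size(*data):
    # Hypothetical helper: measure payload size as the length of
    # the JSON serialization of all arguments combined.
    return len(json.dumps(data))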