Code example #1
def build_default_context(product_name=None, versions=None):
    """
    Given a product name and a list of versions, generates navbar context.

    Adds ``products``, a dict of product name -> product information
    for all active supported products.

    Adds ``active_versions``, a dict of product name -> version information
    for all active supported products.

    Adds ``version``, the first version specified in the ``versions``
    argument.

    """
    context = {}

    # Build product information
    all_products = productlib.get_products()
    context["products"] = all_products

    try:
        if not product_name:
            product = productlib.get_default_product()
        else:
            product = productlib.get_product_by_name(product_name)
    except productlib.ProductDoesNotExist:
        raise http.Http404("Not a recognized product")

    context["product"] = product

    # Build product version information for all products
    active_versions = {
        prod.name: get_version_context_for_product(prod)
        for prod in all_products
    }
    context["active_versions"] = active_versions

    if versions is not None:
        if isinstance(versions, str):
            versions = versions.split(";")

        if versions:
            # Check that versions is a list
            assert isinstance(versions, list)

            # Check that the specified versions are all valid for this product
            pv_versions = [x["version"] for x in active_versions[product.name]]
            for version in versions:
                if version not in pv_versions:
                    raise http.Http404(
                        "Not a recognized version for that product")

            context["version"] = versions[0]

    return context
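A minimal usage sketch of the helper above from a Django view. It assumes the module-level imports the function relies on (productlib, django.http) plus an illustrative template name and query parameter; the product and version values are examples only, not taken from the project.

from django.shortcuts import render


def product_home(request, product_name):
    # Hypothetical caller of build_default_context; the template name and the
    # "version" query-parameter handling are illustrative assumptions.
    context = build_default_context(
        product_name=product_name,
        versions=request.GET.get("version"),  # e.g. "89.0" or "89.0;88.0"
    )
    # build_default_context raises Http404 for unknown products or versions,
    # so by this point context["product"] and context["active_versions"] exist.
    return render(request, "crashstats/product_home.html", context)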
Code example #2
def get_versions_for_product(product, use_cache=True):
    """Returns list of recent version strings for specified product

    This looks at the crash reports submitted for this product over
    VERSIONS_WINDOW_DAYS days and returns the versions of those crash reports.

    If SuperSearch returns an error, this returns an empty list.

    NOTE(willkg): This data is noisy if there are crash reports with junk
    versions.

    :arg product: either a product name or Product to query for
    :arg bool use_cache: whether or not to pull results from cache

    :returns: list of versions sorted in reverse order or ``[]``

    """
    if isinstance(product, str):
        product = productlib.get_product_by_name(product)

    if use_cache:
        key = "get_versions_for_product:%s" % product.name.lower().replace(
            " ", "")
        ret = cache.get(key)
        if ret is not None:
            return ret

    api = supersearch_models.SuperSearchUnredacted()
    now = timezone.now()

    # Find versions for the specified product in crash reports submitted in the
    # last VERSIONS_WINDOW_DAYS days. Use a big _facets_size so the facet picks
    # up newly released versions that don't have many crash reports yet.
    window = settings.VERSIONS_WINDOW_DAYS
    params = {
        "product": product.name,
        "_results_number": 0,
        "_facets": "version",
        "_facets_size": 1000,
        "date": [
            ">=" + (now - datetime.timedelta(days=window)).isoformat(),
            "<" + now.isoformat(),
        ],
    }

    # Since we're caching the results of the search plus additional work done,
    # we don't want to cache the fetch
    ret = api.get(**params, dont_cache=True)
    if "facets" not in ret or "version" not in ret["facets"]:
        return []

    # Get versions from facet, drop junk, and sort the final list
    versions = set()
    for item in ret["facets"]["version"]:
        if item["count"] < settings.VERSIONS_COUNT_THRESHOLD:
            continue

        version = item["term"]

        # Bug #1622932 is about something submitting crash reports with a version of
        # 1024 which is clearly junk, but it messes everything up; this is hacky,
        # but let's just drop those explicitly and push off thinking about a better
        # way of doing all of this until later
        if version.startswith("1024"):
            continue

        try:
            # This generates the sort key but also parses the version to make sure it's
            # a valid looking version
            versions.add((generate_semver(version), version))

            # Add X.Yb to betas set
            if "b" in version:
                beta_version = version[:version.find("b") + 1]
                versions.add((generate_semver(beta_version), beta_version))
        except VersionParseError:
            pass

    # Sort by sortkey and then drop the sortkey
    versions = sorted(versions, key=lambda v: v[0], reverse=True)
    versions = [v[1] for v in versions]

    if use_cache:
        # Cache value for an hour plus a fudge factor in seconds
        cache.set(key, versions, timeout=(60 * 60) + random.randint(60, 120))

    return versions
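A short sketch of calling the function above; "Firefox" is only an example product name, and the cache being bypassed is the roughly one-hour cache set at the end of the code.

# Hypothetical callers; "Firefox" is an example product name only.
recent = get_versions_for_product("Firefox")                  # may return cached results
fresh = get_versions_for_product("Firefox", use_cache=False)  # always queries SuperSearch

# Versions are sorted newest-first, so the first entry (if any) is the most
# recent version seen in the VERSIONS_WINDOW_DAYS window.
latest = fresh[0] if fresh else None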
Code example #3
File: views.py Project: johnmcwade/socorro
def report_index(request, crash_id, default_context=None):
    valid_crash_id = utils.find_crash_id(crash_id)
    if not valid_crash_id:
        return http.HttpResponseBadRequest("Invalid crash ID")

    # Sometimes in Socorro we use a prefix on the crash ID. Usually it's
    # 'bp-', but this is configurable.
    # If the prefixed form is used to reach the permalink for a crash, redirect
    # to the report index with the correct crash ID.
    if valid_crash_id != crash_id:
        return redirect(
            reverse("crashstats:report_index", args=(valid_crash_id, )))

    context = default_context or {}
    context["crash_id"] = crash_id

    refresh_cache = request.GET.get("refresh") == "cache"

    raw_api = models.RawCrash()
    try:
        context["raw"] = raw_api.get(crash_id=crash_id,
                                     refresh_cache=refresh_cache)
    except CrashIDNotFound:
        # If the raw crash can't be found, we can't do much.
        return render(request,
                      "crashstats/report_index_not_found.html",
                      context,
                      status=404)
    utils.enhance_raw(context["raw"])

    context["your_crash"] = your_crash = (request.user.is_active
                                          and context["raw"].get("Email")
                                          == request.user.email)

    api = models.UnredactedCrash()
    try:
        context["report"] = api.get(crash_id=crash_id,
                                    refresh_cache=refresh_cache)
    except CrashIDNotFound:
        # The processed crash isn't available yet, so submit a priority job to
        # process it, if we haven't already done so.
        cache_key = f"priority_job:{crash_id}"
        if not cache.get(cache_key):
            priority_api = models.PriorityJob()
            priority_api.post(crash_ids=[crash_id])
            cache.set(cache_key, True, 60)
        return render(request, "crashstats/report_index_pending.html", context)

    context["product_details"] = productlib.get_product_by_name(
        context["report"]["product"])

    # For C++/Rust crashes
    if "json_dump" in context["report"]:
        json_dump = context["report"]["json_dump"]
        if "sensitive" in json_dump and not request.user.has_perm(
                "crashstats.view_pii"):
            del json_dump["sensitive"]
        context["raw_stackwalker_output"] = json.dumps(json_dump,
                                                       sort_keys=True,
                                                       indent=4,
                                                       separators=(",", ": "))
        utils.enhance_json_dump(json_dump, settings.VCS_MAPPINGS)
        parsed_dump = json_dump
    else:
        context["raw_stackwalker_output"] = "No dump available"
        parsed_dump = {}

    context["crashing_thread"] = parsed_dump.get("crash_info",
                                                 {}).get("crashing_thread")
    if context["report"]["signature"].startswith("shutdownhang"):
        # For shutdownhang signatures, we want to use thread 0 as the
        # crashing thread, because that's the thread that actually contains
        # the useful data about what happened.
        context["crashing_thread"] = 0

    context["parsed_dump"] = parsed_dump

    # For Java crashes
    if "java_exception" in context["report"]:
        if request.user.has_perm("crashstats.view_pii") or your_crash:
            context["java_exception_stacks"] = context["report"][
                "java_exception_raw"]["exception"]["values"]
        else:
            context["java_exception_stacks"] = context["report"][
                "java_exception"]["exception"]["values"]

    context["bug_associations"] = list(
        models.BugAssociation.objects.filter(
            signature=context["report"]["signature"]).values(
                "bug_id", "signature").order_by("-bug_id"))

    context["public_raw_keys"] = [
        x for x in context["raw"] if x in models.RawCrash.API_ALLOWLIST()
    ]
    if request.user.has_perm("crashstats.view_pii") or your_crash:
        # If the user can see PII or this is their crash report, include everything
        context["protected_raw_keys"] = [
            x for x in context["raw"]
            if x not in models.RawCrash.API_ALLOWLIST()
        ]
    else:
        context["protected_raw_keys"] = []

    # Sort keys case-insensitively
    context["public_raw_keys"].sort(key=lambda s: s.lower())
    context["protected_raw_keys"].sort(key=lambda s: s.lower())

    if request.user.has_perm("crashstats.view_rawdump"):
        context["raw_dump_urls"] = [
            ("dump", reverse("crashstats:raw_data", args=(crash_id, "dmp"))),
            (
                "minidump-stackwalk output",
                reverse("crashstats:raw_data", args=(crash_id, "json")),
            ),
        ]
        if context["raw"].get("additional_minidumps"):
            suffixes = [
                x.strip()
                for x in context["raw"]["additional_minidumps"].split(",")
                if x.strip()
            ]
            for suffix in suffixes:
                name = "upload_file_minidump_%s" % (suffix, )
                context["raw_dump_urls"].append((
                    name,
                    reverse("crashstats:raw_data_named",
                            args=(crash_id, name, "dmp")),
                ))
        if (context["raw"].get("ContainsMemoryReport")
                and context["report"].get("memory_report")
                and not context["report"].get("memory_report_error")):
            context["raw_dump_urls"].append((
                "memory_report",
                reverse(
                    "crashstats:raw_data_named",
                    args=(crash_id, "memory_report", "json.gz"),
                ),
            ))

    # Add descriptions to all fields.
    all_fields = SuperSearchFields().get()
    descriptions = {}
    for field in all_fields.values():
        key = "{}.{}".format(field["namespace"], field["in_database_name"])
        descriptions[key] = "{} Search: {}".format(
            field.get("description", "").strip()
            or "No description for this field.",
            field["is_exposed"] and field["name"] or "N/A",
        )

    def make_raw_crash_key(key):
        """In the report_index.html template we need to create a key
        that we can use to look up against the 'fields_desc' dict.
        Because you can't do something like this in jinja::

            {{ fields_desc.get(u'raw_crash.{}'.format(key), empty_desc) }}

        we do it here in the function instead.
        The trick is that the lookup key has to be a unicode object or
        else you get UnicodeEncodeErrors in the template rendering.
        """
        return f"raw_crash.{key}"

    context["make_raw_crash_key"] = make_raw_crash_key
    context["fields_desc"] = descriptions
    context["empty_desc"] = "No description for this field. Search: unknown"

    # report.addons used to be a list of lists.
    # In https://bugzilla.mozilla.org/show_bug.cgi?id=1250132
    # we changed it from a list of lists to a list of strings, using
    # a ':' to split the name and version.
    # See https://bugzilla.mozilla.org/show_bug.cgi?id=1250132#c7
    # To handle legacy data, support both formats.
    # By late 2017 this code will no longer be needed and can be removed.
    if context["report"].get("addons") and isinstance(
            context["report"]["addons"][0], (list, tuple)):
        # This is the old legacy format. This crash hasn't been processed
        # the new way.
        context["report"]["addons"] = [
            ":".join(x) for x in context["report"]["addons"]
        ]

    content = loader.render_to_string("crashstats/report_index.html", context,
                                      request)
    utf8_content = content.encode("utf-8", errors="backslashreplace")
    return HttpResponse(utf8_content, charset="utf-8")
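The view above reverses the "crashstats:report_index" URL name when it normalizes prefixed crash IDs; here is a sketch of what the corresponding URL configuration might look like, with the import path and regex assumed rather than copied from the project.

# Hypothetical urls.py entry; Socorro's actual pattern and module path may differ.
from django.urls import re_path

from crashstats.crashstats import views

app_name = "crashstats"
urlpatterns = [
    # Matches both plain crash IDs and prefixed ones like "bp-<uuid>"; the view
    # itself redirects prefixed IDs to the canonical report_index URL.
    re_path(r"^report/index/(?P<crash_id>.+)$", views.report_index,
            name="report_index"),
]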
Code example #4
def topcrashers(request, days=None, possible_days=None, default_context=None):
    context = default_context or {}

    form = TopCrashersForm(request.GET)
    if not form.is_valid():
        return http.HttpResponseBadRequest(str(form.errors))

    product_name = form.cleaned_data["product"]
    versions = form.cleaned_data["version"]
    crash_type = form.cleaned_data["process_type"]
    os_name = form.cleaned_data["platform"]
    result_count = form.cleaned_data["_facets_size"]
    tcbs_mode = form.cleaned_data["_tcbs_mode"]
    range_type = form.cleaned_data["_range_type"]

    range_type = "build" if range_type == "build" else "report"

    if not tcbs_mode or tcbs_mode not in ("realtime", "byday"):
        tcbs_mode = "realtime"

    try:
        product = productlib.get_product_by_name(product_name)
    except productlib.ProductDoesNotExist:
        return http.HttpResponseBadRequest("Unrecognized product")

    context["product"] = product

    if not versions:
        # :(
        # Simulate what nav.js does, which is to take the latest version
        # for this product.
        for pv in context["active_versions"][product.name]:
            if pv["is_featured"]:
                url = "%s&version=%s" % (
                    request.build_absolute_uri(),
                    urlquote(pv["version"]),
                )
                return redirect(url)
        if context["active_versions"][product.name]:
            # Not a single version was featured, but there were active
            # versions. In this case, use the first available
            # *active* version.
            for pv in context["active_versions"][product.name]:
                url = "%s&version=%s" % (
                    request.build_absolute_uri(),
                    urlquote(pv["version"]),
                )
                return redirect(url)

    # Used to pick a version in the dropdown menu.
    context["version"] = versions[0] if versions else ""

    if tcbs_mode == "realtime":
        end_date = timezone.now().replace(microsecond=0)
    elif tcbs_mode == "byday":
        end_date = timezone.now().replace(hour=0,
                                          minute=0,
                                          second=0,
                                          microsecond=0)

    # settings.PROCESS_TYPES might contain tuple to indicate that some
    # are actual labels.
    process_types = []
    for option in settings.PROCESS_TYPES:
        if isinstance(option, (list, tuple)):
            process_types.append(option[0])
        else:
            process_types.append(option)
    if crash_type not in process_types:
        crash_type = "browser"

    context["crash_type"] = crash_type

    platforms = models.Platform.objects.values()
    if os_name not in (item["name"] for item in platforms):
        os_name = None

    context["os_name"] = os_name

    # set the result counts filter in the context to use in
    # the template. This way we avoid hardcoding it twice and
    # have it defined in one common location.
    context["result_counts"] = settings.TCBS_RESULT_COUNTS
    if result_count not in context["result_counts"]:
        result_count = settings.TCBS_RESULT_COUNTS[0]

    context["result_count"] = result_count
    context["query"] = {
        "product": product.name,
        "versions": versions,
        "crash_type": crash_type,
        "os_name": os_name,
        "result_count": str(result_count),
        "mode": tcbs_mode,
        "range_type": range_type,
        "end_date": end_date,
        "start_date": end_date - datetime.timedelta(days=days),
    }

    total_number_of_crashes, topcrashers_stats = get_topcrashers_stats(
        product=product.name,
        version=versions,
        platform=os_name,
        process_type=crash_type,
        date=[
            "<" + end_date.isoformat(),
            ">=" + context["query"]["start_date"].isoformat(),
        ],
        _facets_size=result_count,
        _range_type=range_type,
    )

    count_of_included_crashes = 0
    signatures = []

    # Get signatures and count of included crashes to show
    for topcrashers_stats_item in topcrashers_stats[:int(result_count)]:
        signatures.append(topcrashers_stats_item.signature_term)
        count_of_included_crashes += topcrashers_stats_item.num_crashes

    context["number_of_crashes"] = count_of_included_crashes
    context["total_percentage"] = len(topcrashers_stats) and (
        100.0 * count_of_included_crashes / total_number_of_crashes)
    context["total_number_of_crashes"] = total_number_of_crashes

    # Get augmented bugs data.
    bugs = defaultdict(list)
    if signatures:
        qs = (models.BugAssociation.objects.filter(
            signature__in=signatures).values("bug_id",
                                             "signature").order_by("-bug_id"))
        for item in qs:
            bugs[item["signature"]].append(item["bug_id"])

    # Get augmented signature data.
    sig_date_data = {}
    if signatures:
        qs = models.Signature.objects.filter(signature__in=signatures).values(
            "signature", "first_date")
        sig_date_data = {item["signature"]: item["first_date"] for item in qs}

    for topcrashers_stats_item in topcrashers_stats:
        crash_counts = []
        for item in platforms:
            if item["name"] == "Unknown":
                # not applicable in this context
                continue
            key = "%s_count" % item["short_name"]
            crash_counts.append([
                topcrashers_stats_item.num_crashes_per_platform[key],
                item["name"]
            ])

        signature_term = topcrashers_stats_item.signature_term
        # Augment with bugs.
        if signature_term in bugs:
            if hasattr(topcrashers_stats_item, "bugs"):
                topcrashers_stats_item.bugs.extend(bugs[signature_term])
            else:
                topcrashers_stats_item.bugs = bugs[signature_term]

        # Augment with first appearance dates.
        if signature_term in sig_date_data:
            topcrashers_stats_item.first_report = sig_date_data[signature_term]

        if hasattr(topcrashers_stats_item, "bugs"):
            topcrashers_stats_item.bugs.sort(reverse=True)

    context["topcrashers_stats"] = topcrashers_stats
    context["days"] = days
    context["report"] = "topcrasher"
    context["possible_days"] = possible_days
    context["total_crashing_signatures"] = len(signatures)
    context["process_type_values"] = []
    for option in settings.PROCESS_TYPES:
        if option == "all":
            continue
        if isinstance(option, (list, tuple)):
            value, label = option
        else:
            value = option
            label = option.capitalize()
        context["process_type_values"].append((value, label))

    context["platform_values"] = [item["name"] for item in platforms]

    return render(request, "topcrashers/topcrashers.html", context)
Code example #5
def exploitability_report(request, default_context=None):
    context = default_context or {}

    if not request.GET.get("product"):
        url = reverse("exploitability:report")
        url += "?" + urlencode(
            {"product": productlib.get_default_product().name})
        return redirect(url)

    form = ExploitabilityReportForm(request.GET,
                                    active_versions=context["active_versions"])
    if not form.is_valid():
        return http.HttpResponseBadRequest(str(form.errors))

    product_name = form.cleaned_data["product"]
    version = form.cleaned_data["version"]
    product = productlib.get_product_by_name(product_name)

    api = SuperSearchUnredacted()
    params = {
        "product": product_name,
        "version": version,
        "_results_number": 0,
        # This aggregates on crashes that do NOT contain these
        # key words. For example, if a crash has
        # {'exploitability': 'error: unable to analyze dump'}
        # then it won't get included.
        "exploitability": ["!error", "!interesting"],
        "_aggs.signature": "exploitability",
        "_facets_size": settings.EXPLOITABILITY_BATCH_SIZE,
    }
    results = api.get(**params)

    base_signature_report_dict = {"product": product_name}
    if version:
        base_signature_report_dict["version"] = version

    crashes = []
    categories = ("high", "none", "low", "medium", "null")
    for signature_facet in results["facets"]["signature"]:
        # this 'signature_facet' will look something like this:
        #
        #  {
        #      'count': 1234,
        #      'term': 'My | Signature',
        #      'facets': {
        #          'exploitability': [
        #              {'count': 1, 'term': 'high'},
        #              {'count': 23, 'term': 'medium'},
        #              {'count': 11, 'term': 'other'},
        #
        # And we only want to include those where:
        #
        #   low or medium or high are greater than 0
        #

        exploitability = signature_facet["facets"]["exploitability"]
        if not any(x["count"] for x in exploitability
                   if x["term"] in ("high", "medium", "low")):
            continue
        crash = {
            "bugs": [],
            "signature": signature_facet["term"],
            "high_count": 0,
            "medium_count": 0,
            "low_count": 0,
            "none_count": 0,
            "url": (reverse("signature:signature_report") + "?" + urlencode(
                dict(base_signature_report_dict,
                     signature=signature_facet["term"]))),
        }
        for cluster in exploitability:
            if cluster["term"] in categories:
                crash["{}_count".format(cluster["term"])] = cluster["count"]
        crash["med_or_high"] = crash.get("high_count", 0) + crash.get(
            "medium_count", 0)
        crashes.append(crash)

    # Sort by the 'med_or_high' key first (descending),
    # and by the signature second (ascending).
    crashes.sort(key=lambda x: (-x["med_or_high"], x["signature"]))

    # now, let's go back and fill in the bugs
    signatures = [x["signature"] for x in crashes]
    if signatures:
        qs = (models.BugAssociation.objects.filter(
            signature__in=signatures).values("bug_id", "signature").order_by(
                "-bug_id", "signature"))
        bugs = defaultdict(list)
        for item in qs:
            bugs[item["signature"]].append(item["bug_id"])

        for crash in crashes:
            crash["bugs"] = bugs.get(crash["signature"], [])

    context["crashes"] = crashes
    context["product"] = product
    context["version"] = version
    context["report"] = "exploitable"

    return render(request, "exploitability/report.html", context)