Example #1
0
def test_categorical_histogram_dict_value():
    """Categorical histogram built from a dict of per-bucket counts."""
    # Bucket indices are supplied as unicode-string keys mapping to counts.
    bucket_counts = {u'0': 2, u'1': 1, u'2': 0, u'3': 0}
    cat2 = Histogram('TELEMETRY_TEST_CATEGORICAL', {'values': bucket_counts})
    # ``series`` is presumably the expected pandas Series defined at module
    # level — not visible in this chunk, confirm against the full file.
    assert all(cat2.get_value() == series)
Example #2
0
def get_dates_metrics(prefix, channel):
    """Serve histogram/simple-measure data for the requested dates/version.

    Query-string parameters (read from the Flask ``request``):
      dates   -- comma-separated list of dates (required)
      version -- product version (required)
      metric  -- metric name (required)

    Returns a JSON string with bucket labels, kind, description and
    per-date rows. Aborts with 404 when a required parameter is missing,
    the metric is unknown or not servable, or no data is found.
    """
    mapping = {"true": True, "false": False}
    dimensions = {k: mapping.get(v, v) for k, v in request.args.iteritems()}

    # Get dates. Drop empty entries so a missing/empty ``dates`` parameter
    # yields [] (falsy) instead of [''] and is caught by the guard below
    # (''.split(',') returns [''] which is truthy).
    dates = [d for d in dimensions.pop('dates', "").split(',') if d]
    version = dimensions.pop('version', None)
    metric = dimensions.get('metric', None)

    if not dates or not version or not metric:
        abort(404)

    # SEARCH_COUNTS is deliberately not served through this endpoint.
    if metric == "SEARCH_COUNTS":
        abort(404)

    # Get bucket labels
    if metric.startswith("SIMPLE_MEASURES_"):
        labels = simple_measures_labels
        kind = "exponential"
        description = ""
    else:
        revision = histogram_revision_map.get(channel, "nightly")  # Use nightly revision if the channel is unknown
        try:
            definition = Histogram(metric, {"values": {}}, revision=revision)
        except KeyError:
            # Couldn't find the histogram definition
            abort(404)

        kind = definition.kind
        description = definition.definition.description()

        if kind == "count":
            labels = count_histogram_labels
            dimensions["metric"] = "[[COUNT]]_{}".format(metric)
        elif kind == "flag":
            labels = [0, 1]
        else:
            labels = definition.get_value().keys().tolist()

    # Fetch metrics
    result = execute_query("select * from batched_get_metric(%s, %s, %s, %s, %s)", (prefix, channel, version, dates, json.dumps(dimensions)))
    if not result:
        abort(404)

    pretty_result = {"data": [], "buckets": labels, "kind": kind, "description": description}
    for row in result:
        date = row[0]
        label = row[1]
        # The last two entries of the stored array are the sum and count.
        histogram = row[2][:-2]
        total = row[2][-2]  # renamed from ``sum`` to avoid shadowing the builtin
        count = row[2][-1]
        pretty_result["data"].append({"date": date, "label": label, "histogram": histogram, "count": count, "sum": total})

    return json.dumps(pretty_result)
def get_dates_metrics(prefix, channel):
    """Serve metric data for the requested dates/version as a JSON response.

    Validates query-string dimensions against ALLOWED_DIMENSIONS, resolves
    bucket labels for scalar/count/flag/other histogram kinds, and queries
    the database — with special numerator/denominator composition for
    USE_COUNTER2_* metrics (bug 1412382).

    Raises:
        MethodNotAllowed: when an unsupported dimension is supplied; a
            cleaned-up valid URL is returned via ``valid_methods``.

    Aborts with 404 on missing parameters, disallowed or unknown metrics,
    or when the query returns no rows.
    """
    mapping = {"true": True, "false": False}
    dimensions = {k: mapping.get(v, v) for k, v in request.args.iteritems()}

    extra_dimensions = dimensions.viewkeys() - ALLOWED_DIMENSIONS
    if extra_dimensions:
        # We received an unsupported query string to filter by, return 405.
        valid_url = '{}?{}'.format(
            request.path,
            urlencode({k: v for k, v in dimensions.items() if k in ALLOWED_DIMENSIONS}))
        raise MethodNotAllowed(valid_methods=[valid_url])

    if 'child' in dimensions:
        # Process types in the db are true/false, not content/process
        new_process_map = {"content": True, "parent": False}
        dimensions['child'] = new_process_map.get(dimensions['child'], dimensions['child'])

    # Get dates. Drop empty entries so a missing/empty ``dates`` parameter
    # yields [] (falsy) instead of [''] and is rejected by the guard below
    # (''.split(',') returns [''] which is truthy).
    dates = [d for d in dimensions.pop('dates', '').split(',') if d]
    version = dimensions.pop('version', None)
    metric = dimensions.get('metric')

    if not dates or not version or not metric:
        abort(404, description="Missing date or version or metric. All three are required.")

    if not _allow_metric(channel, metric):
        abort(404, description="This metric is not allowed to be served.")

    # Get bucket labels
    for _prefix, _labels in SCALAR_MEASURE_MAP.iteritems():
        if metric.startswith(_prefix) and _prefix != COUNT_HISTOGRAM_PREFIX:
            labels = _labels
            kind = "exponential"
            try:
                description = _get_description(channel, _prefix, metric)
            except MissingScalarError:
                abort(404, description="Cannot find this scalar definition.")
            break
    else:
        revision = histogram_revision_map[channel]
        try:
            definition = Histogram(metric, {"values": {}}, revision=revision)
        except KeyError:
            # Couldn't find the histogram definition
            abort(404, description="Cannot find this histogram definition.")

        kind = definition.kind
        description = definition.definition.description()

        if kind == "count":
            labels = COUNT_HISTOGRAM_LABELS
            dimensions["metric"] = "{}_{}".format(COUNT_HISTOGRAM_PREFIX, metric)
        elif kind == "flag":
            labels = [0, 1]
        else:
            labels = definition.get_value().keys().tolist()

    altered_dimensions = deepcopy(dimensions)
    if 'child' in dimensions:
        # Bug 1339139 - when adding gpu processes, child process went from True/False to "true"/"false"/"gpu"
        reverse_map = {True: 'true', False: 'false'}
        altered_dimensions['child'] = reverse_map.get(altered_dimensions['child'], altered_dimensions['child'])

    # Fetch metrics
    if metric.startswith("USE_COUNTER2_"):
        # Bug 1412382 - Use Counters need to be composed from reported True
        # values and False values supplied by *CONTENT_DOCUMENTS_DESTROYED.
        denominator = "TOP_LEVEL_CONTENT_DOCUMENTS_DESTROYED"
        if metric.endswith("_DOCUMENT"):
            denominator = "CONTENT_DOCUMENTS_DESTROYED"
        denominator = "{}_{}".format(COUNT_HISTOGRAM_PREFIX, denominator)
        denominator_dimensions = deepcopy(dimensions)
        denominator_dimensions["metric"] = denominator
        denominator_new_dimensions = deepcopy(altered_dimensions)
        denominator_new_dimensions["metric"] = denominator
        result = execute_query(
            "select * from batched_get_use_counter(%s, %s, %s, %s, %s, %s, %s, %s)", (
                prefix, channel, version, dates, json.dumps(denominator_dimensions),
                json.dumps(denominator_new_dimensions), json.dumps(dimensions), json.dumps(altered_dimensions)))
    else:
        result = execute_query(
            "select * from batched_get_metric(%s, %s, %s, %s, %s, %s)", (
                prefix, channel, version, dates, json.dumps(dimensions), json.dumps(altered_dimensions)))

    if not result:
        abort(404, description="No data found for this metric.")

    pretty_result = {"data": [], "buckets": labels, "kind": kind, "description": description}
    for row in result:
        date = row[0]
        label = row[1]
        # The last two entries of the stored array are the sum and count.
        histogram = row[2][:-2]
        total = row[2][-2]  # renamed from ``sum`` to avoid shadowing the builtin
        count = row[2][-1]
        pretty_result["data"].append({"date": date, "label": label, "histogram": histogram, "count": count, "sum": total})

    return Response(json.dumps(pretty_result), mimetype="application/json")
Example #4
0
def get_dates_metrics(prefix, channel):
    """Serve histogram/simple-measure data for the requested dates/version.

    Query-string parameters (read from the Flask ``request``):
      dates   -- comma-separated list of dates (required)
      version -- product version (required)
      metric  -- metric name (required)

    Returns a JSON string with bucket labels, kind, description and
    per-date rows. Aborts with 404 when a required parameter is missing,
    the metric definition is unknown, or no data is found.
    """
    mapping = {"true": True, "false": False}
    dimensions = {k: mapping.get(v, v) for k, v in request.args.iteritems()}

    # Get dates. Drop empty entries so a missing/empty ``dates`` parameter
    # yields [] (falsy) instead of [''] and is caught by the guard below
    # (''.split(',') returns [''] which is truthy).
    dates = [d for d in dimensions.pop('dates', "").split(',') if d]
    version = dimensions.pop('version', None)
    metric = dimensions.get('metric', None)

    if not dates or not version or not metric:
        abort(404)

    # Get bucket labels
    if metric.startswith("SIMPLE_MEASURES_"):
        labels = simple_measures_labels
        kind = "exponential"
        description = ""
    else:
        revision = histogram_revision_map.get(
            channel,
            "nightly")  # Use nightly revision if the channel is unknown
        try:
            definition = Histogram(metric, {"values": {}}, revision=revision)
        except KeyError:
            # Couldn't find the histogram definition
            abort(404)

        kind = definition.kind
        description = definition.definition.description()

        if kind == "count":
            labels = count_histogram_labels
            dimensions["metric"] = "[[COUNT]]_{}".format(metric)
        elif kind == "flag":
            labels = [0, 1]
        else:
            labels = definition.get_value().keys().tolist()

    # Fetch metrics
    result = execute_query(
        "select * from batched_get_metric(%s, %s, %s, %s, %s)",
        (prefix, channel, version, dates, json.dumps(dimensions)))
    if not result:
        abort(404)

    pretty_result = {
        "data": [],
        "buckets": labels,
        "kind": kind,
        "description": description
    }
    for row in result:
        date = row[0]
        label = row[1]
        # The last two entries of the stored array are the sum and count.
        histogram = row[2][:-2]
        total = row[2][-2]  # renamed from ``sum`` to avoid shadowing the builtin
        count = row[2][-1]
        pretty_result["data"].append({
            "date": date,
            "label": label,
            "histogram": histogram,
            "count": count,
            "sum": total
        })

    return json.dumps(pretty_result)
Example #5
0
def get_dates_metrics(prefix, channel):
    """Serve metric data for the requested dates/version as a JSON response.

    Validates query-string dimensions against ALLOWED_DIMENSIONS, resolves
    bucket labels for scalar/count/flag/other histogram kinds, and queries
    the database — with special numerator/denominator composition for
    USE_COUNTER2_* metrics (bug 1412382).

    Raises:
        MethodNotAllowed: when an unsupported dimension is supplied; a
            cleaned-up valid URL is returned via ``valid_methods``.

    Aborts with 404 on missing parameters, disallowed or unknown metrics,
    or when the query returns no rows.
    """
    mapping = {"true": True, "false": False}
    dimensions = {k: mapping.get(v, v) for k, v in request.args.iteritems()}

    extra_dimensions = dimensions.viewkeys() - ALLOWED_DIMENSIONS
    if extra_dimensions:
        # We received an unsupported query string to filter by, return 405.
        valid_url = '{}?{}'.format(
            request.path,
            urlencode({k: v for k, v in dimensions.items() if k in ALLOWED_DIMENSIONS}))
        raise MethodNotAllowed(valid_methods=[valid_url])

    if 'child' in dimensions:
        # Process types in the db are true/false, not content/process
        new_process_map = {"content": True, "parent": False}
        dimensions['child'] = new_process_map.get(dimensions['child'], dimensions['child'])

    # Get dates. Drop empty entries so a missing/empty ``dates`` parameter
    # yields [] (falsy) instead of [''] and is rejected by the guard below
    # (''.split(',') returns [''] which is truthy).
    dates = [d for d in dimensions.pop('dates', '').split(',') if d]
    version = dimensions.pop('version', None)
    metric = dimensions.get('metric')

    if not dates or not version or not metric:
        abort(404, description="Missing date or version or metric. All three are required.")

    if not _allow_metric(channel, metric):
        abort(404, description="This metric is not allowed to be served.")

    # Get bucket labels
    for _prefix, _labels in SCALAR_MEASURE_MAP.iteritems():
        if metric.startswith(_prefix) and _prefix != COUNT_HISTOGRAM_PREFIX:
            labels = _labels
            kind = "exponential"
            try:
                description = _get_description(channel, _prefix, metric)
            except MissingScalarError:
                abort(404, description="Cannot find this scalar definition.")
            break
    else:
        revision = histogram_revision_map[channel]
        try:
            definition = Histogram(metric, {"values": {}}, revision=revision)
        except KeyError:
            # Couldn't find the histogram definition
            abort(404, description="Cannot find this histogram definition.")

        kind = definition.kind
        description = definition.definition.description()

        if kind == "count":
            labels = COUNT_HISTOGRAM_LABELS
            dimensions["metric"] = "{}_{}".format(COUNT_HISTOGRAM_PREFIX, metric)
        elif kind == "flag":
            labels = [0, 1]
        else:
            labels = definition.get_value().keys().tolist()

    altered_dimensions = deepcopy(dimensions)
    if 'child' in dimensions:
        # Bug 1339139 - when adding gpu processes, child process went from True/False to "true"/"false"/"gpu"
        reverse_map = {True: 'true', False: 'false'}
        altered_dimensions['child'] = reverse_map.get(altered_dimensions['child'], altered_dimensions['child'])

    # Fetch metrics
    if metric.startswith("USE_COUNTER2_"):
        # Bug 1412382 - Use Counters need to be composed from reported True
        # values and False values supplied by *CONTENT_DOCUMENTS_DESTROYED.
        denominator = "TOP_LEVEL_CONTENT_DOCUMENTS_DESTROYED"
        if metric.endswith("_DOCUMENT"):
            denominator = "CONTENT_DOCUMENTS_DESTROYED"
        denominator = "{}_{}".format(COUNT_HISTOGRAM_PREFIX, denominator)
        denominator_dimensions = deepcopy(dimensions)
        denominator_dimensions["metric"] = denominator
        denominator_new_dimensions = deepcopy(altered_dimensions)
        denominator_new_dimensions["metric"] = denominator
        result = execute_query(
            "select * from batched_get_use_counter(%s, %s, %s, %s, %s, %s, %s, %s)", (
                prefix, channel, version, dates, json.dumps(denominator_dimensions),
                json.dumps(denominator_new_dimensions), json.dumps(dimensions), json.dumps(altered_dimensions)))
    else:
        result = execute_query(
            "select * from batched_get_metric(%s, %s, %s, %s, %s, %s)", (
                prefix, channel, version, dates, json.dumps(dimensions), json.dumps(altered_dimensions)))

    if not result:
        abort(404, description="No data found for this metric.")

    pretty_result = {"data": [], "buckets": labels, "kind": kind, "description": description}
    for row in result:
        date = row[0]
        label = row[1]
        # The last two entries of the stored array are the sum and count.
        histogram = row[2][:-2]
        total = row[2][-2]  # renamed from ``sum`` to avoid shadowing the builtin
        count = row[2][-1]
        pretty_result["data"].append({"date": date, "label": label, "histogram": histogram, "count": count, "sum": total})

    return Response(json.dumps(pretty_result), mimetype="application/json")
def test_large_values():
    """An absurdly large bucket index must be ignored, leaving all zeros."""
    payload = {'values': {u'3.0683611344442837e+257': 1}}
    histogram = Histogram('GC_REASON_2', payload)
    assert all(histogram.get_value() == 0)
def test_malformed_non_categorical():
    """A payload with no 'values' key yields an all-zero histogram."""
    empty_payload = {}
    histogram = Histogram('GC_REASON_2', empty_payload)
    assert all(histogram.get_value() == 0)
def test_malformed_categorical():
    """Categorical histogram with an empty payload yields all zeros.

    See bug 1353196.
    """
    empty_payload = {}
    cat2 = Histogram('TELEMETRY_TEST_CATEGORICAL', empty_payload)
    assert all(cat2.get_value() == 0)
def test_categorical_histogram_dict_value():
    """Categorical histogram built from a dict of per-bucket counts."""
    bucket_counts = {u'0': 2, u'1': 1, u'2': 0, u'3': 0}
    cat2 = Histogram('TELEMETRY_TEST_CATEGORICAL', {'values': bucket_counts})
    # ``series`` is presumably the expected pandas Series defined at module
    # level — not visible in this chunk, confirm against the full file.
    assert all(cat2.get_value() == series)
def test_large_categorical_values():
    """A huge out-of-range categorical bucket index is ignored (all zeros)."""
    payload = {'values': {u'3.0683611344442837e+257': 1}}
    histogram = Histogram('TELEMETRY_TEST_CATEGORICAL', payload)
    assert all(histogram.get_value() == 0)