Example #1
def url(value):
    """
    convert FROM dict OR string TO URL PARAMETERS
    """
    # Deferred import: value2url_param is a module-level name (initially
    # falsy) that is imported on first call, a common way to break a
    # circular dependency with mo_files.
    global value2url_param
    if not value2url_param:
        from mo_files.url import value2url_param
    return value2url_param(value)
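A minimal usage sketch (the dict below is made up; the exact ordering and escaping of the output are defined by mo_files):

from mo_files.url import value2url_param

# Serialize a made-up query dict to URL parameters, e.g. "limit=10&q=test"
# (exact encoding is up to mo_files).
print(value2url_param({"q": "test", "limit": 10}))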
Example #2
File: s3.py Project: rv404674/TUID
        def more():
            # Fetch the next page of the S3 bucket listing, passing the
            # continuation state (marker, etc.) as URL parameters.
            xml = http.get(self.url + "?" + value2url_param(state)).content
            data = BeautifulSoup(xml, 'xml')

            # S3 sets <IsTruncated> when more pages remain; the last <Key>
            # on this page becomes the marker for the next request.
            state.get_more = data.find("istruncated").contents[0] == "true"
            contents = data.findAll("contents")
            state.marker = contents[-1].find("key").contents[0]
            return [{k: t(d.find(k).contents[0]) for k, t in content_keys.items()} for d in contents]
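Each call to more() fetches one page and updates state for the next one. A hypothetical driver loop (the initial state, with get_more set and no marker, is assumed to be prepared by the surrounding class):

listing = []
state.get_more = True  # assumed starting state
while state.get_more:
    listing.extend(more())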
Example #3
        def more():
            xml = http.get(self.url + "?" + value2url_param(state)).content
            data = BeautifulSoup(xml, 'xml')

            state.get_more = data.find("istruncated").contents[0] == "true"
            contents = data.findAll("contents")
            if len(contents):
                state.marker = contents[-1].find("key").contents[0]
            return [{k: t(d.find(k).contents[0]) for k, t in content_keys.items()} for d in contents]
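Unlike the variant in Example #2, this version only advances state.marker when the page actually contains <Contents> entries, so an empty listing page no longer raises an IndexError.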
Example #4
def process(
    about_deviant,
    since,
    source,
    deviant_summary,
    show=False,
    show_limit=MAX_POINTS,
    show_old=False,
    show_distribution=None,
):
    """
    :param signature_hash: The performance hash
    :param since: Only data after this date
    :param show:
    :param show_limit:
    :param show_old:
    :param show_distribution:
    :return:
    """
    sig_id = about_deviant.id
    if not isinstance(sig_id, int):
        Log.error("expecting id")

    # GET SIGNATURE DETAILS
    sig = get_signature(db_config=source, signature_id=sig_id)

    # GET SIGNATURE DATA
    data = get_dataum(source, sig.id, since=since, limit=show_limit)

    min_date = since.unix
    pushes = jx.sort(
        [{
            "value": median(rows.value),
            "runs": rows,
            "push": {
                "time": unwrap(t)["push.time"]
            },
        } for t, rows in jx.groupby(data, "push.time")
         if t["push\\.time"] > min_date],
        "push.time",
    )

    values = list(pushes.value)
    title = "-".join(
        map(
            str,
            [
                sig.id,
                sig.framework,
                sig.suite,
                sig.test,
                sig.repository,
                sig.platform,
                about_deviant.overall_dev_status,
            ],
        ))
    # EG https://treeherder.mozilla.org/perf.html#/graphs?highlightAlerts=1&series=mozilla-central,fee739b45f7960e4a520d8e0bd781dd9d0a3bec4,1,10&timerange=31536000
    url = "https://treeherder.mozilla.org/perf.html#/graphs?" + value2url_param(
        {
            "highlightAlerts":
            1,
            "series": [
                sig.repository, sig.id, 1,
                coalesce(sig.framework_id, sig.framework)
            ],
            "timerange":
            Duration(TREEHERDER_RANGE).seconds
        })

    Log.note("With {{title}}: {{url}}", title=title, url=url)

    with Timer("find segments"):
        new_segments, new_diffs = find_segments(values, sig.alert_change_type,
                                                sig.alert_threshold)

    # USE PERFHERDER ALERTS TO IDENTIFY OLD SEGMENTS
    old_segments = tuple(
        sorted(
            set([
                i for i, p in enumerate(pushes) if any(r.alert.id
                                                       for r in p.runs)
            ] + [0, len(pushes)])))
    old_medians = [0.0] + [
        np.median(values[s:e])
        for s, e in zip(old_segments[:-1], old_segments[1:])
    ]
    old_diffs = np.array(
        [b / a - 1 for a, b in zip(old_medians[:-1], old_medians[1:])] + [0])

    if len(new_segments) == 1:
        overall_dev_status = None
        overall_dev_score = None
        last_mean = None
        last_std = None
        last_dev_status = None
        last_dev_score = None
        relative_noise = None
        Log.note("not ")
    else:
        # NOISE OF LAST SEGMENT
        s, e = new_segments[-2], new_segments[-1]
        last_segment = np.array(values[s:e])
        ignore = IGNORE_TOP
        trimmed_segment = last_segment[np.argsort(last_segment)[ignore:-ignore]]
        last_mean = np.mean(trimmed_segment)
        last_std = np.std(trimmed_segment)
        last_dev_status, last_dev_score = deviance(trimmed_segment)
        relative_noise = last_std / last_mean

        # FOR EACH SEGMENT, NORMALIZE MEAN AND VARIANCE
        normalized = []
        for s, e in jx.pairs(new_segments):
            data = np.array(values[s:e])
            norm = (data + last_mean - np.mean(data)) * last_std / np.std(data)
            normalized.extend(norm)

        overall_dev_status, overall_dev_score = deviance(normalized)
        Log.note(
            "{{title}}:\n\tdeviance = {{deviance}}\n\tnoise={{std}}\n\tpushes={{pushes}}\n\tsegments={{num_segments}}",
            title=title,
            deviance=(overall_dev_status, overall_dev_score),
            std=relative_noise,
            pushes=len(values),
            num_segments=len(new_segments) - 1,
        )

        if show_distribution:
            histogram(trimmed_segment,
                      title=last_dev_status + "=" + text(last_dev_score))

    max_extra_diff = None
    max_missing_diff = None
    _is_diff = is_diff(new_segments, old_segments)
    if _is_diff:
        # FOR SEGMENTS PRESENT IN ONLY ONE SEGMENTATION, FIND THE BIGGEST DIFF
        max_extra_diff = mo_math.MAX(
            abs(d) for s, d in zip(new_segments, new_diffs)
            if all(not (s - TOLERANCE <= o <= s + TOLERANCE)
                   for o in old_segments))
        max_missing_diff = mo_math.MAX(
            abs(d) for s, d in zip(old_segments, old_diffs)
            if all(not (s - TOLERANCE <= n <= s + TOLERANCE)
                   for n in new_segments))

        Log.alert(
            "Disagree max_extra_diff={{max_extra_diff|round(places=3)}}, max_missing_diff={{max_missing_diff|round(places=3)}}",
            max_extra_diff=max_extra_diff,
            max_missing_diff=max_missing_diff,
        )
        Log.note("old={{old}}, new={{new}}",
                 old=old_segments,
                 new=new_segments)
    else:
        Log.note("Agree")

    if show and len(pushes):
        if show_old:
            assign_colors(values, old_segments, title="OLD " + title)
        assign_colors(values, new_segments, title="NEW " + title)
        if url:
            webbrowser.open(url)

    if isinstance(deviant_summary, bigquery.Table):
        Log.note("BigQuery summary not updated")
        return

    deviant_summary.upsert(
        where={"eq": {"id": sig.id}},
        doc=Data(
            id=sig_id,
            title=title,
            num_pushes=len(values),
            num_segments=len(new_segments) - 1,
            relative_noise=relative_noise,
            overall_dev_status=overall_dev_status,
            overall_dev_score=overall_dev_score,
            last_mean=last_mean,
            last_std=last_std,
            last_dev_status=last_dev_status,
            last_dev_score=last_dev_score,
            last_updated=Date.now(),
            is_diff=_is_diff,
            max_extra_diff=max_extra_diff,
            max_missing_diff=max_missing_diff,
            num_new_segments=len(new_segments),
            num_old_segments=len(old_segments),
        ),
    )
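The per-segment normalization step inside process() can be tried in isolation. A self-contained sketch with synthetic values (numpy only; find_segments and deviance are not reproduced):

import numpy as np

# Synthetic values with two segments: [0:3) and [3:7)
values = np.array([10.0, 11.0, 9.5, 20.0, 21.0, 19.0, 20.5])
segments = [0, 3, 7]

# Moments of the last segment, as in process()
last = values[segments[-2]:segments[-1]]
last_mean, last_std = np.mean(last), np.std(last)

# Shift each segment toward last_mean, then rescale by the std ratio,
# mirroring the expression in process(), so all segments pool into one
# comparable sample for the deviance test.
normalized = []
for s, e in zip(segments[:-1], segments[1:]):
    seg = values[s:e]
    normalized.extend((seg + last_mean - np.mean(seg)) * last_std / np.std(seg))

print(np.mean(normalized), np.std(normalized))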