Example #1
        self.report_caption(caption)
        self.report_caption(self.controls.score.currentText())

        if self.score != 0:
            self.report_raw(self.get_info_text(short=False))

    def set_visual_settings(self, key, value):
        self.plot.parameter_setter.set_parameter(key, value)
        self.visual_settings[key] = value


def gaussian_smoother(x, y, sigma=1.0):
    x = np.asarray(x)
    y = np.asarray(y)

    gamma = 1. / (2 * sigma ** 2)
    a = 1. / (sigma * np.sqrt(2 * np.pi))

    if x.shape != y.shape:
        raise ValueError("x and y must have the same shape")

    def smoother(xs):
        W = a * np.exp(-gamma * ((xs - x) ** 2))
        return np.average(y, weights=W)

    return np.vectorize(smoother, otypes=[float])


if __name__ == "__main__":  # pragma: no cover
    WidgetPreview(OWCalibrationPlot).run(results_for_preview())
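
# Usage sketch (not part of the Orange source; the data below are made up):
# gaussian_smoother returns a vectorized Nadaraya-Watson-style kernel smoother.
import numpy as np

rng = np.random.default_rng(0)
xs = np.linspace(0, 2 * np.pi, 50)
ys = np.sin(xs) + rng.normal(scale=0.1, size=xs.shape)

smooth = gaussian_smoother(xs, ys, sigma=0.3)
grid = np.linspace(0, 2 * np.pi, 200)
smoothed = smooth(grid)        # kernel-weighted average of ys at each grid point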
Example #2
            for i in range(n):
                self._set_item(n + 2, i + 2, _sum_item(int(colsum[i]), "t"))
                self._set_item(i + 2, n + 2, _sum_item(int(rowsum[i]), "l"))
            self._set_item(n + 2, n + 2, _sum_item(int(rowsum.sum())))

    def send_report(self):
        """Send report"""
        if self.results is not None and self.selected_learner:
            self.report_table(
                "Confusion matrix for {} (showing {})".format(
                    self.learners[self.selected_learner[0]],
                    self.quantities[self.selected_quantity].lower()),
                self.tableview)

    @classmethod
    def migrate_settings(cls, settings, version):
        if not version:
            # For some period of time the 'selected_learner' property was
            # changed from List[int] -> int
            # (commit 4e49bb3fd0e11262f3ebf4b1116a91a4b49cc982) and then back
            # again (commit 8a492d79a2e17154a0881e24a05843406c8892c0)
            if "selected_learner" in settings and \
                    isinstance(settings["selected_learner"], int):
                settings["selected_learner"] = [settings["selected_learner"]]


if __name__ == "__main__":  # pragma: no cover
    from Orange.widgets.evaluate.utils import results_for_preview
    WidgetPreview(OWConfusionMatrix).run(results_for_preview("iris"))
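
# Migration sketch (illustrative, not part of the Orange source): older,
# unversioned settings stored 'selected_learner' as a plain int; the hook
# above wraps it back into a list, and leaves already-migrated lists alone.
old_settings = {"selected_learner": 1}
OWConfusionMatrix.migrate_settings(old_settings, version=None)
assert old_settings["selected_learner"] == [1]

new_settings = {"selected_learner": [0]}          # already a list: untouched
OWConfusionMatrix.migrate_settings(new_settings, version=None)
assert new_settings["selected_learner"] == [0]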
Example #3
    # going through point (0, 1). The point(s) with the minimum
    # distance are our result

    # y = m * x + 1
    # m * x - 1 * y + 1 = 0, i.e. (a, b, c) = (m, -1, 1)
    a, b, c = slope, -1, 1
    dist = distance_to_line(a, b, c, fpr, tpr)
    mindist = np.min(dist)

    return np.flatnonzero((dist - mindist) <= tol)


def distance_to_line(a, b, c, x0, y0):
    """
    Return the distance to a line ax + by + c = 0
    """
    assert a != 0 or b != 0
    return np.abs(a * x0 + b * y0 + c) / np.sqrt(a**2 + b**2)


def roc_iso_performance_slope(fp_cost, fn_cost, p):
    assert 0 <= p <= 1
    if fn_cost * p == 0:
        return np.inf
    else:
        return (fp_cost * (1. - p)) / (fn_cost * p)


if __name__ == "__main__":  # pragma: no cover
    WidgetPreview(OWROCAnalysis).run(results_for_preview())
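
# Quick numeric check (illustrative): the iso-performance slope is
# fp_cost * (1 - p) / (fn_cost * p), so equal costs with a balanced prior
# give the 45-degree line, and the slope diverges as positives vanish.
print(roc_iso_performance_slope(fp_cost=1, fn_cost=1, p=0.5))   # 1.0
print(roc_iso_performance_slope(fp_cost=2, fn_cost=1, p=0.1))   # ~18.0
print(roc_iso_performance_slope(fp_cost=1, fn_cost=1, p=0.0))   # inf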
Example #4
        caption = report.list_legend(self.classifiers_list_box,
                                     self.selected_classifiers)
        self.report_items((("Target class", self.target_cb.currentText()), ))
        self.report_plot()
        self.report_caption(caption)


def lift_curve_from_results(results, target, clf_idx, subset=slice(0, -1)):
    actual = results.actual[subset]
    scores = results.probabilities[clf_idx][subset][:, target]
    yrate, tpr, thresholds = lift_curve(actual, scores, target)
    return yrate, tpr, thresholds


def lift_curve(ytrue, ypred, target=1):
    P = np.sum(ytrue == target)
    N = ytrue.size - P

    if P == 0 or N == 0:
        # Undefined TP and FP rate
        return np.array([]), np.array([]), np.array([])

    fpr, tpr, thresholds = skl_metrics.roc_curve(ytrue, ypred, pos_label=target)
    # Rate of positive predictions: FPR and TPR weighted by the class priors.
    rpp = fpr * (N / (P + N)) + tpr * (P / (P + N))
    return rpp, tpr, thresholds


if __name__ == "__main__":  # pragma: no cover
    from Orange.widgets.evaluate.utils import results_for_preview
    WidgetPreview(OWLiftCurve).run(results_for_preview())
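
# Usage sketch (illustrative data, not from the Orange source): lift_curve
# returns the rate of positive predictions (rpp), the TPR and the thresholds.
import numpy as np

ytrue = np.array([1, 1, 0, 1, 0, 0, 0, 1])
scores = np.array([0.9, 0.8, 0.7, 0.6, 0.4, 0.3, 0.2, 0.1])

rpp, tpr, thresholds = lift_curve(ytrue, scores, target=1)
# Plotting tpr against rpp gives a lift/cumulative-gains style curve;
# here P == N, so rpp is simply the average of fpr and tpr.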
Example #5
    # going through point (0, 1). The point(s) with the minimum
    # distance are our result

    # y = m * x + 1
    # m * x - 1 * y + 1 = 0, i.e. (a, b, c) = (m, -1, 1)
    a, b, c = slope, -1, 1
    dist = distance_to_line(a, b, c, fpr, tpr)
    mindist = numpy.min(dist)

    return numpy.flatnonzero((dist - mindist) <= tol)


def distance_to_line(a, b, c, x0, y0):
    """
    Return the distance to a line ax + by + c = 0
    """
    assert a != 0 or b != 0
    return numpy.abs(a * x0 + b * y0 + c) / numpy.sqrt(a ** 2 + b ** 2)


def roc_iso_performance_slope(fp_cost, fn_cost, p):
    assert 0 <= p <= 1
    if fn_cost * p == 0:
        return numpy.inf
    else:
        return (fp_cost * (1. - p)) / (fn_cost * p)


if __name__ == "__main__":  # pragma: no cover
    WidgetPreview(OWROCAnalysis).run(results_for_preview())
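
# Hand-checkable example (not from the Orange source): the distance from the
# origin to the line x + y - 1 = 0 is 1/sqrt(2); the call is vectorized over
# arrays of points, as used above with the fpr/tpr arrays.
import numpy

print(distance_to_line(1, 1, -1, 0.0, 0.0))    # ~0.7071 (= 1/sqrt(2))
print(distance_to_line(1, 1, -1,
                       numpy.array([0.0, 1.0, 0.5]),
                       numpy.array([0.0, 0.0, 0.5])))   # [~0.7071, 0.0, 0.0]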
Example #6
            for i in range(n):
                self._set_item(n + 2, i + 2, _sum_item(int(colsum[i]), "t"))
                self._set_item(i + 2, n + 2, _sum_item(int(rowsum[i]), "l"))
            self._set_item(n + 2, n + 2, _sum_item(int(rowsum.sum())))

    def send_report(self):
        """Send report"""
        if self.results is not None and self.selected_learner:
            self.report_table(
                "Confusion matrix for {} (showing {})".
                format(self.learners[self.selected_learner[0]],
                       self.quantities[self.selected_quantity].lower()),
                self.tableview)

    @classmethod
    def migrate_settings(cls, settings, version):
        if not version:
            # For some period of time the 'selected_learner' property was
            # changed from List[int] -> int
            # (commit 4e49bb3fd0e11262f3ebf4b1116a91a4b49cc982) and then back
            # again (commit 8a492d79a2e17154a0881e24a05843406c8892c0)
            if "selected_learner" in settings and \
                    isinstance(settings["selected_learner"], int):
                settings["selected_learner"] = [settings["selected_learner"]]


if __name__ == "__main__":  # pragma: no cover
    from Orange.widgets.evaluate.utils import results_for_preview
    WidgetPreview(OWConfusionMatrix).run(results_for_preview("iris"))
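
# Complementary sketch (illustrative dict): migrate_settings above only
# rewrites settings saved without a version; versioned settings pass through.
versioned = {"selected_learner": 2}
OWConfusionMatrix.migrate_settings(versioned, version=1)
assert versioned["selected_learner"] == 2          # unchanged: int stays an int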
Example #7
    def send_report(self):
        if self.results is None:
            return
        caption = report.list_legend(self.classifiers_list_box,
                                     self.selected_classifiers)
        self.report_items((("Target class", self.target_cb.currentText()),))
        self.report_plot()
        self.report_caption(caption)


def gaussian_smoother(x, y, sigma=1.0):
    x = np.asarray(x)
    y = np.asarray(y)

    gamma = 1. / (2 * sigma ** 2)
    a = 1. / (sigma * np.sqrt(2 * np.pi))

    if x.shape != y.shape:
        raise ValueError("x and y must have the same shape")

    def smoother(xs):
        W = a * np.exp(-gamma * ((xs - x) ** 2))
        return np.average(y, weights=W)

    return np.vectorize(smoother, otypes=[float])


if __name__ == "__main__":  # pragma: no cover
    WidgetPreview(OWCalibrationPlot).run(results_for_preview())
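
# Bandwidth sketch (illustrative data): sigma sets how wide a neighbourhood
# each evaluation point averages over, so a larger sigma flattens the curve.
import numpy as np

xs = np.arange(10, dtype=float)
ys = np.zeros(10)
ys[5] = 10.0                                  # a single spike at x = 5

narrow = gaussian_smoother(xs, ys, sigma=0.5)
wide = gaussian_smoother(xs, ys, sigma=3.0)
print(narrow(5.0), wide(5.0))                 # the spike survives the narrow kernel;
                                              # the wide kernel spreads it out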
Example #8
        caption = report.list_legend(self.classifiers_list_box,
                                     self.selected_classifiers)
        self.report_items((("Target class", self.target_cb.currentText()),))
        self.report_plot()
        self.report_caption(caption)


def lift_curve_from_results(results, target, clf_idx, subset=slice(0, -1)):
    actual = results.actual[subset]
    scores = results.probabilities[clf_idx][subset][:, target]
    yrate, tpr, thresholds = lift_curve(actual, scores, target)
    return yrate, tpr, thresholds


def lift_curve(ytrue, ypred, target=1):
    P = np.sum(ytrue == target)
    N = ytrue.size - P

    if P == 0 or N == 0:
        # Undefined TP and FP rate
        return np.array([]), np.array([]), np.array([])

    fpr, tpr, thresholds = skl_metrics.roc_curve(ytrue, ypred, pos_label=target)
    # Rate of positive predictions: FPR and TPR weighted by the class priors.
    rpp = fpr * (N / (P + N)) + tpr * (P / (P + N))
    return rpp, tpr, thresholds


if __name__ == "__main__":  # pragma: no cover
    from Orange.widgets.evaluate.utils import results_for_preview
    WidgetPreview(OWLiftCurve).run(results_for_preview())
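
# Sketch with a stand-in for Orange's evaluation Results (illustrative):
# lift_curve_from_results only reads .actual and .probabilities, so a bare
# namespace with matching shapes is enough to exercise it.
import numpy as np
from types import SimpleNamespace

fake_results = SimpleNamespace(
    actual=np.array([1, 0, 1, 0, 1, 0]),
    probabilities=np.array([[[0.2, 0.8], [0.7, 0.3], [0.4, 0.6],
                             [0.9, 0.1], [0.3, 0.7], [0.6, 0.4]]]),
)
yrate, tpr, thresholds = lift_curve_from_results(fake_results, target=1, clf_idx=0)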