Example no. 1
0
    def __init__(self, pos_score, neg_score):
        """Build the graph ops for a positive/negative ratio (PN) metric.

        Counts, accumulated across batches, how many score pairs are
        correctly ordered (neg_score < pos_score, "right") versus
        mis-ordered (pos_score <= neg_score, "wrong"), and exposes
        PN = (right + 1) / (wrong + 1).

        Args:
            pos_score (Variable): scores of positive samples.
            neg_score (Variable): scores of negative samples, elementwise
                comparable with ``pos_score``.

        Raises:
            ValueError: if either input is not a ``Variable``.
        """
        # Capture the call arguments before any new locals are created so
        # they can be forwarded to the LayerHelper unchanged.
        kwargs = locals()
        del kwargs['self']

        # Validate before building any graph state (matches the other
        # metrics in this file).  Note: pos_score/neg_score are required
        # positional parameters, so they are always present in kwargs —
        # no membership check is needed.
        if not isinstance(pos_score, Variable):
            raise ValueError("pos_score must be Variable, but received %s" %
                             type(pos_score))
        if not isinstance(neg_score, Variable):
            raise ValueError("neg_score must be Variable, but received %s" %
                             type(neg_score))

        helper = LayerHelper("PaddleRec_PosNegRatio", **kwargs)

        # 1.0 where the pair is mis-ordered (pos <= neg), else 0.0.
        wrong = fluid.layers.cast(fluid.layers.less_equal(
            pos_score, neg_score),
                                  dtype='float32')
        wrong_cnt = fluid.layers.reduce_sum(wrong)
        # 1.0 where the pair is correctly ordered (neg < pos), else 0.0.
        right = fluid.layers.cast(fluid.layers.less_than(neg_score, pos_score),
                                  dtype='float32')
        right_cnt = fluid.layers.reduce_sum(right)

        # Persistable accumulators that survive across batches.
        global_right_cnt, _ = helper.create_or_get_global_variable(
            name="right_cnt", persistable=True, dtype='float32', shape=[1])
        global_wrong_cnt, _ = helper.create_or_get_global_variable(
            name="wrong_cnt", persistable=True, dtype='float32', shape=[1])

        for var in [global_right_cnt, global_wrong_cnt]:
            helper.set_variable_initializer(
                var, Constant(value=0.0, force_cpu=True))

        # In-place accumulation: global count += batch count.
        helper.append_op(type="elementwise_add",
                         inputs={
                             "X": [global_right_cnt],
                             "Y": [right_cnt]
                         },
                         outputs={"Out": [global_right_cnt]})
        helper.append_op(type="elementwise_add",
                         inputs={
                             "X": [global_wrong_cnt],
                             "Y": [wrong_cnt]
                         },
                         outputs={"Out": [global_wrong_cnt]})
        # +1.0 smoothing keeps the ratio defined before any data is seen.
        self.pn = (global_right_cnt + 1.0) / (global_wrong_cnt + 1.0)

        # (variable name, dtype) pairs describing the persistable state.
        self._global_metric_state_vars = dict()
        self._global_metric_state_vars['right_cnt'] = (global_right_cnt.name,
                                                       "float32")
        self._global_metric_state_vars['wrong_cnt'] = (global_wrong_cnt.name,
                                                       "float32")

        self.metrics = dict()
        self.metrics['WrongCnt'] = global_wrong_cnt
        self.metrics['RightCnt'] = global_right_cnt
        self.metrics['PN'] = self.pn
Example no. 2
0
    def __init__(self, input, label, k=20):
        """Build the graph ops for a Recall@k accuracy metric.

        Uses ``accuracy(input, label, k)`` for the per-batch top-k hit
        rate, then accumulates instance and hit counts into persistable
        variables so the global accuracy is pos_cnt / ins_cnt.

        Args:
            input (Variable): prediction scores, one row per instance.
            label (Variable): ground-truth labels.
            k (int): the ``k`` of Recall@k. Defaults to 20.

        Raises:
            ValueError: if ``input`` or ``label`` is not a ``Variable``.
        """
        # Capture the call arguments for the LayerHelper before any new
        # locals are created.
        kwargs = locals()
        del kwargs['self']
        self.k = k

        if not isinstance(input, Variable):
            raise ValueError("input must be Variable, but received %s" %
                             type(input))
        if not isinstance(label, Variable):
            raise ValueError("label must be Variable, but received %s" %
                             type(label))

        helper = LayerHelper("PaddleRec_RecallK", **kwargs)
        # Per-batch top-k accuracy (presumably a scalar in [0, 1] —
        # depends on the ``accuracy`` helper's contract).
        batch_accuracy = accuracy(input, label, self.k)
        # Persistable accumulators that survive across batches.
        global_ins_cnt, _ = helper.create_or_get_global_variable(
            name="ins_cnt", persistable=True, dtype='float32', shape=[1])
        global_pos_cnt, _ = helper.create_or_get_global_variable(
            name="pos_cnt", persistable=True, dtype='float32', shape=[1])

        for var in [global_ins_cnt, global_pos_cnt]:
            helper.set_variable_initializer(
                var, Constant(value=0.0, force_cpu=True))

        # Count the instances in this batch by summing a tensor of ones
        # shaped like ``label`` (the shape is only known at run time).
        tmp_ones = fluid.layers.fill_constant(shape=fluid.layers.shape(label),
                                              dtype="float32",
                                              value=1.0)
        batch_ins = fluid.layers.reduce_sum(tmp_ones)
        # Hits in this batch = instance count * accuracy rate.
        batch_pos = batch_ins * batch_accuracy

        # In-place accumulation: global count += batch count.
        helper.append_op(type="elementwise_add",
                         inputs={
                             "X": [global_ins_cnt],
                             "Y": [batch_ins]
                         },
                         outputs={"Out": [global_ins_cnt]})

        helper.append_op(type="elementwise_add",
                         inputs={
                             "X": [global_pos_cnt],
                             "Y": [batch_pos]
                         },
                         outputs={"Out": [global_pos_cnt]})

        # Global accuracy: accumulated hits over accumulated instances.
        self.acc = global_pos_cnt / global_ins_cnt

        # (variable name, dtype) pairs describing the persistable state.
        self._global_metric_state_vars = dict()
        self._global_metric_state_vars['ins_cnt'] = (global_ins_cnt.name,
                                                     "float32")
        self._global_metric_state_vars['pos_cnt'] = (global_pos_cnt.name,
                                                     "float32")

        metric_name = "Acc(Recall@%d)" % self.k
        self.metrics = dict()
        self.metrics["InsCnt"] = global_ins_cnt
        self.metrics["RecallCnt"] = global_pos_cnt
        self.metrics[metric_name] = self.acc
Example no. 3
0
    def __init__(self, input, label, class_num):
        """Build the graph ops for a multi-class precision/recall metric.

        Wraps the ``precision_recall`` operator: it emits per-batch and
        accumulated metric vectors (shape [6]) and carries per-class
        [TP, FP, TN, FN] statistics across batches in the persistable
        ``states_info`` variable (shape [class_num, 4]).

        Args:
            input (Variable): prediction scores, one row per instance.
            label (Variable): ground-truth class labels; cast to int32.
            class_num (int): number of classes.

        Raises:
            ValueError: if ``input`` or ``label`` is not a ``Variable``.
        """
        # Capture the call arguments for the LayerHelper before any new
        # locals are created.
        kwargs = locals()
        del kwargs['self']

        self.num_cls = class_num

        if not isinstance(input, Variable):
            raise ValueError("input must be Variable, but received %s" %
                             type(input))
        if not isinstance(label, Variable):
            raise ValueError("label must be Variable, but received %s" %
                             type(label))

        helper = LayerHelper("PaddleRec_PrecisionRecall", **kwargs)
        label = paddle.cast(label, dtype="int32")
        label.stop_gradient = True
        # Top-1 prediction: the max probability and its class index.
        max_probs, indices = paddle.topk(input, k=1)
        indices = paddle.cast(indices, dtype="int32")
        indices.stop_gradient = True

        # Persistable per-class [TP, FP, TN, FN] counters, kept across
        # batches (see the "[TP FP TN FN]" metric below).
        states_info, _ = helper.create_or_get_global_variable(
            name="states_info",
            persistable=True,
            dtype='float32',
            shape=[self.num_cls, 4])
        states_info.stop_gradient = True

        helper.set_variable_initializer(states_info,
                                        Constant(value=0.0, force_cpu=True))

        # Metric outputs of the op; non-persistable, recomputed each run.
        # NOTE(review): the 6 entries presumably cover macro/micro
        # precision, recall and F1 — confirm against the op's docs.
        batch_metrics, _ = helper.create_or_get_global_variable(
            name="batch_metrics",
            persistable=False,
            dtype='float32',
            shape=[6])
        accum_metrics, _ = helper.create_or_get_global_variable(
            name="global_metrics",
            persistable=False,
            dtype='float32',
            shape=[6])

        # Scratch buffer the op writes the updated accumulated states to.
        batch_states = paddle.full(shape=[self.num_cls, 4], fill_value=0.0)
        batch_states.stop_gradient = True

        helper.append_op(type="precision_recall",
                         attrs={'class_number': self.num_cls},
                         inputs={
                             'MaxProbs': [max_probs],
                             'Indices': [indices],
                             'Labels': [label],
                             'StatesInfo': [states_info]
                         },
                         outputs={
                             'BatchMetrics': [batch_metrics],
                             'AccumMetrics': [accum_metrics],
                             'AccumStatesInfo': [batch_states]
                         })
        # Copy the updated accumulated states back into the persistable
        # variable so the op reads them again on the next batch.
        helper.append_op(type="assign",
                         inputs={'X': [batch_states]},
                         outputs={'Out': [states_info]})

        batch_states.stop_gradient = True
        states_info.stop_gradient = True

        # (variable name, dtype) pairs describing the persistable state.
        self._global_metric_state_vars = dict()
        self._global_metric_state_vars['states_info'] = (states_info.name,
                                                         "float32")

        self.metrics = dict()
        self.metrics["precision_recall_f1"] = accum_metrics
        self.metrics["[TP FP TN FN]"] = states_info