Example #1
0
def predict_one(xi, m):
    """
    Return the label and a :class:`c_double` array of decision values of
    the test instance *xi* using :class:`LearnerModel` *m*.

    *xi* can be a :class:`list` or a :class:`dict` as in LIBLINEAR python
    interface. It can also be a LIBLINEAR feature_node array.

    .. note::

        This function is designed to analyze the result of one instance.
        It has a severe efficiency issue and should be used only by
        :func:`libshorttext.classifier.predict_single_text`. If many
        instances need to be predicted, they should be stored in a file
        and predicted by :func:`predict`.

    .. warning::

        The content of *xi* may be **changed** after the function call.
    """

    # Normalize the input into a LIBLINEAR feature_node array.
    if isinstance(xi, (list, dict)):
        xi = liblinear.gen_feature_nodearray(xi)[0]
    elif not isinstance(xi, POINTER(liblinear.feature_node)):
        raise TypeError("xi should be a test instance")

    # BUG FIX: removed leftover Python-2 debug statements
    # (print "pppppppppppppppp1" / "...2") -- they spammed stdout and are
    # a SyntaxError under Python 3.
    learner_param = LearnerParameter(m.param_options[0], m.param_options[1])

    if m.bias >= 0:
        # Walk to the terminator node (index == -1).
        i = 0
        while xi[i].index != -1:
            i += 1

        # Already has bias, or bias reserved.
        # Actually this statement should be true if
        # the data is read by read_SVMProblem.
        if i > 0 and xi[i - 1].index == m.nr_feature + 1:
            i -= 1

        # Write the bias feature followed by a fresh terminator.
        xi[i] = liblinear.feature_node(m.nr_feature + 1, m.bias)
        xi[i + 1] = liblinear.feature_node(-1, 0)

    LearnerProblem.normalize_one(xi, learner_param, m.idf)

    dec_values = (c_double * m.nr_class)()
    label = liblinear.liblinear.predict_values(m, xi, dec_values)

    return label, dec_values
Example #2
0
    def set_bias(self, bias):
        """Update the bias term of the stored problem to *bias*.

        A no-op when the new value equals the current one.  When the bias
        feature is introduced (old bias negative, new non-negative) the
        feature count grows by one; when it is dropped (old non-negative,
        new negative) the count shrinks and a terminator node is written
        instead.
        """
        if bias == self.bias:
            return

        # Default: feature count unchanged, only the bias value differs.
        bias_node = liblinear.feature_node(self.n, bias)
        if self.bias < 0 <= bias:
            # Bias feature is being added: one extra feature column.
            self.n += 1
            bias_node = liblinear.feature_node(self.n, bias)
        elif bias < 0 <= self.bias:
            # Bias feature is being removed: write a terminator instead.
            self.n -= 1
            bias_node = liblinear.feature_node(-1, bias)

        # Patch the slot just before each instance's terminator, and the
        # shared backing x_space array.
        for idx in range(1, self.l):
            self.x[idx][-2] = bias_node
        self.x_space[self.n_x_space - 2] = bias_node
        self.bias = bias
Example #3
0
    def set_bias(self, bias):
        """Set the bias term of the problem to *bias*.

        Does nothing when *bias* equals the current value.  Turning the
        bias on (old value negative, new non-negative) increases the
        feature count by one; turning it off decreases it and writes a
        terminator node (index -1) into the reserved slot instead.
        """
        if self.bias == bias:
            return
        # Default case: feature count unchanged, only the value differs.
        node = liblinear.feature_node(self.n, bias)
        if bias >= 0 and self.bias < 0:
            # Bias feature is being introduced: one more feature column.
            self.n += 1
            node = liblinear.feature_node(self.n, bias)
        if bias < 0 and self.bias >= 0:
            # Bias feature is being removed: overwrite with a terminator.
            self.n -= 1
            node = liblinear.feature_node(-1, bias)

        # Patch the slot just before each instance's terminator node.
        # NOTE(review): the loop starts at 1, skipping self.x[0] --
        # presumably the instance array is effectively 1-based here;
        # confirm against the rest of the class.
        for i in range(1, self.l):
            self.x[i][-2] = node
        self.x_space[self.n_x_space - 2] = node
        self.bias = bias
Example #4
0
def predict_one(xi, m):
    """
    Predict a single test instance *xi* with :class:`LearnerModel` *m*.

    Returns a ``(label, dec_values)`` pair, where *dec_values* is a
    :class:`c_double` array of per-class decision values.

    *xi* may be a :class:`list` or a :class:`dict` (LIBLINEAR python
    interface style), or an existing LIBLINEAR feature_node array.

    .. note::

        Intended for analyzing one instance at a time; it is far too
        slow for batch prediction.  Use only via
        :func:`libshorttext.classifier.predict_single_text`; for many
        instances, store them in a file and call :func:`predict`.

    .. warning::

        The content of *xi* may be **changed** after the function call.
    """

    if isinstance(xi, (list, dict)):
        xi = liblinear.gen_feature_nodearray(xi)[0]
    elif not isinstance(xi, POINTER(liblinear.feature_node)):
        raise TypeError("xi should be a test instance")

    learner_param = LearnerParameter(m.param_options[0], m.param_options[1])

    if m.bias >= 0:
        # Locate the terminating node (index == -1).
        end = 0
        while xi[end].index != -1:
            end += 1

        # The slot before the terminator may already hold the bias
        # feature (it should, when the data came from read_SVMProblem);
        # in that case overwrite it rather than appending.
        if end and xi[end - 1].index == m.nr_feature + 1:
            end -= 1

        xi[end] = liblinear.feature_node(m.nr_feature + 1, m.bias)
        xi[end + 1] = liblinear.feature_node(-1, 0)

    LearnerProblem.normalize_one(xi, learner_param, m.idf)

    dec_values = (c_double * m.nr_class)()
    predicted = liblinear.liblinear.predict_values(m, xi, dec_values)
    return predicted, dec_values