import numpy as np
from sklearn.utils import check_array


def _ll_grad(w, X, X2, y, l2):
    """
    Logistic regression log-likelihood gradient for the two-factor model
    P(y=1 | x, x2) = invlogit(w1.x) * invlogit(w2.x2), given the stacked
    weights w = [w1, w2], the data X and X2, and the labels y.  Returns the
    gradient of the L2-penalized negative log-likelihood.
    """
    w1 = w[:X.shape[1]]
    w2 = w[X.shape[1]:]
    z1 = np.dot(w1, np.transpose(X))
    z2 = np.dot(w2, np.transpose(X2))
    
    p1 = invlogit_vect(z1)
    p2 = invlogit_vect(z2)
    p = np.multiply(p1, p2)

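    # With p = p1 * p2 and the per-sample log-likelihood
    # l = y*log(p) + (1 - y)*log(1 - p):
    #   dl/dp  = (y - p) / (p * (1 - p))
    #   dp/dz1 = p1 * (1 - p1) * p2 = p * (1 - p1)
    # so dl/dz1 = invlogit(-z1) * (y - p) / (1 - p), and symmetrically for z2.
    # The two blocks below accumulate these per-sample terms over the rows of
    # X and X2.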
    g1 = np.dot(np.transpose(X), 
                np.multiply(invlogit_vect(-z1), 
                            np.divide(np.subtract(y, p), 
                                      np.subtract(1, p))))
    g2 = np.dot(np.transpose(X2), 
                np.multiply(invlogit_vect(-z2), 
                            np.divide(np.subtract(y, p), 
                                      np.subtract(1, p))))
    
    g = np.concatenate((g1, g2))
    g -= np.multiply(l2, w)
    return -1 * g
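

# `invlogit_vect` is not defined in these snippets.  Judging by its name and how
# it is used, it is a vectorized inverse logit (sigmoid).  The stand-in below is
# NOT from the original source; it is only here so the file is self-contained.
# scipy.special.expit would be a numerically robust drop-in for the same thing.
def invlogit_vect(z):
    """Elementwise inverse logit 1 / (1 + exp(-z)) (illustrative stand-in)."""
    return 1.0 / (1.0 + np.exp(-np.asarray(z, dtype=float)))
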
    def predict_proba(self, X, X2):
        """
        Returns the joint probability of class 1 for each paired row of X and X2.
        """
        try:
            getattr(self, "intercept1_")
            getattr(self, "intercept2_")
            getattr(self, "coef1_")
            getattr(self, "coef2_")
        except AttributeError:
            raise RuntimeError(
                "You must train the classifier before predicting data!")

        X = check_array(X)
        X2 = check_array(X2)

        if self.fit_first_intercept:
            X = np.insert(X, 0, 1, axis=1)
        if self.fit_second_intercept:
            X2 = np.insert(X2, 0, 1, axis=1)

        w = np.insert(self.coef1_, 0, self.intercept1_)
        w2 = np.insert(self.coef2_, 0, self.intercept2_)
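        # With X of shape (n, d1) and X2 of shape (n, d2), each dot product below
        # is a length-n vector, so the elementwise product is the per-sample joint
        # probability P(y=1 | x, x2) = invlogit(w1.x) * invlogit(w2.x2).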
        return (invlogit_vect(np.dot(w, np.transpose(X))) *
                invlogit_vect(np.dot(w2, np.transpose(X2))))
def _ll(w, X, X2, y, l2):
    """
    Logistic regression log-likelihood for the two-factor model
    P(y=1 | x, x2) = invlogit(w1.x) * invlogit(w2.x2), given the stacked
    weights w = [w1, w2], the data X and X2, and the labels y.  Returns the
    L2-penalized negative log-likelihood.
    """
    w1 = w[:X.shape[1]]
    w2 = w[X.shape[1]:]

    p1 = invlogit_vect(np.dot(w1, np.transpose(X)))
    p2 = invlogit_vect(np.dot(w2, np.transpose(X2)))
    p = np.multiply(p1, p2)
    
    ll = -np.sum(np.multiply(y, np.log(p)))
    ll -= np.sum(np.multiply(1 - y, np.log(1 - p)))
    # np.sum keeps the penalty a scalar whether l2 is a single value or a
    # per-weight vector (np.dot would return an array for scalar l2).
    ll += np.sum(np.multiply(np.divide(l2, 2), np.multiply(w, w)))
    return ll
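

# The original fit routine is not included in these snippets.  A hypothetical
# sketch (the name and the L-BFGS-B choice are assumptions, not taken from the
# source) of how the two-factor objective/gradient pair above would typically
# be minimized:
def _fit_joint_weights_sketch(X, X2, y, l2, _nll=_ll, _grad=_ll_grad):
    """Minimize the penalized negative log-likelihood of the two-factor model.

    X and X2 are assumed to already contain any intercept columns.  The default
    arguments bind the two-factor _ll and _ll_grad defined above, so the
    single-model _ll_grad defined later in this file cannot shadow them.
    """
    from scipy.optimize import minimize

    w0 = np.zeros(X.shape[1] + X2.shape[1])
    res = minimize(_nll, w0, args=(X, X2, y, l2), jac=_grad, method="L-BFGS-B")
    return res.x[:X.shape[1]], res.x[X.shape[1]:]
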
def _ll_grad(w, X, y, l2):
    """
    Logistic regression log-likelihood gradient given the weights w, the data
    X, and the labels y.  Returns the gradient of the L2-penalized negative
    log-likelihood.
    """
    p = invlogit_vect(np.dot(w, np.transpose(X)))
    g = np.dot(np.transpose(X), np.subtract(y, p))
    g -= np.multiply(l2, w)
    return -1 * g
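

# A hypothetical sanity check (not part of the original source): compare the
# single-model _ll_grad above against a finite-difference gradient of the
# matching penalized negative log-likelihood, via scipy.optimize.check_grad.
def _check_single_model_gradient_sketch(seed=0):
    from scipy.optimize import check_grad

    rng = np.random.RandomState(seed)
    X = rng.randn(20, 3)
    y = rng.randint(0, 2, size=20).astype(float)
    l2 = 0.1
    w0 = 0.01 * rng.randn(3)

    def nll(w, X, y, l2):
        # Penalized negative log-likelihood whose gradient _ll_grad computes.
        p = invlogit_vect(np.dot(w, np.transpose(X)))
        ll = np.sum(y * np.log(p)) + np.sum((1 - y) * np.log(1 - p))
        return -(ll - (l2 / 2) * np.dot(w, w))

    # check_grad returns the 2-norm of (numerical - analytical); it should be
    # close to zero.
    return check_grad(nll, _ll_grad, w0, X, y, l2)
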
    def predict_proba(self, X):
        """
        Returns the probability of class 1 for each x in X.
        """
        try:
            getattr(self, "intercept_")
            getattr(self, "coef_")
        except AttributeError:
            raise RuntimeError("You must train the classifier before predicting data!")

        X = check_array(X)
        if self.fit_intercept:
            X = np.insert(X, 0, 1, axis=1)

        w = np.insert(self.coef_, 0, self.intercept_)
        return invlogit_vect(np.dot(w, np.transpose(X)))
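

# For reference (not part of the original source): when fit_intercept is True the
# column of ones is inserted at position 0 and the weight vector is built as
# [intercept_, coef_...], so the expression above is equivalent to computing
# invlogit_vect(np.dot(X, self.coef_) + self.intercept_) on the un-augmented X.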