Example #1
 def test_mean_hinge_loss(self):
     data = np.array([[0., 1., -1.], [0., -1., 1.]])
     labels = np.array([[1], [1]])
     weights = np.array([[0, 0, 1]])
     exp_loss = 0.5
     act_loss = hinge.hinge(weights, data, labels)
     self.assertEqual(act_loss, exp_loss, "Mean hinge loss")
Example #2
 def test_hinge_no_loss(self):
     data = np.array([[0., 10., -1.]])
     labels = np.array([[1]])
     weights = np.array([0.3, 0.3, 0.3])
     exp_loss = 0
     act_loss = hinge.hinge(weights, data, labels)
     self.assertEqual(act_loss, exp_loss, "Zero hinge loss")
Example #3
 def test_hinge_loss(self):
     data = np.array([[0., 1., -1.]])
     labels = np.array([[1]])
     weights = np.array([0, 0, 1])
     exp_loss = 1
     act_loss = hinge.hinge(weights, data, labels)
     self.assertEqual(act_loss, exp_loss, "Negative hinge loss")
Example #4
 def fit(self, data, labels, verbose=True):
     self.weights = np.random.random((1, data.shape[1]))
     self.hist_w = np.zeros((self.max_iter, data.shape[1]))
     self.hist_f = np.zeros((self.max_iter, 1))
     for train_id in range(self.max_iter):
         gradient = hinge_grad(self.weights, data, labels)
         self.weights = self.weights - self.eps * gradient
         self.hist_w[train_id] = self.weights
         self.hist_f[train_id] = hinge(self.weights, data, labels)
         if train_id % 100 == 0 and verbose:
             print(train_id, self.hist_f[train_id])
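fit is plain (sub)gradient descent: step against hinge_grad with a fixed step size self.eps, recording the weight and loss history each iteration. hinge_grad itself is not shown; a sketch matching the margin-free hinge above (an assumption about the module, not code from it):

def hinge_grad(weights, data, labels):
    # Subgradient of mean(max(0, -y * (w . x))): rows with a negative
    # margin contribute -y * x; correctly signed rows contribute zero.
    scores = data @ np.ravel(weights)
    y = np.ravel(labels)
    active = (y * scores < 0).astype(float)
    grad = -(active * y) @ data / data.shape[0]
    return grad.reshape(weights.shape)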
Example #5
    def check_backward(self, x_data, t_data, use_cudnn=True):
        x = chainer.Variable(x_data)
        t = chainer.Variable(t_data)
        loss = hinge(x, t, use_cudnn)
        loss.backward()
        self.assertEqual(None, t.grad)

        func = loss.creator
        f = lambda: func.forward((x.data, t.data))
        gx, = gradient_check.numerical_grad(f, (x.data,), (1,), eps=0.01)

        gradient_check.assert_allclose(gx, x.grad)
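check_backward compares the analytic gradient from loss.backward() against a finite-difference estimate; gradient_check.numerical_grad is Chainer's central-difference helper, and the (1,) argument is the upstream gradient. The underlying idea, as a standalone NumPy sketch assuming f() returns a scalar loss (not Chainer's actual implementation):

import numpy as np

def numerical_grad(f, x, eps=0.01):
    # Central differences: perturb one entry at a time and difference
    # the scalar loss. Costs two evaluations of f per entry of x.
    grad = np.zeros_like(x)
    it = np.nditer(x, flags=['multi_index'])
    while not it.finished:
        i = it.multi_index
        orig = x[i]
        x[i] = orig + eps
        f_plus = f()
        x[i] = orig - eps
        f_minus = f()
        x[i] = orig
        grad[i] = (f_plus - f_minus) / (2 * eps)
        it.iternext()
    return grad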
Example #6
def trainspamfilter(xTr, yTr):
    # INPUT:
    # xTr, yTr
    #
    # OUTPUT: w_trained
    #
    # Consider optimizing the input parameters for your loss and GD!

    f = lambda w: hinge(w, xTr, yTr, 0.001)
    # f = lambda w : logistic(w, xTr, yTr, 0.0001)
    # f = lambda w : ridge(w, xTr, yTr, 0.09)
    w_trained = grdescent(f, np.zeros((xTr.shape[0], 1)), 1e-04, 1000)
    io.savemat('w_trained.mat', mdict={'w': w_trained})
    return w_trained
Example #7
def trainspamfilter(xTr, yTr):

    #
    # INPUT:
    # xTr
    # yTr
    #
    # OUTPUT: w_trained
    #
    # Feel free to change this code any way you want

    f = lambda w: hinge(w, xTr, yTr, .1)
    w_trained = grdescent(f, np.zeros((xTr.shape[0], 1)), 1e-06, 2000)
    io.savemat('w_trained.mat', mdict={'w': w_trained})
    return w_trained
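Both trainers rely on the convention, inferred from usage, that f(w) returns the loss and its gradient at w, and that grdescent(f, w0, stepsize, maxiter) iterates from w0. A minimal sketch under that assumption (the actual grdescent may add line search or adaptive step sizes):

import numpy as np

def grdescent(func, w0, stepsize, maxiter, tolerance=1e-02):
    # Fixed-step gradient descent; stop early once the gradient is tiny.
    w = w0
    for _ in range(maxiter):
        loss, grad = func(w)
        if np.linalg.norm(grad) < tolerance:
            break
        w = w - stepsize * grad
    return w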
Example #8
    def check_forward(self, x_data, t_data, use_cudnn=True):
        x_val = chainer.Variable(x_data)
        t_val = chainer.Variable(t_data)
        loss = hinge(x_val, t_val, use_cudnn)
        self.assertEqual(loss.data.shape, ())
        self.assertEqual(loss.data.dtype, numpy.float32)
        loss_value = float(cuda.to_cpu(loss.data))

        # Compute expected value
        loss_expect = 0
        for i in six.moves.range(self.x.shape[0]):
            for j in six.moves.range(self.x.shape[1]):
                xd, td = self.x[i, j], self.t[i, j]
                loss_expect += max(0, 1.0 - xd * td)
        loss_expect /= self.t.shape[0]
        self.assertAlmostEqual(loss_expect, loss_value, places=5)
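The nested loop spells out the standard hinge max(0, 1 - x*t), summed over every entry and divided by the batch size. The same reduction, vectorized:

loss_expect = numpy.maximum(0.0, 1.0 - self.x * self.t).sum() / self.t.shape[0]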
Example #9
    def forward(self, x, t, train=True):
        xp = cuda.get_array_module(*x)

        x = Variable(x, volatile=not train)
        t = Variable(t, volatile=not train)
        h = self.fc(x)
        loss = hinge(h, t, self.penalty)

        if self.penalty == 'l1':
            loss += self.c * F.sum(Variable(abs(self.fc.W),
                                            volatile=not train))

        elif self.penalty == 'l2':
            n = Variable(self.fc.W.dot(self.fc.W.T), volatile=not train)
            loss += self.c * F.reshape(n, ())

        return loss
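The two branches bolt the usual regularizers onto the hinge loss. In plain array terms, with W of shape (1, d): the 'l1' branch adds c * abs(W).sum(), the lasso-style penalty that drives weights to exactly zero, while the 'l2' branch adds c * W.dot(W.T) reshaped to a scalar, i.e. the squared L2 norm (plain weight decay).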
Example #10
    def forward(self, x, t, train=True):
        chainer.config.train = train
        xp = cuda.get_array_module(*x)

        x = Variable(x)
        t = Variable(t)
        h = self.fc(x)
        loss = hinge(h, t, self.penalty)

        if self.penalty == 'L1':
            loss += self.c * F.sum(F.absolute(self.fc.W))

        elif self.penalty == 'L2':
            n = F.matmul(self.fc.W, self.fc.W.T)
            loss += self.c * F.reshape(n, ())

        return loss
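This is the same classifier as the previous example, ported to Chainer v2+: the volatile flag on Variable was removed in favor of the global chainer.config.train switch, and the penalty terms go through F.absolute and F.matmul so that they stay inside the computational graph instead of touching raw parameter arrays.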
Example #11
def vis_rocs():
    data = io.loadmat(
        'D:/WashU/2020SPR/CSE517 Machine Learning/data/data_train_default.mat')
    X = data['X']
    Y = data['Y']
    xTr, xTv, yTr, yTv = valsplit(X, Y)

    MAXITER = 100
    STEPSIZE = 1e-01

    # Ridge regression
    d, n = xTr.shape
    f = lambda w: ridge(w, xTr, yTr, 0.1)
    ws = grdescent(f, np.zeros((d, 1)), STEPSIZE, MAXITER)

    preds = linearmodel(ws, xTv)
    fpr, tpr, sqauc = area_under_roc_curve(yTv, preds)

    plt.plot(fpr, tpr, color="blue", linewidth=2.0, label="ridge")

    # Hinge loss
    d, n = xTr.shape
    f = lambda w: hinge(w, xTr, yTr, 0.1)
    wh = grdescent(f, np.zeros((d, 1)), STEPSIZE, MAXITER)
    preds = linearmodel(wh, xTv)
    fpr, tpr, hinauc = area_under_roc_curve(yTv, preds)

    plt.plot(fpr, tpr, color="green", linewidth=2.0, label="hinge")

    # Logistic regression
    d, n = xTr.shape
    f = lambda w: logistic(w, xTr, yTr)
    wl = grdescent(f, np.zeros((d, 1)), STEPSIZE, MAXITER)
    preds = linearmodel(wl, xTv)
    fpr, tpr, logauc = area_under_roc_curve(yTv, preds)

    plt.plot(fpr, tpr, color="red", linewidth=2.0, label="logistic")
    plt.xlabel('False Positive Rate')
    plt.ylabel('True Positive Rate')
    plt.legend(loc='upper right')

    print("Hinge loss: Area under the curve: %.2f" % hinauc)
    print("Logistic loss: Area under the curve: %.2f" % logauc)
    print("Squared loss: Area under the curve: %.2f" % sqauc)
    plt.show()
    return
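vis_rocs leans on helpers that are not shown here (valsplit, linearmodel, area_under_roc_curve). As a rough, hypothetical stand-in for the last one, assuming labels in {-1, +1} and higher scores meaning "more positive":

import numpy as np

def area_under_roc_curve(labels, preds):
    # Hypothetical stand-in, not the course helper: sort by descending
    # score, trace out the (FPR, TPR) curve, and integrate it.
    y = np.ravel(labels) > 0
    order = np.argsort(-np.ravel(preds))
    y = y[order]
    tpr = np.concatenate(([0.0], np.cumsum(y) / max(y.sum(), 1)))
    fpr = np.concatenate(([0.0], np.cumsum(~y) / max((~y).sum(), 1)))
    return fpr, tpr, np.trapz(tpr, fpr)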