Example #1
import unittest
import TeaSpoon
import numpy as np
from examples import log_reg

# Build a random dataset and an initial parameter set for the
# logistic-regression example.
X,y  = log_reg.rand_dset()
p0   = log_reg.init(X,y)
# Compile the example's symbolic functions into callables:
# `pred` alone (jac=False), and `loss` together with its gradient
# (jac=True makes compile return a (function, jacobian) pair).
pred = TeaSpoon.compile(log_reg.pred, [p0, X], jac=False)
loss, grad = TeaSpoon.compile(log_reg.loss, [p0, X, y], jac=True)


class testcase(unittest.TestCase):
    """Check that BFGS optimization reduces the logistic-regression loss.

    Inherits from ``unittest.TestCase`` so that ``test_default`` is
    actually collected and run by the unittest runner — the module
    imports ``unittest`` but the original class did not subclass it,
    so the test was silently never executed. The class name is kept
    as ``testcase`` for backward compatibility with existing callers.
    """

    def test_default(self):
        """Run 20 BFGS iterations and assert the loss strictly decreased."""
        # Optimize starting from the initial parameters p0; `res` holds
        # the optimizer's result/diagnostics and is not needed here.
        p1, res = TeaSpoon.optimize(
            fun=loss,
            p0=p0,
            jac=grad,
            callback='default',
            args=(X, y),
            method='BFGS',
            options={'maxiter': 20, 'disp': 0},
        )

        # Evaluate the loss at the initial and the optimized parameters.
        loss_0 = TeaSpoon.exe(loss, [p0, X, y])
        loss_1 = TeaSpoon.exe(loss, [p1, X, y])

        # Use a unittest assertion instead of a bare `assert`: it is not
        # stripped under `python -O` and gives a readable failure message.
        self.assertLess(loss_1, loss_0)

Example #2
    '''
    compute predictions
    '''
    w = pi['w'].value
    b = pi['b'].value
    P = TT.nnet.softmax( T.dot(X, w) + b )
    return TT.argmax(P, 1)


# Debug your Theano code (optional sanity check before compiling).
TS.debug(pred, [p0, X])
TS.debug(loss, [p0, X, y])


# Compile your functions: `pred_T` alone (jac=False), and `loss_T`
# together with its gradient (jac=True returns a pair).
pred_T = TS.compile(pred, [p0, X], jac=False)
loss_T, grad_T = TS.compile(loss, [p0, X, y], jac=True)


# Execution of the compiled functions (optional).
TS.exe(pred_T, [p0, X])
TS.exe(loss_T, [p0, X, y])


# define your own callback function (optional)
def callback(pi):
    Y_hat = TS.exe(pred_T, [pi, X])
    out = {}
    out['ACC']  = np.mean(Y_hat==y)
    out['Loss'] = TS.exe(loss_T, [pi, X,y])
    out['pi']   = 3.14