Example #1
import numpy as np
import TeaSpoon

# assumes the graph-building functions pred, loss and the data X, y from the enclosing scope
def _callback(pi):
    Y_hat = TeaSpoon.exe(pred, [pi, X])
    out = {}
    out['MSE']  = np.mean((Y_hat - y) ** 2)      # mean squared error of the predictions
    out['Loss'] = TeaSpoon.exe(loss, [pi, X, y])
    opt = {'freq': 10}                           # presumably: report every 10 iterations
    return out, opt
Example #2
import numpy as np
import TeaSpoon as TS   # TS appears to be TeaSpoon under a shorter alias

# assumes the compiled functions pred_T, loss_T and the data X, y from the enclosing scope
def callback(pi):
    Y_hat = TS.exe(pred_T, [pi, X])
    out = {}
    out['ACC']  = np.mean(Y_hat == y)        # classification accuracy
    out['Loss'] = TS.exe(loss_T, [pi, X, y])
    out['pi']   = 3.14                       # any scalar can be logged
    opt = {'freq': 1}                        # presumably: report every iteration
    return out, opt
Example #3
    def test_default(self):

        # optimize with the library's built-in 'default' callback
        p1, res = TeaSpoon.optimize(
            fun=loss,
            p0=p0,
            jac=grad,
            callback='default',
            args=(X, y),
            method='BFGS',
            options={'maxiter': 20, 'disp': 0},
        )

        loss_0 = TeaSpoon.exe(loss, [p0, X, y])
        loss_1 = TeaSpoon.exe(loss, [p1, X, y])

        # the optimized parameters should achieve a strictly lower loss than the initial guess
        assert loss_0 > loss_1
Example #4
    # softmax regression: pick the most probable class
    P = TT.nnet.softmax(TT.dot(X, w) + b)
    return TT.argmax(P, 1)


# debug your theano code (optional)
TS.debug(pred, [p0, X])
TS.debug(loss, [p0, X, y])


# compile your functions
pred_T = TS.compile(pred, [p0, X], jac=False)
loss_T, grad_T = TS.compile(loss, [p0, X, y], jac=True)


# execution of the compiled functions (optional)
TS.exe(pred_T, [p0, X])
TS.exe(loss_T, [p0, X, y])


# define your own callback function (optional)
def callback(pi):
    Y_hat = TS.exe(pred_T, [pi, X])
    out = {}
    out['ACC']  = np.mean(Y_hat == y)
    out['Loss'] = TS.exe(loss_T, [pi, X, y])
    out['pi']   = 3.14
    opt = {'freq':1}
    return out, opt


# call the optimizer: the features and options of scipy.optimize can be used here
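# a minimal sketch of that final call, assuming TS.optimize mirrors the
# TeaSpoon.optimize signature shown in Example #3 and accepts the compiled
# loss/gradient pair returned by TS.compile
p1, res = TS.optimize(
    fun=loss_T,
    p0=p0,
    jac=grad_T,
    callback=callback,                       # the custom callback defined above
    args=(X, y),
    method='BFGS',
    options={'maxiter': 20, 'disp': 0},
)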