Пример #1
0
    return T.maximum(X, 0.)

def dropout(X, p=0.):
    """Apply inverted dropout to the symbolic tensor X.

    With drop probability ``p`` each unit is kept with probability
    ``1 - p`` and the survivors are rescaled by ``1 / (1 - p)`` so the
    expected activation is unchanged at test time.  ``p == 0`` (the
    default) is a no-op.  Relies on the module-level random stream
    ``srng`` and on ``theano`` for the float dtype.
    """
    if p <= 0:
        return X
    keep_prob = 1 - p
    mask = srng.binomial(X.shape, p=keep_prob, dtype=theano.config.floatX)
    X *= mask
    X /= keep_prob
    return X

def softmax(X):
    """Row-wise numerically stable softmax of a 2-D symbolic tensor.

    The per-row maximum is subtracted before exponentiation to avoid
    overflow; the result is unchanged because softmax is shift-invariant.
    """
    shifted = X - X.max(axis=1).dimshuffle(0, 'x')
    exps = T.exp(shifted)
    return exps / exps.sum(axis=1).dimshuffle(0, 'x')

def get_hyper_params(learning_rate=0.10):
    """Return the hyper-parameter dict for the conv net.

    ``kern_params`` lists, in order: three 3x3 conv layers (filter
    counts 8 -> 12 -> 16; the commented-out 32/64/128 of a larger run
    are preserved in spirit here), then a fully connected layer from
    the flattened 16 x 6 x 6 feature maps to 400 hidden units, and a
    final 400 -> 7 output layer.
    """
    filters = (8, 12, 16)  # smaller than the 32/64/128 of the big model
    n_hidden = 400
    n_classes = 7
    kern_params = [
        (filters[0], 1, 3, 3),
        (filters[1], filters[0], 3, 3),
        (filters[2], filters[1], 3, 3),
        (filters[2] * 6 * 6, n_hidden),
        (n_hidden, n_classes),
    ]
    return {'learning_rate': learning_rate, 'kern_params': kern_params}

if __name__ == '__main__':
    # Machine-specific paths: input frame images and output directory
    # for the trained conv-net classifier.
    dataset_path="/home/user/cf/conv_frames/cls/images/"
    # NOTE(review): `dataset` is loaded here but never used below —
    # create_classifer receives the path instead; confirm intent.
    dataset=load.get_images(dataset_path)
    out_path="/home/user/cf/exp1/conv_net"
    # flat=False keeps the 2-D image structure for the conv layers.
    cls=learning.create_classifer(dataset_path,out_path,built_conv_cls,flat=False)
    learning.evaluate_cls(dataset_path,out_path,flat=False)
Пример #2
0
def create_cls_fun(free_vars, model):
    """Compile and return Theano ``(train, test)`` functions.

    ``train`` takes the model inputs, returns the loss, and applies one
    plain gradient-descent step (fixed learning rate 0.15) to ``model.W``
    and ``model.b``.  ``test`` maps raw inputs ``free_vars.X`` to the
    argmax class prediction.
    """
    learning_rate = 0.15
    py_x = get_px_y(free_vars, model)
    loss = get_loss_function(free_vars, py_x)
    input_vars = free_vars.get_vars()

    # One SGD update pair per trainable parameter.
    grad_w = T.grad(cost=loss, wrt=model.W)
    grad_b = T.grad(cost=loss, wrt=model.b)
    update = [
        (model.W, model.W - learning_rate * grad_w),
        (model.b, model.b - learning_rate * grad_b),
    ]

    train = theano.function(
        inputs=input_vars,
        outputs=loss,
        updates=update,
        allow_input_downcast=True,
    )
    y_pred = T.argmax(py_x, axis=1)
    test = theano.function(
        inputs=[free_vars.X],
        outputs=y_pred,
        allow_input_downcast=True,
    )
    return train, test

def get_px_y(free_vars, model):
    """Class probabilities of the logistic model: softmax(X.W + b)."""
    logits = T.dot(free_vars.X, model.W) + model.b
    return T.nnet.softmax(logits)

def get_loss_function(free_vars, py_x):
    """Mean categorical cross-entropy of predictions against ``free_vars.y``."""
    per_example = T.nnet.categorical_crossentropy(py_x, free_vars.y)
    return T.mean(per_example)

if __name__ == "__main__":
    dataset_path="/home/user/cf/conv_frames/cls/images/"
    #cls=built_classifer()
    out_path="/home/user/cf/exp1/logit"
    #ml_tools.evaluate_cls(dataset_path,cls)
    learning.create_classifer(dataset_path,out_path,built_logit_cls)
Пример #3
0
def get_px_y(free_vars, model):
    """Forward pass of the one-hidden-layer net.

    Sigmoid hidden layer followed by a softmax output layer; returns the
    symbolic class-probability matrix.
    """
    hid = model.hidden
    out = model.logistic
    h_act = T.nnet.sigmoid(T.dot(free_vars.X, hid.W) + hid.b)
    return T.nnet.softmax(T.dot(h_act, out.W) + out.b)

def get_loss_function(free_vars, py_x):
    """Average categorical cross-entropy between ``py_x`` and the labels."""
    ce = T.nnet.categorical_crossentropy(py_x, free_vars.y)
    return T.mean(ce)

def sgd(loss, params, learning_rate=0.05):
    """Build vanilla SGD updates: ``param <- param - lr * grad(loss)``.

    Returns a list of ``(param, new_value)`` pairs suitable for the
    ``updates`` argument of ``theano.function``.
    """
    updates = []
    for param in params:
        gparam = T.grad(loss, param)
        updates.append((param, param - learning_rate * gparam))
    return updates

def get_hyper_params(learning_rate=0.05):
    """Hyper-parameters for the fully connected net.

    3200 flattened input features, 800 hidden units, 7 output classes.
    """
    hyper = {
        'learning_rate': learning_rate,
        'n_in': 3200,
        'n_out': 7,
        'n_hidden': 800,
    }
    return hyper

if __name__ == "__main__":
    dataset_path="/home/user/cf/conv_frames/cls/images/"
    dataset=load.get_images(dataset_path)
    out_path="/home/user/cf/exp1/nn"
    cls=learning.create_classifer(dataset_path,out_path,built_nn_cls)
    learning.evaluate_cls(dataset_path,out_path)