return T.maximum(X, 0.) def dropout(X, p=0.): if p > 0: retain_prob = 1 - p X *= srng.binomial(X.shape, p=retain_prob, dtype=theano.config.floatX) X /= retain_prob return X def softmax(X): e_x = T.exp(X - X.max(axis=1).dimshuffle(0, 'x')) return e_x / e_x.sum(axis=1).dimshuffle(0, 'x') def get_hyper_params(learning_rate=0.10): kern_one=8#32 kern_two=12#64 kern_third=16#128 n_hidden=400 kern_params=[(kern_one, 1, 3, 3),(kern_two, kern_one, 3, 3),(kern_third, kern_two, 3, 3), (kern_third * 6 * 6, n_hidden),(n_hidden, 7)] params={'learning_rate': learning_rate, 'kern_params':kern_params} return params if __name__ == '__main__': dataset_path="/home/user/cf/conv_frames/cls/images/" dataset=load.get_images(dataset_path) out_path="/home/user/cf/exp1/conv_net" cls=learning.create_classifer(dataset_path,out_path,built_conv_cls,flat=False) learning.evaluate_cls(dataset_path,out_path,flat=False)
def get_px_y(free_vars, model):
    """Symbolic forward pass: P(y | x) for a one-hidden-layer sigmoid MLP.

    Reads the hidden and logistic layers' W/b from `model` and builds the
    Theano graph input from `free_vars.X`.
    """
    hidden = model.hidden
    output_layer = model.logistic
    h = T.nnet.sigmoid(T.dot(free_vars.X, hidden.W) + hidden.b)
    pyx = T.nnet.softmax(T.dot(h, output_layer.W) + output_layer.b)
    return pyx


def get_loss_function(free_vars, py_x):
    """Mean categorical cross-entropy between predictions and `free_vars.y`."""
    return T.mean(T.nnet.categorical_crossentropy(py_x, free_vars.y))


def sgd(loss, params, learning_rate=0.05):
    """Plain SGD update list: param <- param - learning_rate * d(loss)/d(param)."""
    gparams = [T.grad(loss, param) for param in params]
    updates = [(param, param - learning_rate * gparam)
               for param, gparam in zip(params, gparams)]
    return updates


def get_hyper_params(learning_rate=0.05):
    """Return the hyper-parameter dict for the fully-connected network."""
    params = {'learning_rate': learning_rate,
              'n_in': 3200, 'n_out': 7, 'n_hidden': 800}
    return params


if __name__ == "__main__":
    dataset_path = "/home/user/cf/conv_frames/cls/images/"
    # NOTE(review): `dataset` is loaded but never used below — confirm whether
    # load.get_images has a needed side effect or this call can be dropped.
    dataset = load.get_images(dataset_path)
    out_path = "/home/user/cf/exp1/nn"
    cls = learning.create_classifer(dataset_path, out_path, built_nn_cls)
    learning.evaluate_cls(dataset_path, out_path)