def base(train_dp, valid_dp, logger, learning_rate):
    """Train and evaluate a baseline Relu+Softmax MLP with fixed-rate SGD.

    Parameters
    ----------
    train_dp, valid_dp : data providers used for training/validation; a deep
        copy of ``valid_dp`` serves as the held-out test set.
    logger : logger receiving progress and result messages.
    learning_rate : fixed SGD learning rate (callers have used e.g. 0.01).
    """
    # Seed from a fixed date so runs are reproducible.
    # NOTE: the original wrote the month as ``02`` — a leading-zero octal-style
    # literal that is a SyntaxError under Python 3; ``2`` is the same value.
    rng = numpy.random.RandomState([2016, 2, 26])
    max_epochs = 1000
    cost = CECost()

    # Evaluate on an independent copy of the validation provider, and rewind
    # all providers before training.
    test_dp = deepcopy(valid_dp)
    train_dp.reset()
    valid_dp.reset()
    test_dp.reset()

    # NETWORK TOPOLOGY: one Relu hidden layer over 125-dim features,
    # 19-way softmax output.
    model = MLP(cost=cost)
    model.add_layer(Relu(idim=125, odim=125, irange=1.6, rng=rng))
    model.add_layer(Softmax(idim=125, odim=19, rng=rng))

    # Optimiser: stochastic gradient descent with a fixed learning rate,
    # max_epochs as the stopping criterion.
    lr_scheduler = LearningRateFixed(
        learning_rate=learning_rate, max_epochs=max_epochs)
    optimiser = SGDOptimiser(lr_scheduler=lr_scheduler)

    logger.info('Training started...')
    tr_stats_b, valid_stats_b = optimiser.train(model, train_dp, valid_dp)

    logger.info('Testing the model on test set:')
    tst_cost, tst_accuracy = optimiser.validate(model, test_dp)
    logger.info('ACL test set accuracy is %.2f %%, cost (%s) is %.3f'
                % (tst_accuracy*100., cost.get_name(), tst_cost))
# Train a conv (FFT-feature) model for the current seed date and record stats.
# NOTE(review): relies on dt, train_dp/valid_dp/test_dp, seeds, start and the
# pickle module alias ``p`` being defined by earlier parts of this file.
# Single-argument print(...) calls print identically under Python 2 (the
# argument is just a parenthesised expression) and are valid Python 3,
# unlike the original ``print "..."`` statement form.
print("date: " + str(dt))

train_dp.reset()
test_dp.reset()
valid_dp.reset()

# Seed the RNG from the run date so each recorded seed entry is reproducible.
rng = numpy.random.RandomState([dt.year, dt.month, dt.day])

# define the model structure, here a conv layer followed by a sigmoid
# hidden layer and a softmax output, with cross-entropy cost
cost = CECost()
model = MLP(cost=cost)
model.add_layer(ConvRelu_Opt(1, 1, rng=rng, stride=(1, 1)))
model.add_layer(Sigmoid(idim=122, odim=122, rng=rng))
model.add_layer(Softmax(idim=122, odim=19, rng=rng))
# one can stack more layers here
# print(map(lambda x: (x.idim, x.odim), model.layers))

lr_scheduler = LearningRateFixed(learning_rate=0.01, max_epochs=500)
optimiser = SGDOptimiser(lr_scheduler=lr_scheduler)

tr_stats, valid_stats = optimiser.train(model, train_dp, valid_dp)
tst_cost, tst_accuracy = optimiser.validate(model, test_dp)

# Accumulate per-seed results: (training stats, validation stats, test result).
seeds.append((tr_stats, valid_stats, (tst_cost, tst_accuracy)))

end = time.time()
print("scipy.correlate time: " + str(end - start))

# Checkpoint all collected results after every seed.
with open('seeds_conv_fft_feat.pkl', 'wb') as f:
    p.dump(seeds, f)
# NOTE(review): this fragment is truncated at BOTH ends by extraction -- it
# opens with the tail ``rng=rng))`` of a preceding add_layer(...) call and
# closes inside the final optimiser.train(...) call. Code tokens are
# preserved exactly as found; only comments have been added.
    rng=rng))
# Conv feature extractor: 5 feature maps with 24x24 kernels, followed by
# 2x2 max-pooling at stride 2 (halves each spatial dimension to 12x12).
tsk8_2_model.add_layer(
    ConvMaxPool2D(num_feat_maps=5, conv_shape=(24, 24), pool_shape=(2, 2),
                  pool_stride=(2, 2)))
#idim, odim,
# Fully-connected Relu over the flattened conv output (5 maps * 12 * 12),
# then a 10-way softmax (MNIST digit classes).
tsk8_2_model.add_layer(Relu(idim=5 * 12 * 12, odim=80, rng=rng))
tsk8_2_model.add_layer(Softmax(idim=80, odim=10, rng=rng))
#one can stack more layers here

# define the optimiser, here stochastic gradient descent
# with fixed learning rate and max_epochs as stopping criterion
lr_scheduler = LearningRateFixed(learning_rate=0.01, max_epochs=30)
optimiser = SGDOptimiser(lr_scheduler=lr_scheduler)

logger.info('Initialising data providers...')
# NOTE(review): max_num_batches=-10 is presumably a sentinel meaning "use all
# available batches" -- confirm against MNISTDataProvider's implementation.
# conv_reshape=True keeps images in 2-D image form for the conv layer.
train_dp = MNISTDataProvider(dset='train', batch_size=250, max_num_batches=-10,
                             randomize=True, conv_reshape=True)
valid_dp = MNISTDataProvider(dset='valid', batch_size=250, max_num_batches=-10,
                             randomize=False, conv_reshape=True)

logger.info('Training started...')
# (call truncated here by the extraction)
tsk8_2_tr_stats, tsk8_2_valid_stats = optimiser.train(tsk8_2_model, train_dp,
# Train a one-hidden-layer Sigmoid baseline on the 125-dim features with a
# 19-way softmax output, then report test-set cost and accuracy.
# NOTE(review): relies on train_dp, valid_dp, rng and logger being defined by
# earlier parts of this file.
learning_rate = 0.07
max_epochs = 30
cost = CECost()  # cross-entropy cost
stats = list()

# Hold-out test set: an independent copy of the validation provider; rewind
# all providers before training.
test_dp = deepcopy(valid_dp)
train_dp.reset()
valid_dp.reset()
test_dp.reset()

#define the model
model = MLP(cost=cost)
# Earlier complex-valued variants, kept commented out for reference:
#model.add_layer(ComplexLinear(idim=125, odim=125, irange=1.6, rng=rng))
#model.add_layer(Sigmoid(idim=2*125, odim=125, irange=1.6, rng=rng))
model.add_layer(Sigmoid(idim=125, odim=125, irange=1.6, rng=rng))
model.add_layer(Softmax(idim=125, odim=19, rng=rng))

# define the optimiser, here stochastic gradient descent
# with fixed learning rate and max_epochs
lr_scheduler = LearningRateFixed(learning_rate=learning_rate, max_epochs=max_epochs)
optimiser = SGDOptimiser(lr_scheduler=lr_scheduler)

logger.info('Training started...')
tr_stats, valid_stats = optimiser.train(model, train_dp, valid_dp)

logger.info('Testing the model on test set:')
tst_cost, tst_accuracy = optimiser.validate(model,test_dp )
logger.info('MNIST test set accuracy is %.2f %%, cost (%s) is %.3f'%(tst_accuracy*100., cost.get_name(), tst_cost))
# L1-regularisation sweep: train the one-hidden-layer model with a range of
# L1 penalty weights and collect the resulting statistics.
from support import *
from mlp.schedulers import LearningRateFixed
from mlp.optimisers import SGDOptimiser

LearningRate = 0.7
Num_epoches = 30
l1_weights = [10, 1, 0.1, 0.01, 0.001]


def _l1_job(l1_w):
    # One training job: a fresh model plus fixed-rate SGD carrying the
    # given L1 penalty weight; each job gets its own scheduler instance.
    return {
        "model": create_one_hid_model(),
        "label": "l1_w={}".format(l1_w),
        "optimiser": SGDOptimiser(LearningRateFixed(LearningRate, Num_epoches),
                                  l1_weight=l1_w),
    }


tsk2_1_jobs = [_l1_job(w) for w in l1_weights]
tsk2_1_stats = get_models_statistics(tsk2_1_jobs)