# numpy and deepcopy imports added; the mlp framework classes (MLP, CECost, Relu,
# Sigmoid, Softmax, LearningRateFixed, SGDOptimiser, MNISTDataProvider) are assumed
# to be imported from the coursework package in an earlier cell.
import numpy
from copy import deepcopy


def base(train_dp, valid_dp, logger, learning_rate):
    # learning_rate = 0.01
    rng = numpy.random.RandomState([2016, 02, 26])
    max_epochs = 1000
    cost = CECost()
    stats = list()

    test_dp = deepcopy(valid_dp)
    train_dp.reset()
    valid_dp.reset()
    test_dp.reset()

    # NETWORK TOPOLOGY:
    model = MLP(cost=cost)
    model.add_layer(Relu(idim=125, odim=125, irange=1.6, rng=rng))
    model.add_layer(Softmax(idim=125, odim=19, rng=rng))

    # define the optimiser, here stochastic gradient descent
    # with fixed learning rate and max_epochs
    lr_scheduler = LearningRateFixed(learning_rate=learning_rate,
                                     max_epochs=max_epochs)
    optimiser = SGDOptimiser(lr_scheduler=lr_scheduler)

    logger.info('Training started...')
    tr_stats_b, valid_stats_b = optimiser.train(model, train_dp, valid_dp)

    logger.info('Testing the model on test set:')
    tst_cost, tst_accuracy = optimiser.validate(model, test_dp)
    logger.info('ACL test set accuracy is %.2f %%, cost (%s) is %.3f' %
                (tst_accuracy * 100., cost.get_name(), tst_cost))
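# Example invocation of base() -- a minimal sketch. It assumes train_dp and
# valid_dp data providers were built in an earlier cell; the logger name
# 'mlp_experiments' is illustrative, not part of the original notebook.
import logging
logging.basicConfig(level=logging.INFO)
logger = logging.getLogger('mlp_experiments')
base(train_dp, valid_dp, logger, learning_rate=0.01)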
print "date: " + str(dt) train_dp.reset() test_dp.reset() valid_dp.reset() rng = numpy.random.RandomState([dt.year, dt.month, dt.day]) # define the model structure, here just one linear layer # and mean square error cost cost = CECost() model = MLP(cost=cost) model.add_layer(ConvRelu_Opt(1, 1, rng=rng, stride=(1, 1))) model.add_layer(Sigmoid(idim=122, odim=122, rng=rng)) model.add_layer(Softmax(idim=122, odim=19, rng=rng)) #one can stack more layers here # print map(lambda x: (x.idim, x.odim), model.layers) lr_scheduler = LearningRateFixed(learning_rate=0.01, max_epochs=500) optimiser = SGDOptimiser(lr_scheduler=lr_scheduler) tr_stats, valid_stats = optimiser.train(model, train_dp, valid_dp) tst_cost, tst_accuracy = optimiser.validate(model, test_dp) seeds.append((tr_stats, valid_stats, (tst_cost, tst_accuracy))) end = time.time() print "scipy.correlate time: " + str(end - start) with open('seeds_conv_fft_feat.pkl', 'wb') as f: p.dump(seeds, f)
learning_rate = 0.07
max_epochs = 30
cost = CECost()
stats = list()

test_dp = deepcopy(valid_dp)
train_dp.reset()
valid_dp.reset()
test_dp.reset()

# define the model (rng is assumed to be created in an earlier cell)
model = MLP(cost=cost)
# model.add_layer(ComplexLinear(idim=125, odim=125, irange=1.6, rng=rng))
# model.add_layer(Sigmoid(idim=2*125, odim=125, irange=1.6, rng=rng))
model.add_layer(Sigmoid(idim=125, odim=125, irange=1.6, rng=rng))
model.add_layer(Softmax(idim=125, odim=19, rng=rng))

# define the optimiser, here stochastic gradient descent
# with fixed learning rate and max_epochs
lr_scheduler = LearningRateFixed(learning_rate=learning_rate,
                                 max_epochs=max_epochs)
optimiser = SGDOptimiser(lr_scheduler=lr_scheduler)

logger.info('Training started...')
tr_stats, valid_stats = optimiser.train(model, train_dp, valid_dp)

logger.info('Testing the model on test set:')
tst_cost, tst_accuracy = optimiser.validate(model, test_dp)
logger.info('MNIST test set accuracy is %.2f %%, cost (%s) is %.3f' %
            (tst_accuracy * 100., cost.get_name(), tst_cost))
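# Hypothetical sketch of plotting the learning curves returned above. It
# assumes each entry of tr_stats / valid_stats is an (error, accuracy) pair
# per epoch; the exact structure may differ in your version of the optimiser.
import matplotlib.pyplot as plt
plt.plot([s[0] for s in tr_stats], label='train cost')
plt.plot([s[0] for s in valid_stats], label='valid cost')
plt.xlabel('epoch')
plt.ylabel('cost')
plt.legend()
plt.show()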
optimiser = SGDOptimiser(lr_scheduler=lr_scheduler)

logger.info('Initialising data providers...')
train_dp = MNISTDataProvider(dset='train', batch_size=250, max_num_batches=-10,
                             randomize=True, conv_reshape=True)
valid_dp = MNISTDataProvider(dset='valid', batch_size=250, max_num_batches=-10,
                             randomize=False, conv_reshape=True)

logger.info('Training started...')
tsk8_2_tr_stats, tsk8_2_valid_stats = optimiser.train(tsk8_2_model, train_dp, valid_dp)

logger.info('Testing the model on test set:')
test_dp = MNISTDataProvider(dset='eval', batch_size=200, max_num_batches=-10,
                            randomize=False, conv_reshape=True)
tsk8_2_cost, tsk8_2_accuracy = optimiser.validate(tsk8_2_model, test_dp)
logger.info('MNIST test set accuracy is %.2f %% (cost is %.3f)' %
            (tsk8_2_accuracy * 100., tsk8_2_cost))

# saving for future use
with open('tsk8_2_model.pkl', 'wb') as f:
    cPickle.dump(tsk8_2_model, f)
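# Reloading the saved model in a later session (sketch; the evaluation data
# provider would need to be rebuilt as above before re-validating).
with open('tsk8_2_model.pkl', 'rb') as f:
    tsk8_2_model = cPickle.load(f)
tsk8_2_cost, tsk8_2_accuracy = optimiser.validate(tsk8_2_model, test_dp)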