def train_classifer_main():
    """Train a softmax classifier on top of a previously trained SNN.

    Loads MNIST from `data_path`, unpickles the SNN saved by
    train_snn_main() under ./models/train1/snn_autonet.save, trains a
    softmax_classifier against it via train_classifer(), and pickles the
    trained classifier under ./models/classifer/.

    NOTE(review): 'classifer' is the historical (misspelled) directory and
    helper-function name used throughout this file; kept for compatibility.

    Side effects: reads the dataset and SNN pickle from disk, creates
    output directories, writes the classifier pickle, prints diagnostics.
    """
    np.random.seed(11)  # fixed seed for reproducible training
    data_path = '/data3/deepak_interns/vikram/vikram/mnist/'
    batch_size = 4
    model_save_path = './models'

    print('Loading Dataset')
    mnist = mnist_data_set(data_path, batch_size)
    print('done loading')
    datasets = mnist.data_sets

    # Diagnostics: dtype, value ranges and shapes of both splits.
    # Single-argument print(...) / %-formatting prints identically under
    # Python 2 (statement) and Python 3 (function).
    print(type(datasets['train'].X))
    print('%s %s' % (np.max(datasets['train'].X), np.min(datasets['train'].X)))
    print('%s %s' % (np.max(datasets['test'].X), np.min(datasets['test'].X)))
    print('%s %s' % (datasets['train'].X.shape, datasets['train'].Y.shape))
    print('%s %s' % (datasets['test'].X.shape, datasets['test'].Y.shape))

    # Per-mini-batch input shape: replace the dataset-length leading axis.
    data_shape = datasets['train'].X.shape
    data_shape = (batch_size,) + data_shape[1:]
    print('Data shape: %s' % (data_shape,))

    # Load the pre-trained SNN; `with` guarantees the handle is closed
    # even if unpickling raises.
    path = os.path.join(model_save_path, 'train1')
    print('loading snn')
    with open(os.path.join(path, 'snn_autonet' + '.save'), 'rb') as f:
        snn_loaded_object = cPickle.load(f)
    print('Done')

    # Output directory tree for the classifier. makedirs() on the leaf
    # creates the parent too, so a missing 'snapshots' dir is also
    # repaired when the parent already exists (the original skipped it).
    path = os.path.join(model_save_path, 'classifer')
    snapshots_dir = os.path.join(path, 'snapshots')
    if not os.path.exists(snapshots_dir):
        os.makedirs(snapshots_dir)

    print('creating classifer')
    classifier = softmax_classifier()

    print('classifier TRAINING ...')
    train_classifer(classifier, datasets, path, snn_loaded_object)
    print('completed training classifer!')

    print('saving classifier...')
    # Theano graphs pickle as deeply nested object trees; raise the
    # recursion limit so cPickle does not overflow the interpreter stack.
    sys.setrecursionlimit(50000)
    with open(os.path.join(path, classifier.name + '.save'), 'wb') as f:
        cPickle.dump(classifier, f, protocol=cPickle.HIGHEST_PROTOCOL)
    print('Done')
def train_snn_main():
    """Train the SNN on MNIST from scratch and pickle the result.

    Loads MNIST from `data_path`, builds a fresh `snn` sized for one
    mini-batch, trains it with train_snn(), and saves the network as
    ./models/train1/<network.name>.save.

    Side effects: reads the dataset from disk, creates the output
    directory, writes the network pickle, prints diagnostics.
    """
    np.random.seed(64)  # fixed seed for reproducible training
    data_path = '/data3/deepak_interns/vikram/vikram/mnist/'
    batch_size = 4
    model_save_path = './models'

    print('Loading Dataset')
    mnist = mnist_data_set(data_path, batch_size)
    print('done loading')
    datasets = mnist.data_sets

    # Diagnostics: dtype, value range and shape of the training split.
    # Single-argument print(...) / %-formatting prints identically under
    # Python 2 (statement) and Python 3 (function).
    print(type(datasets['train'].X))
    print('%s %s' % (np.max(datasets['train'].X), np.min(datasets['train'].X)))
    print(datasets['train'].X.shape)

    # Per-mini-batch input shape: replace the dataset-length leading axis.
    data_shape = datasets['train'].X.shape
    data_shape = (batch_size,) + data_shape[1:]
    print('Data shape: %s' % (data_shape,))

    path = os.path.join(model_save_path, 'train1')
    if not os.path.exists(path):
        os.makedirs(path)

    print('Creating snn')
    network = snn(data_shape)
    # Sanity check: the final layer's weights must be initialized in [0, 1].
    assert np.amin(network.full_net_layers[-1].W.eval()) >= 0
    assert np.amax(network.full_net_layers[-1].W.eval()) <= 1

    print('SNN TRAINING ...')
    train_snn(network, datasets, path)
    print('completed training snn !')

    print('saving trained snn...')
    # Theano graphs pickle as deeply nested object trees; raise the
    # recursion limit so cPickle does not overflow the interpreter stack.
    sys.setrecursionlimit(50000)
    # `with` guarantees the handle is closed even if pickling raises.
    with open(os.path.join(path, network.name + '.save'), 'wb') as f:
        cPickle.dump(network, f, protocol=cPickle.HIGHEST_PROTOCOL)
    print('Done')
# ---------------------------------------------------------------------------
# Top-level script section: reload a previously trained SNN and the MNIST
# dataset, and print shape/range diagnostics.
# NOTE(review): relies on `model_save_path` being defined earlier in the
# module (not visible in this chunk) — confirm against the full file.
# ---------------------------------------------------------------------------
path = os.path.join(model_save_path, 'train1')
print 'loading snn'
# Unpickle the SNN saved by train_snn_main().
f = open(os.path.join(path, 'snn_autonet' + '.save'), 'rb')
network=cPickle.load(f)
f.close()
print('Done')
batch_size = 4
#snn_loaded_object.all_layers[-1].stdp_enabled=False
data_path = '/data3/deepak_interns/vikram/vikram/mnist/'
print 'Loading Dataset'
mnist = mnist_data_set(data_path, batch_size)
print 'done loading'
datasets = mnist.data_sets
# Diagnostics: dtype and value range of the training split.
print type(datasets['train'].X)
print np.max(datasets['train'].X), np.min(datasets['train'].X)
# Per-mini-batch input shape: replace the dataset-length leading axis.
data_shape = datasets['train'].X.shape
data_shape = (batch_size, ) + data_shape[1: ]
print 'Data shape:', data_shape

def dog_output(input_image):
    # NOTE(review): this definition continues beyond the end of this chunk;
    # only the unpacking of the 4-D input shape (batch, channels, height,
    # width — presumably NCHW, confirm in the full file) is visible here.
    _,channels,height,width=input_image.shape