def main(dataset_name, save_dir, cfg):
    """Load a dataset, build a model, train it, and run a final validation.

    Args:
        dataset_name: name of the dataset directory under the data root.
        save_dir: directory that receives the loss plots.
        cfg: configuration object passed through to model/training code.

    Returns:
        The minimum training loss observed over training.
    """
    # NOTE(review): a later `def main` in this file re-binds the name and
    # shadows this definition — confirm which entry point is intended.
    ## Data
    dataset = Dataset(os.path.join('/scail/data/group/atlas/kalpit/data',
                                   dataset_name))
    ## Model
    print('Creating Model...')
    model = get_model(dataset_name, cfg)
    #model.summary()
    ## Train
    print('Training Model...')
    t0 = time.time()
    batch_loss, batch_acc, train_loss, val_loss, val_acc = train(model,
                                                                 dataset, cfg)
    t1 = time.time()
    # Persist one plot per tracked cost curve (title == filename).
    for curve, tag in ((train_loss, 'training_cost'),
                       (val_loss, 'validation_cost')):
        plot_loss(curve, save_dir, tag, tag)
    ## Validate
    print('')
    print('Final Validation...')
    validate(model, dataset)
    ## Training Time
    print('Training Time: {:.2f}'.format(t1 - t0))
    return min(train_loss)
def main(dataset_name, network, save_dir, cfg):
    """Train and validate one of three network types on a dataset.

    Args:
        dataset_name: dataset directory name under the data root
            (e.g. 'mnist', 'cifar10' per the conv-reshape comment below).
        network: one of 'ff', 'conv', 'autoencoder'.
        save_dir: directory handed to the training routines for outputs.
        cfg: configuration object; `cfg.optimizer` selects 'kalpit' vs the
            vanilla trainer, and for 'conv' supplies input dimensions.

    Returns:
        The minimum training loss observed over training.

    Raises:
        NotImplementedError: if `network` is not a supported type.
    """
    # Fail fast on an unsupported network type — the original only raised
    # after the dataset was loaded and the model built, wasting that work.
    if network not in ('ff', 'conv', 'autoencoder'):
        raise NotImplementedError('unsupported network type: {}'.format(network))

    ## Data
    data_dir = os.path.join('/scail/data/group/atlas/kalpit/data', dataset_name)
    dataset = Dataset(data_dir)

    ## Model
    print('Creating Model...')
    model = get_model(dataset_name + '_' + network, cfg)
    #model.summary()

    ## Train
    print('Training Model...')
    starttime = time.time()
    if network == 'ff':
        if cfg.optimizer == 'kalpit':
            train_loss, val_loss, val_acc = train_ff_kalpit(model, dataset, cfg, save_dir)
        else:
            train_loss, val_loss, val_acc = train_ff_vanilla(model, dataset, cfg, save_dir)
    elif network == 'conv':
        # flat vectors -> HxWxC images; applies to both mnist and cifar10
        dataset.data_reshape((cfg.input_height, cfg.input_width, cfg.input_nchannels))
        if cfg.optimizer == 'kalpit':
            train_loss, val_loss, val_acc = train_conv_kalpit(model, dataset, cfg, save_dir)
        else:
            train_loss, val_loss, val_acc = train_conv_vanilla(model, dataset, cfg, save_dir)
    else:
        # autoencoder: no accuracy metric, only losses
        if cfg.optimizer == 'kalpit':
            train_loss, val_loss = train_autoencoder_kalpit(model, dataset, cfg, save_dir)
        else:
            train_loss, val_loss = train_autoencoder_vanilla(model, dataset, cfg, save_dir)
    endtime = time.time()
    #plot_loss(train_loss, save_dir, 'training_cost', 'training_cost')
    #plot_loss(val_loss, save_dir, 'validation_cost', 'validation_cost')

    ## Validate
    print('')
    print('Final Validation...')
    if network == 'ff':
        validate_ff(model, dataset)
    elif network == 'conv':
        validate_conv(model, dataset)
    else:
        validate_autoencoder(model, dataset)

    ## Training Time
    print('Training Time: {:.2f}'.format(endtime - starttime))
    return min(train_loss)
# Final-run driver fragment. The names `final_run`, `run_id`, `save_dir`,
# and `dataset_name` are defined elsewhere (not visible in this chunk).
# NOTE(review): original line breaks/indentation were lost; this layout
# assumes only the stdout redirect is conditional on `final_run` — confirm.
if final_run:
    # Redirect all subsequent prints into <save_dir>/stdout for the record.
    sys.stdout = open(os.path.join(save_dir, 'stdout'), 'w')
print run_id
print 'testing'
## Data
data_dir = os.path.join('/scail/data/group/atlas/kalpit/data', dataset_name)
dataset = Dataset(data_dir)
## Config
cfg = Config(save_dir)
## Model
print 'Creating Model...'
print 'DROPOUT NOT IMPLEMENTED CORRECTLY FOR VALIDATION!!!'
model = get_model(dataset_name, cfg)
#model.summary()
## Train
print 'Training Model...'
starttime = time.time()
train_loss_batch, train_acc_batch, train_loss, val_loss, val_acc = train(model, dataset, cfg)
endtime = time.time()
plot_loss(train_loss, save_dir, 'training_cost', 'training_cost')
plot_loss(val_loss, save_dir, 'validation_cost', 'validation_cost')
## Validate
print ''
print 'Final Validation...'
# NOTE(review): `endtime` is unused within this visible fragment — presumably
# a training-time report follows past the end of this chunk; verify.
validate(model, dataset)