def move_and_save(this_run=i, new_folder=new_folder, old_folder=old_folder, dataset_folder=dataset_dict[dataset], stage_name=stage_name, AD_folder=AD_folder, move_dataset=args.move_dataset):
    """Collect the best trained model plus its logs/plots into the transfer
    folder and ship everything to the CHPC cluster via scp/ssh.

    NOTE(review): the default arguments deliberately snapshot the current
    values of the enclosing scope's variables (i, new_folder, old_folder, ...)
    at definition time -- the usual late-binding fix does not apply here;
    do not convert these into required parameters without checking callers.

    Returns:
        The best model, loaded with hf.load_trained_CNN.
    """
    ### Move dataset over? Only on the very first run, and only if requested.
    if move_dataset == 1 and this_run == 0:
        send_message('Moving dataset over: ' + dataset_folder)
        os.system(
            'scp -r ' + dataset_folder +
            ' [email protected]:/scratch/shenghuahe/datasets/'
        )
    send_message(
        'Moving model files around, in order to ship over to chpc')
    # Locate important files:
    training_nums = old_folder + 'training_nums.out'
    # parse this file:
    train_loss, val_loss, learning_rate = parse_training_nums(
        training_nums)
    # Find the lowest val_loss
    min_index = np.argmin(val_loss)
    min_val_loss = val_loss[min_index]
    # Find the lowest train_loss at the lowest val_loss
    min_val_train_loss = train_loss[min_index]
    # Find lowest overall train_loss
    min_train_loss = np.min(train_loss)
    # NOTE(review): min_val_loss, min_val_train_loss, min_train_loss and
    # learning_rate are computed above but never used in this function --
    # dead code, candidates for removal (the parse call may still serve as an
    # implicit existence check on training_nums.out).
    best_index = 'best'
    ### ? Create visualization of filters for this specific model
    model = hf.load_trained_CNN(version=1,
                                name=best_index + '_simple',
                                folder=old_folder)
    ### Move model file into transfer folder
    model_file_yaml = old_folder + best_index + '_simple.yaml'
    model_file_h5 = old_folder + best_index + '_simple.h5'
    copyfile(model_file_yaml, new_folder + 'best_simple.yaml')
    copyfile(model_file_h5, new_folder + 'best_simple.h5')
    ### Move training_nums file over
    copyfile(training_nums, new_folder + 'training_nums.out')
    ### Move png plot over
    copyfile(old_folder + 'training_plot.png',
             new_folder + 'training_plot.png')
    ### Move .batch file over
    run_file_name = generate_run_file()
    os.system(
        'scp ' + run_file_name +
        ' [email protected]:/scratch/shenghuahe/batch_files')
    ### Move model folder over
    os.system(
        'scp -r ' + new_folder +
        ' [email protected]:/scratch/shenghuahe/models/' + stage_name)
    ### Make directory for the AD files to be transfered into:
    hf.generate_folder(AD_folder)
    ### Create AD folder on CHPC to put files into
    # NOTE(review): assumes AD_folder is a path fragment valid under
    # /scratch/shenghuahe/ on the remote host -- TODO confirm. Commands are
    # built by string concatenation into a shell (os.system); fine for this
    # trusted internal script, but subprocess.run([...]) would be safer.
    os.system(
        'ssh [email protected] "mkdir /scratch/shenghuahe/' + AD_folder + '"')
    return model
# --- Build the network and optimizer from the parsed CLI arguments. ---
# Single-channel square input (grayscale image of args.dimension^2 pixels).
input_shape = (args.dimension, args.dimension, 1)
model = cg.residual_projectionNet2(depth=args.depth,
                                   nb_filters=args.nb_filters,
                                   input_shape=input_shape,
                                   dropout=args.dropout)
if args.num_gpus > 1:
    # presumably replicates the model across GPUs -- see make_parallel;
    # batch size is scaled up so each GPU still sees args.batch_size samples.
    model = make_parallel(model, args.num_gpus)
    args.batch_size = args.batch_size * args.num_gpus
model.name = get_model_name()
if args.pretrained_model is not None:
    # Warm-start: copy weights from a previously trained model of the
    # same architecture (folder='' means the name is a full relative path).
    best = hf.load_trained_CNN(name=args.pretrained_model, folder='')
    model.set_weights(best.get_weights())
print('model name: ' + model.name)
if args.just_return_name:
    # Utility mode: write the generated model name to a fixed temp file for
    # an external caller to read, then exit without training.
    with open('/home/bmkelly/dl-limitedview-prior/tmp.out', 'w') as outfile:
        outfile.write(get_model_name())
    sys.exit()
opt = Adam(lr=args.lr)
#opt = SGD(lr=args.min_lr, momentum=0.9, decay=0.0001, nesterov=True)
# Single output head weighted 1.0 (list form kept for multi-output compiles).
loss_weights = []
loss_weights.append(1.0)
    # Tail of the argument-parsing function (its `def` and earlier
    # add_argument calls are above this view).
    parser.add_argument("--model_folder",type=str,default='trained_v19')
    parser.add_argument("--save_dir",type=str,default='tmp/')
    args = parser.parse_args()
    return args

args = get_args()
name = args.model_folder
#name = 'deartifact_False_0.300_16_1_8_325_False_0.0001_mse_0_100_50_False_0.0_True_False_13_False_5_False_500_0.5_1.0_10000_num_stacks1_march13_linked'
# Provenance of the pretrained weights expected under model_folder:
# mkdir trained_v19/
# cp projection_results5/deartifact_False_0.300_16_1_8_325_False_0.0001_mse_0_100_0_False_0.0_False_False_19_False_0_False_500_0.5_1.0_num_stacks1_equalADFalse_num_normal_training_examples1000_march13_linked_old/best_simple.yaml trained_v19/best_simple.yaml
# cp projection_results5/deartifact_False_0.300_16_1_8_325_False_0.0001_mse_0_100_0_False_0.0_False_False_19_False_0_False_500_0.5_1.0_num_stacks1_equalADFalse_num_normal_training_examples1000_march13_linked_old/best_simple.h5 trained_v19/best_simple.h5
# scp -r [email protected]:/home/bmkelly/dl-limitedview-prior/trained_v19 .

# Load model: read the trained weights, then pour them into a larger-input
# (256x256) instance of the same linked-projection architecture.
model_just_weights = hf.load_trained_CNN(load_weights=True,name=name+'/best_simple',folder='')
model_larger = cg.linked_projection_network(input_shape=(256,256,1),k1=args.f_dim1,k2=args.f_dim2,k3=args.f_dim3,nb_filters=args.nb_filters,num_stacks=args.num_stacks)
# NOTE(review): weight transfer assumes the layer shapes of the larger
# network match the saved ones (fully-convolutional) -- TODO confirm.
model_larger.set_weights(model_just_weights.get_weights())

# Dataset selection: each branch fixes the index range, limited-view angle,
# data directory and system-matrix directory for that dataset version.
if args.dataset == 13:
    max_index=8000
    theta=60
    data_dirname='../xct-parallelbeam-matlab/dataset_v13_60_noRI_scale_nonneg/'
    H_DIRNAME = '../xct-parallelbeam-matlab/system-matrix/'
elif args.dataset==18:
    max_index=9000
    theta=60
    data_dirname='../xct-parallelbeam-matlab/dataset_v17_100_noRI_scale_nonneg_noInvCrime_PoissonNoise/'
    H_DIRNAME = '../xct-parallelbeam-matlab/system-matrix/'
elif args.dataset==19:
                              nb_filters=nb_filters[i], elu=elu[i],
                              longer=args.longer, dropout=args.dropout)
# Encode the full hyper-parameter configuration into the model name; this
# name doubles as the output sub-folder, so every field must stay in order.
model.name = 'deartifact_'+str(lrelu[i])+'_'+'{0:.3f}'.format(alpha[i])+ '_' + str(f_dim1[i]) + \
    '_' + str(f_dim2[i]) + '_' + str(f_dim3[i])+ '_' + str(nb_filters[i])+ \
    '_' + str(elu[i]) + '_' + str(lr[i])+ '_' + args.loss+ '_' + str(args.longer)+ \
    '_' + str(args.nb_epochs) + '_' + str(args.inter_epochs)+'_' + str(args.normalize)+ '_'+ str(args.dropout) + \
    '_' + str(args.final_act)+'_' + str(args.relu) +'_' + str(args.dataset)+ \
    '_' + str(args.normalize_simplistic) + '_' + str(args.num_loop) + \
    '_' + str(args.lower_learning_rate) + '_' + str(args.lr_step) + \
    '_' + str(args.lr_drop_pc) + '_' + str(args.DA_Lr_decrease) + '_' +str(args.num_add)+ '_march8'
print('model name: ' + model.name)
if args.use_previous_best:
    # Resume: load the checkpoint saved at epoch 390 for this same config.
    best = hf.load_trained_CNN(name=model.name + '/390',
                               folder=output_folder)
    model.set_weights(best.get_weights())
import time
# rename files (which will be overwritten) to old/todays
# i.e. rotate the previous run's log and plot aside with a timestamp suffix
# so the new run does not clobber them.
fold = output_folder + model.name + '/'
os.system('mv ' + fold + 'training_nums.out ' + fold + 'training_nums_' +
          time.strftime("%d_%m_%Y_%H_%M_%S") + '.out')
os.system('mv ' + fold + 'training_plot.png ' + fold + 'training_plot_' +
          time.strftime("%d_%m_%Y_%H_%M_%S") + '.png')
# NOTE(review): variable is named `sgd` but holds an Adam optimizer.
sgd = Adam(lr=args.lr)
model.compile(loss=args.loss, optimizer=sgd)

## Load dataset
X_train,X_test,Y_train,Y_test = hf.load_data(version=args.dataset,normalize_projection=args.normalize,\