def under_bed():
    split_num = 0
    train_file = '../data/ck_96/train_test_files/train_' + str(split_num) + '.txt'
    test_file = '../data/ck_96/train_test_files/test_' + str(split_num) + '.txt'
    mean_file = '../data/ck_96/train_test_files/train_' + str(split_num) + '_mean.png'
    std_file = '../data/ck_96/train_test_files/train_' + str(split_num) + '_std.png'

    train_file_curr = util.readLinesFromFile(train_file)[0]
    mean = scipy.misc.imread(mean_file).astype(np.float32)
    std = scipy.misc.imread(std_file).astype(np.float32)
    std[std == 0] = 1.

    # annotation lines look like '<image path> <label>'
    train_file_curr, label = train_file_curr.split(' ')
    label = int(label)
    image = scipy.misc.imread(train_file_curr).astype(np.float32)

    out_dir = '../scratch/check_ck_aug'
    util.mkdir(out_dir)

    # save the image before augmentation for side-by-side comparison
    out_file_bef = os.path.join(out_dir, 'im.jpg')
    scipy.misc.imsave(out_file_bef, image)

    list_of_to_dos = ['pixel_augment']
    out_file_aft = os.path.join(out_dir, 'im_' + '_'.join(list_of_to_dos) + '.jpg')

    data_transforms = {}
    data_transforms['train'] = transforms.Compose([
        lambda x: augment_image(x, list_of_to_dos, mean_im=mean, std_im=std),
        transforms.ToTensor(),
    ])

    train_data = dataset.CK_96_Dataset(train_file, mean_file, std_file,
                                       data_transforms['train'])
    train_dataloader = torch.utils.data.DataLoader(train_data, batch_size=1,
                                                   shuffle=False, num_workers=0)

    # pull one batch, sanity-check ranges and shapes, then save the augmented image
    for batch in train_dataloader:
        print batch.keys()
        print torch.min(batch['image']), torch.max(batch['image'])
        print batch['label'].shape
        image = batch['image'][0].numpy()
        print image.shape
        break

    scipy.misc.imsave(out_file_aft, image[0])
    visualize.writeHTMLForFolder(out_dir)
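# For reference, a minimal sketch of the dataset interface that under_bed()
# exercises. This is an assumption pieced together from usage above (annotation
# lines look like 'path label'; batches are dicts with 'image' and 'label';
# mean/std images are used for normalization). The real implementation is
# dataset.CK_96_Dataset, and its exact normalization order may differ.
class CK_96_Dataset_sketch(torch.utils.data.Dataset):
    def __init__(self, anno_file, mean_file, std_file, transform):
        self.lines = util.readLinesFromFile(anno_file)
        self.mean = scipy.misc.imread(mean_file).astype(np.float32)
        self.std = scipy.misc.imread(std_file).astype(np.float32)
        self.std[self.std == 0] = 1.
        self.transform = transform

    def __len__(self):
        return len(self.lines)

    def __getitem__(self, idx):
        im_path, label = self.lines[idx].split(' ')
        image = scipy.misc.imread(im_path).astype(np.float32)
        # assumed normalization; the transform pipeline may also do this
        image = (image - self.mean) / self.std
        return {'image': self.transform(image), 'label': int(label)}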
def get_set_up(model_name, route_iter, pre_pend, strs_append, split_num, model_num,
               train_pre=None, test_file=None, au=False):
    out_dir_meta = os.path.join('../experiments', model_name + str(route_iter))
    out_dir_train = os.path.join(out_dir_meta, pre_pend + str(split_num) + strs_append)

    if train_pre is None:
        train_pre = os.path.join('../data/ck_96', 'train_test_files')

    if test_file is None:
        test_file = os.path.join(train_pre, 'test_' + str(split_num) + '.txt')
    else:
        test_file = os.path.join(train_pre, test_file)

    mean_file = os.path.join(train_pre, 'train_' + str(split_num) + '_mean.png')
    std_file = os.path.join(train_pre, 'train_' + str(split_num) + '_std.png')

    data_transforms = {}
    data_transforms['val'] = transforms.Compose([
        transforms.ToTensor(),
        lambda x: x * 255.
    ])

    if au:
        test_data = dataset.Bp4d_Dataset_Mean_Std_Im(test_file, mean_file, std_file,
                                                     resize=96,
                                                     transform=data_transforms['val'],
                                                     binarize=True)
        criterion = 'marginmulti'
    else:
        test_data = dataset.CK_96_Dataset(test_file, mean_file, std_file,
                                          data_transforms['val'])
        criterion = 'margin'

    test_params = dict(out_dir_train=out_dir_train,
                       model_num=model_num,
                       train_data=None,
                       test_data=test_data,
                       gpu_id=0,
                       model_name=model_name,
                       batch_size_val=32,
                       criterion=criterion,
                       au=au)
    # num_iter = save_visualizations.save_routings(**test_params)

    out_file_results = os.path.join(out_dir_train,
                                    'save_routings_single_batch_' + str(model_num))
    print out_file_results
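# Hypothetical example of wiring up get_set_up, reusing the experiment
# identifiers from script_visualizing_primary_caps() below; the checkpoint
# (model_num) and split should be whatever actually exists on disk.
def _demo_get_set_up():
    get_set_up(model_name='khorrami_capsule_7_3_bigclass',
               route_iter=3,
               pre_pend='ck_96_train_test_files_',
               strs_append='_reconstruct_True_True_all_aug_margin_False_wdecay_0_600_exp_0.96_350_1e-06_0.001_0.001_0.001',
               split_num=4,
               model_num=599,
               au=False)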
def script_visualizing_primary_caps():
    model_name = 'khorrami_capsule_7_3_bigclass'
    route_iter = 3
    pre_pend = 'ck_96_train_test_files_'
    strs_append = '_reconstruct_True_True_all_aug_margin_False_wdecay_0_600_exp_0.96_350_1e-06_0.001_0.001_0.001'
    model_num = 599
    split_num = 4

    out_dir_meta = os.path.join('../experiments', model_name + str(route_iter))
    out_dir_train = os.path.join(out_dir_meta, pre_pend + str(split_num) + strs_append)

    # visualize primary caps on the training split
    train_pre = os.path.join('../data/ck_96', 'train_test_files')
    test_file = os.path.join(train_pre, 'train_' + str(split_num) + '.txt')
    mean_file = os.path.join(train_pre, 'train_' + str(split_num) + '_mean.png')
    std_file = os.path.join(train_pre, 'train_' + str(split_num) + '_std.png')

    data_transforms = {}
    data_transforms['val'] = transforms.Compose([
        transforms.ToTensor(),
        lambda x: x * 255.
    ])
    test_data = dataset.CK_96_Dataset(test_file, mean_file, std_file,
                                      data_transforms['val'])

    au = False
    class_rel = 0
    criterion = 'margin'
    test_params = dict(out_dir_train=out_dir_train,
                       model_num=model_num,
                       train_data=None,
                       test_data=test_data,
                       gpu_id=0,
                       model_name=model_name,
                       batch_size_val=128,
                       criterion=criterion,
                       au=au,
                       class_rel=class_rel)
    save_visualizations.save_primary_caps(**test_params)
def khorrami_full_exp():
    for split_num in range(0, 1):
        out_dir_meta = '../experiments/khorrami_full_capsule/'
        num_epochs = 100
        epoch_start = 0
        dec_after = ['exp', 0.96, 200, 1e-6]
        lr = [0.001]
        pool_type = 'max'
        im_size = 96
        model_name = 'khorrami_full_capsule'
        save_after = 50
        model_file = None

        strs_append = '_'.join([str(val) for val in
                                ['justflip', pool_type, num_epochs] + dec_after + lr])
        out_dir_train = os.path.join(out_dir_meta,
                                     'ck_' + str(split_num) + '_' + strs_append)
        print out_dir_train

        train_file = '../data/ck_96/train_test_files/train_' + str(split_num) + '.txt'
        test_file = '../data/ck_96/train_test_files/test_' + str(split_num) + '.txt'
        mean_file = '../data/ck_96/train_test_files/train_' + str(split_num) + '_mean.png'
        std_file = '../data/ck_96/train_test_files/train_' + str(split_num) + '_std.png'

        mean_im = scipy.misc.imread(mean_file).astype(np.float32)
        std_im = scipy.misc.imread(std_file).astype(np.float32)
        std_im[std_im == 0] = 1.

        list_of_to_dos = ['flip']  # optionally 'rotate', 'scale_translate', 'pixel_augment'

        data_transforms = {}
        data_transforms['train'] = transforms.Compose([
            lambda x: augmenters.augment_image(x, list_of_to_dos, mean_im, std_im, im_size),
            transforms.ToTensor(),
            lambda x: x * 255.
        ])
        data_transforms['val'] = transforms.Compose([
            transforms.ToTensor(),
            lambda x: x * 255.
        ])

        train_data = dataset.CK_96_Dataset(train_file, mean_file, std_file,
                                           data_transforms['train'])
        test_data = dataset.CK_96_Dataset(test_file, mean_file, std_file,
                                          data_transforms['val'])

        network_params = dict(n_classes=8,
                              conv_layers=[[64, 5, 2]],
                              caps_layers=[[16, 8, 5, 2], [32, 8, 7, 3], [8, 16, 5, 1]],
                              r=3,
                              init=False)
        batch_size = 32
        batch_size_val = 4

        util.makedirs(out_dir_train)
        train_params = dict(out_dir_train=out_dir_train,
                            train_data=train_data,
                            test_data=test_data,
                            batch_size=batch_size,
                            batch_size_val=batch_size_val,
                            num_epochs=num_epochs,
                            save_after=save_after,
                            disp_after=1,
                            plot_after=10,
                            test_after=num_epochs - 1,
                            lr=lr,
                            dec_after=dec_after,
                            model_name=model_name,
                            criterion='margin',
                            gpu_id=2,
                            num_workers=0,
                            model_file=model_file,
                            epoch_start=epoch_start,
                            network_params=network_params)
        print train_params

        param_file = os.path.join(out_dir_train, 'params.txt')
        all_lines = []
        for k in train_params.keys():
            str_print = '%s: %s' % (k, train_params[k])
            print str_print
            all_lines.append(str_print)
        util.writeFile(param_file, all_lines)

        train_model(**train_params)
def main():
    out_dir_meta = '../experiments/bl_khorrami_ck_96_nobn_pixel_augment_255_range_trans_fix'
    range_splits = range(6, 10)
    print range_splits
    util.mkdir(out_dir_meta)
    all_accuracy = []

    for split_num in range_splits:
        train_file = '../data/ck_96/train_test_files/train_' + str(split_num) + '.txt'
        test_file = '../data/ck_96/train_test_files/test_' + str(split_num) + '.txt'
        mean_file = '../data/ck_96/train_test_files/train_' + str(split_num) + '_mean.png'
        std_file = '../data/ck_96/train_test_files/train_' + str(split_num) + '_std.png'

        list_of_to_dos = ['flip', 'rotate', 'scale_translate', 'pixel_augment']
        mean_im = scipy.misc.imread(mean_file).astype(np.float32)
        std_im = scipy.misc.imread(std_file).astype(np.float32)

        batch_size = 128
        batch_size_val = None
        num_epochs = 500
        save_after = 100
        disp_after = 1
        plot_after = 10
        test_after = 1
        lr = [0.001, 0.001]
        dec_after = 300
        model_name = 'khorrami_ck_96'
        criterion = nn.CrossEntropyLoss()
        gpu_id = 0
        num_workers = 2
        model_num = num_epochs - 1

        lr_dir_train = [0.01, 0.01]
        strs_append = '_'.join([str(val) for val in
                                [num_epochs, dec_after, lr_dir_train[0], lr_dir_train[1]]])
        out_dir_train = os.path.join(out_dir_meta,
                                     'split_' + str(split_num) + '_' + strs_append)
        print out_dir_train

        # resume from the epoch-399 checkpoint of the earlier 400-epoch run
        epoch_start = 401
        strs_append = '_'.join([str(val) for val in
                                [400, 300, lr_dir_train[0], lr_dir_train[1]]])
        out_dir_res = os.path.join(out_dir_meta,
                                   'split_' + str(split_num) + '_' + strs_append)
        model_file = os.path.join(out_dir_res, 'model_399.pt')

        util.mkdir(out_dir_train)

        data_transforms = {}
        data_transforms['train'] = transforms.Compose([
            lambda x: augment_image(x, list_of_to_dos, mean_im=mean_im, std_im=std_im),
            transforms.ToTensor(),
            lambda x: x * 255.
        ])
        data_transforms['val'] = transforms.Compose([
            transforms.ToTensor(),
            lambda x: x * 255.
        ])

        train_data = dataset.CK_96_Dataset(train_file, mean_file, std_file,
                                           data_transforms['train'])
        test_data = dataset.CK_96_Dataset(test_file, mean_file, std_file,
                                          data_transforms['val'])

        train_model(out_dir_train,
                    train_data,
                    test_data,
                    batch_size=batch_size,
                    batch_size_val=batch_size_val,
                    num_epochs=num_epochs,
                    save_after=save_after,
                    disp_after=disp_after,
                    plot_after=plot_after,
                    test_after=test_after,
                    lr=lr,
                    dec_after=dec_after,
                    model_name=model_name,
                    criterion=criterion,
                    gpu_id=gpu_id,
                    num_workers=num_workers,
                    model_file=model_file,
                    epoch_start=epoch_start)
        test_model(out_dir_train,
                   model_num,
                   train_data,
                   test_data,
                   model_name=model_name,
                   batch_size_val=batch_size_val,
                   criterion=criterion)

        # read the final accuracy back out of the test log
        res_dir = os.path.join(out_dir_train, 'results_model_' + str(model_num))
        log_file = os.path.join(res_dir, 'log.txt')
        accuracy = util.readLinesFromFile(log_file)[-1]
        accuracy = float(accuracy.split(' ')[1])
        all_accuracy.append(accuracy)

        print all_accuracy, np.mean(all_accuracy), np.std(all_accuracy)
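# Helper mirroring the log parsing main() does inline: the last line of
# results_model_<n>/log.txt is assumed to hold the accuracy as its second
# whitespace-separated token. The log format itself is an assumption based on
# the parsing above.
def read_final_accuracy(out_dir_train, model_num):
    res_dir = os.path.join(out_dir_train, 'results_model_' + str(model_num))
    last_line = util.readLinesFromFile(os.path.join(res_dir, 'log.txt'))[-1]
    return float(last_line.split(' ')[1])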
def train_khorrami_aug(wdecay, lr, route_iter, folds=[4, 9], model_name='vgg_capsule_disfa',
                       epoch_stuff=[30, 60], res=False, class_weights=False,
                       reconstruct=False, loss_weights=None, model_to_test=None,
                       oulu=False, dropout=0):
    out_dir_meta = '../experiments/showing_overfitting_justhflip_' + model_name + str(route_iter)
    num_epochs = epoch_stuff[1]
    if model_to_test is None:
        model_to_test = num_epochs - 1

    epoch_start = 0
    dec_after = ['step', epoch_stuff[0], 0.1]
    im_resize = 110
    im_size = 96
    save_after = num_epochs

    if not oulu:
        type_data = 'train_test_files'
        n_classes = 8
        train_pre = os.path.join('../data/ck_96', type_data)
        pre_pend = 'ck_96_' + type_data + '_'
    else:
        type_data = 'three_im_no_neutral_just_strong_False'
        n_classes = 6
        train_pre = os.path.join('../data/Oulu_CASIA/train_test_files_preprocess_vl',
                                 type_data)
        pre_pend = 'oulu_96_' + type_data + '_'

    criterion = 'margin'
    criterion_str = criterion
    init = False

    strs_append_list = ['reconstruct', reconstruct, class_weights, 'all_aug',
                        criterion_str, init, 'wdecay', wdecay,
                        num_epochs] + dec_after + lr + [dropout]
    if loss_weights is not None:
        strs_append_list = strs_append_list + ['lossweights'] + loss_weights
    strs_append = '_' + '_'.join([str(val) for val in strs_append_list])

    lr_p = lr[:]
    for split_num in folds:
        model_file = None
        margin_params = None
        out_dir_train = os.path.join(out_dir_meta, pre_pend + str(split_num) + strs_append)
        final_model_file = os.path.join(out_dir_train,
                                        'model_' + str(num_epochs - 1) + '.pt')
        if os.path.exists(final_model_file):
            print 'skipping', final_model_file
            continue
        else:
            print 'not skipping', final_model_file

        train_file = os.path.join(train_pre, 'train_' + str(split_num) + '.txt')
        test_file = os.path.join(train_pre, 'test_' + str(split_num) + '.txt')
        mean_file = os.path.join(train_pre, 'train_' + str(split_num) + '_mean.png')
        std_file = os.path.join(train_pre, 'train_' + str(split_num) + '_std.png')

        mean_im = scipy.misc.imread(mean_file).astype(np.float32)
        std_im = scipy.misc.imread(std_file).astype(np.float32)

        class_weights = util.get_class_weights(util.readLinesFromFile(train_file))

        list_of_to_dos = ['flip']
        data_transforms = {}
        data_transforms['train'] = transforms.Compose([
            lambda x: augmenters.augment_image(x, list_of_to_dos, mean_im, std_im, im_size),
            transforms.ToTensor(),
            lambda x: x * 255.
        ])
        data_transforms['val'] = transforms.Compose([
            transforms.ToTensor(),
            lambda x: x * 255.
        ])

        train_data = dataset.CK_96_Dataset(train_file, mean_file, std_file,
                                           data_transforms['train'])
        test_data = dataset.CK_96_Dataset(test_file, mean_file, std_file,
                                          data_transforms['val'])

        network_params = dict(n_classes=n_classes,
                              pool_type='max',
                              r=route_iter,
                              init=init,
                              class_weights=class_weights,
                              reconstruct=reconstruct,
                              loss_weights=loss_weights,
                              dropout=dropout)
        batch_size = 128
        batch_size_val = None

        util.makedirs(out_dir_train)
        train_params = dict(out_dir_train=out_dir_train,
                            train_data=train_data,
                            test_data=test_data,
                            batch_size=batch_size,
                            batch_size_val=batch_size_val,
                            num_epochs=num_epochs,
                            save_after=save_after,
                            disp_after=1,
                            plot_after=100,
                            test_after=1,
                            lr=lr,
                            dec_after=dec_after,
                            model_name=model_name,
                            criterion=criterion,
                            gpu_id=0,
                            num_workers=2,
                            model_file=model_file,
                            epoch_start=epoch_start,
                            margin_params=margin_params,
                            network_params=network_params,
                            weight_decay=wdecay)
        test_params = dict(out_dir_train=out_dir_train,
                           model_num=model_to_test,
                           train_data=train_data,
                           test_data=test_data,
                           gpu_id=0,
                           model_name=model_name,
                           batch_size_val=batch_size_val,
                           criterion=criterion,
                           margin_params=margin_params,
                           network_params=network_params)
        print train_params

        param_file = os.path.join(out_dir_train, 'params.txt')
        all_lines = []
        for k in train_params.keys():
            str_print = '%s: %s' % (k, train_params[k])
            print str_print
            all_lines.append(str_print)
        util.writeFile(param_file, all_lines)

        if reconstruct:
            train_model_recon(**train_params)
            # test_model_recon(**test_params)
        else:
            train_model(**train_params)
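# Hypothetical invocation of the overfitting experiment above; lr is a list of
# learning rates and epoch_stuff is [step_decay_epoch, num_epochs]. These
# values are placeholders for illustration, not tuned settings.
def _demo_train_khorrami_aug_justhflip():
    train_khorrami_aug(wdecay=0, lr=[0.001], route_iter=3, folds=[4],
                       model_name='khorrami_capsule_7_3_bigclass',
                       epoch_stuff=[600, 600], reconstruct=True)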
def get_class_variations(model_name, route_iter, pre_pend, strs_append, split_num,
                         model_num, class_rel, type_exp, train_pre=None,
                         test_file=None, au=False):
    out_dir_meta = os.path.join('../experiments', model_name + str(route_iter))
    out_dir_train = os.path.join(out_dir_meta, pre_pend + str(split_num) + strs_append)

    if train_pre is None:
        train_pre = os.path.join('../data/ck_96', 'train_test_files')

    if test_file is None:
        test_file = os.path.join(train_pre, 'test_' + str(split_num) + '.txt')
    else:
        test_file = os.path.join(train_pre, test_file)

    mean_file = os.path.join(train_pre, 'train_' + str(split_num) + '_mean.png')
    std_file = os.path.join(train_pre, 'train_' + str(split_num) + '_std.png')

    data_transforms = {}
    data_transforms['val'] = transforms.Compose([
        transforms.ToTensor(),
        lambda x: x * 255.
    ])

    if au:
        test_data = dataset.Bp4d_Dataset_Mean_Std_Im(test_file, mean_file, std_file,
                                                     resize=96,
                                                     transform=data_transforms['val'],
                                                     binarize=True)
        criterion = 'marginmulti'
    else:
        test_data = dataset.CK_96_Dataset(test_file, mean_file, std_file,
                                          data_transforms['val'])
        criterion = 'margin'

    test_params = dict(out_dir_train=out_dir_train,
                       model_num=model_num,
                       train_data=None,
                       test_data=test_data,
                       gpu_id=0,
                       model_name=model_name,
                       batch_size_val=128,
                       criterion=criterion,
                       au=au,
                       class_rel=class_rel)

    if type_exp == 0:
        save_visualizations.save_class_as_other(**test_params)
        out_file_results = os.path.join(out_dir_train,
                                        'save_class_as_other_single_batch_' + str(model_num))
        out_file_html = os.path.join(out_file_results,
                                     'visualizing_class_variations_' + str(class_rel) + '.html')
    elif type_exp == 1:
        save_visualizations.save_class_vary_mag(**test_params)
        out_file_results = os.path.join(out_dir_train,
                                        'save_class_vary_mag_single_batch_' + str(model_num))
        out_file_html = os.path.join(out_file_results,
                                     'visualizing_vary_mag_' + str(class_rel) + '.html')
    elif type_exp == 2:
        save_visualizations.save_class_vary_attr(**test_params)
        out_file_results = os.path.join(out_dir_train,
                                        'save_class_vary_attr_single_batch_' + str(model_num) + '_' + str(class_rel))
        out_file_html = os.path.join(out_file_results,
                                     'visualizing_vary_attr_' + str(class_rel) + '.html')
    else:
        save_visualizations.save_class_vary_mag_class_rel(**test_params)
        out_file_results = os.path.join(out_dir_train,
                                        'save_class_vary_mag_single_batch_' + str(model_num) + '_' + str(class_rel))
        out_file_html = os.path.join(out_file_results,
                                     'visualizing_vary_mag_' + str(class_rel) + '.html')

    # early exit: the HTML generation below is currently disabled
    return

    # str_replace, dir_server, and click_str are assumed to be module-level globals
    im_files = np.load(os.path.join(out_file_results, 'ims_all.npy'))
    im_files_new = []
    captions_new = []
    for r in range(im_files.shape[0]):
        caption_row = []
        im_row = []
        for c in range(im_files.shape[1]):
            file_curr = im_files[r, c]
            caption_row.append(os.path.split(file_curr)[1][:file_curr.rindex('.')])
            im_row.append(util.getRelPath(
                file_curr.replace(str_replace[0], str_replace[1]), dir_server))
        im_files_new.append(im_row)
        captions_new.append(caption_row)

    visualize.writeHTML(out_file_html, im_files_new, captions_new, 96, 96)
    print out_file_html.replace(str_replace[0], str_replace[1]).replace(dir_server, click_str)
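# type_exp selects the visualization saved above: 0 -> save_class_as_other,
# 1 -> save_class_vary_mag, 2 -> save_class_vary_attr, otherwise ->
# save_class_vary_mag_class_rel. A hypothetical call for the vary-magnitude
# case, reusing the run identifiers from script_visualizing_primary_caps():
def _demo_get_class_variations():
    get_class_variations(model_name='khorrami_capsule_7_3_bigclass',
                         route_iter=3,
                         pre_pend='ck_96_train_test_files_',
                         strs_append='_reconstruct_True_True_all_aug_margin_False_wdecay_0_600_exp_0.96_350_1e-06_0.001_0.001_0.001',
                         split_num=4, model_num=599,
                         class_rel=0, type_exp=1)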
def train_khorrami_aug_oulu(wdecay, lr, route_iter, folds=[4, 9],
                            model_name='vgg_capsule_disfa', epoch_stuff=[30, 60],
                            res=False, meta_data_dir='train_test_files_preprocess_vl'):
    out_dir_meta = '../experiments/' + model_name + str(route_iter)
    num_epochs = epoch_stuff[1]
    epoch_start = 0
    dec_after = ['exp', 0.96, epoch_stuff[0], 1e-6]
    im_resize = 110
    im_size = 96
    save_after = num_epochs

    type_data = 'three_im_no_neutral_just_strong_False'
    n_classes = 6
    criterion = 'margin'
    criterion_str = criterion
    init = False

    strs_append = '_' + '_'.join([str(val) for val in
                                  ['all_aug', criterion_str, init, 'wdecay', wdecay,
                                   num_epochs] + dec_after + lr])
    pre_pend = 'oulu_96_' + meta_data_dir + '_' + type_data + '_'

    for split_num in folds:
        if res:
            # resume from the earlier 50-epoch step-decay run
            strs_appendc = '_'.join([str(val) for val in
                                     ['all_aug', 'wdecay', wdecay, 50, 'step', 50, 0.1] + lr])
            out_dir_train = os.path.join(out_dir_meta,
                                         'oulu_' + type_data + '_' + str(split_num) + '_' + strs_appendc)
            model_file = os.path.join(out_dir_train, 'model_49.pt')
            epoch_start = 50
        else:
            model_file = None

        margin_params = None
        out_dir_train = os.path.join(out_dir_meta, pre_pend + str(split_num) + strs_append)
        final_model_file = os.path.join(out_dir_train,
                                        'model_' + str(num_epochs - 1) + '.pt')
        if os.path.exists(final_model_file):
            print 'skipping', final_model_file
            continue

        train_file = os.path.join('../data/Oulu_CASIA', meta_data_dir, type_data,
                                  'train_' + str(split_num) + '.txt')
        test_file = os.path.join('../data/Oulu_CASIA', meta_data_dir, type_data,
                                 'test_' + str(split_num) + '.txt')
        mean_file = os.path.join('../data/Oulu_CASIA', meta_data_dir, type_data,
                                 'train_' + str(split_num) + '_mean.png')
        std_file = os.path.join('../data/Oulu_CASIA', meta_data_dir, type_data,
                                'train_' + str(split_num) + '_std.png')

        mean_im = scipy.misc.imread(mean_file).astype(np.float32)
        std_im = scipy.misc.imread(std_file).astype(np.float32)
        print std_im.shape
        print np.min(std_im), np.max(std_im)
        print mean_im.shape
        print np.min(mean_im), np.max(mean_im)

        class_weights = util.get_class_weights(util.readLinesFromFile(train_file))

        list_of_to_dos = ['flip', 'rotate', 'scale_translate']
        data_transforms = {}
        data_transforms['train'] = transforms.Compose([
            lambda x: augmenters.augment_image(x, list_of_to_dos, mean_im, std_im, im_size),
            transforms.ToTensor(),
            lambda x: x * 255.
        ])
        data_transforms['val'] = transforms.Compose([
            transforms.ToTensor(),
            lambda x: x * 255.
        ])

        train_data = dataset.CK_96_Dataset(train_file, mean_file, std_file,
                                           data_transforms['train'])
        test_data = dataset.CK_96_Dataset(test_file, mean_file, std_file,
                                          data_transforms['val'])

        network_params = dict(n_classes=n_classes, pool_type='max', r=route_iter,
                              init=init, class_weights=class_weights)
        batch_size = 128
        batch_size_val = 128

        util.makedirs(out_dir_train)
        train_params = dict(out_dir_train=out_dir_train,
                            train_data=train_data,
                            test_data=test_data,
                            batch_size=batch_size,
                            batch_size_val=batch_size_val,
                            num_epochs=num_epochs,
                            save_after=save_after,
                            disp_after=1,
                            plot_after=100,
                            test_after=1,
                            lr=lr,
                            dec_after=dec_after,
                            model_name=model_name,
                            criterion=criterion,
                            gpu_id=0,
                            num_workers=0,
                            model_file=model_file,
                            epoch_start=epoch_start,
                            margin_params=margin_params,
                            network_params=network_params,
                            weight_decay=wdecay)
        print train_params

        param_file = os.path.join(out_dir_train, 'params.txt')
        all_lines = []
        for k in train_params.keys():
            str_print = '%s: %s' % (k, train_params[k])
            print str_print
            all_lines.append(str_print)
        util.writeFile(param_file, all_lines)

        train_model(**train_params)

        test_params = dict(out_dir_train=out_dir_train,
                           model_num=num_epochs - 1,
                           train_data=train_data,
                           test_data=test_data,
                           gpu_id=0,
                           model_name=model_name,
                           batch_size_val=batch_size_val,
                           criterion=criterion,
                           margin_params=margin_params,
                           network_params=network_params)
        test_model(**test_params)

    # getting_accuracy.print_accuracy(out_dir_meta, pre_pend, strs_append, folds, log='log.txt')
    getting_accuracy.view_loss_curves(out_dir_meta, pre_pend, strs_append, folds,
                                      num_epochs - 1)
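# Hypothetical Oulu-CASIA run: epoch_stuff is [exp_decay_step, num_epochs] for
# the exponential schedule built above; the values here are illustrative only.
def _demo_train_khorrami_aug_oulu():
    train_khorrami_aug_oulu(wdecay=0, lr=[0.001, 0.001], route_iter=3,
                            folds=[4, 9], model_name='vgg_capsule_disfa',
                            epoch_stuff=[30, 60])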
# NOTE: this redefinition shadows the earlier train_khorrami_aug above; it is
# the dropout/non-peak variant that writes to ../experiments_dropout.
def train_khorrami_aug(wdecay, lr, route_iter, folds=[4, 9], model_name='vgg_capsule_disfa',
                       epoch_stuff=[30, 60], res=False, class_weights=False,
                       reconstruct=False, oulu=False, meta_data_dir=None,
                       loss_weights=None, exp=False, non_peak=False,
                       model_to_test=None, dropout=None):
    out_dir_meta = '../experiments_dropout/' + model_name + str(route_iter)
    num_epochs = epoch_stuff[1]
    if model_to_test is None:
        model_to_test = num_epochs - 1

    epoch_start = 0
    if exp:
        dec_after = ['exp', 0.96, epoch_stuff[0], 1e-6]
    else:
        dec_after = ['step', epoch_stuff[0], 0.1]

    im_resize = 110
    im_size = 96
    save_after = 100

    if non_peak:
        # train on non-peak frames, test on the peak test split
        type_data = 'train_test_files_non_peak_one_third'
        n_classes = 8
        train_pre = os.path.join('../data/ck_96', type_data)
        test_pre = os.path.join('../data/ck_96', 'train_test_files')
    else:
        type_data = 'train_test_files'
        n_classes = 8
        train_pre = os.path.join('../data/ck_96', type_data)
        test_pre = os.path.join('../data/ck_96', type_data)
    if oulu:
        type_data = 'three_im_no_neutral_just_strong_False'
        n_classes = 6

    criterion = 'margin'
    criterion_str = criterion
    init = False

    strs_append_list = ['reconstruct', reconstruct, class_weights, 'flip', criterion_str,
                        init, 'wdecay', wdecay, num_epochs] + dec_after + lr + ['dropout', dropout]
    if loss_weights is not None:
        strs_append_list = strs_append_list + ['lossweights'] + loss_weights
    strs_append = '_' + '_'.join([str(val) for val in strs_append_list])

    if oulu:
        pre_pend = 'oulu_96_' + meta_data_dir + '_'
    else:
        pre_pend = 'ck_96_' + type_data + '_'

    lr_p = lr[:]
    for split_num in folds:
        if res:
            # resume from the 600-epoch run and decay the learning rate
            strs_appendc = '_' + '_'.join([str(val) for val in
                                           ['reconstruct', reconstruct, True, 'all_aug',
                                            criterion_str, init, 'wdecay', wdecay, 600,
                                            'step', 600, 0.1] + lr_p])
            out_dir_train = os.path.join(out_dir_meta,
                                         pre_pend + str(split_num) + strs_appendc)
            model_file = os.path.join(out_dir_train, 'model_599.pt')
            epoch_start = 600
            lr = [0.1 * lr_curr for lr_curr in lr_p]
        else:
            model_file = None

        margin_params = None
        out_dir_train = os.path.join(out_dir_meta, pre_pend + str(split_num) + strs_append)
        final_model_file = os.path.join(out_dir_train,
                                        'model_' + str(num_epochs - 1) + '.pt')
        if os.path.exists(final_model_file):
            print 'skipping', final_model_file
            continue
        else:
            print 'not skipping', final_model_file

        if not oulu:
            train_file = os.path.join(train_pre, 'train_' + str(split_num) + '.txt')
            test_file_easy = os.path.join(train_pre, 'test_' + str(split_num) + '.txt')
            test_file = os.path.join(test_pre, 'test_' + str(split_num) + '.txt')
            mean_file = os.path.join(train_pre, 'train_' + str(split_num) + '_mean.png')
            std_file = os.path.join(train_pre, 'train_' + str(split_num) + '_std.png')
        else:
            # test_file_easy (and train_data_no_t below) only exist for the CK+ branch
            train_file = os.path.join('../data/Oulu_CASIA', meta_data_dir, type_data,
                                      'train_' + str(split_num) + '.txt')
            test_file = os.path.join('../data/Oulu_CASIA', meta_data_dir, type_data,
                                     'test_' + str(split_num) + '.txt')
            mean_file = os.path.join('../data/Oulu_CASIA', meta_data_dir, type_data,
                                     'train_' + str(split_num) + '_mean.png')
            std_file = os.path.join('../data/Oulu_CASIA', meta_data_dir, type_data,
                                    'train_' + str(split_num) + '_std.png')

        mean_im = scipy.misc.imread(mean_file).astype(np.float32)
        std_im = scipy.misc.imread(std_file).astype(np.float32)
        class_weights = util.get_class_weights(util.readLinesFromFile(train_file))

        list_of_to_dos = ['flip']  # optionally 'rotate', 'scale_translate', 'pixel_augment'
        data_transforms = {}
        data_transforms['train'] = transforms.Compose([
            lambda x: augmenters.augment_image(x, list_of_to_dos, mean_im, std_im, im_size),
            transforms.ToTensor(),
            lambda x: x * 255.
        ])
        data_transforms['val'] = transforms.Compose([
            transforms.ToTensor(),
            lambda x: x * 255.
        ])

        print train_file
        print test_file
        print std_file
        print mean_file

        train_data = dataset.CK_96_Dataset(train_file, mean_file, std_file,
                                           data_transforms['train'])
        train_data_no_t = dataset.CK_96_Dataset(test_file_easy, mean_file, std_file,
                                                data_transforms['val'])
        test_data = dataset.CK_96_Dataset(test_file, mean_file, std_file,
                                          data_transforms['val'])

        if dropout is not None:
            network_params = dict(n_classes=n_classes, pool_type='max', r=route_iter,
                                  init=init, class_weights=class_weights,
                                  reconstruct=reconstruct, loss_weights=loss_weights,
                                  dropout=dropout)
        else:
            network_params = dict(n_classes=n_classes, pool_type='max', r=route_iter,
                                  init=init, class_weights=class_weights,
                                  reconstruct=reconstruct, loss_weights=loss_weights)

        batch_size = 128
        batch_size_val = 128

        util.makedirs(out_dir_train)
        train_params = dict(out_dir_train=out_dir_train,
                            train_data=train_data,
                            test_data=test_data,
                            batch_size=batch_size,
                            batch_size_val=batch_size_val,
                            num_epochs=num_epochs,
                            save_after=save_after,
                            disp_after=1,
                            plot_after=100,
                            test_after=1,
                            lr=lr,
                            dec_after=dec_after,
                            model_name=model_name,
                            criterion=criterion,
                            gpu_id=0,
                            num_workers=2,
                            model_file=model_file,
                            epoch_start=epoch_start,
                            margin_params=margin_params,
                            network_params=network_params,
                            weight_decay=wdecay)
        test_params = dict(out_dir_train=out_dir_train,
                           model_num=model_to_test,
                           train_data=train_data,
                           test_data=test_data,
                           gpu_id=0,
                           model_name=model_name,
                           batch_size_val=batch_size_val,
                           criterion=criterion,
                           margin_params=margin_params,
                           network_params=network_params)

        # evaluation on the easy (train-distribution) test split
        test_params_train = dict(**test_params)
        test_params_train['test_data'] = train_data_no_t
        test_params_train['post_pend'] = '_easy'

        print train_params
        param_file = os.path.join(out_dir_train, 'params.txt')
        all_lines = []
        for k in train_params.keys():
            str_print = '%s: %s' % (k, train_params[k])
            print str_print
            all_lines.append(str_print)
        util.writeFile(param_file, all_lines)

        train_model_recon(**train_params)
        # test_model_recon(**test_params)

    getting_accuracy.print_accuracy(out_dir_meta, pre_pend, strs_append, folds, log='log.txt')
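# Hypothetical call to the dropout variant above: train on the non-peak
# one-third CK+ frames but evaluate on the peak test split (non_peak=True);
# the dropout and lr values here are placeholders, not tuned settings.
def _demo_train_khorrami_aug_dropout():
    train_khorrami_aug(wdecay=0, lr=[0.001], route_iter=3, folds=[4],
                       model_name='vgg_capsule_disfa', epoch_stuff=[600, 600],
                       reconstruct=True, non_peak=True, dropout=0.5)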
def khorrami_bl_exp(mmi=False, model_to_test=None):
    out_dir_meta = '../experiments/khorrami_ck_96_caps_bl/'
    num_epochs = 300
    epoch_start = 0
    dec_after = ['exp', 0.96, 350, 1e-6]
    lr = [0.001, 0.001]
    im_size = 96
    model_name = 'khorrami_ck_96'
    save_after = 10
    model_file = None

    if not mmi:
        strs_append = '_' + '_'.join([str(val) for val in
                                      ['train_test_files_non_peak_one_third', model_name,
                                       num_epochs] + dec_after + lr])
        pre_pend = 'ck_'
        folds = range(10)
    else:
        pre_pend = 'mmi_96_'
        folds = range(2)
        strs_append = '_' + '_'.join([str(val) for val in
                                      ['train_test_files', model_name,
                                       num_epochs] + dec_after + lr])

    if model_to_test is None:
        model_to_test = num_epochs - 1

    for split_num in folds:
        out_dir_train = os.path.join(out_dir_meta, pre_pend + str(split_num) + strs_append)
        print out_dir_train

        out_file_model = os.path.join(out_dir_train,
                                      'model_' + str(num_epochs - 1) + '.pt')
        if os.path.exists(out_file_model):
            print 'skipping', out_file_model
        else:
            print 'not done', out_file_model
            raw_input()

        if not mmi:
            train_file = '../data/ck_96/train_test_files_non_peak_one_third/train_' + str(split_num) + '.txt'
            test_file = '../data/ck_96/train_test_files/test_' + str(split_num) + '.txt'
            test_file_easy = '../data/ck_96/train_test_files_non_peak_one_third/test_' + str(split_num) + '.txt'
            mean_file = '../data/ck_96/train_test_files_non_peak_one_third/train_' + str(split_num) + '_mean.png'
            std_file = '../data/ck_96/train_test_files_non_peak_one_third/train_' + str(split_num) + '_std.png'
        else:
            type_data = 'train_test_files'
            n_classes = 6
            train_pre = os.path.join('../data/mmi', type_data)
            test_pre = train_pre
            train_file = os.path.join(train_pre, 'train_' + str(split_num) + '.txt')
            test_file_easy = os.path.join(train_pre, 'test_front_' + str(split_num) + '.txt')
            test_file = os.path.join(test_pre, 'test_side_' + str(split_num) + '.txt')
            mean_file = os.path.join(train_pre, 'train_' + str(split_num) + '_mean.png')
            std_file = os.path.join(train_pre, 'train_' + str(split_num) + '_std.png')

        mean_im = scipy.misc.imread(mean_file).astype(np.float32)
        std_im = scipy.misc.imread(std_file).astype(np.float32)
        std_im[std_im == 0] = 1.

        if not mmi:
            list_of_to_dos = ['pixel_augment', 'flip', 'rotate', 'scale_translate']
            data_transforms = {}
            data_transforms['train'] = transforms.Compose([
                lambda x: augmenters.augment_image(x, list_of_to_dos, mean_im, std_im, im_size),
                transforms.ToTensor(),
                lambda x: x * 255.
            ])
            data_transforms['val'] = transforms.Compose([
                transforms.ToTensor(),
                lambda x: x * 255.
            ])

            train_data = dataset.CK_96_Dataset(train_file, mean_file, std_file,
                                               data_transforms['train'])
            test_data = dataset.CK_96_Dataset(test_file, mean_file, std_file,
                                              data_transforms['val'])
            test_data_easy = dataset.CK_96_Dataset(test_file_easy, mean_file, std_file,
                                                   data_transforms['val'])
        else:
            list_of_to_dos = ['flip', 'rotate', 'scale_translate']
            data_transforms = {}
            data_transforms['train'] = transforms.Compose([
                lambda x: augmenters.random_crop(x, im_size),
                lambda x: augmenters.augment_image(x, list_of_to_dos),
                transforms.ToTensor(),
                lambda x: x * 255.
            ])
            data_transforms['val'] = transforms.Compose([
                transforms.ToTensor(),
                lambda x: x * 255.
            ])

            print train_file
            print test_file
            print std_file
            print mean_file

            train_data = dataset.CK_96_Dataset_with_rs(train_file, mean_file, std_file,
                                                       data_transforms['train'])
            test_data_easy = dataset.CK_96_Dataset_with_rs(test_file_easy, mean_file,
                                                           std_file, data_transforms['val'],
                                                           resize=im_size)
            test_data = dataset.CK_96_Dataset_with_rs(test_file, mean_file, std_file,
                                                      data_transforms['val'],
                                                      resize=im_size)

        network_params = dict(n_classes=8, bn=False)
        batch_size = 128
        batch_size_val = 128

        util.makedirs(out_dir_train)
        train_params = dict(out_dir_train=out_dir_train,
                            train_data=train_data,
                            test_data=test_data,
                            batch_size=batch_size,
                            batch_size_val=batch_size_val,
                            num_epochs=num_epochs,
                            save_after=save_after,
                            disp_after=1,
                            plot_after=10,
                            test_after=1,
                            lr=lr,
                            dec_after=dec_after,
                            model_name=model_name,
                            criterion=nn.CrossEntropyLoss(),
                            gpu_id=1,
                            num_workers=0,
                            model_file=model_file,
                            epoch_start=epoch_start,
                            network_params=network_params)
        test_params = dict(out_dir_train=out_dir_train,
                           model_num=model_to_test,
                           train_data=train_data,
                           test_data=test_data,
                           gpu_id=1,
                           model_name=model_name,
                           batch_size_val=batch_size_val,
                           criterion=nn.CrossEntropyLoss(),
                           margin_params=None,
                           network_params=network_params,
                           post_pend='',
                           model_nums=None)
        test_params_easy = dict(out_dir_train=out_dir_train,
                                model_num=model_to_test,
                                train_data=train_data,
                                test_data=test_data_easy,
                                gpu_id=1,
                                model_name=model_name,
                                batch_size_val=batch_size_val,
                                criterion=nn.CrossEntropyLoss(),
                                margin_params=None,
                                network_params=network_params,
                                post_pend='_easy',
                                model_nums=None)
        print train_params

        param_file = os.path.join(out_dir_train, 'params.txt')
        all_lines = []
        for k in train_params.keys():
            str_print = '%s: %s' % (k, train_params[k])
            print str_print
            all_lines.append(str_print)
        util.writeFile(param_file, all_lines)

        # train_model(**train_params)
        test_model(**test_params)
        test_model(**test_params_easy)

    getting_accuracy.print_accuracy(out_dir_meta, pre_pend, strs_append, folds, log='log.txt')
    getting_accuracy.view_loss_curves(out_dir_meta, pre_pend, strs_append, folds,
                                      num_epochs - 1)
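# Hypothetical baseline runs: CK+ non-peak splits by default, or the two MMI
# splits with mmi=True; model_to_test defaults to the final epoch (299).
def _demo_khorrami_bl_exp():
    khorrami_bl_exp(mmi=False)
    khorrami_bl_exp(mmi=True, model_to_test=299)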
def train_gray(wdecay, lr, route_iter, folds=[4, 9], model_name='vgg_capsule_disfa',
               epoch_stuff=[30, 60], res=False, reconstruct=False, oulu=False,
               meta_data_dir='train_test_files_preprocess_vl', loss_weights=None,
               exp=False, dropout=0, gpu_id=0, aug_more=['flip'], model_to_test=None):
    # aug_more is treated as a list of augmentation names throughout, so the
    # default is ['flip'] rather than the bare string 'flip'
    out_dir_meta = '../experiments_dropout/' + model_name + '_' + str(route_iter)
    num_epochs = epoch_stuff[1]
    if model_to_test is None:
        model_to_test = num_epochs - 1

    epoch_start = 0
    if exp:
        dec_after = ['exp', 0.96, epoch_stuff[0], 1e-6]
    else:
        dec_after = ['step', epoch_stuff[0], 0.1]

    im_resize = 110
    im_size = 96
    save_after = 100

    type_data = 'train_test_files'
    n_classes = 8
    train_pre = os.path.join('../data/ck_96', type_data)
    test_pre = os.path.join('../data/ck_96', type_data)
    if oulu:
        type_data = 'three_im_no_neutral_just_strong_False'
        n_classes = 6

    criterion = 'margin'
    criterion_str = criterion
    init = False

    strs_append_list = ['reconstruct', reconstruct] + aug_more + [num_epochs] + dec_after + lr + [dropout]
    if loss_weights is not None:
        strs_append_list = strs_append_list + ['lossweights'] + loss_weights
    strs_append = '_' + '_'.join([str(val) for val in strs_append_list])

    if oulu:
        pre_pend = 'oulu_96_' + meta_data_dir + '_'
    else:
        pre_pend = 'ck_96_' + type_data + '_'

    lr_p = lr[:]
    for split_num in folds:
        if res:
            print 'what to res?'
            raw_input()
        else:
            model_file = None

        margin_params = None
        out_dir_train = os.path.join(out_dir_meta, pre_pend + str(split_num) + strs_append)
        print out_dir_train

        final_model_file = os.path.join(out_dir_train,
                                        'model_' + str(num_epochs - 1) + '.pt')
        if os.path.exists(final_model_file):
            print 'skipping', final_model_file
            continue
        else:
            print 'not skipping', final_model_file

        if not oulu:
            train_file = os.path.join(train_pre, 'train_' + str(split_num) + '.txt')
            test_file_easy = os.path.join(train_pre, 'test_' + str(split_num) + '.txt')
            test_file = os.path.join(test_pre, 'test_' + str(split_num) + '.txt')
            mean_file = os.path.join(train_pre, 'train_' + str(split_num) + '_mean.png')
            std_file = os.path.join(train_pre, 'train_' + str(split_num) + '_std.png')
        else:
            train_file = os.path.join('../data/Oulu_CASIA', meta_data_dir, type_data,
                                      'train_' + str(split_num) + '.txt')
            test_file = os.path.join('../data/Oulu_CASIA', meta_data_dir, type_data,
                                     'test_' + str(split_num) + '.txt')
            mean_file = os.path.join('../data/Oulu_CASIA', meta_data_dir, type_data,
                                     'train_' + str(split_num) + '_mean.png')
            std_file = os.path.join('../data/Oulu_CASIA', meta_data_dir, type_data,
                                    'train_' + str(split_num) + '_std.png')

        mean_im = scipy.misc.imread(mean_file).astype(np.float32)
        std_im = scipy.misc.imread(std_file).astype(np.float32)
        class_weights = util.get_class_weights(util.readLinesFromFile(train_file))

        list_of_to_dos = aug_more
        print list_of_to_dos

        # three augmentation modes: hide-and-seek (optionally with flips),
        # no augmentation at all, or the standard augment_image pipeline
        data_transforms = {}
        if 'hs' in list_of_to_dos:
            print '**********HS!!!!!!!'
            list_transforms = [lambda x: augmenters.hide_and_seek(x)]
            if 'flip' in list_of_to_dos:
                list_transforms.append(lambda x: augmenters.horizontal_flip(x))
            list_transforms = list_transforms + [transforms.ToTensor(),
                                                 lambda x: x * 255.]
            print list_transforms
            data_transforms['train'] = transforms.Compose(list_transforms)
        elif 'none' in list_of_to_dos:
            print 'DOING NOTHING!!!!!!'
            data_transforms['train'] = transforms.Compose([
                transforms.ToTensor(),
                lambda x: x * 255.
            ])
        else:
            data_transforms['train'] = transforms.Compose([
                lambda x: augmenters.augment_image(x, list_of_to_dos, mean_im, std_im, im_size),
                transforms.ToTensor(),
                lambda x: x * 255.
            ])
        data_transforms['val'] = transforms.Compose([
            transforms.ToTensor(),
            lambda x: x * 255.
        ])
        print data_transforms['train']

        print train_file
        print test_file
        print std_file
        print mean_file

        train_data = dataset.CK_96_Dataset(train_file, mean_file, std_file,
                                           data_transforms['train'])
        test_data = dataset.CK_96_Dataset(test_file, mean_file, std_file,
                                          data_transforms['val'])

        network_params = dict(n_classes=n_classes, pool_type='max', r=route_iter,
                              init=init, class_weights=class_weights,
                              reconstruct=reconstruct, loss_weights=loss_weights,
                              dropout=dropout)
        batch_size = 128
        batch_size_val = 128

        util.makedirs(out_dir_train)
        train_params = dict(out_dir_train=out_dir_train,
                            train_data=train_data,
                            test_data=test_data,
                            batch_size=batch_size,
                            batch_size_val=batch_size_val,
                            num_epochs=num_epochs,
                            save_after=save_after,
                            disp_after=1,
                            plot_after=100,
                            test_after=1,
                            lr=lr,
                            dec_after=dec_after,
                            model_name=model_name,
                            criterion=criterion,
                            gpu_id=gpu_id,
                            num_workers=0,
                            model_file=model_file,
                            epoch_start=epoch_start,
                            margin_params=margin_params,
                            network_params=network_params,
                            weight_decay=wdecay)
        test_params = dict(out_dir_train=out_dir_train,
                           model_num=model_to_test,
                           train_data=train_data,
                           test_data=test_data,
                           gpu_id=gpu_id,
                           model_name=model_name,
                           batch_size_val=batch_size_val,
                           criterion=criterion,
                           margin_params=margin_params,
                           network_params=network_params)
        print train_params

        param_file = os.path.join(out_dir_train, 'params.txt')
        all_lines = []
        for k in train_params.keys():
            str_print = '%s: %s' % (k, train_params[k])
            print str_print
            all_lines.append(str_print)
        util.writeFile(param_file, all_lines)

        train_model_recon(**train_params)
        test_model_recon(**test_params)

    getting_accuracy.print_accuracy(out_dir_meta, pre_pend, strs_append, folds, log='log.txt')
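# aug_more is a list of augmentation names: 'hs' switches on hide-and-seek
# (optionally combined with 'flip'), 'none' disables augmentation, and any
# other list is passed to augmenters.augment_image. A hypothetical
# hide-and-seek run with placeholder hyperparameters:
def _demo_train_gray_hs():
    train_gray(wdecay=0, lr=[0.001], route_iter=3, folds=[4],
               model_name='vgg_capsule_disfa', epoch_stuff=[30, 60],
               reconstruct=True, aug_more=['hs', 'flip'])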
def checking_aug(wdecay, lr, route_iter, folds=[4, 9], model_name='vgg_capsule_disfa',
                 epoch_stuff=[30, 60], res=False, reconstruct=False, oulu=False,
                 meta_data_dir='train_test_files_preprocess_vl', loss_weights=None,
                 exp=False, dropout=0, gpu_id=0, aug_more=['flip'], model_to_test=None):
    # visual sanity check for the augmentation pipeline: writes every augmented
    # training image to disk and builds an HTML page for browsing. As in
    # train_gray, aug_more defaults to the list ['flip'].
    out_dir_meta = '../experiments_dropout/' + model_name + '_' + str(route_iter)
    num_epochs = epoch_stuff[1]
    if model_to_test is None:
        model_to_test = num_epochs - 1

    epoch_start = 0
    if exp:
        dec_after = ['exp', 0.96, epoch_stuff[0], 1e-6]
    else:
        dec_after = ['step', epoch_stuff[0], 0.1]

    im_resize = 110
    im_size = 96
    save_after = 100

    type_data = 'train_test_files'
    n_classes = 8
    train_pre = os.path.join('../data/ck_96', type_data)
    test_pre = os.path.join('../data/ck_96', type_data)
    if oulu:
        type_data = 'three_im_no_neutral_just_strong_False'
        n_classes = 6

    criterion = 'margin'
    criterion_str = criterion
    init = False

    strs_append_list = ['reconstruct', reconstruct] + aug_more + [num_epochs] + dec_after + lr + [dropout]
    if loss_weights is not None:
        strs_append_list = strs_append_list + ['lossweights'] + loss_weights
    strs_append = '_' + '_'.join([str(val) for val in strs_append_list])

    if oulu:
        pre_pend = 'oulu_96_' + meta_data_dir + '_'
    else:
        pre_pend = 'ck_96_' + type_data + '_'

    lr_p = lr[:]
    for split_num in folds:
        if res:
            print 'what to res?'
            raw_input()
        else:
            model_file = None

        margin_params = None
        out_dir_train = os.path.join(out_dir_meta, pre_pend + str(split_num) + strs_append)
        print out_dir_train

        final_model_file = os.path.join(out_dir_train,
                                        'model_' + str(num_epochs - 1) + '.pt')
        if os.path.exists(final_model_file):
            print 'skipping', final_model_file
        else:
            print 'not skipping', final_model_file

        if not oulu:
            train_file = os.path.join(train_pre, 'train_' + str(split_num) + '.txt')
            test_file_easy = os.path.join(train_pre, 'test_' + str(split_num) + '.txt')
            test_file = os.path.join(test_pre, 'test_' + str(split_num) + '.txt')
            mean_file = os.path.join(train_pre, 'train_' + str(split_num) + '_mean.png')
            std_file = os.path.join(train_pre, 'train_' + str(split_num) + '_std.png')
        else:
            train_file = os.path.join('../data/Oulu_CASIA', meta_data_dir, type_data,
                                      'train_' + str(split_num) + '.txt')
            test_file = os.path.join('../data/Oulu_CASIA', meta_data_dir, type_data,
                                     'test_' + str(split_num) + '.txt')
            mean_file = os.path.join('../data/Oulu_CASIA', meta_data_dir, type_data,
                                     'train_' + str(split_num) + '_mean.png')
            std_file = os.path.join('../data/Oulu_CASIA', meta_data_dir, type_data,
                                    'train_' + str(split_num) + '_std.png')

        mean_im = scipy.misc.imread(mean_file).astype(np.float32)
        std_im = scipy.misc.imread(std_file).astype(np.float32)
        class_weights = util.get_class_weights(util.readLinesFromFile(train_file))

        list_of_to_dos = aug_more
        print list_of_to_dos

        # only the hide-and-seek augmentation is exercised here
        data_transforms = {}
        data_transforms['train'] = transforms.Compose([
            lambda x: augmenters.hide_and_seek(x, div_sizes=[9, 7, 5, 3],
                                               hide_prob=0.5, fill_val=0),
            transforms.ToTensor(),
            lambda x: x * 255.
        ])
        data_transforms['val'] = transforms.Compose([
            transforms.ToTensor(),
            lambda x: x * 255.
        ])

        print train_file
        print test_file
        print std_file
        print mean_file

        train_data = dataset.CK_96_Dataset(train_file, mean_file, std_file,
                                           data_transforms['train'])
        batch_size = 1
        print batch_size
        train_dataloader = torch.utils.data.DataLoader(train_data,
                                                       batch_size=batch_size,
                                                       shuffle=False)

        out_dir_im = '../experiments_dropout/checking_aug/im_flip_check'
        util.makedirs(out_dir_im)

        # dump every augmented image to disk for inspection
        for num_iter_train, batch in enumerate(train_dataloader):
            if num_iter_train % 100 == 0:
                print num_iter_train
            ims = batch['image'].cpu().numpy()
            labels = batch['label']
            for num_curr, im_curr in enumerate(ims):
                if num_curr % 100 == 0:
                    print num_curr
                im_curr = im_curr.squeeze()
                out_file_curr = os.path.join(out_dir_im, '_'.join(
                    [str(val) for val in [num_iter_train, num_curr]]) + '.png')
                scipy.misc.imsave(out_file_curr, im_curr)

        visualize.writeHTMLForFolder(out_dir_im, '.png')
        print 'done'
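# Hypothetical call that dumps hide-and-seek-augmented CK+ training images for
# one fold so they can be inspected via the generated HTML page; most of the
# training hyperparameters only affect the output directory name here.
def _demo_checking_aug():
    checking_aug(wdecay=0, lr=[0.001], route_iter=3, folds=[4],
                 model_name='vgg_capsule_disfa', epoch_stuff=[30, 60],
                 aug_more=['flip'])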