def get_set_up(model_name, route_iter, pre_pend, strs_append, split_num, model_num, train_pre = None, test_file = None, au = False):
    """Assemble evaluation inputs for a trained capsule model.

    Builds the test dataset, criterion and a `test_params` dict for the
    given experiment/split, then prints the directory where routing
    visualizations for `model_num` would be written.

    NOTE(review): `test_params` is constructed but nothing is invoked with
    it here — the `save_visualizations.save_routings` call is disabled in
    this version.
    """
    out_dir_meta = os.path.join('../experiments', model_name + str(route_iter))
    out_dir_train = os.path.join(out_dir_meta, pre_pend + str(split_num) + strs_append)

    # Default to the CK+ 96x96 train/test file layout.
    if train_pre is None:
        train_pre = os.path.join('../data/ck_96', 'train_test_files')

    test_file = os.path.join(train_pre, 'test_' + str(split_num) + '.txt') \
        if test_file is None else os.path.join(train_pre, test_file)

    mean_file = os.path.join(train_pre, 'train_' + str(split_num) + '_mean.png')
    std_file = os.path.join(train_pre, 'train_' + str(split_num) + '_std.png')

    # Validation transform: to tensor, then rescale into [0, 255].
    val_transform = transforms.Compose([
        transforms.ToTensor(),
        lambda im: im * 255.
    ])
    data_transforms = {'val': val_transform}

    # AU experiments use the BP4D dataset wrapper with a multi-label margin
    # loss; expression experiments use CK+ with the single-label margin loss.
    if au:
        test_data = dataset.Bp4d_Dataset_Mean_Std_Im(test_file, mean_file, std_file, resize=96, transform=val_transform, binarize=True)
        criterion = 'marginmulti'
    else:
        test_data = dataset.CK_96_Dataset(test_file, mean_file, std_file, val_transform)
        criterion = 'margin'

    test_params = dict(out_dir_train=out_dir_train,
                       model_num=model_num,
                       train_data=None,
                       test_data=test_data,
                       gpu_id=0,
                       model_name=model_name,
                       batch_size_val=32,
                       criterion=criterion,
                       au=au)

    out_file_results = os.path.join(out_dir_train, 'save_routings_single_batch_' + str(model_num))
    print(out_file_results)
def get_class_variations(model_name, route_iter, pre_pend, strs_append, split_num, model_num, class_rel,type_exp,train_pre = None, test_file = None, au= False):
    """Run one of the class-variation visualizations for a trained model.

    Builds the test dataset for the given experiment/split and dispatches on
    `type_exp`:
        0 -> save_visualizations.save_class_as_other
        1 -> save_visualizations.save_class_vary_mag
        2 -> save_visualizations.save_class_vary_attr
        else -> save_visualizations.save_class_vary_mag_class_rel
    `class_rel` is the class index the visualization is relative to; it is
    forwarded to the visualization routine via `test_params`.

    FIX(review): the original version had an unconditional `return` followed
    by unreachable HTML-writing code that referenced undefined names
    (`str_replace`, `dir_server`, `click_str`); that dead block — and the
    dead `out_file_results`/`out_file_html` assignments feeding it — have
    been removed.
    """
    out_dir_meta = os.path.join('../experiments', model_name + str(route_iter))
    out_dir_train = os.path.join(out_dir_meta, pre_pend + str(split_num) + strs_append)

    # Default to the CK+ 96x96 train/test file layout.
    if train_pre is None:
        train_pre = os.path.join('../data/ck_96', 'train_test_files')

    if test_file is None:
        test_file = os.path.join(train_pre, 'test_' + str(split_num) + '.txt')
    else:
        test_file = os.path.join(train_pre, test_file)

    mean_file = os.path.join(train_pre, 'train_' + str(split_num) + '_mean.png')
    std_file = os.path.join(train_pre, 'train_' + str(split_num) + '_std.png')

    # Validation transform: to tensor, then rescale into [0, 255].
    data_transforms = {}
    data_transforms['val'] = transforms.Compose([
        transforms.ToTensor(),
        lambda x: x * 255.
    ])

    # AU experiments use the BP4D dataset wrapper with a multi-label margin
    # loss; expression experiments use CK+ with the single-label margin loss.
    if au:
        test_data = dataset.Bp4d_Dataset_Mean_Std_Im(test_file, mean_file, std_file, resize=96, transform=data_transforms['val'], binarize=True)
        criterion = 'marginmulti'
    else:
        test_data = dataset.CK_96_Dataset(test_file, mean_file, std_file, data_transforms['val'])
        criterion = 'margin'

    test_params = dict(out_dir_train=out_dir_train,
                       model_num=model_num,
                       train_data=None,
                       test_data=test_data,
                       gpu_id=0,
                       model_name=model_name,
                       batch_size_val=128,
                       criterion=criterion,
                       au=au,
                       class_rel=class_rel)

    if type_exp == 0:
        save_visualizations.save_class_as_other(**test_params)
    elif type_exp == 1:
        save_visualizations.save_class_vary_mag(**test_params)
    elif type_exp == 2:
        save_visualizations.save_class_vary_attr(**test_params)
    else:
        save_visualizations.save_class_vary_mag_class_rel(**test_params)
# Exemple #3 (scrape artifact from the source listing; commented out so the file parses)
def train_gray(wdecay,
               lr,
               route_iter,
               folds=[4, 9],
               model_name='vgg_capsule_bp4d',
               epoch_stuff=[30, 60],
               res=False,
               class_weights=False,
               reconstruct=False,
               loss_weights=None,
               exp=False,
               disfa=False,
               vgg_base_file=None,
               vgg_base_file_str=None,
               mean_file=None,
               std_file=None,
               aug_more=False,
               align=True):
    """Train and test a capsule AU model on gray-scale BP4D or DISFA crops.

    For every fold in `folds`, builds the train/test datasets, trains with
    `train_model_recon`, evaluates with `test_model_recon`, and finally
    prints per-fold accuracy via `getting_accuracy.print_accuracy`.

    Parameter semantics as used by the visible code:
    - lr: LIST of learning rates (it is concatenated into the output
      directory name and passed to the trainer as-is).
    - epoch_stuff: [decay interval/step, total number of epochs].
    - res: resume from the epoch-9 checkpoint of a matching 10-epoch run.
    - class_weights: boolean used only in the output dir name; the local
      name is later overwritten with the actual per-class weight array.
    - aug_more: despite the False default, it must be one of
      'cropkhAugNoColor', 'cropFlip' or 'NONE'; anything else (including
      the default) raises ValueError.
    - vgg_base_file: when given, the mean_file/std_file arguments are used
      as-is; otherwise per-split mean/std images are derived from the data
      directory.

    NOTE(review): the mutable defaults (folds, epoch_stuff) are never
    mutated here, so they are benign.
    """
    out_dirs = []

    out_dir_meta = '../experiments/' + model_name + str(route_iter)
    num_epochs = epoch_stuff[1]
    epoch_start = 0
    # LR schedule: exponential decay or fixed-step decay, keyed on `exp`.
    if exp:
        dec_after = ['exp', 0.96, epoch_stuff[0], 1e-6]
    else:
        dec_after = ['step', epoch_stuff[0], 0.1]

    lr = lr  # no-op; kept as-is from the original
    im_resize = 110
    # 256
    im_size = 96
    save_after = 1
    # Select dataset layout and number of AU classes.
    if disfa:
        dir_files = '../data/disfa'
        # type_data = 'train_test_10_6_method_110_gray_align'; n_classes = 10;
        type_data = 'train_test_8_au_all_method_110_gray_align'
        n_classes = 8
        pre_pend = 'disfa_' + type_data + '_'
        binarize = True
    else:
        dir_files = '../data/bp4d'
        if align:
            type_data = 'train_test_files_110_gray_align'
            n_classes = 12
        else:
            type_data = 'train_test_files_110_gray_nodetect'
            n_classes = 12
        pre_pend = 'bp4d_' + type_data + '_'
        binarize = False

    criterion = 'marginmulti'
    criterion_str = criterion

    init = False
    aug_str = aug_more
    # if aug_more:
    #     aug_str = 'cropkhAugNoColor'
    # else:
    #     aug_str = 'flipCrop'

    # Experiment directory suffix encodes all hyperparameters for this run.
    strs_append = '_' + '_'.join([
        str(val) for val in [
            'reconstruct', reconstruct, class_weights, aug_str, criterion_str,
            init, 'wdecay', wdecay, num_epochs
        ] + dec_after + lr + ['lossweights'] + loss_weights +
        [vgg_base_file_str]
    ])

    lr_p = lr[:]
    for split_num in folds:

        if res:
            # Resume: locate the matching 10-epoch run and restart from its
            # epoch-9 checkpoint.

            # strs_appendc = '_'+'_'.join([str(val) for val in ['reconstruct',reconstruct,True,'flipCrop',criterion_str,init,'wdecay',wdecay,10,'exp',0.96,350,1e-6]+['lossweights']+loss_weights])
            # dec_afterc = dec_after
            strs_appendc = '_' + '_'.join([
                str(val) for val in [
                    'reconstruct', reconstruct, True, aug_str, criterion_str,
                    init, 'wdecay', wdecay, 10
                ] + dec_after + lr + ['lossweights'] + loss_weights +
                [vgg_base_file_str]
            ])

            out_dir_train = os.path.join(
                out_dir_meta, pre_pend + str(split_num) + strs_appendc)
            model_file = os.path.join(out_dir_train, 'model_9.pt')
            epoch_start = 10
            # lr =[0.1*lr_curr for lr_curr in lr_p]

        else:
            model_file = None

        margin_params = None

        out_dir_train = os.path.join(out_dir_meta,
                                     pre_pend + str(split_num) + strs_append)
        final_model_file = os.path.join(out_dir_train,
                                        'model_' + str(num_epochs - 1) + '.pt')
        # Only informational: the fold is trained regardless (the `continue`
        # that would skip finished folds is commented out).
        if os.path.exists(final_model_file):
            print 'skipping', final_model_file
            # raw_input()
            # continue
        else:
            print 'not skipping', final_model_file
            # raw_input()
            # continue

        train_file = os.path.join(dir_files, type_data,
                                  'train_' + str(split_num) + '.txt')
        test_file = os.path.join(dir_files, type_data,
                                 'test_' + str(split_num) + '.txt')
        # Without a VGG base, use the per-split mean/std images.
        if vgg_base_file is None:
            mean_file = os.path.join(dir_files, type_data,
                                     'train_' + str(split_num) + '_mean.png')
            std_file = os.path.join(dir_files, type_data,
                                    'train_' + str(split_num) + '_std.png')

        print train_file
        print test_file
        print mean_file
        print std_file
        # raw_input()

        # NOTE: this intentionally overwrites the boolean `class_weights`
        # parameter with the actual per-class weights from the train split.
        class_weights = util.get_class_weights_au(
            util.readLinesFromFile(train_file))

        # Training-time augmentation, selected by the `aug_more` mode string.
        data_transforms = {}
        if aug_more == 'cropkhAugNoColor':
            train_resize = None
            print 'AUGING MORE'
            list_of_todos = ['flip', 'rotate', 'scale_translate']

            data_transforms['train'] = transforms.Compose([
                lambda x: augmenters.random_crop(x, im_size),
                lambda x: augmenters.augment_image(x, list_of_todos),
                # lambda x: augmenters.horizontal_flip(x),
                transforms.ToTensor(),
                lambda x: x * 255,
            ])
        elif aug_more == 'cropFlip':
            train_resize = None
            data_transforms['train'] = transforms.Compose([
                lambda x: augmenters.random_crop(x, im_size),
                lambda x: augmenters.horizontal_flip(x),
                transforms.ToTensor(),
                lambda x: x * 255,
            ])
        elif aug_more == 'NONE':
            train_resize = im_size
            data_transforms['train'] = transforms.Compose([
                transforms.ToTensor(),
                lambda x: x * 255,
            ])
        else:
            raise ValueError('aug_more is problematic')

        data_transforms['val'] = transforms.Compose([
            transforms.ToTensor(),
            lambda x: x * 255,
        ])

        train_data = dataset.Bp4d_Dataset_Mean_Std_Im(
            train_file,
            mean_file,
            std_file,
            transform=data_transforms['train'],
            binarize=binarize,
            resize=train_resize)
        test_data = dataset.Bp4d_Dataset_Mean_Std_Im(
            test_file,
            mean_file,
            std_file,
            resize=im_size,
            transform=data_transforms['val'],
            binarize=binarize)

        # train_data = dataset.Bp4d_Dataset_Mean_Std_Im(test_file, mean_file, std_file, resize= im_size, transform = data_transforms['val'])

        network_params = dict(n_classes=n_classes,
                              pool_type='max',
                              r=route_iter,
                              init=init,
                              class_weights=class_weights,
                              reconstruct=reconstruct,
                              loss_weights=loss_weights,
                              vgg_base_file=vgg_base_file)

        batch_size = 128
        batch_size_val = 128

        util.makedirs(out_dir_train)

        train_params = dict(out_dir_train=out_dir_train,
                            train_data=train_data,
                            test_data=test_data,
                            batch_size=batch_size,
                            batch_size_val=batch_size_val,
                            num_epochs=num_epochs,
                            save_after=save_after,
                            disp_after=1,
                            plot_after=10,
                            test_after=1,
                            lr=lr,
                            dec_after=dec_after,
                            model_name=model_name,
                            criterion=criterion,
                            gpu_id=0,
                            num_workers=0,
                            model_file=model_file,
                            epoch_start=epoch_start,
                            margin_params=margin_params,
                            network_params=network_params,
                            weight_decay=wdecay)
        test_params = dict(out_dir_train=out_dir_train,
                           model_num=num_epochs - 1,
                           train_data=train_data,
                           test_data=test_data,
                           gpu_id=0,
                           model_name=model_name,
                           batch_size_val=batch_size_val,
                           criterion=criterion,
                           margin_params=margin_params,
                           network_params=network_params,
                           barebones=True)
        # test_params_train = dict(**test_params)
        # test_params_train['test_data'] = train_data_no_t
        # test_params_train['post_pend'] = '_train'

        # Record the full parameter set alongside the experiment outputs.
        print train_params
        param_file = os.path.join(out_dir_train, 'params.txt')
        all_lines = []
        for k in train_params.keys():
            str_print = '%s: %s' % (k, train_params[k])
            print str_print
            all_lines.append(str_print)
        util.writeFile(param_file, all_lines)

        # if reconstruct:

        train_model_recon(**train_params)
        test_model_recon(**test_params)
        # test_model_recon(**test_params_train)

        # else:
        #     train_model(**train_params)
        # test_params = dict(out_dir_train = out_dir_train,
        #         model_num = num_epochs-1,
        #         train_data = train_data,
        #         test_data = test_data,
        #         gpu_id = 0,
        #         model_name = model_name,
        #         batch_size_val = batch_size_val,
        #         criterion = criterion,
        #         margin_params = margin_params,
        #         network_params = network_params)
        # test_model(**test_params)

    getting_accuracy.print_accuracy(out_dir_meta,
                                    pre_pend,
                                    strs_append,
                                    folds,
                                    log='log.txt')
# Exemple #4 (scrape artifact from the source listing; commented out so the file parses)
def save_test_results(wdecay,
                      lr,
                      route_iter,
                      folds=[4, 9],
                      model_name='vgg_capsule_bp4d',
                      epoch_stuff=[30, 60],
                      res=False,
                      class_weights=False,
                      reconstruct=False,
                      loss_weights=None,
                      models_to_test=None,
                      exp=False,
                      disfa=False):
    """Evaluate saved checkpoints of a trained capsule AU model.

    For every fold in `folds` and every epoch number in `models_to_test`,
    reconstructs the experiment directory name (it must match how the model
    was trained — same wdecay/lr/dec_after/loss_weights encoding), rebuilds
    the BP4D/DISFA datasets and runs `test_model_recon` on that checkpoint.

    Parameter semantics as used by the visible code:
    - lr: LIST of learning rates (concatenated into the directory name).
    - epoch_stuff: [decay interval/step, total number of epochs].
    - class_weights: boolean used only in the directory name; the local
      name is later overwritten with the actual per-class weight array.
    - models_to_test: iterable of checkpoint epoch numbers to evaluate.

    NOTE(review): `res`, `im_resize`, `lr_p`, `final_model_file`, `mean_std`
    and `bgr` are computed but unused by the active code path — they feed
    only the commented-out alternatives left in place below.
    """
    out_dirs = []

    out_dir_meta = '../experiments/' + model_name + str(route_iter)
    num_epochs = epoch_stuff[1]
    epoch_start = 0
    # dec_after = ['exp',0.96,epoch_stuff[0],1e-6]
    # LR schedule encoding: must match the training run's directory name.
    if exp:
        dec_after = ['exp', 0.96, epoch_stuff[0], 1e-6]
    else:
        dec_after = ['step', epoch_stuff[0], 0.1]

    lr = lr  # no-op; kept as-is from the original
    im_resize = 110
    # 256
    im_size = 96
    # save_after = 1

    # Select dataset layout and number of AU classes.
    if disfa:
        dir_files = '../data/disfa'
        # type_data = 'train_test_10_6_method_110_gray_align'; n_classes = 10;
        type_data = 'train_test_8_au_all_method_110_gray_align'
        n_classes = 8
        pre_pend = 'disfa_' + type_data + '_'
        binarize = True
    else:
        dir_files = '../data/bp4d'
        type_data = 'train_test_files_110_gray_align'
        n_classes = 12
        pre_pend = 'bp4d_' + type_data + '_'
        binarize = False

    criterion = 'marginmulti'
    criterion_str = criterion

    init = False

    # Experiment directory suffix; hard-codes the 'flipCrop' augmentation tag.
    strs_append = '_' + '_'.join([
        str(val) for val in [
            'reconstruct', reconstruct, class_weights, 'flipCrop',
            criterion_str, init, 'wdecay', wdecay, num_epochs
        ] + dec_after + lr + ['lossweights'] + loss_weights
    ])

    # pre_pend = 'bp4d_110_'

    lr_p = lr[:]
    for split_num in folds:
        for model_num_curr in models_to_test:
            margin_params = None
            out_dir_train = os.path.join(
                out_dir_meta, pre_pend + str(split_num) + strs_append)
            final_model_file = os.path.join(
                out_dir_train, 'model_' + str(num_epochs - 1) + '.pt')

            # Only informational: evaluation runs regardless (the `continue`
            # that would skip existing results is commented out).
            if os.path.exists(
                    os.path.join(out_dir_train,
                                 'results_model_' + str(model_num_curr))):
                print 'exists', model_num_curr, split_num
                print out_dir_train
                # continue
            else:

                print 'does not exist', model_num_curr, split_num
                # print 'bp4d_train_test_files_110_gray_align_0_reconstruct_True_True_flipCrop_marginmulti_False_wdecay_0_20_exp_0.96_350_1e-06_0.001_0.001_0.001_lossweights_1.0_1.0'
                print out_dir_train
                # raw_input()

            # if os.path.exists(final_model_file):
            #     print 'skipping',final_model_file
            #     # raw_input()
            #     # continue
            # else:
            #     print 'not skipping', final_model_file
            #     # raw_input()
            #     # continue

            train_file = os.path.join(dir_files, type_data,
                                      'train_' + str(split_num) + '.txt')
            test_file = os.path.join(dir_files, type_data,
                                     'test_' + str(split_num) + '.txt')
            mean_file = os.path.join(dir_files, type_data,
                                     'train_' + str(split_num) + '_mean.png')
            std_file = os.path.join(dir_files, type_data,
                                    'train_' + str(split_num) + '_std.png')

            # train_file = os.path.join('../data/bp4d',type_data,'train_'+str(split_num)+'.txt')
            # test_file = os.path.join('../data/bp4d',type_data,'test_'+str(split_num)+'.txt')

            # NOTE(review): mean_std/bgr are only consumed by the
            # commented-out Bp4d_Dataset path below; the active code uses
            # the mean/std image files instead.
            if model_name.startswith('vgg'):
                mean_std = np.array([[93.5940, 104.7624, 129.1863],
                                     [1., 1., 1.]])  #bgr
                bgr = True
            else:
                # print 'ELSING'
                # mean_std = np.array([[129.1863,104.7624,93.5940],[1.,1.,1.]])
                mean_std = np.array([[0.485 * 255, 0.456 * 255, 0.406 * 255],
                                     [0.229 * 255, 0.224 * 255, 0.225 * 255]])
                # print mean_std
                # raw_input()
                bgr = False

            # print mean_std

            # mean_im = scipy.misc.imread(mean_file).astype(np.float32)
            # std_im = scipy.misc.imread(std_file).astype(np.float32)

            # NOTE: this intentionally overwrites the boolean `class_weights`
            # parameter with the actual per-class weights from the train split.
            class_weights = util.get_class_weights_au(
                util.readLinesFromFile(train_file))
            data_transforms = {}
            data_transforms['train'] = transforms.Compose([
                lambda x: augmenters.random_crop(x, im_size),
                lambda x: augmenters.horizontal_flip(x),
                transforms.ToTensor(),
                lambda x: x * 255,
            ])
            data_transforms['val'] = transforms.Compose([
                # transforms.ToPILImage(),
                # transforms.Resize((im_size,im_size)),
                # lambda x: augmenters.resize(x,im_size),
                transforms.ToTensor(),
                lambda x: x * 255,
            ])

            # data_transforms = {}
            # data_transforms['train']= transforms.Compose([
            #     transforms.ToPILImage(),
            #     # transforms.Resize((im_resize,im_resize)),
            #     transforms.RandomCrop(im_size),
            #     transforms.RandomHorizontalFlip(),
            #     transforms.RandomRotation(15),
            #     transforms.ColorJitter(),
            #     transforms.ToTensor(),
            #     lambda x: x*255,
            #     transforms.Normalize(mean_std[0,:],mean_std[1,:]),
            # ])
            # data_transforms['val']= transforms.Compose([
            #     transforms.ToPILImage(),
            #     transforms.Resize((im_size,im_size)),
            #     transforms.ToTensor(),
            #     lambda x: x*255,
            #     transforms.Normalize(mean_std[0,:],mean_std[1,:]),
            #     ])

            # print train_file
            # print test_file
            # train_data = dataset.Bp4d_Dataset(train_file, bgr = bgr, transform = data_transforms['train'])
            # test_data = dataset.Bp4d_Dataset(test_file, bgr = bgr, transform = data_transforms['val'])

            train_data = dataset.Bp4d_Dataset_Mean_Std_Im(
                train_file,
                mean_file,
                std_file,
                transform=data_transforms['train'],
                binarize=binarize)
            test_data = dataset.Bp4d_Dataset_Mean_Std_Im(
                test_file,
                mean_file,
                std_file,
                resize=im_size,
                transform=data_transforms['val'],
                binarize=binarize)

            network_params = dict(n_classes=n_classes,
                                  pool_type='max',
                                  r=route_iter,
                                  init=init,
                                  class_weights=class_weights,
                                  reconstruct=reconstruct,
                                  loss_weights=loss_weights)

            batch_size = 96
            batch_size_val = 96

            util.makedirs(out_dir_train)

            test_params = dict(out_dir_train=out_dir_train,
                               model_num=model_num_curr,
                               train_data=train_data,
                               test_data=test_data,
                               gpu_id=0,
                               model_name=model_name,
                               batch_size_val=batch_size_val,
                               criterion=criterion,
                               margin_params=margin_params,
                               network_params=network_params,
                               barebones=True)
            test_model_recon(**test_params)