Example 1
def experiment_nnFullImage(params):
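    # Full-image nearest-neighbor experiment on Pascal3D:
    # build the image/file lists if they do not exist yet, read each object's
    # coarse azimuth from the annotations, extract CNN features for
    # params.layers with caffe_wrapper (unless already cached), and for every
    # layer write the cosine-distance nearest neighbors to the output database.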
    
    if params.out_file_pickle is None:
        out_file_pickle,out_file_text=setupFilesWithBigObjectForFeatureExtraction(params.db_path,params.class_id,params.threshold,params.out_file_pre,params.path_to_annotation,params.path_to_images)
        params=params._replace(out_file_pickle=out_file_pickle)
        params=params._replace(out_file_text=out_file_text)
    
    out_file_pickle=params.out_file_pickle;
    out_file_text=params.out_file_text;
    print out_file_pickle
    [img_paths_originals,img_paths_to_keep,class_id_idx_tuples]=pickle.load(open(out_file_pickle,'rb'));
    assert len(img_paths_originals)==len(class_id_idx_tuples)

    file_names_mat,object_indices=recreateOriginalPaths(params.path_to_annotation,img_paths_to_keep,returnObject_idx=True);
    print 'getting azimuths'
    azimuths=[getObjectStruct(file_name,object_idx).viewpoint.azimuth_coarse  for file_name,object_idx in zip(file_names_mat,object_indices)]
    
    if params.out_file_layers is None:
        print 'running layers part'
        out_file_layers=caffe_wrapper.saveFeaturesOfLayers(out_file_text,params.path_to_classify,params.gpu_no,params.layers,ext='jpg',out_file=params.out_file_pre,meanFile=params.caffe_mean,deployFile=params.caffe_deploy,modelFile=params.caffe_model)
        params=params._replace(out_file_layers=out_file_layers)
        
    out_file_layers=params.out_file_layers;

    print 'writing to db'
    for layer in params.layers:
        vals=np.load(out_file_layers);
        indices,distances=nearest_neighbor.doCosineDistanceNN(vals[layer],numberOfN=None);
        mani=Pascal3D_Manipulator(params.db_path_out);
        mani.openSession();
        for idx in range(len(img_paths_originals)):
            mani.insert(idx,img_paths_originals[idx],layer,out_file_layers,class_id_idx_tuples[idx][0],class_id_idx_tuples[idx][1],params.caffe_model, azimuth=azimuths[idx],neighbor_index=indices[idx],neighbor_distance=distances[idx],trainedClass=params.trainFlag)
        mani.closeSession();
    
    return params
Example 2
def experiment_nnPatches(params):
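    # Patch-level variant of the experiment: the input text file already lists
    # the patch images, the class id/index is parsed from each patch filename,
    # azimuths come from the Pascal3D annotations, and per-layer features and
    # cosine-distance nearest neighbors are written to the output database.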
    out_file_text=params.out_file_text;
    class_ids=params.class_id;

    img_paths=util.readLinesFromFile(out_file_text)
    class_id_idx_tuples=[];
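    # The class id is the token between the last two underscores in the patch
    # filename; look up its numeric index via params.class_id and params.class_idx.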
    for img_path in img_paths:
        class_id=img_path[:img_path.rindex('_')]
        class_id=class_id[class_id.rindex('_')+1:];
        class_idx=class_ids.index(class_id);
        class_idx=params.class_idx[class_idx];
        class_id_idx_tuples.append((class_id,class_idx));

    file_names_mat,object_indices=recreateOriginalPaths(params.path_to_annotation,img_paths,returnObject_idx=True);
    print 'getting azimuths'
    azimuths=[getObjectStruct(file_name,object_idx).viewpoint.azimuth_coarse  for file_name,object_idx in zip(file_names_mat,object_indices)]
    
    if params.out_file_layers is None:
        print 'running layers part'
        out_file_layers=caffe_wrapper.saveFeaturesOfLayers(out_file_text,params.path_to_classify,params.gpu_no,params.layers,ext='jpg',out_file=params.out_file_pre,meanFile=params.caffe_mean,deployFile=params.caffe_deploy,modelFile=params.caffe_model,images_dim=params.images_dim)
        params=params._replace(out_file_layers=out_file_layers)
        
    out_file_layers=params.out_file_layers;

    print 'writing to db'
    for layer in params.layers:
        vals=np.load(out_file_layers);
        indices,distances=nearest_neighbor.doCosineDistanceNN(vals[layer],numberOfN=None);
        mani=Pascal3D_Manipulator(params.db_path_out);
        mani.openSession();
        for idx in range(len(img_paths)):
            mani.insert(idx,img_paths[idx],layer,out_file_layers,class_id_idx_tuples[idx][0],class_id_idx_tuples[idx][1],params.caffe_model, azimuth=azimuths[idx],neighbor_index=indices[idx],neighbor_distance=distances[idx],trainedClass=params.trainFlag,commitFlag=False)
        mani.closeSession();

    return params
Example 3
def experiment_nnFullImageMixWithImagenet(params):
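    # Mixed ImageNet + Pascal3D experiment: build the combined image list and
    # per-image info (path, class id/index, azimuth) if missing, extract CNN
    # features for params.layers, then store the cosine-distance nearest
    # neighbors of every image in the output database.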
    
    if params.out_file_text is None:
        all_files_info=preprocessImagesFromImagenet(params.imagenet_ids_to_test,params.synset_words,params.val_gt_file,path_to_images_imagenet=params.path_to_images_imagenet)
        all_files_info=getImageInfoForMixTestFromPascal3d(params.db_path_in,params.class_id_pascal,all_files_info=all_files_info);
        out_file_pickle=params.out_file_pre+'.p';
        out_file_text=params.out_file_pre+'.txt';
        pickle.dump(all_files_info,open(out_file_pickle,'wb'));
        with open(out_file_text,'wb') as f:
            for dict_curr in all_files_info:
                f.write(dict_curr['img_path']+'\n');
        params=params._replace(out_file_pickle=out_file_pickle)
        params=params._replace(out_file_text=out_file_text)

    out_file_text=params.out_file_text;
    out_file_pickle=params.out_file_pickle;
    
    if params.out_file_layers is None:
        print 'running layers part'
        out_file_layers=caffe_wrapper.saveFeaturesOfLayers(out_file_text,params.path_to_classify,params.gpu_no,params.layers,ext=params.ext,out_file=params.out_file_pre,meanFile=params.caffe_mean,deployFile=params.caffe_deploy,modelFile=params.caffe_model)
        params=params._replace(out_file_layers=out_file_layers)
        
    out_file_layers=params.out_file_layers;
    all_files_info=pickle.load(open(out_file_pickle,'rb'));

    print 'writing to db'
    for layer in params.layers:
        vals=np.load(out_file_layers);
        indices,distances=nearest_neighbor.doCosineDistanceNN(vals[layer],numberOfN=None);
        mani=Pascal3D_Manipulator(params.db_path_out);
        mani.openSession();
        # for idx in range(len(img_paths_originals)):
        for idx,dict_curr in enumerate(all_files_info):
            mani.insert(idx,dict_curr['img_path'],layer,out_file_layers,dict_curr['class_id'],dict_curr['class_idx'],params.caffe_model, azimuth=dict_curr['azimuth'],neighbor_index=indices[idx],neighbor_distance=distances[idx],trainedClass=params.trainFlag)
        mani.closeSession();
    
    return params;
Example 4
def experiment_nnFullImage(params):

    if params.out_file_pickle is None:
        out_file_pickle, out_file_text = setupFilesWithBigObjectForFeatureExtraction(
            params.db_path, params.class_id, params.threshold,
            params.out_file_pre, params.path_to_annotation,
            params.path_to_images)
        params = params._replace(out_file_pickle=out_file_pickle)
        params = params._replace(out_file_text=out_file_text)

    out_file_pickle = params.out_file_pickle
    out_file_text = params.out_file_text
    print out_file_pickle
    [img_paths_originals, img_paths_to_keep,
     class_id_idx_tuples] = pickle.load(open(out_file_pickle, 'rb'))
    assert len(img_paths_originals) == len(class_id_idx_tuples)

    file_names_mat, object_indices = recreateOriginalPaths(
        params.path_to_annotation, img_paths_to_keep, returnObject_idx=True)
    print 'getting azimuths'
    azimuths = [
        getObjectStruct(file_name, object_idx).viewpoint.azimuth_coarse
        for file_name, object_idx in zip(file_names_mat, object_indices)
    ]

    if params.out_file_layers is None:
        print 'running layers part'
        out_file_layers = caffe_wrapper.saveFeaturesOfLayers(
            out_file_text,
            params.path_to_classify,
            params.gpu_no,
            params.layers,
            ext='jpg',
            out_file=params.out_file_pre,
            meanFile=params.caffe_mean,
            deployFile=params.caffe_deploy,
            modelFile=params.caffe_model)
        params = params._replace(out_file_layers=out_file_layers)

    out_file_layers = params.out_file_layers

    print 'writing to db'
    for layer in params.layers:
        vals = np.load(out_file_layers)
        indices, distances = nearest_neighbor.doCosineDistanceNN(
            vals[layer], numberOfN=None)
        mani = Pascal3D_Manipulator(params.db_path_out)
        mani.openSession()
        for idx in range(len(img_paths_originals)):
            mani.insert(idx,
                        img_paths_originals[idx],
                        layer,
                        out_file_layers,
                        class_id_idx_tuples[idx][0],
                        class_id_idx_tuples[idx][1],
                        params.caffe_model,
                        azimuth=azimuths[idx],
                        neighbor_index=indices[idx],
                        neighbor_distance=distances[idx],
                        trainedClass=params.trainFlag)
        mani.closeSession()

    return params
Example 5
def experiment_nnPatches(params):
    out_file_text = params.out_file_text
    class_ids = params.class_id

    img_paths = util.readLinesFromFile(out_file_text)
    class_id_idx_tuples = []
    for img_path in img_paths:
        class_id = img_path[:img_path.rindex('_')]
        class_id = class_id[class_id.rindex('_') + 1:]
        class_idx = class_ids.index(class_id)
        class_idx = params.class_idx[class_idx]
        class_id_idx_tuples.append((class_id, class_idx))

    file_names_mat, object_indices = recreateOriginalPaths(
        params.path_to_annotation, img_paths, returnObject_idx=True)
    print 'getting azimuths'
    azimuths = [
        getObjectStruct(file_name, object_idx).viewpoint.azimuth_coarse
        for file_name, object_idx in zip(file_names_mat, object_indices)
    ]

    if params.out_file_layers is None:
        print 'running layers part'
        out_file_layers = caffe_wrapper.saveFeaturesOfLayers(
            out_file_text,
            params.path_to_classify,
            params.gpu_no,
            params.layers,
            ext='jpg',
            out_file=params.out_file_pre,
            meanFile=params.caffe_mean,
            deployFile=params.caffe_deploy,
            modelFile=params.caffe_model,
            images_dim=params.images_dim)
        params = params._replace(out_file_layers=out_file_layers)

    out_file_layers = params.out_file_layers

    print 'writing to db'
    for layer in params.layers:
        vals = np.load(out_file_layers)
        indices, distances = nearest_neighbor.doCosineDistanceNN(
            vals[layer], numberOfN=None)
        mani = Pascal3D_Manipulator(params.db_path_out)
        mani.openSession()
        for idx in range(len(img_paths)):
            mani.insert(idx,
                        img_paths[idx],
                        layer,
                        out_file_layers,
                        class_id_idx_tuples[idx][0],
                        class_id_idx_tuples[idx][1],
                        params.caffe_model,
                        azimuth=azimuths[idx],
                        neighbor_index=indices[idx],
                        neighbor_distance=distances[idx],
                        trainedClass=params.trainFlag,
                        commitFlag=False)
        mani.closeSession()

    return params
Example 6
def experiment_nnFullImageMixWithImagenet(params):

    if params.out_file_text is None:
        all_files_info = preprocessImagesFromImagenet(
            params.imagenet_ids_to_test,
            params.synset_words,
            params.val_gt_file,
            path_to_images_imagenet=params.path_to_images_imagenet)
        all_files_info = getImageInfoForMixTestFromPascal3d(
            params.db_path_in,
            params.class_id_pascal,
            all_files_info=all_files_info)
        out_file_pickle = params.out_file_pre + '.p'
        out_file_text = params.out_file_pre + '.txt'
        pickle.dump(all_files_info, open(out_file_pickle, 'wb'))
        with open(out_file_text, 'wb') as f:
            for dict_curr in all_files_info:
                f.write(dict_curr['img_path'] + '\n')
        params = params._replace(out_file_pickle=out_file_pickle)
        params = params._replace(out_file_text=out_file_text)

    out_file_text = params.out_file_text
    out_file_pickle = params.out_file_pickle

    if params.out_file_layers is None:
        print 'running layers part'
        out_file_layers = caffe_wrapper.saveFeaturesOfLayers(
            out_file_text,
            params.path_to_classify,
            params.gpu_no,
            params.layers,
            ext=params.ext,
            out_file=params.out_file_pre,
            meanFile=params.caffe_mean,
            deployFile=params.caffe_deploy,
            modelFile=params.caffe_model)
        params = params._replace(out_file_layers=out_file_layers)

    out_file_layers = params.out_file_layers
    all_files_info = pickle.load(open(out_file_pickle, 'rb'))

    print 'writing to db'
    for layer in params.layers:
        vals = np.load(out_file_layers)
        indices, distances = nearest_neighbor.doCosineDistanceNN(
            vals[layer], numberOfN=None)
        mani = Pascal3D_Manipulator(params.db_path_out)
        mani.openSession()
        # for idx in range(len(img_paths_originals)):
        for idx, dict_curr in enumerate(all_files_info):
            mani.insert(idx,
                        dict_curr['img_path'],
                        layer,
                        out_file_layers,
                        dict_curr['class_id'],
                        dict_curr['class_idx'],
                        params.caffe_model,
                        azimuth=dict_curr['azimuth'],
                        neighbor_index=indices[idx],
                        neighbor_distance=distances[idx],
                        trainedClass=params.trainFlag)
        mani.closeSession()

    return params
Example 7
def script_temp():
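    # One-off script: extract pool5/fc6/fc7 features for a fixed list of
    # ImageNet validation images with the Caffe model configured below,
    # compute cosine-distance nearest neighbors per layer, and record the
    # results in the ImageNet results database.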

    path_to_val = "/disk2/imagenet/val"
    ext = "JPEG"

    out_dir = "/disk2/novemberExperiments/nn_imagenet_try"
    if not os.path.exists(out_dir):
        os.mkdir(out_dir)

    in_file_pre = "list_of_ims_for_nn"
    in_file_pre = os.path.join(out_dir, in_file_pre)

    path_to_classify = ".."
    trainFlag = False
    # caffe_model='/home/maheenrashid/Downloads/caffe/caffe-rc2/models/bvlc_reference_caffenet/bvlc_reference_caffenet.caffemodel';
    caffe_model = "/disk2/octoberExperiments/nn_performance_without_pascal/snapshot_iter_450000.caffemodel"
    caffe_deploy = "/disk2/octoberExperiments/nn_performance_without_pascal/deploy.prototxt"
    caffe_mean = "/disk2/octoberExperiments/nn_performance_without_pascal/mean.npy"
    gpu_no = 0
    layers = ["pool5", "fc6", "fc7"]
    out_file = "nn_non_trained"
    out_file = os.path.join(out_dir, out_file)

    db_path_out = "sqlite://///disk2/novemberExperiments/nn_imagenet/nn_imagenet.db"

    synset_words = "../../data/ilsvrc12/synset_words.txt"
    val_gt_file = "../../data/ilsvrc12/val.txt"

    idx_chosen = pickle.load(open("/disk2/novemberExperiments/nn_imagenet/equal_mix_ids.p", "rb"))

    im_files_gt_classes = imagenet.selectTestSetByID(val_gt_file, idx_chosen, path_to_val=path_to_val)
    im_files = list(zip(*im_files_gt_classes)[0])
    gt_classes = list(zip(*im_files_gt_classes)[1])
    print len(im_files)
    print len(gt_classes)
    print len(set(gt_classes))
    per_file = len(im_files)

    # in_files,_=writeInputImageFiles(im_files,in_file_pre,per_file);
    in_files = [in_file_pre + "_" + str(0) + ".txt"]
    print in_files
    out_files = []
    for idx, in_file_curr in enumerate(in_files):
        out_file_curr = out_file + "_" + str(idx)
        out_files.append(
            caffe_wrapper.saveFeaturesOfLayers(
                in_file_curr,
                path_to_classify,
                gpu_no,
                layers,
                ext=ext,
                out_file=out_file_curr,
                meanFile=caffe_mean,
                deployFile=caffe_deploy,
                modelFile=caffe_model,
            )
        )

    print in_files
    print out_files

    file_list_all = []
    for in_file_curr in in_files:
        with open(in_file_curr, "rb") as f:
            file_list = f.readlines()
            file_list_all.extend([file_curr.strip("\n") for file_curr in file_list])
    print len(file_list_all)

    imagenet_idx_mapped, imagenet_ids_mapped, imagenet_labels_mapped = imagenet.getMappingInfo(
        file_list_all, synset_words, val_gt_file
    )

    print "about to combine"
    t = time.time()
    val_combined = combineDeepFeaturesFromFiles(out_files, layers)
    print time.time() - t

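    # For each layer, compute cosine-distance nearest neighbors over the
    # combined features and insert every image's neighbor indices/distances
    # into the database.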
    for layer_curr in layers:
        print "about to nn for ", layer_curr
        t = time.time()
        indices, distances = nearest_neighbor.doCosineDistanceNN(val_combined[layer_curr], numberOfN=None)
        print time.time() - t
        #     break;
        # return

        print indices.shape
        print distances.shape

        print "writing to db"
        mani = Imagenet_Manipulator(db_path_out)
        mani.openSession()
        for idx in range(len(file_list_all)):
            if idx % 100 == 0:
                print layer_curr, idx, len(file_list_all)
            idx_out_file = idx / per_file
            out_file_layers = out_file + "_" + str(idx_out_file) + ".npz"

            mani.insert(
                idx,
                file_list_all[idx],
                layer_curr,
                out_file_layers,
                trainFlag,
                imagenet_idx_mapped[idx],
                imagenet_ids_mapped[idx],
                caffe_model,
                class_label_imagenet=imagenet_labels_mapped[idx],
                neighbor_index=indices[idx],
                neighbor_distance=distances[idx],
            )

        mani.closeSession()