def extract_relation_feats_one_scene(scene_fn, AF, json_dir, metafeat_dir, overwrite=False):
    """Extract relation metafeatures for one scene JSON and cache them to disk.

    Args:
        scene_fn: Path to the scene JSON file.
        AF: AbstractFeatures instance (provides GMM sizes, scale/z config,
            and the extraction methods).
        json_dir: Root JSON directory; stripped from scene_fn to build the
            cache filename.
        metafeat_dir: Directory where the pickled metafeatures are written.
        overwrite: When True, recompute even if the cache file exists.

    Fix: removed a leftover, unconditional pdb.set_trace() (which halted
    every invocation) and the surrounding debug-only nonzero-feature count,
    whose result was never used.
    """
    af.dir_path(metafeat_dir)  # ensure the output directory exists
    filename, file_extension = os.path.splitext(scene_fn[len(json_dir):])
    ext = "cpickle"
    cur_feat_name = ("{}_relationFeats_{}"
                     "_gmmAbsK-{:02d}_gmmRelK-{:02d}"
                     "_{}_instances-{}.{}").format(
        filename, AF.scale_str, AF.gmm_abs_k, AF.gmm_rel_k,
        AF.z_scalar_str, AF.instance_ordering, ext)
    cur_feat_fn = os.path.join(metafeat_dir, cur_feat_name)
    # scene_type = "Living" would only consider objects that appear in the
    # Living Room scene; None considers objects that appear in any scene.
    scene_type = None
    if not os.path.isfile(cur_feat_fn) or overwrite == True:
        print("Extracting features for {}".format(scene_fn))
        with open(scene_fn, "rb") as jf:
            cur_scene = json.load(jf)
        cur_metafeats = AF.extract_one_scene_relation_feats(cur_scene, scene_type)
        # TODO Save as cross-language-compatible format
        with open(cur_feat_fn, "wb") as cur_feat_fp:
            cPickle.dump(cur_metafeats, cur_feat_fp)
def extract_feats_one_scene(scene_fn, AF, json_dir, metafeat_dir, overwrite=False):
    """Compute the features for a single scene JSON file and pickle them.

    Skips the scene when the cached output already exists, unless
    ``overwrite`` is set.
    """
    af.dir_path(metafeat_dir)  # make sure the cache directory exists
    rel_path = scene_fn[len(json_dir):]
    base_name, _ = os.path.splitext(rel_path)
    out_name = ('{}_{}'
                '_gmmAbsK-{:02d}_gmmRelK-{:02d}'
                '_{}_instances-{}.{}').format(
        base_name, AF.scale_str, AF.gmm_abs_k, AF.gmm_rel_k,
        AF.z_scalar_str, AF.instance_ordering, 'cpickle')
    out_fn = os.path.join(metafeat_dir, out_name)
    if not os.path.isfile(out_fn) or overwrite == True:
        print('Extracting features for {}'.format(scene_fn))
        with open(scene_fn, 'rb') as jf:
            scene = json.load(jf)
        features = AF.extract_one_scene_feats(scene)
        # TODO Save as cross-language-compatible format
        with open(out_fn, 'wb') as out_fp:
            cPickle.dump(features, out_fp)
def extract_feats_one_scene(scene_fn, AF, json_dir, metafeat_dir, overwrite=False):
    """Extract and cache abstract-scene features for one scene JSON file."""
    af.dir_path(metafeat_dir)  # create the output directory if needed
    filename = os.path.splitext(scene_fn[len(json_dir):])[0]
    ext = 'cpickle'
    cur_feat_name = ('{}_{}'
                     '_gmmAbsK-{:02d}_gmmRelK-{:02d}'
                     '_{}_instances-{}.{}').format(
        filename, AF.scale_str, AF.gmm_abs_k, AF.gmm_rel_k,
        AF.z_scalar_str, AF.instance_ordering, ext)
    cur_feat_fn = os.path.join(metafeat_dir, cur_feat_name)
    # Guard clause: nothing to do when the cache exists and no overwrite.
    if os.path.isfile(cur_feat_fn) and overwrite != True:
        return
    print('Extracting features for {}'.format(scene_fn))
    with open(scene_fn, 'rb') as jf:
        cur_scene = json.load(jf)
    cur_features = AF.extract_one_scene_feats(cur_scene)
    # TODO Save as cross-language-compatible format
    with open(cur_feat_fn, 'wb') as cur_feat_fp:
        cPickle.dump(cur_features, cur_feat_fp)
def get_image_feat(feat_type, image_folder, orig_split, indices, real_split):
    """Return image-derived features for the requested scene indices.

    Args:
        feat_type: Selector string. Contains 'fc7' for VGG-19 fc7 features,
            'hdf5' to dump raw images into an HDF5 file, otherwise
            abstract-scene metafeatures are loaded from cached pickles.
        image_folder: Root folder holding the scene images / scene JSONs.
        orig_split: Original dataset split name (used in path patterns).
        indices: Iterable of scene ids to process.
        real_split: Split name used for the HDF5 output filename.

    Returns:
        dict mapping index -> features, except for the 'hdf5' branch which
        returns True on success and False on failure.

    Fixes: narrowed a bare ``except:`` (it swallowed everything, including
    KeyboardInterrupt, and hid the actual error), removed a redundant
    ``close()`` inside a ``with`` block, closed the af_dump pickle handle
    via ``with`` and opened it in binary mode, and switched the Python-2
    print statements to ``print()`` calls for consistency with the rest of
    the file.
    """
    feats = defaultdict(int)
    prefix = 'abstract_v002_%s2015_' % (orig_split)
    if 'fc7' in feat_type:
        # set some parameters
        folder = os.path.join(image_folder, 'scene_img',
                              'img_%s2015' % (orig_split)) + '/'
        print('Preparing the VGG 19 Net')
        net = demo.build_convnet()
        print('Extracting Features')
        # Write the image-filename list consumed by the batch extractor.
        with open('temp_{}.txt'.format(orig_split), 'w') as image_file:
            for item in tqdm(indices):
                image_file.write(imname(prefix, item) + '\n')
        feats = demo.compute_fromfile(net, 'temp_{}.txt'.format(orig_split),
                                      base_path=folder)
    elif 'hdf5' in feat_type:
        try:
            folder = os.path.join(image_folder, 'scene_img',
                                  'img_%s2015' % (orig_split)) + '/'
            images = np.zeros((len(indices), 3, 224, 224))
            # TODO: Low Priority, make general
            for index, item in tqdm(enumerate(indices)):
                images[index] = demo.load_abstract_image(
                    folder + imname(prefix, item))
            with h5py.File('/ssd_local/rama/datasets/abstract-hdf5/{}.h5'.format(real_split), 'w') as outfile:
                outfile['images'] = images
            return True
        except Exception as e:
            # Best-effort branch: report the actual failure instead of
            # silently printing a bare "problem".
            print('problem: {}'.format(e))
            return False
    else:
        folder = os.path.join(image_folder, 'scene_json',
                              'scene_%s2015_indv' % (orig_split))
        # create the abstract feature instance (binary mode; close via with)
        with open('extract_features/af_dump.p', 'rb') as af_fp:
            AF = pickle.load(af_fp)
        # TODO: Figure out a better place to initialize all this
        out_dir = '/srv/share/vqa/release_data/abstract_v002/scene_json/features_v2/'
        keep_or_remove = 'keep'
        get_names = False
        tags = feat_type
        # path to metafeature directory
        metafeat_dir = af.dir_path(os.path.join(out_dir, 'metafeatures'))
        for item in tqdm(indices):
            metafeat_fn = '{}_instances-{}.cpickle'.format(
                item, AF.instance_ordering)
            cur_metafeat_fn = os.path.join(metafeat_dir, metafeat_fn)
            with open(cur_metafeat_fn, 'rb') as fp:
                cur_metafeats = pickle.load(fp)
            cur_feats, _ = AF.scene_metafeatures_to_features(
                cur_metafeats, tags, keep_or_remove, get_names)
            feats[item] = cur_feats
    return feats
def extract_features_one_scene(scene_fn, AF, json_dir, metafeat_dir, overwrite=False):
    """Compute features for one scene JSON and pickle them into metafeat_dir."""
    af.dir_path(metafeat_dir)  # ensure the output directory exists
    base, _ = os.path.splitext(scene_fn[len(json_dir):])
    out_path = os.path.join(
        metafeat_dir,
        '{}_instances-{}.cpickle'.format(base, AF.instance_ordering))
    needs_work = not os.path.isfile(out_path) or overwrite == True
    if needs_work:
        print('Extracting features for {}'.format(scene_fn))
        with open(scene_fn, 'rb') as jf:
            scene = json.load(jf)
        features = AF.extract_one_scene_features(scene)
        # TODO Save as cross-language-compatible format
        with open(out_path, 'wb') as fp:
            cPickle.dump(features, fp)
def extract_relation_feats_one_scene(scene_fn, AF, json_dir, metafeat_dir, overwrite=False):
    """Extract relation metafeatures for a single scene JSON and pickle them.

    Args:
        scene_fn: Path to the scene JSON file.
        AF: AbstractFeatures instance supplying GMM/scale/z configuration
            and the extraction method.
        json_dir: Root JSON directory; stripped from scene_fn when building
            the output filename.
        metafeat_dir: Directory for the pickled output.
        overwrite: When True, recompute even if the output already exists.

    Fix: deleted a stray, unconditional ``pdb.set_trace()`` breakpoint and
    the debug-only loop that counted and printed nonzero features (its
    result was never used).
    """
    af.dir_path(metafeat_dir)  # create the output directory if needed
    filename, file_extension = os.path.splitext(scene_fn[len(json_dir):])
    ext = 'cpickle'
    cur_feat_name = ('{}_relationFeats_{}'
                     '_gmmAbsK-{:02d}_gmmRelK-{:02d}'
                     '_{}_instances-{}.{}').format(
        filename, AF.scale_str, AF.gmm_abs_k, AF.gmm_rel_k,
        AF.z_scalar_str, AF.instance_ordering, ext)
    cur_feat_fn = os.path.join(metafeat_dir, cur_feat_name)
    # scene_type = 'Living' would restrict to Living Room objects;
    # None considers objects that appear in any scene.
    scene_type = None
    if not os.path.isfile(cur_feat_fn) or overwrite == True:
        print('Extracting features for {}'.format(scene_fn))
        with open(scene_fn, 'rb') as jf:
            cur_scene = json.load(jf)
        cur_metafeats = AF.extract_one_scene_relation_feats(
            cur_scene, scene_type)
        # TODO Save as cross-language-compatible format
        with open(cur_feat_fn, 'wb') as cur_feat_fp:
            cPickle.dump(cur_metafeats, cur_feat_fp)
def main():
    """
    Usage:
        abstract_features_helper.py create_gmms <jsondir> <outdir> [--overwrite --configdir=CD --scaled=SB --absK=K --relK=K]
        abstract_features_helper.py extract_features <jsondir> <outdir> [--overwrite --configdir=CD --instord=IO --scaled=SB --absK=K --relK=K --zScalar=ZB]
        abstract_features_helper.py extract_relation_features <jsondir> <outdir> [--overwrite --configdir=CD --instord=IO --scaled=SB --absK=K --relK=K --zScalar=ZB]
        abstract_features_helper.py extract_features_parallel <jsondir> <outdir> <num_jobs> [--relation --overwrite --configdir=CD --instord=IO --scaled=SB --absK=K --relK=K --zScalar=ZB]
        abstract_features_helper.py create_feat_matrix <jsondir> <outdir> <featname> [--relation --overwrite --configdir=CD --instord=IO --scaled=SB --absK=K --relK=K --zScalar=ZB]
        abstract_features_helper.py clipart_library [--configdir=CD]

    Options:
        <jsondir>        Directory to scene JSON files to run on
        <outdir>         Directory to put the processed files
        <featname>       Base filename of the feature file
        --overwrite      Overwrite files even if they exist
        --configdir=CD   Path to the config data files (contains all object data) [default: USE_DEF]
        --instord=IO     Ordering of the instances for feature extraction, one of: original, random, from_center [default: random]
        --scaled=SB      Scale the x/y coordinates to (0,1) [default: True]
        --absK=K         Number of GMMs for absolute location [default: 9]
        --relK=K         Number of GMMs for relative location [default: 24]
        --zScalar=ZB     Should z/depth be scalar or one-hot [default: False]
        --relation       Creates relation feature-based matrix
    """
    # USE_DEF for --config_dir is /srv/share/abstract_scenes_v002/site_data/
    import docopt, textwrap

    opts = docopt.docopt(textwrap.dedent(main.__doc__))
    print("")
    print(opts)
    print("")

    if opts["--configdir"] == "USE_DEF":
        config_folder = "/srv/share/abstract_scenes_v002/site_data/"
    else:
        config_folder = opts["--configdir"]

    if opts["clipart_library"]:
        AF = af.AbstractFeatures(config_folder)
        # Report library sizes per scene type, then for the union of all.
        for st in ("Living", "Park", None):
            get_num_objects_in_clipart_library(AF, scene_type=st)
    else:
        if opts["create_gmms"]:
            # Ordering and z encoding are irrelevant when only fitting GMMs.
            instance_ordering = None
            z_scalar = False  # Doesn't matter here...
        else:
            # instance_orders = ['original', 'random', 'from_center']
            instance_ordering = opts["--instord"]
            z_scalar = opts["--zScalar"] == "True"

        overwrite = opts["--overwrite"]
        json_dir = opts["<jsondir>"]
        out_dir = af.dir_path(opts["<outdir>"])
        gmm_dir = af.dir_path(os.path.join(out_dir, "gmms"))
        metafeat_dir = af.dir_path(os.path.join(out_dir, "metafeatures"))
        feat_dir = af.dir_path(os.path.join(out_dir, "features"))

        AF = af.AbstractFeatures(
            config_folder,
            instance_ordering=instance_ordering,
            coords_occur_fn=os.path.join(gmm_dir, "coords_occur.npy"),
            coords_cooccur_fn=os.path.join(gmm_dir, "coords_cooccur.npy"),
            gmm_abs_pos_fn=os.path.join(gmm_dir, "gmm_abs_pos.npy"),
            gmm_rel_pos_fn=os.path.join(gmm_dir, "gmm_rel_pos.npy"),
            scale_pos=opts["--scaled"] == "True",
            z_scalar=z_scalar,
            gmm_abs_k=int(opts["--absK"]),
            gmm_rel_k=int(opts["--relK"]),
        )

        if opts["create_gmms"]:
            all_scene_fns = glob.glob(os.path.join(json_dir, "*.json"))
            AF.create_gmms_models(all_scene_fns, overwrite=overwrite)
        elif opts["extract_features"]:
            extract_feats(AF, json_dir, metafeat_dir, overwrite=overwrite)
        elif opts["extract_relation_features"]:
            extract_relation_feats(AF, json_dir, metafeat_dir,
                                   overwrite=overwrite)
        elif opts["extract_features_parallel"]:
            extract_feats_parallel(AF, json_dir, metafeat_dir,
                                   overwrite=overwrite,
                                   num_jobs=int(opts["<num_jobs>"]),
                                   relation=opts["--relation"])
        elif opts["create_feat_matrix"]:
            create_feat_matrix(AF, json_dir, metafeat_dir, feat_dir,
                               opts["<featname>"],
                               relation=opts["--relation"],
                               overwrite=overwrite)
        else:
            print("Not a valid command.")
def main():
    '''
    Usage:
        abstract_features_helper.py create_gmms <jsondir> <outdir> [--overwrite --configdir=CD --scaled=SB --absK=K --relK=K]
        abstract_features_helper.py extract_features <jsondir> <outdir> [--overwrite --configdir=CD --instord=IO --scaled=SB --absK=K --relK=K --zScalar=ZB]
        abstract_features_helper.py extract_relation_features <jsondir> <outdir> [--overwrite --configdir=CD --instord=IO --scaled=SB --absK=K --relK=K --zScalar=ZB]
        abstract_features_helper.py extract_features_parallel <jsondir> <outdir> <num_jobs> [--relation --overwrite --configdir=CD --instord=IO --scaled=SB --absK=K --relK=K --zScalar=ZB]
        abstract_features_helper.py create_feat_matrix <jsondir> <outdir> <featname> [--relation --overwrite --configdir=CD --instord=IO --scaled=SB --absK=K --relK=K --zScalar=ZB]
        abstract_features_helper.py clipart_library [--configdir=CD]
        abstract_features_helper.py annotations <jsondir> <outdir> [--overwrite --configdir=CD]

    Options:
        <jsondir>        Directory to scene JSON files to run on
        <outdir>         Directory to put the processed files
        <featname>       Base filename of the feature file
        --overwrite      Overwrite files even if they exist
        --configdir=CD   Path to the config data files (contains all object data) [default: USE_DEF]
        --instord=IO     Ordering of the instances for feature extraction, one of: original, random, from_center [default: random]
        --scaled=SB      Scale the x/y coordinates to (0,1) [default: True]
        --absK=K         Number of GMMs for absolute location [default: 9]
        --relK=K         Number of GMMs for relative location [default: 24]
        --zScalar=ZB     Should z/depth be scalar or one-hot [default: False]
        --relation       Creates relation feature-based matrix
    '''
    # USE_DEF for --config_dir is /srv/share/abstract_scenes_v002/site_data/
    import docopt, textwrap

    opts = docopt.docopt(textwrap.dedent(main.__doc__))
    print('')
    print(opts)
    print('')

    config_folder = ('/srv/share/abstract_scenes_v002/site_data/'
                     if opts['--configdir'] == 'USE_DEF'
                     else opts['--configdir'])

    if opts['clipart_library']:
        AF = af.AbstractFeatures(config_folder)
        # Report library sizes for each scene type and for the union.
        for st in ('Living', 'Park', None):
            get_num_objects_in_clipart_library(AF, scene_type=st)
    elif opts['annotations']:
        AF = af.AbstractFeatures(config_folder)
        af.dir_path(opts['<outdir>'])  # make sure the output dir exists
        extract_annotations(AF, opts['<jsondir>'], opts['<outdir>'],
                            opts['--overwrite'])
    else:
        if opts['create_gmms']:
            # Ordering and z encoding are irrelevant when only fitting GMMs.
            instance_ordering = None
            z_scalar = False  # Doesn't matter here...
        else:
            # instance_orders = ['original', 'random', 'from_center']
            instance_ordering = opts['--instord']
            z_scalar = opts['--zScalar'] == 'True'

        overwrite = opts['--overwrite']
        json_dir = opts['<jsondir>']
        out_dir = af.dir_path(opts['<outdir>'])
        gmm_dir = af.dir_path(os.path.join(out_dir, 'gmms'))
        metafeat_dir = af.dir_path(os.path.join(out_dir, 'metafeatures'))
        feat_dir = af.dir_path(os.path.join(out_dir, 'features'))

        AF = af.AbstractFeatures(
            config_folder,
            instance_ordering=instance_ordering,
            coords_occur_fn=os.path.join(gmm_dir, 'coords_occur.npy'),
            coords_cooccur_fn=os.path.join(gmm_dir, 'coords_cooccur.npy'),
            gmm_abs_pos_fn=os.path.join(gmm_dir, 'gmm_abs_pos.npy'),
            gmm_rel_pos_fn=os.path.join(gmm_dir, 'gmm_rel_pos.npy'),
            scale_pos=opts['--scaled'] == 'True',
            z_scalar=z_scalar,
            gmm_abs_k=int(opts['--absK']),
            gmm_rel_k=int(opts['--relK']))

        if opts['create_gmms']:
            all_scene_fns = glob.glob(os.path.join(json_dir, '*.json'))
            AF.create_gmms_models(all_scene_fns, overwrite=overwrite)
        elif opts['extract_features']:
            extract_feats(AF, json_dir, metafeat_dir, overwrite=overwrite)
        elif opts['extract_relation_features']:
            extract_relation_feats(AF, json_dir, metafeat_dir,
                                   overwrite=overwrite)
        elif opts['extract_features_parallel']:
            extract_feats_parallel(AF, json_dir, metafeat_dir,
                                   overwrite=overwrite,
                                   num_jobs=int(opts['<num_jobs>']),
                                   relation=opts['--relation'])
        elif opts['create_feat_matrix']:
            create_feat_matrix(AF, json_dir, metafeat_dir, feat_dir,
                               opts['<featname>'],
                               relation=opts['--relation'],
                               overwrite=overwrite)
        else:
            print("Not a valid command.")
def main():
    '''
    Usage:
        abstract_features_helper.py create_gmms <jsondir> <outdir> [--overwrite --configdir=CD]
        abstract_features_helper.py extract_features <jsondir> <outdir> [--overwrite --configdir=CD --instord=IO]
        abstract_features_helper.py extract_features_parallel <jsondir> <outdir> <num_jobs> [--overwrite --configdir=CD --instord=IO]
        abstract_features_helper.py create_feat_matrix <jsondir> <outdir> <featname> [--overwrite --configdir=CD --instord=IO]
        abstract_features_helper.py clipart_library [--configdir=CD]

    Options:
        <jsondir>        Directory to scene JSON files to run on
        <outdir>         Directory to put the processed files
        <featname>       Base filename of the feature file
        --overwrite      Overwrite files even if they exist
        --configdir=CD   Path to the config data files (contains all object data) [default: USE_DEF]
        --instord=IO     Ordering of the instances for feature extraction, one of: original, random, from_center [default: random]
    '''
    # USE_DEF for --config_dir is /srv/share/abstract_scenes_v002/site_data/
    import docopt, textwrap

    opts = docopt.docopt(textwrap.dedent(main.__doc__))
    print('')
    print(opts)
    print('')

    config_folder = ('/srv/share/abstract_scenes_v002/site_data/'
                     if opts['--configdir'] == 'USE_DEF'
                     else opts['--configdir'])

    if opts['clipart_library']:
        AF = af.AbstractFeatures(config_folder)
        # Report library sizes for each scene type and for the union.
        for st in ('Living', 'Park', None):
            get_num_objects_in_clipart_library(AF, scene_type=st)
    else:
        # Instance ordering is irrelevant when only fitting the GMMs.
        # instance_orders = ['original', 'random', 'from_center']
        instance_ordering = (None if opts['create_gmms']
                             else opts['--instord'])

        overwrite = opts['--overwrite']
        json_dir = opts['<jsondir>']
        out_dir = af.dir_path(opts['<outdir>'])
        gmm_dir = af.dir_path(os.path.join(out_dir, 'gmms'))
        metafeat_dir = af.dir_path(os.path.join(out_dir, 'metafeatures'))
        feat_dir = af.dir_path(os.path.join(out_dir, 'features'))

        AF = af.AbstractFeatures(
            config_folder,
            instance_ordering=instance_ordering,
            coords_occur_fn=os.path.join(gmm_dir, 'coords_occur.npy'),
            coords_cooccur_fn=os.path.join(gmm_dir, 'coords_cooccur.npy'),
            gmm_abs_pos_fn=os.path.join(gmm_dir, 'gmm_abs_pos.npy'),
            gmm_rel_pos_fn=os.path.join(gmm_dir, 'gmm_rel_pos.npy'))

        if opts['create_gmms']:
            all_scene_fns = glob.glob(os.path.join(json_dir, '*.json'))
            AF.create_gmms_models(all_scene_fns, overwrite=overwrite)
        elif opts['extract_features']:
            extract_features(AF, json_dir, metafeat_dir, overwrite=overwrite)
        elif opts['extract_features_parallel']:
            extract_features_parallel(AF, json_dir, metafeat_dir,
                                      overwrite=overwrite,
                                      num_jobs=int(opts['<num_jobs>']))
        elif opts['create_feat_matrix']:
            create_feature_matrix(AF, json_dir, metafeat_dir, feat_dir,
                                  opts['<featname>'], overwrite=overwrite)
        else:
            print("Not a valid command.")