def saveimage(f_image, image):
    """Write *image* to *f_image*, creating the destination folder first."""
    # Make sure the parent directory exists before cv2 tries to write there;
    # cv2.imwrite fails silently on a missing path.
    parent = io.parent_dir(f_image)
    io.mkdir(parent)
    cv2.imwrite(f_image, image)
from __future__ import print_function import pandas as pd import numpy as np import urllib.request import fiwtools.utils.io as io import os f_csv = "/Users/josephrobinson/Dropbox/Families_In_The_Wild/Database/FIW_PIDs.csv" dir_out = "/Users/josephrobinson/Dropbox/Families_In_The_Wild/Database/ImagesDB/" df = pd.read_csv(f_csv) fids = df.FID.unique() for fid in fids: ids = np.where(df.FID == fid)[0] dout = dir_out + fid + "/" io.mkdir(dout) for id in range(len(ids)): pid = df.PID[ids[id]] url = df.URL[ids[id]] fout = dout + pid + ".jpg" print(fout) path = urllib.request.urlretrieve(url) print(path) os.rename(path[0], fout) # print('2', path)
# --- Fragment: tail of a truncated pd.DataFrame({...}) construction; the
# --- opening of this definition lies outside the visible chunk (columns are
# --- keyed by `header`).
header[0]: arr_all_face[:, 0],
header[1]: arr_all_face[:, 1],
header[2]: arr_all_face[:, 2]
})
# arr_pairs = np.array(all_faces)
# # print('No. Face Pairs is {}.'.format(arr_pairs.shape[0]))
# return pd.DataFrame({"p1": arr_pairs[:, 0], "p2": arr_pairs[:, 1], "p3": arr_pairs[:, 2]})

# Script: parse tri-subject (two parents + child) pairs from the FIW family
# folders and optionally write them to CSV.
logger = log.setup_custom_logger(__name__, f_log='tri-info.log', level=log.INFO)

# Output folder for tri-pair lists (doubled slash is harmless on POSIX paths).
out_bin = io.sys_home() + "/Dropbox/Families_In_The_Wild/Database/tripairs//"
# NOTE(review): bare `mkdir` — elsewhere this codebase calls io.mkdir; confirm
# a top-level `mkdir` is actually in scope here.
mkdir(out_bin)
dir_fids = io.sys_home() + "/Dropbox/Families_In_The_Wild/Database/FIDs_New/"
do_save = False  # flip to True to write the fmd/fms pair CSVs below
logger.info("Parsing Tri-Subject Pairs:\n\t{}\n\t{}\n".format(
    out_bin, dir_fids))

# fmd = (father, mother, daughter) triples; fms = (father, mother, son) —
# presumably, based on the output filenames below; confirm in fiw.tri_subjects.
fmd, fms = fiw.tri_subjects(dir_data=dir_fids)

# NOTE(review): format string has one placeholder but two arguments, so
# dir_fids is silently dropped — likely meant "{} {}" or similar.
logger.info("{}".format(out_bin, dir_fids))
# pair_set.write_pairs(out_bin + "sibs-pairs.csv")
# df_all_faces.to_csv(out_bin + 'sibs-faces.csv', index=False)
if do_save:
    fiw.write_list_tri_pairs(out_bin + "fmd-pairs.csv", fmd)
    fiw.write_list_tri_pairs(out_bin + "fms-pairs.csv", fms)

print(len(fmd))
# Loop body continues past this chunk (truncated here).
for index in range(0, 5):
# --- Fragment: continuation of an argparse CLI; the parser construction and
# --- the opening of this add_argument(...) call are outside the visible chunk.
help='Run on cpu or gpu.')
parser.add_argument('-gpu', '--gpu_id', default=0)
parser.add_argument('--dims', default=200,
                    help="Dimension to reduce features (for --pca)")
parser.add_argument('--overwrite', action='store_true',
                    help="Overwrite existing files.")
args = parser.parse_args()

# Features are written under <output>/<layer>/, mirroring the input tree.
dout = os.path.join(args.output, args.layer) + "/"
logger.info("Output Directory: {}\nInput Image Directory: {}\n".format(
    args.output, args.input))
io.mkdir(dout)

# Caffe network used for feature extraction (do_init=True loads it now).
my_net = cw.CaffeWrapper(model_def=args.model_def, gpu_id=args.gpu_id,
                         mode=args.mode, model_weights=args.weights,
                         do_init=True)

# Family folders (FIDs) and every member (MID*) face image beneath them.
dirs_fid, fids = fiwdb.load_fids(args.input)
ifiles = glob.glob(args.input + "*/MID*/*.jpg")
# One output CSV per input image: swap the root directory and .jpg -> .csv.
ofiles = [
    dout + str(f).replace(args.input, "").replace(".jpg", ".csv")
    for f in ifiles
]
# layers = args.layers
for ifile in ifiles:
    # Statement truncated here; the call chain continues past this chunk.
    ofile = dout + str(ifile).replace(args.input, "").replace(
import os

import fiwtools.utils.io as io  # was missing: io.mkdir below otherwise hits the stdlib `io` module (no mkdir)
import fiwtools.utils.log as log
import src.frameworks.pycaffe.net_wrapper as cw
import src.frameworks.pycaffe.tools as caffe_tools

logger = log.setup_custom_logger(__name__, f_log='kinwildW-feat-extractor.log',
                                 level=log.INFO)

# if __name__ == "__main__":
# Caffe model definition/weights and KinFaceW-I data locations.
model_def = '/model/face_deploy.prototxt'
weights = '/model/face_train_test_iter_1600.caffemodel'
# NOTE(review): `input` shadows the builtin input(); kept as-is since code
# later in the file may reference it by this name.
input = '/data/KinFaceW-I/images/'
output = '/data/KinFaceW-I/features/fine-tuned/'
io.mkdir(output)

# Extraction settings.
mode = 'cpu'
dims = 200
layer = 'fc5'
types = ['father-dau', 'father-son', 'mother-dau', 'mother-son']
gpu_id = 0
overwrite = False

# Create one output folder per layer whose features will be dumped.
layers = ['fc5', 'conv5_5', 'conv5_4']
for l in layers:
    dout = output + "/" + l + "/"
    io.mkdir(dout)

logger.info("Output Directory: {}\nInput Image Directory: {}\n".format(output, input))
# NOTE(review): duplicates the mkdir inside the loop (dout is the last layer's
# folder at this point); this chunk may be truncated mid-script — confirm
# against the full file.
io.mkdir(dout)
import glob

import sklearn.metrics.pairwise as pw
from sklearn.metrics import roc_curve, auc
import fiwtools.utils.io as io
import fiwtools.data.kinwild as kinwild
import sklearn.preprocessing as skpreprocess
from sklearn.decomposition import TruncatedSVD

# 5-fold kinship-verification evaluation on KinFaceW-II fine-tuned features.
features = ['fc5']  # ''conv5_2', 'conv5_3', 'pool5', 'fc6', 'fc7']
sub_dirs = ['father-dau']  #, 'father-son', 'mother-dau', 'mother-son']
dir_root = '/media/jrobby/Seagate Backup Plus Drive1/DATA/Kinship/KinFaceW-II/'
dir_features = dir_root + '/features/fine-tuned/'
dir_results = dir_features + 'results_spca/'
io.mkdir(dir_results)
dir_perms = dir_root + 'perm/'
dir_lists = dir_root + 'meta_data/'

do_pca = True  # reduce features (TruncatedSVD imported above) before scoring
k = 200  # target dimensionality for the reduction

# load experimental settings for 5-fold verification
f_lists = glob.glob(dir_lists + "*.csv")
pair_types = [io.file_base(f) for f in f_lists]
dir_feats = [dir_features + p + "/" for p in sub_dirs]

fold_list = [1, 2, 3, 4, 5]
for ids in fold_list:
    # NOTE(review): values 1-5 index f_lists, so f_lists[0] is never read and
    # f_lists[5] would raise IndexError if meta_data/ holds exactly five CSVs —
    # looks like an off-by-one; confirm how many list files exist.
    # Loop body may continue past this chunk.
    folds, labels, pairs1, pairs2 = kinwild.read_pair_list(f_lists[ids])
import fiwtools.utils.io as io # from src.common.utilities import * import src.database.kinwild as kinwild import sklearn.preprocessing as skpreprocess import pandas as pd layers = ['conv5_2', 'conv5_3', 'pool5', 'fc6', 'fc7'] layers = ['res5a'] # lid = 4 # sub_dirs = ['father-dau', 'father-son', 'mother-dau', 'mother-son'] dir_root = io.sys_home() + '/Dropbox/Families_In_The_Wild/Database/journal_data/' dir_features = '/media/jrobby/Seagate Backup Plus Drive1/FIW_dataset/FIW_Extended/features/vgg_face/resnet/' dir_results = io.sys_home() + '/Dropbox/Families_In_The_Wild/Database/journal_results/verification/res_net/' io.mkdir(dir_results) dir_pairs = dir_root + "Pairs/folds_5splits/" # load experimental settings for 5-fold verification f_lists = glob.glob(dir_pairs + "*.csv") pair_types = [io.file_base(f).replace('-folds', '') for f in f_lists] # dir_feats = [dir_features + p + "/" for p in pair_types] import os for i in range(0, 11): df_list = pd.read_csv(f_lists[i]) pair_type = io.file_base(f_lists[i]).replace('-folds', '') labels = np.array(df_list.label) folds = np.array(df_list.fold) for layer in layers: