# Pairwise feature extraction: for each (query, reference, label) entry,
# push both images through the network and grab 'pool5/7x7_s1' features.
# NOTE(review): this chunk was recovered from a whitespace-mangled paste;
# the original `except` clause of the try below is not visible here.
src.reshape(FEATURE_JITTER, 3, MODEL_INPUT_SIZE[0], MODEL_INPUT_SIZE[1])
for n, fname in enumerate(entries):
    try:
        query_filename = fname[0]
        ref_filename = fname[1]
        label = int(fname[2])
        inputs_q = utils.load_image(os.path.join(prefix, query_filename))
        inputs_ref = utils.load_image(os.path.join(prefix, ref_filename))
        if FEATURE_JITTER == 10:
            # 10-crop oversampling path: resize to the model's original
            # input size first, then let utils.oversample take the crops.
            inputs_q = inputs_q.resize(MODEL_ORIGINAL_INPUT_SIZE, PIL.Image.ANTIALIAS)
            inputs_ref = inputs_ref.resize(MODEL_ORIGINAL_INPUT_SIZE, PIL.Image.ANTIALIAS)
            inputs_q = utils.preprocess(net, inputs_q)
            inputs_ref = utils.preprocess(net, inputs_ref)
            inputs_q = utils.oversample(inputs_q, MODEL_INPUT_SIZE)
            inputs_ref = utils.oversample(inputs_ref, MODEL_INPUT_SIZE)
        else:
            # Single-crop path: resize straight to the network input size
            # and add a leading batch axis.
            inputs_q = inputs_q.resize(MODEL_INPUT_SIZE, PIL.Image.ANTIALIAS)
            inputs_ref = inputs_ref.resize(MODEL_INPUT_SIZE, PIL.Image.ANTIALIAS)
            inputs_q = utils.preprocess(net, inputs_q)
            inputs_ref = utils.preprocess(net, inputs_ref)
            inputs_q = inputs_q[np.newaxis, :, :, :]
            inputs_ref = inputs_ref[np.newaxis, :, :, :]
        src.data[:] = inputs_q
        net.forward(end='pool5/7x7_s1')
        # .copy() is required: the blob's buffer is overwritten in place by
        # the reference-image forward pass just below.
        feat_q = np.squeeze(net.blobs['pool5/7x7_s1'].data).copy()
        src.data[:] = inputs_ref
        net.forward(end='pool5/7x7_s1')
        feat_ref = np.squeeze(net.blobs['pool5/7x7_s1'].data)
    except Exception:
        # NOTE(review): reconstructed handler — the original clause lies
        # outside this chunk. Log and skip so one bad entry does not abort
        # the whole run; confirm against the full file.
        print('error: entry %d: %s' % (n, str(fname)))
        continue
# Dual-network feature extraction: compute VGG 'fc6' and GoogLeNet
# 'pool5/7x7_s1' features for every image in the dataset list, then save
# both feature matrices (plus filenames) into one .mat file.
src_google.reshape(FEATURE_JITTER, 3, MODEL_INPUT_SIZE[0], MODEL_INPUT_SIZE[1])
src_vgg = net_vgg.blobs['data']
src_vgg.reshape(FEATURE_JITTER, 3, MODEL_INPUT_SIZE[0], MODEL_INPUT_SIZE[1])
filenames = ['%s' % entry.strip().split(' ')[0]
             for entry in open('%s/%s' % (DATASET_ROOT, DATASET_LIST))]
# squeeze() collapses the jitter axis when FEATURE_JITTER == 1.
feat_vgg = np.squeeze(np.zeros((len(filenames), FEATURE_JITTER, FEATURE_DIM_VGG),
                               dtype=np.float32))
feat_google = np.squeeze(np.zeros((len(filenames), FEATURE_JITTER, FEATURE_DIM_GOOGLE),
                                  dtype=np.float32))
for n, fname in enumerate(filenames):
    try:
        im = utils.load_image('%s/jpg/%s' % (DATASET_ROOT, fname))
        im = im.resize(MODEL_ORIGINAL_INPUT_SIZE, PIL.Image.ANTIALIAS)
        im = utils.preprocess(net_google, im)
    except Exception:
        # Bug fix: the original bare `except` only printed and then fell
        # through, using an unbound (or stale previous) `im` below.
        # Skip unreadable files instead.
        print('error: filename: %s' % fname)
        continue
    if FEATURE_JITTER == 10:
        # Same 10-crop batch is fed to both networks.
        im_jittered = utils.oversample(im, MODEL_INPUT_SIZE)
        src_google.data[:], src_vgg.data[:] = im_jittered, im_jittered
    else:
        src_google.data[:], src_vgg.data[:] = im[:], im[:]
    net_google.forward(end='pool5/7x7_s1')
    net_vgg.forward(end='fc6')
    feat_vgg[n] = net_vgg.blobs['fc6'].data
    feat_google[n] = np.squeeze(net_google.blobs['pool5/7x7_s1'].data)
    if (n + 1) % 10 == 0:
        # Periodic progress report; flush so it shows up under redirection.
        print('End of %d' % (n + 1))
        sys.stdout.flush()
# save mat format
sio.savemat('%s_vggoogle_fc6_pool5_7x7_s1.mat' % DATASET_LIST,
            {'filenames': filenames, 'feat_vgg': feat_vgg, 'feat_google': feat_google})
# Single-network feature extraction: compute FEATURE_DIM-dimensional
# features at layer_name[2] for every image in the input list and save
# them as a .mat file.
src = net.blobs['data']
src.reshape(FEATURE_JITTER, 3, 224, 224)
# load holidays image file list
filenames = [entry.strip()
             for entry in open('%s/%s' % (DATASET_ROOT, DATASET_INPUT_LIST))]
# load gt list (parsed here; consumed later in the evaluation stage)
entries = [entry.strip().split(' ')
           for entry in open('%s/%s' % (DATASET_ROOT, DATASET_GT_LIST))]
# set feature set; squeeze() drops the jitter axis when FEATURE_JITTER == 1
feat = np.squeeze(np.zeros((len(filenames), FEATURE_JITTER, FEATURE_DIM),
                           dtype=np.float32))
for n, fname in enumerate(filenames):
    # load img
    im = PIL.Image.open('%s/full/%s' % (DATASET_ROOT, fname))
    im = im.resize(MODEL_ORIGINAL_INPUT_SIZE, PIL.Image.ANTIALIAS)
    # preprocessing (mean subtraction / channel layout handled by utils)
    im = utils.preprocess(net, im)
    if FEATURE_JITTER == 10:
        src.data[:] = utils.oversample(im, MODEL_INPUT_SIZE)
    else:
        src.data[:] = im[:]
    net.forward(end=layer_name[2])
    dst = net.blobs[layer_name[2]]
    feat[n] = dst.data.reshape(1, FEATURE_DIM)
    if (n + 1) % 10 == 0:
        # Progress report every 10 images (same cadence as the original
        # manual counter).
        print('End of %d' % (n + 1))
# save mat format
sio.savemat('ukb_vgg16_fc7.mat', {'feat': feat})