Example #1
net_vgg   = caffe.Classifier( MODEL_VGG_DEPLOY_FILE, MODEL_VGG_WEIGHT_FILE, mean = MODEL_MEAN_VALUE, channel_swap = (2, 1, 0) )
src_vgg = net_vgg.blobs['data']
src_vgg.reshape(1, 3, MODEL_ORIGINAL_INPUT_SIZE[0], MODEL_ORIGINAL_INPUT_SIZE[1])

filenames = [entry.strip().split(' ')[0] for entry in open('%s/%s' % (DATASET_ROOT, DATASET_LIST))]

# Encoder blobs to stop at, the deconv-side blobs to copy their activations
# into, and the deconv layers to resume the forward pass from (one per scale).
end_blob_name = ['pool1', 'pool2', 'pool3', 'pool4', 'pool5', 'fc6_conv', 'relu6_conv']
copy_to_blob_name = ['deconv2_1', 'deconv3_1', 'deconv4_1', 'deconv5_1', 'fc6_deconv', 'relu6_deconv', 'fc7_deconv']
decon_layer_name = ['unpool1', 'unpool2', 'unpool3', 'unpool4', 'unpool5', 'fc6_deconv', 'relu6_deconv']

print 'Start deconvolution'
for file_id, filename in enumerate(filenames):
  im = utils.load_image( '%s/%s' %(DATASET_ROOT, filename) )
  im = im.resize( MODEL_ORIGINAL_INPUT_SIZE, PIL.Image.ANTIALIAS )
  im = utils.preprocess(net_vgg, im); src_vgg.data[:] = im[:]
  print 'Done loading image'

  for end_blob, copy_to, deconv_layer in zip(end_blob_name, copy_to_blob_name, decon_layer_name):
    # Encode up to end_blob, mirror its activations onto the decoder blob,
    # then resume the forward pass through the deconvolution layers.
    net_vgg.forward(end=end_blob)
    net_vgg.blobs[copy_to].data[...] = np.copy(net_vgg.blobs[end_blob].data)
    net_vgg.forward(start=deconv_layer)
  
    recon = np.copy(net_vgg.blobs['deconv1_1'].data[0])
    recon = utils.deprocess(net_vgg, recon)
    min_val, max_val = recon.flatten().min(), recon.flatten().max()
    print "{}, layer {}, dim: {}={}, mean: {}, min_val: {}, max_val: {}".format( \
      filename, end_blob, net_vgg.blobs[end_blob].data[0].shape, \
      np.prod(net_vgg.blobs[end_blob].data[0].shape), \
      np.mean(recon.flatten()), min_val, max_val)
    recon = (recon - min_val) / (max_val - min_val)
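
The listing stops right after scaling the reconstruction into [0, 1]; whatever the script did with it next is cut off. A minimal sketch of one plausible continuation, assuming utils.deprocess yields an H x W x 3 array and that an OUTPUT_DIR constant and an import of os exist elsewhere in the script (both are hypothetical, not in the snippet):

    # Hypothetical continuation: write the normalized reconstruction to disk.
    # OUTPUT_DIR is an assumed constant, not part of the original snippet.
    out = PIL.Image.fromarray(np.uint8(recon * 255))
    out.save('%s/%s_%s.png' % (OUTPUT_DIR, os.path.basename(filename), end_blob))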
Example #2
  src_google = net_google.blobs['data']
  src_google.reshape(FEATURE_JITTER, 3, MODEL_INPUT_SIZE[0], MODEL_INPUT_SIZE[1])
  src_vgg = net_vgg.blobs['data']
  src_vgg.reshape(FEATURE_JITTER, 3, MODEL_INPUT_SIZE[0], MODEL_INPUT_SIZE[1])

  filenames = [entry.strip().split(' ')[0] for entry in open('%s/%s' % (DATASET_ROOT, DATASET_LIST))]
  feat_vgg   = np.squeeze(np.zeros((len(filenames), FEATURE_JITTER, FEATURE_DIM_VGG), dtype=np.float32))
  feat_google= np.squeeze(np.zeros((len(filenames), FEATURE_JITTER, FEATURE_DIM_GOOGLE), dtype=np.float32))
  #import pdb; pdb.set_trace()

  for n, fname in enumerate(filenames):
    try:
      im = utils.load_image( '%s/jpg/%s' %(DATASET_ROOT, fname) )
      im = im.resize( MODEL_ORIGINAL_INPUT_SIZE, PIL.Image.ANTIALIAS )
      im = utils.preprocess(net_google, im)
    except Exception:
      print 'error: filename: ', fname
      continue  # skip unreadable images instead of reusing the previous one
    if FEATURE_JITTER == 10:
      # 10-crop oversampling: four corners and center, plus their mirrors.
      im_jittered = utils.oversample(im, MODEL_INPUT_SIZE)
      src_google.data[:], src_vgg.data[:] = im_jittered, im_jittered
    else:
      src_google.data[:], src_vgg.data[:] = im[:], im[:]

    # Forward each network only up to the layer whose activations are kept.
    net_google.forward(end='pool5/7x7_s1')
    net_vgg.forward(end='fc6')
    feat_vgg[n] = net_vgg.blobs['fc6'].data
    feat_google[n] = np.squeeze(net_google.blobs['pool5/7x7_s1'].data)

    if (n+1) % 10 == 0:
      print 'End of ', n+1
      sys.stdout.flush()

  # save mat format
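
The snippet is truncated at the save step. Example #3 below shows the pattern it presumably continues with; a sketch along the same lines, with a hypothetical output filename:

  # Hypothetical continuation, mirroring the save step in Example #3.
  import scipy.io as sio
  sio.savemat('feat_vgg_google.mat', {'filenames': filenames, 'feat_vgg': feat_vgg, 'feat_google': feat_google})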
Example #3
  net_vgg   = caffe.Classifier( MODEL_VGG_DEPLOY_FILE, MODEL_VGG_WEIGHT_FILE, mean = MODEL_MEAN_VALUE, channel_swap = (2, 1, 0) ) 

  src_vgg = net_vgg.blobs['data']
  src_vgg.reshape(1, 3, MODEL_ORIGINAL_INPUT_SIZE[0], MODEL_ORIGINAL_INPUT_SIZE[1])

  for split in range(62):
    if split != 0: continue  # only the first split is processed in this run
    DATASET_LIST = 'database_images.txt.shuffle.txt.%02d.txt' % split
    MAT_FILENAME = '%s_%dx%d_vgg_fc6_conv.mat' % (DATASET_LIST, MODEL_ORIGINAL_INPUT_SIZE[0], MODEL_ORIGINAL_INPUT_SIZE[1])
    print 'Start feature extraction,', DATASET_LIST

    filenames = [entry.strip().split(' ')[0] for entry in open('%s/%s' % (DATASET_ROOT, DATASET_LIST))]
    feat_vgg   = np.squeeze(np.zeros((len(filenames), FEATURE_JITTER, FEATURE_DIM_VGG), dtype=np.float32))
    #import pdb; pdb.set_trace()

    for n, fname in enumerate(filenames):
      im = utils.load_image( '%s/%s' %(DATASET_ROOT, fname) )
      im = im.resize( MODEL_ORIGINAL_INPUT_SIZE, PIL.Image.ANTIALIAS )
      im = utils.preprocess(net_vgg, im)
      src_vgg.data[:] = im[:]

      net_vgg.forward(end='fc6_conv')
      # fc6_conv is fully convolutional: reshape its output so that each of the
      # MODEL_OUTPUT_SIZE**2 spatial positions yields one FEATURE_DIM_VGG-dim descriptor.
      feat_vgg[n] = np.reshape(net_vgg.blobs['fc6_conv'].data, (1, FEATURE_DIM_VGG, MODEL_OUTPUT_SIZE**2))[0].T

      if (n+1) % 10 == 0:
        print 'End of ', n+1
        sys.stdout.flush()

    # save mat format
    print 'Save to ', MAT_FILENAME
    import scipy.io as sio
    sio.savemat(MAT_FILENAME, {'filenames': filenames, 'feat_vgg': feat_vgg })
    #import cPickle as pickle; pickle.dump({'filenames': filenames, 'feat_vgg': feat_vgg}, open(MAT_FILENAME, 'wb'))
Example #4
  src = net.blobs['data']
  src.reshape(FEATURE_JITTER, 3, MODEL_INPUT_SIZE[0], MODEL_INPUT_SIZE[1])

  for n, fname in enumerate(entries):
    try:
      query_filename = fname[0] 
      ref_filename = fname[1]
      label = int(fname[2])
      inputs_q  = utils.load_image( os.path.join(prefix, query_filename) )
      inputs_ref= utils.load_image( os.path.join(prefix, ref_filename) )

      # With 10-crop jitter the oversampled crops fill the whole batch;
      # otherwise a single resized image goes through as a batch of one.
      if FEATURE_JITTER == 10:
        inputs_q  = inputs_q.resize( MODEL_ORIGINAL_INPUT_SIZE, PIL.Image.ANTIALIAS )
        inputs_ref= inputs_ref.resize( MODEL_ORIGINAL_INPUT_SIZE, PIL.Image.ANTIALIAS )
        inputs_q  = utils.preprocess(net, inputs_q)
        inputs_ref= utils.preprocess(net, inputs_ref)
        inputs_q  = utils.oversample(inputs_q, MODEL_INPUT_SIZE)
        inputs_ref= utils.oversample(inputs_ref, MODEL_INPUT_SIZE)
      else:
        inputs_q  = inputs_q.resize( MODEL_INPUT_SIZE, PIL.Image.ANTIALIAS )
        inputs_ref= inputs_ref.resize( MODEL_INPUT_SIZE, PIL.Image.ANTIALIAS )
        inputs_q  = utils.preprocess(net, inputs_q)
        inputs_ref= utils.preprocess(net, inputs_ref)
        inputs_q  = inputs_q[np.newaxis,:,:,:]
        inputs_ref= inputs_ref[np.newaxis,:,:,:]

      src.data[:] = inputs_q
      net.forward(end='pool5/7x7_s1')
      feat_q  = np.squeeze(net.blobs['pool5/7x7_s1'].data).copy()
      src.data[:] = inputs_ref
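
The listing breaks off after loading the reference batch; presumably a second forward pass extracts the reference feature, mirroring the query pass, and the try block is closed. A sketch of that continuation (the dot-product score is an assumption, not from the snippet):

      net.forward(end='pool5/7x7_s1')
      feat_ref = np.squeeze(net.blobs['pool5/7x7_s1'].data).copy()
      # Hypothetical use of the pair: a simple similarity score to compare
      # against the ground-truth label.
      score = np.dot(feat_q.flatten(), feat_ref.flatten())
    except Exception:
      print 'error: filename: ', fname
      continue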
Example #5
  src = net.blobs['data']
  src.reshape(FEATURE_JITTER, 3, 224, 224)

  # load holidays image file list
  filenames=[entry.strip() for entry in open('%s/%s' % (DATASET_ROOT, DATASET_INPUT_LIST ))]
  # load gt list
  entries = [entry.strip().split(' ') for entry in open('%s/%s' % (DATASET_ROOT, DATASET_GT_LIST))]
  # set feature set
  feat = np.squeeze(np.zeros((len(filenames), FEATURE_JITTER, FEATURE_DIM), dtype=np.float32))
  #import pdb; pdb.set_trace()

  for n, fname in enumerate(filenames):
    # load img
    #im = PIL.Image.open('%s/holidays/jpg/%s' % (DATASET_ROOT, fname))
    im = PIL.Image.open('%s/full/%s' % (DATASET_ROOT, fname))
    im = im.resize(MODEL_ORIGINAL_INPUT_SIZE, PIL.Image.ANTIALIAS )
    # preprocessing
    im = utils.preprocess(net, im)
    if FEATURE_JITTER == 10:
      src.data[:] = utils.oversample(im, MODEL_INPUT_SIZE)
    else:
      src.data[:] = im[:]

    # layer_name is defined elsewhere in the script; index 2 selects the blob
    # to extract (fc7, judging by the output filename below).
    net.forward(end=layer_name[2])
    dst = net.blobs[layer_name[2]]
    feat[n] = dst.data.reshape(1, FEATURE_DIM)
    if (n+1) % 10 == 0: print 'End of %d' % (n+1)

  # save mat format
  sio.savemat('ukb_vgg16_fc7.mat', {'feat': feat})
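
A quick sanity check of the saved file, reusing the same scipy.io import:

import scipy.io as sio
data = sio.loadmat('ukb_vgg16_fc7.mat')
# After np.squeeze, feat is (N, FEATURE_DIM) when FEATURE_JITTER == 1,
# or (N, 10, FEATURE_DIM) with 10-crop jittering.
print data['feat'].shape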