Example #1
import os

import numpy as np

import caffe
# DataProvider is assumed to come from the project's data-provider module, as in Example #7 below.
from data_provider_layer import DataProvider


def feat_extract(device_id=0, img_list_path='', batchsize=290, flag='', net='./deploy_33.prototxt', model='../snapshot/model_iter_100000.caffemodel', iter=100000, blob='normed_feature'):
  f_path = './'+'feat_'+os.path.basename(img_list_path).split('.')[0]+flag+'/iter'+str(iter)+'/'+blob
  if not os.path.exists(f_path):
    os.makedirs(f_path)
  dp = DataProvider(batchsize=batchsize, path=img_list_path)
  img_list_len=len(dp.img_dict)
  net = caffe.Net(net, model, caffe.TEST)
  caffe.set_device(device_id)
  caffe.set_mode_gpu()
  # Buffer that accumulates up to 100 batches of features before they are flushed to a .npz shard.
  feature = np.zeros((batchsize*100,net.blobs[blob].data.shape[1]))
  img_name = []
  index = 0
  epoch = 0
  counter = 1
  while epoch==0:
    img_np, labels, epoch, img_name_batch= dp.get_batch_vec()
    net.blobs['data'].data[...] = img_np
    net.forward()
    index += 1
    feature[batchsize*(index-1):batchsize*index,:] = net.blobs[blob].data.reshape(batchsize,-1)
    img_name+=img_name_batch
    if epoch==1:
      # Final shard: keep only the rows that correspond to real images in the list.
      path = os.path.join(f_path, str(counter)+'.npz')
      np.savez(path,feat=feature[:(img_list_len-(counter-1)*batchsize*100),:],img=img_name)
    elif index == 100:
      # 100 batches buffered: write them out as a numbered shard and reset.
      path = os.path.join(f_path, str(counter)+'.npz')
      np.savez(path,feat=feature,img=img_name)
      index = 0
      img_name = []
      counter +=1
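A minimal usage sketch for the extractor above; the image-list filename and the glob over the saved shards are illustrative assumptions, not part of the original code:

import glob

import numpy as np

# Hypothetical list file; feat_extract derives the output directory from its basename.
feat_extract(device_id=0, img_list_path='./val_list.txt', batchsize=290,
             net='./deploy_33.prototxt',
             model='../snapshot/model_iter_100000.caffemodel',
             iter=100000, blob='normed_feature')

# Each shard holds up to batchsize*100 feature rows plus the matching image names.
for shard in sorted(glob.glob('./feat_val_list/iter100000/normed_feature/*.npz')):
  data = np.load(shard)
  print(shard, data['feat'].shape, len(data['img']))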
Example #2
def feat_extract(device_id=7, img_list_path='', batchsize=96, net='./deploy_no_part.prototxt', model='./models/models_iter_2000.caffemodel', iter=2000, blob='fc7_n'):
  f_path = '/data1/qtang/samsung/train_no_part/'+'feat_'+os.path.basename(img_list_path).split('.')[0]+'/iter'+str(iter)+'/'+blob
  if not os.path.exists(f_path):
    os.makedirs(f_path)
  dp = DataProvider(batchsize=batchsize, path=img_list_path)
  img_list_len=len(dp.img_dict)
  net = caffe.Net(net, model, caffe.TEST)
  caffe.set_device(device_id)
  caffe.set_mode_gpu()
  if blob == 'fc7_n':
    feature = np.zeros((batchsize*100,298))
  else:
    feature = np.zeros((batchsize*100,4096))
  img_name = []
  index = 0
  epoch = 0
  counter = 1
  while epoch==0:
    img_np, labels, epoch, img_name_batch= dp.get_batch_vec()
    net.blobs['top_0'].data[...] = img_np
    net.blobs['top_1'].data[...] = labels
    net.forward()
    index += 1
    feature[batchsize*(index-1):batchsize*index,:] = net.blobs[blob].data
    img_name+=img_name_batch
    if epoch==1:
      path = os.path.join(f_path, str(counter)+'.npz')
      np.savez(path,feat=feature[:(img_list_len-(counter-1)*batchsize*100),:],img=img_name)
    elif index == 100:
      path = os.path.join(f_path, str(counter)+'.npz')
      np.savez(path,feat=feature,img=img_name)
      index = 0
      img_name = []
      counter +=1
Example #3
def feat_extract(device_id=2, img_list_path='', batchsize=256, net='./deploy_lda_siamese.prototxt', model='./197_iter_500.caffemodel', iter=500, blob='pool6'):
  f_path = './'+'feat_'+os.path.basename(img_list_path).split('.')[0]+'/iter'+str(iter)+'/'+blob
  if not os.path.exists(f_path):
    os.makedirs(f_path)
  dp = DataProvider(batchsize=batchsize, path=img_list_path)
  img_list_len=len(dp.img_dict)
  net = caffe.Net(net, model, caffe.TEST)
  caffe.set_device(device_id)
  caffe.set_mode_gpu()
  if blob == 'fc7_n':
    feature = np.zeros((batchsize*100,298))
  else:
    feature = np.zeros((batchsize*100,8192))
  img_name = []
  block_label = []
  index = 0
  epoch = 0
  counter = 1
  while epoch==0:
    img_np, labels, epoch, img_name_batch = dp.get_batch_vec()
    net.blobs['top_0'].data[...] = img_np
    net.forward()
    index += 1
    feature[batchsize*(index-1):batchsize*index,:] = net.blobs[blob].data.reshape(batchsize,-1)
    img_name+=img_name_batch
    block_label+=list(labels)
    if epoch==1:
      block_feat  = feature[:(img_list_len-(counter-1)*batchsize*100),:]
      block_feat /= np.sqrt((block_feat**2).sum(axis=1)).reshape(-1,1)
      block_label = block_label[:(img_list_len-(counter-1)*batchsize*100)]
      img_name = img_name[:(img_list_len-(counter-1)*batchsize*100)]
      path = os.path.join(f_path, str(counter)+'.npz')
      np.savez(path, feat=block_feat, img=img_name)
    
    elif index == 100:
      block_feat  = feature
      block_feat /= np.sqrt((block_feat**2).sum(axis=1)).reshape(-1,1)
      path = os.path.join(f_path, str(counter)+'.npz')
      np.savez(path,feat=block_feat,img=img_name)
      index = 0
      img_name = []
      block_label = []
      counter +=1
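Because each shard above is L2-normalized row-wise before it is written, cosine similarity between saved vectors reduces to a dot product. A small retrieval sketch, assuming a hypothetical shard path produced by the extractor:

import numpy as np

# Hypothetical shard path (img_list_path='./query_list.txt', iter=500, blob='pool6').
shard = np.load('./feat_query_list/iter500/pool6/1.npz')
feats, names = shard['feat'], shard['img']

# Rows are unit length, so a dot product gives cosine similarity.
sims = feats.dot(feats[0])
for i in np.argsort(-sims)[:5]:
  print(names[i], sims[i])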
Example #4
def test_acc(device_id, model, prefix=''):
  net = caffe.Net('./part_val.prototxt', model, caffe.TEST)
  caffe.set_device(device_id)
  caffe.set_mode_gpu()
  dp = DataProvider(batchsize = 64, img_size=227)

  epoch = 0
  loss_list  = []
  acc_layer  = []
  while epoch == 0:
    img_nps_1, img_nps_2, img_nps_3, labels, epoch = dp.get_batch_vec()
    net.blobs['top_0'].data[...] = img_nps_1
    net.blobs['top_1'].data[...] = img_nps_2
    net.blobs['top_2'].data[...] = img_nps_3
    net.blobs['top_3'].data[...] = labels
    net.forward()
    loss_list.append(net.blobs['loss'].data)
    acc_layer.append(net.blobs['accuracy'].data)
  mean_loss = np.array(loss_list).mean()
  acc       = np.array(acc_layer).mean()

  return acc, mean_loss
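test_acc can be run once per snapshot to pick the best iteration; a hedged sketch, with the snapshot directory and iteration list as assumptions:

# Illustrative snapshot sweep; the iteration numbers and path pattern are assumptions.
best = None
for it in (2000, 4000, 6000, 7600):
  acc, loss = test_acc(device_id=2, model='./models/_iter_%d.caffemodel' % it)
  print('iter %d: acc=%.4f, loss=%.4f' % (it, acc, loss))
  if best is None or acc > best[1]:
    best = (it, acc)
print('best snapshot: iter %d (acc=%.4f)' % best)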
Example #5
def feat_extract(device_id=2,
                 img_list_path='',
                 batchsize=64,
                 net='./part_val.prototxt',
                 model='./models/vggs_fc6weights/_iter_7600.caffemodel'):
    f_path = '/data1/qtang/samsung/part_train/' + 'feat_' + os.path.basename(
        img_list_path).split('.')[0] + '/vggs_fc6weights/iter7600/concate'
    if not os.path.exists(f_path):
        os.makedirs(f_path)
    dp = DataProvider(batchsize=batchsize, path=img_list_path)
    net = caffe.Net(net, model, caffe.TEST)
    caffe.set_device(device_id)
    caffe.set_mode_gpu()
    feature = np.zeros((batchsize * 100, 12288))
    img_name = []
    index = 0
    epoch = 0
    counter = 1
    while epoch == 0:
        img_nps_1, img_nps_2, img_nps_3, labels, epoch, img_name_batch = dp.get_batch_vec(
        )
        net.blobs['top_0'].data[...] = img_nps_1
        net.blobs['top_1'].data[...] = img_nps_2
        net.blobs['top_2'].data[...] = img_nps_3
        net.blobs['top_3'].data[...] = labels
        net.forward()
        index += 1
        feature[batchsize * (index - 1):batchsize *
                index, :] = net.blobs['concate'].data
        img_name += img_name_batch
        if epoch == 1:
            path = os.path.join(f_path, str(counter) + '.npz')
            np.savez(path, feat=feature[:index * batchsize, :], img=img_name)
        elif index == 100:
            path = os.path.join(f_path, str(counter) + '.npz')
            np.savez(path, feat=feature, img=img_name)
            index = 0
            img_name = []
            counter += 1
Example #6
def feat_extract(device_id=2, img_list_path='', batchsize=48, net='./org_part_deploy.prototxt', iter=6000, blob='fc8'):
  f_path = '/data1/qtang/samsung/org_part/'+'feat_'+os.path.basename(img_list_path).split('.')[0]+'/iter'+str(iter)+'/'+blob
  if not os.path.exists(f_path):
    os.makedirs(f_path)
  dp = DataProvider(batchsize=batchsize, path=img_list_path)
  model='./models/_iter_%d.caffemodel'%iter
  net = caffe.Net(net, model, caffe.TEST)
  caffe.set_device(device_id)
  caffe.set_mode_gpu()
  if blob == 'concat2':
    feature = np.zeros((batchsize*100,8192))
  else:
    feature = np.zeros((batchsize*100,298))
  img_name = []
  index = 0
  epoch = 0
  counter = 1
  while epoch==0:
    img_np_1,img_np_2,img_np_3,labels,img_np_org,epoch, img_name_batch= dp.get_batch_vec()
    net.blobs['top_0'].data[...] = img_np_1
    net.blobs['top_1'].data[...] = img_np_2
    net.blobs['top_2'].data[...] = img_np_3
    net.blobs['top_3'].data[...] = labels
    net.blobs['top_4'].data[...] = img_np_org
    net.forward()
    index += 1
    feature[batchsize*(index-1):batchsize*index,:] = net.blobs[blob].data
    img_name+=img_name_batch
    if epoch==1:
      path = os.path.join(f_path, str(counter)+'.npz')
      np.savez(path,feat=feature[:index*batchsize,:],img=img_name)
    elif index == 100:
      path = os.path.join(f_path, str(counter)+'.npz')
      np.savez(path,feat=feature,img=img_name)
      index = 0
      img_name = []
      counter +=1
Example #7
#!/usr/bin/env python
import caffe
import numpy as np
from data_provider_layer import DataProvider

net=caffe.Net('./deploy_no_part.prototxt', './models/models_iter_8000.caffemodel', caffe.TEST)
caffe.set_mode_gpu()
caffe.set_device(2)

dp      = DataProvider(batchsize=64)
img_len = len(dp.img_dict)
feat    = np.zeros((img_len,298))   # one 298-D fc7_n feature per image

epoch         = 0
batch_counter = 0
while epoch == 0:
  img_np, labels, epoch, img_name = dp.get_batch_vec()
  net.blobs['top_0'].data[...] = img_np
  net.blobs['top_1'].data[...] = labels
  net.forward()
  batch_counter += 1
  if epoch == 1:
    # Final (possibly padded) batch: copy only the rows for the images that actually remain.
    feat[64*(batch_counter-1):,:] = net.blobs['fc7_n'].data[:img_len-64*(batch_counter-1),:]
  else:
    feat[64*(batch_counter-1):64*batch_counter,:] = net.blobs['fc7_n'].data

feat /= np.sqrt((feat**2).sum(axis=1)).reshape(-1,1)
var   = np.var(feat, axis=1)
mean  = np.mean(feat, axis=1)
max_  = feat.max(axis=1)
min_  = feat.min(axis=1)
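The trailing statistics are computed but never reported; a short hedged follow-up that prints them, sanity-checks the row normalization, and saves the matrix under a placeholder filename:

# Rows were divided by their L2 norm above, so each should be (close to) unit length.
row_norms = np.sqrt((feat**2).sum(axis=1))
print('row norm  min/max : %.6f / %.6f' % (row_norms.min(), row_norms.max()))
print('variance  min/max : %.6f / %.6f' % (var.min(), var.max()))
print('mean      min/max : %.6f / %.6f' % (mean.min(), mean.max()))
print('value     min/max : %.6f / %.6f' % (min_.min(), max_.max()))

np.save('fc7_n_features.npy', feat)  # placeholder output filename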