def imgs_to_lmdb(paths_src, path_dst, CAFFE_ROOT=None): ''' Generate LMDB file from set of images Source: https://github.com/BVLC/caffe/issues/1698#issuecomment-70211045 credit: Evan Shelhamer ''' import numpy as np if CAFFE_ROOT is not None: import sys sys.path.insert(0, CAFFE_ROOT + 'python') import caffe db = lmdb.open(path_dst, map_size=int(1e12)) size = np.zeros([len(paths_src), 2]) with db.begin(write=True) as in_txn: i = 1 for idx, path_ in enumerate(paths_src): print str(i) + ' of ' + str(len(paths_src)) + ' ...' #print str(paths_src) img = read_img_cv2(path_) size[i - 1, :] = img.shape[1:] img_dat = caffe.io.array_to_datum(img) in_txn.put(IDX_FMT.format(idx), img_dat.SerializeToString()) i = i + 1 db.close() return size
def imgs_to_lmdb(paths_src, path_dst, CAFFE_ROOT=None): ''' Generate LMDB file from set of images Source: https://github.com/BVLC/caffe/issues/1698#issuecomment-70211045 credit: Evan Shelhamer ''' import numpy as np if CAFFE_ROOT is not None: import sys sys.path.insert(0, CAFFE_ROOT + 'python') import caffe db = lmdb.open(path_dst, map_size=int(1e12)) size = np.zeros([len(paths_src), 2]) with db.begin(write=True) as in_txn: i = 1 for idx, path_ in enumerate(paths_src): print str(i)+' of '+str(len(paths_src))+' ...' #print str(paths_src) img = read_img_cv2(path_) size[i-1, :] = img.shape[1:] img_dat = caffe.io.array_to_datum(img) in_txn.put(IDX_FMT.format(idx), img_dat.SerializeToString()) i = i + 1 db.close() return size
def imgs_to_lmdb(paths_src, path_dst, CAFFE_ROOT=None): ''' Generate LMDB file from set of images Source: https://github.com/BVLC/caffe/issues/1698#issuecomment-70211045 credit: Evan Shelhamer ''' if CAFFE_ROOT is not None: import sys sys.path.insert(0, os.path.join(CAFFE_ROOT, 'python')) import caffe db = lmdb.open(path_dst, map_size=int(1e12)) with db.begin(write=True) as in_txn: for idx, path_ in enumerate(paths_src): path_ = '/home/tairuichen/Documents/PASCAL-Context/VOC2010/JPEGImages/'+ path_ img = read_img_cv2(path_) print path_ #print img.shape #print img img_dat = caffe.io.array_to_datum(img) in_txn.put('{:0>10d}'.format(idx), img_dat.SerializeToString()) db.close() return 0
def test_read_img_cv2_pixels(self):
    """Every pixel of the loaded CHW image must match the HWC fixture."""
    loaded = r.read_img_cv2(self.path_img1)
    coords = [(c, y, x)
              for c in range(3)
              for y in range(4)
              for x in range(2)]
    for c, y, x in coords:
        # loaded is channel-first; the fixture self.img1 is channel-last
        assert_equal(loaded[c][y][x], self.img1[y][x][c])
def test_read_img_cv2_subtract_mean(self):
    """Passing mean= must subtract the per-channel value from every pixel."""
    mean_bgr = np.array((1., 2., 3.))
    loaded = r.read_img_cv2(self.path_img1, mean=mean_bgr)
    coords = [(c, y, x)
              for c in range(3)
              for y in range(4)
              for x in range(2)]
    for c, y, x in coords:
        # loaded is channel-first; the fixture self.img1 is channel-last
        assert_equal(loaded[c][y][x], self.img1[y][x][c] - mean_bgr[c])
def test_read_img_cv2_subtract_mean(self):
    """read_img_cv2 with mean= must offset each channel by its mean entry."""
    offsets = np.array((1., 2., 3.))
    result = r.read_img_cv2(self.path_img1, mean=offsets)
    for channel in range(3):
        # build the expected plane first, then compare element by element
        expected = [[self.img1[row][col][channel] - offsets[channel]
                     for col in range(2)]
                    for row in range(4)]
        for row in range(4):
            for col in range(2):
                assert_equal(result[channel][row][col], expected[row][col])
def main(args): caffe.set_mode_cpu() # load image, switch to BGR, subtract mean, and make dims C x H x W for Caffe path_img = '/home/kashefy/data/VOCdevkit/VOC2012/JPEGImagesX/2008_000015.jpg' bgr_mean = np.array((104.00698793,116.66876762,122.67891434)) im = Image.open(path_img) in_ = np.array(im, dtype=np.float32) in_ = in_[:,:,::-1] print in_.shape print in_ in_ -= bgr_mean print in_ in_ = in_.transpose((2,0,1)) in_ = read_img_PIL(path_img, mean=bgr_mean) print 'in_' print in_[0, 0, 0:6] print in_[1, 0, 0:6] print in_[2, 0, 0:6] in2 = read_img_cv2(path_img, mean=bgr_mean) print in2.shape #in2[0, :, :] -= 104.00698793 #in2[1, :, :] -= 116.66876762 #in2[2, :, :] -= 122.67891434 print in2[0, 0, 0:6] print in2[1, 0, 0:6] print in2[2, 0, 0:6] print np.all(in_ == in2) print in_[in_ != in2] print in2[in_ != in2] return 0 # load net path_model = '/home/kashefy/data/models/fcn_segm/fcn-32s-Pascal-context/deploy.prototxt' path_weights = '/home/kashefy/data/models/fcn_segm/fcn-32s-Pascal-context/fcn-32s-pascalcontext.caffemodel' net = caffe.Net(path_model, path_weights, caffe.TEST) # shape for input (data blob is N x C x H x W), set data net.blobs['data'].reshape(1, *in_.shape) net.blobs['data'].data[...] = in_
def imgs_to_lmdb(paths_src, path_dst):
    '''Write every image listed in paths_src into a new LMDB at path_dst.

    Source: https://github.com/BVLC/caffe/issues/1698#issuecomment-70211045
    credit: Evan Shelhamer

    Returns 0 on completion.
    '''
    env = lmdb.open(path_dst, map_size=MAP_SZ)
    with env.begin(write=True) as txn:
        for key_no, src in enumerate(paths_src):
            # encode the C x H x W array as a Caffe Datum, keyed by index
            datum = caffe.io.array_to_datum(read_img_cv2(src))
            txn.put(IDX_FMT.format(key_no), datum.SerializeToString())
    env.close()
    return 0
def imgs_to_lmdb(paths_src, path_dst, CAFFE_ROOT=None): ''' Generate LMDB file from set of images ''' import numpy as np if CAFFE_ROOT is not None: import sys sys.path.insert(0, CAFFE_ROOT + 'python') import caffe db = lmdb.open(path_dst, map_size=int(1e12)) size = np.zeros([len(paths_src), 2]) with db.begin(write=True) as in_txn: i = 1 for idx, path_ in enumerate(paths_src): if idx % 1000 == 0: print str(i)+' of '+str(len(paths_src))+' ...' img = read_img_cv2(path_) size[i-1, :] = img.shape[1:] img_dat = caffe.io.array_to_datum(img) in_txn.put(IDX_FMT.format(idx), img_dat.SerializeToString()) i = i + 1 db.close() return size
def imgs_to_lmdb(paths_src, path_dst, CAFFE_ROOT=None):
    '''Generate LMDB file from set of images.

    Source: https://github.com/BVLC/caffe/issues/1698#issuecomment-70211045
    credit: Evan Shelhamer

    If CAFFE_ROOT is given, its python/ subdirectory is prepended to
    sys.path before importing caffe. Returns 0 on completion.
    '''
    if CAFFE_ROOT is not None:
        import sys
        sys.path.insert(0, os.path.join(CAFFE_ROOT, 'python'))
    import caffe
    env = lmdb.open(path_dst, map_size=int(1e12))
    with env.begin(write=True) as txn:
        for key_no, img_path in enumerate(paths_src):
            # zero-padded 10-digit keys keep LMDB iteration in insert order
            arr = read_img_cv2(img_path)
            datum = caffe.io.array_to_datum(arr)
            txn.put('{:0>10d}'.format(key_no), datum.SerializeToString())
    env.close()
    return 0
def imgs_to_lmdb(paths_src, path_dst, CAFFE_ROOT=None): ''' Generate LMDB file from set of images ''' import numpy as np if CAFFE_ROOT is not None: import sys sys.path.insert(0, CAFFE_ROOT + 'python') import caffe db = lmdb.open(path_dst, map_size=int(1e12)) size = np.zeros([len(paths_src), 2]) with db.begin(write=True) as in_txn: i = 1 for idx, path_ in enumerate(paths_src): if idx % 1000 == 0: print str(i) + ' of ' + str(len(paths_src)) + ' ...' img = read_img_cv2(path_) size[i - 1, :] = img.shape[1:] img_dat = caffe.io.array_to_datum(img) in_txn.put(IDX_FMT.format(idx), img_dat.SerializeToString()) i = i + 1 db.close() return size
in_ = np.array(im, dtype=np.float32) in_ = in_[:,:,::-1] print in_.shape print in_ in_ -= bgr_mean print in_ in_ = in_.transpose((2,0,1)) in_ = read_img_PIL(path_img, mean=bgr_mean) print 'in_' print in_[0, 0, 0:6] print in_[1, 0, 0:6] print in_[2, 0, 0:6] in2 = read_img_cv2(path_img, mean=bgr_mean) print in2.shape #in2[0, :, :] -= 104.00698793 #in2[1, :, :] -= 116.66876762 #in2[2, :, :] -= 122.67891434 print in2[0, 0, 0:6] print in2[1, 0, 0:6] print in2[2, 0, 0:6] print np.all(in_ == in2) print in_[in_ != in2] print in2[in_ != in2] return 0 # load net
def test_read_img_cv2_dtype(self):
    """Loaded image array must come back as unsigned 8-bit."""
    loaded = r.read_img_cv2(self.path_img1)
    expected_dtype = np.dtype('uint8')
    assert_equal(loaded.dtype, expected_dtype)
def test_read_img_cv2_shape(self):
    """Image must be returned channel-first with shape (3, 4, 2)."""
    expected_shape = (3, 4, 2)
    loaded = r.read_img_cv2(self.path_img1)
    assert_equal(loaded.shape, expected_shape)
def main(args): caffe.set_mode_cpu() # load image, switch to BGR, subtract mean, and make dims C x H x W for Caffe path_img = "/home/tairuichen/Documents/PASCAL-Context/VOC2010/JPEGImages/2007_000027.jpg" bgr_mean = np.array((104.00698793, 116.66876762, 122.67891434)) im = Image.open(path_img) in_ = np.array(im, dtype=np.float32) in_ = in_[:, :, ::-1] print in_.shape print in_ in_ -= bgr_mean print in_ in_ = in_.transpose((2, 0, 1)) in_ = read_img_PIL(path_img, mean=bgr_mean) print "in_" print in_[0, 0, 0:6] print in_[1, 0, 0:6] print in_[2, 0, 0:6] in2 = read_img_cv2(path_img, mean=bgr_mean) print in2.shape # in2[0, :, :] -= 104.00698793 # in2[1, :, :] -= 116.66876762 # in2[2, :, :] -= 122.67891434 print in2[0, 0, 0:6] print in2[1, 0, 0:6] print in2[2, 0, 0:6] print np.all(in_ == in2) print in_[in_ != in2] print in2[in_ != in2] # return 0 # load net path_model = "fcn-32s-Pascal-context/fcn-32s-pascal-deploy.prototxt" path_weights = "fcn-32s-Pascal-context/fcn-32s-pascal.caffemodel" net = caffe.Net(path_model, path_weights, caffe.TEST) # shape for input (data blob is N x C x H x W), set data net.blobs["data"].reshape(1, *in_.shape) net.blobs["data"].data[...] = in_ # run net and take argmax for prediction net.forward() out = net.blobs["score"].data[0].argmax(axis=0) print "data after fwd" print net.blobs["data"].data[ net.blobs["data"].data.shape[0] / 2 - 3 : net.blobs["data"].data.shape[0] / 2 + 3, net.blobs["data"].data.shape[1] / 2 - 3 : net.blobs["data"].data.shape[1] / 2 + 3, ] print "out" print out[out.shape[0] / 2 - 3 : out.shape[0] / 2 + 3, out.shape[1] / 2 - 3 : out.shape[1] / 2 + 3] plt.imshow(out) plt.show()