def test_imgs_str(self, mock_dat, mock_caffe):
    """Verify imgs_to_lmdb writes one LMDB entry per image with the expected
    key format and serialized Datum content (Caffe serialization is mocked).
    """
    # Expected serialization of the two test images, in insertion order.
    serialized = [
        '\x08\x03\x10\x04\x18\x02"\x18\x01\x04\x07\n\r\x10\x13\x16\x02\x05\x08\x0b\x0e\x11\x14\x17\x03\x06\t\x0c\x0f\x12\x15\x18(\x00',
        '\x08\x03\x10\x04\x18\x02"\x18\x02\x05\x08\x0b\x0e\x11\x14\x17\x03\x06\t\x0c\x0f\x12\x15\x18\x04\x07\n\r\x10\x13\x16\x19(\x00',
    ]
    # Stub out the caffe calls made by the module under test so each
    # SerializeToString() yields the next canned payload.
    mock_dat.return_value.SerializeToString = MagicMock(side_effect=serialized)
    mock_caffe.io.array_to_datum.return_value = caffe.proto.caffe_pb2.Datum()

    # Exercise the module.
    path_lmdb = os.path.join(self.dir_tmp, 'x2_lmdb')
    tol.imgs_to_lmdb([self.path_img1, self.path_img2], path_lmdb)
    assert_true(os.path.isdir(path_lmdb), "failed to save LMDB")

    # Walk the resulting LMDB and check every (key, value) pair.
    env = lmdb.open(path_lmdb, readonly=True)
    num_seen = 0
    with env.begin() as txn:
        for idx, (key, value) in enumerate(txn.cursor()):
            assert_equal(key, tol.IDX_FMT.format(idx), "Unexpected key.")
            assert_equal(value, serialized[idx], "Unexpected content.")
            num_seen = idx + 1
    assert_equal(num_seen, 2, "Unexpected number of samples.")
def main(args): paths_in = ['/media/win/Users/woodstock/dev/data/lena.png'] caffe.set_mode_cpu() path_lmdb_train = 'image-lmdb' path_lmdb_test = path_lmdb_train to_lmdb.imgs_to_lmdb(paths_in, path_lmdb_train) with open('train.prototxt', 'w') as f: f.write(str(to_lmdb.gen_net(path_lmdb_train, 1))) with open('test.prototxt', 'w') as f: f.write(str(to_lmdb.gen_net(path_lmdb_test, 1))) solver = caffe.SGDSolver('auto_solver.prototxt') for i in xrange(len(paths_in)): solver.net.forward() # train net print solver.net.blobs['data'].data.shape d = solver.net.blobs['data'].data sh = d.shape d = d.reshape(sh[1], sh[2], sh[3]) y = cv2.merge([d[0, :, :], d[1, :, :], d[2, :, :]]) print y.dtype print y cv2.imshow('y', y) cv2.waitKey() return 0
def main(args): dir_imgs = CAFFE_ROOT + "data/fcn_label_full/" + phase + "_jpg" paths_imgs = fs.gen_paths(dir_imgs, fs.filter_is_img) dir_segm_labels = CAFFE_ROOT + "data/fcn_label_full/" + phase + "_maps" paths_segm_labels = fs.gen_paths(dir_segm_labels) paths_pairs = fs.fname_pairs(paths_imgs, paths_segm_labels) paths_imgs, paths_segm_labels = map(list, zip(*paths_pairs)) if not os.path.exists(lmimgDst): print "lmdb dir not exists,make it" os.makedirs(lmimgDst) if not os.path.exists(lmlabelDst): print "lmdb dir not exists,make it" os.makedirs(lmlabelDst) # for a, b in paths_pairs: # print a,b size1 = to_lmdb.imgs_to_lmdb(paths_imgs, lmimgDst, CAFFE_ROOT=CAFFE_ROOT) size2 = to_lmdb.matfiles_to_lmdb(paths_segm_labels, lmlabelDst, "gt", CAFFE_ROOT=CAFFE_ROOT) dif = size1 - size2 dif = dif.sum() scipy.io.savemat("./size1", dict({"sz": size1}), appendmat=True) scipy.io.savemat("./size2", dict({"sz": size2}), appendmat=True) print "size dif:" + str(dif) return 0
def main(args): dir_imgs = CAFFE_ROOT+'data/fcn_label_full/' + phase + '_jpg' paths_imgs = fs.gen_paths(dir_imgs, fs.filter_is_img) dir_segm_labels = CAFFE_ROOT + 'data/fcn_label_full/' + phase + '_maps' paths_segm_labels = fs.gen_paths(dir_segm_labels) paths_pairs = fs.fname_pairs(paths_imgs, paths_segm_labels) paths_imgs, paths_segm_labels = map(list, zip(*paths_pairs)) if not os.path.exists(lmimgDst): print 'lmdb dir not exists,make it' os.makedirs(lmimgDst) if not os.path.exists(lmlabelDst): print 'lmdb dir not exists,make it' os.makedirs(lmlabelDst) #for a, b in paths_pairs: # print a,b size1 = to_lmdb.imgs_to_lmdb(paths_imgs, lmimgDst, CAFFE_ROOT = CAFFE_ROOT) size2 = to_lmdb.matfiles_to_lmdb(paths_segm_labels, lmlabelDst, 'gt',CAFFE_ROOT = CAFFE_ROOT) dif = size1 - size2 dif = dif.sum() scipy.io.savemat('./size1',dict({'sz':size1}),appendmat=True) scipy.io.savemat('./size2',dict({'sz':size2}),appendmat=True) print 'size dif:'+str(dif) return 0
def pascal_context_to_lmdb(dir_imgs, dir_segm_labels,
                           fpath_labels_list, fpath_labels_list_subset,
                           dst_prefix, dir_dst,
                           CAFFE_ROOT=None, val_list=None):
    '''
    Find the intersection of filenames in both directories and create one
    lmdb directory for each set.

    val_list - list of entities to exclude from train (validation subset
        e.g. ['2008_000002', '2010_000433'])

    Returns counts and lmdb paths; with val_list set, returns
    (n_train, n_val, imgs_train, labels_train, imgs_val, labels_val),
    otherwise (n, imgs_lmdb, labels_lmdb).
    '''
    if dst_prefix is None:
        dst_prefix = ''

    # Map full label ids onto the reduced (59-class) subset.
    labels_list = get_labels_list(fpath_labels_list)
    labels_59_list = get_labels_list(fpath_labels_list_subset)
    labels_lut = get_labels_lut(labels_list, labels_59_list)

    def apply_labels_lut(m):
        return labels_lut[m]

    def _dst(name):
        # Destination lmdb path for a given split/content name.
        return os.path.join(dir_dst, '%scontext_%s_lmdb' % (dst_prefix, name))

    # Keep only filename-matched (image, label) pairs, aligned by index.
    paths_imgs = fs.gen_paths(dir_imgs, fs.filter_is_img)
    paths_segm_labels = fs.gen_paths(dir_segm_labels)
    paths_pairs = fs.fname_pairs(paths_imgs, paths_segm_labels)
    paths_imgs, paths_segm_labels = map(list, zip(*paths_pairs))

    if val_list is not None:
        # Train/val split by entity name.
        train_idx, val_idx = get_train_val_split(paths_imgs, val_list)

        # Images.
        paths_imgs_train = [paths_imgs[i] for i in train_idx]
        fpath_lmdb_imgs_train = _dst('imgs_train')
        to_lmdb.imgs_to_lmdb(paths_imgs_train, fpath_lmdb_imgs_train,
                             CAFFE_ROOT=CAFFE_ROOT)
        paths_imgs_val = [paths_imgs[i] for i in val_idx]
        fpath_lmdb_imgs_val = _dst('imgs_val')
        to_lmdb.imgs_to_lmdb(paths_imgs_val, fpath_lmdb_imgs_val,
                             CAFFE_ROOT=CAFFE_ROOT)

        # Ground truth.
        paths_segm_labels_train = [paths_segm_labels[i] for i in train_idx]
        fpath_lmdb_segm_labels_train = _dst('labels_train')
        to_lmdb.matfiles_to_lmdb(paths_segm_labels_train,
                                 fpath_lmdb_segm_labels_train, 'LabelMap',
                                 CAFFE_ROOT=CAFFE_ROOT, lut=apply_labels_lut)
        paths_segm_labels_val = [paths_segm_labels[i] for i in val_idx]
        fpath_lmdb_segm_labels_val = _dst('labels_val')
        to_lmdb.matfiles_to_lmdb(paths_segm_labels_val,
                                 fpath_lmdb_segm_labels_val, 'LabelMap',
                                 CAFFE_ROOT=CAFFE_ROOT, lut=apply_labels_lut)

        return (len(paths_imgs_train), len(paths_imgs_val),
                fpath_lmdb_imgs_train, fpath_lmdb_segm_labels_train,
                fpath_lmdb_imgs_val, fpath_lmdb_segm_labels_val)
    else:
        fpath_lmdb_imgs = _dst('imgs')
        to_lmdb.imgs_to_lmdb(paths_imgs, fpath_lmdb_imgs,
                             CAFFE_ROOT=CAFFE_ROOT)
        fpath_lmdb_segm_labels = _dst('labels')
        to_lmdb.matfiles_to_lmdb(paths_segm_labels, fpath_lmdb_segm_labels,
                                 'LabelMap', CAFFE_ROOT=CAFFE_ROOT,
                                 lut=apply_labels_lut)
        return len(paths_imgs), fpath_lmdb_imgs, fpath_lmdb_segm_labels
def pascal_context_to_lmdb(dir_imgs, dir_segm_labels,
                           fpath_labels_list, fpath_labels_list_subset,
                           dst_prefix, dir_dst, val_list=None):
    '''
    Find the intersection of filenames in both directories and create one
    lmdb directory for each set.

    val_list - list of entities to exclude from train (validation subset
        e.g. ['2008_000002', '2010_000433'])
    '''
    if dst_prefix is None:
        dst_prefix = ''

    # LUT maps the full label id space onto the reduced (59-class) subset.
    labels_list = get_labels_list(fpath_labels_list)
    labels_59_list = get_labels_list(fpath_labels_list_subset)
    labels_lut = du.get_labels_lut(labels_list, labels_59_list)

    def apply_labels_lut(m):
        return labels_lut[m]

    # Pair images with their label maps by filename, keeping both lists
    # aligned by index.
    paths_imgs = fs.gen_paths(dir_imgs, fs.filter_is_img)
    paths_segm_labels = fs.gen_paths(dir_segm_labels)
    paths_pairs = fs.fname_pairs(paths_imgs, paths_segm_labels)
    paths_imgs, paths_segm_labels = map(list, zip(*paths_pairs))

    if val_list is not None:
        # Split into train/val by entity name.
        train_idx, val_idx = du.get_train_val_split_from_names(paths_imgs,
                                                               val_list)

        # Images.
        paths_imgs_train = [paths_imgs[i] for i in train_idx]
        fpath_lmdb_imgs_train = os.path.join(
            dir_dst, '%scontext_imgs_train_lmdb' % dst_prefix)
        to_lmdb.imgs_to_lmdb(paths_imgs_train, fpath_lmdb_imgs_train)

        paths_imgs_val = [paths_imgs[i] for i in val_idx]
        fpath_lmdb_imgs_val = os.path.join(
            dir_dst, '%scontext_imgs_val_lmdb' % dst_prefix)
        to_lmdb.imgs_to_lmdb(paths_imgs_val, fpath_lmdb_imgs_val)

        # Ground truth.
        paths_segm_labels_train = [paths_segm_labels[i] for i in train_idx]
        fpath_lmdb_segm_labels_train = os.path.join(
            dir_dst, '%scontext_labels_train_lmdb' % dst_prefix)
        to_lmdb.matfiles_to_lmdb(paths_segm_labels_train,
                                 fpath_lmdb_segm_labels_train, 'LabelMap',
                                 lut=apply_labels_lut)

        paths_segm_labels_val = [paths_segm_labels[i] for i in val_idx]
        fpath_lmdb_segm_labels_val = os.path.join(
            dir_dst, '%scontext_labels_val_lmdb' % dst_prefix)
        to_lmdb.matfiles_to_lmdb(paths_segm_labels_val,
                                 fpath_lmdb_segm_labels_val, 'LabelMap',
                                 lut=apply_labels_lut)

        return (len(paths_imgs_train), len(paths_imgs_val),
                fpath_lmdb_imgs_train, fpath_lmdb_segm_labels_train,
                fpath_lmdb_imgs_val, fpath_lmdb_segm_labels_val)
    else:
        fpath_lmdb_imgs = os.path.join(
            dir_dst, '%scontext_imgs_lmdb' % dst_prefix)
        to_lmdb.imgs_to_lmdb(paths_imgs, fpath_lmdb_imgs)
        fpath_lmdb_segm_labels = os.path.join(
            dir_dst, '%scontext_labels_lmdb' % dst_prefix)
        to_lmdb.matfiles_to_lmdb(paths_segm_labels, fpath_lmdb_segm_labels,
                                 'LabelMap', lut=apply_labels_lut)
        return len(paths_imgs), fpath_lmdb_imgs, fpath_lmdb_segm_labels
def img_to_lmdb(dir_imgs, dir_segm_labels,
                fpath_labels_list, fpath_labels_list_subset,
                dst_prefix, dir_dst,
                CAFFE_ROOT=None, val_list=None):
    '''
    Find the intersection of filenames in both directories and create one
    lmdb directory for each set.

    dir_imgs - directory scanned (recursively via get_file_list) for images
    dir_segm_labels - directory scanned for matching segmentation label files
    fpath_labels_list - path to the full labels list file
    fpath_labels_list_subset - path to the reduced (59-class) labels list file
    dst_prefix - prefix for the generated lmdb directory names ('' if None)
    dir_dst - directory in which the lmdb directories are created
    CAFFE_ROOT - forwarded to the to_lmdb conversion helpers
    val_list - list of entities to exclude from train (validation subset
        e.g. ['2008_000002', '2010_000433']); when None, a single
        val-named lmdb pair is produced instead of a train/val split

    Returns (n_train, n_val, imgs_train_lmdb, labels_train_lmdb,
    imgs_val_lmdb, labels_val_lmdb) when val_list is given, otherwise
    (n, imgs_lmdb, labels_lmdb).

    NOTE(review): unlike pascal_context_to_lmdb, image/label paths here come
    from get_file_list on each directory independently — they are assumed to
    be index-aligned, not filename-matched. Confirm against callers.
    '''
    # Cleaned up: removed large blocks of commented-out dead code and a
    # leftover debug `print labels_lut`.
    if dst_prefix is None:
        dst_prefix = ''

    # LUT maps the full label id space onto the reduced (59-class) subset.
    labels_list = get_labels_list(fpath_labels_list)
    labels_59_list = get_labels_list(fpath_labels_list_subset)
    labels_lut = get_labels_lut(labels_list, labels_59_list)

    def apply_labels_lut(m):
        return labels_lut[m]

    paths_imgs = get_file_list(dir_imgs, [])
    paths_segm_labels = get_file_list(dir_segm_labels, [])

    if val_list is not None:
        # do train/val split
        train_idx, val_idx = get_train_val_split(paths_imgs, val_list)

        # images
        paths_imgs_train = [paths_imgs[i] for i in train_idx]
        fpath_lmdb_imgs_train = os.path.join(
            dir_dst, '%scontext_imgs_train_lmdb' % dst_prefix)
        to_lmdb.imgs_to_lmdb(paths_imgs_train, fpath_lmdb_imgs_train,
                             CAFFE_ROOT=CAFFE_ROOT)

        paths_imgs_val = [paths_imgs[i] for i in val_idx]
        fpath_lmdb_imgs_val = os.path.join(
            dir_dst, '%scontext_imgs_val_lmdb' % dst_prefix)
        to_lmdb.imgs_to_lmdb(paths_imgs_val, fpath_lmdb_imgs_val,
                             CAFFE_ROOT=CAFFE_ROOT)

        # ground truth
        paths_segm_labels_train = [paths_segm_labels[i] for i in train_idx]
        fpath_lmdb_segm_labels_train = os.path.join(
            dir_dst, '%scontext_labels_train_lmdb' % dst_prefix)
        to_lmdb.matfiles_to_lmdb(paths_segm_labels_train,
                                 fpath_lmdb_segm_labels_train, 'LabelMap',
                                 CAFFE_ROOT=CAFFE_ROOT, lut=apply_labels_lut)

        paths_segm_labels_val = [paths_segm_labels[i] for i in val_idx]
        fpath_lmdb_segm_labels_val = os.path.join(
            dir_dst, '%scontext_labels_val_lmdb' % dst_prefix)
        to_lmdb.matfiles_to_lmdb(paths_segm_labels_val,
                                 fpath_lmdb_segm_labels_val, 'LabelMap',
                                 CAFFE_ROOT=CAFFE_ROOT, lut=apply_labels_lut)

        return (len(paths_imgs_train), len(paths_imgs_val),
                fpath_lmdb_imgs_train, fpath_lmdb_segm_labels_train,
                fpath_lmdb_imgs_val, fpath_lmdb_segm_labels_val)
    else:
        fpath_lmdb_imgs = os.path.join(
            dir_dst, '%sval_imgs_lmdb' % dst_prefix)
        to_lmdb.imgs_to_lmdb(paths_imgs, fpath_lmdb_imgs,
                             CAFFE_ROOT=CAFFE_ROOT)
        fpath_lmdb_segm_labels = os.path.join(
            dir_dst, '%sval_labels_lmdb' % dst_prefix)
        to_lmdb.matfiles_to_lmdb(paths_segm_labels, fpath_lmdb_segm_labels,
                                 'LabelMap', CAFFE_ROOT=CAFFE_ROOT,
                                 lut=apply_labels_lut)
        return len(paths_imgs), fpath_lmdb_imgs, fpath_lmdb_segm_labels