def convert_dataset(source_path, mode, dilate, target_path):
    """Convert a raw MNIST split into a Caffe LMDB database.

    :param source_path: directory containing the raw MNIST files
    :param mode: split identifier passed through to read_mnist (e.g. 'train')
    :param dilate: when 1, each image is dilated with a 3x3 kernel first
                   and the database name gets a 'dilated_' prefix
    :param target_path: directory in which '<prefix>mnist_<mode>_lmdb' is created
    """
    images, labels = read_mnist(mode, source_path)
    suffix = 'dilated_' if dilate == 1 else ''
    db_path = os.path.join(target_path,
                           '{0}mnist_{1}_lmdb'.format(suffix, mode))
    # Recreate the database directory from scratch; ignore "does not exist".
    try:
        shutil.rmtree(db_path)
    except OSError:
        pass
    os.makedirs(db_path, mode=0o744)

    num_images = images.shape[0]
    datum = Datum()
    datum.channels = 1
    datum.height = images.shape[1]
    datum.width = images.shape[2]

    mdb_env = lmdb.Environment(db_path, map_size=1099511627776, mode=0o664)
    mdb_txn = mdb_env.begin(write=True)
    mdb_dbi = mdb_env.open_db(txn=mdb_txn)
    labels_flat = labels.ravel()  # hoisted: loop-invariant
    for i in range(num_images):
        img = images[i, :, :]
        if dilate == 1:
            img = cv2.dilate(img, np.ones((3, 3)))
        datum.data = img.tostring()
        datum.label = int(labels_flat[i])
        key = '{:08d}'.format(i)
        mdb_txn.put(key, datum.SerializeToString(), db=mdb_dbi)
        # Commit in batches of 1000 records to bound transaction size.
        if (i + 1) % 1000 == 0:
            mdb_txn.commit()
            mdb_txn = mdb_env.begin(write=True)
    # BUG FIX: the original committed at `i % 1000 == 0` (a wasted commit at
    # i == 0) and skipped the final commit whenever num_images was an exact
    # multiple of 1000 -- which MNIST's 60000/10000 splits are -- silently
    # dropping up to 999 trailing records. Committing the (possibly empty)
    # trailing transaction unconditionally is always safe.
    mdb_txn.commit()
    mdb_env.close()
def build_dataset(img_data, label, dataset_path):
    """Build the lmdb-format training dataset.

    :param img_data: array of shape (N, 3*32*32), flattened RGB pixel rows
    :param label: per-image integer labels, indexable by image position
    :param dataset_path: path of the lmdb database to create
    """
    data_size = img_data.shape[0]
    img_width = FLAGS.resize
    img_height = FLAGS.resize
    img_channel = 3
    # Reshape flat rows to (N, C, H, W), then flip RGB -> BGR (Caffe order).
    img_data = np.reshape(img_data, (data_size, 3, 32, 32))
    img_data = img_data[:, ::-1, :, :]
    # Reserve generous space, scaled by the resize factor: lmdb fails writes
    # past map_size.
    map_size = img_data.nbytes * 10 * ((FLAGS.resize / 32) ** 2)
    env = lmdb.open(dataset_path, map_size=map_size)
    with env.begin(write=True) as txn:  # txn is a Transaction
        for data_idx in range(data_size):
            img = img_data[data_idx, :, :, :]
            # BUG FIX: the original used np.rollaxis(img, 0, 2), which turns
            # (C, H, W) into (H, C, W) -- a shape scipy.misc.imresize rejects
            # (the last axis must be 3 or 4 channels). Rolling axis 0 to the
            # end yields the (H, W, C) layout imresize expects.
            resized_image = scipy.misc.imresize(
                np.rollaxis(img, 0, 3),
                (FLAGS.resize, FLAGS.resize, 3), 'bilinear')
            # Back to (C, H, W) for the Caffe Datum payload.
            img = np.rollaxis(resized_image, 2)
            if (data_idx + 1) % 10000 == 0:
                # Report the true count (data_idx is zero-based).
                print("[msg]%d images have been written" % (data_idx + 1))
            datum = Datum()
            datum.channels = img_channel
            datum.height = img_height
            datum.width = img_width
            datum.label = int(label[data_idx])
            datum.data = img.tobytes()
            str_id = '{:08}'.format(data_idx)
            txn.put(str_id, datum.SerializeToString())
def extract(filename, dbname):
    """Convert a libsvm-format file of {-1,+1}-labelled samples into an lmdb
    database of Caffe Datum records with dense binary feature vectors.

    :param filename: libsvm input file ("+1 idx:val ..." per line)
    :param dbname: path of the lmdb database to create
    """
    env = lmdb.open(dbname, map_size=2 ** 30)
    with env.begin(write=True) as txn:
        with open(filename) as fin:
            # Stream the file line by line instead of materializing it all
            # at once with readlines().
            for i, line in enumerate(fin):
                elem = line.strip().split(" ")
                d = Datum()
                d.label = 1 if elem[0] == "+1" else 0
                # Dense binary vector: any feature index present in the line
                # is set to 1; its stated value is ignored.
                features = [0] * feature_dimension
                for e in elem[1:]:
                    pos, _ = e.split(":")
                    features[int(pos) - 1] = 1
                d.channels = 1
                d.height = 1
                d.width = feature_dimension
                d.data = "".join([chr(x) for x in features])
                txn.put(str(i), d.SerializeToString())
def _add_record(self, data, label=None, key=None):
    """Append one array (plus optional label) to the LMDB as a Caffe Datum.

    1-D and 2-D inputs get singleton trailing dimensions so the Datum always
    carries (channels, height, width). uint8 payloads go into the raw byte
    field; anything else is stored in float_data.
    """
    if data.ndim == 1:
        dims = np.array([data.shape[0], 1, 1], dtype=int)
    elif data.ndim == 2:
        dims = np.array([data.shape[0], data.shape[1], 1], dtype=int)
    else:
        dims = data.shape
    datum = Datum()
    datum.channels = dims[0]
    datum.height = dims[1]
    datum.width = dims[2]
    if data.dtype == np.uint8:
        datum.data = data.tostring()
    else:
        datum.float_data.extend(data.tolist())
    datum.label = -1 if label is None else int(label)
    # Auto-generate a zero-padded sequential key when none is supplied.
    if key is None:
        key = '{:08}'.format(self.num)
    with self.env.begin(write=True) as txn:
        txn.put(key.encode('ascii'), datum.SerializeToString())
    self.num += 1
def extract(filename, dbname):
    """Read a libsvm-style file and write each row into an lmdb database as a
    Caffe Datum (label 1 for '+1' lines, 0 otherwise)."""
    env = lmdb.open(dbname, map_size=2**30)
    with env.begin(write=True) as txn:
        with open(filename) as fin:
            for i, line in enumerate(fin.readlines()):
                tokens = line.strip().split(' ')
                datum = Datum()
                datum.label = 1 if tokens[0] == '+1' else 0
                # Dense binary feature vector: mark every index that appears.
                features = [0] * feature_dimension
                for token in tokens[1:]:
                    pos, v = token.split(':')
                    features[int(pos) - 1] = 1
                datum.channels = 1
                datum.height = 1
                datum.width = feature_dimension
                datum.data = "".join([chr(f) for f in features])
                txn.put(str(i), datum.SerializeToString())
def write_dataset(images, labels, indices, suffix, target_path):
    """Write the selected images and labels into a Caffe LMDB database.

    :param images: sequence of (3, H, W) image arrays
    :param labels: label array, read via labels.ravel()[img_idx]
    :param indices: indices of the images to write, in output order
    :param suffix: database name prefix ('<suffix>_lmdb')
    :param target_path: directory in which the database is created
    """
    db_path = os.path.join(target_path, '{0}_lmdb'.format(suffix))
    # Recreate the database directory from scratch; ignore "does not exist".
    try:
        shutil.rmtree(db_path)
    except OSError:
        pass
    os.makedirs(db_path, mode=0o744)

    num_images = indices.size
    datum = Datum()
    datum.channels = 3
    datum.height = images[0].shape[1]
    datum.width = images[0].shape[2]

    mdb_env = lmdb.Environment(db_path, map_size=1099511627776, mode=0o664)
    mdb_txn = mdb_env.begin(write=True)
    mdb_dbi = mdb_env.open_db(txn=mdb_txn)
    labels_flat = labels.ravel()  # hoisted: loop-invariant
    for i, img_idx in enumerate(indices):
        datum.data = images[img_idx].tostring()
        datum.label = int(labels_flat[img_idx])
        key = '{:08d}'.format(i)
        mdb_txn.put(key, datum.SerializeToString(), db=mdb_dbi)
        # Commit in batches of 1000 records to bound transaction size.
        if (i + 1) % 1000 == 0:
            mdb_txn.commit()
            mdb_txn = mdb_env.begin(write=True)
    # BUG FIX: the original committed at `i % 1000 == 0` (a wasted commit at
    # i == 0) and skipped the final commit whenever num_images was an exact
    # multiple of 1000, silently dropping the last uncommitted batch.
    # Committing the (possibly empty) trailing transaction is always safe.
    mdb_txn.commit()
    mdb_env.close()
def write(self, images, labels=None, keys=None, flag="labels"):
    """
    Write a single image or multiple images and the corresponding label(s).

    The images are expected to be NumPy arrays with
    height x width x channels layout.

    :param images: input images as list of numpy.ndarray with
        height x width x channels
    :type images: [numpy.ndarray]
    :param labels: corresponding labels (if applicable) as list
    :type labels: [float]
    :param keys: per-image key suffixes (e.g. the file paths listed in
        train.txt / val.txt), appended to the sequential key
    :type keys: [str]
    :param flag: "labels" stores the raw image bytes only; any other value
        stores full (channels, height, width) records in Caffe's C x H x W
        layout
    :return: list of keys corresponding to the written images
    :rtype: [string]
    """
    if type(labels) == list and len(labels) > 0:
        assert len(images) == len(labels)
    keys_ = []
    env = lmdb.open(self._lmdb_path,
                    map_size=max(1099511627776,
                                 len(images) * images[0].nbytes))
    # .tobytes() requires NumPy >= 1.9 (older releases only have .tostring());
    # hoisted out of the loop -- the check is loop-invariant.
    assert version_compare(
        numpy.version.version, '1.9'
    ) is True, "installed numpy is 1.9 or higher, change .tostring() to .tobytes()"
    with env.begin(write=True) as transaction:
        for i in range(len(images)):
            datum = Datum()
            if flag == "labels":
                # NOTE(review): this branch stores the raw bytes without ever
                # setting datum.channels/height/width, so readers must know
                # the geometry out of band -- confirm this is intended.
                datum.data = images[i].tobytes()
            else:
                datum.channels = images[i].shape[2]
                datum.height = images[i].shape[0]
                datum.width = images[i].shape[1]
                # BUG FIX: the original compared against numpy.float, a
                # deprecated alias of the builtin float that was removed in
                # NumPy 1.24 (AttributeError at runtime). numpy.float64 is
                # the dtype the original comparison actually matched.
                assert images[i].dtype == numpy.uint8 or images[
                    i].dtype == numpy.float64, "currently only numpy.uint8 and numpy.float images are supported"
                if images[i].dtype == numpy.uint8:
                    # Caffe stores bytes in C x H x W order.
                    datum.data = images[i].transpose(2, 0, 1).tobytes()
                else:
                    datum.float_data.extend(images[i].transpose(2, 0, 1).flat)
            # Shared tail (de-duplicated from the two original branches):
            # label, key construction, put, pointer bump, progress report.
            if type(labels) == list and len(labels) > 0:
                datum.label = labels[i]
            else:
                datum.label = -1
            key = to_key(self._write_pointer)
            if keys:
                key = key + "_" + keys[i]
            keys_.append(key)
            transaction.put(key.encode('UTF-8'), datum.SerializeToString())
            self._write_pointer += 1
            if i % 100 == 0:
                print("writing images to lmdb database... ", i)
    return keys_