def convert_test(tfrecords_path):
    # Separate graph/session for the pre-trained Inception model so its
    # variables do not collide with the dataset graph built below.
    inception_graph = tf.Graph()
    inception_sess = tf.Session(graph=inception_graph)
    with inception_graph.as_default(), inception_sess.as_default():
        incept_model = inception.inception_model()

    with tf.Graph().as_default(), tf.Session().as_default() as sess:
        # Build a dataset over the raw test images found on disk.
        images_ds = tf.contrib.data.Dataset.from_tensor_slices(
            tf.constant(tf.gfile.ListDirectory(paths.TEST_DIR))) \
            .map(read_test_example)

        images_iter = images_ds.make_initializable_iterator()
        next_image = images_iter.get_next()

        sess.run(images_iter.initializer)

        print('Writing ', tfrecords_path)

        with tf.python_io.TFRecordWriter(
                tfrecords_path,
                tf.python_io.TFRecordCompressionType.NONE) as writer:
            try:
                while True:
                    img, img_id = sess.run(next_image)

                    # Run the image through Inception to get its bottleneck features.
                    with inception_graph.as_default():
                        inception_output = incept_model(
                            inception_sess, img).reshape(-1).tolist()

                    print('writing %s - %s' % (len(img), img_id))

                    example = tf.train.Example(features=tf.train.Features(
                        feature={
                            'id': bytes_feature(img_id.encode()),
                            consts.IMAGE_RAW_FIELD: bytes_feature(img),
                            consts.INCEPTION_OUTPUT_FIELD: float_feature(inception_output)
                        }))
                    writer.write(example.SerializeToString())
            except tf.errors.OutOfRangeError:
                print('End of the dataset')

            writer.flush()

        print('Finished')
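Both converters lean on small bytes_feature / float_feature helpers defined elsewhere in the repo. If you are reading this listing in isolation, they are the usual thin wrappers around tf.train.Feature; a minimal sketch, not necessarily the repo's exact code:

import tensorflow as tf

def bytes_feature(value):
    # Wrap a single byte string (e.g. raw image bytes or an encoded id).
    return tf.train.Feature(bytes_list=tf.train.BytesList(value=[value]))

def float_feature(value):
    # Wrap a list of floats (e.g. the flattened Inception output or a one-hot label).
    return tf.train.Feature(float_list=tf.train.FloatList(value=value))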
def convert_train(tfrecords_path):
    one_hot_encoder, _ = dataset.one_hot_label_encoder()

    # Separate graph/session for the pre-trained Inception model.
    inception_graph = tf.Graph()
    inception_sess = tf.Session(graph=inception_graph)
    with inception_graph.as_default(), inception_sess.as_default():
        incept_model = inception.inception_model()

    with tf.Graph().as_default(), tf.Session().as_default() as sess:
        labels_path = tf.placeholder(dtype=tf.string)

        # Read the labels CSV, skip the header row, parse each row and load the image.
        images_ds = tf.contrib.data.TextLineDataset(labels_path) \
            .skip(1) \
            .map(parse_row) \
            .map(read_example)

        labels_iter = images_ds.make_initializable_iterator()
        next_label = labels_iter.get_next()

        sess.run(labels_iter.initializer, feed_dict={labels_path: paths.LABELS})

        print('Writing ', tfrecords_path)

        bar = pyprind.ProgBar(13000, update_interval=1, width=60)
        augmenter = image_augmenter()

        def get_inception_output(img):
            # Bottleneck features from the pre-trained Inception graph.
            with inception_graph.as_default():
                return incept_model(inception_sess, img).reshape(-1).tolist()

        with tf.python_io.TFRecordWriter(
                tfrecords_path,
                tf.python_io.TFRecordCompressionType.NONE) as writer:
            try:
                while True:
                    img_id, img, breed_label = sess.run(next_label)

                    one_hot_label = one_hot_encoder(
                        [breed_label]).reshape(-1).tolist()

                    # Write the original image plus its augmented variants.
                    images = [img]
                    images.extend(augmenter(img))
                    for image in images:
                        example = build_train_example(
                            image, one_hot_label, breed_label,
                            get_inception_output(image))
                        writer.write(example.SerializeToString())

                    bar.update()
            except tf.errors.OutOfRangeError:
                print('End of the dataset')

            writer.flush()

        print('Finished')
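The records written by convert_train can later be parsed with tf.parse_single_example. A rough sketch of such a parser, assuming the train examples also carry the raw image bytes under consts.IMAGE_RAW_FIELD (as the test examples do) and treating the float features as variable-length so no sizes have to be hardcoded:

import tensorflow as tf
import consts  # the repo's constants module

def parse_train_record(serialized):
    # Field names mirror the consts.* keys used when the records were written.
    features = tf.parse_single_example(serialized, features={
        consts.IMAGE_RAW_FIELD: tf.FixedLenFeature([], tf.string),
        consts.LABEL_ONE_HOT_FIELD: tf.VarLenFeature(tf.float32),
        consts.INCEPTION_OUTPUT_FIELD: tf.VarLenFeature(tf.float32),
    })
    image_raw = features[consts.IMAGE_RAW_FIELD]
    one_hot = tf.sparse_tensor_to_dense(features[consts.LABEL_ONE_HOT_FIELD])
    inception_output = tf.sparse_tensor_to_dense(features[consts.INCEPTION_OUTPUT_FIELD])
    return image_raw, one_hot, inception_output

# Usage with the same TF 1.x dataset API used above, e.g.:
# ds = tf.contrib.data.TFRecordDataset([tfrecords_path]).map(parse_train_record)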
            consts.LABEL_ONE_HOT_FIELD: float_feature(one_hot_label),
            consts.INCEPTION_OUTPUT_FIELD: float_feature(inception_output)
        }))
    return example


if __name__ == '__main__':
    one_hot_encoder, _ = dataset.one_hot_label_encoder()

    with tf.Graph().as_default(), \
         tf.Session().as_default() as sess, \
         tf.python_io.TFRecordWriter(
             paths.STANFORD_DS_TF_RECORDS,
             tf.python_io.TFRecordCompressionType.NONE) as writer:

        incept_model = inception.inception_model()

        def get_inception_output(img):
            # Bottleneck features from the pre-trained Inception graph.
            return incept_model(sess, img).reshape(-1).tolist()

        # Walk the Stanford Dogs annotations: one directory per breed,
        # one annotation file per image.
        for breed_dir in os.listdir(annotations_root_dir):
            print(breed_dir)

            for annotation_file in os.listdir(
                    os.path.join(annotations_root_dir, breed_dir)):
                print(annotation_file)

                annotation = parse_annotation(
                    os.path.join(annotations_root_dir, breed_dir,
                                 annotation_file))
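parse_annotation is defined elsewhere in the repo. The Stanford Dogs annotation files are Pascal-VOC-style XML, so a minimal stand-in that pulls out the breed name and bounding box could look like the following; the exact fields read and the return type here are assumptions, not the repo's actual implementation:

import xml.etree.ElementTree as ET

def parse_annotation(path):
    # Stanford Dogs annotations are Pascal-VOC-style XML: one <object> per dog
    # with a <name> (breed) and a <bndbox> giving the crop rectangle.
    root = ET.parse(path).getroot()
    obj = root.find('object')
    bndbox = obj.find('bndbox')
    return {
        'breed': obj.find('name').text,
        'xmin': int(bndbox.find('xmin').text),
        'ymin': int(bndbox.find('ymin').text),
        'xmax': int(bndbox.find('xmax').text),
        'ymax': int(bndbox.find('ymax').text),
    }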