def test_build_tf_record_input_reader(self):
  """Builds a batch-1 dataset from a TFRecord and checks the first batch.

  With default options (no `load_instance_masks`) the output dict must omit
  groundtruth_instance_masks and contain image, class and box tensors of the
  expected shapes/values for the fixture record.
  """
  tf_record_path = self.create_tf_record()
  input_reader_text_proto = """
    shuffle: false
    num_readers: 1
    tf_record_input_reader {{
      input_path: '{0}'
    }}
  """.format(tf_record_path)
  input_reader_proto = input_reader_pb2.InputReader()
  text_format.Merge(input_reader_text_proto, input_reader_proto)
  tensor_dict = dataset_builder.make_initializable_iterator(
      dataset_builder.build(input_reader_proto, batch_size=1)).get_next()

  with tf.train.MonitoredSession() as sess:
    output_dict = sess.run(tensor_dict)

  # assertNotIn reports the offending key on failure, unlike
  # assertTrue(key not in dict).
  self.assertNotIn(fields.InputDataFields.groundtruth_instance_masks,
                   output_dict)
  # assertEqual: assertEquals is a deprecated unittest alias (removed in
  # Python 3.12).
  self.assertEqual((1, 4, 5, 3),
                   output_dict[fields.InputDataFields.image].shape)
  self.assertAllEqual(
      [[2]], output_dict[fields.InputDataFields.groundtruth_classes])
  self.assertEqual(
      (1, 1, 4),
      output_dict[fields.InputDataFields.groundtruth_boxes].shape)
  self.assertAllEqual(
      [0.0, 0.0, 1.0, 1.0],
      output_dict[fields.InputDataFields.groundtruth_boxes][0][0])
def test_build_tf_record_input_reader_with_batch_size_two_and_masks(self):
  """Checks the instance-mask shape when batching two examples with masks."""
  tf_record_path = self.create_tf_record()
  input_reader_text_proto = """
    shuffle: false
    num_readers: 1
    load_instance_masks: true
    tf_record_input_reader {{
      input_path: '{0}'
    }}
  """.format(tf_record_path)
  input_reader_proto = input_reader_pb2.InputReader()
  text_format.Merge(input_reader_text_proto, input_reader_proto)

  def one_hot_class_encoding_fn(tensor_dict):
    # Classes are 1-based in the record; shift to 0-based before one-hot.
    classes_key = fields.InputDataFields.groundtruth_classes
    tensor_dict[classes_key] = tf.one_hot(
        tensor_dict[classes_key] - 1, depth=3)
    return tensor_dict

  dataset = dataset_builder.build(
      input_reader_proto,
      transform_input_data_fn=one_hot_class_encoding_fn,
      batch_size=2)
  tensor_dict = dataset_builder.make_initializable_iterator(
      dataset).get_next()

  with tf.train.MonitoredSession() as sess:
    output_dict = sess.run(tensor_dict)

  self.assertAllEqual(
      [2, 1, 4, 5],
      output_dict[fields.InputDataFields.groundtruth_instance_masks].shape)
def test_make_initializable_iterator_with_hashTable(self):
  """The initializable iterator works on a pipeline that maps via a HashTable.

  Keys map to their reversal (1 -> -1, 0 -> 0, -1 -> 1); keys absent from the
  table fall through to default_value 100.
  """
  keys = [1, 0, -1]
  lookup_table = tf.contrib.lookup.HashTable(
      initializer=tf.contrib.lookup.KeyValueTensorInitializer(
          keys=keys, values=list(reversed(keys))),
      default_value=100)
  dataset = tf.data.Dataset.from_tensor_slices([[1, 2, -1, 5]])
  dataset = dataset.map(lookup_table.lookup)
  data = dataset_builder.make_initializable_iterator(dataset).get_next()
  table_init = tf.tables_initializer()
  with self.test_session() as sess:
    sess.run(table_init)
    self.assertAllEqual(sess.run(data), [-1, 100, 1, 100])
def test_build_tf_record_input_reader_and_load_instance_masks(self):
  """Instance masks appear in the output when load_instance_masks is set."""
  tf_record_path = self.create_tf_record()
  input_reader_text_proto = """
    shuffle: false
    num_readers: 1
    load_instance_masks: true
    tf_record_input_reader {{
      input_path: '{0}'
    }}
  """.format(tf_record_path)
  input_reader_proto = input_reader_pb2.InputReader()
  text_format.Merge(input_reader_text_proto, input_reader_proto)

  dataset = dataset_builder.build(input_reader_proto, batch_size=1)
  tensor_dict = dataset_builder.make_initializable_iterator(
      dataset).get_next()

  with tf.train.MonitoredSession() as sess:
    output_dict = sess.run(tensor_dict)

  self.assertAllEqual(
      (1, 1, 4, 5),
      output_dict[fields.InputDataFields.groundtruth_instance_masks].shape)
def test_sample_one_of_n_shards(self):
  """With sample_1_of_n_examples: 2, every other example is kept.

  The fixture writes 4 examples with source_ids '0'..'3'; subsampling by 2
  should yield ids '0' then '2' on consecutive iterator pulls.
  """
  tf_record_path = self.create_tf_record(num_examples=4)
  input_reader_text_proto = """
    shuffle: false
    num_readers: 1
    sample_1_of_n_examples: 2
    tf_record_input_reader {{
      input_path: '{0}'
    }}
  """.format(tf_record_path)
  input_reader_proto = input_reader_pb2.InputReader()
  text_format.Merge(input_reader_text_proto, input_reader_proto)
  tensor_dict = dataset_builder.make_initializable_iterator(
      dataset_builder.build(input_reader_proto, batch_size=1)).get_next()

  with tf.train.MonitoredSession() as sess:
    output_dict = sess.run(tensor_dict)
    self.assertAllEqual(['0'], output_dict[fields.InputDataFields.source_id])
    output_dict = sess.run(tensor_dict)
    # assertAllEqual for consistency with the first check; the original
    # assertEquals is a deprecated unittest alias (removed in Python 3.12).
    self.assertAllEqual(['2'], output_dict[fields.InputDataFields.source_id])
def get_next(config):
  """Builds a dataset from `config` and returns its next-element tensors."""
  dataset = dataset_builder.build(config)
  iterator = dataset_builder.make_initializable_iterator(dataset)
  return iterator.get_next()