def test_varlen_images_feature_spec_raises(self, batch_size):
  """Varlen image parsing builds a graph, then fails at run time on a badly sized image."""
  record_path = os.path.join(self.create_tempdir().full_path, 'test.tfrecord')
  width, height = 640, 512
  padded_varlen_size = 3
  max_byte = 255  # Maximum value for byte-encoded image.
  valid_image_np = np.random.uniform(
      size=(height, width), high=max_byte).astype(np.int32)
  oversized_image_np = np.ones((1024, 1280)) * 255
  valid_png = image.numpy_to_image_string(valid_image_np, 'png')
  oversized_png = image.numpy_to_image_string(oversized_image_np, 'png')
  # Both records contain at least one image whose size disagrees with the spec.
  self._write_test_varlen_images_examples(
      [[oversized_png], [valid_png, oversized_png]], record_path)
  feature_spec = tensorspec_utils.TensorSpecStruct()
  feature_spec.varlen_images = tensorspec_utils.ExtendedTensorSpec(
      shape=(padded_varlen_size, height, width, 1),
      dtype=tf.uint8,
      name='varlen_images',
      data_format='png',
      varlen_default_value=0)
  dataset = tfdata.parallel_read(file_patterns=record_path)
  dataset = dataset.batch(batch_size, drop_remainder=True)
  dataset = tfdata.serialized_to_parsed(dataset, feature_spec, None)
  features = dataset.make_one_shot_iterator().get_next()
  # Graph construction succeeds and reports the padded static shape; the
  # size mismatch is only detectable when the tensors are evaluated.
  self.assertAllEqual(
      [None, padded_varlen_size, height, width, 1],
      features.varlen_images.get_shape().as_list())
  with self.session() as session:
    with self.assertRaises(tf.errors.InvalidArgumentError):
      session.run(features)
def test_images_decoding(self, np_data_type, tf_data_type):
  """Round-trips a PNG-encoded image through parsing and checks the dtype."""
  record_path = os.path.join(self.create_tempdir().full_path, 'test.tfrecord')
  width, height = 640, 512
  max_byte = np.iinfo(np_data_type).max  # Maximum value for byte-encoded image.
  image_np = np.random.uniform(
      size=(height, width), high=max_byte).astype(np.int32)
  encoded = image.numpy_to_image_string(image_np, 'png', np_data_type)
  self._write_test_images_examples([[encoded]], record_path)
  feature_spec = tensorspec_utils.TensorSpecStruct()
  feature_spec.images = tensorspec_utils.ExtendedTensorSpec(
      shape=(height, width, 1),
      dtype=tf_data_type,
      name='image/encoded',
      data_format='png')
  dataset = tfdata.parallel_read(file_patterns=record_path)
  dataset = dataset.batch(1, drop_remainder=True)
  if np_data_type == np.uint32:
    # NOTE(review): the dedicated uint32 test expects ValueError from the
    # same call — confirm which exception type is actually raised here.
    with self.assertRaises(tf.errors.InvalidArgumentError):
      dataset = tfdata.serialized_to_parsed(dataset, feature_spec, None)
  else:
    dataset = tfdata.serialized_to_parsed(dataset, feature_spec, None)
    features = dataset.make_one_shot_iterator().get_next()
    # Check the static tensor shape before evaluating.
    self.assertAllEqual(
        [1, height, width, 1],
        features.images.get_shape().as_list())
    with self.session() as session:
      np_features = session.run(features)
      self.assertEqual(np_features['images'].dtype, np_data_type)
def _write_test_sequence_examples(self, sequence_length, tfrecord_path):
  """Writes one SequenceExample with context, float, and JPEG image features.

  Args:
    sequence_length: Number of steps in each feature list.
    tfrecord_path: File path the serialized record is written to.
  """
  example = tf.train.SequenceExample()
  example.context.feature['context_feature'].int64_list.value.append(10)
  float_list = example.feature_lists.feature_list['sequence_feature']
  image_list = example.feature_lists.feature_list['image_sequence_feature']
  for step in range(sequence_length):
    float_list.feature.add().float_list.value.extend([3, 1])
    # Scale the shared test image per step so frames are distinguishable.
    encoded = image.numpy_to_image_string(TEST_IMAGE * step, 'jpeg')
    image_list.feature.add().bytes_list.value.append(encoded)
  with tf.python_io.TFRecordWriter(tfrecord_path) as writer:
    writer.write(example.SerializeToString())
def test_images_decoding_raises(self):
  """A uint32 image spec must be rejected when building the parse op."""
  record_path = os.path.join(self.create_tempdir().full_path, 'test.tfrecord')
  width, height = 640, 512
  max_byte = np.iinfo(np.uint32).max  # Maximum value for byte-encoded image.
  image_np = np.random.uniform(
      size=(height, width), high=max_byte).astype(np.int32)
  encoded = image.numpy_to_image_string(image_np, 'png', np.uint32)
  self._write_test_images_examples([[encoded]], record_path)
  feature_spec = tensorspec_utils.TensorSpecStruct()
  feature_spec.images = tensorspec_utils.ExtendedTensorSpec(
      shape=(height, width, 1),
      dtype=tf.uint32,
      name='image/encoded',
      data_format='png')
  dataset = tfdata.parallel_read(file_patterns=record_path)
  dataset = dataset.batch(1, drop_remainder=True)
  # The unsupported dtype is caught at graph-construction time.
  with self.assertRaises(ValueError):
    tfdata.serialized_to_parsed(dataset, feature_spec, None)