Example #1
    def test_decode_example_with_sparse_tensor_to_dense(self):
        np_indices = np.array([1, 2, 5])
        np_values = np.array([0.1, 0.2, 0.6]).astype('f')
        np_shape = np.array([6])
        np_dense = np.array([0.0, 0.1, 0.2, 0.0, 0.0, 0.6]).astype('f')
        example = example_pb2.Example(features=feature_pb2.Features(
            feature={
                'indices': self._encode_int64_feature(np_indices),
                'values': self._encode_float_feature(np_values),
            }))

        serialized_example = example.SerializeToString()

        with self.test_session():
            serialized_example = array_ops.reshape(serialized_example,
                                                   shape=[])
            keys_to_features = {
                'indices': parsing_ops.VarLenFeature(dtype=dtypes.int64),
                'values': parsing_ops.VarLenFeature(dtype=dtypes.float32),
            }
            items_to_handlers = {
                'labels':
                tfexample_decoder.SparseTensor(shape=np_shape, densify=True),
            }
            decoder = TFExampleDecoder(keys_to_features, items_to_handlers)
            [tf_labels] = decoder.decode(serialized_example, ['labels'])
            labels = tf_labels.eval()
            self.assertAllClose(labels, np_dense)
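
The encoding helpers used throughout these examples (_encode_int64_feature, _encode_float_feature, _bytes_feature) are not shown on this page. Below is a minimal sketch of how such helpers are commonly written with feature_pb2; this is an assumption, and the actual helpers in the test class may differ.

from tensorflow.core.example import feature_pb2


def _encode_int64_feature(ndarray):
    # Wrap a flattened integer array in an Int64List feature.
    return feature_pb2.Feature(
        int64_list=feature_pb2.Int64List(value=ndarray.flatten().tolist()))


def _encode_float_feature(ndarray):
    # Wrap a flattened float array in a FloatList feature.
    return feature_pb2.Feature(
        float_list=feature_pb2.FloatList(value=ndarray.flatten().tolist()))


def _bytes_feature(ndarray):
    # Encode each string element to bytes and wrap them in a BytesList feature.
    values = [v.encode('utf-8') for v in ndarray.flatten().tolist()]
    return feature_pb2.Feature(bytes_list=feature_pb2.BytesList(value=values))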
Example #2
    def test_decode_example_with_tensor(self):
        tensor_shape = (2, 3, 1)
        np_array = np.random.rand(2, 3, 1)

        example = example_pb2.Example(features=feature_pb2.Features(
            feature={
                'image/depth_map': self._encode_float_feature(np_array),
            }))

        serialized_example = example.SerializeToString()

        with self.test_session():
            serialized_example = array_ops.reshape(serialized_example,
                                                   shape=[])

            keys_to_features = {
                'image/depth_map':
                parsing_ops.FixedLenFeature(
                    tensor_shape,
                    dtypes.float32,
                    default_value=array_ops.zeros(tensor_shape))
            }

            items_to_handlers = {
                'depth': tfexample_decoder.Tensor('image/depth_map')
            }

            decoder = TFExampleDecoder(keys_to_features, items_to_handlers)
            [tf_depth] = decoder.decode(serialized_example, ['depth'])
            depth = tf_depth.eval()

        self.assertAllClose(np_array, depth)
Example #3
    def test_decode_example_with_fix_len_tensor_with_shape(self):
        np_array = np.array([[1, 2, 3], [4, 5, 6]])

        example = example_pb2.Example(features=feature_pb2.Features(
            feature={
                'labels': self._encode_int64_feature(np_array),
            }))

        serialized_example = example.SerializeToString()

        with self.test_session():
            serialized_example = array_ops.reshape(serialized_example,
                                                   shape=[])
            keys_to_features = {
                'labels':
                parsing_ops.FixedLenFeature(np_array.shape,
                                            dtype=dtypes.int64),
            }
            items_to_handlers = {
                'labels': tfexample_decoder.Tensor('labels',
                                                   shape=np_array.shape),
            }
            decoder = TFExampleDecoder(keys_to_features, items_to_handlers)
            [tf_labels] = decoder.decode(serialized_example, ['labels'])
            labels = tf_labels.eval()
            self.assertAllEqual(labels, np_array)
Example #4
    def test_decode_example_with_string_tensor(self):
        tensor_shape = (2, 3, 1)
        np_array = np.array([[['ab'], ['cd'], ['ef']],
                             [['ghi'], ['jkl'], ['mnop']]])

        example = example_pb2.Example(features=feature_pb2.Features(
            feature={
                'labels': self._bytes_feature(np_array),
            }))

        serialized_example = example.SerializeToString()

        with self.test_session():
            serialized_example = array_ops.reshape(serialized_example,
                                                   shape=[])
            keys_to_features = {
                'labels':
                parsing_ops.FixedLenFeature(tensor_shape,
                                            dtypes.string,
                                            default_value=constant_op.constant(
                                                '',
                                                shape=tensor_shape,
                                                dtype=dtypes.string))
            }
            items_to_handlers = {'labels': tfexample_decoder.Tensor('labels')}
            decoder = TFExampleDecoder(keys_to_features, items_to_handlers)
            [tf_labels] = decoder.decode(serialized_example, ['labels'])
            labels = tf_labels.eval()

            labels = labels.astype(np_array.dtype)
            self.assertTrue(np.array_equal(np_array, labels))
Example #5
    def test_decode_image_with_item_handler_callback(self):
        image_shape = (2, 3, 3)
        for image_encoding in ['jpeg', 'png']:
            image, serialized_example = self.generate_image(
                image_format=image_encoding, image_shape=image_shape)

            with self.test_session():

                def conditional_decoding(keys_to_tensors):
                    """See base class."""
                    image_buffer = keys_to_tensors['image/encoded']
                    image_format = keys_to_tensors['image/format']

                    def decode_png():
                        return image_ops.decode_png(image_buffer, 3)

                    def decode_jpg():
                        return image_ops.decode_jpeg(image_buffer, 3)

                    image = control_flow_ops.case(
                        {math_ops.equal(image_format, 'png'): decode_png},
                        default=decode_jpg,
                        exclusive=True)
                    image = array_ops.reshape(image, image_shape)
                    return image

                keys_to_features = {
                    'image/encoded':
                    parsing_ops.FixedLenFeature((),
                                                dtypes.string,
                                                default_value=''),
                    'image/format':
                    parsing_ops.FixedLenFeature((),
                                                dtypes.string,
                                                default_value='jpeg')
                }

                items_to_handlers = {
                    'image':
                    tfexample_decoder.ItemHandlerCallback(
                        ['image/encoded', 'image/format'],
                        conditional_decoding)
                }

                decoder = TFExampleDecoder(keys_to_features, items_to_handlers)
                [tf_image] = decoder.decode(serialized_example, ['image'])
                decoded_image = tf_image.eval()
                if image_encoding == 'jpeg':
                    # JPEG decoding is lossy, so compare with a loose tolerance
                    # (needed to keep the test stable on CI/Jenkins):
                    image = image.astype(np.float32)
                    decoded_image = decoded_image.astype(np.float32)
                    self.assertAllClose(image,
                                        decoded_image,
                                        rtol=.5,
                                        atol=1.001)
                else:
                    self.assertAllClose(image, decoded_image, atol=0)
Example #6
def _create_tfrecord_dataset(tmpdir):
    if not gfile.Exists(tmpdir):
        gfile.MakeDirs(tmpdir)

    data_sources = test_utils.create_tfrecord_files(tmpdir, num_files=1)

    keys_to_features = {
        'image/encoded':
        tf.FixedLenFeature(shape=(), dtype=dtypes.string, default_value=''),
        'image/format':
        tf.FixedLenFeature(shape=(), dtype=dtypes.string,
                           default_value='jpeg'),
        'image/class/label':
        tf.FixedLenFeature(shape=[1],
                           dtype=dtypes.int64,
                           default_value=array_ops.zeros([1],
                                                         dtype=dtypes.int64))
    }

    items_to_handlers = {
        'image': tfslim.tfexample_decoder.Image(),
        'label': tfslim.tfexample_decoder.Tensor('image/class/label'),
    }

    decoder = TFExampleDecoder(keys_to_features, items_to_handlers)

    return Dataset(data_sources=data_sources,
                   reader=tf.TFRecordReader,
                   decoder=decoder,
                   num_samples=100)
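
A minimal consumption sketch for the Dataset returned above, assuming slim's DatasetDataProvider API; the directory path and shuffle setting are placeholders, not part of the original function.

import tensorflow as tf
slim = tf.contrib.slim

dataset = _create_tfrecord_dataset('/tmp/tfrecord_test')
provider = slim.dataset_data_provider.DatasetDataProvider(dataset, shuffle=False)
image, label = provider.get(['image', 'label'])

with tf.Session() as sess:
    # slim.queues.QueueRunners starts and stops the queue runners feeding the provider.
    with slim.queues.QueueRunners(sess):
        image_out, label_out = sess.run([image, label])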
Example #7
    def test_decode_example_multi_shape_key_tensor(self):
        np_image = np.random.rand(2, 3, 1).astype('f')
        np_labels = np.array([[[1], [2], [3]], [[4], [5], [6]]])
        height, width, depth = np_labels.shape

        example = example_pb2.Example(features=feature_pb2.Features(
            feature={
                'image': self._encode_float_feature(np_image),
                'image/shape': self._encode_int64_feature(
                    np.array(np_image.shape)),
                'labels': self._encode_int64_feature(np_labels),
                'labels/height': self._encode_int64_feature(np.array([height
                                                                      ])),
                'labels/width': self._encode_int64_feature(np.array([width])),
                'labels/depth': self._encode_int64_feature(np.array([depth])),
            }))

        serialized_example = example.SerializeToString()

        with self.test_session():
            serialized_example = array_ops.reshape(serialized_example,
                                                   shape=[])
            keys_to_features = {
                'image': parsing_ops.VarLenFeature(dtype=dtypes.float32),
                'image/shape': parsing_ops.VarLenFeature(dtype=dtypes.int64),
                'labels': parsing_ops.VarLenFeature(dtype=dtypes.int64),
                'labels/height': parsing_ops.VarLenFeature(dtype=dtypes.int64),
                'labels/width': parsing_ops.VarLenFeature(dtype=dtypes.int64),
                'labels/depth': parsing_ops.VarLenFeature(dtype=dtypes.int64),
            }
            items_to_handlers = {
                'image':
                tfexample_decoder.Tensor('image', shape_keys='image/shape'),
                'labels':
                tfexample_decoder.Tensor('labels',
                                         shape_keys=[
                                             'labels/height', 'labels/width',
                                             'labels/depth'
                                         ]),
            }
            decoder = TFExampleDecoder(keys_to_features, items_to_handlers)
            [tf_image, tf_labels] = decoder.decode(serialized_example,
                                                   ['image', 'labels'])
            self.assertAllEqual(tf_image.eval(), np_image)
            self.assertAllEqual(tf_labels.eval(), np_labels)
Example #8
    def test_decode_example_with_bounding_box(self):
        num_bboxes = 10
        np_ymin = np.random.rand(num_bboxes, 1)
        np_xmin = np.random.rand(num_bboxes, 1)
        np_ymax = np.random.rand(num_bboxes, 1)
        np_xmax = np.random.rand(num_bboxes, 1)
        np_bboxes = np.hstack([np_ymin, np_xmin, np_ymax, np_xmax])

        example = example_pb2.Example(features=feature_pb2.Features(
            feature={
                'image/object/bbox/ymin': self._encode_float_feature(np_ymin),
                'image/object/bbox/xmin': self._encode_float_feature(np_xmin),
                'image/object/bbox/ymax': self._encode_float_feature(np_ymax),
                'image/object/bbox/xmax': self._encode_float_feature(np_xmax),
            }))
        serialized_example = example.SerializeToString()

        with self.test_session():
            serialized_example = array_ops.reshape(serialized_example,
                                                   shape=[])

            keys_to_features = {
                'image/object/bbox/ymin':
                parsing_ops.VarLenFeature(dtypes.float32),
                'image/object/bbox/xmin':
                parsing_ops.VarLenFeature(dtypes.float32),
                'image/object/bbox/ymax':
                parsing_ops.VarLenFeature(dtypes.float32),
                'image/object/bbox/xmax':
                parsing_ops.VarLenFeature(dtypes.float32),
            }

            items_to_handlers = {
                'object/bbox':
                tfexample_decoder.BoundingBox(['ymin', 'xmin', 'ymax', 'xmax'],
                                              'image/object/bbox/'),
            }

            decoder = TFExampleDecoder(keys_to_features, items_to_handlers)
            [tf_bboxes] = decoder.decode(serialized_example, ['object/bbox'])
            bboxes = tf_bboxes.eval()

        self.assertAllClose(np_bboxes, bboxes)
Example #9
    def decode_example(self, serialized_example, item_handler, image_format):
        """Decodes the given serialized example with the specified item handler.

        Args:
            serialized_example: a serialized TF example string.
            item_handler: the item handler used to decode the image.
            image_format: the image format being decoded.

        Returns:
            the decoded image found in the serialized Example.
        """
        serialized_example = array_ops.reshape(serialized_example, shape=[])
        decoder = TFExampleDecoder(keys_to_features={
            'image/encoded':
            tf.FixedLenFeature((), dtypes.string, default_value=''),
            'image/format':
            tf.FixedLenFeature((), dtypes.string, default_value=image_format),
        },
                                   items_to_handlers={'image': item_handler})
        [tf_image] = decoder.decode(serialized_example, ['image'])
        return tf_image
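
A hedged usage sketch (not part of the original class): calling decode_example from within the same test class with slim's Image item handler. Here serialized_example is assumed to come from an image-generating helper such as the one used in Example #5.

            item_handler = tfexample_decoder.Image(
                image_key='image/encoded', format_key='image/format', channels=3)
            tf_image = self.decode_example(serialized_example, item_handler,
                                           image_format='jpeg')
            with self.test_session():
                decoded_image = tf_image.eval()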
Example #10
    def test_decode_example_with_item_handler_callback(self):
        np.random.seed(0)
        tensor_shape = (2, 3, 1)
        np_array = np.random.rand(2, 3, 1)

        example = example_pb2.Example(features=feature_pb2.Features(
            feature={
                'image/depth_map': self._encode_float_feature(np_array),
            }))

        serialized_example = example.SerializeToString()

        with self.test_session():
            serialized_example = array_ops.reshape(serialized_example,
                                                   shape=[])

            keys_to_features = {
                'image/depth_map':
                parsing_ops.FixedLenFeature(
                    tensor_shape,
                    dtypes.float32,
                    default_value=array_ops.zeros(tensor_shape))
            }

            def handle_depth(keys_to_tensors):
                depth = list(keys_to_tensors.values())[0]
                depth += 1
                return depth

            items_to_handlers = {
                'depth':
                tfexample_decoder.ItemHandlerCallback('image/depth_map',
                                                      handle_depth)
            }

            decoder = TFExampleDecoder(keys_to_features, items_to_handlers)
            [tf_depth] = decoder.decode(serialized_example, ['depth'])
            depth = tf_depth.eval()

        self.assertAllClose(np_array, depth - 1)
Example #11
    def test_decode_example_with_var_len_tensor(self):
        np_array = np.array([[[1], [2], [3]], [[4], [5], [6]]])

        example = example_pb2.Example(features=feature_pb2.Features(
            feature={
                'labels': self._encode_int64_feature(np_array),
            }))

        serialized_example = example.SerializeToString()

        with self.test_session():
            serialized_example = array_ops.reshape(serialized_example,
                                                   shape=[])
            keys_to_features = {
                'labels': parsing_ops.VarLenFeature(dtype=dtypes.int64),
            }
            items_to_handlers = {
                'labels': tfexample_decoder.Tensor('labels'),
            }
            decoder = TFExampleDecoder(keys_to_features, items_to_handlers)
            [tf_labels] = decoder.decode(serialized_example, ['labels'])
            labels = tf_labels.eval()
            self.assertAllEqual(labels, np_array.flatten())
Example #12
    def test_decode_example_with_int64_tensor(self):
        np_array = np.random.randint(1, 10, size=(2, 3, 1))

        example = example_pb2.Example(features=feature_pb2.Features(
            feature={
                'array': self._encode_int64_feature(np_array),
            }))

        serialized_example = example.SerializeToString()

        with self.test_session():
            serialized_example = array_ops.reshape(serialized_example,
                                                   shape=[])
            keys_to_features = {
                'array': parsing_ops.FixedLenFeature(np_array.shape,
                                                     dtypes.int64)
            }
            items_to_handlers = {
                'array': tfexample_decoder.Tensor('array'),
            }
            decoder = TFExampleDecoder(keys_to_features, items_to_handlers)
            [tf_array] = decoder.decode(serialized_example, ['array'])
            self.assertAllEqual(tf_array.eval(), np_array)
Example #13
    def make_data_provider(self, **kwargs):
        """Creates DataProvider instance for this input pipeline. Additional keyword arguments
        are passed to the DataProvider.
        """
        splitter_source = SplitTokensDecoder(
            tokens_feature_name='source_token',
            length_feature_name='source_len',
            append_token='SEQUENCE_END',
            delimiter=self.source_delimiter)

        splitter_target = SplitTokensDecoder(
            tokens_feature_name='target_token',
            length_feature_name='target_len',
            prepend_token='SEQUENCE_START',
            append_token='SEQUENCE_END',
            delimiter=self.target_delimiter)

        keys_to_features = {
            self.source_field: tf.FixedLenFeature((), tf.string),
            self.target_field: tf.FixedLenFeature((), tf.string, default_value="")
        }

        items_to_handlers = {
            'source_token': tfslim.tfexample_decoder.ItemHandlerCallback(
                keys=[self.source_field],
                func=lambda d: splitter_source.decode(d[self.source_field],
                                                      ['source_token'])[0]),
            'source_len': tfslim.tfexample_decoder.ItemHandlerCallback(
                keys=[self.source_field],
                func=lambda d: splitter_source.decode(d[self.source_field],
                                                      ['source_len'])[0]),
            'target_token': tfslim.tfexample_decoder.ItemHandlerCallback(
                keys=[self.target_field],
                func=lambda d: splitter_target.decode(d[self.target_field],
                                                      ['target_token'])[0]),
            'target_len': tfslim.tfexample_decoder.ItemHandlerCallback(
                keys=[self.target_field],
                func=lambda d: splitter_target.decode(d[self.target_field],
                                                      ['target_len'])[0])
        }

        decoder = TFExampleDecoder(keys_to_features, items_to_handlers)

        dataset = Dataset(data_sources=self.files, reader=tf.TFRecordReader, decoder=decoder)

        return DatasetDataProvider(dataset=dataset, shuffle=self.shuffle,
                                   num_epochs=self.num_epochs, **kwargs)
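
A minimal sketch of how the returned provider is typically consumed, assuming an input-pipeline instance named pipeline (the variable name is illustrative):

provider = pipeline.make_data_provider(num_readers=1)
source_tokens, source_len = provider.get(['source_token', 'source_len'])
target_tokens, target_len = provider.get(['target_token', 'target_len'])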
Example #14
    def make_data_provider(self, **kwargs):
        """Creates DataProvider instance for this input pipeline. Additional keyword arguments
        are passed to the DataProvider.
        """
        keys_to_features = {
            'image/encoded': tf.FixedLenFeature((), tf.string, default_value=''),
            'image/format': tf.FixedLenFeature((), tf.string,
                                               default_value=self.meta_data.get('image_format')),
            'image/class/label': tf.FixedLenFeature(
                [1], tf.int64, default_value=tf.zeros([1], dtype=tf.int64)),
        }

        image_shape = [self.meta_data.get('height'),
                       self.meta_data.get('width'),
                       self.meta_data.get('channels')]
        if not all(image_shape):
            # no reshaping should be done
            image_shape = None

        items_to_handlers = {
            'image': tfslim.tfexample_decoder.Image(shape=image_shape,
                                                    channels=self.meta_data.get('channels')),
            'label': tfslim.tfexample_decoder.Tensor('image/class/label', shape=[]),
        }

        decoder = TFExampleDecoder(keys_to_features, items_to_handlers)

        dataset = Dataset(
            data_sources=self.data_files,
            reader=tf.TFRecordReader,
            decoder=decoder,
            num_samples=self.meta_data.get('num_samples', {}).get(self.mode),
            num_classes=self.meta_data['num_classes'],
            items_to_descriptions=self.meta_data.get('items_to_descriptions', {}),
            meta_data=self.meta_data,
            labels_to_names=self.meta_data['labels_to_classes'])

        return DatasetDataProvider(dataset=dataset, shuffle=self.shuffle,
                                   num_epochs=self.num_epochs, **kwargs)
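
A minimal usage sketch, assuming an instance named input_pipeline (illustrative only): fetching the decoded image and scalar label from the provider built above.

import tensorflow as tf

provider = input_pipeline.make_data_provider()
image, label = provider.get(['image', 'label'])
# Convert to a float image in [0, 1] as a typical preprocessing step.
image = tf.image.convert_image_dtype(image, tf.float32)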