def _makeDataset(self, inputter, data_file, metadata=None, dataset_size=1, shapes=None):
  """Builds a batched dataset from ``inputter`` over ``data_file``.

  Optionally initializes the inputter with ``metadata`` first, asserts that
  the inputter reports ``dataset_size`` elements, and — when ``shapes`` is
  given — checks the static shape of each listed feature in both the
  training features and the serving features.

  Args:
    inputter: The inputter under test.
    data_file: Path to the data file to read.
    metadata: Optional metadata passed to ``inputter.initialize``.
    dataset_size: Expected value of ``inputter.get_dataset_size``.
    shapes: Optional dict mapping feature names to expected static shapes.

  Returns:
    A ``(next_element, transformed)`` tuple: the next batched features and
    the output of ``inputter.transform_data`` on them.
  """
  if metadata is not None:
    inputter.initialize(metadata)
  self.assertEqual(dataset_size, inputter.get_dataset_size(data_file))
  dataset = (inputter.make_dataset(data_file)
             .map(lambda *arg: inputter.process(item_or_tuple(arg))))
  dataset = dataset.padded_batch(1, padded_shapes=data.get_padded_shapes(dataset))
  iterator = dataset.make_initializable_iterator()
  # Piggyback on the table initializers collection so the iterator is
  # initialized together with the lookup tables.
  tf.add_to_collection(tf.GraphKeys.TABLE_INITIALIZERS, iterator.initializer)
  next_element = iterator.get_next()
  if shapes is not None:
    serving_features = inputter.get_serving_input_receiver().features
    for features in (next_element, serving_features):
      self.assertNotIn("raw", features)
      for field, shape in six.iteritems(shapes):
        self.assertIn(field, features)
        self.assertAllEqual(shape, features[field].get_shape().as_list())
  transformed = inputter.transform_data(next_element)
  return next_element, transformed
def _first_element(inputter, data_file, metadata):
  """Reads and processes the first element of ``data_file``.

  The inputter is initialized with ``metadata``; every processed feature is
  given a leading batch dimension of size 1 before being transformed.

  Returns:
    A ``(features, transformed)`` tuple: the batch-expanded feature dict and
    the output of ``inputter.transform_data`` on it.
  """
  inputter.initialize(metadata)
  iterator = inputter.make_dataset(data_file).make_initializable_iterator()
  # Initialize the iterator alongside the lookup tables.
  tf.add_to_collection(tf.GraphKeys.TABLE_INITIALIZERS, iterator.initializer)
  features = inputter.process(iterator.get_next())
  for key, tensor in six.iteritems(features):
    features[key] = tf.expand_dims(tensor, 0)
  return features, inputter.transform_data(features)
def _first_element(inputter, data_file, metadata):
  """Reads and processes the first element of ``data_file``.

  The inputter is initialized with ``metadata``; each processed feature is
  expanded with a leading batch dimension of size 1 before transformation.

  Returns:
    A ``(batched, transformed)`` tuple: the batch-expanded feature dict and
    the output of ``inputter.transform_data`` on it.
  """
  inputter.initialize(metadata)
  iterator = inputter.make_dataset(data_file).make_initializable_iterator()
  # Initialize the iterator alongside the lookup tables.
  tf.add_to_collection(tf.GraphKeys.TABLE_INITIALIZERS, iterator.initializer)
  raw = inputter.process(iterator.get_next())
  batched = {key: tf.expand_dims(value, 0) for key, value in raw.items()}
  transformed = inputter.transform_data(batched)
  return batched, transformed
def _first_element(inputter, data_file, metadata=None):
  """Reads and processes the first element of ``data_file``.

  When ``metadata`` is given, the inputter is initialized with it first.
  A batch-expanded copy of the features — with static shapes set from
  ``inputter.padded_shapes`` — is fed to ``inputter.transform_data``.

  Returns:
    A ``(features, transformed)`` tuple. Note that ``features`` is the
    *unbatched* processed dict, while ``transformed`` is computed from the
    batch-expanded copy.
  """
  if metadata is not None:
    inputter.initialize(metadata)
  iterator = inputter.make_dataset(data_file).make_initializable_iterator()
  # Initialize the iterator alongside the lookup tables.
  tf.add_to_collection(tf.GraphKeys.TABLE_INITIALIZERS, iterator.initializer)
  features = inputter.process(iterator.get_next())

  def _batch(name, tensor):
    # Add a batch dimension of 1 and pin the static shape expected
    # downstream by transform_data.
    expanded = tf.expand_dims(tensor, 0)
    expanded.set_shape([None] + inputter.padded_shapes[name])
    return expanded

  batched = {name: _batch(name, tensor)
             for name, tensor in six.iteritems(features)}
  transformed = inputter.transform_data(batched)
  return features, transformed