def _get_dataset_next(self, files, config, batch_size):
  """Build a batched int32 dataset over `files` and return its next-batch op.

  Every text line is converted to a single int32 value, the values are
  grouped into batches of `batch_size`, and a TF1-style one-shot iterator
  supplies the next-element tensor.

  Args:
    files: Filenames read via tf.data.TextLineDataset.
    config: Configuration forwarded to dataset_builder.read_dataset.
    batch_size: Number of parsed values per batch.

  Returns:
    A tensor that yields the next batch when evaluated.
  """

  def _parse_line(line):
    # Each input line is expected to hold one integer encoded as text.
    return [tf.string_to_number(line, out_type=tf.int32)]

  pipeline = dataset_builder.read_dataset(tf.data.TextLineDataset, files,
                                          config)
  pipeline = pipeline.map(_parse_line).batch(batch_size)
  return pipeline.make_one_shot_iterator().get_next()
def _get_dataset_next(self, files, config, batch_size):
  """Return the next-batch tensor of a line-parsing dataset over `files`.

  The dataset reads text lines, decodes each line into one int32 value,
  batches them, and exposes the result through a TF1 one-shot iterator.

  Args:
    files: Filenames read via tf.data.TextLineDataset.
    config: Configuration forwarded to dataset_builder.read_dataset.
    batch_size: Number of parsed values per batch.

  Returns:
    A tensor that yields the next batch when evaluated.
  """

  def _decode_line(raw_line):
    # Single integer per line, stored as text.
    return [tf.string_to_number(raw_line, out_type=tf.int32)]

  batched = (
      dataset_builder.read_dataset(tf.data.TextLineDataset, files, config)
      .map(_decode_line)
      .batch(batch_size))
  return batched.make_one_shot_iterator().get_next()
def _get_dataset_next(self, files, config, batch_size, num_batches_skip=0):
  """Build a batched int32 dataset over `files` and return its next element.

  Text lines are decoded to single int32 values and batched; optionally the
  first `num_batches_skip` batches are dropped (skip is applied after
  batching, so whole batches are skipped). The iterator is obtained through
  get_iterator_next_for_testing so the helper works in both TF1 and TF2
  modes, selected via self.is_tf2().

  Args:
    files: Filenames read via tf.data.TextLineDataset.
    config: Configuration forwarded to dataset_builder.read_dataset.
    batch_size: Number of parsed values per batch.
    num_batches_skip: Number of leading batches to discard (default 0).

  Returns:
    The next-element value of the constructed dataset.
  """

  def _to_int32(line):
    # One integer per text line.
    return [tf.string_to_number(line, out_type=tf.int32)]

  pipeline = dataset_builder.read_dataset(tf.data.TextLineDataset, files,
                                          config)
  pipeline = pipeline.map(_to_int32).batch(batch_size)
  if num_batches_skip > 0:
    # Drop whole batches, not individual elements.
    pipeline = pipeline.skip(num_batches_skip)
  return get_iterator_next_for_testing(pipeline, self.is_tf2())