Example #1
    def make_batch_feature(self,
                           filenames,
                           num_epochs,
                           batch_size,
                           reader_num_threads=1,
                           parser_num_threads=1,
                           shuffle=False,
                           shuffle_seed=None,
                           drop_final_batch=False):
        self.filenames = filenames
        self.num_epochs = num_epochs
        self.batch_size = batch_size

        return readers.make_batched_features_dataset(
            file_pattern=self.filenames,
            batch_size=self.batch_size,
            features={
                "file": parsing_ops.FixedLenFeature([], dtypes.int64),
                "record": parsing_ops.FixedLenFeature([], dtypes.int64),
                "keywords": parsing_ops.VarLenFeature(dtypes.string)
            },
            reader=core_readers.TFRecordDataset,
            num_epochs=self.num_epochs,
            shuffle=shuffle,
            shuffle_seed=shuffle_seed,
            reader_num_threads=reader_num_threads,
            parser_num_threads=parser_num_threads,
            drop_final_batch=drop_final_batch)
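
The helper above returns a tf.data dataset of parsed feature batches. The snippets on this page are extracted from TensorFlow test code and omit their imports; readers, core_readers, parsing_ops and dtypes refer to TensorFlow-internal modules. The sketch below is not part of the original example: it makes an equivalent call through the public tf.data.experimental.make_batched_features_dataset API and consumes the result eagerly, with a hypothetical file name and batch size.

# Sketch only: equivalent call via the public API, consumed eagerly (TF 2.x).
# The TFRecord path and batch size are hypothetical placeholders.
import tensorflow as tf

dataset = tf.data.experimental.make_batched_features_dataset(
    file_pattern=["/tmp/example.tfrecord"],   # hypothetical TFRecord file
    batch_size=8,
    features={
        "file": tf.io.FixedLenFeature([], tf.int64),
        "record": tf.io.FixedLenFeature([], tf.int64),
        "keywords": tf.io.VarLenFeature(tf.string),
    },
    num_epochs=1,
    shuffle=False)

for batch in dataset:
    # batch is a dict: "file" and "record" are dense int64 tensors of shape
    # [batch_size]; "keywords" is a tf.sparse.SparseTensor of strings.
    print(batch["file"].shape, batch["record"].shape)
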
Example #2
  def _read_batch_features(self,
                           filenames,
                           num_epochs,
                           batch_size,
                           reader_num_threads=1,
                           parser_num_threads=1,
                           shuffle=False,
                           shuffle_seed=None):
    self.filenames = filenames
    self.num_epochs = num_epochs
    self.batch_size = batch_size

    return readers.make_batched_features_dataset(
        file_pattern=self.filenames,
        batch_size=self.batch_size,
        features={
            "file": parsing_ops.FixedLenFeature([], dtypes.int64),
            "record": parsing_ops.FixedLenFeature([], dtypes.int64),
            "keywords": parsing_ops.VarLenFeature(dtypes.string)
        },
        reader=core_readers.TFRecordDataset,
        num_epochs=self.num_epochs,
        shuffle=shuffle,
        shuffle_seed=shuffle_seed,
        reader_num_threads=reader_num_threads,
        parser_num_threads=parser_num_threads).make_one_shot_iterator(
        ).get_next()
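
Unlike make_batch_feature in Example #1, this helper also builds a one-shot iterator and returns its get_next() tensors, so it is tied to graph-mode execution. The sketch below shows how those tensors might be evaluated, assuming the helper lives on a tf.test.TestCase subclass (an assumption); the test method name, file name and sizes are hypothetical.

  def testReadBatchFeatures(self):
    # Sketch only: evaluate the tensors returned by _read_batch_features.
    # The file name, epoch count and batch size are hypothetical.
    next_element = self._read_batch_features(
        filenames=["/tmp/example.tfrecord"],
        num_epochs=1,
        batch_size=8)
    with self.cached_session() as sess:
      batch = sess.run(next_element)
      # "file" and "record" come back as int64 arrays of shape [batch_size];
      # "keywords" comes back as a SparseTensorValue of byte strings.
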
Example #3
 def _predict_input_fn():
   dataset = readers.make_batched_features_dataset(
       examples_file, batch_size, feature_spec, num_epochs=1)
   def features_fn(features):
     features.pop('label')
     return features
   return dataset.map(features_fn)
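
examples_file, batch_size and feature_spec are defined by the enclosing test in the original source and are not shown here. feature_spec is a parsing spec of the same kind the other examples write inline; the values below are only a hypothetical sketch and reuse the parsing_ops and dtypes modules referenced above.

# Hypothetical stand-ins for the names the input functions close over; the
# real test defines its own. feature_spec maps feature names to parsing
# configurations, like the inline dicts in the examples above.
examples_file = "/tmp/examples.tfrecord"   # hypothetical file of tf.Example records
batch_size = 32
feature_spec = {
    "age": parsing_ops.FixedLenFeature([], dtypes.int64),
    "label": parsing_ops.FixedLenFeature([], dtypes.int64),
}
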
Example #4
  def make_batch_feature(self,
                         filenames,
                         num_epochs,
                         batch_size,
                         label_key=None,
                         reader_num_threads=1,
                         parser_num_threads=1,
                         shuffle=False,
                         shuffle_seed=None,
                         drop_final_batch=False):
    self.filenames = filenames
    self.num_epochs = num_epochs
    self.batch_size = batch_size

    return readers.make_batched_features_dataset(
        file_pattern=self.filenames,
        batch_size=self.batch_size,
        features={
            "file": parsing_ops.FixedLenFeature([], dtypes.int64),
            "record": parsing_ops.FixedLenFeature([], dtypes.int64),
            "keywords": parsing_ops.VarLenFeature(dtypes.string),
            "label": parsing_ops.FixedLenFeature([], dtypes.string),
        },
        label_key=label_key,
        reader=core_readers.TFRecordDataset,
        num_epochs=self.num_epochs,
        shuffle=shuffle,
        shuffle_seed=shuffle_seed,
        reader_num_threads=reader_num_threads,
        parser_num_threads=parser_num_threads,
        drop_final_batch=drop_final_batch)
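
Because this variant passes label_key, make_batched_features_dataset splits each element into a (features, label) pair by itself, so no manual pop of the label is needed (compare Examples #5 and #6 below). A minimal consumption sketch follows; the test method, file name and argument values are hypothetical.

  def testLabelKey(self):
    # Sketch only (hypothetical test method): with label_key="label", each
    # element is already a (features_dict, label) pair and "label" has been
    # removed from the features dict.
    dataset = self.make_batch_feature(
        filenames=["/tmp/example.tfrecord"],  # hypothetical TFRecord file
        num_epochs=1,
        batch_size=8,
        label_key="label")
    features, label = dataset.make_one_shot_iterator().get_next()
    # features still holds "file", "record" and "keywords"; label is a string
    # tensor of shape [batch_size].
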
Example #5
 def _eval_input_fn():
     dataset = readers.make_batched_features_dataset(examples_file,
                                                     batch_size,
                                                     feature_spec,
                                                     num_epochs=1)
     return dataset.map(lambda features:
                        (features, features.pop('label')))
Example #6
 def _eval_input_fn():
   dataset = readers.make_batched_features_dataset(
       examples_file, batch_size, feature_spec, num_epochs=1)
   return dataset.map(lambda features: (features, features.pop('label')))
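
Input functions like these follow the tf.estimator convention: the eval input yields (features, labels) pairs, while the predict input in Example #3 yields features only. The sketch below shows how they are typically handed to an estimator; the feature column and estimator are hypothetical and assume the model has already been trained.

# Sketch only: passing the input functions above to a tf.estimator.Estimator.
# The feature column and estimator are hypothetical, and evaluate/predict
# assume a previously trained checkpoint exists in the model directory.
import tensorflow as tf

age = tf.feature_column.numeric_column("age")   # hypothetical feature column
estimator = tf.estimator.LinearClassifier(feature_columns=[age])

metrics = estimator.evaluate(input_fn=_eval_input_fn)              # Examples #5 and #6
predictions = list(estimator.predict(input_fn=_predict_input_fn))  # Example #3
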