class DummyConfigMnist(testing.DummyDataset):
  """Same as DummyMnist, but with config."""

  # Both configs are identical apart from their name, so build them from a
  # single template over the two names.
  BUILDER_CONFIGS = [
      dataset_builder.BuilderConfig(  # pylint: disable=g-complex-comprehension
          name=config_name,
          version='0.1.0',
          description='testing config',
      )
      for config_name in ('dummy_config', 'dummy_config2')
  ]
class VersionDummyDataset(DummyDatasetWithConfigs):
  """Config-less version metadata comes from the enclosing scope."""

  # Dataset-level version/release notes (picked up from the surrounding
  # scope where this class is declared).
  VERSION = global_version
  RELEASE_NOTES = global_release_notes

  BUILDER_CONFIGS = [
      # Inherits the dataset-level version above (no explicit version).
      dataset_builder.BuilderConfig(
          name="default",
          description="Add 1 to the records",
      ),
      # Overrides version and release notes at the config level.
      dataset_builder.BuilderConfig(
          name="custom",
          description="Add 2 to the records",
          version=config_version,
          release_notes=config_release_notes,
      ),
  ]
class DummyConfigMnist(testing.DummyMnist):
  """DummyMnist exposing exactly one builder config for testing."""

  BUILDER_CONFIGS = [
      dataset_builder.BuilderConfig(
          name='dummy_config',
          version='0.1.0',
          description='testing config',
      ),
  ]
class DummyNewConfig(DummyMnist):
  """DummyMnist with a new config and an old config with a legacy version."""

  BUILDER_CONFIGS = [
      # Plain config pinned at 1.0.0.
      dataset_builder.BuilderConfig(
          name='new_config',
          version=utils.Version('1.0.0'),
          description='Config description.',
      ),
      # Config at 2.0.0 that still advertises 1.0.0 as readable.
      dataset_builder.BuilderConfig(
          name='old_config',
          version=utils.Version('2.0.0'),
          supported_versions=[
              utils.Version('1.0.0'),
          ],
          description='Config description.',
      ),
  ]
class MyDataset(testing.DummyMnist):  # pylint: disable=unused-variable
  """Dummy dataset."""

  # Two configs sharing the same version; descriptions follow the pattern
  # '<name> description' (spelled out here rather than generated).
  BUILDER_CONFIGS = [
      dataset_builder.BuilderConfig(
          name='default_config',
          version='2.0.0',
          description='default_config description',
      ),
      dataset_builder.BuilderConfig(
          name='other_config',
          version='2.0.0',
          description='other_config description',
      ),
  ]
class DummyMnistConfigs(DummyMnist):
  """Builder with config and manual instructions."""

  # Presence of this attribute marks the dataset as requiring manual download.
  MANUAL_DOWNLOAD_INSTRUCTIONS = """Some manual instructions."""

  BUILDER_CONFIGS = [
      dataset_builder.BuilderConfig(
          name="config_name",
          version=utils.Version("0.0.1"),
          description="Config description.",
      ),
  ]
def __init__(self, builder_dir: utils.PathLike):
  """Constructor.

  Restores name, version, release notes and config from the serialized
  `dataset_info.json` found in `builder_dir`, then delegates to the parent
  builder constructor.

  Args:
    builder_dir: Directory of the dataset to load (e.g.
      `~/tensorflow_datasets/mnist/3.0.0/`)

  Raises:
    FileNotFoundError: If the builder_dir does not exist.
    ValueError: If the restored info has no features (dataset generated by
      an old TFDS version).
  """
  builder_dir = os.path.expanduser(builder_dir)
  info_path = os.path.join(builder_dir, dataset_info.DATASET_INFO_FILENAME)
  if not tf.io.gfile.exists(info_path):
    raise FileNotFoundError(
        f'Could not load `ReadOnlyBuilder`: {info_path} does not exist.'
    )
  # Restore name, config, info
  info_proto = dataset_info.read_from_json(info_path)
  self.name = info_proto.name
  self.VERSION = version_lib.Version(info_proto.version)  # pylint: disable=invalid-name
  release_notes = info_proto.release_notes or {}
  self.RELEASE_NOTES = release_notes  # pylint: disable=invalid-name
  if info_proto.module_name:
    # Overwrite the module so documenting `ReadOnlyBuilder` points to the
    # original source code.
    self.__module__ = info_proto.module_name
  if info_proto.config_name:
    builder_config = dataset_builder.BuilderConfig(
        name=info_proto.config_name,
        description=info_proto.config_description,
        version=info_proto.version or None,
        release_notes=release_notes,
    )
  else:
    builder_config = None
  # __init__ will call _build_data_dir, _create_builder_config,
  # _pick_version to set the data_dir, config, and version
  super().__init__(
      data_dir=builder_dir,
      config=builder_config,
      version=info_proto.version,
  )
  # For pickling, should come after super().__init__ which is setting that
  # same _original_state attribute.
  self._original_state = dict(builder_dir=builder_dir)
  if self.info.features is None:
    raise ValueError(
        f'Cannot restore {self.info.full_name}. It likely means the dataset '
        'was generated with an old TFDS version (<=3.2.1).')
def __init__(self, builder_dir: str):
  """Constructor.

  Restores name, version and config from the serialized `dataset_info.json`
  found in `builder_dir`, then delegates to the parent builder constructor.

  Args:
    builder_dir: Directory of the dataset to load (e.g.
      `~/tensorflow_datasets/mnist/3.0.0/`)

  Raises:
    FileNotFoundError: If the builder_dir does not exist.
    ValueError: If the restored info has no features (dataset generated by
      an old TFDS version).
  """
  builder_dir = os.path.expanduser(builder_dir)
  info_path = os.path.join(builder_dir, dataset_info.DATASET_INFO_FILENAME)
  if not tf.io.gfile.exists(info_path):
    raise FileNotFoundError(
        f'Could not load `ReadOnlyBuilder`: {info_path} does not exist.'
    )
  # Restore name, config, info
  info_proto = dataset_info.read_from_json(info_path)
  self.name = info_proto.name
  self.VERSION = version_lib.Version(info_proto.version)  # pylint: disable=invalid-name
  if info_proto.config_name:
    builder_config = dataset_builder.BuilderConfig(
        name=info_proto.config_name,
        description=info_proto.config_description,
        version=info_proto.version or None,
    )
  else:
    builder_config = None
  # __init__ will call _build_data_dir, _create_builder_config,
  # _pick_version to set the data_dir, config, and version
  super().__init__(
      data_dir=builder_dir,
      config=builder_config,
      version=info_proto.version,
  )
  if self.info.features is None:
    raise ValueError(
        f'Cannot restore {self.info.full_name}. It likely means the dataset '
        'was generated with an old TFDS version (<=3.2.1).')
def __init__(self, builder_dir: str):
  """Constructor.

  Restores name and config from the serialized `dataset_info.json` found in
  `builder_dir`, then delegates to the parent builder constructor.

  Args:
    builder_dir: Directory of the dataset to load (e.g.
      `~/tensorflow_datasets/mnist/3.0.0/`)

  Raises:
    FileNotFoundError: If the builder_dir does not exist.
  """
  builder_dir = os.path.expanduser(builder_dir)
  info_path = os.path.join(builder_dir, dataset_info.DATASET_INFO_FILENAME)
  if not tf.io.gfile.exists(info_path):
    raise FileNotFoundError(
        f'Could not load `ReadOnlyBuilder`: {info_path} does not exist.'
    )
  # Restore name, config, info
  info_proto = dataset_info.read_from_json(info_path)
  self.name = info_proto.name
  if info_proto.config_name:
    builder_config = dataset_builder.BuilderConfig(
        name=info_proto.config_name,
        version=info_proto.version,
        # TODO(tfds): Restore description.
    )
  else:
    builder_config = None
  # __init__ will call _build_data_dir, _create_builder_config,
  # _pick_version to set the data_dir, config, and version
  super().__init__(
      data_dir=builder_dir,
      config=builder_config,
      version=info_proto.version,
  )
class DummyDatasetWithConfig(testing.DummyDataset, skip_registration=True):
  """DummyDataset with two minimal configs (one versioned, one not)."""

  BUILDER_CONFIGS = [
      # 'x' pins its own version; 'y' inherits the dataset-level one.
      dataset_builder.BuilderConfig(name='x', version='2.0.0'),
      dataset_builder.BuilderConfig(name='y'),
  ]