def test_dataset_info_from_proto():
  """`DatasetInfo.from_proto` restores splits, features and version."""
  builder = RandomShapedImageGenerator(data_dir=testing.make_tmp_dir())
  # Two splits with distinct shard layouts so each can be told apart below.
  train_split = dataset_info_pb2.SplitInfo(
      name="train", num_shards=2, shard_lengths=[4, 5])
  test_split = dataset_info_pb2.SplitInfo(
      name="test", num_shards=3, shard_lengths=[1, 2, 3])
  text_feature = feature_pb2.Feature(
      python_class_name="tensorflow_datasets.core.features.text_feature.Text",
      text=feature_pb2.TextFeature())
  proto = dataset_info_pb2.DatasetInfo(
      name="random_shaped_image_generator",
      version=str(builder.version),
      features=feature_pb2.Feature(
          python_class_name=(
              "tensorflow_datasets.core.features.features_dict.FeaturesDict"),
          features_dict=feature_pb2.FeaturesDict(
              features={"text": text_feature})),
      splits=[train_split, test_split])

  result = dataset_info.DatasetInfo.from_proto(builder=builder, proto=proto)

  assert result.splits["test"].shard_lengths == test_split.shard_lengths
  assert result.splits["train"].shard_lengths == train_split.shard_lengths
  assert set(result.features.keys()) == {"text"}
  assert result.version == builder.version
def setUpClass(cls):
  """Prepares a dummy MNIST dataset and patches the visualization path.

  The original ``BASE_PATH`` is stashed on the class so a matching
  teardown can restore it.
  """
  super(DocumentDatasetsTest, cls).setUpClass()
  cls._tfds_tmp_dir = testing.make_tmp_dir()
  mnist_builder = DummyMnist(data_dir=cls._tfds_tmp_dir)
  mnist_builder.download_and_prepare()
  # Redirect the visualization util to the local tmp dir so the tests
  # never touch GCS.
  cls._old_path = document_datasets.VisualizationDocUtil.BASE_PATH
  document_datasets.VisualizationDocUtil.BASE_PATH = cls._tfds_tmp_dir
def setUpClass(cls):
  """Builds the shared DummyDataset fixture once for the whole class.

  Creates a builder with fixed train/test example ranges and prepares it,
  exposing the result as ``cls._builder`` for every test method.
  """
  # Fix: chain to the parent's class-level setup, consistent with the
  # other setUpClass fixtures in this file (no-op on plain TestCase,
  # but required for cooperative multiple inheritance).
  super().setUpClass()
  cls._builder = DummyDataset(
      data_dir=testing.make_tmp_dir(),
      range_train=range(0, 666),
      range_test=range(1000, 1501),
      # Number of shards is arbitrary and does not matter for these tests.
      num_shards_train=13,
      num_shards_test=31,
  )
  cls._builder.download_and_prepare()
def setUpClass(cls):
  """Generates the shared dummy dataset once for all read tests."""
  super(DatasetBuilderReadTest, cls).setUpClass()
  cls._tfds_tmp_dir = testing.make_tmp_dir()
  shared_builder = DummyDatasetSharedGenerator(data_dir=cls._tfds_tmp_dir)
  shared_builder.download_and_prepare()
def setUpClass(cls):
  """Creates the builder used by the info tests.

  NOTE(review): ``download_and_prepare()`` is not called here —
  presumably tests that need prepared data trigger it themselves;
  confirm against the test methods.
  """
  super(DatasetInfoTest, cls).setUpClass()
  cls._tfds_tmp_dir = testing.make_tmp_dir()
  cls._builder = DummyDatasetSharedGenerator(data_dir=cls._tfds_tmp_dir)
def setUpClass(cls):
  """Prepares a dummy MNIST dataset shared by the documentation tests."""
  super(DocumentDatasetsTest, cls).setUpClass()
  cls._tfds_tmp_dir = testing.make_tmp_dir()
  mnist_builder = DummyMnist(data_dir=cls._tfds_tmp_dir)
  mnist_builder.download_and_prepare()
def setUpClass(cls):
  """Prepares the shared-generator builder once for all split tests."""
  super(SplitsTest, cls).setUpClass()
  cls._builder = testing.DummyDatasetSharedGenerator(
      data_dir=testing.make_tmp_dir())
  cls._builder.download_and_prepare()
def setUpClass(cls):
  """Builds and prepares the shared DummyDataset fixture.

  Exposes the prepared builder as ``cls._builder`` for the test methods.
  """
  # Fix: chain to the parent's class-level setup, consistent with the
  # other setUpClass fixtures in this file (no-op on plain TestCase,
  # but required for cooperative multiple inheritance).
  super().setUpClass()
  cls._builder = DummyDataset(data_dir=testing.make_tmp_dir())
  cls._builder.download_and_prepare()
def setUpClass(cls):
  """Creates the shared-generator builder for the info tests.

  NOTE(review): ``download_and_prepare()`` is not called here —
  presumably individual tests prepare data when they need it; confirm.
  """
  super(DatasetInfoTest, cls).setUpClass()
  # Re-enable download_and_prepare under Python 2 for this test class
  # (presumably disabled module-wide elsewhere — confirm against
  # dataset_builder's module setup).
  dataset_builder._is_py2_download_and_prepare_disabled = False
  cls._tfds_tmp_dir = testing.make_tmp_dir()
  cls._builder = DummyDatasetSharedGenerator(data_dir=cls._tfds_tmp_dir)
def setUpClass(cls):
  """Builds and prepares the DummyDataset used by the integration tests."""
  super(SplitsIntegrationTest, cls).setUpClass()
  cls._builder = DummyDataset(data_dir=testing.make_tmp_dir())
  cls._builder.download_and_prepare()
def setUpClass(cls):
  """Creates the real MNIST builder for the info tests.

  NOTE(review): the dataset is not downloaded/prepared here — presumably
  only the builder metadata is exercised; confirm against the tests.
  """
  super(DatasetInfoTest, cls).setUpClass()
  cls._tfds_tmp_dir = testing.make_tmp_dir()
  cls._builder = mnist.MNIST(data_dir=cls._tfds_tmp_dir)
def setUpClass(cls):
  """Prepares the supervised-keys dummy dataset once for the class."""
  super().setUpClass()
  cls._tfds_tmp_dir = testing.make_tmp_dir()
  supervised_builder = DummyDatasetWithSupervisedKeys(
      data_dir=cls._tfds_tmp_dir)
  supervised_builder.download_and_prepare()