Code example #1
    def test_load_real_metric(self, metric_name):
        with tempfile.TemporaryDirectory() as temp_data_dir:
            download_config = DownloadConfig()
            download_config.download_mode = GenerateMode.FORCE_REDOWNLOAD
            load_metric(metric_name,
                        data_dir=temp_data_dir,
                        download_config=download_config)
Code example #2
    def test_load_real_dataset(self, dataset_name):
        with tempfile.TemporaryDirectory() as temp_data_dir:
            download_config = DownloadConfig()
            download_config.download_mode = GenerateMode.FORCE_REDOWNLOAD

            dataset = load_dataset(dataset_name, data_dir=temp_data_dir, download_config=download_config)
            for split in dataset.keys():
                self.assertTrue(len(dataset[split]) > 0)
Code example #3
    def test_load_real_dataset_local(self, dataset_name):
        with tempfile.TemporaryDirectory() as temp_data_dir:
            download_config = DownloadConfig()
            download_config.download_mode = GenerateMode.FORCE_REDOWNLOAD
            download_and_prepare_kwargs = {"download_config": download_config}

            dataset = load_dataset(
                "./datasets/" + dataset_name,
                data_dir=temp_data_dir,
                download_and_prepare_kwargs=download_and_prepare_kwargs,
            )
            for split in dataset.keys():
                self.assertTrue(len(dataset[split]) > 0)
Code example #4
File: test_dataset_common.py  Project: xwild/nlp
    def test_load_real_dataset(self, dataset_name):
        if "/" not in dataset_name:
            logging.info("Skip {} because it is a canonical dataset")
            return

        with tempfile.TemporaryDirectory() as temp_data_dir:
            download_config = DownloadConfig()
            download_config.download_mode = GenerateMode.FORCE_REDOWNLOAD
            download_and_prepare_kwargs = {"download_config": download_config}

            dataset = load_dataset(
                dataset_name,
                data_dir=temp_data_dir,
                download_and_prepare_kwargs=download_and_prepare_kwargs)
            for split in dataset.keys():
                self.assertTrue(len(dataset[split]) > 0)
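
The snippets above are method bodies lifted out of a test class, so they do not show the imports or parameterization they rely on. Below is a minimal standalone sketch of the shared pattern (force a fresh download into a temporary directory, then load and inspect the splits); the top-level nlp import path and the "squad" dataset name are assumptions for illustration, not taken from the original test file.

import tempfile

# Assumed top-level exports of the archived `nlp` package (predecessor of `datasets`).
from nlp import DownloadConfig, GenerateMode, load_dataset

with tempfile.TemporaryDirectory() as temp_data_dir:
    # Force a fresh download instead of reusing any cached copy.
    download_config = DownloadConfig()
    download_config.download_mode = GenerateMode.FORCE_REDOWNLOAD

    # "squad" is a hypothetical example; any dataset identifier works here.
    dataset = load_dataset("squad", data_dir=temp_data_dir, download_config=download_config)
    print({split: len(dataset[split]) for split in dataset})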