def _get_version(name):
    """Get a downloaded dataset's version.

    Parameters
    ----------
    name : str
        Name of a dataset registered in ``MNE_DATASETS``.

    Returns
    -------
    version : str | None
        The version string recorded for the downloaded dataset, or
        ``None`` if the dataset is not present on disk.
    """
    from mne.datasets._fetch import fetch_dataset

    if not has_dataset(name):
        return None
    # Copy the registry entry so a read-only version query never mutates
    # the module-level MNE_DATASETS dict as a side effect.
    dataset_params = dict(MNE_DATASETS[name])
    dataset_params['dataset_name'] = name
    config_key = dataset_params['config_key']
    # get download path for specific dataset
    path = _get_path(path=None, key=config_key, name=name)
    return fetch_dataset(dataset_params, path=path, return_version=True)[1]
def _download_mne_dataset(name, processor, path, force_update,
                          update_path, download, accept=False):
    """Aux function for downloading internal MNE datasets.

    Parameters
    ----------
    name : str
        Name of a dataset registered in ``MNE_DATASETS``.
    processor : str | pooch processor | None
        ``'nested_untar'`` / ``'nested_unzip'`` select a pooch
        Untar/Unzip processor extracting into the dataset folder;
        anything else is passed through to ``fetch_dataset`` unchanged.
    path : str | None
        Base download location; resolved via ``_get_path``.
    force_update : bool
        Force an update of the dataset even if up to date.
    update_path : bool | None
        Whether to record the path in the MNE config.
    download : bool
        Whether to actually download when missing.
    accept : bool
        Whether to accept the dataset's license terms (if any).

    Returns
    -------
    The return value of ``fetch_dataset`` (the dataset path).
    """
    from mne.datasets._fetch import fetch_dataset

    # import pooch library for handling the dataset downloading
    pooch = _soft_import('pooch', 'dataset downloading', strict=True)
    # Copy the registry entry so the module-level MNE_DATASETS dict is
    # never mutated as a side effect of a download.
    dataset_params = dict(MNE_DATASETS[name])
    dataset_params['dataset_name'] = name
    config_key = dataset_params['config_key']
    folder_name = dataset_params['folder_name']

    # get download path for specific dataset
    path = _get_path(path=path, key=config_key, name=name)

    # instantiate processor that unzips file
    if processor == 'nested_untar':
        processor_ = pooch.Untar(extract_dir=op.join(path, folder_name))
    elif processor == 'nested_unzip':
        processor_ = pooch.Unzip(extract_dir=op.join(path, folder_name))
    else:
        processor_ = processor

    # handle case of multiple sub-datasets with different urls
    if name == 'visual_92_categories':
        dataset_params = []
        # distinct loop variable: do not shadow the ``name`` parameter
        for sub_name in ('visual_92_categories_1', 'visual_92_categories_2'):
            this_dataset = dict(MNE_DATASETS[sub_name])  # copy, as above
            this_dataset['dataset_name'] = sub_name
            dataset_params.append(this_dataset)

    return fetch_dataset(dataset_params=dataset_params, processor=processor_,
                         path=path, force_update=force_update,
                         update_path=update_path, download=download,
                         accept=accept)
def has_dataset(name):
    """Check for presence of a dataset.

    Parameters
    ----------
    name : str | dict
        The dataset to check. Strings refer to one of the supported
        datasets listed :ref:`here <datasets>`. A :class:`dict` can be
        used to check for user-defined datasets (see the Notes section of
        :func:`fetch_dataset`), and must contain keys ``dataset_name``,
        ``archive_name``, ``url``, ``folder_name``, ``hash``.

    Returns
    -------
    has : bool
        True if the dataset is present.
    """
    from mne.datasets._fetch import fetch_dataset

    if isinstance(name, dict):
        dataset_name = name['dataset_name']
        # Copy so the caller's dict is not mutated below.
        dataset_params = dict(name)
    else:
        # 'spm_face' is stored in the registry under the key 'spm'.
        dataset_name = 'spm' if name == 'spm_face' else name
        # Copy so the module-level MNE_DATASETS registry is not mutated.
        dataset_params = dict(MNE_DATASETS[dataset_name])
    dataset_params['dataset_name'] = dataset_name

    config_key = dataset_params['config_key']

    # get download path for specific dataset
    path = _get_path(path=None, key=config_key, name=dataset_name)

    dp = fetch_dataset(dataset_params, path=path, download=False,
                       check_version=False)
    # Brainstorm ('bst_*') datasets are checked by dataset name; all
    # others by their registered folder name.
    if dataset_name.startswith('bst_'):
        check = dataset_name
    else:
        check = MNE_DATASETS[dataset_name]['folder_name']
    return dp.endswith(check)