def data_path(subject, path=None, force_update=False, fnirs=False):
    """Get path to local copy of bbci_eeg_fnirs dataset URL.

    Parameters
    ----------
    subject : int
        Number of subject to use
    path : None | str
        Location of where to look for the data storing location.
        If None, the environment variable or config parameter
        ``MNE_DATASETS_BBCIFNIRS_PATH`` is used. If it doesn't exist, the
        "~/mne_data" directory is used. If the dataset
        is not found under the given path, the data
        will be automatically downloaded to the specified folder.
    force_update : bool
        Force update of the dataset even if a local copy exists.
    fnirs : bool
        If True, fetch the fNIRS portion of the dataset; otherwise the EEG
        portion.

    Returns
    -------
    path : list of str
        Local path to the given data file. This path is contained
        inside a list of length one, for compatibility.
    """  # noqa: E501
    if subject < 1 or subject > 30:
        raise ValueError(
            "Valid subjects between 1 and 30, subject {:d} requested".format(subject))
    key = 'MNE_DATASETS_BBCIFNIRS_PATH'
    path = _get_path(path, key, 'BBCI EEG-fNIRS')
    _do_path_update(path, None, key, 'BBCI EEG-fNIRS')
    basepath = op.join(path, 'MNE-eegfnirs-data')
    # exist_ok=True avoids the race between a separate isdir() check
    # and makedirs() when two processes fetch the dataset concurrently.
    os.makedirs(basepath, exist_ok=True)
    if fnirs:
        return fnirs_data_path(basepath, subject)
    else:
        return eeg_data_path(basepath, subject)
def data_path(
    self, subject, path=None, force_update=False, update_path=None, verbose=None
):
    """Return local file paths for one subject, fetching via pooch if needed.

    Files are downloaded from figshare into the configured MNE data
    directory for this dataset.  NOTE(review): the ``path`` argument is
    currently ignored — the configured MNE path is always used.
    """
    if subject not in self.subject_list:
        raise ValueError("Invalid subject number")

    subject_tag = "{:02d}".format(subject)
    sign = self.code.split()[1]
    key = "MNE_DATASETS_{:s}_PATH".format(sign)
    key_dest = "MNE-{:s}-data".format(sign.lower())
    # Fall back to ~/mne_data when the dataset path was never configured.
    if get_config(key) is None:
        set_config(key, osp.join(osp.expanduser("~"), "mne_data"))
    dest_dir = osp.join(_get_path(None, key, sign), key_dest)

    file_list = fs_get_file_list(self.figshare_id)
    registry = fs_get_file_hash(file_list)
    name_to_id = fs_get_file_id(file_list)
    fetcher = pooch.create(path=dest_dir, base_url=MAMEM_URL, registry=registry)

    # Characters 2-3 of the remote file name encode the subject number.
    return [
        fetcher.fetch(name_to_id[fname])
        for fname in name_to_id
        if fname[2:4] == subject_tag
    ]
def data_dl(url, sign, path=None, force_update=False, update_path=True, verbose=None):
    """Download file from url to specified path.

    This function should replace data_path as the MNE will not support the
    download of dataset anymore. This version is using Pooch.

    Parameters
    ----------
    url : str
        Path to remote location of data
    sign : str
        Signifier of dataset
    path : None | str
        Location of where to look for the BNCI data storing location. If None,
        the environment variable or config parameter
        ``MNE_DATASETS_(signifier)_PATH`` is used. If it doesn't exist, the
        "~/mne_data" directory is used. If the dataset is not found under the
        given path, the data will be automatically downloaded to the specified
        folder.
    force_update : bool
        Force update of the dataset even if a local copy exists.
    update_path : bool | None
        If True, set the MNE_DATASETS_(signifier)_PATH in mne-python config
        to the given path. If None, the user is prompted.
    verbose : bool, str, int, or None
        If not None, override default verbose level (see :func:`mne.verbose`).

    Returns
    -------
    path : str
        Local path to the downloaded file (the single path returned by
        ``retrieve``).
    """
    sign = sign.upper()
    key = "MNE_DATASETS_{:s}_PATH".format(sign)
    key_dest = "MNE-{:s}-data".format(sign.lower())
    if get_config(key) is None:
        set_config(key, osp.join(osp.expanduser("~"), "mne_data"))
    path = _get_path(path, key, sign)
    destination = _url_to_local_path(url, osp.join(path, key_dest))
    # Fetch the file
    # known_hash=None forces Pooch to (re-)download; a computed hash of the
    # existing file lets Pooch skip the download when the copy is intact.
    if not osp.isfile(destination) or force_update:
        if osp.isfile(destination):
            os.remove(destination)
        if not osp.isdir(osp.dirname(destination)):
            os.makedirs(osp.dirname(destination))
        known_hash = None
    else:
        known_hash = file_hash(destination)
    dlpath = retrieve(
        url, known_hash, fname=osp.basename(url), path=osp.dirname(destination)
    )
    # Offer to update the path
    if update_path:
        _do_path_update(path, update_path, key, sign)
    return dlpath
def data_path(url, sign, path=None, force_update=False, update_path=True,
              verbose=None):
    """Get path to local copy of given dataset URL.

    This is a low-level function useful for getting a local copy of a
    remote dataset.

    Parameters
    ----------
    url : str
        Path to remote location of data
    sign : str
        Signifier of dataset
    path : None | str
        Location of where to look for the BNCI data storing location.
        If None, the environment variable or config parameter
        ``MNE_DATASETS_(signifier)_PATH`` is used. If it doesn't exist, the
        "~/mne_data" directory is used. If the dataset
        is not found under the given path, the data
        will be automatically downloaded to the specified folder.
    force_update : bool
        Force update of the dataset even if a local copy exists.
    update_path : bool | None
        If True, set the MNE_DATASETS_(signifier)_PATH in mne-python
        config to the given path. If None, the user is prompted.
    verbose : bool, str, int, or None
        If not None, override default verbose level (see :func:`mne.verbose`).

    Returns
    -------
    destination : str
        Local path of the downloaded file.
    """  # noqa: E501
    sign = sign.upper()
    key = 'MNE_DATASETS_{:s}_PATH'.format(sign)
    key_dest = 'MNE-{:s}-data'.format(sign.lower())
    if get_config(key) is None:
        set_config(key, osp.join(osp.expanduser("~"), "mne_data"))
    path = _get_path(path, key, sign)
    destination = _url_to_local_path(url, op.join(path, key_dest))
    # Fetch the file
    if not op.isfile(destination) or force_update:
        if op.isfile(destination):
            os.remove(destination)
        # exist_ok=True avoids the isdir()/makedirs() race on concurrent runs
        os.makedirs(op.dirname(destination), exist_ok=True)
        _fetch_file(url, destination, print_destination=False)
    # Offer to update the path
    _do_path_update(path, update_path, key, sign)
    return destination
def data_path(
    self, subject, path=None, force_update=False, update_path=None, verbose=None
):
    """Return the local data path for one Weibo 2014 subject."""
    if subject not in self.subject_list:
        raise ValueError("Invalid subject number")
    root = _get_path(path, "MNE_DATASETS_WEIBO2014_PATH", "Weibo 2014")
    basepath = os.path.join(root, "MNE-weibo-2014")
    os.makedirs(basepath, exist_ok=True)
    return eeg_data_path(basepath, subject)
def data_path(
    self, subject, path=None, force_update=False, update_path=None, verbose=None
):
    """Return the local data path for one Zhou 2016 subject."""
    if subject not in self.subject_list:
        raise ValueError("Invalid subject number")
    root = _get_path(path, "MNE_DATASETS_ZHOU2016_PATH", "Zhou 2016")
    basepath = os.path.join(root, "MNE-zhou-2016")
    os.makedirs(basepath, exist_ok=True)
    return local_data_path(basepath, subject)
def data_path(
    self, subject, path=None, force_update=False, update_path=None, verbose=None
):
    """Return local record paths for one MAMEM subject, downloading if needed.

    The PhysioNet ``mssvepdb`` database provides .dat and .hea files (the
    main signal data), .win files (event windows and frequencies), and
    .flash files (exact flash times, if necessary).  One path (without
    extension) is returned per session; sessions are suffixed a, b, c, ...
    in the record names.

    Parameters
    ----------
    subject : int
        Subject number; must be in ``self.subject_list``.
    path, force_update, update_path, verbose
        See the dataset base-class documentation.  ``force_update``
        triggers a fresh download even when files are present.

    Returns
    -------
    subject_paths : list of str
        Extension-less paths to the subject's session records.
    """
    if subject not in self.subject_list:
        raise ValueError("Invalid subject number")

    sub = "{:02d}".format(subject)
    sign = self.code.split()[1]
    # Record-name pattern per MAMEM sub-dataset.
    patterns = {
        "MAMEM1": "dataset1/S0{}*.dat",
        "MAMEM2": "dataset2/T0{}*.dat",
        "MAMEM3": "dataset3/U0{}*.dat",
    }
    if sign not in patterns:
        # Previously an unknown signifier fell through to a NameError on fn.
        raise ValueError("Unknown MAMEM dataset signifier: {}".format(sign))
    fn = patterns[sign]

    key = "MNE_DATASETS_{:s}_PATH".format(sign)
    key_dest = "MNE-{:s}-data".format(sign.lower())
    path = _get_path(path, key, sign)
    path = os.path.join(path, key_dest)

    s_paths = glob.glob(os.path.join(path, fn.format(sub)))
    subject_paths = [os.path.splitext(name)[0] for name in s_paths]

    # If files for the subject are not present (or a refresh is requested),
    # download them from PhysioNet.
    if not subject_paths or force_update:
        datarec = wfdb.get_record_list("mssvepdb")
        # NOTE(review): this substring test against the glob pattern is kept
        # verbatim; verify it actually matches wfdb record names.
        datalist = [ele for ele in datarec if fn.format(sub) in ele]
        wfdb.io.dl_database(
            "mssvepdb", path, datalist, annotators="win", overwrite=force_update
        )

    # Return the file paths depending on the number of sessions.
    s_paths = glob.glob(os.path.join(path, fn.format(sub)))
    subject_paths = []
    for name in s_paths:
        # The adaptation session has the letter x at the end in MAMEM2;
        # it should be removed from the returned file names.
        if os.path.splitext(name)[0][-1] != "x":
            subject_paths.append(os.path.splitext(name)[0])
    return subject_paths
def __init__(
    self,
    evaluation_class,
    paradigm_class,
    suffix="",
    overwrite=False,
    hdf5_path=None,
    additional_columns=None,
):
    """Abstract result storage in an HDF5 file.

    Parameters
    ----------
    evaluation_class : type
        Subclass of ``BaseEvaluation``; its name becomes a path component.
    paradigm_class : type
        Subclass of ``BaseParadigm``; its name becomes a path component.
    suffix : str
        Appended to the results file name (``results_<suffix>.hdf5``).
    overwrite : bool
        If True, delete any existing results file before (re-)creating it.
    hdf5_path : str | None
        Root directory for results; when None, the ``MOABB_RESULTS``
        config (default ``~/mne_data``) is used.
    additional_columns : list of str | None
        Extra column names to record alongside the standard results.
    """
    from moabb.evaluations.base import BaseEvaluation
    from moabb.paradigms.base import BaseParadigm

    assert issubclass(evaluation_class, BaseEvaluation)
    assert issubclass(paradigm_class, BaseParadigm)

    if additional_columns is None:
        self.additional_columns = []
    else:
        assert all([isinstance(ac, str) for ac in additional_columns])
        self.additional_columns = additional_columns

    if hdf5_path is None:
        if get_config("MOABB_RESULTS") is None:
            set_config("MOABB_RESULTS", osp.join(osp.expanduser("~"), "mne_data"))
        self.mod_dir = _get_path(None, "MOABB_RESULTS", "results")
        # was previously stored in the moabb source file folder:
        # self.mod_dir = osp.dirname(osp.abspath(inspect.getsourcefile(moabb)))
    else:
        self.mod_dir = osp.abspath(hdf5_path)

    self.filepath = osp.join(
        self.mod_dir,
        "results",
        paradigm_class.__name__,
        evaluation_class.__name__,
        "results{}.hdf5".format("_" + suffix),
    )
    os.makedirs(osp.dirname(self.filepath), exist_ok=True)

    if overwrite and osp.isfile(self.filepath):
        os.remove(self.filepath)

    if not osp.isfile(self.filepath):
        with h5py.File(self.filepath, "w") as f:
            # np.bytes_ is the long-standing alias of the removed-in-NumPy-2
            # np.string_, producing an identical fixed-bytes attribute.
            f.attrs["create_time"] = np.bytes_(
                "{:%Y-%m-%d, %H:%M}".format(datetime.now()))
def mne_data_path(url: str, sign: str,
                  path: Union[str, Path] = None,
                  proxies: Optional[Dict[str, str]] = None,
                  force_update: bool = False,
                  update_path: bool = True,
                  verbose: Optional[Union[bool, str, int]] = None) -> str:
    """Get the local path of the target file.

    The file is downloaded when missing or when ``force_update`` is set;
    the local path mirrors the structure of the url.

    Parameters
    ----------
    url : str
        url of the target file.
    sign : str
        the unique identifier to which the file belongs
    path : Union[str, Path], optional
        local folder to save the file, by default None
    proxies : Optional[Dict[str, str]], optional
        use proxies to download files,
        e.g. {'https': 'socks5://127.0.0.1:1080'}, by default None
    force_update : bool, optional
        whether to re-download the file, by default False
    update_path : bool, optional
        whether to update mne config, by default True
    verbose : Optional[Union[bool, str, int]], optional
        verbosity override, by default None

    Returns
    -------
    str
        local path of the target file
    """
    sign = sign.upper()
    config_key = 'MNE_DATASETS_{:s}_PATH'.format(sign)
    dest_folder = 'MNE-{:s}-data'.format(sign.lower())
    base_path = _get_path(path, config_key, sign)
    destination = _url_to_local_path(
        url, os.path.join(base_path, dest_folder))

    # Download when the local copy is absent or a refresh was requested.
    needs_fetch = force_update or not os.path.exists(destination)
    if needs_fetch:
        parent = os.path.dirname(destination)
        if not os.path.isdir(parent):
            os.makedirs(parent)
        if os.path.isfile(destination):
            os.remove(destination)
        _fetch_file(url, destination, proxies=proxies)

    _do_path_update(base_path, update_path, config_key, sign)
    return destination
def data_path(
    self, subject, path=None, force_update=False, update_path=None, verbose=None
):
    """Get the local data path for one BBCI EEG-fNIRS subject.

    Parameters
    ----------
    subject : int
        Subject number; must be in ``self.subject_list``.
    path : None | str
        Where to look for / store the data (defaults to the configured
        ``MNE_DATASETS_BBCIFNIRS_PATH``).
    force_update : bool
        Force update of the dataset even if a local copy exists.
    update_path : bool | None
        Whether to write the resolved path back to the mne-python config.
        Defaults to True (the historical behaviour) when None.
    verbose : bool, str, int, or None
        If not None, override default verbose level.
    """
    if subject not in self.subject_list:
        raise ValueError("Invalid subject number")
    key = 'MNE_DATASETS_BBCIFNIRS_PATH'
    path = _get_path(path, key, 'BBCI EEG-fNIRS')
    # Honour the update_path argument; the previous code passed True
    # unconditionally (old FIXME: "this always update the path").
    _do_path_update(path, True if update_path is None else update_path,
                    key, 'BBCI EEG-fNIRS')
    basepath = op.join(path, 'MNE-eegfnirs-data')
    os.makedirs(basepath, exist_ok=True)
    if self.fnirs:
        return fnirs_data_path(basepath, subject)
    else:
        return eeg_data_path(basepath, subject)
def data_path(path=None, force_update=False, update_path=True, download=True,
              verbose=None):
    """Get path to a local copy of the fNIRS motor group tapping dataset.

    Downloads and extracts the BIDS-NIRS-Tapping archive from GitHub on
    first use, then returns the extracted directory.

    NOTE(review): the ``path``, ``force_update``, ``update_path`` and
    ``download`` arguments are currently ignored; the configured
    ``MNE_DATASETS_SAMPLE_PATH`` location is always used — confirm with
    callers before relying on them.

    Returns
    -------
    datapath : str
        Local path (with trailing separator) of the extracted dataset.
    """
    datapath = _get_path(None, 'MNE_DATASETS_SAMPLE_PATH', None)
    downloadpath = ('https://github.com/rob-luke/'
                    'BIDS-NIRS-Tapping/archive/master.zip')
    # op.join instead of string "+" concatenation for portable paths.
    target = op.join(datapath, "MNE-fNIRS-motor-group-data")
    if not op.isdir(target):
        remove_archive, full = _download(datapath, downloadpath,
                                         "MNE-fNIRS-motor-group-data.zip",
                                         "60472f83805b5676730e0d256fabeb7d")
        _extract(datapath, "fNIRS-motor-group",
                 target, full,
                 op.join(datapath, "BIDS-NIRS-Tapping-master"), True)
    # Trailing slash kept for backward compatibility with string-concat callers.
    return op.join(datapath, "MNE-fNIRS-motor-group-data/")
def download_sample_data(dataset="ssvep", subject=1, session=1):
    """Download BCI data for example purpose.

    Parameters
    ----------
    dataset : str
        type of the dataset, could be "ssvep", "p300" or "imagery"
        Default is "ssvep", as other are not implemented
    subject : int
        Subject id, dataset specific (default: 1)
    session : int, default 1
        Session number, dataset specific (default: 1)

    Returns
    -------
    destination : str
        Path to downloaded data

    Raises
    ------
    NotImplementedError
        For the "p300" and "imagery" datasets.
    ValueError
        For any other dataset name.
    """
    if dataset == "ssvep":
        DATASET_URL = 'https://zenodo.org/record/2392979/files/'
        url = '{:s}subject{:02d}_run{:d}_raw.fif'.format(DATASET_URL,
                                                         subject, session + 1)
        sign = 'SSVEPEXO'
        key, key_dest = 'MNE_DATASETS_SSVEPEXO_PATH', 'MNE-ssvepexo-data'
    elif dataset == "p300" or dataset == "imagery":
        raise NotImplementedError("Not yet implemented")
    else:
        # Previously an unknown name fell through to a NameError on `key`.
        raise ValueError("Unknown dataset: {}".format(dataset))

    # Use MNE _fetch_file to download EEG file
    if get_config(key) is None:
        set_config(key, os.path.join(os.path.expanduser("~"), "mne_data"))
    # Resolve the MNE local data directory
    path = _get_path(None, key, sign)
    # Map the URL to a local path inside key_dest
    destination = _url_to_local_path(url, os.path.join(path, key_dest))
    # Create the full directory path for the file
    os.makedirs(os.path.dirname(destination), exist_ok=True)
    if not os.path.exists(destination):
        _fetch_file(url, destination, print_destination=False)
    return destination
def get_dataset_path(sign, path):
    """Return the dataset path allowing for changes in MNE_DATA config.

    Parameters
    ----------
    sign : str
        Signifier of dataset
    path : None | str
        Location of where to look for the data storing location.
        If None, the environment variable or config parameter
        ``MNE_DATASETS_(signifier)_PATH`` is used. If it doesn't exist, the
        "~/mne_data" directory is used. If the dataset
        is not found under the given path, the data
        will be automatically downloaded to the specified folder.

    Returns
    -------
    path : None | str
        Location of where to look for the data storing location
    """
    sign = sign.upper()
    key = "MNE_DATASETS_{:s}_PATH".format(sign)
    if get_config(key) is None:
        if get_config("MNE_DATA") is None:
            path_def = osp.join(osp.expanduser("~"), "mne_data")
            print(
                "MNE_DATA is not already configured. It will be set to "
                "default location in the home directory - "
                + path_def
                + "\nAll datasets will be downloaded to this location, if anything is "
                "already downloaded, please move manually to this location"
            )
            if not osp.isdir(path_def):
                os.makedirs(path_def)
            # Reuse path_def instead of recomputing the identical join.
            set_config("MNE_DATA", path_def)
        set_config(key, get_config("MNE_DATA"))
    return _get_path(path, key, sign)
def data_path(
    self,
    subject,
    path=None,
    force_update=False,
    update_path=None,
    verbose=None,
    accept=False,
):
    """Return the local data path for one BBCI EEG-fNIRS subject.

    Passing ``accept=True`` records acceptance of the dataset's usage
    terms on the instance before delegating to the EEG or fNIRS fetcher.
    """
    if subject not in self.subject_list:
        raise ValueError("Invalid subject number")
    if accept:
        self.accept = True
    root = _get_path(path, "MNE_DATASETS_BBCIFNIRS_PATH", "BBCI EEG-fNIRS")
    datadir = op.join(root, "MNE-eegfnirs-data")
    if not op.isdir(datadir):
        os.makedirs(datadir)
    # Pick the modality-specific fetcher, then call it with the same args.
    fetcher = fnirs_data_path if self.fnirs else eeg_data_path
    return fetcher(datadir, subject, self.accept)