def data_path(
    self, subject, path=None, force_update=False, update_path=None, verbose=None
):
    if subject not in self.subject_list:
        raise ValueError("Invalid subject number")

    # download the .zip archive for this subject if it is not already there
    url = "{:s}subject{:d}.zip".format(EPFLP300_URL, subject)
    path_zip = dl.data_dl(url, "EPFLP300")
    path_folder = path_zip.strip("subject{:d}.zip".format(subject))

    # unzip the archive if it has not been extracted yet
    if not os.path.isdir(path_folder + "subject{:d}".format(subject)):
        print("unzip", path_zip)
        with zipfile.ZipFile(path_zip, "r") as zip_ref:
            zip_ref.extractall(path_folder)

    # collect the paths to all files of this subject
    pattern = os.path.join("subject{:d}".format(subject), "*", "*")
    subject_paths = glob.glob(path_folder + pattern)
    return subject_paths
def data_path(
    self, subject, path=None, force_update=False, update_path=None, verbose=None
):
    if subject not in self.subject_list:
        raise ValueError("Invalid subject number")

    # download .set
    _set = "{:s}subject{:d}.set".format(DOWNLOAD_URL, subject)
    set_local = dl.data_dl(_set, "MUNICHMI", path, force_update, verbose)
    # download .fdt
    _fdt = "{:s}subject{:d}.fdt".format(DOWNLOAD_URL, subject)
    dl.data_dl(_fdt, "MUNICHMI", path, force_update, verbose)
    return set_local
def data_path(
    self, subject, path=None, force_update=False, update_path=None, verbose=None
):
    """Download the data from one subject"""
    if subject not in self.subject_list:
        raise ValueError("Invalid subject number")

    url = "{:s}subject_0{:d}.mat".format(ExampleDataset_URL, subject)
    path = dl.data_dl(url, "ExampleDataset")
    return [path]  # it has to return a list
def data_path(
    self, subject, path=None, force_update=False, update_path=None, verbose=None
):
    if subject not in self.subject_list:
        raise ValueError("Invalid subject number")

    def _url(prefix):
        return "/".join([GIN_URL, prefix, "{:d}.mat".format(subject)])

    return [
        dl.data_dl(_url(t), "SCHIRRMEISTER2017", path, force_update, verbose)
        for t in ["train", "test"]
    ]
def data_path(
    self, subject, path=None, force_update=False, update_path=None, verbose=None
):
    if subject not in self.subject_list:
        raise ValueError("Invalid subject number")

    # download the .zip archive for this subject if it is not already there
    url = "{:s}subject{:d}.zip".format(BI2013a_URL, subject)
    path_zip = dl.data_dl(url, "BRAININVADERS")
    path_folder = path_zip.strip("subject{:d}.zip".format(subject))

    # unzip the archive if it has not been extracted yet
    if not os.path.isdir(path_folder + "subject{:d}".format(subject)):
        print("unzip", path_zip)
        with zipfile.ZipFile(path_zip, "r") as zip_ref:
            zip_ref.extractall(path_folder)

    # filter the data regarding the experimental conditions
    meta_file = os.path.join("subject{:d}".format(subject), "meta.yml")
    meta_path = path_folder + meta_file
    with open(meta_path, "r") as stream:
        meta = yaml.load(stream, Loader=yaml.FullLoader)

    conditions = []
    if self.adaptive:
        conditions = conditions + ["adaptive"]
    if self.nonadaptive:
        conditions = conditions + ["nonadaptive"]
    types = []
    if self.training:
        types = types + ["training"]
    if self.online:
        types = types + ["online"]

    filenames = []
    for run in meta["runs"]:
        run_condition = run["experimental_condition"]
        run_type = run["type"]
        if (run_condition in conditions) and (run_type in types):
            filenames = filenames + [run["filename"]]

    # list the file paths for this subject
    subject_paths = []
    for filename in filenames:
        subject_paths = subject_paths + glob.glob(
            os.path.join(
                path_folder, "subject{:d}".format(subject), "Session*", filename
            )
        )
    return subject_paths
def data_path(
    self, subject, path=None, force_update=False, update_path=None, verbose=None
):
    if subject not in self.subject_list:
        raise ValueError("Invalid subject number")

    subject_paths = []
    for session in range(1, 3):
        url = "{0}session{1}/s{2}/sess{1:02d}_subj{2:02d}_EEG_MI.mat".format(
            Lee2019_URL, session, subject
        )
        data_path = dl.data_dl(
            url, "Lee2019_MI", path, force_update, update_path, verbose
        )
        subject_paths.append(data_path)
    return subject_paths
def data_path(url, path=None, force_update=False, update_path=None, verbose=None):
    return [dl.data_dl(url, "BNCI", path, force_update, update_path, verbose)]
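# Illustrative usage sketch, not part of the implementations above: each of the
# class methods takes a subject number, downloads whatever is missing, and
# returns a list of local file paths (the last, BNCI helper resolves a single
# url instead). Instantiating ExampleDataset() directly is an assumption made
# only for this sketch; any dataset class exposing subject_list and a data_path
# like the ones above would be used the same way.
if __name__ == "__main__":
    dataset = ExampleDataset()
    for subject in dataset.subject_list:
        # fetch the subject's files if needed and get their local paths
        local_files = dataset.data_path(subject)
        print("subject", subject, "->", local_files)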