def data_path(self, subject, path=None, force_update=False, update_path=None, verbose=None):
    """Return the sorted paths of the ``*Run_2*.vhdr`` headers for one subject.

    Downloads the subject's zip archive on first access and extracts it
    next to the downloaded file; later calls reuse the extracted folder.

    Raises
    ------
    ValueError
        If ``subject`` is not in ``self.subject_list``.
    """
    if subject not in self.subject_list:
        raise ValueError("Invalid subject number")

    # Map the subject number onto the download server's file id.
    file_number = SpotPilotData._map_subject_to_filenumber(subject)
    url = f'{SPOT_PILOT_P300_URL}/FILE{file_number}/content'
    path_zip = dl.data_path(url, 'spot')
    # path_zip ends with '/content' (8 chars); drop it to reach the
    # extraction root, then descend into this subject's folder.
    path_folder = path_zip[:-8] + f'/subject{subject}'

    # Extract only once.
    if not os.path.isdir(path_folder):
        print('unzip', path_zip)
        # Fix: the original left the ZipFile handle open; close it
        # deterministically with a context manager.
        with zipfile.ZipFile(path_zip, "r") as zip_ref:
            # NOTE(review): [:-7] keeps a trailing '/' before 'content' is
            # dropped — preserved as-is from the original extraction target.
            zip_ref.extractall(path_zip[:-7])

    # Only the Run_2 recordings belong to this paradigm.
    pattern = '/*Run_2*.vhdr'
    subject_paths = glob.glob(path_folder + pattern)
    return sorted(subject_paths)
def data_path(self, subject, path=None, force_update=False, update_path=None, verbose=None):
    """Return the paths of all session files for one EPFL P300 subject.

    Downloads the subject's zip archive on first access and extracts it
    next to the downloaded file; later calls reuse the extracted folder.

    Raises
    ------
    ValueError
        If ``subject`` is not in ``self.subject_list``.
    """
    if subject not in self.subject_list:
        raise ValueError("Invalid subject number")

    zip_name = 'subject{:d}.zip'.format(subject)
    url = '{:s}{:s}'.format(EPFLP300_URL, zip_name)
    path_zip = dl.data_path(url, 'EPFLP300')
    # Fix: the original used str.strip(zip_name), which removes a *character
    # set* from both ends of the path rather than the trailing file name.
    # Slice the suffix off instead.
    path_folder = path_zip[:-len(zip_name)]

    # Extract only once.
    if not os.path.isdir(path_folder + 'subject{:d}'.format(subject)):
        print('unzip', path_zip)
        # Fix: close the ZipFile handle deterministically.
        with zipfile.ZipFile(path_zip, "r") as zip_ref:
            zip_ref.extractall(path_folder)

    # Each run lives two directory levels below the subject folder.
    pattern = os.path.join('subject{:d}'.format(subject), '*', '*')
    subject_paths = glob.glob(path_folder + pattern)
    return subject_paths
def data_path(url, path=None, force_update=False, update_path=None, verbose=None):
    """Fetch *url* through the BNCI downloader and return the local path.

    The single resolved path is wrapped in a one-element list, matching
    the list-of-paths convention of the other ``data_path`` methods.
    """
    local_path = dl.data_path(url, 'BNCI', path, force_update, update_path, verbose)
    return [local_path]
def data_path(self, subject, path=None, force_update=False, update_path=None, verbose=None):
    """Download the subject's .set/.fdt pair and return the .set path.

    Raises
    ------
    ValueError
        If ``subject`` is not in ``self.subject_list``.
    """
    if subject not in self.subject_list:
        raise ValueError("Invalid subject number")

    base = "{:s}subject{:d}".format(DOWNLOAD_URL, subject)
    # Resolve the EEGLAB .set file; this is what callers receive.
    set_local = dl.data_path(base + ".set", "MUNICHMI", path, force_update,
                             update_path, verbose)
    # Also fetch the .fdt sidecar so it sits next to the .set file;
    # its path is intentionally not returned.
    dl.data_path(base + ".fdt", "MUNICHMI", path, force_update,
                 update_path, verbose)
    return set_local
def data_path(self, subject, path=None, force_update=False, update_path=None, verbose=None):
    """Download the data from one subject.

    Raises
    ------
    ValueError
        If ``subject`` is not in ``self.subject_list``.
    """
    if subject not in self.subject_list:
        raise ValueError("Invalid subject number")
    url = '{:s}subject_0{:d}.mat'.format(ExampleDataset_URL, subject)
    # Use a fresh local name instead of rebinding the `path` parameter.
    local_path = dl.data_path(url, 'ExampleDataset')
    return [local_path]  # callers expect a list of paths
def data_path(self, subject, path=None, force_update=False, update_path=None, verbose=None):
    """Return the local paths of the train and test .mat files for one subject.

    Raises
    ------
    ValueError
        If ``subject`` is not in ``self.subject_list``.
    """
    if subject not in self.subject_list:
        raise ValueError('Invalid subject number')

    local_paths = []
    # Fetch the train split first, then the test split.
    for split in ('train', 'test'):
        url = '/'.join([GIN_URL, split, '{:d}.mat'.format(subject)])
        local_paths.append(dl.data_path(url, 'SCHIRRMEISTER2017', path,
                                        force_update, update_path, verbose))
    return local_paths
def data_path(self, subject, path=None, force_update=False, update_path=None, verbose=None):
    """Return the local .mat path(s) for one subject (VR and/or PC session).

    Raises
    ------
    ValueError
        If ``subject`` is not in ``self.subject_list``.
    """
    if subject not in self.subject_list:
        raise ValueError("Invalid subject number")

    file_path_list = []
    # Fix: the original used `elif self.PC`, so when both flags were set
    # only the VR file was returned even though results are collected in a
    # list. The two conditions are independent; check each one.
    if self.VR:
        url = '{:s}subject_{:02d}_VR.mat'.format(VIRTUALREALITY_URL, subject)
        file_path_list.append(dl.data_path(url, 'VIRTUALREALITY'))
    if self.PC:
        url = '{:s}subject_{:02d}_PC.mat'.format(VIRTUALREALITY_URL, subject)
        file_path_list.append(dl.data_path(url, 'VIRTUALREALITY'))
    return file_path_list
def data_path(self, subject, path=None, force_update=False, update_path=None, verbose=None):
    """Return the run files for one subject, filtered by the dataset flags.

    Runs are kept when their experimental condition matches the
    ``adaptive``/``nonadaptive`` flags AND their type matches the
    ``training``/``online`` flags, as listed in the subject's ``meta.yml``.

    Raises
    ------
    ValueError
        If ``subject`` is not in ``self.subject_list``.
    """
    if subject not in self.subject_list:
        raise ValueError("Invalid subject number")

    zip_name = "subject{:d}.zip".format(subject)
    url = "{:s}{:s}".format(BI2013a_URL, zip_name)
    path_zip = dl.data_path(url, "BRAININVADERS")
    # Fix: the original used str.strip(zip_name), which removes a *character
    # set* from both ends rather than the trailing file name. Slice instead.
    path_folder = path_zip[:-len(zip_name)]

    # Extract only once.
    if not os.path.isdir(path_folder + "subject{:d}".format(subject)):
        print("unzip", path_zip)
        # Fix: close the ZipFile handle deterministically.
        with zipfile.ZipFile(path_zip, "r") as zip_ref:
            zip_ref.extractall(path_folder)

    # meta.yml describes every run (experimental condition + type).
    meta_file = os.path.join("subject{:d}".format(subject), "meta.yml")
    meta_path = path_folder + meta_file
    with open(meta_path, "r") as stream:
        meta = yaml.load(stream, Loader=yaml.FullLoader)

    conditions = []
    if self.adaptive:
        conditions.append("adaptive")
    if self.nonadaptive:
        conditions.append("nonadaptive")
    types = []
    if self.training:
        types.append("training")
    if self.online:
        types.append("online")

    filenames = [
        run["filename"]
        for run in meta["runs"]
        if run["experimental_condition"] in conditions and run["type"] in types
    ]

    # Resolve each selected filename inside any Session* folder.
    subject_paths = []
    for filename in filenames:
        subject_paths += glob.glob(
            os.path.join(path_folder, "subject{:d}".format(subject),
                         "Session*", filename))
    return subject_paths
def data_path(self, subject, path=None, force_update=False, update_path=None, verbose=None):
    """Return the run files for one subject, filtered by the dataset flags.

    Runs are kept when their experimental condition matches the
    ``adaptive``/``nonadaptive`` flags AND their type matches the
    ``training``/``online`` flags, as listed in the subject's ``meta.yml``.

    Raises
    ------
    ValueError
        If ``subject`` is not in ``self.subject_list``.
    """
    if subject not in self.subject_list:
        raise ValueError("Invalid subject number")

    zip_name = 'subject{:d}.zip'.format(subject)
    url = '{:s}{:s}'.format(BI2013a_URL, zip_name)
    path_zip = dl.data_path(url, 'BRAININVADERS')
    # Fix: the original used str.strip(zip_name), which removes a *character
    # set* from both ends rather than the trailing file name. Slice instead.
    path_folder = path_zip[:-len(zip_name)]

    # Extract only once.
    if not os.path.isdir(path_folder + 'subject{:d}'.format(subject)):
        print('unzip', path_zip)
        # Fix: close the ZipFile handle deterministically.
        with zipfile.ZipFile(path_zip, "r") as zip_ref:
            zip_ref.extractall(path_folder)

    # meta.yml describes every run (experimental condition + type).
    meta_file = os.path.join('subject{:d}'.format(subject), 'meta.yml')
    meta_path = path_folder + meta_file
    with open(meta_path, 'r') as stream:
        # Fix: yaml.load without an explicit Loader is deprecated and unsafe;
        # use FullLoader to match the sibling implementation.
        meta = yaml.load(stream, Loader=yaml.FullLoader)

    conditions = []
    if self.adaptive:
        conditions.append('adaptive')
    if self.nonadaptive:
        conditions.append('nonadaptive')
    types = []
    if self.training:
        types.append('training')
    if self.online:
        types.append('online')

    filenames = [
        run['filename']
        for run in meta['runs']
        if run['experimental_condition'] in conditions and run['type'] in types
    ]

    # Resolve each selected filename inside any Session* folder.
    subject_paths = []
    for filename in filenames:
        subject_paths += glob.glob(
            os.path.join(path_folder, 'subject{:d}'.format(subject),
                         'Session*', filename))
    return subject_paths
def data_path(self, subject, path=None, force_update=False, update_path=None, verbose=None):
    """Return the local paths of the train and test .mat files for one subject.

    Raises
    ------
    ValueError
        If ``subject`` is not in ``self.subject_list``.
    """
    if subject not in self.subject_list:
        raise ValueError("Invalid subject number")

    def _fetch(split):
        # Build the split-specific URL and resolve it to a local path.
        url = "/".join([GIN_URL, split, "{:d}.mat".format(subject)])
        return dl.data_path(url, "SCHIRRMEISTER2017", path, force_update,
                            update_path, verbose)

    return [_fetch("train"), _fetch("test")]
def data_path(self, subject, path=None, force_update=False, update_path=None, verbose=None):
    """Return the local EEG-MI .mat paths for both sessions of one subject.

    Raises
    ------
    ValueError
        If ``subject`` is not in ``self.subject_list``.
    """
    if subject not in self.subject_list:
        raise ValueError("Invalid subject number")

    # One URL per session (sessions are numbered 1 and 2).
    urls = [
        "{0}session{1}/s{2}/sess{1:02d}_subj{2:02d}_EEG_MI.mat".format(
            Lee2019_URL, session, subject)
        for session in range(1, 3)
    ]
    return [
        dl.data_path(url, "Lee2019_MI", path, force_update, update_path, verbose)
        for url in urls
    ]