def load(subject_id, ds=data_source()):
    matfile = ds.open(URL_TEMPLATE % subject_id)
    mat = io.loadmat(matfile, struct_as_record=True)

    X = mat['X'].astype(np.float32)
    dt = np.diff(mat['I'][0])
    chan_lab = [str(l[0]) for l in mat['chann'].flat]

    # create event matrix
    sample_rate = 1. / np.median(dt)
    status_events = status_to_events(mat['Y'], sample_rate)
    block_events = block_to_events(mat['I'])
    events = np.hstack([
        np.vstack([status_events,
                   np.zeros((1, status_events.shape[1]), int)]),
        block_events])
    events = events[:, np.argsort(events[1])]  # sort events on start time

    # fill other attributes
    event_lab = dict(EVENTS)
    folds = mat['I'][1][events[1]].astype(int)  # use block number as fold id

    # construct final record
    return Recording(X=X, dt=dt, chan_lab=chan_lab, events=events,
                     folds=folds, event_lab=event_lab,
                     rec_id='reuderink-affpac-s%d' % subject_id,
                     license=LICENSE)
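# A minimal usage sketch for the loader above, assuming the AffPac .mat file
# for subject 1 is reachable through the default data source and that
# Recording exposes its constructor arguments as attributes (both are
# assumptions, not guaranteed by this module):
if __name__ == '__main__':
    rec = load(1)                   # rec_id becomes 'reuderink-affpac-s1'
    print(rec.X.shape)              # raw signal array taken from mat['X']
    print(rec.chan_lab)             # channel labels taken from mat['chann']

    # The event matrix has one column per event; row 1 holds the start
    # sample index, which is also what the fold assignment above indexes.
    starts = rec.events[1]
    assert np.all(np.diff(starts) >= 0)  # columns were sorted on start time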
def load(subject, ds=data_source(), url_template=URL_TEMPLATE):
    '''Each subject performed 14 experimental runs: two one-minute baseline
    runs (one with eyes open, one with eyes closed), and three two-minute
    runs of each of the four following tasks:

    1)  A target appears on either the left or the right side of the screen.
        The subject OPENS AND CLOSES THE CORRESPONDING FIST until the target
        disappears. Then the subject relaxes.

    2)  A target appears on either the left or the right side of the screen.
        The subject IMAGINES OPENING AND CLOSING THE CORRESPONDING FIST until
        the target disappears. Then the subject relaxes.

    3)  A target appears on either the top or the bottom of the screen. The
        subject OPENS AND CLOSES EITHER BOTH FISTS (if the target is on top)
        OR BOTH FEET (if the target is on the bottom) until the target
        disappears. Then the subject relaxes.

    4)  A target appears on either the top or the bottom of the screen. The
        subject IMAGINES OPENING AND CLOSING EITHER BOTH FISTS (if the target
        is on top) OR BOTH FEET (if the target is on the bottom) until the
        target disappears. Then the subject relaxes.

    The EDF+ file contains the duration of each event; however, it seems to
    be slightly variable. Based on the first subjects, it seems that between
    each task there is a resting task, and both the active task and the
    resting task take about 4.15 +- .1 seconds. The file headers for one
    subject all contain the same start timestamp, so we cannot estimate the
    time between runs.
    '''
    urls, runs = gen_urls(subject, url_template)
    log.debug('Generated URLs: %s.' % (urls,))

    # Load runs for this subject:
    rec = [load_schalk_run(ds.open(u), r) for (u, r) in zip(urls, runs)]
    runs, events = zip(*rec)

    # Combine information from different runs:
    X = np.hstack([r.X.astype(np.float32) for r in runs])
    dt = np.hstack([block_dt(r.X.shape[1], r.sample_rate) for r in runs])[1:]
    chan_lab = clean_chan_lab(runs[0].chan_lab)
    folds = np.hstack([np.ones(e.shape[1], int) * i
                       for (i, e) in enumerate(events)])
    E = concatenate_events(events, [r.X.shape[1] for r in runs])

    return Recording(X=X, dt=dt, chan_lab=chan_lab, events=E, folds=folds,
                     event_lab=EVENTS,
                     rec_id='schalk-physiobank-s%d' % subject,
                     license=LICENSE)
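# A minimal usage sketch for the Physiobank loader above, assuming the
# generated URLs resolve and that Recording exposes its constructor
# arguments as attributes (both are assumptions). Because the fold vector
# simply stores the index of the run each event came from, a
# leave-one-run-out split reduces to a comparison against the fold id:
if __name__ == '__main__':
    rec = load(1)                                   # 'schalk-physiobank-s1'
    for run_id in np.unique(rec.folds):
        test = rec.events[:, rec.folds == run_id]   # events of held-out run
        train = rec.events[:, rec.folds != run_id]  # events of other runs
        print('run %d: %d train / %d test events' %
              (run_id, train.shape[1], test.shape[1]))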
def load(subject, ds=data_source(), user='******', password='******'):
    # get HTTP authentication going
    password_mgr = urllib2.HTTPPasswordMgrWithDefaultRealm()
    password_mgr.add_password(None, 'http://bbci.de', user, password)
    handler = urllib2.HTTPBasicAuthHandler(password_mgr)
    opener = urllib2.build_opener(urllib2.HTTPHandler, handler)
    urllib2.install_opener(opener)

    # Load the training set. We need to get a *seekable* file from a zip
    # file. Hence, StringIO is used.
    tr = zipfile.ZipFile(ds.open(URL_TR % subject))
    tr_mat = StringIO(tr.read('100Hz/data_set_IVa_%s.mat' % subject))

    # Load test labels that were made available after the competition.
    te_mat = ds.open(URL_TE % subject)

    return load_mat(tr_mat, te_mat, 'bcicomp3.4a-%s' % subject)
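# A minimal sketch of the seekable-file trick used above, written in the same
# Python 2 idiom as the loader. The archive and member names below are
# hypothetical. ZipFile.read() returns the member as a plain byte string;
# wrapping it in StringIO yields the seekable file-like object that the
# downstream .mat reader needs:
def _mat_from_zip(zip_path, member):
    from StringIO import StringIO
    import zipfile
    from scipy import io

    zf = zipfile.ZipFile(zip_path)
    buf = StringIO(zf.read(member))  # in-memory, seekable copy of the member
    return io.loadmat(buf, struct_as_record=True)

# Example (hypothetical file names):
#   mat = _mat_from_zip('data_set_IVa.zip', '100Hz/data_set_IVa_aa.mat')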