def combine_sessions(sessions, **kwargs):
    """
    Merge session data sets into a single data set.

    Parameters
    ----------
    sessions : iterable of Bunch
        Session bunches, each carrying ``data``, ``meta`` and ``tmask``
        DataFrames. Extra attributes of the first session are copied onto
        the result.
    **kwargs
        If ``clean_meta`` is truthy, the merged meta is passed through
        ``clean_meta(meta, **kwargs)`` and its index is reset.

    Returns
    -------
    Bunch
        Combined dataset with concatenated ``data``/``meta``/``tmask``,
        2-D ``X``/``y`` arrays derived from them, and the input sessions
        cached under ``sessions``.

    Raises
    ------
    ValueError
        If ``sessions`` yields no items.
    """
    # local import keeps this block self-contained; pd.concat is needed
    # because DataFrame.append was removed in pandas 2.0
    import pandas as pd

    # materialize once -- `sessions` may be a one-shot iterator
    sessions_ = list(sessions)
    if not sessions_:
        raise ValueError("combine_sessions() requires at least one session")

    # seed the dataset from the first session, then append the rest
    dataset_ = None
    for i, session_ in enumerate(sessions_):
        print("[+] session: {}, file: {}".format(
            i, session_.meta.reset_index(drop=False).session[0])
        )
        if dataset_ is None:
            dataset_ = Bunch(**dict(session_))
        else:
            # pd.concat replaces the removed DataFrame.append; flags match
            # the original call sites (data re-indexed, meta/tmask not)
            dataset_.data = pd.concat(
                [dataset_.data, session_.data], ignore_index=True, sort=False)
            dataset_.meta = pd.concat(
                [dataset_.meta, session_.meta], ignore_index=False, sort=False)
            dataset_.tmask = pd.concat(
                [dataset_.tmask, session_.tmask], ignore_index=False, sort=False)

    # optionally clean the merged metadata
    if kwargs.get('clean_meta'):
        dataset_.meta = clean_meta(dataset_.meta, **kwargs).reset_index(drop=False)

    # set X, y as 2-D arrays (rows = samples, cols = features/labels)
    dataset_.X = dataset_.data.values.reshape(-1, dataset_.data.shape[-1])
    dataset_.y = dataset_.meta.values.reshape(-1, dataset_.meta.shape[-1])

    # cache sessions -- reuse the copy; the original `sessions` iterable
    # may already be exhausted at this point
    dataset_.sessions = list(sessions_)

    return dataset_
def load_subject_data(dataset, index=0, mask='mask_vt', sample_mask=None, smoothing_fwhm=4, **kwargs):
    """ Load functional data for a single haxby subject. """
    # locate this subject's functional run and the requested mask;
    # a dataset may expose one mask per subject (sequence) or a single
    # shared mask (plain path string)
    func_path = dataset.func[index]
    mask_entry = dataset.get(mask)
    mask_path = mask_entry if isinstance(mask_entry, str) else mask_entry[index]

    # standardize/detrend + band-pass the masked signals, caching results
    masker = NiftiMasker(
        mask_img=mask_path,
        sample_mask=sample_mask,
        standardize=True,
        detrend=True,
        smoothing_fwhm=smoothing_fwhm,
        low_pass=0.09,
        high_pass=0.008,
        t_r=2.5,
        memory="nilearn_cache",
    )
    signals = masker.fit_transform(func_path)

    # package everything the downstream code needs in one bunch; the
    # subject code is the name of the directory holding the func file
    return Bunch(
        data=pd.DataFrame(signals),
        X=signals,
        masker=masker,
        mask=mask_path,
        func=func_path,
        subject_code=os.path.basename(os.path.dirname(func_path)),
    )