import argparse
import sys

from bids import BIDSLayout


def main():
    parser = argparse.ArgumentParser(
        description='Gets the task list BEFORE the events files are copied '
                    'into the dataset')
    parser.add_argument("--bidsdir", help="Path to a curated BIDS directory",
                        required=True)
    args = parser.parse_args()
    bidsdir = args.bidsdir

    layout = BIDSLayout(bidsdir)
    task_list = layout.get_tasks()

    # Move scenemem to the end of the list
    if 'scenemem' in task_list:
        task_list.append(task_list.pop(task_list.index('scenemem')))

    # Remove unhandled tasks
    if 'rest' in task_list:
        task_list.remove('rest')
    if 'binder' in task_list:
        task_list.remove('binder')
    if 'verbgen' in task_list:
        task_list.remove('verbgen')

    sys.stdout.write(' '.join(task_list))
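
# A minimal standalone sketch of the reordering idiom used above (the task
# names here are illustrative, not from a real dataset): list.pop() at the
# element's current index plus append() moves it to the end in place.
tasks = ['objnaming', 'scenemem', 'sentcomp']
tasks.append(tasks.pop(tasks.index('scenemem')))
print(tasks)  # ['objnaming', 'sentcomp', 'scenemem']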
import os
import shutil
from tempfile import TemporaryDirectory

from bids import BIDSLayout
from dcm2bids import Dcm2bids
from dcm2bids.utils import DEFAULT, load_json

# Assumption: the test data folder sits next to this test module
TEST_DATA_DIR = os.path.join(os.path.dirname(__file__), "data")


def test_dcm2bids():
    # tmpBase = os.path.join(TEST_DATA_DIR, "tmp")
    # bidsDir = TemporaryDirectory(dir=tmpBase)
    bidsDir = TemporaryDirectory()

    tmpSubDir = os.path.join(bidsDir.name, DEFAULT.tmpDirName, "sub-01")
    shutil.copytree(os.path.join(TEST_DATA_DIR, "sidecars"), tmpSubDir)

    app = Dcm2bids(
        [TEST_DATA_DIR],
        "01",
        os.path.join(TEST_DATA_DIR, "config_test.json"),
        bidsDir.name,
    )
    app.run()

    layout = BIDSLayout(bidsDir.name, validate=False)

    assert layout.get_subjects() == ["01"]
    assert layout.get_sessions() == []
    assert layout.get_tasks() == ["rest"]
    assert layout.get_runs() == [1, 2, 3]

    app = Dcm2bids(
        [TEST_DATA_DIR],
        "01",
        os.path.join(TEST_DATA_DIR, "config_test.json"),
        bidsDir.name,
    )
    app.run()

    fmapFile = os.path.join(bidsDir.name, "sub-01", "fmap",
                            "sub-01_echo-492_fmap.json")
    data = load_json(fmapFile)
    fmapMtime = os.stat(fmapFile).st_mtime
    assert data["IntendedFor"] == "dwi/sub-01_dwi.nii.gz"

    data = load_json(
        os.path.join(bidsDir.name, "sub-01", "localizer",
                     "sub-01_run-01_localizer.json"))
    assert data["ProcedureStepDescription"] == "Modify by dcm2bids"

    # rerun
    shutil.rmtree(tmpSubDir)
    shutil.copytree(os.path.join(TEST_DATA_DIR, "sidecars"), tmpSubDir)

    app = Dcm2bids(
        [TEST_DATA_DIR],
        "01",
        os.path.join(TEST_DATA_DIR, "config_test.json"),
        bidsDir.name,
    )
    app.run()

    fmapMtimeRerun = os.stat(fmapFile).st_mtime
    assert fmapMtime == fmapMtimeRerun

    if os.name != 'nt':
        bidsDir.cleanup()
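
# A self-contained sketch of the rerun check used in the test above: a file's
# st_mtime only changes when the file is rewritten, so equal mtimes before and
# after a second run show the existing output was left untouched.
import os
import tempfile

with tempfile.NamedTemporaryFile(suffix=".json", delete=False) as f:
    f.write(b"{}")
before = os.stat(f.name).st_mtime
# ... a rerun that skips existing outputs leaves the file alone ...
after = os.stat(f.name).st_mtime
assert before == after
os.remove(f.name)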
import typing as t

from bids import BIDSLayout


def _fill_empty_lists(layout: BIDSLayout, subjects: list, tasks: list,
                      sessions: list, runs: t.List[str]):
    """If filters are not provided by the user, load them from the layout."""
    subjects = subjects if subjects else layout.get_subjects()
    tasks = tasks if tasks else layout.get_tasks()
    sessions = sessions if sessions else layout.get_sessions()
    runs = runs if runs else layout.get_runs()
    return subjects, tasks, sessions, runs
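
# A hypothetical call (the `layout` variable is assumed to exist): empty
# filter lists fall back to everything the layout indexed, while the explicit
# subjects filter passes through unchanged.
subjects, tasks, sessions, runs = _fill_empty_lists(
    layout, subjects=['01'], tasks=[], sessions=[], runs=[])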
def _run_interface(self, runtime):
    import json
    from bids import BIDSLayout

    def validate_derivatives(bids_dir, derivatives):
        """Validate derivatives argument provided by the user.

        Args:
            bids_dir: list
                Path to bids root directory.
            derivatives: str or list(str)
                Derivatives to use for denoising.

        Returns:
            derivatives_: list
                Validated derivatives list.
            scope: list
                Right scope keyword used in pybids query.
        """
        if isinstance(derivatives, str):
            derivatives_ = [derivatives]
        else:
            derivatives_ = derivatives

        # Create full paths to derivatives folders
        derivatives_ = [
            os.path.join(bids_dir, 'derivatives', d) for d in derivatives_
        ]

        # Establish right scope keyword for arbitrary packages
        scope = []
        for derivative_path in derivatives_:
            dataset_desc_path = os.path.join(derivative_path,
                                             'dataset_description.json')
            try:
                with open(dataset_desc_path, 'r') as f:
                    dataset_desc = json.load(f)
                scope.append(dataset_desc['PipelineDescription']['Name'])
            except FileNotFoundError as e:
                raise Exception(f"{derivative_path} should contain"
                                " dataset_description.json file") from e
            except KeyError as e:
                raise Exception("Key 'PipelineDescription.Name' is "
                                f"required in {dataset_desc_path} file") from e

        return derivatives_, scope

    def validate_option(layout, option, kind='task'):
        """Validate BIDS query filters provided by the user.

        Args:
            layout: bids.layout.layout.BIDSLayout
                Lightweight class representing BIDS project file tree.
            option: list
                Filter arguments provided by the user.
            kind: string
                Type of query. Available options are 'task', 'session' and
                'subject'.

        Returns:
            option_: list
                Validated filter values.
        """
        # Grab all possible filter values
        if kind == 'task':
            option_all = layout.get_tasks()
        elif kind == 'session':
            option_all = layout.get_sessions()
        elif kind == 'subject':
            option_all = layout.get_subjects()

        option_ = option
        for option_item in option_:
            if option_item not in option_all:
                raise ValueError(f'{kind} {option_item} is not found')

        return option_

    # Validate derivatives argument
    derivatives, scope = validate_derivatives(
        bids_dir=self.inputs.bids_dir,
        derivatives=self.inputs.derivatives)

    layout = BIDSLayout(root=self.inputs.bids_dir,
                        validate=True,
                        derivatives=derivatives)

    # Validate optional arguments
    filter_base = {}
    if isdefined(self.inputs.task):
        task = validate_option(layout, self.inputs.task, kind='task')
        filter_base['task'] = task
    else:
        task = layout.get_tasks()
    if isdefined(self.inputs.session):
        session = validate_option(layout, self.inputs.session,
                                  kind='session')
        filter_base['session'] = session
    if isdefined(self.inputs.subject):
        subject = validate_option(layout, self.inputs.subject,
                                  kind='subject')
        filter_base['subject'] = subject

    # Define query filters
    keys_entities = ['task', 'session', 'subject', 'datatype']
    filter_fmri = {
        'extension': ['nii', 'nii.gz'],
        'suffix': 'bold',
        'desc': 'preproc',
    }
    filter_fmri_aroma = {
        'extension': ['nii', 'nii.gz'],
        'suffix': 'bold',
        'desc': 'smoothAROMAnonaggr',
    }
    filter_conf = {
        'extension': 'tsv',
        'suffix': 'regressors',
        'desc': 'confounds',
    }  # for later
    filter_conf_json = {
        'extension': 'json',
        'suffix': 'regressors',
        'desc': 'confounds',
    }
    filter_fmri.update(filter_base)

    ########################################################################
    ### SOLUTION FOR LATER #################################################
    ########################################################################
    # filter_fmri_aroma.update(filter_base)
    # filter_conf.update(filter_base)
    # filter_conf_json.update(filter_base)
    # # Grab all requested files
    # fmri_prep = layout.get(scope=scope, **filter_fmri)
    # if self.inputs.ica_aroma:
    #     fmri_prep_aroma = layout.get(scope=scope, **filter_fmri_aroma)
    # conf_raw = layout.get(scope=scope, **filter_conf)
    # conf_json = layout.get(scope=scope, **filter_conf_json)
    ########################################################################
    ########################################################################
    ########################################################################

    fmri_prep, fmri_prep_aroma, conf_raw, conf_json, entities = (
        [] for _ in range(5))

    for fmri_file in layout.get(scope=scope, **filter_fmri):
        entity_bold = fmri_file.get_entities()

        # Look for corresponding confounds file
        filter_entities = {
            key: value
            for key, value in entity_bold.items() if key in keys_entities
        }

        # Constrain the search
        filter_conf.update(filter_entities)
        filter_conf_json.update(filter_entities)

        conf_file = layout.get(scope=scope, **filter_conf)
        conf_json_file = layout.get(scope=scope, **filter_conf_json)

        if not conf_file:
            raise FileNotFoundError(
                f"Regressor file not found for file {fmri_file.path}")
        else:
            # Add entity only if both files are available
            if len(conf_file) > 1:
                print(
                    f"Warning: Multiple regressors found for file {fmri_file.path}.\n"
                    f"Selecting {conf_file[0].path}"
                )  # TODO: find proper warning (logging?)
            conf_file = conf_file[0]

        if not conf_json_file:
            raise FileNotFoundError(
                f"Regressor .json file not found for file {fmri_file.path}")
        else:
            # Add entity only if both files are available
            if len(conf_json_file) > 1:
                print(
                    f"Warning: Multiple .json regressors found for file {fmri_file.path}.\n"
                    f"Selecting {conf_json_file[0].path}")
            conf_json_file = conf_json_file[0]

        if self.inputs.ica_aroma:
            # Add specific fields to constrain the search
            filter_fmri_aroma.update(filter_entities)
            fmri_aroma_file = layout.get(scope=scope, **filter_fmri_aroma)

            if not fmri_aroma_file:
                raise FileNotFoundError(
                    f"ICA-Aroma file not found for file {fmri_file.path}")
            else:
                # Add entity only if both files are available
                if len(fmri_aroma_file) > 1:
                    print(
                        f"Warning: Multiple ICA-Aroma files found for file {fmri_file.path}.\n"
                        f"Selecting {fmri_aroma_file[0].path}"
                    )  # TODO: find proper warning (logging?)
                fmri_aroma_file = fmri_aroma_file[0]
                fmri_prep_aroma.append(fmri_aroma_file.path)

        fmri_prep.append(fmri_file.path)
        conf_raw.append(conf_file.path)
        conf_json.append(conf_json_file.path)
        entities.append(filter_entities)

    # Extract TRs
    tr_dict = {}
    for t in task:
        filter_fmri_tr = filter_fmri.copy()
        filter_fmri_tr['task'] = t

        example_file = layout.get(**filter_fmri_tr)[0]
        tr = layout.get_metadata(example_file.path)['RepetitionTime']
        tr_dict[t] = tr

    self._results['fmri_prep'] = fmri_prep
    self._results['fmri_prep_aroma'] = fmri_prep_aroma
    self._results['conf_raw'] = conf_raw
    self._results['conf_json'] = conf_json
    self._results['entities'] = entities
    self._results['tr_dict'] = tr_dict

    return runtime
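
# A standalone sketch of the entity-matching pattern above (the dataset root
# is an assumption): take one preprocessed BOLD file, keep only the entities
# that identify its run, and reuse them to query the matching confounds file.
from bids import BIDSLayout

layout = BIDSLayout('/data/bids', derivatives=True)  # hypothetical path
bold = layout.get(suffix='bold', extension='nii.gz')[0]
keys = ['task', 'session', 'subject', 'datatype']
run_entities = {k: v for k, v in bold.get_entities().items() if k in keys}
confounds = layout.get(suffix='regressors', extension='tsv', **run_entities)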
from bids import BIDSLayout

bids_dataset = '/home/emdupre/Desktop/ds000007'  # Change to your dataset location!

# First, we want to make sure we have a valid BIDS dataset!
# Let's navigate to:
# https://bids-standard.github.io/bids-validator/
layout = BIDSLayout(bids_dataset)

# Let's learn a little about the data we have
layout.description
layout.get_modalities()
layout.get_tasks()
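
# A possible next step for this walkthrough (no assumptions about the task
# names in ds000007): query the BOLD runs for the first task as file paths.
bold_files = layout.get(task=layout.get_tasks()[0], suffix='bold',
                        return_type='file')
print(len(bold_files))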
def _run_interface(self, runtime):
    # Validate derivatives argument
    derivatives, scope = validate_derivatives(
        bids_dir=self.inputs.bids_dir,
        derivatives=self.inputs.derivatives)

    layout = BIDSLayout(root=self.inputs.bids_dir,
                        derivatives=derivatives,
                        validate=True,
                        index_metadata=False)

    # Validate optional arguments
    filter_base = {}
    if isdefined(self.inputs.task):
        task = validate_option(layout, self.inputs.task, kind='task')
        filter_base['task'] = task
    else:
        task = layout.get_tasks()
    if isdefined(self.inputs.session):
        session = validate_option(layout, self.inputs.session,
                                  kind='session')
        filter_base['session'] = session
    if isdefined(self.inputs.subject):
        subject = validate_option(layout, self.inputs.subject,
                                  kind='subject')
        filter_base['subject'] = subject

    # Define query filters
    filter_fmri = {
        'extension': ['nii', 'nii.gz'],
        'suffix': 'bold',
        'desc': 'preproc',
        'space': 'MNI152NLin2009cAsym'
    }
    filter_fmri_aroma = {
        'extension': ['nii', 'nii.gz'],
        'suffix': 'bold',
        'desc': 'smoothAROMAnonaggr',
        'space': 'MNI152NLin2009cAsym'
    }
    filter_conf = {
        'extension': 'tsv',
        'suffix': 'regressors',
        'desc': 'confounds',
    }
    filter_conf_json = {
        'extension': 'json',
        'suffix': 'regressors',
        'desc': 'confounds',
    }
    filter_fmri.update(filter_base)
    filter_fmri_aroma.update(filter_base)
    filter_conf.update(filter_base)
    filter_conf_json.update(filter_base)

    # Grab all requested files
    fmri_prep = layout.get(scope=scope, **filter_fmri)
    if self.inputs.ica_aroma:
        fmri_prep_aroma = layout.get(scope=scope, **filter_fmri_aroma)
        if not fmri_prep_aroma:
            raise MissingFile("ICA-AROMA files not found in BIDS directory")
    conf_raw = layout.get(scope=scope, **filter_conf)
    conf_json = layout.get(scope=scope, **filter_conf_json)

    # Validate correspondence between queried files
    entities = []
    for i, fmri_file in enumerate(fmri_prep):
        # Reference common entities for preprocessed files
        if self.inputs.ica_aroma and fmri_prep_aroma:
            compare_common_entities(fmri_file, fmri_prep_aroma[i])
        compare_common_entities(fmri_file, conf_raw[i])
        compare_common_entities(fmri_file, conf_json[i])

        entities.append({
            key: value
            for key, value in fmri_file.get_entities().items()
            if key in ['task', 'session', 'subject', 'datatype']
        })

    # Extract TRs
    tr_dict = {}

    # TODO: this is just a funny workaround, look for better solution later
    layout_for_tr = BIDSLayout(root=self.inputs.bids_dir,
                               derivatives=derivatives,
                               validate=True,
                               index_metadata=True)

    for t in task:
        filter_fmri_tr = filter_fmri.copy()
        filter_fmri_tr['task'] = t
        try:
            example_file = layout_for_tr.get(**filter_fmri_tr)[0]
        except IndexError:
            raise MissingFile(f"no imaging file found for task {t}")
        tr_dict[t] = layout_for_tr.get_metadata(
            example_file.path)['RepetitionTime']

    self._results['fmri_prep'] = [file.path for file in fmri_prep]
    if self.inputs.ica_aroma:
        self._results['fmri_prep_aroma'] = [
            file.path for file in fmri_prep_aroma
        ]
    self._results['conf_raw'] = [file.path for file in conf_raw]
    self._results['conf_json'] = [file.path for file in conf_json]
    self._results['entities'] = entities
    self._results['tr_dict'] = tr_dict

    return runtime
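
# A sketch of the TR lookup this workaround enables (the file path is
# hypothetical): once metadata is indexed, get_metadata() merges the sidecar
# JSONs that apply to a file, so RepetitionTime can be read directly.
metadata = layout_for_tr.get_metadata(
    '/data/bids/sub-01/func/sub-01_task-rest_bold.nii.gz')
tr = metadata['RepetitionTime']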
"suffix": "T1w", "extensions": ["nii", ".nii.gz"] } layout = BIDSLayout(args['directory']) data_grabber_node_iterables = [] if args['task'] is not None: struct_params['task'] = args['task'] time_series_params['task'] = args['task'] elif args['fmri_task'] is not None: time_series_params['task'] = args['fmri_task'] elif args['t1_task'] is not None: struct_params['task'] = args['t1_task'] else: data_grabber_node_iterables.append(('task', layout.get_tasks())) if args['acquisition'] is not None: struct_params['acquisition'] = args['acquisition'] time_series_params['acquisition'] = args['acquisition'] elif args['t1_acquisition'] is not None: struct_params['acquisition'] = args['t1_acquisition'] elif args['fmri_acquisition'] is not None: time_series_params['acquisition'] = args['fmri_acquisition'] else: data_grabber_node_iterables.append( ('acquisition', layout.get_acquisitions())) if args['session'] is not None: struct_params['session'] = args['session'] time_series_params['session'] = args['session']
def get_files(subject_id, session, task, raw_data_dir, preprocessed_data_dir,
              space=None, run=[], strict=True, **kwargs):
    """
    Given some information, retrieve all the files and metadata from a
    BIDS-formatted dataset that will be passed to the analysis pipeline.
    """
    from bids import BIDSLayout

    # Only the raw files have the correct metadata (e.g. TR), and the event
    # files live here
    raw_layout = BIDSLayout(raw_data_dir, validate=False, derivatives=False)
    preproc_layout = BIDSLayout(preprocessed_data_dir, validate=False)

    subjects = preproc_layout.get_subjects()
    assert subject_id in subjects and subject_id in raw_layout.get_subjects(), \
        "Subject not found!"

    sessions = preproc_layout.get_sessions()
    assert session in sessions, "Session not found!"

    tasks = preproc_layout.get_tasks()
    assert task in tasks, "Task not found!"

    if space == "None":
        space = None
    if space is None:
        print("Space is None")
        bolds = sorted(preproc_layout.get(subject=subject_id, session=session,
                                          task=task, run=run, suffix='bold',
                                          extension=['nii.gz'],
                                          return_type='file'))
    else:
        bolds = sorted(f for f in preproc_layout.get(subject=subject_id,
                                                     session=session,
                                                     task=task, run=run,
                                                     suffix='bold',
                                                     extension=['nii.gz'],
                                                     return_type='file')
                       if f"space-{space}" in f)
    print(f"BOLDS: {len(bolds)}\n{bolds}")

    if space is None:
        masks = sorted(preproc_layout.get(subject=subject_id, suffix='mask',
                                          session=session, task=task,
                                          extension=['nii.gz'],
                                          return_type='file'))
        if not masks:
            masks = sorted(preproc_layout.get(subject=subject_id,
                                              suffix='mask', session=session,
                                              extension=['nii.gz'],
                                              return_type='file'))
    else:
        masks = sorted(f for f in preproc_layout.get(subject=subject_id,
                                                     suffix='mask',
                                                     session=session,
                                                     task=task,
                                                     extension=['nii.gz'],
                                                     return_type='file')
                       if f"space-{space}" in f)
        if not masks:
            masks = sorted(f for f in preproc_layout.get(subject=subject_id,
                                                         suffix='mask',
                                                         session=session,
                                                         extension=['nii.gz'],
                                                         return_type='file')
                           if f"space-{space}" in f)
    if len(masks) == 1:
        # There is only one mask and it is to be used for all runs
        masks = masks * len(bolds)
    print(f"Masks: {len(masks)}\n{masks}")

    eventfiles = sorted(raw_layout.get(subject=subject_id, suffix='events',
                                       task=task, session=session, run=run,
                                       extension=['tsv'], return_type='file'))
    print(f"Eventfiles: {len(eventfiles)}\n{eventfiles}")

    raw_bolds = sorted(raw_layout.get(subject=subject_id, suffix='bold',
                                      task=task, session=session, run=run,
                                      extension=['nii.gz'],
                                      return_type='file'))
    TRs = [raw_layout.get_metadata(f)['RepetitionTime'] for f in raw_bolds]
    print(TRs, len(TRs))

    confounds = sorted(preproc_layout.get(subject=subject_id,
                                          suffix="regressors", task=task,
                                          session=session, run=run,
                                          extension=['tsv'],
                                          return_type='file'))
    print(f"Confounds: {len(confounds)}\n{confounds}")
    if not confounds:
        confounds = [''] * len(bolds)

    # print(list(zip(bolds, masks, eventfiles, TRs)))

    # Edit 11/9/18 - remove assert on event files, since some early hemifield
    # scans don't have them, but warn!
    if len(eventfiles) != len(bolds):
        print("Some functional runs do not have corresponding event files!")

    # All runs for a particular task must have the same TR
    assert TRs.count(TRs[0]) == len(TRs), "Not all TRs are the same!"

    if strict:
        # Used to also check for == len(confounds)
        assert len(bolds) == len(masks) > 0, \
            "Input lists are not the same length!"

    TR = TRs[0]
    return bolds, masks, eventfiles, TR, confounds
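
# A hypothetical invocation (directories and labels are assumptions; the task
# name echoes the hemifield comment above): gather one subject's inputs for a
# single task across all runs.
bolds, masks, eventfiles, TR, confounds = get_files(
    subject_id='01', session='01', task='hemifield',
    raw_data_dir='/data/bids',
    preprocessed_data_dir='/data/bids/derivatives/fmriprep')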