def test_no_bandpass_filtering(self):
    '''Expect that if low_pass, high_pass and tr_dict are not provided,
    the _filtering_kwargs attribute containing optional arguments for the
    clean_img function will be an empty dictionary (filtering not specified).'''
    denoise = Denoise(fmri_prep=self.fmri_prep,
                      conf_prep=self.conf_prep,
                      pipeline=self.pipeline,
                      output_dir=self.out_dir.name)
    denoise._validate_filtering('test')
    self.assertEqual(denoise._filtering_kwargs, dict())
def test_missing_both_fmri_prep_files(self):
    '''Expect FileNotFoundError when neither fmri_prep nor fmri_prep_aroma was specified.'''
    denoise = Denoise(
        conf_prep=self.conf_prep,
        pipeline=self.pipeline,
        output_dir=self.out_dir.name,
    )
    with self.assertRaises(FileNotFoundError):
        denoise.run()
def test_empty_confounds(self):
    '''Expect that clean_img will receive None as the value of the confounds
    keyword when conf_prep is an empty confounds table.'''
    denoise = Denoise(
        fmri_prep=self.fmri_prep,
        conf_prep=self.conf_prep,
        pipeline=self.pipeline,
        output_dir=self.out_dir.name,
    )
    denoise._load_confouds()
    self.assertEqual(denoise._confounds, None)
def test_missing_fmri_prep(self):
    '''Expect FileNotFoundError if a pipeline without aroma is selected
    but only fmri_prep_aroma is provided.'''
    self.pipeline['aroma'] = False
    denoise = Denoise(
        fmri_prep_aroma=self.fmri_prep_aroma,
        conf_prep=self.conf_prep,
        pipeline=self.pipeline,
        output_dir=self.out_dir.name,
    )
    with self.assertRaises(FileNotFoundError):
        denoise.run()
def test_incorrect_tr_dict(self):
    '''Expect KeyError if tr_dict does not contain the requested task.'''
    self.tr_dict = {'another_task': 2}
    denoise = Denoise(fmri_prep=self.fmri_prep,
                      conf_prep=self.conf_prep,
                      pipeline=self.pipeline,
                      output_dir=self.out_dir.name,
                      tr_dict=self.tr_dict,
                      high_pass=1 / 128,
                      low_pass=1 / 5)
    with self.assertRaises(KeyError):
        denoise._validate_filtering('test')
def test_missing_tr_dict(self):
    '''Expect an exception if either high_pass or low_pass is provided but
    tr_dict is missing (clean_img requires t_r if filtering is requested).'''
    denoise = Denoise(
        fmri_prep=self.fmri_prep,
        conf_prep=self.conf_prep,
        pipeline=self.pipeline,
        output_dir=self.out_dir.name,
        high_pass=1 / 128,
    )
    with self.assertRaises(TypeError):
        denoise._validate_filtering('test')
def test_non_empty_confounds(self):
    '''Expect that clean_img will receive the correctly loaded confounds as a numpy.ndarray.'''
    conf_prep = pd.DataFrame(np.random.random((100, 2)))
    conf_prep.to_csv(self.conf_prep, sep='\t', index=False)
    denoise = Denoise(
        fmri_prep=self.fmri_prep,
        conf_prep=self.conf_prep,
        pipeline=self.pipeline,
        output_dir=self.out_dir.name,
    )
    denoise._load_confouds()
    self.assertEqual(denoise._confounds.shape, (100, 2))
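# Illustrative sketch (not part of the fmridenoise test suite): the two confounds
# tests above assume that confounds are loaded from a tab-separated file into a
# numpy array, and that an empty table maps to None. A minimal stand-alone version
# of that behaviour could look like this; load_confounds_array is a hypothetical
# helper used only for illustration.
import numpy as np
import pandas as pd


def load_confounds_array(conf_prep_path: str):
    """Return the confounds table as an ndarray, or None if the table is empty."""
    df = pd.read_csv(conf_prep_path, sep='\t')
    return None if df.empty else df.values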
def test_missing_noaroma_files_on_aroma_pipeline(self):
    '''Expect that validation passes when an aroma pipeline is selected and
    only fmri_prep_aroma is provided.'''
    self.pipeline['aroma'] = True
    denoise = Denoise(fmri_prep_aroma=self.fmri_prep,
                      conf_prep=self.conf_prep,
                      pipeline=self.pipeline,
                      output_dir=self.out_dir.name)

    def _(runtime):
        denoise._validate_fmri_prep_files()
        return runtime

    denoise._run_interface = _
    denoise.run()
def run(output_dir: str, pipeline_name: str, fmri_file: str, conf_raw: str, conf_json: str):
    pipeline = load_pipeline_from_json(get_pipeline_path(pipeline_name))
    workflow = Workflow(name="test_workflow", base_dir=output_dir)
    conf_node = Node(Confounds(pipeline=pipeline,
                               conf_raw=conf_raw,
                               conf_json=conf_json,
                               subject="test",
                               task="test",
                               session="test",
                               output_dir=output_dir),
                     name="Confprep")
    denoising_node = Node(Denoise(pipeline=pipeline, task="test", output_dir=output_dir),
                          name="Denoise")
    if not is_IcaAROMA(pipeline):
        smoothing_node = Node(Smooth(fmri_prep=fmri_file, output_directory=output_dir),
                              name="Smooth")
        workflow.connect([(smoothing_node, denoising_node,
                           [("fmri_smoothed", "fmri_prep")])])
    else:
        denoising_node.inputs.fmri_prep_aroma = fmri_file
    workflow.connect([(conf_node, denoising_node, [("conf_prep", "conf_prep")])])
    workflow.run()
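# Illustrative usage sketch (assumed, not taken from the repository): the helper
# above builds and runs a small Confounds -> (Smooth) -> Denoise workflow, so a
# manual invocation might look like the following. The pipeline name and all
# paths are hypothetical placeholders.
if __name__ == "__main__":
    run(output_dir="/tmp/fmridenoise_test",
        pipeline_name="pipeline-24HMP_8Phys_SpikeReg",
        fmri_file="/data/sub-01_task-test_desc-preproc_bold.nii.gz",
        conf_raw="/data/sub-01_task-test_desc-confounds_regressors.tsv",
        conf_json="/data/sub-01_task-test_desc-confounds_regressors.json")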
def test_bandpass_filtering(self):
    '''Expect that if low_pass and high_pass arguments are provided, the
    _filtering_kwargs dict will contain the correct keywords for the clean_img function.'''
    denoise = Denoise(fmri_prep=self.fmri_prep,
                      conf_prep=self.conf_prep,
                      pipeline=self.pipeline,
                      output_dir=self.out_dir.name,
                      tr_dict=self.tr_dict,
                      high_pass=1 / 128,
                      low_pass=1 / 5)
    denoise._validate_filtering('test')
    self.assertEqual({'high_pass': 1 / 128, 'low_pass': 1 / 5, 't_r': 2},
                     denoise._filtering_kwargs)
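# Illustrative sketch (assumed from the docstrings above, not copied from the
# Denoise implementation): the validated _filtering_kwargs are meant to be
# forwarded to nilearn's clean_img together with the loaded confounds, roughly
# as shown below. The input file path is a hypothetical placeholder.
from nilearn.image import clean_img

filtering_kwargs = {'high_pass': 1 / 128, 'low_pass': 1 / 5, 't_r': 2}
fmri_denoised = clean_img(
    "/data/sub-01_task-test_desc-preproc_bold.nii.gz",  # placeholder input image
    confounds=None,  # or an (n_timepoints, n_regressors) ndarray
    **filtering_kwargs)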
def build_node(self) -> Node:
    denoise_node = Node(name='InputSource', interface=Denoise())
    denoise_node.inputs.conf_prep = self.conf_prep
    denoise_node.inputs.pipeline = self.pipeline
    denoise_node.inputs.output_dir = self.out_dir.name
    denoise_node.inputs.tr_dict = self.tr_dict
    denoise_node.inputs.fmri_prep_aroma = self.fmri_prep_aroma
    denoise_node.inputs.fmri_prep = self.fmri_prep
    return denoise_node
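# Illustrative usage sketch (assumed, not part of the test helpers): a node built
# this way can be executed on its own or dropped into a Nipype workflow.
# `helper` stands for any object providing build_node() and is a hypothetical name.
from nipype import Workflow


def run_denoise_node(helper, working_dir: str):
    wf = Workflow(name="denoise_only_wf", base_dir=working_dir)
    wf.add_nodes([helper.build_node()])
    return wf.run()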
def init_fmridenoise_wf(bids_dir,
                        derivatives='fmriprep',
                        task=[],
                        session=[],
                        subject=[],
                        pipelines_paths=get_pipelines_paths(),
                        smoothing=True,
                        ica_aroma=False,
                        high_pass=0.008,
                        low_pass=0.08,
                        # desc=None,
                        # ignore=None, force_index=None,
                        base_dir='/tmp/fmridenoise/',
                        name='fmridenoise_wf'):
    workflow = pe.Workflow(name=name, base_dir=base_dir)
    temps.base_dir = base_dir

    # 1) --- Selecting pipeline
    # Inputs: fulfilled
    pipelineselector = pe.Node(PipelineSelector(), name="PipelineSelector")
    pipelineselector.iterables = ('pipeline_path', pipelines_paths)
    # Outputs: pipeline, pipeline_name, low_pass, high_pass

    # 2) --- Loading BIDS structure
    # Inputs: directory, task, derivatives
    grabbing_bids = pe.Node(
        BIDSGrab(
            bids_dir=bids_dir,
            derivatives=derivatives,
            task=task,
            session=session,
            subject=subject,
            ica_aroma=ica_aroma
        ),
        name="BidsGrabber")
    # Outputs: fmri_prep, conf_raw, conf_json, entities, tr_dict

    # 3) --- Confounds preprocessing
    # Inputs: pipeline, conf_raw, conf_json
    temppath = os.path.join(base_dir, 'prep_conf')
    prep_conf = pe.MapNode(
        Confounds(output_dir=temps.mkdtemp(temppath)),
        iterfield=['conf_raw', 'conf_json', 'entities'],
        name="ConfPrep")
    # Outputs: conf_prep, low_pass, high_pass

    # 4) --- Denoising
    # Inputs: conf_prep, low_pass, high_pass
    iterate = ['fmri_prep', 'conf_prep', 'entities']
    if ica_aroma:
        iterate.append('fmri_prep_aroma')
    temppath = os.path.join(base_dir, 'denoise')
    denoise = pe.MapNode(
        Denoise(
            smoothing=smoothing,
            high_pass=high_pass,
            low_pass=low_pass,
            ica_aroma=ica_aroma,
            output_dir=temps.mkdtemp(temppath)
        ),
        iterfield=iterate,
        name="Denoiser",
        mem_gb=6)
    # Outputs: fmri_denoised

    # 5) --- Connectivity estimation
    # Inputs: fmri_denoised
    temppath = os.path.join(base_dir, 'connectivity')
    parcellation_path = get_parcelation_file_path()
    connectivity = pe.MapNode(
        Connectivity(
            output_dir=temps.mkdtemp(temppath),
            parcellation=parcellation_path
        ),
        iterfield=['fmri_denoised'],
        name='ConnCalc')
    # Outputs: conn_mat, carpet_plot

    # 6) --- Group confounds
    # Inputs: conf_summary, pipeline_name
    # FIXME BEGIN
    # This is part of a temporary solution.
    # Group nodes write to the bids dir instead of tmp and let files be grabbed by the datasink.
    os.makedirs(os.path.join(bids_dir, 'derivatives', 'fmridenoise'), exist_ok=True)
    # FIXME END
    group_conf_summary = pe.Node(
        GroupConfounds(
            output_dir=os.path.join(bids_dir, 'derivatives', 'fmridenoise'),
        ),
        name="GroupConf")
    # Outputs: group_conf_summary

    # 7) --- Group connectivity
    # Inputs: corr_mat, pipeline_name
    group_connectivity = pe.Node(
        GroupConnectivity(
            output_dir=os.path.join(bids_dir, 'derivatives', 'fmridenoise'),
        ),
        name="GroupConn")
    # Outputs: group_corr_mat

    # 8) --- Quality measures
    # Inputs: group_corr_mat, group_conf_summary, pipeline_name
    quality_measures = pe.MapNode(
        QualityMeasures(
            output_dir=os.path.join(bids_dir, 'derivatives', 'fmridenoise'),
            distance_matrix=get_distance_matrix_file_path()
        ),
        iterfield=['group_corr_mat', 'group_conf_summary'],
        name="QualityMeasures")
    # Outputs: fc_fd_summary, edges_weight, edges_weight_clean

    # 9) --- Merge quality measures into lists for further processing
    # Inputs: fc_fd_summary, edges_weight, edges_weight_clean
    merge_quality_measures = pe.JoinNode(
        MergeGroupQualityMeasures(),
        joinsource=pipelineselector,
        name="Merge")
    # Outputs: fc_fd_summary, edges_weight

    # 10) --- Quality measures across pipelines
    # Inputs: fc_fd_summary, edges_weight
    pipelines_quality_measures = pe.Node(
        PipelinesQualityMeasures(
            output_dir=os.path.join(bids_dir, 'derivatives', 'fmridenoise'),
        ),
        name="PipelinesQC")
    # Outputs: pipelines_fc_fd_summary, pipelines_edges_weight

    # 11) --- Report from data
    report_creator = pe.JoinNode(
        ReportCreator(
            group_data_dir=os.path.join(bids_dir, 'derivatives', 'fmridenoise')
        ),
        joinsource=pipelineselector,
        joinfield=['pipelines', 'pipelines_names'],
        name='ReportCreator')

    # 12) --- Save derivatives
    # TODO: Fill missing in/out
    ds_confounds = pe.MapNode(BIDSDataSink(base_directory=bids_dir),
                              iterfield=['in_file', 'entities'],
                              name="ds_confounds")
    ds_denoise = pe.MapNode(BIDSDataSink(base_directory=bids_dir),
                            iterfield=['in_file', 'entities'],
                            name="ds_denoise")
    ds_connectivity = pe.MapNode(BIDSDataSink(base_directory=bids_dir),
                                 iterfield=['in_file', 'entities'],
                                 name="ds_connectivity")
    ds_carpet_plot = pe.MapNode(BIDSDataSink(base_directory=bids_dir),
                                iterfield=['in_file', 'entities'],
                                name="ds_carpet_plot")
    ds_matrix_plot = pe.MapNode(BIDSDataSink(base_directory=bids_dir),
                                iterfield=['in_file', 'entities'],
                                name="ds_matrix_plot")

    # --- Connecting nodes
    workflow.connect([
        (grabbing_bids, denoise, [('tr_dict', 'tr_dict')]),
        (grabbing_bids, denoise, [('fmri_prep', 'fmri_prep'),
                                  ('fmri_prep_aroma', 'fmri_prep_aroma')]),
        (grabbing_bids, denoise, [('entities', 'entities')]),
        (grabbing_bids, prep_conf, [('conf_raw', 'conf_raw'),
                                    ('conf_json', 'conf_json'),
                                    ('entities', 'entities')]),
        (grabbing_bids, ds_confounds, [('entities', 'entities')]),
        (grabbing_bids, ds_denoise, [('entities', 'entities')]),
        (grabbing_bids, ds_connectivity, [('entities', 'entities')]),
        (grabbing_bids, ds_carpet_plot, [('entities', 'entities')]),
        (grabbing_bids, ds_matrix_plot, [('entities', 'entities')]),
        (pipelineselector, prep_conf, [('pipeline', 'pipeline')]),
        (pipelineselector, denoise, [('pipeline', 'pipeline')]),
        (prep_conf, group_conf_summary, [('conf_summary', 'conf_summary'),
                                         ('pipeline_name', 'pipeline_name')]),
        (pipelineselector, ds_denoise, [('pipeline_name', 'pipeline_name')]),
        (pipelineselector, ds_connectivity, [('pipeline_name', 'pipeline_name')]),
        (pipelineselector, ds_confounds, [('pipeline_name', 'pipeline_name')]),
        (pipelineselector, ds_carpet_plot, [('pipeline_name', 'pipeline_name')]),
        (pipelineselector, ds_matrix_plot, [('pipeline_name', 'pipeline_name')]),
        (prep_conf, denoise, [('conf_prep', 'conf_prep')]),
        (denoise, connectivity, [('fmri_denoised', 'fmri_denoised')]),
        (prep_conf, group_connectivity, [('pipeline_name', 'pipeline_name')]),
        (connectivity, group_connectivity, [('corr_mat', 'corr_mat')]),
        (prep_conf, ds_confounds, [('conf_prep', 'in_file')]),
        (denoise, ds_denoise, [('fmri_denoised', 'in_file')]),
        (connectivity, ds_connectivity, [('corr_mat', 'in_file')]),
        (connectivity, ds_carpet_plot, [('carpet_plot', 'in_file')]),
        (connectivity, ds_matrix_plot, [('matrix_plot', 'in_file')]),
        (group_connectivity, quality_measures, [('pipeline_name', 'pipeline_name'),
                                                ('group_corr_mat', 'group_corr_mat')]),
        (group_conf_summary, quality_measures, [('group_conf_summary', 'group_conf_summary')]),
        (quality_measures, merge_quality_measures, [('fc_fd_summary', 'fc_fd_summary'),
                                                    ('edges_weight', 'edges_weight'),
                                                    ('edges_weight_clean', 'edges_weight_clean'),
                                                    ('exclude_list', 'exclude_list')]),
        (merge_quality_measures, pipelines_quality_measures,
         [('fc_fd_summary', 'fc_fd_summary'),
          ('edges_weight', 'edges_weight'),
          ('edges_weight_clean', 'edges_weight_clean')]),
        (merge_quality_measures, report_creator, [('exclude_list', 'excluded_subjects')]),
        (pipelines_quality_measures, report_creator,
         [('plot_pipeline_edges_density', 'plot_pipeline_edges_density'),
          ('plot_pipelines_edges_density_no_high_motion', 'plot_pipelines_edges_density_no_high_motion'),
          ('plot_pipelines_fc_fd_pearson', 'plot_pipelines_fc_fd_pearson'),
          ('plot_pipelines_fc_fd_uncorr', 'plot_pipelines_fc_fd_uncorr'),
          ('plot_pipelines_distance_dependence', 'plot_pipelines_distance_dependence')]),
        (pipelineselector, report_creator, [('pipeline', 'pipelines'),
                                            ('pipeline_name', 'pipelines_names')])
    ])

    return workflow
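# Illustrative usage sketch (assumed, not part of the module): building the
# workflow above and running it with Nipype's MultiProc plugin. The BIDS
# directory path is a hypothetical placeholder.
if __name__ == '__main__':
    wf = init_fmridenoise_wf(bids_dir='/data/bids_dataset',
                             task=['rest'],
                             ica_aroma=False)
    wf.run(plugin='MultiProc', plugin_args={'n_procs': 4})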
def init_fmridenoise_wf(bids_dir,
                        output_dir,
                        derivatives=True,
                        pipelines_paths=glob.glob(os.path.dirname(fmridenoise.__file__) + "/pipelines/*"),
                        # desc=None,
                        # ignore=None, force_index=None,
                        # model=None, participants=None,
                        base_dir=None,
                        name='fmridenoise_wf'):
    workflow = pe.Workflow(name='fmridenoise', base_dir=None)

    # 1) --- Selecting pipeline
    # Inputs: fulfilled
    pipelineselector = pe.Node(PipelineSelector(), name="PipelineSelector")
    pipelineselector.iterables = ('pipeline_path', pipelines_paths)
    # Outputs: pipeline

    # 2) --- Loading BIDS structure
    # Inputs: directory
    loading_bids = pe.Node(
        BIDSLoad(bids_dir=bids_dir, derivatives=derivatives),
        name="BidsLoader")
    # Outputs: entities

    # 3) --- Selecting BIDS files
    # Inputs: entities
    selecting_bids = pe.MapNode(
        BIDSSelect(bids_dir=bids_dir, derivatives=derivatives),
        iterfield=['entities'],
        name='BidsSelector')
    # Outputs: fmri_prep, conf_raw, entities

    # 4) --- Confounds preprocessing
    # Inputs: pipeline, conf_raw
    prep_conf = pe.MapNode(
        Confounds(output_dir=output_dir),
        iterfield=['conf_raw'],
        name="ConfPrep")
    # Outputs: conf_prep

    # 5) --- Denoising
    # Inputs: conf_prep
    denoise = pe.MapNode(
        Denoise(output_dir=output_dir),
        iterfield=['fmri_prep', 'conf_prep'],
        name="Denoiser")
    # Outputs: fmri_denoised

    # 6) --- Connectivity estimation
    # Inputs: fmri_denoised
    parcellation_path = os.path.abspath(os.path.join(fmridenoise.__path__[0], "parcellation"))
    parcellation_path = glob.glob(parcellation_path + "/*")[0]
    connectivity = pe.MapNode(
        Connectivity(output_dir=output_dir, parcellation=parcellation_path),
        iterfield=['fmri_denoised'],
        name='ConnCalc')
    # Outputs: conn_mat

    # 7) --- Save derivatives
    # TODO: Fill missing in/out
    ds_confounds = pe.MapNode(BIDSDataSink(base_directory=output_dir, suffix='suff'),
                              iterfield=['in_file', 'entities'],
                              name="ds_confounds")
    ds_denoise = pe.MapNode(BIDSDataSink(base_directory=output_dir, suffix="denoise"),
                            iterfield=['in_file', 'entities'],
                            name="ds_denoise")
    ds_connectivity = pe.MapNode(BIDSDataSink(base_directory=output_dir, suffix="connect"),
                                 iterfield=['in_file', 'entities'],
                                 name="ds_connectivity")

    # --- Connecting nodes
    workflow.connect([
        (loading_bids, selecting_bids, [('entities', 'entities')]),
        (selecting_bids, prep_conf, [('conf_raw', 'conf_raw')]),
        (pipelineselector, prep_conf, [('pipeline', 'pipeline')]),
        (selecting_bids, denoise, [('fmri_prep', 'fmri_prep')]),
        (prep_conf, denoise, [('conf_prep', 'conf_prep')]),
        (denoise, connectivity, [('fmri_denoised', 'fmri_denoised')]),
        (prep_conf, ds_confounds, [('conf_prep', 'in_file')]),
        (loading_bids, ds_confounds, [('entities', 'entities')]),
        (denoise, ds_denoise, [('fmri_denoised', 'in_file')]),
        (loading_bids, ds_denoise, [('entities', 'entities')]),
        (connectivity, ds_connectivity, [('corr_mat', 'in_file')]),
        (loading_bids, ds_connectivity, [('entities', 'entities')])
    ])

    return workflow
def __init__(self, bids_dir: str, subjects: t.List[str], tasks: t.List[str],
             conf_raw: t.List[str], conf_json: t.List[str], tr_dic: dict,
             pipelines_paths: t.List[str], high_pass: float, low_pass: float):
    self.fmri_prep_aroma_files = []
    self.fmri_prep_files = []

    # 1) --- Itersources for all further processing
    # Inputs: fulfilled
    self.pipelineselector = Node(PipelineSelector(), name="PipelineSelector")
    self.pipelineselector.iterables = ('pipeline_path', pipelines_paths)
    # Outputs: pipeline, pipeline_name, low_pass, high_pass

    # Inputs: fulfilled
    self.subjectselector = Node(IdentityInterface(fields=['subject']), name="SubjectSelector")
    self.subjectselector.iterables = ('subject', subjects)
    # Outputs: subject

    # Inputs: fulfilled
    self.taskselector = Node(IdentityInterface(fields=['task']), name="TaskSelector")
    self.taskselector.iterables = ('task', tasks)
    # Outputs: task

    # 2) --- Loading BIDS files
    # Inputs: subject, session, task
    self.bidsgrabber = Node(BIDSGrab(conf_raw_files=conf_raw, conf_json_files=conf_json),
                            name="BidsGrabber")
    # Outputs: fmri_prep, fmri_prep_aroma, conf_raw, conf_json

    # 3) --- Confounds preprocessing
    # Inputs: pipeline, conf_raw, conf_json
    self.prep_conf = Node(Confounds(output_dir=temps.mkdtemp('prep_conf')), name="ConfPrep")
    # Outputs: conf_prep, conf_summary

    # 4) --- Denoising
    # Inputs: fmri_prep, fmri_prep_aroma, conf_prep, pipeline, entity, tr_dict
    self.denoise = Node(Denoise(high_pass=high_pass,
                                low_pass=low_pass,
                                tr_dict=tr_dic,
                                output_dir=temps.mkdtemp('denoise')),
                        name="Denoiser", mem_gb=12)
    # Outputs: fmri_denoised

    # 5) --- Connectivity estimation
    # Inputs: fmri_denoised
    self.connectivity = Node(Connectivity(output_dir=temps.mkdtemp('connectivity')),
                             name='ConnCalc')
    # Outputs: conn_mat, carpet_plot

    # 6) --- Group confounds
    # Inputs: conf_summary, pipeline_name
    self.group_conf_summary = JoinNode(
        GroupConfounds(output_dir=temps.mkdtemp('group_conf_summary')),
        joinfield=["conf_summary_json_files"],
        joinsource=self.subjectselector,
        name="GroupConf")
    # Outputs: group_conf_summary

    # 7) --- Group connectivity
    # Inputs: corr_mat, pipeline_name
    self.group_connectivity = JoinNode(
        GroupConnectivity(output_dir=temps.mkdtemp('group_connectivity')),
        joinfield=["corr_mat"],
        joinsource=self.subjectselector,
        name="GroupConn")
    # Outputs: group_corr_mat

    # 8) --- Quality measures
    # Inputs: group_corr_mat, group_conf_summary, pipeline_name
    self.quality_measures = Node(
        QualityMeasures(output_dir=temps.mkdtemp('quality_measures'),
                        distance_matrix=get_distance_matrix_file_path()),
        name="QualityMeasures")
    # Outputs: fc_fd_summary, edges_weight, edges_weight_clean

    self.quality_measures_join = create_flatten_identity_join_node(
        name='JoinQualityMeasuresOverPipeline',
        joinsource=self.pipelineselector,
        fields=[
            'excluded_subjects',
            'warnings',
            'corr_matrix_plot',
            'corr_matrix_no_high_motion_plot'
        ],
        flatten_fields=['warnings'])

    # 10) --- Quality measures across pipelines
    # Inputs: fc_fd_summary, edges_weight
    self.pipelines_join = JoinNode(
        IdentityInterface(fields=['pipelines']),
        name='JoinPipelines',
        joinsource=self.pipelineselector,
        joinfield=['pipelines'])
    self.pipelines_quality_measures = JoinNode(
        PipelinesQualityMeasures(
            output_dir=temps.mkdtemp('pipelines_quality_measures'),
            # TODO: Replace with datasinks for needed output
        ),
        joinsource=self.pipelineselector,
        joinfield=[
            'fc_fd_summary', 'edges_weight', 'edges_weight_clean',
            'fc_fd_corr_values', 'fc_fd_corr_values_clean'
        ],
        name="PipelinesQualityMeasures")
    self.pipeline_quality_measures_join_tasks = create_flatten_identity_join_node(
        name="JoinPipelinesQualityMeasuresOverTasks",
        joinsource=self.taskselector,
        fields=[
            'warnings',
            'excluded_subjects',
            'plot_pipelines_edges_density',
            'plot_pipelines_edges_density_no_high_motion',
            'plot_pipelines_fc_fd_pearson',
            'plot_pipelines_fc_fd_pearson_no_high_motion',
            'plot_pipelines_fc_fd_uncorr',
            'plot_pipelines_distance_dependence',
            'plot_pipelines_distance_dependence_no_high_motion',
            'plot_pipelines_tdof_loss',
            'corr_matrix_plot',
            'corr_matrix_no_high_motion_plot'
        ],
        flatten_fields=[
            'warnings',
            'excluded_subjects',
            'corr_matrix_plot',
            'corr_matrix_no_high_motion_plot'
        ])
    # Outputs: pipelines_fc_fd_summary, pipelines_edges_weight

    # 11) --- Report from data
    report_dir = os.path.join(bids_dir, 'derivatives', 'fmridenoise', 'report')
    os.makedirs(report_dir, exist_ok=True)
    self.report_creator = Node(
        ReportCreator(
            runtime_info=RuntimeInfo(
                input_args=str(reduce(lambda x, y: f"{x} {y}", sys.argv)),
                version=get_versions().get('version')),
            output_dir=report_dir),
        name='ReportCreator')
    self.report_creator.inputs.tasks = tasks

    # 12) --- Save derivatives
    base_entities = {'bids_dir': bids_dir, 'derivative': 'fmridenoise'}
    self.ds_confounds = Node(BIDSDataSink(base_entities=base_entities),
                             name="ds_confounds")
    self.ds_denoise = Node(BIDSDataSink(base_entities=base_entities),
                           name="ds_denoise")
    self.ds_connectivity_corr_mat = Node(BIDSDataSink(base_entities=base_entities),
                                         name="ds_connectivity")
    self.ds_connectivity_carpet_plot = Node(BIDSDataSink(base_entities=base_entities),
                                            name="ds_carpet_plot")
    self.ds_connectivity_matrix_plot = Node(BIDSDataSink(base_entities=base_entities),
                                            name="ds_matrix_plot")
    self.ds_group_conf_summary = Node(BIDSDataSink(base_entities=base_entities),
                                      name="ds_group_conf_summary")
    self.ds_group_connectivity = Node(BIDSDataSink(base_entities=base_entities),
                                      name="ds_group_connectivity")
    self.ds_qm_motion_plot = Node(BIDSDataSink(base_entities=base_entities),
                                  name="ds_quality_measures_motion_plot")
    self.ds_qm_corr_matrix_plot_no_high = Node(BIDSDataSink(base_entities=base_entities),
                                               name="ds_quality_measures_corr_matrix_plot_no_high")
    self.ds_qm_corr_matrix_plot = Node(BIDSDataSink(base_entities=base_entities),
                                       name="ds_quality_measures_corr_matrix_plot")
    self.ds_pqm_fc_fd_summary = Node(BIDSDataSink(base_entities=base_entities),
                                     name="ds_pipeline_qm_fc_fd_summery")
    self.ds_pqm_edges_weight = Node(BIDSDataSink(base_entities=base_entities),
                                    name='ds_pipeline_qm_edges_weight')
    self.ds_pqm_edges_weight_clean = Node(BIDSDataSink(base_entities=base_entities),
                                          name='ds_pipeline_qm_edges_weight_clean')
    self.ds_pqm_plot_edges_density = Node(BIDSDataSink(base_entities=base_entities),
                                          name='ds_pipeline_qm_plot_edges_density')
    self.ds_pqm_plot_edges_density_no_high = Node(BIDSDataSink(base_entities=base_entities),
                                                  name='ds_pipeline_qm_plot_edges_density_no_high')
    self.ds_pqm_plot_fc_fd = Node(BIDSDataSink(base_entities=base_entities),
                                  name='ds_pipeline_qm_plot_fc_fd')
    self.ds_pqm_plot_fc_fd_no_high = Node(BIDSDataSink(base_entities=base_entities),
                                          name='ds_pipeline_qm_plot_fc_fd_no_high')
    self.ds_pqm_plot_fc_fd_uncorr = Node(BIDSDataSink(base_entities=base_entities),
                                         name='ds_pipeline_qm_plot_fc_fd_uncorr')
    self.ds_pqm_plot_distance_dependence = Node(BIDSDataSink(base_entities=base_entities),
                                                name='ds_pipeline_qm_plot_distance_dependence')
    self.ds_pqm_plot_distance_dependence_no_high = Node(BIDSDataSink(base_entities=base_entities),
                                                        name='ds_pipeline_qm_plot_distance_dependence_no_high')
    self.ds_pqm_plot_tdof_loss = Node(BIDSDataSink(base_entities=base_entities),
                                      name='ds_pipeline_qm_plot_tdof_loss')

    self.connections = [
        # bidsgrabber
        (self.subjectselector, self.bidsgrabber, [('subject', 'subject')]),
        (self.taskselector, self.bidsgrabber, [('task', 'task')]),
        # prep_conf
        (self.pipelineselector, self.prep_conf, [('pipeline', 'pipeline')]),
        (self.bidsgrabber, self.prep_conf, [('conf_raw', 'conf_raw'),
                                            ('conf_json', 'conf_json')]),
        # denoise
        (self.prep_conf, self.denoise, [('conf_prep', 'conf_prep')]),
        (self.pipelineselector, self.denoise, [('pipeline', 'pipeline')]),
        # group conf summary
        (self.prep_conf, self.group_conf_summary, [('conf_summary', 'conf_summary_json_files')]),
        # connectivity
        (self.denoise, self.connectivity, [('fmri_denoised', 'fmri_denoised')]),
        # group connectivity
        (self.connectivity, self.group_connectivity, [("corr_mat", "corr_mat")]),
        # quality measures
        (self.pipelineselector, self.quality_measures, [('pipeline', 'pipeline')]),
        (self.group_connectivity, self.quality_measures, [('group_corr_mat', 'group_corr_mat')]),
        (self.group_conf_summary, self.quality_measures, [('group_conf_summary', 'group_conf_summary')]),
        # quality measure join over pipelines
        (self.quality_measures, self.quality_measures_join, [
            ('excluded_subjects', 'excluded_subjects'),
            ('warnings', 'warnings'),
            ('corr_matrix_plot', 'corr_matrix_plot'),
            ('corr_matrix_no_high_motion_plot', 'corr_matrix_no_high_motion_plot')
        ]),
        # pipeline quality measures
        (self.quality_measures, self.pipelines_quality_measures, [
            ('fc_fd_summary', 'fc_fd_summary'),
            ('edges_weight', 'edges_weight'),
            ('edges_weight_clean', 'edges_weight_clean'),
            ('fc_fd_corr_values', 'fc_fd_corr_values'),
            ('fc_fd_corr_values_clean', 'fc_fd_corr_values_clean')
        ]),
        (self.taskselector, self.pipelines_quality_measures, [('task', 'task')]),
        # pipelines_join
        (self.pipelineselector, self.pipelines_join, [('pipeline', 'pipelines')]),
        # pipeline_quality_measures_join
        (self.pipelines_quality_measures, self.pipeline_quality_measures_join_tasks, [
            ('pipelines_fc_fd_summary', 'pipelines_fc_fd_summary'),
            ('plot_pipelines_edges_density', 'plot_pipelines_edges_density'),
            ('plot_pipelines_edges_density_no_high_motion', 'plot_pipelines_edges_density_no_high_motion'),
            ('plot_pipelines_fc_fd_pearson', 'plot_pipelines_fc_fd_pearson'),
            ('plot_pipelines_fc_fd_pearson_no_high_motion', 'plot_pipelines_fc_fd_pearson_no_high_motion'),
            ('plot_pipelines_fc_fd_uncorr', 'plot_pipelines_fc_fd_uncorr'),
            ('plot_pipelines_distance_dependence', 'plot_pipelines_distance_dependence'),
            ('plot_pipelines_distance_dependence_no_high_motion', 'plot_pipelines_distance_dependence_no_high_motion'),
            ('plot_pipelines_tdof_loss', 'plot_pipelines_tdof_loss'),
        ]),
        (self.quality_measures_join, self.pipeline_quality_measures_join_tasks, [
            ('excluded_subjects', 'excluded_subjects'),
            ('warnings', 'warnings'),
            ('corr_matrix_plot', 'corr_matrix_plot'),
            ('corr_matrix_no_high_motion_plot', 'corr_matrix_no_high_motion_plot')
        ]),
        # report creator
        (self.pipelines_join, self.report_creator, [('pipelines', 'pipelines')]),
        # all datasinks
        # # ds_denoise
        (self.denoise, self.ds_denoise, [("fmri_denoised", "in_file")]),
        # # ds_connectivity
        (self.connectivity, self.ds_connectivity_corr_mat, [("corr_mat", "in_file")]),
        (self.connectivity, self.ds_connectivity_matrix_plot, [("matrix_plot", "in_file")]),
        (self.connectivity, self.ds_connectivity_carpet_plot, [("carpet_plot", "in_file")]),
        # # ds_confounds
        (self.prep_conf, self.ds_confounds, [("conf_prep", "in_file")]),
        # # ds_group_conf
        (self.group_conf_summary, self.ds_group_conf_summary, [('group_conf_summary', 'in_file')]),
        # # ds_group_connectivity
        (self.group_connectivity, self.ds_group_connectivity, [('group_corr_mat', 'in_file')]),
        # # ds_quality_measures
        (self.quality_measures, self.ds_qm_motion_plot, [('motion_plot', 'in_file')]),
        (self.quality_measures, self.ds_qm_corr_matrix_plot, [('corr_matrix_plot', 'in_file')]),
        (self.quality_measures, self.ds_qm_corr_matrix_plot_no_high,
         [('corr_matrix_no_high_motion_plot', 'in_file')]),
        # # ds_pipelines_quality_measures
        (self.pipelines_quality_measures, self.ds_pqm_fc_fd_summary,
         [('pipelines_fc_fd_summary', 'in_file')]),
        (self.pipelines_quality_measures, self.ds_pqm_edges_weight,
         [('pipelines_edges_weight', 'in_file')]),
        (self.pipelines_quality_measures, self.ds_pqm_edges_weight_clean,
         [('pipelines_edges_weight_clean', 'in_file')]),
        (self.pipelines_quality_measures, self.ds_pqm_plot_edges_density,
         [('plot_pipelines_edges_density', 'in_file')]),
        (self.pipelines_quality_measures, self.ds_pqm_plot_edges_density_no_high,
         [('plot_pipelines_edges_density_no_high_motion', 'in_file')]),
        (self.pipelines_quality_measures, self.ds_pqm_plot_fc_fd,
         [('plot_pipelines_fc_fd_pearson', 'in_file')]),
        (self.pipelines_quality_measures, self.ds_pqm_plot_fc_fd_no_high,
         [('plot_pipelines_fc_fd_pearson_no_high_motion', 'in_file')]),
        (self.pipelines_quality_measures, self.ds_pqm_plot_fc_fd_uncorr,
         [('plot_pipelines_fc_fd_uncorr', 'in_file')]),
        (self.pipelines_quality_measures, self.ds_pqm_plot_distance_dependence,
         [('plot_pipelines_distance_dependence', 'in_file')]),
        (self.pipelines_quality_measures, self.ds_pqm_plot_distance_dependence_no_high,
         [('plot_pipelines_distance_dependence_no_high_motion', 'in_file')]),
        (self.pipelines_quality_measures, self.ds_pqm_plot_tdof_loss,
         [('plot_pipelines_tdof_loss', 'in_file')])
    ]
    self.last_join = self.pipeline_quality_measures_join_tasks
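# Illustrative sketch (assumed, not part of the class above): the `connections`
# list follows Nipype's Workflow.connect format, so wiring and running the whole
# graph could look roughly like this. `WorkflowBuilder` stands for the class whose
# __init__ is shown above and is a hypothetical name used only here.
from nipype import Workflow


def build_and_run(builder: "WorkflowBuilder", base_dir: str):
    wf = Workflow(name="fmridenoise_wf", base_dir=base_dir)
    # connections is a list of (source_node, sink_node, [(output, input), ...]) tuples
    wf.connect(builder.connections)
    return wf.run(plugin='MultiProc')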