def run(output_dir: str, pipeline_name: str, fmri_file: str, conf_raw: str,
        conf_json: str):
    """Build and execute a small denoising test workflow.

    Loads the named pipeline, wires a confound-preparation node into a
    denoising node and, for non-ICA-AROMA pipelines, prepends a smoothing
    step that feeds the denoiser. Entities (subject/task/session) are
    fixed dummy "test" values.
    """
    loaded_pipeline = load_pipeline_from_json(get_pipeline_path(pipeline_name))
    wf = Workflow(name="test_workflow", base_dir=output_dir)
    confounds = Node(Confounds(pipeline=loaded_pipeline,
                               conf_raw=conf_raw,
                               conf_json=conf_json,
                               subject="test",
                               task="test",
                               session="test",
                               output_dir=output_dir),
                     name="Confprep")
    denoise = Node(Denoise(pipeline=loaded_pipeline,
                           task="test",
                           output_dir=output_dir),
                   name="Denoise")
    if is_IcaAROMA(loaded_pipeline):
        # AROMA pipelines skip smoothing; hand the file to the denoiser directly.
        denoise.inputs.fmri_prep_aroma = fmri_file
    else:
        smooth = Node(Smooth(fmri_prep=fmri_file,
                             output_directory=output_dir),
                      name="Smooth")
        wf.connect([(smooth, denoise, [("fmri_smoothed", "fmri_prep")])])
    wf.connect([(confounds, denoise, [("conf_prep", "conf_prep")])])
    wf.run()
Example #2
0
class QualityMeasuresAsNodeTestBase:
    """Shared fixture base: run QualityMeasures as a Nipype node on temp files.

    Concrete subclasses provide the data attributes below; ``setUpClass``
    writes them to a scratch directory, feeds the resulting files to a
    QualityMeasures node, runs it once, and stores the result on the class.
    """
    # Data supplied by concrete subclasses.
    group_conf_summary: pd.DataFrame = ...
    distance_matrix: np.ndarray = ...
    group_corr_mat: np.ndarray = ...
    # Pipeline is identical for all subclasses, so it is loaded once here.
    pipeline = load_pipeline_from_json(get_pipeline_path('pipeline-Null'))

    @classmethod
    def setUpClass(cls) -> None:
        cls.tempdir = tempfile.mkdtemp()
        # Serialise the subclass-provided data to files the node can read.
        summary_path = join(
            cls.tempdir, 'task-test_pipeline-Null_group_conf_summary.tsv')
        cls.group_conf_summary.to_csv(summary_path, sep="\t", index=False)
        distance_path = join(
            cls.tempdir, "task-test_pipeline-Null_distance_matrix.npy")
        np.save(distance_path, cls.distance_matrix)
        corr_path = join(
            cls.tempdir, "task-test_pipeline-Null_group_corr_mat.npy")
        np.save(corr_path, cls.group_corr_mat)
        node = Node(QualityMeasures(), name="QualityMeasures")
        node.inputs.group_conf_summary = summary_path
        node.inputs.distance_matrix = distance_path
        node.inputs.group_corr_mat = corr_path
        node.inputs.pipeline = cls.pipeline
        node.inputs.output_dir = cls.tempdir
        cls.quality_measures_node = node
        cls.result = cls.quality_measures_node.run()

    @classmethod
    def tearDownClass(cls) -> None:
        # Drop the scratch directory created in setUpClass.
        shutil.rmtree(cls.tempdir)
Example #3
0
 def test_pipeline_3(self):
     """pipeline-24HMP_aCompCor_SpikeReg: only aCompCor and Spikes are raw."""
     pipeline = load_pipeline_from_json(
         get_pipeline_path('pipeline-24HMP_aCompCor_SpikeReg'))
     summary = get_pipeline_summary(pipeline)
     # Expected (Raw, Temp. deriv., Quadr. terms) flags per confound name.
     expected = {
         'WM': (NO, NO, NO),
         'CSF': (NO, NO, NO),
         'GS': (NO, NO, NO),
         'aCompCor': (YES, NA, NA),
         'ICA-AROMA': (NO, NA, NA),
         'Spikes': (YES, NA, NA),
     }
     for confound in summary:
         name = confound['Confound']
         if name not in expected:
             raise ValueError(f'Unknown confound {confound}')
         raw, deriv, quadr = expected[name]
         self.assertEqual(confound['Raw'], raw)
         self.assertEqual(confound["Temp. deriv."], deriv)
         self.assertEqual(confound["Quadr. terms"], quadr)
Example #4
0
def parse_pipelines(pipelines_args: "str | set" = "all") -> set:
    """Resolve pipeline arguments to a set of pipeline file paths.

    :param pipelines_args: either the string ``'all'`` (the only valid
        string value) or a set that may mix names of pipelines from the
        fmridenoise.pipelines directory with path(s) to valid json file(s)
        containing a valid pipeline description.
    :return: set of valid pipeline paths.
    :raises ValueError: for any string other than 'all', or for a set entry
        that is neither a known pipeline name nor a valid pipeline file.
    """
    # NOTE: the original annotation was `str or set`, which evaluates to
    # plain `str`; a string annotation documents the real union without
    # needing a typing import.
    if isinstance(pipelines_args, str):
        if pipelines_args != "all":
            raise ValueError("Only valid string argument is 'all'")
        return get_pipelines_paths()
    known_pipelines = get_pipelines_names()
    pipelines_args = set(pipelines_args)
    # Fast path: every entry is a known pipeline name.
    if pipelines_args <= known_pipelines:
        return get_pipelines_paths(pipelines_args)
    ret = set()
    for p in pipelines_args:
        if p in known_pipelines:
            ret.add(get_pipeline_path(p))
        # Reaching the elif already implies p is not a known pipeline,
        # so the redundant membership re-check was removed.
        elif is_valid(load_pipeline_from_json(p)):
            ret.add(p)
        else:
            raise ValueError(f"File: '{p}' is not a valid pipeline")
    return ret
def run(output_dir: str, pipeline_name: str, group_corr_mat: str,
        group_conf_summary: str):
    """Run QualityMeasures inside a minimal test workflow.

    An identity node fans the four inputs (pipeline, correlation matrix,
    distance matrix, confound summary) into the quality-measures node.
    """
    wf = Workflow(name="test_workflow", base_dir=output_dir)
    fields = ["pipeline", "group_corr_mat", "distance_matrix",
              "group_conf_summary"]
    source = Node(IdentityInterface(fields=fields), name="SomeInputSource")
    source.inputs.pipeline = load_pipeline_from_json(
        get_pipeline_path(pipeline_name))
    source.inputs.group_corr_mat = group_corr_mat
    source.inputs.distance_matrix = get_distance_matrix_file_path()
    source.inputs.group_conf_summary = group_conf_summary
    quality = Node(QualityMeasures(output_dir=output_dir),
                   name="QualitMeasures")
    # Forward every identity field to the equally-named quality input.
    wf.connect([(source, quality, [(field, field) for field in fields])])
    wf.run()
 # Exclusion fixtures: the same three subjects excluded for runs 1 and 2
 # of the 'rest' task, session '1', under the 'Null' pipeline.
 excluded_subjects = [
     ExcludedSubjects(pipeline_name='Null',
                      task='rest',
                      session='1',
                      run=1,
                      excluded={'sub-1', 'sub-2', 'sub-3'}),
     ExcludedSubjects(pipeline_name='Null',
                      task='rest',
                      session='1',
                      run=2,
                      excluded={'sub-1', 'sub-2', 'sub-3'})
 ]
 # Load every pipeline listed in pipelines_dict (presumably
 # name -> pipeline-file mapping; values are pipeline names here).
 pipelines = []
 for pipeline_name in pipelines_dict.values():
     pipelines.append(
         load_pipeline_from_json(get_pipeline_path(pipeline_name)))
 # Input arguments for ReportCreator interface
 plots_dict = create_dummy_plots(entity_list=entity_list,
                                 pipeline_dict=pipelines_dict,
                                 path_out=os.path.join(report_dir, 'tmp'))
 # Create & run interface
 # NOTE(review): the call below continues past this excerpt; remaining
 # keyword arguments (and the closing paren) are outside the visible span.
 interface = ReportCreator(runtime_info=RuntimeInfo(
     input_args=str(reduce(lambda x, y: f"{x} {y}", sys.argv)),
     version=get_versions().get('version')),
                           pipelines=pipelines,
                           tasks=['rest', 'tapping'],
                           sessions=['1', '2'],
                           runs=[1, 2],
                           output_dir=report_dir,
                           warnings=warnings,
                           excluded_subjects=excluded_subjects,
Example #7
0
 def test_get_pipeline_path(self):
     """A known pipeline name resolves to its .json file in pipelines_dir."""
     name = 'pipeline-24HMP_8Phys_SpikeReg_4GS'
     expected = join(self.pipelines_dir,
                     'pipeline-24HMP_8Phys_SpikeReg_4GS.json')
     resolved = pipe.get_pipeline_path(name)
     self.assertEqual(expected, resolved)