Example #1
 def _list_outputs(self):
     # Nothing to copy if no input file was provided
     if not isdefined(self.inputs.in_file):
         return {'out_file': Undefined}
     # Recover BIDS entities (including pipeline) from the input path
     entities = parse_file_entities_with_pipelines(self.inputs.in_file)
     entities.update(self.inputs.base_entities)
     # Create the target directory, then build the output path from the entities
     os.makedirs(build_path(entities, self.output_dir_pattern), exist_ok=True)
     path = build_path(entities, self.output_path_pattern)
     assert not os.path.exists(path), f"Overwrite protection: file already exists at {path}"
     copyfile(self.inputs.in_file, path, copy=True)
     return {'out_file': path}
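These examples center on pybids' `build_path`, which fills a path pattern from an entity dictionary and drops square-bracketed segments whose entities are missing (`parse_file_entities_with_pipelines` is a project helper that additionally extracts the pipeline name). A minimal standalone sketch, with an illustrative pattern rather than the ones used above:

    from bids.layout.writing import build_path

    entities = {'subject': '01', 'task': 'rest'}
    # 'session' is missing, so the bracketed [_ses-{session}] segment is dropped
    pattern = 'sub-{subject}[_ses-{session}]_task-{task}_bold.nii.gz'
    print(build_path(entities, pattern))  # -> sub-01_task-rest_bold.nii.gz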
Example #2
 def _run_interface(self, runtime):
     if exists(self.inputs.fmri_prep):
         # Smooth the preprocessed image with a 6 mm FWHM Gaussian kernel
         img = load(self.inputs.fmri_prep)
         smoothed = smooth_img(img, fwhm=6)
         # Derive the output filename from the BIDS entities of the input
         entities = parse_file_entities(self.inputs.fmri_prep)
         output_path = join(
             self.inputs.output_directory,
             build_path(entities, self.smooth_file_pattern, False))
         assert not exists(output_path), \
             f"Smoothing was already run: output exists at {output_path}"
         save(smoothed, output_path)
         self._results['fmri_smoothed'] = output_path
     elif self.inputs.is_file_mandatory:
         raise FileNotFoundError(
             f"Mandatory fMRI image file does not exist (input arg {self.inputs.fmri_prep})"
         )
     return runtime
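The smoothing step is nilearn's `smooth_img`, which also works outside a nipype interface. A minimal sketch with hypothetical filenames:

    from nilearn.image import smooth_img

    # Apply an isotropic 6 mm FWHM Gaussian kernel, as in the interface above
    smoothed = smooth_img('sub-01_task-rest_bold.nii.gz', fwhm=6)
    smoothed.to_filename('sub-01_task-rest_desc-smoothed_bold.nii.gz')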
Example #3
 def generatePaths(
     cls,
     subjects: t.Sequence[t.Optional[str]] = (None,),
     sessions: t.Sequence[t.Optional[str]] = (None,),
     tasks: t.Sequence[t.Optional[str]] = (None,),
     runs: t.Sequence[t.Optional[str]] = (None,),
 ) -> t.List[str]:
     # Build one path per (subject, session, task, run) combination;
     # None entries leave the corresponding optional pattern segment empty
     return [
         build_path(
             {'subject': subject, 'session': session, 'task': task, 'run': run},
             cls.path_patter,
             strict=False)
         for subject, session, task, run in product(subjects, sessions, tasks, runs)
     ]
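For illustration, this is how the cartesian-product expansion behaves with a hypothetical pattern (the pattern string and argument values below are assumptions, not taken from the source; `build_path` ignores entities whose value is None, which is what makes the `(None,)` defaults work):

    from itertools import product
    from bids.layout.writing import build_path

    pattern = 'sub-{subject}[_ses-{session}]_task-{task}[_run-{run}]_bold.nii.gz'
    paths = [
        build_path({'subject': sub, 'session': ses, 'task': task, 'run': run},
                   pattern, strict=False)
        for sub, ses, task, run in product(['01', '02'], (None,), ['rest'], (None,))
    ]
    # ['sub-01_task-rest_bold.nii.gz', 'sub-02_task-rest_bold.nii.gz']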
Example #4
    def _run_interface(self, runtime):
        # Validate inputs and load the confound regressors
        fmri_file = self._validate_fmri_prep_files()
        entities = parse_file_entities(fmri_file)
        self._validate_filtering(entities['task'])
        self._load_confouds()
        # Denoise: regress out confounds and apply temporal filtering
        entities = parse_file_entities(self._fmri_file)
        fmri_denoised = clean_img(nb.load(self._fmri_file),
                                  confounds=self._confounds,
                                  **self._filtering_kwargs)
        # Name the output after the input entities plus the pipeline name
        entities['pipeline'] = self.inputs.pipeline['name']
        fmri_denoised_fname = join(
            self.inputs.output_dir,
            build_path(entities, self.fmri_denoised_pattern, False))
        assert not exists(fmri_denoised_fname), \
            f"Denoising was already run for {self._fmri_file}, " \
            f"output exists at {fmri_denoised_fname}"
        nb.save(fmri_denoised, fmri_denoised_fname)
        self._results['fmri_denoised'] = fmri_denoised_fname

        return runtime
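The core call is nilearn's `clean_img`; a minimal standalone sketch with hypothetical filenames and illustrative filter settings:

    import nibabel as nb
    import numpy as np
    from nilearn.image import clean_img

    img = nb.load('sub-01_task-rest_bold.nii.gz')        # hypothetical input
    confounds = np.loadtxt('confounds.tsv', skiprows=1)  # one regressor per column
    # Regress out the confounds and band-pass filter (cutoffs are illustrative)
    denoised = clean_img(img, confounds=confounds,
                         low_pass=0.08, high_pass=0.008, t_r=2.0)
    denoised.to_filename('sub-01_task-rest_pipeline-example_bold.nii.gz')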
Example #5
def stringify_entity(entity_dict: t.Dict[str, str]) -> str:
    # Bracketed segments (and their separators) are dropped when the entity is missing
    pattern = "[ses-{session}_]task-{task}[_run-{run}][_pipeline-{pipeline}]"
    return build_path(entity_dict, pattern)
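With the separators moved inside the brackets, optional segments only appear when their entity is present; for example (output assumes pybids' `build_path` semantics, and the pipeline name is illustrative):

    print(stringify_entity({'task': 'rest'}))
    # task-rest
    print(stringify_entity({'session': '1', 'task': 'rest', 'pipeline': '24HMP'}))
    # ses-1_task-rest_pipeline-24HMP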
Example #6
    def _run_interface(self, runtime):
        # Collect plot-path lists from the interface inputs, split by scope
        plots_all_pipelines, plots_pipeline = {}, {}
        for plots_type, plots_list in self.inputs.__dict__.items():

            if (plots_type.startswith('plots_all_pipelines')
                    and isinstance(plots_list, list)):
                plots_all_pipelines[plots_type] = list(remove_undefined(plots_list))

            if (plots_type.startswith('plots_pipeline')
                    and isinstance(plots_list, list)):
                plots_pipeline[plots_type] = list(remove_undefined(plots_list))

        # Find all distinct (session, task, run) combinations across plot paths
        unique_entities = {
            frozendict((key, value)
                       for key, value in parse_file_entities_with_pipelines(path).items()
                       if key in ('session', 'task', 'run'))
            for path in chain(*plots_all_pipelines.values(), *plots_pipeline.values())
        }

        unique_pipelines = set(pipeline['name'] for pipeline in self.inputs.pipelines)
        # Create input for create_report
        figures_dir = Path(self.inputs.output_dir).joinpath('figures')
        figures_dir.mkdir(parents=True, exist_ok=True)
        report_data = []

        for entity in unique_entities:

            entity_data = {
                'entity_name': build_path(entity, "[ses-{session}] task-{task} [run-{run}]"),
                'entity_id': build_path(entity, "[ses-{session}-]task-{task}[-run-{run}]"),
                'excluded_subjects': set(),
                'warnings': [],
                'errors': [],
                'pipeline': []}
            # Manage excluded subjects
            for excluded in self.inputs.excluded_subjects:
                if is_entity_subset(excluded.entities, entity):
                    entity_data['excluded_subjects'] |= excluded.excluded
            # Render the set of excluded subjects as a comma-separated string
            entity_data['excluded_subjects'] = (
                ', '.join(sorted(entity_data['excluded_subjects']))
                if entity_data['excluded_subjects'] else [])
            # Manage errors and warnings
            for error in self.inputs.warnings:
                if is_entity_subset(error.entities, entity):
                    if error.critical:
                        entity_data['errors'].append(error.build_message())
                    else:
                        entity_data['warnings'].append(error.build_message())

            # Manage plots for all_pipelines
            for plots_type, plots_list in plots_all_pipelines.items():
                for plot in plots_list:
                    if is_entity_subset(parse_file_entities_with_pipelines(plot), entity):
                        plot_basename = os.path.basename(plot)
                        plot_relative_path = os.path.join('figures', plot_basename)
                        copyfile(plot, os.path.join(figures_dir, plot_basename))
                        entity_data[plots_type] = plot_relative_path
                        break

            # Gather per-pipeline report data
            for pipeline in unique_pipelines:
                pipeline_data = {
                    'pipeline_dict': next(pipeline_dict
                                          for pipeline_dict
                                          in self.inputs.pipelines
                                          if pipeline_dict['name'] == pipeline)
                }

                # Manage plots for single pipeline
                for plots_type, plots_list in plots_pipeline.items():
                    for plot in plots_list:
                        if (pipeline in plot and is_entity_subset(
                                parse_file_entities_with_pipelines(plot), entity)):
                            plot_basename = os.path.basename(plot)
                            plot_relative_path = os.path.join('figures', plot_basename)
                            copyfile(plot, os.path.join(figures_dir, plot_basename))
                            pipeline_data[plots_type] = plot_relative_path

                # append new pipeline data dict
                entity_data['pipeline'].append(pipeline_data)

            # append new entity data dict
            report_data.append(entity_data)
        # Sort report data alphabetically by entity name
        report_data.sort(key=lambda x: x['entity_name'])
        # Create report
        create_report(
            runtime_info=self.inputs.runtime_info,
            report_data=report_data,
            output_dir=self.inputs.output_dir,
            report_name='fMRIdenoise_report.html'
        )

        return runtime
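`is_entity_subset` and `parse_file_entities_with_pipelines` are project helpers not shown in these examples. A stand-in for the former, consistent with how it is called above (an assumption, not the project's actual implementation):

    def is_entity_subset(entity_superset: dict, entity_subset: dict) -> bool:
        # True when every (key, value) pair of entity_subset also occurs in
        # entity_superset; extra keys in the superset are ignored
        return all(entity_superset.get(key) == value
                   for key, value in entity_subset.items())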