Example #1
0
class RequirementsStudy(Study):
    """Study fixture for exercising requirement loading on pipeline nodes."""

    __metaclass__ = StudyMetaClass

    add_data_specs = [
        DatasetSpec('ones', nifti_gz_format),
        DatasetSpec('twos', nifti_gz_format, 'pipeline')
    ]

    def pipeline(self):
        """Build a pipeline whose maths node declares nested requirements.

        The 'maths' node carries both a tuple of alternative requirements
        and a plain requirement, exercising requirement resolution.
        """
        pipeline = self.create_pipeline(
            name='pipeline',
            inputs=[DatasetSpec('ones', nifti_gz_format)],
            outputs=[DatasetSpec('twos', nifti_gz_format)],
            desc=("A pipeline that tests loading of requirements"),
            version=1,
            citations=[])
        # Duplicate the single input so it can be summed with itself.
        duplicator = pipeline.create_node(Merge(2), "input_merge")
        summer = pipeline.create_node(
            MRMath(), "maths",
            requirements=[(dummy1_req, dummy2_req, mrtrix3_req),
                          dcm2niix1_req])
        pipeline.connect_input('ones', duplicator, 'in1')
        pipeline.connect_input('ones', duplicator, 'in2')
        pipeline.connect(duplicator, 'out', summer, 'in_files')
        summer.inputs.operation = 'sum'
        pipeline.connect_output('twos', summer, 'out_file')
        pipeline.assert_connected()
        return pipeline
Example #2
0
 def combined_pipeline(self, **kwargs):
     """Dummy pipeline merging datasets from three sub-studies into 'g'.

     The operation applied by the maths node comes from the
     'combined_op' option.
     """
     pipeline = self.create_pipeline(
         name='combined',
         inputs=[DatasetSpec('ss1_z', mrtrix_format),
                 DatasetSpec('full_e', mrtrix_format),
                 DatasetSpec('partial_ss2_z', mrtrix_format)],
         outputs=[DatasetSpec('g', mrtrix_format)],
         desc=(
             "A dummy pipeline used to test MultiMultiStudy class"),
         version=1,
         citations=[],
         **kwargs)
     gather = pipeline.create_node(Merge(3), name="merge")
     combine = pipeline.create_node(MRMath(), name="mrmath",
                                    requirements=[mrtrix3_req])
     combine.inputs.operation = pipeline.option('combined_op')
     # Wire the three sub-study datasets into the merge node.
     pipeline.connect_input('ss1_z', gather, 'in1')
     pipeline.connect_input('full_e', gather, 'in2')
     pipeline.connect_input('partial_ss2_z', gather, 'in3')
     # Merged list feeds the maths node, which yields the combined output.
     pipeline.connect(gather, 'out', combine, 'in_files')
     pipeline.connect_output('g', combine, 'out_file')
     return pipeline
Example #3
0
 def pipeline1(self, **kwargs):
     """Derive 'derived1_1' and 'derived1_2' from 'start' via MRConvert.

     Raises an Exception when the 'pipeline_option' option did not
     cascade down from the study level.
     """
     pipeline = self.create_pipeline(
         name='pipeline1',
         inputs=[DatasetSpec('start', nifti_gz_format)],
         outputs=[
             DatasetSpec('derived1_1', nifti_gz_format),
             DatasetSpec('derived1_2', nifti_gz_format)
         ],
         desc="A dummy pipeline used to test 'run_pipeline' method",
         version=1,
         citations=[],
         **kwargs)
     # Guard: the study-level option must be visible to this pipeline.
     if not pipeline.option('pipeline_option'):
         raise Exception("Pipeline option was not cascaded down to "
                         "pipeline1")
     convert_a = pipeline.create_node(MRConvert(),
                                      name="convert1",
                                      requirements=[mrtrix3_req])
     convert_b = pipeline.create_node(MRConvert(),
                                      name="convert2",
                                      requirements=[mrtrix3_req])
     # Both converters read the same acquired dataset.
     pipeline.connect_input('start', convert_a, 'in_file')
     pipeline.connect_input('start', convert_b, 'in_file')
     pipeline.connect_output('derived1_1', convert_a, 'out_file')
     pipeline.connect_output('derived1_2', convert_b, 'out_file')
     return pipeline
Example #4
0
 def pipeline2(self, **kwargs):
     """Concatenate 'start' and 'derived1_1' into 'derived2' via MRCat.

     Raises an Exception when the 'pipeline_option' option did not
     cascade down from the study level.
     """
     pipeline = self.create_pipeline(
         name='pipeline2',
         inputs=[
             DatasetSpec('start', nifti_gz_format),
             DatasetSpec('derived1_1', nifti_gz_format)
         ],
         outputs=[DatasetSpec('derived2', nifti_gz_format)],
         desc="A dummy pipeline used to test 'run_pipeline' method",
         version=1,
         citations=[],
         **kwargs)
     # Guard: the study-level option must be visible to this pipeline.
     if not pipeline.option('pipeline_option'):
         raise Exception("Pipeline option was not cascaded down to "
                         "pipeline2")
     concat = pipeline.create_node(MRCat(),
                                   name="mrcat",
                                   requirements=[mrtrix3_req])
     concat.inputs.axis = 0
     # One acquired and one derived scan are concatenated along axis 0.
     pipeline.connect_input('start', concat, 'first_scan')
     pipeline.connect_input('derived1_1', concat, 'second_scan')
     pipeline.connect_output('derived2', concat, 'out_file')
     return pipeline
Example #5
0
class TestStudy(Study):
    """Minimal study fixture declaring four NIfTI.gz datasets.

    No spec names a pipeline, so all four datasets must be acquired
    rather than derived.
    """

    __metaclass__ = StudyMetaClass

    add_data_specs = [
        DatasetSpec('dataset1', nifti_gz_format),
        DatasetSpec('dataset2', nifti_gz_format),
        DatasetSpec('dataset3', nifti_gz_format),
        DatasetSpec('dataset5', nifti_gz_format)
    ]
Example #6
0
 def pipeline(self):
     """Pipeline exercising data-format conversions on inputs and outputs.

     Each output is produced by an identity node; the framework is
     expected to insert the actual format conversion where the spec
     format differs from the connected dataset's format.
     """
     pipeline = self.create_pipeline(
         name='pipeline',
         inputs=[DatasetSpec('mrtrix', mrtrix_format),
                 DatasetSpec('nifti_gz', nifti_gz_format),
                 DatasetSpec('dicom', nifti_gz_format),
                 DatasetSpec('directory', directory_format),
                 DatasetSpec('zip', directory_format)],
         outputs=[DatasetSpec('nifti_gz_from_dicom', nifti_gz_format),
                  DatasetSpec('mrtrix_from_nifti_gz', nifti_gz_format),
                  DatasetSpec('nifti_from_mrtrix', mrtrix_format),
                  DatasetSpec('directory_from_zip', directory_format),
                  DatasetSpec('zip_from_directory', directory_format)],
         desc=("A pipeline that tests out various data format "
                      "conversions"),
         version=1,
         citations=[],)
     # (output spec / node name, input spec) for each conversion leg.
     conversions = (
         ('nifti_gz_from_dicom', 'dicom'),
         ('mrtrix_from_nifti_gz', 'nifti_gz'),
         ('nifti_from_mrtrix', 'mrtrix'),
         ('directory_from_zip', 'zip'),
         ('zip_from_directory', 'directory'))
     for output_name, input_name in conversions:
         passthrough = pipeline.create_node(
             IdentityInterface(fields=['file']), output_name)
         pipeline.connect_input(input_name, passthrough, 'file')
         pipeline.connect_output(output_name, passthrough, 'file')
     pipeline.assert_connected()
     return pipeline
Example #7
0
class StudyB(Study):
    """Second dummy study used in MultiStudy tests.

    Declares two acquired datasets ('w', 'x'), two derived ones
    ('y', 'z') and four options; 'product_op' is expected to be
    overridden (e.g. to 'product') by the enclosing multi-study.
    """

    __metaclass__ = StudyMetaClass

    add_data_specs = [
        DatasetSpec('w', mrtrix_format),
        DatasetSpec('x', mrtrix_format),
        DatasetSpec('y', mrtrix_format, 'pipeline_beta'),
        DatasetSpec('z', mrtrix_format, 'pipeline_beta')]

    add_option_specs = [
        OptionSpec('o1', 10),
        OptionSpec('o2', '20'),
        OptionSpec('o3', 30.0),
        OptionSpec('product_op', 'not-specified')]  # Needs to be set to 'product' @IgnorePep8

    def pipeline_beta(self, **kwargs):  # @UnusedVariable
        """Dummy pipeline deriving 'y' and 'z' from 'w' and 'x'."""
        pipeline = self.create_pipeline(
            name='pipeline_beta',
            inputs=[DatasetSpec('w', mrtrix_format),
                    DatasetSpec('x', mrtrix_format)],
            outputs=[DatasetSpec('y', mrtrix_format),
                     DatasetSpec('z', mrtrix_format)],
            desc="A dummy pipeline used to test MultiStudy class",
            version=1,
            citations=[],
            **kwargs)
        pair_a = pipeline.create_node(Merge(2), name='merge1')
        pair_b = pipeline.create_node(Merge(2), name='merge2')
        pair_c = pipeline.create_node(Merge(2), name='merge3')
        sum_a = pipeline.create_node(MRMath(), name="mrsum1",
                                     requirements=[mrtrix3_req])
        sum_a.inputs.operation = 'sum'
        sum_b = pipeline.create_node(MRMath(), name="mrsum2",
                                     requirements=[mrtrix3_req])
        sum_b.inputs.operation = 'sum'
        product = pipeline.create_node(MRMath(), name="mrproduct",
                                       requirements=[mrtrix3_req])
        product.inputs.operation = pipeline.option('product_op')
        # Acquired inputs feed the first two merge nodes.
        pipeline.connect_input('w', pair_a, 'in1')
        pipeline.connect_input('x', pair_a, 'in2')
        pipeline.connect_input('x', pair_b, 'in1')
        # sum_a combines w and x; sum_b combines x and sum_a;
        # product combines both sums using the configured operation.
        pipeline.connect(pair_a, 'out', sum_a, 'in_files')
        pipeline.connect(sum_a, 'out_file', pair_b, 'in2')
        pipeline.connect(pair_b, 'out', sum_b, 'in_files')
        pipeline.connect(sum_a, 'out_file', pair_c, 'in1')
        pipeline.connect(sum_b, 'out_file', pair_c, 'in2')
        pipeline.connect(pair_c, 'out', product, 'in_files')
        pipeline.connect_output('y', sum_b, 'out_file')
        pipeline.connect_output('z', product, 'out_file')
        return pipeline
Example #8
0
 def pipeline1(self):
     """Trivially derive 'derivable' from 'required' via an identity node."""
     pipeline = self.create_pipeline(
         'pipeline1',
         inputs=[DatasetSpec('required', text_format)],
         outputs=[DatasetSpec('derivable', text_format)],
         desc="",
         citations=[],
         version=1)
     passthrough = pipeline.create_node(IdentityInterface(['a']), 'identity')
     # Input is forwarded unchanged to the output.
     pipeline.connect_input('required', passthrough, 'a')
     pipeline.connect_output('derivable', passthrough, 'a')
     return pipeline
Example #9
0
 def pipeline4(self, **kwargs):
     """Derive 'wrong_option2' from 'wrong_option' via an identity node."""
     pipeline = self.create_pipeline(
         'pipeline4',
         inputs=[DatasetSpec('wrong_option', text_format)],
         outputs=[DatasetSpec('wrong_option2', text_format)],
         desc="",
         citations=[],
         version=1,
         **kwargs)
     relay = pipeline.create_node(IdentityInterface(['a']), 'identity')
     # Input is forwarded unchanged to the output.
     pipeline.connect_input('wrong_option', relay, 'a')
     pipeline.connect_output('wrong_option2', relay, 'a')
     return pipeline
Example #10
0
class TestMatchStudy(Study):
    """Study fixture with two DICOM datasets and placeholder pipelines.

    The pipeline methods are stubs: the specs referencing them are only
    needed so the study class validates, not to derive data.
    """

    __metaclass__ = StudyMetaClass

    add_data_specs = [
        DatasetSpec('gre_phase', dicom_format),
        DatasetSpec('gre_mag', dicom_format)
    ]

    def dummy_pipeline1(self):
        """Placeholder; never constructs a pipeline."""
        pass

    def dummy_pipeline2(self):
        """Placeholder; never constructs a pipeline."""
        pass
Example #11
0
 def pipeline(self):
     """Forward 'input_dataset' to 'output_dataset' through an identity node."""
     pipeline = self.create_pipeline(
         name='pipeline',
         inputs=[DatasetSpec('input_dataset', nifti_gz_format)],
         outputs=[DatasetSpec('output_dataset', nifti_gz_format)],
         desc=("A dummy pipeline used to test dicom-to-nifti "
                      "conversion method"),
         version=1,
         citations=[])
     passthrough = pipeline.create_node(IdentityInterface(['field']),
                                        name='identity')
     # The dataset passes straight through; any conversion is handled
     # by the surrounding framework, not by this node.
     pipeline.connect_input('input_dataset', passthrough, 'field')
     pipeline.connect_output('output_dataset', passthrough, 'field')
     return pipeline
Example #12
0
 def pipeline3(self, **kwargs):
     """Derive 'derived3' from 'derived2' with a single MRConvert node."""
     pipeline = self.create_pipeline(
         name='pipeline3',
         inputs=[DatasetSpec('derived2', nifti_gz_format)],
         outputs=[DatasetSpec('derived3', nifti_gz_format)],
         desc="A dummy pipeline used to test 'run_pipeline' method",
         version=1,
         citations=[],
         **kwargs)
     converter = pipeline.create_node(MRConvert(),
                                      name="convert",
                                      requirements=[mrtrix3_req])
     # Single-node pipeline: derived2 -> convert -> derived3.
     pipeline.connect_input('derived2', converter, 'in_file')
     pipeline.connect_output('derived3', converter, 'out_file')
     return pipeline
Example #13
0
 def pipeline2(self):
     """Pipeline whose output depends on the optional 'optional' input.

     Used to test that 'missing_input' is reported as underivable when
     'optional' has not been supplied.
     """
     pipeline = self.create_pipeline(
         'pipeline2',
         inputs=[
             DatasetSpec('required', text_format),
             DatasetSpec('optional', text_format)
         ],
         outputs=[DatasetSpec('missing_input', text_format)],
         desc="",
         citations=[],
         version=1)
     passthrough = pipeline.create_node(IdentityInterface(['a', 'b']),
                                        'identity')
     # 'optional' is connected but its value is never forwarded; only
     # port 'a' reaches the output.
     pipeline.connect_input('required', passthrough, 'a')
     pipeline.connect_input('optional', passthrough, 'b')
     pipeline.connect_output('missing_input', passthrough, 'a')
     return pipeline
Example #14
0
class ExistingPrereqStudy(Study):
    """Study fixture with a chain of increment pipelines.

    'start' -> 'tens' -> 'hundreds' -> 'thousands', each step adding a
    constant, used to test the 'partial-complete' behaviour where some
    prerequisites already exist.
    """

    __metaclass__ = StudyMetaClass

    add_data_specs = [
        DatasetSpec('start', mrtrix_format),
        DatasetSpec('tens', mrtrix_format, 'tens_pipeline'),
        DatasetSpec('hundreds', mrtrix_format, 'hundreds_pipeline'),
        DatasetSpec('thousands', mrtrix_format, 'thousands_pipeline')
    ]

    def pipeline_factory(self, incr, input, output):  # @ReservedAssignment
        """Return a pipeline adding ``incr`` to ``input``, yielding ``output``."""
        pipeline = self.create_pipeline(
            name=output,
            inputs=[DatasetSpec(input, mrtrix_format)],
            outputs=[DatasetSpec(output, mrtrix_format)],
            desc=("A dummy pipeline used to test 'partial-complete' method"),
            version=1,
            citations=[])
        # Pair the input image with the constant increment, then add.
        pair = pipeline.create_node(Merge(2), name='merge')
        adder = pipeline.create_node(MRCalc(),
                                     name="convert1",
                                     requirements=[mrtrix3_req])
        pair.inputs.in2 = incr
        adder.inputs.operation = 'add'
        pipeline.connect_input(input, pair, 'in1')
        pipeline.connect(pair, 'out', adder, 'operands')
        pipeline.connect_output(output, adder, 'out_file')
        return pipeline

    def tens_pipeline(self, **kwargs):  # @UnusedVariable
        """Derive 'tens' by adding 10 to 'start'."""
        return self.pipeline_factory(10, 'start', 'tens')

    def hundreds_pipeline(self, **kwargs):  # @UnusedVariable
        """Derive 'hundreds' by adding 100 to 'tens'."""
        return self.pipeline_factory(100, 'tens', 'hundreds')

    def thousands_pipeline(self, **kwargs):  # @UnusedVariable
        """Derive 'thousands' by adding 1000 to 'hundreds'."""
        return self.pipeline_factory(1000, 'hundreds', 'thousands')
Example #15
0
 def pipeline3(self, **kwargs):
     """Derive 'another_derivable'; also 'wrong_option' when 'switch' is set.

     The output list is decided before the pipeline is created, based on
     the pre-resolved 'switch' option.
     """
     produce_extra = self.pre_option('switch', 'pipeline3', **kwargs)
     outputs = [DatasetSpec('another_derivable', text_format)]
     if produce_extra:
         outputs.append(DatasetSpec('wrong_option', text_format))
     pipeline = self.create_pipeline(
         'pipeline3',
         inputs=[DatasetSpec('required', text_format)],
         outputs=outputs,
         desc="",
         citations=[],
         version=1)
     passthrough = pipeline.create_node(IdentityInterface(['a', 'b']),
                                        'identity')
     # Both identity ports receive the same input dataset.
     pipeline.connect_input('required', passthrough, 'a')
     pipeline.connect_input('required', passthrough, 'b')
     pipeline.connect_output('another_derivable', passthrough, 'a')
     if produce_extra:
         pipeline.connect_output('wrong_option', passthrough, 'b')
     return pipeline
Example #16
0
class PartialMultiStudy(MultiStudy):
    """Multi-study mapping only a subset of StudyA/StudyB specs.

    Both sub-studies share dataset 'b' and option 'p1' at the
    multi-study level; unmapped sub-study specs are left to the
    framework's default handling.
    """

    __metaclass__ = MultiStudyMetaClass

    add_sub_study_specs = [
        SubStudySpec('ss1', StudyA,
                     {'a': 'x', 'b': 'y', 'p1': 'o1'}),
        SubStudySpec('ss2', StudyB,
                     {'b': 'w', 'c': 'x', 'p1': 'o1'})]

    add_data_specs = [
        DatasetSpec('a', mrtrix_format),
        DatasetSpec('b', mrtrix_format),
        DatasetSpec('c', mrtrix_format)]

    # Expose StudyA's 'pipeline_alpha' under a translated name.
    pipeline_alpha_trans = MultiStudy.translate(
        'ss1', 'pipeline_alpha')

    add_option_specs = [
        OptionSpec('p1', 1000)]
Example #17
0
 def visit_summary_pipeline(self, **kwargs):
     """Sum 'ones_slice' across visits into the per-visit summary dataset."""
     pipeline = self.create_pipeline(
         name="visit_summary",
         inputs=[DatasetSpec('ones_slice', mrtrix_format)],
         outputs=[DatasetSpec('visit_summary', mrtrix_format)],
         desc=("Test of project summary variables"),
         version=1,
         citations=[],
         **kwargs)
     # Join node: collects 'ones_slice' over all visits into 'in_files'.
     summer = pipeline.create_join_visits_node(MRMath(),
                                               'in_files',
                                               'mrmath',
                                               requirements=[mrtrix3_req])
     summer.inputs.operation = 'sum'
     pipeline.connect_input('ones_slice', summer, 'in_files')
     pipeline.connect_output('visit_summary', summer, 'out_file')
     pipeline.assert_connected()
     return pipeline
Example #18
0
 def test_dataset_and_field(self):
     """Round-trip a DatasetSpec and a FieldSpec through pickle.

     Files are opened in binary mode ('wb'/'rb') — pickle streams are
     binary data, so text mode is incorrect on Windows and fails
     outright on Python 3.
     """
     objs = [
         DatasetSpec('a', nifti_gz_format, 'dummy_pipeline1'),
         FieldSpec('b', int, 'dummy_pipeline2')
     ]
     for i, obj in enumerate(objs):
         fname = os.path.join(self.pkl_dir, '{}.pkl'.format(i))
         with open(fname, 'wb') as f:
             pkl.dump(obj, f)
         with open(fname, 'rb') as f:
             re_obj = pkl.load(f)
         # The unpickled copy must compare equal to the original.
         self.assertEqual(obj, re_obj)
Example #19
0
 def pipeline4(self, **kwargs):
     """Concatenate 'derived1_2' and 'derived3' into 'derived4' via MRCat."""
     pipeline = self.create_pipeline(
         name='pipeline4',
         inputs=[
             DatasetSpec('derived1_2', nifti_gz_format),
             DatasetSpec('derived3', nifti_gz_format)
         ],
         outputs=[DatasetSpec('derived4', nifti_gz_format)],
         desc="A dummy pipeline used to test 'run_pipeline' method",
         version=1,
         citations=[],
         **kwargs)
     concat = pipeline.create_node(MRCat(),
                                   name="mrcat",
                                   requirements=[mrtrix3_req])
     concat.inputs.axis = 0
     # Two derived scans are concatenated along axis 0.
     pipeline.connect_input('derived1_2', concat, 'first_scan')
     pipeline.connect_input('derived3', concat, 'second_scan')
     pipeline.connect_output('derived4', concat, 'out_file')
     return pipeline
Example #20
0
 def pipeline_factory(self, incr, input, output):  # @ReservedAssignment
     """Return a pipeline adding ``incr`` to ``input``, yielding ``output``."""
     pipeline = self.create_pipeline(
         name=output,
         inputs=[DatasetSpec(input, mrtrix_format)],
         outputs=[DatasetSpec(output, mrtrix_format)],
         desc=("A dummy pipeline used to test 'partial-complete' method"),
         version=1,
         citations=[])
     # Pair the input image with the constant increment, then add.
     pair = pipeline.create_node(Merge(2), name='merge')
     adder = pipeline.create_node(MRCalc(),
                                  name="convert1",
                                  requirements=[mrtrix3_req])
     pair.inputs.in2 = incr
     adder.inputs.operation = 'add'
     pipeline.connect_input(input, pair, 'in1')
     pipeline.connect(pair, 'out', adder, 'operands')
     pipeline.connect_output(output, adder, 'out_file')
     return pipeline
Example #21
0
 def pipeline_alpha(self, **kwargs):  # @UnusedVariable
     """Dummy pipeline summing 'x' and 'y' into 'z'."""
     pipeline = self.create_pipeline(
         name='pipeline_alpha',
         inputs=[DatasetSpec('x', mrtrix_format),
                 DatasetSpec('y', mrtrix_format)],
         outputs=[DatasetSpec('z', mrtrix_format)],
         desc="A dummy pipeline used to test MultiStudy class",
         version=1,
         citations=[],
         **kwargs)
     gather = pipeline.create_node(Merge(2), name="merge")
     summer = pipeline.create_node(MRMath(), name="mrmath",
                                   requirements=[mrtrix3_req])
     summer.inputs.operation = 'sum'
     # Both inputs are merged into a list and summed.
     pipeline.connect_input('x', gather, 'in1')
     pipeline.connect_input('y', gather, 'in2')
     pipeline.connect(gather, 'out', summer, 'in_files')
     pipeline.connect_output('z', summer, 'out_file')
     return pipeline
Example #22
0
 def pipeline_beta(self, **kwargs):  # @UnusedVariable
     """Dummy pipeline deriving 'y' and 'z' from 'w' and 'x'."""
     pipeline = self.create_pipeline(
         name='pipeline_beta',
         inputs=[DatasetSpec('w', mrtrix_format),
                 DatasetSpec('x', mrtrix_format)],
         outputs=[DatasetSpec('y', mrtrix_format),
                  DatasetSpec('z', mrtrix_format)],
         desc="A dummy pipeline used to test MultiStudy class",
         version=1,
         citations=[],
         **kwargs)
     pair_a = pipeline.create_node(Merge(2), name='merge1')
     pair_b = pipeline.create_node(Merge(2), name='merge2')
     pair_c = pipeline.create_node(Merge(2), name='merge3')
     sum_a = pipeline.create_node(MRMath(), name="mrsum1",
                                  requirements=[mrtrix3_req])
     sum_a.inputs.operation = 'sum'
     sum_b = pipeline.create_node(MRMath(), name="mrsum2",
                                  requirements=[mrtrix3_req])
     sum_b.inputs.operation = 'sum'
     product = pipeline.create_node(MRMath(), name="mrproduct",
                                    requirements=[mrtrix3_req])
     product.inputs.operation = pipeline.option('product_op')
     # Acquired inputs feed the first two merge nodes.
     pipeline.connect_input('w', pair_a, 'in1')
     pipeline.connect_input('x', pair_a, 'in2')
     pipeline.connect_input('x', pair_b, 'in1')
     # sum_a combines w and x; sum_b combines x and sum_a;
     # product combines both sums using the configured operation.
     pipeline.connect(pair_a, 'out', sum_a, 'in_files')
     pipeline.connect(sum_a, 'out_file', pair_b, 'in2')
     pipeline.connect(pair_b, 'out', sum_b, 'in_files')
     pipeline.connect(sum_a, 'out_file', pair_c, 'in1')
     pipeline.connect(sum_b, 'out_file', pair_c, 'in2')
     pipeline.connect(pair_c, 'out', product, 'in_files')
     pipeline.connect_output('y', sum_b, 'out_file')
     pipeline.connect_output('z', product, 'out_file')
     return pipeline
Example #23
0
 def subject_ids_access_pipeline(self, **kwargs):
     """Write the study's subject IDs to the 'subject_ids' text dataset."""
     pipeline = self.create_pipeline(
         name='subject_ids_access',
         inputs=[],
         outputs=[DatasetSpec('subject_ids', text_format)],
         desc=("A dummy pipeline used to test access to 'subject' IDs"),
         version=1,
         citations=[],
         **kwargs)
     # Join node: collects the subject-ID iterator over all subjects
     # into a single file.
     collector = pipeline.create_join_subjects_node(
         IteratorToFile(), name='subjects_to_file', joinfield='ids')
     pipeline.connect_subject_id(collector, 'ids')
     pipeline.connect_output('subject_ids', collector, 'out_file')
     return pipeline
Example #24
0
class MultiMultiStudy(MultiStudy):
    """Multi-study nesting a plain study and two other multi-studies.

    Combines one derived dataset from each sub-study into 'g' using
    the operation named by the 'combined_op' option.
    """

    __metaclass__ = MultiStudyMetaClass

    add_sub_study_specs = [
        SubStudySpec('ss1', StudyA, {}),
        SubStudySpec('full', FullMultiStudy),
        SubStudySpec('partial', PartialMultiStudy)]

    add_data_specs = [
        DatasetSpec('g', mrtrix_format, 'combined_pipeline')]

    add_option_specs = [
        OptionSpec('combined_op', 'sum')]

    def combined_pipeline(self, **kwargs):
        """Dummy pipeline merging datasets from three sub-studies into 'g'."""
        pipeline = self.create_pipeline(
            name='combined',
            inputs=[DatasetSpec('ss1_z', mrtrix_format),
                    DatasetSpec('full_e', mrtrix_format),
                    DatasetSpec('partial_ss2_z', mrtrix_format)],
            outputs=[DatasetSpec('g', mrtrix_format)],
            desc=(
                "A dummy pipeline used to test MultiMultiStudy class"),
            version=1,
            citations=[],
            **kwargs)
        gather = pipeline.create_node(Merge(3), name="merge")
        combine = pipeline.create_node(MRMath(), name="mrmath",
                                       requirements=[mrtrix3_req])
        combine.inputs.operation = pipeline.option('combined_op')
        # Wire one dataset from each sub-study into the merge node.
        pipeline.connect_input('ss1_z', gather, 'in1')
        pipeline.connect_input('full_e', gather, 'in2')
        pipeline.connect_input('partial_ss2_z', gather, 'in3')
        pipeline.connect(gather, 'out', combine, 'in_files')
        pipeline.connect_output('g', combine, 'out_file')
        return pipeline
Example #25
0
class TestLocalInterfacePickle(TestCase):
    """Tests that local-archive interfaces survive a pickle round-trip."""

    # Specs used to construct the source interface under test.
    datasets = [DatasetSpec('a', nifti_gz_format)]
    fields = [FieldSpec('b', int)]

    def setUp(self):
        # Separate scratch dirs for the archive and for pickle files.
        self.tmp_dir = tempfile.mkdtemp()
        self.pkl_dir = tempfile.mkdtemp()

    def tearDown(self):
        shutil.rmtree(self.tmp_dir)
        shutil.rmtree(self.pkl_dir)

    def test_source(self):
        """A LocalSource must compare equal after pickling and unpickling.

        Files are opened in binary mode ('wb'/'rb') — pickle streams
        are binary data, so text mode is incorrect on Windows and fails
        outright on Python 3.
        """
        source = LocalSource('a_study', self.datasets, self.fields,
                             base_dir=self.tmp_dir)
        fname = os.path.join(self.pkl_dir, 'source.pkl')
        with open(fname, 'wb') as f:
            pkl.dump(source, f)
        with open(fname, 'rb') as f:
            re_source = pkl.load(f)
        self.assertEqual(source, re_source)
Example #26
0
class FullMultiStudy(MultiStudy):
    """Multi-study mapping all StudyA/StudyB datasets and options.

    Every sub-study spec and option is renamed to a multi-study-level
    name, and both sub-study pipelines are exposed via translated
    methods ('pipeline_alpha_trans', 'pipeline_beta_trans').
    """

    __metaclass__ = MultiStudyMetaClass

    add_sub_study_specs = [
        SubStudySpec('ss1', StudyA,
                     {'a': 'x',
                      'b': 'y',
                      'd': 'z',
                      'p1': 'o1',
                      'p2': 'o2',
                      'p3': 'o3'}),
        SubStudySpec('ss2', StudyB,
                     {'b': 'w',
                      'c': 'x',
                      'e': 'y',
                      'f': 'z',
                      'q1': 'o1',
                      'q2': 'o2',
                      'p3': 'o3',
                      'required_op': 'product_op'})]

    add_data_specs = [
        DatasetSpec('a', mrtrix_format),
        DatasetSpec('b', mrtrix_format),
        DatasetSpec('c', mrtrix_format),
        DatasetSpec('d', mrtrix_format, 'pipeline_alpha_trans'),
        DatasetSpec('e', mrtrix_format, 'pipeline_beta_trans'),
        DatasetSpec('f', mrtrix_format, 'pipeline_beta_trans')]

    add_option_specs = [
        OptionSpec('p1', 100),
        OptionSpec('p2', '200'),
        OptionSpec('p3', 300.0),
        OptionSpec('q1', 150),
        OptionSpec('q2', '250'),
        OptionSpec('required_op', 'still-not-specified')]

    # Translated pipeline methods delegating to the sub-studies.
    pipeline_alpha_trans = MultiStudy.translate(
        'ss1', 'pipeline_alpha')
    pipeline_beta_trans = MultiStudy.translate(
        'ss2', 'pipeline_beta')
Example #27
0
class DummyStudy(Study):
    """Study fixture declaring sources and sinks at every frequency.

    The sink specs all name 'dummy_pipeline', which is a stub: this
    class exists to exercise source/sink plumbing, not derivation.
    """

    __metaclass__ = StudyMetaClass

    add_data_specs = [
        DatasetSpec('source1', nifti_gz_format),
        DatasetSpec('source2', nifti_gz_format),
        DatasetSpec('source3', nifti_gz_format),
        DatasetSpec('source4', nifti_gz_format,
                    optional=True),
        DatasetSpec('sink1', nifti_gz_format, 'dummy_pipeline'),
        DatasetSpec('sink3', nifti_gz_format, 'dummy_pipeline'),
        DatasetSpec('sink4', nifti_gz_format, 'dummy_pipeline'),
        # Sinks at per-subject, per-visit and per-project frequency.
        DatasetSpec('subject_sink', nifti_gz_format, 'dummy_pipeline',
                    frequency='per_subject'),
        DatasetSpec('visit_sink', nifti_gz_format, 'dummy_pipeline',
                    frequency='per_visit'),
        DatasetSpec('project_sink', nifti_gz_format, 'dummy_pipeline',
                    frequency='per_project'),
        DatasetSpec('resink1', nifti_gz_format, 'dummy_pipeline'),
        DatasetSpec('resink2', nifti_gz_format, 'dummy_pipeline'),
        DatasetSpec('resink3', nifti_gz_format, 'dummy_pipeline')]

    def dummy_pipeline(self):
        """Placeholder; never constructs a pipeline."""
        pass
Example #28
0
class TestDerivableStudy(Study):
    """Study fixture for testing which specs are derivable.

    Specs cover the interesting cases: plain acquired ('required'),
    optional acquired ('optional'), directly derivable ('derivable'),
    derivable only with the optional input ('missing_input'),
    switch-dependent outputs ('another_derivable', 'wrong_option') and
    a spec depending on an underivable one ('wrong_option2').
    """

    __metaclass__ = StudyMetaClass

    add_data_specs = [
        DatasetSpec('required', text_format),
        DatasetSpec('optional', text_format, optional=True),
        DatasetSpec('derivable', text_format, 'pipeline1'),
        DatasetSpec('missing_input', text_format, 'pipeline2'),
        DatasetSpec('another_derivable', text_format, 'pipeline3'),
        DatasetSpec('wrong_option', text_format, 'pipeline3'),
        DatasetSpec('wrong_option2', text_format, 'pipeline4')
    ]

    add_option_specs = [OptionSpec('switch', 0)]

    def pipeline1(self):
        """Trivially derive 'derivable' from 'required'."""
        pipeline = self.create_pipeline(
            'pipeline1',
            inputs=[DatasetSpec('required', text_format)],
            outputs=[DatasetSpec('derivable', text_format)],
            desc="",
            citations=[],
            version=1)
        passthrough = pipeline.create_node(IdentityInterface(['a']), 'identity')
        pipeline.connect_input('required', passthrough, 'a')
        pipeline.connect_output('derivable', passthrough, 'a')
        return pipeline

    def pipeline2(self):
        """Pipeline requiring the optional input; tests underivability."""
        pipeline = self.create_pipeline(
            'pipeline2',
            inputs=[
                DatasetSpec('required', text_format),
                DatasetSpec('optional', text_format)
            ],
            outputs=[DatasetSpec('missing_input', text_format)],
            desc="",
            citations=[],
            version=1)
        passthrough = pipeline.create_node(IdentityInterface(['a', 'b']),
                                           'identity')
        pipeline.connect_input('required', passthrough, 'a')
        pipeline.connect_input('optional', passthrough, 'b')
        pipeline.connect_output('missing_input', passthrough, 'a')
        return pipeline

    def pipeline3(self, **kwargs):
        """Derive 'another_derivable'; also 'wrong_option' when switched on."""
        produce_extra = self.pre_option('switch', 'pipeline3', **kwargs)
        outputs = [DatasetSpec('another_derivable', text_format)]
        if produce_extra:
            outputs.append(DatasetSpec('wrong_option', text_format))
        pipeline = self.create_pipeline(
            'pipeline3',
            inputs=[DatasetSpec('required', text_format)],
            outputs=outputs,
            desc="",
            citations=[],
            version=1)
        passthrough = pipeline.create_node(IdentityInterface(['a', 'b']),
                                           'identity')
        pipeline.connect_input('required', passthrough, 'a')
        pipeline.connect_input('required', passthrough, 'b')
        pipeline.connect_output('another_derivable', passthrough, 'a')
        if produce_extra:
            pipeline.connect_output('wrong_option', passthrough, 'b')
        return pipeline

    def pipeline4(self, **kwargs):
        """Derive 'wrong_option2' from the switch-dependent 'wrong_option'."""
        pipeline = self.create_pipeline(
            'pipeline4',
            inputs=[DatasetSpec('wrong_option', text_format)],
            outputs=[DatasetSpec('wrong_option2', text_format)],
            desc="",
            citations=[],
            version=1,
            **kwargs)
        passthrough = pipeline.create_node(IdentityInterface(['a']), 'identity')
        pipeline.connect_input('wrong_option', passthrough, 'a')
        pipeline.connect_output('wrong_option2', passthrough, 'a')
        return pipeline
Example #29
0
class TestStudy(Study):
    """Dummy Study used by the tests to exercise pipeline derivation.

    Each derived DatasetSpec below names the pipeline method that
    generates it; specs declared without a pipeline name ('start',
    'ones_slice') have no deriving pipeline and are supplied as inputs.
    """

    # NOTE: Python 2 style metaclass declaration (kept as-is)
    __metaclass__ = StudyMetaClass

    add_data_specs = [
        # Specs with no deriving pipeline (acquired inputs)
        DatasetSpec('start', nifti_gz_format),
        DatasetSpec('ones_slice', mrtrix_format),
        # Per-session derivatives and the pipelines that produce them
        DatasetSpec('derived1_1', nifti_gz_format, 'pipeline1'),
        DatasetSpec('derived1_2', nifti_gz_format, 'pipeline1'),
        DatasetSpec('derived2', nifti_gz_format, 'pipeline2'),
        DatasetSpec('derived3', nifti_gz_format, 'pipeline3'),
        DatasetSpec('derived4', nifti_gz_format, 'pipeline4'),
        # Summary derivatives aggregated at non-default frequencies
        DatasetSpec('subject_summary',
                    mrtrix_format,
                    'subject_summary_pipeline',
                    frequency='per_subject'),
        DatasetSpec('visit_summary',
                    mrtrix_format,
                    'visit_summary_pipeline',
                    frequency='per_visit'),
        DatasetSpec('project_summary',
                    mrtrix_format,
                    'project_summary_pipeline',
                    frequency='per_project'),
        # ID-listing derivatives used to test iterator-ID access
        DatasetSpec('subject_ids',
                    text_format,
                    'subject_ids_access_pipeline',
                    frequency='per_visit'),
        DatasetSpec('visit_ids',
                    text_format,
                    'visit_ids_access_pipeline',
                    frequency='per_subject')
    ]

    # pipeline1/pipeline2 raise an Exception unless this option is truthy
    add_option_specs = [OptionSpec('pipeline_option', False)]

    def pipeline1(self, **kwargs):
        """Derive 'derived1_1' and 'derived1_2' from 'start' via two
        parallel MRConvert nodes.

        Raises an Exception if the 'pipeline_option' option was not
        cascaded down into the created pipeline (checked by the tests).
        """
        pipeline = self.create_pipeline(
            name='pipeline1',
            inputs=[DatasetSpec('start', nifti_gz_format)],
            outputs=[DatasetSpec('derived1_1', nifti_gz_format),
                     DatasetSpec('derived1_2', nifti_gz_format)],
            desc="A dummy pipeline used to test 'run_pipeline' method",
            version=1,
            citations=[],
            **kwargs)
        if not pipeline.option('pipeline_option'):
            raise Exception("Pipeline option was not cascaded down to "
                            "pipeline1")
        # One MRConvert node per derived output, both fed from 'start'
        for index, spec_name in enumerate(('derived1_1', 'derived1_2'),
                                          start=1):
            node = pipeline.create_node(MRConvert(),
                                        name="convert{}".format(index),
                                        requirements=[mrtrix3_req])
            pipeline.connect_input('start', node, 'in_file')
            pipeline.connect_output(spec_name, node, 'out_file')
        return pipeline

    def pipeline2(self, **kwargs):
        """Concatenate 'start' and 'derived1_1' (axis 0) into 'derived2'.

        Raises an Exception if the 'pipeline_option' option was not
        cascaded down into the created pipeline (checked by the tests).
        """
        pipeline = self.create_pipeline(
            name='pipeline2',
            inputs=[DatasetSpec('start', nifti_gz_format),
                    DatasetSpec('derived1_1', nifti_gz_format)],
            outputs=[DatasetSpec('derived2', nifti_gz_format)],
            desc="A dummy pipeline used to test 'run_pipeline' method",
            version=1,
            citations=[],
            **kwargs)
        if not pipeline.option('pipeline_option'):
            raise Exception("Pipeline option was not cascaded down to "
                            "pipeline2")
        concat = pipeline.create_node(MRCat(), name="mrcat",
                                      requirements=[mrtrix3_req])
        concat.inputs.axis = 0
        # Feed both scans into the concatenation node, then expose result
        pipeline.connect_input('start', concat, 'first_scan')
        pipeline.connect_input('derived1_1', concat, 'second_scan')
        pipeline.connect_output('derived2', concat, 'out_file')
        return pipeline

    def pipeline3(self, **kwargs):
        """Derive 'derived3' by passing 'derived2' through MRConvert."""
        pipeline = self.create_pipeline(
            name='pipeline3',
            inputs=[DatasetSpec('derived2', nifti_gz_format)],
            outputs=[DatasetSpec('derived3', nifti_gz_format)],
            desc="A dummy pipeline used to test 'run_pipeline' method",
            version=1,
            citations=[],
            **kwargs)
        convert = pipeline.create_node(MRConvert(), name="convert",
                                       requirements=[mrtrix3_req])
        pipeline.connect_input('derived2', convert, 'in_file')
        pipeline.connect_output('derived3', convert, 'out_file')
        return pipeline

    def pipeline4(self, **kwargs):
        """Concatenate 'derived1_2' and 'derived3' (axis 0) into
        'derived4'."""
        pipeline = self.create_pipeline(
            name='pipeline4',
            inputs=[DatasetSpec('derived1_2', nifti_gz_format),
                    DatasetSpec('derived3', nifti_gz_format)],
            outputs=[DatasetSpec('derived4', nifti_gz_format)],
            desc="A dummy pipeline used to test 'run_pipeline' method",
            version=1,
            citations=[],
            **kwargs)
        concat = pipeline.create_node(MRCat(), name="mrcat",
                                      requirements=[mrtrix3_req])
        concat.inputs.axis = 0
        pipeline.connect_input('derived1_2', concat, 'first_scan')
        pipeline.connect_input('derived3', concat, 'second_scan')
        pipeline.connect_output('derived4', concat, 'out_file')
        return pipeline

    def visit_ids_access_pipeline(self, **kwargs):
        """Write the visit IDs of each subject to a 'visit_ids' text file.

        Joins over visits so that every subject yields a single file
        containing all of its visit IDs.
        """
        pipeline = self.create_pipeline(
            name='visit_ids_access',
            inputs=[],
            outputs=[DatasetSpec('visit_ids', text_format)],
            desc=("A dummy pipeline used to test access to 'session' IDs"),
            version=1,
            citations=[],
            **kwargs)
        to_file = pipeline.create_join_visits_node(
            IteratorToFile(), name='sess_to_file', joinfield='ids')
        pipeline.connect_visit_id(to_file, 'ids')
        pipeline.connect_output('visit_ids', to_file, 'out_file')
        return pipeline

    def subject_ids_access_pipeline(self, **kwargs):
        """Write the subject IDs of each visit to a 'subject_ids' text
        file.

        Joins over subjects so that every visit yields a single file
        containing all participating subject IDs.
        """
        pipeline = self.create_pipeline(
            name='subject_ids_access',
            inputs=[],
            outputs=[DatasetSpec('subject_ids', text_format)],
            desc=("A dummy pipeline used to test access to 'subject' IDs"),
            version=1,
            citations=[],
            **kwargs)
        to_file = pipeline.create_join_subjects_node(
            IteratorToFile(), name='subjects_to_file', joinfield='ids')
        pipeline.connect_subject_id(to_file, 'ids')
        pipeline.connect_output('subject_ids', to_file, 'out_file')
        return pipeline

    def subject_summary_pipeline(self, **kwargs):
        """Sum 'ones_slice' over all visits of a subject to produce the
        per_subject 'subject_summary' dataset."""
        pipeline = self.create_pipeline(
            name="subject_summary",
            inputs=[DatasetSpec('ones_slice', mrtrix_format)],
            outputs=[DatasetSpec('subject_summary', mrtrix_format)],
            desc=("Test of project summary variables"),
            version=1,
            citations=[],
            **kwargs)
        # Aggregate across visits with a summing MRMath join node
        summed = pipeline.create_join_visits_node(
            MRMath(), 'in_files', 'mrmath', requirements=[mrtrix3_req])
        summed.inputs.operation = 'sum'
        pipeline.connect_input('ones_slice', summed, 'in_files')
        pipeline.connect_output('subject_summary', summed, 'out_file')
        pipeline.assert_connected()
        return pipeline

    def visit_summary_pipeline(self, **kwargs):
        """Sum 'ones_slice' over all subjects for a visit to produce the
        per_visit 'visit_summary' dataset."""
        pipeline = self.create_pipeline(
            name="visit_summary",
            inputs=[DatasetSpec('ones_slice', mrtrix_format)],
            outputs=[DatasetSpec('visit_summary', mrtrix_format)],
            desc=("Test of project summary variables"),
            version=1,
            citations=[],
            **kwargs)
        # BUGFIX: 'visit_summary' is declared with frequency='per_visit',
        # so the aggregation must join over *subjects* (one summary per
        # visit across all subjects), not over visits. The original used
        # create_join_visits_node, which joins the very dimension the
        # output is indexed by (cf. subject_summary_pipeline, which
        # correctly joins visits to make a per_subject summary).
        mrmath = pipeline.create_join_subjects_node(
            MRMath(), 'in_files', 'mrmath', requirements=[mrtrix3_req])
        mrmath.inputs.operation = 'sum'
        # Connect inputs
        pipeline.connect_input('ones_slice', mrmath, 'in_files')
        # Connect outputs
        pipeline.connect_output('visit_summary', mrmath, 'out_file')
        pipeline.assert_connected()
        return pipeline

    def project_summary_pipeline(self, **kwargs):
        """Sum 'ones_slice' over every session in the project to produce
        the per_project 'project_summary' dataset."""
        pipeline = self.create_pipeline(
            name="project_summary",
            inputs=[DatasetSpec('ones_slice', mrtrix_format)],
            outputs=[DatasetSpec('project_summary', mrtrix_format)],
            desc=("Test of project summary variables"),
            version=1,
            citations=[],
            **kwargs)
        # First collapse the visits within each subject...
        visit_sum = pipeline.create_join_visits_node(
            MRMath(), 'in_files', 'mrmath1', requirements=[mrtrix3_req])
        visit_sum.inputs.operation = 'sum'
        # ...then collapse the per-subject results across subjects
        subject_sum = pipeline.create_join_subjects_node(
            MRMath(), 'in_files', 'mrmath2', requirements=[mrtrix3_req])
        subject_sum.inputs.operation = 'sum'
        pipeline.connect_input('ones_slice', visit_sum, 'in_files')
        pipeline.connect(visit_sum, 'out_file', subject_sum, 'in_files')
        pipeline.connect_output('project_summary', subject_sum, 'out_file')
        pipeline.assert_connected()
        return pipeline