Example #1
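# Converter registrations for the standard image formats (all handled by
# MrtrixConverter)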
analyze_format.set_converter(dicom_format, MrtrixConverter)
analyze_format.set_converter(nifti_format, MrtrixConverter)
analyze_format.set_converter(nifti_gz_format, MrtrixConverter)
analyze_format.set_converter(mrtrix_image_format, MrtrixConverter)

mrtrix_image_format.set_converter(dicom_format, MrtrixConverter)
mrtrix_image_format.set_converter(nifti_format, MrtrixConverter)
mrtrix_image_format.set_converter(nifti_gz_format, MrtrixConverter)
mrtrix_image_format.set_converter(analyze_format, MrtrixConverter)

STD_IMAGE_FORMATS = [dicom_format, nifti_format, nifti_gz_format,
                     nifti_gz_x_format, analyze_format, mrtrix_image_format]

multi_nifti_gz_format = FileFormat(name='multi_nifti_gz', extension=None,
                                   directory=True, within_dir_exts=['.nii.gz'])
multi_nifti_gz_format.set_converter(zip_format, UnzipConverter)
multi_nifti_gz_format.set_converter(targz_format, UnTarGzConverter)

# Tractography formats
mrtrix_track_format = FileFormat(name='mrtrix_track', extension='.tck')

# Tabular formats
rfile_format = FileFormat(name='rdata', extension='.RData')
tsv_format = FileFormat(name='tab_separated', extension='.tsv')
# matlab_format = FileFormat(name='matlab', extension='.mat')
csv_format = FileFormat(name='comma_separated', extension='.csv')
text_matrix_format = FileFormat(name='text_matrix', extension='.mat')

# Diffusion gradient-table data formats
fsl_bvecs_format = FileFormat(name='fsl_bvecs', extension='.bvec')
fsl_bvals_format = FileFormat(name='fsl_bvals', extension='.bval')
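
A minimal usage sketch (not part of the original module): the STD_IMAGE_FORMATS list defined above can be scanned to guess a format from a filename. It assumes FileFormat exposes the extension passed to its constructor as an .extension attribute; the helper name is hypothetical.

# Hypothetical helper: return the first standard format whose extension
# matches the filename, skipping formats without an extension.
def guess_image_format(filename):
    for fmt in STD_IMAGE_FORMATS:
        if fmt.extension is not None and filename.endswith(fmt.extension):
            return fmt
    return None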
Example #2
mrtrix_image_format.set_converter(dicom_format, MrtrixConverter)
mrtrix_image_format.set_converter(nifti_format, MrtrixConverter)
mrtrix_image_format.set_converter(nifti_gz_format, MrtrixConverter)
mrtrix_image_format.set_converter(analyze_format, MrtrixConverter)

STD_IMAGE_FORMATS = [
    dicom_format, nifti_format, nifti_gz_format, nifti_gz_x_format,
    analyze_format, mrtrix_image_format
]

multi_nifti_gz_format = FileFormat(name='multi_nifti_gz',
                                   extension=None,
                                   directory=True,
                                   within_dir_exts=['.nii.gz'])
multi_nifti_gz_format.set_converter(zip_format, UnzipConverter)
multi_nifti_gz_format.set_converter(targz_format, UnTarGzConverter)

# Tractography formats
mrtrix_track_format = FileFormat(name='mrtrix_track', extension='.tck')

# Tabular formats
rfile_format = FileFormat(name='rdata', extension='.RData')
tsv_format = FileFormat(name='tab_separated', extension='.tsv')
# matlab_format = FileFormat(name='matlab', extension='.mat')
csv_format = FileFormat(name='comma_separated', extension='.csv')
text_matrix_format = FileFormat(name='text_matrix', extension='.mat')

# Diffusion gradient-table data formats
fsl_bvecs_format = FileFormat(name='fsl_bvecs', extension='.bvec')
fsl_bvals_format = FileFormat(name='fsl_bvals', extension='.bval')
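
Side note on the four set_converter calls at the top of this example: they all register the same converter class, so a minimal refactor sketch, using only the objects and API shown above, could collapse them into a loop.

# Equivalent to the four explicit set_converter calls above
for src_format in (dicom_format, nifti_format, nifti_gz_format, analyze_format):
    mrtrix_image_format.set_converter(src_format, MrtrixConverter)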
Example #3
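        # Check the derived 'thousand' fileset for every subject/visit in the
        # project structure against the expected contents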
        tree = self.dataset.tree
        for subj_id, visits in self.PROJECT_STRUCTURE.items():
            for visit_id in visits:
                session = tree.subject(subj_id).session(visit_id)
                fileset = session.fileset('thousand',
                                          from_analysis=self.STUDY_NAME)
                fileset.format = text_format
                self.assertContentsEqual(fileset, targets[subj_id][visit_id],
                                         "{}:{}".format(subj_id, visit_id))


test1_format = FileFormat('test1', extension='.t1')
test2_format = FileFormat('test2', extension='.t2')
test3_format = FileFormat('test3', extension='.t3')

# Data provided in test1 format can be (trivially) converted to test2 format
test2_format.set_converter(test1_format, IdentityConverter)


class TestInputValidationAnalysis(with_metaclass(AnalysisMetaClass, Analysis)):

    add_data_specs = [
        # Input 'a' accepts either test1 or test2 format; 'b' only test3
        InputFilesetSpec('a', (test1_format, test2_format)),
        InputFilesetSpec('b', test3_format),
        # Derived specs generated by 'identity_pipeline'
        FilesetSpec('c', test2_format, 'identity_pipeline'),
        FilesetSpec('d', test3_format, 'identity_pipeline')
    ]

    def identity_pipeline(self, **name_maps):
        pipeline = self.new_pipeline(
            name='pipeline',
            desc="A dummy pipeline used to test analysis input validation",