def test_cache_download(self):
    """Calling cache_inputs() should populate the local cache with every
    input fileset of every session in the project structure."""
    repo = XnatRepository(project_id=self.project,
                          server=SERVER,
                          cache_dir=tempfile.mkdtemp())
    study = self.create_study(
        TestStudy, 'cache_download',
        inputs=[FilesetSelector('fileset1', text_format, 'fileset1'),
                FilesetSelector('fileset3', text_format, 'fileset3')],
        repository=repo)
    study.cache_inputs()
    # Walk the expected subject/visit structure and check each cached file
    for subject_id, visits in list(self.STRUCTURE.items()):
        subject_dir = op.join(
            repo.cache_dir, self.project,
            '{}_{}'.format(self.project, subject_id))
        for visit_id in visits:
            session_dir = op.join(
                subject_dir,
                '{}_{}_{}'.format(self.project, subject_id, visit_id))
            for inpt in study.inputs:
                cached_path = op.join(
                    session_dir, inpt.name + inpt.format.extension)
                self.assertTrue(op.exists(cached_path))
def test_preprocess(self):
    """Run the diffusion preprocessing pipeline and check that the
    'preproc' derivative exists on disk."""
    study = self.create_study(
        DiffusionStudy, 'preprocess',
        [FilesetSelector('magnitude', 'r_l_dwi_b700_30', mrtrix_format),
         FilesetSelector('dwi_reference', 'l_r_dwi_b0_6', mrtrix_format)])
    preproc = next(iter(study.data('preproc')))
    self.assertTrue(os.path.exists(preproc.path))
def test_full_multi_study(self):
    """End-to-end check of FullMultiStudy: derived contents plus the
    parameter values as seen at the multi-study and sub-study levels."""
    study = self.create_study(
        FullMultiStudy, 'full',
        [FilesetSelector('a', text_format, 'ones'),
         FilesetSelector('b', text_format, 'ones'),
         FilesetSelector('c', text_format, 'ones')],
        parameters=[Parameter('required_op', 'mul')])
    d, e, f = study.data(('d', 'e', 'f'),
                         subject_id='SUBJECT', visit_id='VISIT')
    self.assertContentsEqual(d, 2.0)
    self.assertContentsEqual(e, 3.0)
    self.assertContentsEqual(f, 6.0)
    # Test parameter values in MultiStudy
    for name, expected in (('p1', 100), ('p2', '200'), ('p3', 300.0),
                           ('q1', 150), ('q2', '250'),
                           ('required_op', 'mul')):
        self.assertEqual(study._get_parameter(name).value, expected)
    # Test parameter values in SubStudy
    ss1 = study.sub_study('ss1')
    for name, expected in (('o1', 100), ('o2', '200'), ('o3', 300.0)):
        self.assertEqual(ss1._get_parameter(name).value, expected)
    ss2 = study.sub_study('ss2')
    for name, expected in (('o1', 150), ('o2', '250'), ('o3', 300.0),
                           ('product_op', 'mul')):
        self.assertEqual(ss2._get_parameter(name).value, expected)
def test_partial_multi_study(self):
    """End-to-end check of PartialMultiStudy: derived contents plus the
    parameter values as seen at the multi-study and sub-study levels."""
    study = self.create_study(
        PartialMultiStudy, 'partial',
        [FilesetSelector('a', text_format, 'ones'),
         FilesetSelector('b', text_format, 'ones'),
         FilesetSelector('c', text_format, 'ones')],
        parameters=[Parameter('ss2_product_op', 'mul')])
    ss1_z = study.data('ss1_z', subject_id='SUBJECT', visit_id='VISIT')
    ss2_z = next(iter(study.data('ss2_z')))
    self.assertContentsEqual(ss1_z, 2.0)
    self.assertContentsEqual(study.data('ss2_y'), 3.0)
    self.assertContentsEqual(ss2_z, 6.0)
    # Test parameter values in MultiStudy
    for name, expected in (('p1', 1000), ('ss1_o2', '2'), ('ss1_o3', 3.0),
                           ('ss2_o2', '20'), ('ss2_o3', 30.0),
                           ('ss2_product_op', 'mul')):
        self.assertEqual(study._get_parameter(name).value, expected)
    # Test parameter values in SubStudy
    ss1 = study.sub_study('ss1')
    for name, expected in (('o1', 1000), ('o2', '2'), ('o3', 3.0)):
        self.assertEqual(ss1._get_parameter(name).value, expected)
    ss2 = study.sub_study('ss2')
    for name, expected in (('o1', 1000), ('o2', '20'), ('o3', 30.0),
                           ('product_op', 'mul')):
        self.assertEqual(ss2._get_parameter(name).value, expected)
def test_extract_b0(self):
    """Run the b0-extraction pipeline and check its output was created."""
    selectors = [
        FilesetSelector('preproc', 'preproc', nifti_gz_format),
        FilesetSelector('grad_dirs', 'gradient_dirs', fsl_bvecs_format),
        FilesetSelector('bvalues', 'bvalues', fsl_bvals_format),
    ]
    study = self.create_study(DiffusionStudy, 'extract_b0', selectors)
    study.extract_b0_pipeline().run(work_dir=self.work_dir)
    self.assertFilesetCreated('primary.nii.gz', study.name)
def test_repository_roundtrip(self):
    """Round-trip filesets through an XNAT source->sink nipype workflow
    and verify both the local cache contents and the scans that end up
    on the XNAT server.
    """
    # Create working dirs
    # Create DarisSource node
    repository = XnatRepository(project_id=self.project, server=SERVER,
                                cache_dir=self.cache_dir)
    study = DummyStudy(self.STUDY_NAME, repository,
                       processor=LinearProcessor('a_dir'),
                       inputs=[
                           FilesetSelector('source1', text_format,
                                           'source1'),
                           FilesetSelector('source2', text_format,
                                           'source2'),
                           FilesetSelector('source3', text_format,
                                           'source3'),
                           FilesetSelector('source4', text_format,
                                           'source4')])
    # TODO: Should test out other file formats as well.
    source_files = ['source1', 'source2', 'source3', 'source4']
    sink_files = ['sink1', 'sink3', 'sink4']
    inputnode = pe.Node(IdentityInterface(['subject_id', 'visit_id']),
                        'inputnode')
    inputnode.inputs.subject_id = str(self.SUBJECT)
    inputnode.inputs.visit_id = str(self.VISIT)
    source = study.source(source_files)
    sink = study.sink(sink_files)
    sink.inputs.name = 'repository-roundtrip-unittest'
    sink.inputs.desc = (
        "A test session created by repository roundtrip unittest")
    # Create workflow connecting them together
    workflow = pe.Workflow('source-sink-unit-test',
                           base_dir=self.work_dir)
    workflow.add_nodes((source, sink))
    workflow.connect(inputnode, 'subject_id', source, 'subject_id')
    workflow.connect(inputnode, 'visit_id', source, 'visit_id')
    workflow.connect(inputnode, 'subject_id', sink, 'subject_id')
    workflow.connect(inputnode, 'visit_id', sink, 'visit_id')
    # 'source2' is deliberately left unconnected, so no 'sink2' should
    # appear in the derived session
    for source_name in source_files:
        if source_name != 'source2':
            sink_name = source_name.replace('source', 'sink')
            workflow.connect(source, source_name + PATH_SUFFIX,
                             sink, sink_name + PATH_SUFFIX)
    workflow.run()
    # Check cache was created properly
    self.assertEqual(
        ls_with_md5_filter(self.session_cache()),
        ['source1.txt', 'source2.txt', 'source3.txt', 'source4.txt'])
    expected_sink_filesets = ['sink1', 'sink3', 'sink4']
    self.assertEqual(
        ls_with_md5_filter(self.session_cache(
            from_study=self.STUDY_NAME)),
        [d + text_format.extension for d in expected_sink_filesets])
    # Check that the sinks were also uploaded to the XNAT server
    with self._connect() as login:
        fileset_names = list(login.experiments[self.session_label(
            from_study=self.STUDY_NAME)].scans.keys())
    self.assertEqual(sorted(fileset_names), expected_sink_filesets)
def test_response(self):
    """Run the response-function pipeline and check its output exists."""
    selectors = [
        FilesetSelector('bias_correct', 'bias_correct', nifti_gz_format),
        FilesetSelector('brain_mask', 'brain_mask', nifti_gz_format),
        FilesetSelector('grad_dirs', 'gradient_dirs', fsl_bvecs_format),
        FilesetSelector('bvalues', 'bvalues', fsl_bvals_format),
    ]
    study = self.create_study(DiffusionStudy, 'response', selectors)
    study.response_pipeline().run(work_dir=self.work_dir)
    self.assertFilesetCreated('response.txt', study.name)
def test_bias_correct(self):
    """Run bias correction (mrtrix masking) and check its output exists."""
    selectors = [
        FilesetSelector('preproc', 'preproc', nifti_gz_format),
        FilesetSelector('grad_dirs', 'gradient_dirs', fsl_bvecs_format),
        FilesetSelector('bvalues', 'bvalues', fsl_bvals_format),
    ]
    study = self.create_study(DiffusionStudy, 'bias_correct', selectors)
    pipeline = study.bias_correct_pipeline(mask_tool='mrtrix')
    pipeline.run(work_dir=self.work_dir)
    self.assertFilesetCreated('bias_correct.nii.gz', study.name)
def test_registration_to_matrix(self):
    """Apply a pre-computed transform matrix and check the registered
    image was produced."""
    inputs = {
        FilesetSelector('to_register', 'flair', nifti_gz_format),
        FilesetSelector('reference', 'mprage', nifti_gz_format),
        FilesetSelector('matrix', 'matrix', text_matrix_format),
    }
    study = self.create_study(
        CoregisteredToMatrixStudy, 'registration_to_matrix', inputs)
    study.linear_registration_pipeline().run(work_dir=self.work_dir)
    self.assertFilesetCreated('registered.nii.gz', study.name)
def test_input_validation(self):
    """Study creation should succeed when input formats match the spec."""
    selectors = [
        FilesetSelector('a', test1_format, 'a'),
        FilesetSelector('b', test3_format, 'b'),
        FilesetSelector('c', test1_format, 'a'),
        FilesetSelector('d', test3_format, 'd'),
    ]
    self.create_study(TestInputValidationStudy, 'test_input_validation',
                      inputs=selectors)
def test_input_validation_fail(self):
    """A format mismatch in the inputs should raise ArcanaUsageError."""
    bad_inputs = [FilesetSelector('a', test3_format, 'a'),
                  FilesetSelector('b', test3_format, 'b')]
    self.assertRaises(
        ArcanaUsageError,
        self.create_study,
        TestInputValidationStudy,
        'test_validation_fail',
        inputs=bad_inputs)
def test_ute(self):
    """Run the UTE DICOM-conversion pipeline and check both DICOM
    series were created."""
    inputs = {
        FilesetSelector('ute_echo1', 'ute_echo1', dicom_format),
        FilesetSelector('ute_echo2', 'ute_echo2', dicom_format),
        FilesetSelector('umap_ute', 'umap_ute', dicom_format),
    }
    study = self.create_study(UTEStudy, 'pipeline', inputs)
    study.conversion_to_dicom_pipeline().run(work_dir=self.work_dir)
    for fname in ('sute_cont_dicoms', 'sute_fix_dicoms'):
        self.assertFilesetCreated(fname, study.name)
def make_study(self):
    """Build the ExampleStudy instance shared by this test case."""
    selectors = [FilesetSelector('one', text_format, 'one_input'),
                 FilesetSelector('ten', text_format, 'ten_input')]
    return self.create_study(
        ExampleStudy, 'dummy',
        inputs=selectors,
        parameters={'pipeline_parameter': True})
def test_freesurfer_pipeline(self):
    """Run the FreeSurfer recon-all pipeline and check its zipped
    output was created."""
    study = self.create_study(T1T2Study, 'freesurfer', inputs=[
        FilesetSelector('t1', 'mprage', nifti_gz_format),
        FilesetSelector('t2', 'flair', nifti_gz_format)
    ])
    study.freesurfer_pipeline().run(work_dir=self.work_dir)
    # NOTE(review): unlike sibling tests, no study name is passed as the
    # second argument to assertFilesetCreated here — confirm whether
    # `study.name` was intentionally omitted.
    self.assertFilesetCreated('fs_recon_all.fs.zip')
def test_t2_registration_pipeline(self):
    """Register T2 to T1 and check the coregistered image was created."""
    selectors = [FilesetSelector('t1', 'mprage', nifti_gz_format),
                 FilesetSelector('t2', 'flair', nifti_gz_format)]
    study = self.create_study(T1T2Study, 't2_registration',
                              inputs=selectors)
    study.t2_registration_pipeline().run(work_dir=self.work_dir)
    self.assertFilesetCreated('t2_coreg.nii.gz', study.name)
def test_format_conversions(self):
    """Each derived name exercises a different implicit format
    conversion (text passthrough, zip<->directory on input/output)."""
    study = self.create_study(
        ConversionStudy, 'conversion',
        [FilesetSelector('text', text_format, 'text'),
         FilesetSelector('directory', directory_format, 'directory'),
         FilesetSelector('zip', zip_format, 'zip')])
    for derived in ('text_from_text',
                    'directory_from_zip_on_input',
                    'zip_from_directory_on_input',
                    'directory_from_zip_on_output',
                    'zip_from_directory_on_output'):
        self.assertCreated(next(iter(study.data(derived))))
def test_coreg_and_brain_mask(self):
    """Run coregistration + brain extraction and compare the result
    against the stored reference image."""
    study = self.create_study(TestCoregStudy, 'coreg_and_mask_study',
                              inputs=[
                                  FilesetSelector('ref_primary', 'mprage',
                                                  nifti_gz_format),
                                  # NOTE(review): argument order differs
                                  # from the selector above (format before
                                  # pattern) — confirm FilesetSelector
                                  # accepts both orderings or whether this
                                  # is a latent bug.
                                  FilesetSelector('tocoreg_primary',
                                                  nifti_gz_format, 'flair')
                              ])
    coreg_brain = list(study.data('tocoreg_coreg_brain'))[0]
    self.assertFilesetsEqual(coreg_brain, self.reference('coreg_brain'))
def test_suvr(self):
    """Run the SUVR pipeline on a registered PET volume and check the
    SUVR image was created."""
    selectors = [
        FilesetSelector('registered_volume', 'suvr_registered_volume',
                        nifti_gz_format),
        FilesetSelector('base_mask', 'cerebellum_mask', nifti_gz_format),
    ]
    study = self.create_study(StaticPETStudy, 'suvr', inputs=selectors)
    pipeline = study.suvr_pipeline()
    pipeline.run(work_dir=self.work_dir, plugin='Linear')
    self.assertFilesetCreated('SUVR_image.nii.gz', study.name)
def test_registration(self):
    """Linear registration should produce both the registered image
    and the transform matrix."""
    selectors = [FilesetSelector('to_register', 'flair', nifti_gz_format),
                 FilesetSelector('reference', 'mprage', nifti_gz_format)]
    study = self.create_study(CoregisteredStudy, 'registration',
                              inputs=selectors)
    study.linear_registration_pipeline().run(work_dir=self.work_dir)
    for fname in ('registered.nii.gz', 'matrix.mat'):
        self.assertFilesetCreated(fname, study.name)
def test_concatenate(self):
    """Concatenate low- and high-b DWI scans into a single series."""
    selectors = [
        FilesetSelector('low_b_dw_scan', 'r_l_dwi_b700_30',
                        mrtrix_format),
        FilesetSelector('high_b_dw_scan', 'r_l_dwi_b2000_60',
                        mrtrix_format),
    ]
    study = self.create_study(NODDIStudy, 'concatenate',
                              inputs=selectors)
    study.concatenate_pipeline().run(work_dir=self.work_dir)
    self.assertFilesetCreated('dwi_scan.mif', study.name)
def test_order_match(self):
    """Selectors using `order` should pick scans by their position
    within the session."""
    selectors = [
        FilesetSelector('gre_phase', dicom_format,
                        pattern=self.GRE_PATTERN, order=1, is_regex=True),
        FilesetSelector('gre_mag', dicom_format,
                        pattern=self.GRE_PATTERN, order=0, is_regex=True),
    ]
    study = self.create_study(TestMatchStudy, 'test_dicom',
                              inputs=selectors)
    phase = next(iter(study.data('gre_phase')))
    mag = next(iter(study.data('gre_mag')))
    self.assertEqual(phase.name, 'gre_field_mapping_3mm_phase')
    self.assertEqual(mag.name, 'gre_field_mapping_3mm_mag')
def test_id_match(self):
    """Selectors using `id` should match scans by their XNAT scan ID."""
    repo = XnatRepository(project_id=self.project,
                          server=SERVER,
                          cache_dir=tempfile.mkdtemp())
    study = test_fileset.TestMatchStudy(
        name='test_dicom',
        repository=repo,
        processor=LinearProcessor(self.work_dir),
        inputs=[FilesetSelector('gre_phase', dicom_format, id=7),
                FilesetSelector('gre_mag', dicom_format, id=6)])
    phase = next(iter(study.data('gre_phase')))
    mag = next(iter(study.data('gre_mag')))
    self.assertEqual(phase.name, 'gre_field_mapping_3mm_phase')
    self.assertEqual(mag.name, 'gre_field_mapping_3mm_mag')
def test_per_session_prereqs(self):
    """Deriving 'thousand' should reuse per-session prerequisites that
    already exist, and skip regenerating 'ten' where 'hundred' and
    'thousand' are already present."""
    study = self.create_study(
        ExistingPrereqStudy, self.STUDY_NAME,
        inputs=[FilesetSelector('one', text_format, 'one')])
    study.data('thousand')
    targets = {
        'subject1': {'visit1': 1100.0, 'visit2': 1110.0,
                     'visit3': 1000.0},
        'subject2': {'visit1': 1111.0, 'visit2': 1110.0,
                     'visit3': 1000.0},
    }
    tree = self.repository.tree()
    for subj_id, visits in self.PROJECT_STRUCTURE.items():
        for visit_id in visits:
            session = tree.subject(subj_id).session(visit_id)
            derived = session.fileset('thousand', study=self.STUDY_NAME)
            self.assertContentsEqual(derived,
                                     targets[subj_id][visit_id],
                                     "{}:{}".format(subj_id, visit_id))
            if subj_id == 'subject1' and visit_id == 'visit3':
                self.assertNotIn(
                    'ten', [d.name for d in session.filesets],
                    "'ten' should not be generated for "
                    "subject1:visit3 as hundred and thousand are "
                    "already present")
def test_fields_roundtrip(self):
    """Sink three typed fields (int, float, str) to the XNAT repository,
    source them back, and check the values survive the round trip.
    """
    repository = XnatRepository(server=SERVER, cache_dir=self.cache_dir,
                                project_id=self.project)
    study = DummyStudy(
        self.STUDY_NAME, repository, processor=LinearProcessor('a_dir'),
        inputs=[FilesetSelector('source1', text_format, 'source1')])
    fields = ['field{}'.format(i) for i in range(1, 4)]
    sink = study.sink(outputs=fields, name='fields_sink')
    # One field of each supported type; keep references so the sourced
    # values can be compared below
    sink.inputs.field1_field = field1 = 1
    sink.inputs.field2_field = field2 = 2.0
    # Was `str('3')` — a redundant constructor call on a str literal
    sink.inputs.field3_field = field3 = '3'
    sink.inputs.subject_id = self.SUBJECT
    sink.inputs.visit_id = self.VISIT
    sink.inputs.desc = "Test sink of fields"
    sink.inputs.name = 'test_sink'
    sink.run()
    source = study.source(inputs=fields, name='fields_source')
    source.inputs.visit_id = self.VISIT
    source.inputs.subject_id = self.SUBJECT
    source.inputs.desc = "Test source of fields"
    source.inputs.name = 'test_source'
    results = source.run()
    # Values (and implicitly their types) must match what was sunk
    self.assertEqual(results.outputs.field1_field, field1)
    self.assertEqual(results.outputs.field2_field, field2)
    self.assertEqual(results.outputs.field3_field, field3)
class TestDicomTagMatch(BaseTestCase):
    """Tests selection of DICOM scans by header tag and by scan order."""

    IMAGE_TYPE_TAG = ('0008', '0008')
    GRE_PATTERN = 'gre_field_mapping_3mm.*'
    PHASE_IMAGE_TYPE = ['ORIGINAL', 'PRIMARY', 'P', 'ND']
    MAG_IMAGE_TYPE = ['ORIGINAL', 'PRIMARY', 'M', 'ND', 'NORM']
    DICOM_MATCH = [
        FilesetSelector('gre_phase', dicom_format, GRE_PATTERN,
                        dicom_tags={IMAGE_TYPE_TAG: PHASE_IMAGE_TYPE},
                        is_regex=True),
        FilesetSelector('gre_mag', dicom_format, GRE_PATTERN,
                        dicom_tags={IMAGE_TYPE_TAG: MAG_IMAGE_TYPE},
                        is_regex=True)]

    def _check_matched_names(self, study):
        # Both selectors should have resolved to the expected scan names
        phase = next(iter(study.data('gre_phase')))
        mag = next(iter(study.data('gre_mag')))
        self.assertEqual(phase.name, 'gre_field_mapping_3mm_phase')
        self.assertEqual(mag.name, 'gre_field_mapping_3mm_mag')

    def test_dicom_match(self):
        """Phase/magnitude scans can be told apart by the image-type tag."""
        study = self.create_study(
            TestMatchStudy, 'test_dicom', inputs=self.DICOM_MATCH)
        self._check_matched_names(study)

    def test_order_match(self):
        """Phase/magnitude scans can be told apart by session order."""
        selectors = [
            FilesetSelector('gre_phase', dicom_format,
                            pattern=self.GRE_PATTERN, order=1,
                            is_regex=True),
            FilesetSelector('gre_mag', dicom_format,
                            pattern=self.GRE_PATTERN, order=0,
                            is_regex=True),
        ]
        study = self.create_study(
            TestMatchStudy, 'test_dicom', inputs=selectors)
        self._check_matched_names(study)
def test_repository_roundtrip(self): study = DummyStudy(self.STUDY_NAME, self.repository, processor=LinearProcessor('a_dir'), inputs=[ FilesetSelector('source1', text_format, 'source1'), FilesetSelector('source2', text_format, 'source2'), FilesetSelector('source3', text_format, 'source3'), FilesetSelector('source4', text_format, 'source4') ]) # TODO: Should test out other file formats as well. source_files = ('source1', 'source2', 'source3', 'source4') sink_files = ('sink1', 'sink3', 'sink4') inputnode = pe.Node(IdentityInterface(['subject_id', 'visit_id']), 'inputnode') inputnode.inputs.subject_id = self.SUBJECT inputnode.inputs.visit_id = self.VISIT source = study.source(source_files) sink = study.sink(sink_files) sink.inputs.name = 'repository_sink' sink.inputs.desc = ( "A test session created by repository roundtrip unittest") # Create workflow connecting them together workflow = pe.Workflow('source_sink_unit_test', base_dir=self.work_dir) workflow.add_nodes((source, sink)) workflow.connect(inputnode, 'subject_id', source, 'subject_id') workflow.connect(inputnode, 'visit_id', source, 'visit_id') workflow.connect(inputnode, 'subject_id', sink, 'subject_id') workflow.connect(inputnode, 'visit_id', sink, 'visit_id') for source_name in source_files: if not source_name.endswith('2'): sink_name = source_name.replace('source', 'sink') workflow.connect(source, source_name + PATH_SUFFIX, sink, sink_name + PATH_SUFFIX) workflow.run() # Check local directory was created properly outputs = [ f for f in sorted( os.listdir(self.get_session_dir(from_study=self.STUDY_NAME))) if not (f == DirectoryRepository.FIELDS_FNAME) ] self.assertEqual(outputs, ['.derived', 'sink1.txt', 'sink3.txt', 'sink4.txt'])
def test_brain_extraction_pipelines(self):
    """Run T1 brain extraction followed by manual-WMH-mask registration
    and check all expected derived images were created."""
    study = self.create_study(T1T2Study, 'brain_mask', inputs=[
        FilesetSelector('t1', 'mprage', nifti_gz_format),
        FilesetSelector('t2', 'flair', nifti_gz_format),
        # NOTE(review): argument order here (format before pattern)
        # differs from the two selectors above — confirm FilesetSelector
        # accepts both orderings or whether this is a latent bug.
        FilesetSelector('manual_wmh_mask', nifti_gz_format,
                        'manual_wmh_mask')
    ])
    study.t1_brain_extraction_pipeline().run(work_dir=self.work_dir)
    study.manual_wmh_mask_registration_pipeline().run(
        work_dir=self.work_dir)
    for fname in ('t1_brain.nii.gz', 't2_brain.nii.gz',
                  'brain_mask.nii.gz', 'manual_wmh_mask_coreg.nii.gz'):
        self.assertFilesetCreated(fname, study.name)
def test_module_load_in_map(self):
    """Deriving data that requires environment modules should load and
    then fully unload them, leaving nothing preloaded afterwards."""
    study = self.create_study(
        RequirementsStudy, 'requirements',
        [FilesetSelector('ones', text_format, 'ones')])
    for derived, expected in (('threes', 3), ('fours', 4)):
        items = study.data(derived)
        self.assertEqual(next(iter(items)).value, expected)
    # No modules should remain loaded once the workflows have finished
    self.assertEqual(ModulesEnvironment.preloaded(), {})
def test_multi_multi_study(self):
    """Check parameter propagation through doubly-nested multi-studies
    ('full' and 'partial') and the combined derived output 'g'."""
    study = self.create_study(
        MultiMultiStudy, 'multi_multi',
        [FilesetSelector('ss1_x', text_format, 'ones'),
         FilesetSelector('ss1_y', text_format, 'ones'),
         FilesetSelector('full_a', text_format, 'ones'),
         FilesetSelector('full_b', text_format, 'ones'),
         FilesetSelector('full_c', text_format, 'ones'),
         FilesetSelector('partial_a', text_format, 'ones'),
         FilesetSelector('partial_b', text_format, 'ones'),
         FilesetSelector('partial_c', text_format, 'ones')],
        parameters=[Parameter('full_required_op', 'mul'),
                    Parameter('partial_ss2_product_op', 'mul')])
    self.assertContentsEqual(study.data('g'), 11.0)
    # Test parameter values in MultiStudy ('full' branch)
    for name, expected in (('full_p1', 100), ('full_p2', '200'),
                           ('full_p3', 300.0), ('full_q1', 150),
                           ('full_q2', '250'),
                           ('full_required_op', 'mul')):
        self.assertEqual(study._get_parameter(name).value, expected)
    # Test parameter values in SubStudy ('full' branch)
    ss1 = study.sub_study('full').sub_study('ss1')
    for name, expected in (('o1', 100), ('o2', '200'), ('o3', 300.0)):
        self.assertEqual(ss1._get_parameter(name).value, expected)
    ss2 = study.sub_study('full').sub_study('ss2')
    for name, expected in (('o1', 150), ('o2', '250'), ('o3', 300.0),
                           ('product_op', 'mul')):
        self.assertEqual(ss2._get_parameter(name).value, expected)
    # Test parameter values in MultiStudy ('partial' branch)
    for name, expected in (('partial_p1', 1000),
                           ('partial_ss1_o2', '2'),
                           ('partial_ss1_o3', 3.0),
                           ('partial_ss2_o2', '20'),
                           ('partial_ss2_o3', 30.0),
                           ('partial_ss2_product_op', 'mul')):
        self.assertEqual(study._get_parameter(name).value, expected)
    # Test parameter values in SubStudy ('partial' branch)
    ss1 = study.sub_study('partial').sub_study('ss1')
    for name, expected in (('o1', 1000), ('o2', '2'), ('o3', 3.0)):
        self.assertEqual(ss1._get_parameter(name).value, expected)
    ss2 = study.sub_study('partial').sub_study('ss2')
    for name, expected in (('o1', 1000), ('o2', '20'), ('o3', 30.0),
                           ('product_op', 'mul')):
        self.assertEqual(ss2._get_parameter(name).value, expected)
def test_reg(self):
    """Run ICA decomposition on dynamic PET volumes and check the
    decomposed output was created."""
    selectors = [
        FilesetSelector('pet_volumes', 'pet_image', nifti_gz_format),
    ]
    study = self.create_study(DynamicPETStudy, 'reg', inputs=selectors)
    study.ICA_pipeline().run(work_dir=self.work_dir, plugin='Linear')
    self.assertFilesetCreated('decomposed_file.nii.gz', study.name)