def test_err(self):
    rp = ResourcePool()
    rp['sub-A00008399_ses-BAS1_T1w'] = Resource(A00008326_file)

    r_key = R('sub-A00008399_ses-BAS1_T1w')
    anatomical_image = rp[r_key]

    file_basename = PythonJob(function=basename, reference='basename')
    file_basename.path = anatomical_image
    rp[R(r_key, label='base')] = file_basename.path
    rp[R(r_key, label='dir')] = file_basename.dirname

    # A job that always raises, so everything downstream of it must be invalid
    def err(message, path):
        raise Exception(message)

    erred = PythonJob(function=err, reference='erring_job')
    erred.message = Resource('This job has erred')
    erred.path = file_basename.dirname
    rp[R('T1w', label='err')] = erred.no_return

    # Depends on the erring job, so it should also yield an InvalidResource
    err_file_reversed = PythonJob(function=reversed_string,
                                  reference='err_reversed_string')
    err_file_reversed.path = erred.no_return
    rp[R('T1w', label='errbaserev')] = err_file_reversed.reversed

    # Independent of the erring job, so it should still compute normally
    file_reversed = PythonJob(function=reversed_string,
                              reference='reversed_string')
    file_reversed.path = file_basename.dirname
    rp[R('T1w', label='baserev')] = file_reversed.reversed

    for executor in executors:
        res_rp = DependencySolver(rp).execute(executor=executor())

        self.assertIsInstance(res_rp[R('T1w', label='err')], InvalidResource)
        self.assertIsInstance(res_rp[R('T1w', label='errbaserev')], InvalidResource)
        self.assertNotIsInstance(res_rp[R('T1w', label='baserev')], InvalidResource)
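# Note: the `basename` and `reversed_string` helpers used above are defined
# elsewhere in the test module and are not part of this section. The sketch
# below is an assumption, inferred from the outputs the tests read
# (`path`/`dirname` and `reversed`) and from the dict-per-output convention
# visible in the `create_workflow` lambda further down.
import os


def basename(path):
    # Hypothetical helper: split a path into its file name and directory.
    return {'path': os.path.basename(path), 'dirname': os.path.dirname(path)}


def reversed_string(path):
    # Hypothetical helper: reverse the input string.
    return {'reversed': str(path)[::-1]}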
def create_workflow(config, resource_pool, context):
    func = PythonJob(function=lambda x: {'y': x})
    func.x = Resource(config['msg'])
    resource_pool['T1w'] = func.y
    return 'test'
def test_resource_pool_extraction(self):
    slot = ''

    rp = ResourcePool()
    rp['space-orig_T1w'] = Resource(slot)
    rp['space-orig_desc-skullstrip-afni_mask'] = Resource(slot)
    rp['space-orig_desc-skullstrip-bet_mask'] = Resource(slot)
    rp['space-orig_desc-skullstrip-afni+nuis-gsr_bold'] = Resource(slot)
    rp['space-orig_desc-skullstrip-bet+nuis-gsr_bold'] = Resource(slot)
    rp['space-orig_desc-skullstrip-afni+nuis-nogsr_bold'] = Resource(slot)
    rp['space-orig_desc-skullstrip-bet+nuis-nogsr_bold'] = Resource(slot)
    rp['space-MNI_desc-nuis-gsr_mask'] = Resource(slot)
    rp['space-MNI_desc-nuis-nogsr_mask'] = Resource(slot)

    extraction = dict(rp.extract('space-orig_T1w', 'space-orig_mask',
                                 'space-orig_bold', 'space-MNI_mask'))

    self.assertEqual(len(extraction), 4)
    self.assertEqual(
        extraction[R(desc='skullstrip-bet+nuis-gsr')][R('space-orig_T1w')],
        rp[R('space-orig_T1w')])
    self.assertEqual(
        extraction[R(desc='skullstrip-bet+nuis-gsr')][R('space-orig_bold')],
        rp[R('space-orig_desc-skullstrip-bet+nuis-gsr_bold')])
    self.assertEqual(
        extraction[R(desc='skullstrip-bet+nuis-nogsr')][R('space-MNI_mask')],
        rp[R('space-MNI_desc-nuis-nogsr_mask')])
def test_resource_pool_extraction_subsesrun(self):
    rp = ResourcePool()

    subs = 4
    sess = 3
    runs = 2

    for sub, ses in product(range(subs), range(sess)):
        ses_prefix = 'sub-%03d_ses-%03d_' % (sub, ses)
        rp[ses_prefix + 'space-orig_T1w'] = Resource(
            ses_prefix + 'space-orig_T1w')
        rp[ses_prefix + 'space-orig_desc-skullstrip-afni_mask'] = Resource(
            ses_prefix + 'space-orig_desc-skullstrip-afni_mask')
        rp[ses_prefix + 'space-orig_desc-skullstrip-bet_mask'] = Resource(
            ses_prefix + 'space-orig_desc-skullstrip-bet_mask')

    for sub, ses, run in product(range(subs), range(sess), range(runs)):
        run_prefix = 'sub-%03d_ses-%03d_run-%03d_' % (sub, ses, run)
        rp[run_prefix + 'space-orig_desc-skullstrip-afni+nuis-gsr_bold'] = Resource(
            run_prefix + 'space-orig_desc-skullstrip-afni+nuis-gsr_bold')
        rp[run_prefix + 'space-orig_desc-skullstrip-bet+nuis-gsr_bold'] = Resource(
            run_prefix + 'space-orig_desc-skullstrip-bet+nuis-gsr_bold')
        rp[run_prefix + 'space-orig_desc-skullstrip-afni+nuis-nogsr_bold'] = Resource(
            run_prefix + 'space-orig_desc-skullstrip-afni+nuis-nogsr_bold')
        rp[run_prefix + 'space-orig_desc-skullstrip-bet+nuis-nogsr_bold'] = Resource(
            run_prefix + 'space-orig_desc-skullstrip-bet+nuis-nogsr_bold')

    extraction = list(rp[[
        'space-orig_T1w',
        'space-orig_mask',
    ]])
    self.assertEqual(len(extraction), 2 * subs * sess)

    extraction = list(rp[[
        'space-orig_T1w',
        'space-orig_mask',
        'space-orig_bold',
    ]])
    self.assertEqual(len(extraction), 4 * subs * sess * runs)

    extraction = list(rp[[
        'sub-*_space-orig_T1w',
        'sub-*_space-orig_mask',
        'sub-*_space-orig_bold',
    ]])
    self.assertEqual(len(extraction), 4 * sess * runs)
def __setattr__(self, attr, value):
    # Private attributes bypass the input machinery entirely
    if attr.startswith('_'):
        self.__dict__[attr] = value
        return

    # Only inputs declared by the Nipype interface are accepted
    if attr not in self._interface.inputs.visible_traits():
        raise AttributeError(f'Invalid input name: {attr}')

    # Plain values are wrapped so every input is a Resource (or ResourcePool)
    if not isinstance(value, (Resource, ResourcePool)):
        value = Resource(value)

    self._inputs[attr] = value
def __setattr__(self, attr, value):
    # Private attributes bypass the input machinery entirely
    if attr.startswith('_'):
        self.__dict__[attr] = value
        return

    # Plain values are wrapped in a Resource; plain Resource/S3Resource
    # instances are shallow-copied before being stored
    if not isinstance(value, Resource):
        value = Resource(value)
    elif type(value) in (Resource, S3Resource):
        value = copy.copy(value)

    self._inputs[attr] = value
    self._hash = None  # any new input invalidates the cached job hash
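# Minimal usage sketch for the assignment behaviour above (hypothetical job
# and values, not taken from this repository): a plain Python value assigned
# to a job input is presumably stored wrapped in a Resource, while an existing
# Resource is copied before being stored.
job = PythonJob(function=lambda path: {'out': path}, reference='example')
job.path = '/tmp/sub-001_T1w.nii.gz'              # stored as Resource('/tmp/sub-001_T1w.nii.gz')
job.path = Resource('/tmp/sub-001_T1w.nii.gz')    # stored as a copy of this Resource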
def _gather(self, results):
    logger.info('Gathering resources')

    resource_pool = ResourcePool()

    # Results are staged locally first; for S3 outputs they are uploaded at the end
    is_s3_outputs = isinstance(self._ctx.outputs_dir, S3Resource)
    if is_s3_outputs:
        local_output_dir = os.path.join(self._ctx.working_dir, 'outputs')
    else:
        local_output_dir = self._ctx.outputs_dir
    Path(local_output_dir).mkdir(parents=True, exist_ok=True)

    for _, attr in self.graph.nodes.items():
        job = attr['job']
        if not isinstance(job.resource, ComputedResource):
            continue
        job_hash = hash(job)

        references = attr.get('references', [])
        if not references:
            continue

        # A missing or failed result is surfaced as an InvalidResource
        if job_hash in results and not isinstance(results[job_hash], Exception):
            result = results[job_hash]
        else:
            result = InvalidResource(job)

        for key in references:
            if isinstance(result, Path):
                logger.info(f'Setting {result} in {key}')

                ext = os.path.basename(result).split('.', 1)[-1]
                bids_name = job.resource.bids_name
                bids_dir = bids.derivative_location(bids_name, key)

                destination = os.path.join(local_output_dir, bids_dir)
                Path(destination).mkdir(parents=True, exist_ok=True)
                output = os.path.join(destination, f'{key}.{ext}')

                logger.info(f'Copying file from "{result}" to "{output}"')
                shutil.copyfile(result, output)

                bids_file = os.path.join(bids_dir, f'{key}.{ext}')
                result = self._ctx.outputs_dir / bids_file if is_s3_outputs \
                    else Resource(output)

            resource_pool[key] = result

    if is_s3_outputs:
        logger.info('Uploading results to the output bucket...')
        self._ctx.outputs_dir.upload(local_output_dir)

    return resource_pool
def test_resource_pool(self):
    rp = ResourcePool()

    slot = 'output_file'
    tags = ['write_to_mni', 'smooth_before', 'write_at_4mm', 'qc_carpet']
    resource_key = R('atlas-aal_roi-112_desc-skullstripping-afni_mask',
                     tags=tags)
    resource = Resource(slot)

    rp[resource_key] = resource
    self.assertEqual(rp[resource_key], resource)
def load_resource(resource_pool: ResourcePool, ctx: Context):
    inputs_dir = ctx.inputs_dir
    participant_label = ctx.participant_label

    is_s3 = isinstance(inputs_dir, S3Resource)
    walk = inputs_dir.walk if is_s3 \
        else functools.partial(os.walk, inputs_dir, topdown=False)

    for root, dirs, files in walk():
        for f in files:
            logger.debug(f'Processing file {root}/{f}.')
            if 'nii' in f:
                filename: str = f.split('.')[0]
                if participant_label is None or \
                        any(label in filename for label in participant_label):
                    resource_pool[filename] = (
                        inputs_dir % os.path.join(root, f)
                        if is_s3
                        else Resource(os.path.join(root, f))
                    )
                    logger.info(f'Added {filename} to the resource pool.')
def test_parallel(self):
    wait = 3

    rp = ResourcePool()

    delayed1 = PythonJob(function=timestamp, reference='time1')
    delayed1.delay = Resource(wait)
    rp[R('T1w', label='time1')] = delayed1.time

    delayed2 = PythonJob(function=timestamp, reference='time2')
    delayed2.delay = Resource(wait)
    rp[R('T1w', label='time2')] = delayed2.time

    res_rp = DependencySolver(rp).execute(executor=DaskExecution())

    self.assertIn(R('label-time1_T1w'), res_rp)
    self.assertIn(R('label-time2_T1w'), res_rp)

    time1 = res_rp[R('label-time1_T1w')].content
    time2 = res_rp[R('label-time2_T1w')].content

    # To ensure parallelism, both tasks should run 'at the same time',
    # so the difference between their finish times must be less than
    # the time each one took to compute.
    self.assertLess(time1 - time2, wait)

    res_rp = DependencySolver(rp).execute(executor=Execution())

    self.assertIn(R('label-time1_T1w'), res_rp)
    self.assertIn(R('label-time2_T1w'), res_rp)

    time1 = res_rp[R('label-time1_T1w')].content
    time2 = res_rp[R('label-time2_T1w')].content

    # With the sequential executor the jobs run one after the other,
    # so their finish times must differ by at least the delay.
    self.assertGreaterEqual(abs(time1 - time2), wait)
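# `timestamp` is defined elsewhere in the test module; the sketch below is an
# assumption inferred from the test above: it waits for `delay` seconds and
# reports when it finished, following the same dict-per-output convention.
import time


def timestamp(delay):
    # Hypothetical helper: block for `delay` seconds, then return the finish time.
    time.sleep(delay)
    return {'time': time.time()}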
def test_resource_pool_extraction_sameresourcetype(self):
    rp = ResourcePool()
    rp['sub-001_T1w'] = Resource('001-A')
    rp['sub-001_label-initial_T1w'] = Resource('001-B')
    rp['sub-002_T1w'] = Resource('002-A')
    rp['sub-002_label-initial_T1w'] = Resource('002-B')

    for k, srp in rp[['T1w']]:
        sub = k['sub']
        self.assertEqual(srp[R(k, suffix='T1w')], Resource(f'{sub}-A'))
        self.assertEqual(srp[R(k, label='initial', suffix='T1w')],
                         Resource(f'{sub}-B'))
def setUp(self):
    self.rp = ResourcePool()
    self.rp['sub-A00008326_ses-BAS1_T1w'] = Resource(A00008326_file)
    self.rp['sub-A00008399_ses-BAS1_T1w'] = Resource(A00008399_file)
def create_workflow(config: AttrDict, resource_pool: ResourcePool, context: Context):
    for _, rp in resource_pool[['label-reorient_T1w']]:
        anat = rp[R('T1w', label='reorient')]

        train_model = UNet2d(dim_in=config.dim_in,
                             num_conv_block=config.num_conv_block,
                             kernel_root=config.kernel_root)

        if config.unet_model.lower().startswith('s3://'):
            unet_path = S3Resource(config.unet_model,
                                   working_dir=tempfile.mkdtemp())()
        else:
            unet_path = config.unet_model

        checkpoint = torch.load(unet_path, map_location={'cuda:0': 'cpu'})
        train_model.load_state_dict(checkpoint['state_dict'])
        model = nn.Sequential(train_model, nn.Softmax2d())

        # create a node called unet_mask
        unet_mask = PythonJob(function=predict_volumes, reference='unet_mask')
        unet_mask.model = Resource(model)
        unet_mask.cimg_in = anat

        # Revised mask with ANTs
        # fslmaths <whole head> -mul <mask> brain.nii.gz
        unet_masked_brain = NipypeJob(
            interface=fsl.MultiImageMaths(op_string="-mul %s"),
            reference='unet_masked_brain')
        unet_masked_brain.in_file = anat
        unet_masked_brain.operand_files = unet_mask.output_path

        # flirt -v -dof 6 -in brain.nii.gz -ref NMT_SS_0.5mm.nii.gz -o brain_rot2atl -omat brain_rot2atl.mat -interp sinc
        # TODO: change it to an ANTs linear transform
        native_brain_to_template_brain = NipypeJob(
            interface=fsl.FLIRT(reference=config.template_brain_only_for_anat,
                                dof=6,
                                interp='sinc'),
            reference='native_brain_to_template_brain')
        native_brain_to_template_brain.in_file = unet_masked_brain.out_file

        # flirt -in head.nii.gz -ref NMT_0.5mm.nii.gz -o head_rot2atl -applyxfm -init brain_rot2atl.mat
        # TODO: change it to an ANTs linear transform
        native_head_to_template_head = NipypeJob(
            interface=fsl.FLIRT(reference=config.template_skull_for_anat,
                                apply_xfm=True),
            reference='native_head_to_template_head')
        native_head_to_template_head.in_file = anat
        native_head_to_template_head.in_matrix_file = native_brain_to_template_brain.out_matrix_file

        # fslmaths NMT_SS_0.5mm.nii.gz -bin templateMask.nii.gz
        template_brain_mask = NipypeJob(
            interface=fsl.maths.MathsCommand(args='-bin'),
            reference='template_brain_mask')
        template_brain_mask.in_file = config.template_brain_only_for_anat

        # ANTS 3 -m CC[head_rot2atl.nii.gz,NMT_0.5mm.nii.gz,1,5] -t SyN[0.25] -r Gauss[3,0] -o atl2T1rot -i 60x50x20 --use-Histogram-Matching --number-of-affine-iterations 10000x10000x10000x10000x10000 --MI-option 32x16000
        ants_template_head_to_template = NipypeJob(
            interface=ants.Registration(),
            reference='template_head_to_template')
        ants_template_head_to_template.metric = ['CC']
        ants_template_head_to_template.metric_weight = [1, 5]
        ants_template_head_to_template.moving_image = config.template_skull_for_anat
        ants_template_head_to_template.transforms = ['SyN']
        ants_template_head_to_template.transform_parameters = [(0.25, )]
        ants_template_head_to_template.interpolation = 'NearestNeighbor'
        ants_template_head_to_template.number_of_iterations = [[60, 50, 20]]
        ants_template_head_to_template.smoothing_sigmas = [[0.6, 0.2, 0.0]]
        ants_template_head_to_template.shrink_factors = [[4, 2, 1]]
        ants_template_head_to_template.convergence_threshold = [1.e-8]
        ants_template_head_to_template.fixed_image = native_head_to_template_head.out_file

        # antsApplyTransforms -d 3 -i templateMask.nii.gz -t atl2T1rotWarp.nii.gz atl2T1rotAffine.txt -r brain_rot2atl.nii.gz -o brain_rot2atl_mask.nii.gz
        template_head_transform_to_template = NipypeJob(
            interface=ants.ApplyTransforms(dimension=3),
            reference='template_head_transform_to_template')
        template_head_transform_to_template.input_image = template_brain_mask.out_file
        template_head_transform_to_template.reference_image = native_brain_to_template_brain.out_file
        template_head_transform_to_template.transforms = ants_template_head_to_template.forward_transforms

        # convert_xfm -omat brain_rot2native.mat -inverse brain_rot2atl.mat
        invt = NipypeJob(interface=fsl.ConvertXFM(invert_xfm=True),
                         reference='convert_xfm')
        invt.in_file = native_brain_to_template_brain.out_matrix_file

        # flirt -in brain_rot2atl_mask.nii.gz -ref brain.nii.gz -o brain_mask.nii.gz -applyxfm -init brain_rot2native.mat
        template_brain_to_native_brain = NipypeJob(
            interface=fsl.FLIRT(apply_xfm=True),
            reference='template_brain_to_native_brain')
        template_brain_to_native_brain.in_file = template_head_transform_to_template.output_image
        template_brain_to_native_brain.reference = unet_masked_brain.out_file
        template_brain_to_native_brain.in_matrix_file = invt.out_file

        # fslmaths brain_mask.nii.gz -thr .5 -bin brain_mask_thr.nii.gz
        refined_mask = NipypeJob(interface=fsl.Threshold(thresh=0.5, args='-bin'),
                                 reference='refined_mask')
        refined_mask.in_file = template_brain_to_native_brain.out_file

        # get a new brain with the refined mask
        refined_brain = NipypeJob(
            interface=fsl.MultiImageMaths(op_string="-mul %s"),
            reference='refined_brain')
        refined_brain.in_file = anat
        refined_brain.operand_files = refined_mask.out_file

        rp[R('T1w', desc='skullstrip-unet', suffix='mask')] = refined_mask.out_file
        rp[R('T1w', desc='skullstrip-unet', suffix='brain')] = refined_brain.out_file