def test_cycle(self):
    """Building the dependency graph must reject cyclic job wiring.

    Covers a three-job ring (1 -> 2 -> 3 -> 1) and the degenerate
    one-job self-loop; both must raise ValueError when the graph is
    constructed.  (The original bound the graph to an unused local `G`;
    the dead binding was removed.)
    """
    # Three jobs wired in a ring.
    rp = ResourcePool()
    file_basename1 = PythonJob(function=basename, reference='basename1')
    file_basename2 = PythonJob(function=basename, reference='basename2')
    file_basename3 = PythonJob(function=basename, reference='basename3')
    file_basename2.path = file_basename1.path
    file_basename3.path = file_basename2.path
    file_basename1.path = file_basename3.path
    rp[R('T1w')] = file_basename1.path
    with self.assertRaises(ValueError):
        DependencySolver(rp).graph  # cycle detected during graph build

    # A job feeding its own input is the smallest possible cycle.
    rp = ResourcePool()
    file_basename1 = PythonJob(function=basename, reference='basename1')
    file_basename1.path = file_basename1.path
    rp[R('T1w')] = file_basename1.path
    with self.assertRaises(ValueError):
        DependencySolver(rp).graph
def test_resource_pool_extraction(self):
    """Extraction fans resources out into one branch per strategy combo.

    The pool holds two skull-strip variants and two nuisance variants;
    extracting four resource types must yield 2 x 2 = 4 branches, each
    mapping generic keys back to the matching pool entries.
    """
    slot = ''
    rp = ResourcePool()
    rp['space-orig_T1w'] = Resource(slot)
    rp['space-orig_desc-skullstrip-afni_mask'] = Resource(slot)
    rp['space-orig_desc-skullstrip-bet_mask'] = Resource(slot)
    rp['space-orig_desc-skullstrip-afni+nuis-gsr_bold'] = Resource(slot)
    rp['space-orig_desc-skullstrip-bet+nuis-gsr_bold'] = Resource(slot)
    rp['space-orig_desc-skullstrip-afni+nuis-nogsr_bold'] = Resource(slot)
    rp['space-orig_desc-skullstrip-bet+nuis-nogsr_bold'] = Resource(slot)
    rp['space-MNI_desc-nuis-gsr_mask'] = Resource(slot)
    rp['space-MNI_desc-nuis-nogsr_mask'] = Resource(slot)

    extraction = dict(
        rp.extract('space-orig_T1w', 'space-orig_mask',
                   'space-orig_bold', 'space-MNI_mask'))

    # 2 skullstrip strategies x 2 nuisance strategies = 4 branches.
    self.assertEqual(len(extraction), 4)

    # The strategy-agnostic T1w is shared by every branch.
    self.assertEqual(
        extraction[R(desc='skullstrip-bet+nuis-gsr')][R('space-orig_T1w')],
        rp[R('space-orig_T1w')])

    # A strategy-specific bold maps back to its exact pool entry.
    # NOTE: the original repeated this assertion three times verbatim;
    # the duplicates added no coverage and were removed.
    self.assertEqual(
        extraction[R(desc='skullstrip-bet+nuis-gsr')][R('space-orig_bold')],
        rp[R('space-orig_desc-skullstrip-bet+nuis-gsr_bold')])

    self.assertEqual(
        extraction[R(desc='skullstrip-bet+nuis-nogsr')][R('space-MNI_mask')],
        rp[R('space-MNI_desc-nuis-nogsr_mask')])
def test_err(self):
    """A failing job yields InvalidResource for its own output and for
    every downstream consumer, while independent branches stay valid."""
    pool = ResourcePool()
    pool['sub-A00008399_ses-BAS1_T1w'] = Resource(A00008326_file)
    r_key = R('sub-A00008399_ses-BAS1_T1w')
    anat = pool[r_key]

    base_job = PythonJob(function=basename, reference='basename')
    base_job.path = anat
    pool[R(r_key, label='base')] = base_job.path
    pool[R(r_key, label='dir')] = base_job.dirname

    # Helper that always fails; parameter names must match the inputs
    # assigned to the job below (PythonJob binds inputs by name).
    def err(message, path):
        raise Exception(message)

    failing_job = PythonJob(function=err, reference='erring_job')
    failing_job.message = Resource('This jobs has erred')
    failing_job.path = base_job.dirname
    pool[R('T1w', label='err')] = failing_job.no_return

    # Consumer of the failed output: must be invalidated transitively.
    rev_after_err = PythonJob(function=reversed_string,
                              reference='err_reversed_string')
    rev_after_err.path = failing_job.no_return
    pool[R('T1w', label='errbaserev')] = rev_after_err.reversed

    # Independent branch: must remain valid.
    rev_ok = PythonJob(function=reversed_string, reference='reversed_string')
    rev_ok.path = base_job.dirname
    pool[R('T1w', label='baserev')] = rev_ok.reversed

    for executor_cls in executors:
        res_rp = DependencySolver(pool).execute(executor=executor_cls())
        self.assertIsInstance(res_rp[R('T1w', label='err')], InvalidResource)
        self.assertIsInstance(res_rp[R('T1w', label='errbaserev')],
                              InvalidResource)
        self.assertNotIsInstance(res_rp[R('T1w', label='baserev')],
                                 InvalidResource)
def test_load(self):
    """loader.load resolves a workflow callable from a filesystem path."""
    here = os.path.dirname(os.path.abspath(__file__))
    workflow_dir = os.path.join(here, 'data/fake_workflow')
    workflow = loader.load(workflow_dir)
    self.assertEqual(workflow({'msg': 'mocks'}, ResourcePool(), None), 'test')
def test_resource_pool_extraction_subsesrun(self):
    """Extraction counts scale with subjects, sessions and runs."""
    rp = ResourcePool()
    subs, sess, runs = 4, 3, 2

    # Session-level anatomical resources (T1w plus one mask per
    # skull-strip strategy); insertion order matches the original.
    for sub, ses in product(range(subs), range(sess)):
        ses_prefix = 'sub-%03d_ses-%03d_' % (sub, ses)
        keys = ['space-orig_T1w'] + [
            'space-orig_desc-skullstrip-%s_mask' % strip
            for strip in ('afni', 'bet')
        ]
        for key in keys:
            rp[ses_prefix + key] = Resource(ses_prefix + key)

    # Run-level functional resources, one per strategy combination
    # (gsr/afni, gsr/bet, nogsr/afni, nogsr/bet — original order).
    for sub, ses, run in product(range(subs), range(sess), range(runs)):
        run_prefix = 'sub-%03d_ses-%03d_run-%03d_' % (sub, ses, run)
        for nuis in ('gsr', 'nogsr'):
            for strip in ('afni', 'bet'):
                key = ('space-orig_desc-skullstrip-%s+nuis-%s_bold'
                       % (strip, nuis))
                rp[run_prefix + key] = Resource(run_prefix + key)

    # Anatomy only: one branch per skull-strip strategy per session.
    extraction = list(rp[['space-orig_T1w', 'space-orig_mask']])
    self.assertEqual(len(extraction), 2 * subs * sess)

    # Adding bold multiplies by nuisance strategies and runs.
    extraction = list(rp[[
        'space-orig_T1w', 'space-orig_mask', 'space-orig_bold',
    ]])
    self.assertEqual(len(extraction), 4 * subs * sess * runs)

    # Wildcarding the subject collapses the subject dimension.
    extraction = list(rp[[
        'sub-*_space-orig_T1w', 'sub-*_space-orig_mask',
        'sub-*_space-orig_bold',
    ]])
    self.assertEqual(len(extraction), 4 * sess * runs)
def _gather(self, results):
    """Collect computed job outputs into a ResourcePool.

    File results are copied into the (local or staged-for-S3) output
    directory under their BIDS derivative location; failed or missing
    jobs are surfaced as InvalidResource.  When outputs go to S3, the
    staged directory is uploaded at the end.

    Parameters
    ----------
    results : dict
        Mapping of job hash to execution result (value or Exception).

    Returns
    -------
    ResourcePool
        Gathered resources keyed by reference.
    """
    logger.info('Gathering resources')
    resource_pool = ResourcePool()

    # Stage files locally first when the final destination is S3.
    is_s3_outputs = isinstance(self._ctx.outputs_dir, S3Resource)
    if is_s3_outputs:
        local_output_dir = os.path.join(self._ctx.working_dir, 'outputs')
    else:
        local_output_dir = self._ctx.outputs_dir
    Path(local_output_dir).mkdir(parents=True, exist_ok=True)

    for _, attr in self.graph.nodes.items():
        job = attr['job']
        if not isinstance(job.resource, ComputedResource):
            continue
        job_hash = hash(job)
        references = attr.get('references', [])
        if not references:
            continue

        if job_hash in results and not isinstance(results[job_hash],
                                                  Exception):
            result = results[job_hash]
        else:
            result = InvalidResource(job)

        for key in references:
            # BUGFIX: use a per-key variable instead of rebinding
            # `result` — rebinding made every reference after the first
            # receive the first key's resource instead of its own copy.
            value = result
            if isinstance(value, Path):
                logger.info(f'Setting {value} in {key}')
                # split('.', 1) keeps compound extensions (e.g. nii.gz).
                ext = os.path.basename(value).split('.', 1)[-1]
                bids_name = job.resource.bids_name
                bids_dir = bids.derivative_location(bids_name, key)
                destination = os.path.join(local_output_dir, bids_dir)
                Path(destination).mkdir(parents=True, exist_ok=True)
                output = os.path.join(destination, f'{key}.{ext}')
                logger.info(f'Copying file from "{value}" to "{output}"')
                shutil.copyfile(value, output)
                bids_file = os.path.join(bids_dir, f'{key}.{ext}')
                value = (self._ctx.outputs_dir / bids_file
                         if is_s3_outputs else Resource(output))
            resource_pool[key] = value

    if is_s3_outputs:
        logger.info("Uploading result to the output bucket.....")
        self._ctx.outputs_dir.upload(local_output_dir)

    return resource_pool
def test_resource_pool_extraction_sameresourcetype(self):
    """Grouped extraction keeps labelled and unlabelled T1w distinct."""
    pool = ResourcePool()
    fixtures = {
        'sub-001_T1w': '001-A',
        'sub-001_label-initial_T1w': '001-B',
        'sub-002_T1w': '002-A',
        'sub-002_label-initial_T1w': '002-B',
    }
    for key, content in fixtures.items():
        pool[key] = Resource(content)

    # Each subject group must resolve both variants independently.
    for key, sub_pool in pool[['T1w']]:
        sub = key['sub']
        self.assertEqual(sub_pool[R(key, suffix='T1w')],
                         Resource(f'{sub}-A'))
        self.assertEqual(sub_pool[R(key, label='initial', suffix='T1w')],
                         Resource(f'{sub}-B'))
def test_resource_pool(self):
    """A resource stored under a tagged key is retrievable by that key."""
    pool = ResourcePool()
    key = R(
        'atlas-aal_roi-112_desc-skullstripping-afni_mask',
        tags=['write_to_mni', 'smooth_before', 'write_at_4mm', 'qc_carpet'],
    )
    stored = Resource('output_file')
    pool[key] = stored
    self.assertEqual(pool[key], stored)
def test_connect(self):
    """Connected Nipype jobs appear as linked nodes in the graph."""
    pool = ResourcePool()
    upstream = NipypeJob(EngineTestInterface(), reference="mod1")
    downstream = NipypeJob(EngineTestInterface(), reference="mod2")
    downstream.input1 = upstream.output1
    pool[ResourceKey('T1w')] = downstream.output1

    graph = DependencySolver(pool).graph
    self.assertIn(id(upstream), graph.nodes)
    self.assertIn(id(downstream), graph.nodes)
    # A path must exist between the two connected jobs.
    self.assertTrue(
        networkx.algorithms.bidirectional_dijkstra(
            graph, id(upstream), id(downstream)))
def build(context: Context, disable_concurrency=False, **kwargs) -> ResourcePool:
    """Assemble the pipeline from the context and execute it.

    Parameters
    ----------
    context : Context
        Pipeline configuration plus working/output directories.
    disable_concurrency : bool
        When True, run sequentially instead of through Dask.

    Returns
    -------
    ResourcePool
        The resource pool produced by the execution.
    """
    pool = ResourcePool()
    load_resource(pool, context)

    # Let every configured step register its jobs into the pool.
    for entry, params in schema.steps(context.pipeline_config):
        loader.load(entry)(params, pool, context)

    logger.info('Executing pipeline...')
    executor = (Execution() if disable_concurrency
                else DaskExecution(ctx=context))
    res_rp = DependencySolver(pool, context).execute(executor=executor)
    logger.info('Execution Completed.')

    # Drop scratch space unless the user asked to keep it.
    if not context.save_working_dir:
        shutil.rmtree(context.working_dir)
    return res_rp
def _build(self, config):
    """Dump a one-step pipeline config to a temp YAML file, build the
    CLI context from it, and return (context, DependencySolver)."""
    pipeline = {
        'radiomeSchemaVersion': 1.0,
        'class': 'pipeline',
        'name': 'test',
        'steps': [
            {'step1': {'run': self._module, 'in': config}},
        ],
    }
    # Everything that needs the file stays inside the `with`: the
    # NamedTemporaryFile is deleted as soon as the block exits.
    with tempfile.NamedTemporaryFile(suffix='.yml', mode='a+') as tf:
        yaml.dump(pipeline, tf)
        tf.seek(0)
        self._args.config_file = tf.name
        ctx = cli.build_context(self._args)
        pool = ResourcePool()
        load_resource(pool, ctx)
        loader.load(self._module)(config, pool, ctx)
        solver = DependencySolver(pool, ctx)
    return ctx, solver
def test_parallel(self):
    """Dask runs independent jobs concurrently; Execution runs serially."""
    wait = 3
    pool = ResourcePool()
    # Two independent jobs that each sleep for `wait` seconds.
    for tag in ('time1', 'time2'):
        job = PythonJob(function=timestamp, reference=tag)
        job.delay = Resource(wait)
        pool[R('T1w', label=tag)] = job.time

    concurrent_rp = DependencySolver(pool).execute(executor=DaskExecution())
    self.assertIn(R('label-time1_T1w'), concurrent_rp)
    self.assertIn(R('label-time2_T1w'), concurrent_rp)
    first = concurrent_rp[R('label-time1_T1w')].content
    second = concurrent_rp[R('label-time2_T1w')].content
    # To ensure parallelism, both tasks should run 'at the same time',
    # so the gap between their finish times must be smaller than the
    # time each one took to compute.
    self.assertLess(first - second, wait)

    serial_rp = DependencySolver(pool).execute(executor=Execution())
    self.assertIn(R('label-time1_T1w'), serial_rp)
    self.assertIn(R('label-time2_T1w'), serial_rp)
    first = serial_rp[R('label-time1_T1w')].content
    second = serial_rp[R('label-time2_T1w')].content
    # Sequential execution finishes the jobs at least `wait` apart.
    self.assertGreaterEqual(abs(first - second), wait)
def setUp(self):
    """Seed the pool with the two fixture anatomical images."""
    self.rp = ResourcePool()
    fixtures = {
        'sub-A00008326_ses-BAS1_T1w': A00008326_file,
        'sub-A00008399_ses-BAS1_T1w': A00008399_file,
    }
    for key, path in fixtures.items():
        self.rp[key] = Resource(path)