def test_subtract(self):
    """UnwrapPhase should write its output to the explicitly requested out_file.

    Creates a throwaway input file in a temp directory, points the interface
    at an absolute output path, runs it, and asserts the output exists.
    """
    tmp_dir = tempfile.mkdtemp()
    in_file = op.join(tmp_dir, 'in_file.nii.gz')
    with open(in_file, 'w') as f:
        f.write('test')
    out_file = op.join(tmp_dir, 'out_file.nii.gz')
    unwrap = Node(UnwrapPhase(), name='unwrap')
    unwrap.inputs.in_file = in_file
    unwrap.inputs.voxelsize = [2.0, 2.0, 2.0]
    # Bug fix: this assignment was commented out, so the interface never
    # received the path asserted below and the existence check could not
    # hold for this exact location.
    unwrap.inputs.out_file = out_file
    unwrap.run()
    self.assertTrue(op.exists(out_file))
def __call__(self, **kwargs):
    """Execute the wrapped interface inside a content-addressed node directory.

    The node name is the MD5 digest of the (pickled) input hash value, so
    identical inputs map to the same directory. Restores the caller's cwd
    afterwards and fires the optional callback.
    """
    kwargs = modify_paths(kwargs, relative=False)
    interface = self.interface()
    # Assign inputs up front so trait validation fails early.
    interface.inputs.set(**kwargs)
    # Derive a deterministic job name from the hash of the inputs.
    digest = hashlib.md5(pickle.dumps(interface.inputs.get_hashval()))
    job_name = digest.hexdigest()
    cls = interface.__class__
    dir_name = '%s-%s' % (cls.__module__.replace('.', '-'), cls.__name__)
    node = Node(interface, name=job_name)
    node.base_dir = os.path.join(self.base_dir, dir_name)
    saved_cwd = os.getcwd()
    try:
        result = node.run()
    finally:
        # node.run() changes to the node directory - if something goes wrong
        # before it cds back you would end up in strange places
        os.chdir(saved_cwd)
    if self.callback is not None:
        self.callback(dir_name, job_name)
    return result
def __call__(self, **kwargs):
    """Run the wrapped interface in a per-inputs cache directory.

    Builds an MD5 of the pickled input hash value for the node name, runs
    the node under ``self.base_dir``, restores the working directory, and
    invokes the callback (if any) before returning the run result.
    """
    params = modify_paths(kwargs, relative=False)
    iface = self.interface()
    # Setting inputs immediately gives us early argument checking.
    iface.inputs.set(**params)
    hasher = hashlib.new('md5')
    hasher.update(pickle.dumps(iface.inputs.get_hashval()))
    job_name = hasher.hexdigest()
    dir_name = '{}-{}'.format(
        iface.__class__.__module__.replace('.', '-'),
        iface.__class__.__name__,
    )
    node = Node(iface, name=job_name)
    node.base_dir = os.path.join(self.base_dir, dir_name)
    previous_dir = os.getcwd()
    try:
        outcome = node.run()
    finally:
        # node.run() changes to the node directory - if something goes wrong
        # before it cds back you would end up in strange places
        os.chdir(previous_dir)
    if self.callback is not None:
        self.callback(dir_name, job_name)
    return outcome
def _merge_nii(file_list, out_filename):
    """Concatenate NIfTI volumes along time with FSL Merge.

    Returns the path of the merged output file reported by the node run.
    """
    from nipype.pipeline.engine import Node, Workflow
    import nipype.interfaces.fsl as fsl

    merge_node = Node(fsl.Merge(dimension='t'), name='merge')
    merge_node.base_dir = os.getcwd()
    merge_node.inputs.in_files = file_list
    merge_node.inputs.merged_file = out_filename
    run_result = merge_node.run()
    return run_result.outputs.merged_file
def __call__(self, **kwargs):
    """Execute the wrapped interface in a content-addressed node directory.

    The node name is the MD5 digest of the pickled input hash value, so
    identical inputs reuse the same directory under ``self.base_dir``.
    Fires ``self.callback(dir_name, job_name)`` if a callback was set, and
    returns the result of ``node.run()``.
    """
    kwargs = modify_paths(kwargs, relative=False)
    interface = self.interface()
    # Set the inputs early to get some argument checking
    interface.inputs.set(**kwargs)
    # Make a deterministic name for our node from the hashed inputs
    inputs = interface.inputs.get_hashval()
    hasher = hashlib.new('md5')
    hasher.update(pickle.dumps(inputs))
    dir_name = '%s-%s' % (interface.__class__.__module__.replace('.', '-'),
                          interface.__class__.__name__)
    job_name = hasher.hexdigest()
    node = Node(interface, name=job_name)
    node.base_dir = os.path.join(self.base_dir, dir_name)
    cwd = os.getcwd()
    try:
        out = node.run()
    finally:
        # Bug fix: node.run() changes to the node directory; without this
        # finally a failure inside run() left the caller stranded in the
        # node directory (the sibling implementation already guards this).
        os.chdir(cwd)
    if self.callback is not None:
        self.callback(dir_name, job_name)
    return out
def __call__(self, **kwargs):
    """Run the wrapped interface in a per-inputs cache directory.

    Hashes the interface inputs (MD5 of their pickled hash value) to derive
    a stable job name, runs the node under ``self.base_dir``, then invokes
    the optional callback and returns the run result.
    """
    kwargs = modify_paths(kwargs, relative=False)
    interface = self.interface()
    # Set the inputs early to get some argument checking
    interface.inputs.set(**kwargs)
    # Make a deterministic name for our node from the hashed inputs
    inputs = interface.inputs.get_hashval()
    hasher = hashlib.new('md5')
    hasher.update(pickle.dumps(inputs))
    dir_name = '%s-%s' % (interface.__class__.__module__.replace('.', '-'),
                          interface.__class__.__name__)
    job_name = hasher.hexdigest()
    node = Node(interface, name=job_name)
    node.base_dir = os.path.join(self.base_dir, dir_name)
    cwd = os.getcwd()
    try:
        out = node.run()
    finally:
        # Bug fix: node.run() chdirs into the node directory; restore the
        # caller's working directory even when the run raises, matching the
        # guarded sibling implementation of this method.
        os.chdir(cwd)
    if self.callback is not None:
        self.callback(dir_name, job_name)
    return out
def psacnn_workflow(input_file, output_dir, use_preprocess=True, model_file=None,
                    contrast='t1w', use_gpu=True, gpu_id=0,
                    save_label_image=False, save_prob_image=False,
                    patch_size=96, batch_size=4, sample_rate=20000):
    """Run PSACNN segmentation on ``input_file``, writing to ``output_dir``.

    When ``use_preprocess`` is true, the image first goes through a
    conform -> N4 bias correction -> ROBEX skull-strip pipeline before
    PSACNN; otherwise PSACNN runs directly on the input.

    Parameters
    ----------
    input_file : str
        Path to the input image.
    output_dir : str
        Output directory (created if missing).
    use_preprocess : bool
        Whether to run the conform/N4/skull-strip pipeline first.
    model_file : str or None
        Unused here; kept for interface compatibility.
    contrast, patch_size, save_label_image, save_prob_image
        Passed through to the PSACNN interface.
    use_gpu, gpu_id : bool, int
        Select CUDA device; when ``use_gpu`` is false CUDA is hidden.
    batch_size, sample_rate : int
        NOTE(review): these caller-supplied values are unconditionally
        overridden per device below (16/40000 on CPU, 4/20000 on GPU) —
        confirm that is intended.
    """
    # Portable, shell-free replacement for subprocess.call(['mkdir', '-p', ...]).
    os.makedirs(output_dir, exist_ok=True)

    if not use_gpu:
        os.environ["CUDA_DEVICE_ORDER"] = "PCI_BUS_ID"
        os.environ["CUDA_VISIBLE_DEVICES"] = ""
        gpu_id = -1
        batch_size = 16
        sample_rate = 40000
    else:
        os.environ["CUDA_DEVICE_ORDER"] = "PCI_BUS_ID"
        os.environ["CUDA_VISIBLE_DEVICES"] = str(gpu_id)
        batch_size = 4
        sample_rate = 20000

    if use_preprocess:
        preprocess_flow = Workflow(name='preprocess', base_dir=output_dir)
        conform = Node(MRIConvert(conform=True, out_type='niigz',
                                  out_file='conformed.nii.gz'),
                       name='conform')
        n4 = Node(N4BiasFieldCorrection(dimension=3,
                                        bspline_fitting_distance=300,
                                        shrink_factor=3,
                                        n_iterations=[50, 50, 30, 20],
                                        output_image='n4.nii.gz'),
                  name='n4')
        robex = Node(ROBEX(seed=1729, stripped_image='brain.nii.gz'),
                     name='robex')
        psacnn = Node(PSACNN(output_dir=output_dir, contrast=contrast,
                             patch_size=patch_size, batch_size=batch_size,
                             save_label_image=save_label_image,
                             save_prob_image=save_prob_image,
                             sample_rate=sample_rate),
                      name='psacnn')
        preprocess_flow.connect([
            (conform, n4, [('out_file', 'input_image')]),
            (n4, robex, [('output_image', 'input_image')]),
            (robex, psacnn, [('stripped_image', 'input_image')]),
        ])
        preprocess_flow.write_graph(graph2use='orig')
        conform.inputs.in_file = input_file
        preprocess_flow.run('MultiProc', plugin_args={'n_procs': 16})
    else:
        # No preprocessing: run the PSACNN interface directly on the input.
        psacnn = PSACNN(input_image=input_file, output_dir=output_dir,
                        contrast=contrast, patch_size=patch_size,
                        batch_size=batch_size,
                        save_label_image=save_label_image,
                        save_prob_image=save_prob_image,
                        sample_rate=sample_rate)
        psacnn.run()
home = '/Volumes/iang/active/BABIES/BABIES_MAMA' t1w = home + '/' + subject + '/t1w' dest = home + '/' + subject + '/anat' func1 = home + '/' + subject + '/func/run1' func2 = home + '/' + subject + '/func/run2' copyfile((t1w + '/t1w_raw.nii.gz'), (dest + '/t1w_raw.nii.gz')) # In[10]: reorient = Node(fsl.Reorient2Std(in_file=(dest + '/t1w_raw.nii.gz'), out_file=(dest + '/spgrorient.nii.gz'), output_type='NIFTI_GZ'), name='reorient') reorient.run() # In[11]: skullstrip = Node(fsl.BET(in_file=(dest + '/spgrorient.nii.gz'), out_file=(dest + '/spgrbrain.nii.gz'), robust=True, frac=0.5, vertical_gradient=0), name='skullstrip') skullstrip.run() # In[13]: #Preprocess functional run1 func_reorient1 = Node(fsl.Reorient2Std(