def test_default_engine(self):
    tmp = tempfile.mktemp(suffix='.json')
    ce = capsul_engine(tmp)
    ce.save()
    ce2 = None
    try:
        ce2 = capsul_engine(tmp)
        self.assertEqual(ce.execution_context.to_json(),
                         ce2.execution_context.to_json())
        self.assertEqual(ce.database.named_directory('capsul_engine'),
                         ce2.database.named_directory('capsul_engine'))
        if sys.version_info[:2] >= (2, 7):
            self.assertIsInstance(
                ce.get_process_instance(
                    'capsul.pipeline.test.test_pipeline.MyPipeline'),
                Pipeline)
        else:
            self.assertTrue(
                isinstance(
                    ce.get_process_instance(
                        'capsul.pipeline.test.test_pipeline.MyPipeline'),
                    Pipeline))
    finally:
        del ce
        del ce2
        if os.path.exists(tmp):
            os.remove(tmp)
def test_populse_db_engine(self):
    if populse_db is None:
        if sys.version_info[:2] >= (2, 7):
            self.skipTest('populse_db is not installed')
        else:
            return  # no skip exception in python 2.6, so just do nothing
    tmp = tempfile.mktemp(suffix='.sqlite')
    ce = capsul_engine(tmp)
    ce.save()
    ce2 = None
    try:
        ce2 = capsul_engine(tmp)
        self.assertEqual(ce.execution_context.to_json(),
                         ce2.execution_context.to_json())
        self.assertEqual(ce.database.named_directory('capsul_engine'),
                         ce2.database.named_directory('capsul_engine'))
        if sys.version_info[:2] >= (2, 7):
            self.assertIsInstance(
                ce.get_process_instance(
                    'capsul.pipeline.test.test_pipeline.MyPipeline'),
                Pipeline)
        else:
            self.assertTrue(
                isinstance(
                    ce.get_process_instance(
                        'capsul.pipeline.test.test_pipeline.MyPipeline'),
                    Pipeline))
    finally:
        del ce
        del ce2
        if os.path.exists(tmp):
            os.remove(tmp)
def init_process(self, int_name):
    """
    Instantiate the process attribute given a process identifier.

    :param int_name: a process identifier
    """
    if getattr(self, 'study_config', None):
        ce = self.study_config.engine
    else:
        ce = capsul_engine()
    self.process = ce.get_process_instance(int_name)
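
# --- usage sketch (not from the original source) --------------------------
# A minimal, hypothetical host class to show how init_process() resolves the
# engine: self.study_config.engine is used when a study_config is present,
# otherwise a fresh capsul_engine() is created. The class name below is an
# illustrative assumption, not part of the original API; the identifier is
# the test pipeline used in the tests above.

class _ProcessHolder(object):
    study_config = None          # no StudyConfig attached in this sketch
    init_process = init_process  # reuse the method defined above


if __name__ == '__main__':
    holder = _ProcessHolder()
    holder.init_process('capsul.pipeline.test.test_pipeline.MyPipeline')
    print(type(holder.process).__name__)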
def test_populse_db_engine(self):
    if populse_db is None:
        if sys.version_info[:2] >= (2, 7):
            self.skipTest('populse_db is not installed')
        else:
            return  # no skip exception in python 2.6, so just do nothing
    tmp = tempfile.mktemp(suffix='.sqlite')
    ce = capsul_engine(tmp)
    ce.save()
    ce2 = None
    try:
        ce2 = capsul_engine(tmp)
        self.assertEqual(ce.execution_context.to_json(),
                         ce2.execution_context.to_json())
        self.assertEqual(ce.database.named_directory('capsul_engine'),
                         ce2.database.named_directory('capsul_engine'))
        if sys.version_info[:2] >= (2, 7):
            self.assertIsInstance(
                ce.get_process_instance(
                    'capsul.pipeline.test.test_pipeline.MyPipeline'),
                Pipeline)
        else:
            self.assertTrue(
                isinstance(
                    ce.get_process_instance(
                        'capsul.pipeline.test.test_pipeline.MyPipeline'),
                    Pipeline))
    finally:
        del ce
        del ce2
        # garbage collect to ensure the database is closed
        # (otherwise it can cause problems on Windows when removing the
        # sqlite file)
        import gc
        gc.collect()
        if os.path.exists(tmp):
            os.remove(tmp)
def get_capsul_engine():
    """
    Get the global CapsulEngine object used for all operations in the MIA
    application. The engine is created once, when first needed.

    :returns: Config.capsul_engine: capsul.engine.CapsulEngine object
    """
    config = Config()
    config.get_capsul_config()
    if Config.capsul_engine is None:
        Config.capsul_engine = capsul_engine()
        Config().update_capsul_config()
    return Config.capsul_engine
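
# --- usage sketch (not from the original source) --------------------------
# Assumption: this runs inside a MIA session where Config() can load the
# saved preferences. Since the engine is cached on the Config class, every
# call returns the same CapsulEngine, so all process instantiations share
# one engine and one configuration.

if __name__ == '__main__':
    ce = get_capsul_engine()
    assert ce is get_capsul_engine()  # cached singleton
    pipeline = ce.get_process_instance(
        'capsul.pipeline.test.test_pipeline.MyPipeline')
    print(type(pipeline).__name__)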
def set_capsul_config(self, capsul_config_dict):
    """Set the CAPSUL configuration dict into the MIA config.

    This method is used only after editing the Capsul config (in
    File > Mia preferences, Pipeline tab, Edit CAPSUL config button), in
    order to synchronise the new Capsul config with the Mia preferences.

    :param capsul_config_dict: a dict;
        {'engine': {...}, 'engine_modules': [...]}
    """
    self.config['capsul_config'] = capsul_config_dict

    # update MIA values
    engine_config = capsul_config_dict.get('engine')
    new_engine = capsul_engine()
    for environment, config in engine_config.items():
        if environment == 'capsul_engine':
            continue
        new_engine.import_configs(environment, config)
    engine_config = new_engine.settings.export_config_dict('global')

    # afni
    afni = engine_config.get('global', {}).get('capsul.engine.module.afni')
    if afni:
        afni = next(iter(afni.values()))
        afni_path = afni.get('directory')
        use_afni = bool(afni_path)
        if afni_path:
            self.set_afni_path(afni_path)
        self.set_use_afni(use_afni)

    # ants
    ants = engine_config.get('global', {}).get('capsul.engine.module.ants')
    if ants:
        ants = next(iter(ants.values()))
        ants_path = ants.get('directory')
        use_ants = bool(ants_path)
        if ants_path:
            self.set_ants_path(ants_path)
        self.set_use_ants(use_ants)

    # fsl
    fsl = engine_config.get('global', {}).get('capsul.engine.module.fsl')
    use_fsl = False
    if fsl:
        fsl = next(iter(fsl.values()))
        fsl_conf_path = fsl.get('config')
        fsl_dir_path = fsl.get('directory')
        if fsl_conf_path:
            use_fsl = True
            self.set_fsl_config(fsl_conf_path)
            self.set_use_fsl(True)
        # if only the directory parameter has been set, let's try using
        # the config parameter = directory/fsl.sh:
        elif fsl_dir_path:
            use_fsl = True
            self.set_fsl_config(os.path.join(fsl_dir_path, 'fsl.sh'))
            self.set_use_fsl(True)
    if use_fsl is False:
        self.set_use_fsl(False)

    # matlab
    matlab = engine_config.get('global',
                               {}).get('capsul.engine.module.matlab')
    use_matlab = False
    use_mcr = False
    if matlab:
        matlab = next(iter(matlab.values()))
        matlab_path = matlab.get('executable')
        if bool(matlab_path) and os.path.isfile(matlab_path):
            use_matlab = True
        mcr_dir = matlab.get('mcr_directory')
        if bool(mcr_dir) and os.path.isdir(mcr_dir):
            use_mcr = True

    # spm
    spm = engine_config.get('global', {}).get('capsul.engine.module.spm')
    use_spm_standalone = False
    use_spm = False
    if spm:
        # TODO: we only take the first element of the dictionary (the one
        #       that is normally edited in the Capsul config GUI). There is
        #       actually a problem because this means that there may be
        #       hidden config(s)... This can produce bugs and at least
        #       unpredictable results for the user...
        spm = next(iter(spm.values()))
        spm_dir = spm.get('directory', False)
        use_spm_standalone = spm.get('standalone', False)
        if use_spm_standalone and os.path.isdir(spm_dir) and use_mcr:
            pass
        else:
            use_spm_standalone = False
        if (use_spm_standalone is False
                and os.path.isdir(spm_dir)
                and use_matlab):
            use_spm = True
        else:
            use_spm = False

    if use_spm:
        self.set_spm_path(spm_dir)
        self.set_use_spm(True)
        self.set_use_spm_standalone(False)
        self.set_matlab_path(matlab_path)
        self.set_use_matlab(True)
        self.set_use_matlab_standalone(False)
    elif use_spm_standalone:
        self.set_spm_standalone_path(spm_dir)
        self.set_use_spm_standalone(True)
        self.set_use_spm(False)
        self.set_matlab_standalone_path(mcr_dir)
        self.set_use_matlab_standalone(True)
        self.set_use_matlab(False)
    # TODO: Because there are two parameters for matlab (executable and
    #       mcr_directory) in the Capsul config, if the user defines both,
    #       we don't know which one to choose! Here we choose to favour
    #       Matlab over MCR if both are defined; is that desirable?
    elif use_matlab:
        self.set_matlab_path(matlab_path)
        self.set_use_matlab(True)
        self.set_use_matlab_standalone(False)
        self.set_use_spm(False)
        self.set_use_spm_standalone(False)
    elif use_mcr:
        self.set_matlab_standalone_path(mcr_dir)
        self.set_use_matlab_standalone(True)
        self.set_use_matlab(False)
        self.set_use_spm(False)
        self.set_use_spm_standalone(False)
    else:
        self.set_use_matlab(False)
        self.set_use_matlab_standalone(False)
        self.set_use_spm(False)
        self.set_use_spm_standalone(False)

    if (use_matlab and use_mcr
            and use_spm is False and use_spm_standalone is False):
        print('\n The Matlab executable and the mcr_directory parameters '
              'have been set concomitantly in the Capsul configuration. '
              'This leads to an indeterminacy. By default, Matlab is '
              'retained at the expense of MCR.')

    self.update_capsul_config()  # store into capsul engine
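
# --- illustrative input (not from the original source) --------------------
# A hedged sketch of the kind of dict set_capsul_config() expects. The exact
# schema is defined by Capsul's settings / import_configs API; the module
# and parameter names below simply mirror the keys read back by the parsing
# code above ('directory', 'config', 'executable', 'mcr_directory',
# 'standalone'), and all paths are placeholders.

example_capsul_config = {
    'engine': {
        'global': {
            'capsul.engine.module.fsl': {
                'fsl': {'directory': '/usr/share/fsl',
                        'config': '/usr/share/fsl/etc/fslconf/fsl.sh'},
            },
            'capsul.engine.module.matlab': {
                'matlab': {'executable': '/usr/local/bin/matlab'},
            },
            'capsul.engine.module.spm': {
                'spm': {'directory': '/usr/local/spm12',
                        'standalone': False},
            },
        },
    },
    'engine_modules': ['fsl', 'matlab', 'spm'],
}

# Typical call, on a MIA Config instance:
#     Config().set_capsul_config(example_capsul_config)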
def spm_check_call(spm_batch_filename, **kwargs):
    ''' Equivalent to Python subprocess.check_call for SPM batch
    '''
    cmd = spm_command(spm_batch_filename)
    return soma.subprocess.check_call(cmd, **kwargs)


def spm_check_output(spm_batch_filename, **kwargs):
    ''' Equivalent to Python subprocess.check_output for SPM batch
    '''
    cmd = spm_command(spm_batch_filename)
    return soma.subprocess.check_output(cmd, **kwargs)


if __name__ == '__main__':
    from capsul.api import capsul_engine
    from capsul.in_context.spm import spm_call
    import tempfile

    ce = capsul_engine(config={
        'spm': dict(directory='/casa/spm_directory', use=True)})
    with ce.execution_context:
        batch = tempfile.NamedTemporaryFile(suffix='.m')
        # write the batch as bytes: NamedTemporaryFile is opened in binary
        # mode by default
        batch.write(b"fprintf(1, '%s', spm('dir'));")
        batch.flush()
        spm_call(batch.name)
def spm_call(spm_batch_filename, **kwargs):
    ''' Equivalent to Python subprocess.call for SPM batch
    '''
    cmd = spm_command(spm_batch_filename)
    return soma.subprocess.call(cmd, **kwargs)


def spm_check_call(spm_batch_filename, **kwargs):
    ''' Equivalent to Python subprocess.check_call for SPM batch
    '''
    cmd = spm_command(spm_batch_filename)
    return soma.subprocess.check_call(cmd, **kwargs)


def spm_check_output(spm_batch_filename, **kwargs):
    ''' Equivalent to Python subprocess.check_output for SPM batch
    '''
    cmd = spm_command(spm_batch_filename)
    return soma.subprocess.check_output(cmd, **kwargs)


if __name__ == '__main__':
    from capsul.api import capsul_engine
    import tempfile

    ce = capsul_engine()
    ce.global_config.spm.directory = '/casa/spm12_standalone'
    with ce:
        batch = tempfile.NamedTemporaryFile(suffix='.m')
        # write the batch as bytes: NamedTemporaryFile is opened in binary
        # mode by default
        batch.write(b"fprintf(1, '%s', spm('dir'));")
        batch.flush()
        spm_call(batch.name)
def setUp(self):
    self.sqlite_file = str(tempfile.mktemp(suffix='.sqlite'))
    self.ce = capsul_engine(self.sqlite_file)
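
# --- companion sketch (not from the original source) ----------------------
# A minimal tearDown counterpart to the setUp above, assuming the test case
# only needs to drop the engine and remove the temporary sqlite file. The
# explicit garbage collection mirrors the cleanup used in
# test_populse_db_engine() to make sure the database is closed before the
# file is removed (this matters on Windows).

def tearDown(self):
    del self.ce
    import gc
    gc.collect()
    if os.path.exists(self.sqlite_file):
        os.remove(self.sqlite_file)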