def test_pathmgr_model(self):
    """Model-level paths derived by PathManager match the stub roots."""
    path_mgr = core.PathManager()
    test_case = DataManager(self.case_dict)
    model_paths = path_mgr.model_paths(test_case)
    expected = {
        'MODEL_DATA_DIR': 'TEST_MODEL_DATA_ROOT/A',
        'MODEL_WK_DIR': 'TEST_WORKING_DIR/MDTF_A_1900_2100',
    }
    for key, value in expected.items():
        self.assertEqual(model_paths[key], value)
def __init__(self, unittest=False):
    """Constructor. Only executed once, since this is a
    :class:`~src.util.Singleton`. Reads and parses data in CMIP6_CV.json.
    """
    if unittest:
        # value not used: tests mock out the read_json call below with an
        # actual translation table to use for the test
        file_ = 'dummy_filename'
    else:
        paths = core.PathManager()
        file_ = os.path.join(paths.CODE_ROOT, 'data',
            'cmip6-cmor-tables', 'Tables', 'CMIP6_CV.json')
    raw_json = util.read_json(file_, log=_log)
    self._contents = raw_json['CV']
    # drop top-level entries that aren't needed
    for unused_key in ('product', 'version_metadata',
        'required_global_attributes', 'further_info_url', 'Conventions',
        'license'):
        del self._contents[unused_key]
    # munge table_ids: replace the bare list of table names with a lookup
    # table of parsed CMIP6_MIPTable attribute dicts, keyed by table name
    self._contents['table_id'] = {
        tbl: dc.asdict(CMIP6_MIPTable(tbl))
        for tbl in self._contents['table_id']
    }
    self.cv = dict()
    self._lookups = dict()
def setup(self, data_source):
    """Configuration set by the DataSource on the POD (after the POD is
    initialized, but before pre-run checks.)
    """
    # set up paths/working directories
    path_mgr = core.PathManager()
    for attr_name, attr_value in path_mgr.pod_paths(self, data_source).items():
        setattr(self, attr_name, attr_value)
    self.setup_pod_directories()
    self.set_entry_point()
    self.set_interpreter()
    config = core.ConfigManager()
    if config.get('overwrite_file_metadata', False):
        self.log.warning(('User has disabled preprocessing functionality that '
            'uses input metadata.'), tags=util.ObjectLogTag.BANNER)
    # set up env vars
    self.pod_env_vars.update(data_source.env_vars)

    self.nc_largefile = config.get('large_file', False)
    # NCL's setfileoption() and netCDF4-python/xarray spell the format
    # names differently; select by (large-file?, ncl?) combination
    nc_format_names = {
        (True, True): "NetCDF4",
        (True, False): "NETCDF4",
        (False, True): "NetCDF4Classic",
        (False, False): "NETCDF4_CLASSIC",
    }
    uses_ncl = (self.program == 'ncl')
    self.pod_env_vars['MDTF_NC_FORMAT'] = \
        nc_format_names[(self.nc_largefile, uses_ncl)]
def setUp_config_singletons(config=None, paths=None, pods=None, unittest=True):
    """Instantiate the framework's configuration Singletons from
    dummy_config.json, with optional overrides for config values, paths
    and POD data.
    """
    test_dir = os.path.dirname(os.path.realpath(__file__))
    code_root = os.path.dirname(os.path.dirname(test_dir))
    cli_obj = cli.MDTFTopLevelArgParser(
        code_root,
        skip_defaults=True,
        argv=f"-f {os.path.join(test_dir, 'dummy_config.json')}"
    )
    cli_obj.config = vars(cli_obj.parse_args())
    if config:
        cli_obj.config.update(config)

    PodDataTuple = collections.namedtuple(
        'PodDataTuple', 'sorted_lists pod_data realm_data'
    )
    dummy_pod_data = PodDataTuple(
        pod_data=pods,
        realm_data=dict(),
        sorted_lists={'pods': [], 'realms': []}
    )

    _ = core.ConfigManager(cli_obj, dummy_pod_data, unittest=unittest)
    path_mgr = core.PathManager(cli_obj, unittest=unittest)
    if paths:
        path_mgr.update(paths)
    _ = core.VariableTranslator(code_root, unittest=unittest)
    _ = core.TempDirManager(None, unittest=unittest)
def validate_commands(self):
    """Produces the shell command(s) to validate the POD's runtime environment
    (ie, check for all requested third-party module dependencies.)
    Dependencies are passed as arguments to the shell script
    ``src/validate_environment.sh``, which is invoked in the POD's subprocess
    before the POD is run.

    Returns:
        (:py:obj:`str`): Command-line invocation to validate the POD's
            runtime environment.
    """
    def _flag_args(flag, items):
        # render e.g. ('-p', ['a', 'b']) as ' -p a -p b'
        return ''.join(f' {flag} {item}' for item in items)

    paths = core.PathManager()
    script_path = os.path.join(paths.CODE_ROOT, 'src',
        'validate_environment.sh')
    reqs = self.pod.runtime_requirements
    command = (
        script_path
        + ' -v'
        + _flag_args('-p', list(reqs))
        + _flag_args('-z', list(self.pod.pod_env_vars))
        + _flag_args('-a', reqs.get('python', []))
        + _flag_args('-b', reqs.get('ncl', []))
        + _flag_args('-c', reqs.get('Rscript', []))
    )
    return [command]
def test_pathmgr_pod(self):
    """POD-level paths derived by PathManager match the stub roots."""
    path_mgr = core.PathManager()
    test_case = DataManager(self.case_dict)
    pod = Diagnostic('AA')
    pod_paths = path_mgr.pod_paths(pod, test_case)
    expected = {
        'POD_CODE_DIR': 'TEST_CODE_ROOT/diagnostics/AA',
        'POD_OBS_DATA': 'TEST_OBS_DATA_ROOT/AA',
        'POD_WK_DIR': 'TEST_WORKING_DIR/MDTF_A_1900_2100/AA',
    }
    for key, value in expected.items():
        self.assertEqual(pod_paths[key], value)
def test_pathmgr_global_asserterror(self):
    """parse() must reject a settings dict missing required path keys."""
    settings = {
        'OBS_DATA_ROOT': 'B', 'MODEL_DATA_ROOT': 'C',
        'WORKING_DIR': 'D', 'OUTPUT_DIR': 'E'
    }
    path_mgr = core.PathManager()
    with self.assertRaises(AssertionError):
        path_mgr.parse(settings, list(settings.keys()))
def make_tar_file(self):
    """Make the tar file locally in WK_DIR and gcp to destination,
    since OUT_DIR might be mounted read-only.
    """
    path_mgr = core.PathManager()
    # parent class builds the tarball in the (writable) working dir
    local_tar_path = super(GFDLHTMLOutputManager, self).make_tar_file()
    dest_tar_path = os.path.join(
        path_mgr.OUTPUT_DIR, os.path.basename(local_tar_path)
    )
    gfdl_util.gcp_wrapper(local_tar_path, dest_tar_path, log=self.obj.log)
    return dest_tar_path
def _set_case_root_dir(self, log=_log):
    """Additional logic to set CASE_ROOT_DIR from MODEL_DATA_ROOT.

    Resolution order: an explicitly-set ``CASE_ROOT_DIR`` wins; otherwise
    fall back to the global config value, then to the PathManager's
    ``MODEL_DATA_ROOT``. Exits the framework via
    :func:`util.exit_handler` if the resulting directory is unset or
    doesn't exist.
    """
    config = core.ConfigManager()
    paths = core.PathManager()
    if not self.CASE_ROOT_DIR and config.CASE_ROOT_DIR:
        log.debug("Using global CASE_ROOT_DIR = '%s'.", config.CASE_ROOT_DIR)
        self.CASE_ROOT_DIR = config.CASE_ROOT_DIR
    if not self.CASE_ROOT_DIR:
        model_root = getattr(paths, 'MODEL_DATA_ROOT', None)
        log.debug("Setting CASE_ROOT_DIR to MODEL_DATA_ROOT = '%s'.", model_root)
        self.CASE_ROOT_DIR = model_root
    # verify CASE_ROOT_DIR exists. Guard against None/'' first: the
    # fallbacks above may leave CASE_ROOT_DIR unset, and
    # os.path.isdir(None) raises TypeError instead of returning False,
    # which would skip the intended critical-log-and-exit path.
    if not self.CASE_ROOT_DIR or not os.path.isdir(self.CASE_ROOT_DIR):
        log.critical("Data directory CASE_ROOT_DIR = '%s' not found.",
            self.CASE_ROOT_DIR)
        util.exit_handler(code=1)
def __init__(self, case_dict, parent):
    """Read GCP transfer settings from the global config; in frepp mode,
    restore the non-unique working/output dir names so they're overwritten.
    """
    self.catalog = None
    super(GFDL_GCP_FileDataSourceBase, self).__init__(case_dict, parent)

    config = core.ConfigManager()
    self.frepp_mode = config.get('frepp', False)
    self.dry_run = config.get('dry_run', False)
    self.timeout = config.get('file_transfer_timeout', 0)

    if not self.frepp_mode:
        return
    path_mgr = core.PathManager()
    self.overwrite = True
    # flag to not overwrite config and .tar: want overwrite for frepp
    self.file_overwrite = True
    # if overwrite=False, WK_DIR & OUT_DIR will have been set to a
    # unique name in parent's init. Set it back so it will be overwritten.
    model_dirs = path_mgr.model_paths(self, overwrite=True)
    self.MODEL_WK_DIR = model_dirs.MODEL_WK_DIR
    self.MODEL_OUT_DIR = model_dirs.MODEL_OUT_DIR
def __init__(self, log=_log):
    """Locate the conda installation and enumerate the environments that
    this manager can use.

    Scans ``src/conda`` for ``env_*.yml`` files to build the list of
    available environment definitions, runs ``conda_init.sh`` to discover
    the conda executable and install root, and determines the directory
    where environments live.

    Raises:
        util.PodRuntimeError: if the conda executable can't be located.
    """
    super(CondaEnvironmentManager, self).__init__(log=log)
    paths = core.PathManager()
    self.code_root = paths.CODE_ROOT
    self.conda_dir = os.path.join(self.code_root, 'src', 'conda')
    # collect environment names from env_*.yml files, stripping the
    # 'env_' prefix and '.yml' extension
    self.env_list = []
    for file_ in os.listdir(self.conda_dir):
        if file_.endswith('.yml'):
            name, _ = os.path.splitext(file_)
            self.env_list.append(name.split('env_')[-1])
    # find conda executable
    # conda_init for bash defines conda as a shell function; will get error
    # if we try to call the conda executable directly
    try:
        conda_info = util.run_shell_command(
            f"{self.conda_dir}/conda_init.sh {paths.get('conda_root','')}",
            log=self.log
        )
        # conda_init.sh prints KEY=VALUE lines; pick out the two we need.
        # NOTE(review): assumes each line contains exactly one '=' — a
        # value containing '=' would make the unpack raise; verify script
        # output format.
        for line in conda_info:
            key, val = line.split('=')
            if key == '_CONDA_EXE':
                self.conda_exe = val
                assert os.path.exists(self.conda_exe)
            elif key == '_CONDA_ROOT':
                self.conda_root = val
    except Exception as exc:
        raise util.PodRuntimeError("Can't find conda.") from exc
    # find where environments are installed
    if 'conda_env_root' in paths and paths.conda_env_root:
        self.conda_env_root = paths.conda_env_root
        if not os.path.isdir(self.conda_env_root):
            self.log.warning("Conda env directory '%s' not found; creating.",
                self.conda_env_root)
            os.makedirs(self.conda_env_root) # recursive mkdir if needed
    else:
        # only true in default anaconda install, may need to fix
        self.conda_env_root = os.path.join(self.conda_root, 'envs')
def tearDown_config_singletons():
    """Reset all config-related Singletons so subsequent tests start clean.

    Each reset is wrapped in its own try/except (matching the original
    copy-pasted blocks): a Singleton that was never instantiated during the
    test may raise on construction or reset, but the remaining Singletons
    must still be cleared.
    """
    for singleton_cls in (core.ConfigManager, core.PathManager,
                          core.VariableTranslator, core.TempDirManager):
        try:
            temp = singleton_cls(unittest=True)
            temp._reset()
        except Exception:
            # best-effort teardown: ignore Singletons that can't be reset
            pass
def _tarball_file_path(self):
    """Return the full path of the tarball to create under WORKING_DIR."""
    assert hasattr(self, 'WK_DIR')
    path_mgr = core.PathManager()
    tar_name = self.WK_DIR + '.tar'
    return os.path.join(path_mgr.WORKING_DIR, tar_name)
def __init__(self, log=_log):
    """Read the virtualenv and R library roots from the PathManager,
    defaulting to empty strings when unset."""
    super(VirtualenvEnvironmentManager, self).__init__(log=log)
    path_mgr = core.PathManager()
    self.venv_root = path_mgr.get('venv_root', '')
    self.r_lib_root = path_mgr.get('r_lib_root', '')
def test_pathmgr_global(self):
    """Global path attributes reflect the stub config values."""
    path_mgr = core.PathManager()
    for attr_name, expected in (('CODE_ROOT', 'A'), ('OUTPUT_DIR', 'E')):
        self.assertEqual(getattr(path_mgr, attr_name), expected)