Code example #1
def gcp_wrapper(source_path, dest_dir, timeout=None, dry_run=None):
    """Wrapper for file and recursive directory copying using the GFDL 
    site-specific General Copy Program (`https://gitlab.gfdl.noaa.gov/gcp/gcp`__.)
    Assumes GCP environment module has been loaded beforehand, and calls GCP in
    a subprocess.
    """
    modMgr = ModuleManager()
    modMgr.load('gcp')
    config = core.ConfigManager()
    if timeout is None:
        timeout = config.get('file_transfer_timeout', 0)
    if dry_run is None:
        dry_run = config.get('dry_run', False)

    source_path = os.path.normpath(source_path)
    dest_dir = os.path.normpath(dest_dir)
    # gcp requires trailing slash, ln ignores it
    if os.path.isdir(source_path):
        source = ['-r', 'gfdl:' + source_path + os.sep]
        # gcp /A/B/ /C/D/ will result in /C/D/B, so need to specify parent dir
        dest = ['gfdl:' + os.path.dirname(dest_dir) + os.sep]
    else:
        source = ['gfdl:' + source_path]
        dest = ['gfdl:' + dest_dir + os.sep]
    _log.info('\tGCP {} -> {}'.format(source[-1], dest[-1]))
    util.run_command(['gcp', '--sync', '-v', '-cd'] + source + dest,
                     timeout=timeout,
                     dry_run=dry_run)
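
A usage sketch of the wrapper above; the paths and argument values are illustrative placeholders, not taken from the project:

# Hypothetical call: recursively copy a directory via GCP with a 5-minute
# timeout, as a dry run that only logs the command it would execute.
gcp_wrapper('/archive/model/run1/pp', '/work/mdtf/inputdata',
            timeout=300, dry_run=True)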
Code example #2
def setUp_config_singletons(config=None, paths=None, pods=None, unittest=True):
    cwd = os.path.dirname(os.path.realpath(__file__))
    code_root = os.path.dirname(os.path.dirname(cwd))
    cli_obj = cli.MDTFTopLevelArgParser(
        code_root,
        skip_defaults=True,
        argv=f"-f {os.path.join(cwd, 'dummy_config.json')}")
    cli_obj.config = vars(cli_obj.parse_args())
    if config:
        cli_obj.config.update(config)

    PodDataTuple = collections.namedtuple('PodDataTuple',
                                          'sorted_lists pod_data realm_data')
    dummy_pod_data = PodDataTuple(pod_data=pods,
                                  realm_data=dict(),
                                  sorted_lists={
                                      'pods': [],
                                      'realms': []
                                  })

    _ = core.ConfigManager(cli_obj, dummy_pod_data, unittest=unittest)
    pm = core.PathManager(cli_obj, unittest=unittest)
    if paths:
        pm.update(paths)
    _ = core.VariableTranslator(code_root, unittest=unittest)
    _ = core.TempDirManager(None, unittest=unittest)
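
A minimal sketch of pairing this helper with tearDown_config_singletons (code example #29 below) in a unittest.TestCase; the test class name and the overridden config value are assumptions for illustration:

import unittest

class ConfigSingletonTest(unittest.TestCase):
    def setUp(self):
        # hypothetical per-test override of a single config value
        setUp_config_singletons(config={'dry_run': True})

    def tearDown(self):
        # reset the Singleton instances so tests stay independent
        tearDown_config_singletons()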
Code example #3
    def setup(self, data_source):
        """Configuration set by the DataSource on the POD (after the POD is
        initialized, but before pre-run checks.)
        """
        # set up paths/working directories
        paths = core.PathManager()
        paths = paths.pod_paths(self, data_source)
        for k,v in paths.items():
            setattr(self, k, v)
        self.setup_pod_directories()
        self.set_entry_point()
        self.set_interpreter()
        config = core.ConfigManager()
        if config.get('overwrite_file_metadata', False):
            self.log.warning(('User has disabled preprocessing functionality that '
                'uses input metadata.'), tags=util.ObjectLogTag.BANNER)
        # set up env vars
        self.pod_env_vars.update(data_source.env_vars)

        self.nc_largefile = config.get('large_file', False)
        if self.nc_largefile:
            if self.program == 'ncl':
                # argument to ncl setfileoption()
                self.pod_env_vars['MDTF_NC_FORMAT'] = "NetCDF4"
            else:
                # argument to netCDF4-python/xarray/etc.
                self.pod_env_vars['MDTF_NC_FORMAT'] = "NETCDF4"
        else:
            if self.program == 'ncl':
                # argument to ncl setfileoption()
                self.pod_env_vars['MDTF_NC_FORMAT'] = "NetCDF4Classic"
            else:
                # argument to netCDF4-python/xarray/etc.
                self.pod_env_vars['MDTF_NC_FORMAT'] = "NETCDF4_CLASSIC"
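
A hedged sketch of how a Python-based POD might consume the MDTF_NC_FORMAT variable set above with xarray; the dataset contents and output filename are placeholders:

import os
import xarray as xr

# hypothetical POD-side use of the env var: write output in the netCDF
# format requested by the framework
ds = xr.Dataset({'tas': ('time', [287.1, 288.0, 287.5])})
ds.to_netcdf('example_output.nc',
             format=os.environ.get('MDTF_NC_FORMAT', 'NETCDF4_CLASSIC'))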
Code example #4
    def parse_config(self, config_d):
        """Parse contents of JSON config file into a list of
        :class:`ExplicitFileDataSourceConfigEntry` objects.
        """
        # store contents in ConfigManager so they can be backed up in output
        # (HTMLOutputManager.backup_config_files())
        config = core.ConfigManager()
        config._configs['data_source_config'] = core.ConfigTuple(
            name='data_source_config',
            backup_filename='ExplicitFileDataSource_config.json',
            contents=config_d)

        # parse contents
        for pod_name, v_dict in config_d.items():
            for v_name, v_data in v_dict.items():
                entry = ExplicitFileDataSourceConfigEntry.from_struct(
                    pod_name, v_name, v_data)
                self._config[pod_name][v_name] = entry
                self.config_by_id[entry.glob_id] = entry
        # don't bother to validate here -- if we didn't specify files for all
        # vars it'll manifest as a failed query & be logged as error there.

        # set overwrite_metadata flag if needed
        self._has_user_metadata = any(x._has_user_metadata
                                      for x in self.config_by_id.values())
        if self._has_user_metadata and \
            not config.get('overwrite_file_metadata', False):
            self.log.warning((
                "Requesting metadata edits in ExplicitFileDataSource "
                "implies the use of the --overwrite-file-metadata flag. Input "
                "file metadata will be overwritten."),
                             tags=util.ObjectLogTag.BANNER)
            config['overwrite_file_metadata'] = True
Code example #5
    def from_config(cls, pod_name, parent):
        """Usual method of instantiating Diagnostic objects, from the contents
        of its settings.jsonc file as stored in the
        :class:`~core.ConfigManager`.
        """
        config = core.ConfigManager()
        return cls.from_struct(pod_name, config.pod_data[pod_name], parent)
Code example #6
File: gfdl.py Project: NOAA-GFDL/MDTF-diagnostics
    def __init__(self, case):
        config = core.ConfigManager()
        try:
            self.frepp_mode = config.get('frepp', False)
            self.dry_run = config.get('dry_run', False)
            self.timeout = config.get('file_transfer_timeout', 0)
        except (AttributeError, KeyError) as exc:
            case.log.store_exception(exc)
Code example #7
    def __init__(self, case):
        config = core.ConfigManager()
        try:
            self.make_variab_tar = config['make_variab_tar']
            self.dry_run = config['dry_run']
            self.overwrite = config['overwrite']
            self.file_overwrite = self.overwrite  # overwrite both config and .tar
        except KeyError as exc:
            case.log.exception("Caught %r", exc)
Code example #8
    def __init__(self, pod_dict, EnvMgrClass):
        config = core.ConfigManager()
        self.test_mode = config.test_mode
        # transfer all pods, even failed ones, because we need to call their
        self.pods = [self._PodWrapperClass(pod=p) for p in pod_dict.values()]
        self.env_mgr = EnvMgrClass()

        # Need to run bash explicitly because 'conda activate' sources
        # env vars (can't do that in posix sh). tcsh could also work.
        self.bash_exec = find_executable('bash')
Code example #9
    def backup_config_file(self, case):
        """Record settings in file config_save.json for rerunning.
        """
        config = core.ConfigManager()
        out_file = os.path.join(self.WK_DIR, self._backup_config_file_name)
        if not self.file_overwrite:
            out_file, _ = util.bump_version(out_file)
        elif os.path.exists(out_file):
            _log.info("%s: Overwriting %s.", case.name, out_file)
        util.write_json(config.backup_config, out_file)
Code example #10
    def __init__(self, case):
        config = core.ConfigManager()
        try:
            self.frepp_mode = config.get('frepp', False)
            self.dry_run = config.get('dry_run', False)
            self.timeout = config.get('file_transfer_timeout', 0)
        except (AttributeError, KeyError) as exc:
            _log.exception(f"Caught {repr(exc)}.")

        super(GFDLHTMLOutputManager, self).__init__(case)
Code example #11
File: gfdl.py Project: NOAA-GFDL/MDTF-diagnostics
    def __init__(self, case_dict, parent):
        super(GfdlppDataManager, self).__init__(case_dict, parent)
        # default behavior when run interactively:
        # frepp_mode = False, any_components = True
        # default behavior when invoked by FRE wrapper:
        # frepp_mode = True (set to False by calling wrapper with --run_once)
        # any_components = True (set to False with --component_only)
        config = core.ConfigManager()
        self.frepp_mode = config.get('frepp', False)
        self.any_components = config.get('any_components', False)
Code example #12
def html_templating_dict(pod):
    """Get the dict of recognized substitutions to perform in HTML templates.
    """
    config = core.ConfigManager()
    template = config.global_env_vars.copy()
    template.update(pod.pod_env_vars)
    d = {str(k): str(v) for k, v in template.items()}
    for attr in ('name', 'long_name', 'description', 'convention', 'realm'):
        d[attr] = str(getattr(pod, attr, ""))
    return d
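
A minimal usage sketch, assuming the substitution dict is applied with Python's string.Template ($name-style placeholders); the template string is a placeholder, not one of the project's HTML templates, and pod is assumed to be an existing Diagnostic object:

import string

# fill a toy template with the POD's recognized substitutions
d = html_templating_dict(pod)
html = string.Template("<h1>$name</h1><p>$description</p>").safe_substitute(d)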
Code example #13
    def _functions(self):
        config = core.ConfigManager()
        if config.get('disable_preprocessor', False):
            return (preprocessor.CropDateRangeFunction,
                    preprocessor.RenameVariablesFunction)
        else:
            # Add ApplyScaleAndOffsetFunction to functions used by parent class
            return (preprocessor.CropDateRangeFunction,
                    preprocessor.ApplyScaleAndOffsetFunction,
                    preprocessor.PrecipRateToFluxFunction,
                    preprocessor.ConvertUnitsFunction,
                    preprocessor.ExtractLevelFunction,
                    preprocessor.RenameVariablesFunction)
Code example #14
    def __init__(self, case):
        config = core.ConfigManager()
        try:
            self.make_variab_tar = config['make_variab_tar']
            self.dry_run = config['dry_run']
            self.overwrite = config['overwrite']
            self.file_overwrite = self.overwrite  # overwrite both config and .tar
        except KeyError as exc:
            _log.exception(f"Caught {repr(exc)}.")
        self.CODE_ROOT = case.code_root
        self.WK_DIR = case.MODEL_WK_DIR  # abbreviate
        self.OUT_DIR = case.MODEL_OUT_DIR  # abbreviate
        self._case = case
Code example #15
    def __init__(self, pod, output_mgr):
        """Performs cleanup tasks when the POD has finished running.
        """
        config = core.ConfigManager()
        try:
            self.save_ps = config['save_ps']
            self.save_nc = config['save_nc']
            self.save_non_nc = config['save_non_nc']
        except KeyError as exc:
            _log.exception(f"Caught {repr(exc)}.")
            raise
        self.CODE_ROOT = output_mgr.CODE_ROOT
        self.WK_DIR = output_mgr.WK_DIR
        self._pod = pod
Code example #16
    def backup_config_files(self):
        """Record settings in file config_save.json for rerunning.
        """
        config = core.ConfigManager()
        for config_tup in config._configs.values():
            if config_tup.backup_filename is None:
                continue
            out_file = os.path.join(self.WK_DIR, config_tup.backup_filename)
            if not self.file_overwrite:
                out_file, _ = util.bump_version(out_file)
            elif os.path.exists(out_file):
                self.obj.log.info("%s: Overwriting '%s'.", self.obj.full_name,
                                  out_file)
            util.write_json(config_tup.contents, out_file, log=self.obj.log)
Code example #17
    def __post_init__(self):
        """Validate user input.
        """
        super(PPDataSourceAttributes, self).__post_init__()
        config = core.ConfigManager()

        if not self.CASE_ROOT_DIR and config.CASE_ROOT_DIR:
            _log.debug("Using global CASE_ROOT_DIR = '%s'.", config.CASE_ROOT_DIR)
            self.CASE_ROOT_DIR = config.CASE_ROOT_DIR
        # verify case root dir exists
        if not os.path.isdir(self.CASE_ROOT_DIR):
            _log.critical("Data directory CASE_ROOT_DIR = '%s' not found.",
                self.CASE_ROOT_DIR)
            exit(1)
Code example #18
    def __init__(self, pod, output_mgr):
        """Performs cleanup tasks when the POD has finished running.
        """
        config = core.ConfigManager()
        try:
            self.save_ps = config['save_ps']
            self.save_nc = config['save_nc']
            self.save_non_nc = config['save_non_nc']
        except KeyError as exc:
            pod.deactivate(exc)
            raise
        self.CODE_ROOT = output_mgr.CODE_ROOT
        self.CODE_DIR = pod.POD_CODE_DIR
        self.WK_DIR = pod.POD_WK_DIR
        self.obj = pod
Code example #19
class SubprocessRuntimeManager(AbstractRuntimeManager):
    """RuntimeManager that spawns a separate system subprocess for each POD.
    """
    _PodWrapperClass = SubprocessRuntimePODWrapper

    def __init__(self, case, EnvMgrClass):
        config = core.ConfigManager()
        self.test_mode = config.test_mode
        # transfer all pods, even failed ones, because we need to call their
        self.pods = [self._PodWrapperClass(pod=p) for p in case.pods.values()]
        self.env_mgr = EnvMgrClass(log=case.log)
        self.case = case

        # Need to run bash explicitly because 'conda activate' sources
        # env vars (can't do that in posix sh). tcsh could also work.
        self.bash_exec = find_executable('bash')
Code example #20
File: gfdl.py Project: NOAA-GFDL/MDTF-diagnostics
    def __new__(cls, case_dict, parent, *args, **kwargs):
        """Dispatch DataManager instance creation based on the contents of
        case_dict."""
        config = core.ConfigManager()
        dir_ = case_dict.get('CASE_ROOT_DIR', config.CASE_ROOT_DIR)
        if 'pp' in os.path.basename(os.path.normpath(dir_)):
            dispatched_cls = GfdlppDataManager
        else:
            dispatched_cls = Gfdludacmip6DataManager
            # could use more careful logic here, but for now assume CMIP6 on
            # /uda as a fallback

        _log.debug("%s: Dispatched DataManager to %s.", cls.__name__,
                   dispatched_cls.__name__)
        obj = dispatched_cls.__new__(dispatched_cls)
        obj.__init__(case_dict, parent)
        return obj
Code example #21
    def __post_init__(self, log=_log):
        """Validate user input.
        """
        super(ExplicitFileDataAttributes, self).__post_init__(log=log)

        config = core.ConfigManager()
        if not self.config_file:
            self.config_file = config.get('config_file', '')
        if not self.config_file:
            log.critical(("No configuration file found for ExplicitFileDataSource "
                "(--config-file)."))
            util.exit_handler(code=1)

        if self.convention != core._NO_TRANSLATION_CONVENTION:
            log.debug("Received incompatible convention '%s'; setting to '%s'.",
                self.convention, core._NO_TRANSLATION_CONVENTION)
            self.convention = core._NO_TRANSLATION_CONVENTION
Code example #22
    def _set_case_root_dir(self, log=_log):
        """Additional logic to set CASE_ROOT_DIR from MODEL_DATA_ROOT.
        """
        config = core.ConfigManager()
        paths = core.PathManager()
        if not self.CASE_ROOT_DIR and config.CASE_ROOT_DIR:
            log.debug("Using global CASE_ROOT_DIR = '%s'.", config.CASE_ROOT_DIR)
            self.CASE_ROOT_DIR = config.CASE_ROOT_DIR
        if not self.CASE_ROOT_DIR:
            model_root = getattr(paths, 'MODEL_DATA_ROOT', None)
            log.debug("Setting CASE_ROOT_DIR to MODEL_DATA_ROOT = '%s'.", model_root)
            self.CASE_ROOT_DIR = model_root
        # verify CASE_ROOT_DIR exists
        if not os.path.isdir(self.CASE_ROOT_DIR):
            log.critical("Data directory CASE_ROOT_DIR = '%s' not found.",
                self.CASE_ROOT_DIR)
            util.exit_handler(code=1)
Code example #23
class SubprocessRuntimeManager(AbstractRuntimeManager):
    """RuntimeManager class that runs each POD in a child subprocess spawned on
    the local machine. Resource allocation is delegated to the local machine's
    kernel's scheduler.
    """
    _PodWrapperClass = SubprocessRuntimePODWrapper

    def __init__(self, case, EnvMgrClass):
        config = core.ConfigManager()
        self.test_mode = config.test_mode
        # transfer all pods, even failed ones, because we need to call their
        self.pods = [self._PodWrapperClass(pod=p) for p in case.pods.values()]
        self.env_mgr = EnvMgrClass(log=case.log)
        self.case = case

        # Need to run bash explicitly because 'conda activate' sources
        # env vars (can't do that in posix sh). tcsh could also work.
        self.bash_exec = find_executable('bash')
Code example #24
    def _functions(self):
        """Determine which PreprocessorFunctions are applicable to the current
        package run, defaulting to all of them.

        Returns:
            tuple of classes (inheriting from :class:`PreprocessorFunctionBase`)
            listing the preprocessing functions to be called, in order.
        """
        config = core.ConfigManager()
        if config.get('disable_preprocessor', False):
            # omit unit conversion functions; following two functions necessary
            # in all cases to obtain correct output
            return (CropDateRangeFunction, RenameVariablesFunction)
        else:
            # normal operation: run all functions
            return (CropDateRangeFunction, PrecipRateToFluxFunction,
                    ConvertUnitsFunction, ExtractLevelFunction,
                    RenameVariablesFunction, AssociatedVariablesFunction)
Code example #25
File: gfdl.py Project: NOAA-GFDL/MDTF-diagnostics
    def __init__(self, case_dict, parent):
        self.catalog = None
        super(GFDL_GCP_FileDataSourceBase, self).__init__(case_dict, parent)

        config = core.ConfigManager()
        self.frepp_mode = config.get('frepp', False)
        self.dry_run = config.get('dry_run', False)
        self.timeout = config.get('file_transfer_timeout', 0)

        if self.frepp_mode:
            paths = core.PathManager()
            self.overwrite = True
            # flag to not overwrite config and .tar: want overwrite for frepp
            self.file_overwrite = True
            # if overwrite=False, WK_DIR & OUT_DIR will have been set to a
            # unique name in parent's init. Set it back so it will be overwritten.
            d = paths.model_paths(self, overwrite=True)
            self.MODEL_WK_DIR = d.MODEL_WK_DIR
            self.MODEL_OUT_DIR = d.MODEL_OUT_DIR
Code example #26
    def pre_run_setup(self):
        """Extra code only applicable in frepp cooperative mode. If this code is
        called, all the POD's model data has been generated. Write a placeholder
        directory to POD_OUT_DIR, so if frepp invokes the MDTF package again
        while we're running, only our results will be written to the overall
        output.
        """
        super(GfdlDiagnostic, self).pre_run_setup()

        config = core.ConfigManager()
        frepp_mode = config.get('frepp', False)
        if frepp_mode and not os.path.exists(self.POD_OUT_DIR):
            try:
                gfdl_util.make_remote_dir(self.POD_OUT_DIR, log=self.log)
                self._has_placeholder = True
            except Exception as exc:
                chained_exc = util.chain_exc(exc, (f"Making output directory at "
                    f"{self.POD_OUT_DIR}."), util.PodRuntimeError)
                self.deactivate(chained_exc)
Code example #27
    def __init__(self, data_mgr, pod):
        config = core.ConfigManager()
        self.overwrite_ds = config.get('overwrite_file_metadata', False)

        self.WK_DIR = data_mgr.MODEL_WK_DIR
        self.convention = data_mgr.attrs.convention
        self.pod_convention = pod.convention

        if getattr(pod, 'nc_largefile', False):
            self.nc_format = "NETCDF4_CLASSIC"
        else:
            self.nc_format = "NETCDF4"
        # HACK only used for _FillValue workaround in clean_output_encoding
        self.output_to_ncl = ('ncl' in pod.runtime_requirements)

        # initialize xarray parser
        self.parser = self._XarrayParserClass(data_mgr, pod)
        # initialize PreprocessorFunctionBase objects
        self.functions = [cls_(data_mgr, pod) for cls_ in self._functions]
Code example #28
    def __init__(self, pod, output_mgr):
        """Copy configuration info from POD object.

        Args:
            pod (:class:`~src.diagnostic.Diagnostic`): POD which generated the
                output files being processed.
            output_mgr: OutputManager plugin handling the overall processing of
                output files from all PODs.
        """
        config = core.ConfigManager()
        try:
            self.save_ps = config['save_ps']
            self.save_nc = config['save_nc']
            self.save_non_nc = config['save_non_nc']
        except KeyError as exc:
            pod.deactivate(exc)
            raise
        self.CODE_ROOT = output_mgr.CODE_ROOT
        self.CODE_DIR = pod.POD_CODE_DIR
        self.WK_DIR = pod.POD_WK_DIR
        self.obj = pod
Code example #29
def tearDown_config_singletons():
    # clear Singletons
    try:
        temp = core.ConfigManager(unittest=True)
        temp._reset()
    except Exception:
        pass
    try:
        temp = core.PathManager(unittest=True)
        temp._reset()
    except Exception:
        pass
    try:
        temp = core.VariableTranslator(unittest=True)
        temp._reset()
    except Exception:
        pass
    try:
        temp = core.TempDirManager(unittest=True)
        temp._reset()
    except Exception:
        pass
Code example #30
File: gfdl.py Project: NOAA-GFDL/MDTF-diagnostics
    def __init__(self, pod, output_mgr):
        super(GFDLHTMLPodOutputManager, self).__init__(pod, output_mgr)
        config = core.ConfigManager()
        self.frepp_mode = config.get('frepp', False)