Example #1
def _make_transect_masks(mesh_name, suffix, fcMask, logger, cores,
                         subdivision_threshold=10e3):
    mesh_filename = 'restart.nc'

    geojson_filename = '{}.geojson'.format(suffix)
    mask_filename = '{}_{}.nc'.format(mesh_name, suffix)

    fcMask.to_geojson(geojson_filename)

    # these defaults may have been updated from config options -- pass them
    # along to the subprocess
    netcdf_format = mpas_tools.io.default_format
    netcdf_engine = mpas_tools.io.default_engine

    args = ['compute_mpas_transect_masks',
            '-m', mesh_filename,
            '-g', geojson_filename,
            '-o', mask_filename,
            '-t', 'edge',
            '-s', '{}'.format(subdivision_threshold),
            '--process_count', '{}'.format(cores),
            '--add_edge_sign',
            '--format', netcdf_format,
            '--engine', netcdf_engine]
    check_call(args, logger=logger)

    # make links in output directory
    output_dir = '../assembled_files/diagnostics/mpas_analysis/' \
                 'region_masks'
    symlink('../../../../diagnostics_files/{}'.format(mask_filename),
            '{}/{}'.format(output_dir, mask_filename))
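
The relative targets passed to symlink here (for example '../../../../diagnostics_files/...') are resolved from the directory containing the link, not from the current working directory. A minimal sketch of a symlink helper consistent with that usage (hypothetical; the real helper may differ):

import os


def symlink(target, link_name):
    # replace any existing link so repeated runs are idempotent
    if os.path.islink(link_name):
        os.remove(link_name)
    # the target string is stored verbatim, so a relative target is
    # interpreted relative to the directory that contains the link
    os.symlink(target, link_name)
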
Example #2
    def configure(self):
        """
        Modify the configuration options for this test case
        """
        with path('compass.landice.tests.enthalpy_benchmark', 'README') as \
                target:
            symlink(str(target), '{}/README'.format(self.work_dir))
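
The path context manager used here presumably comes from importlib.resources (an assumption based on the usage); it yields a concrete filesystem path for a file bundled inside a package:

from importlib.resources import path

# yields a pathlib.Path that is guaranteed to be valid inside the with
# block, even if the package is installed as a zip archive
with path('compass.landice.tests.enthalpy_benchmark', 'README') as target:
    print(target)
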
Example #3
    def run(self):
        """
        Run this step of the test case
        """
        with xarray.open_dataset('restart.nc') as ds:
            mesh_short_name = ds.attrs['MPAS_Mesh_Short_Name']
            mesh_prefix = ds.attrs['MPAS_Mesh_Prefix']
            prefix = 'MPAS_Mesh_{}'.format(mesh_prefix)
            creation_date = ds.attrs['{}_Version_Creation_Date'.format(prefix)]

        try:
            os.makedirs('../assembled_files/inputdata/ocn/mpas-o/{}'.format(
                mesh_short_name))
        except OSError:
            pass

        source_filename = 'restart.nc'
        dest_filename = 'mpaso.{}.{}.nc'.format(mesh_short_name, creation_date)

        with xarray.open_dataset(source_filename) as ds:
            ds.load()
            ds = ds.drop_vars('xtime')
            write_netcdf(ds, dest_filename)

        symlink(
            '../../../../../ocean_initial_condition/{}'.format(dest_filename),
            '../assembled_files/inputdata/ocn/mpas-o/{}/{}'.format(
                mesh_short_name, dest_filename))
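
If write_netcdf is a thin wrapper around Dataset.to_netcdf with project-specific defaults (an assumption), the core of this step reduces to plain xarray; the output name below is only illustrative:

import xarray

with xarray.open_dataset('restart.nc') as ds:
    ds.load()                   # read everything into memory before the file closes
    ds = ds.drop_vars('xtime')  # drop the char-array time stamp
    ds.to_netcdf('initial_condition.nc')
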
Example #4
    def configure(self):
        """
        Modify the configuration options for this test case
        """
        configure_global_ocean(test_case=self, mesh=self.mesh, init=self.init)
        with path('compass.ocean.tests.global_ocean.files_for_e3sm',
                  'README') as target:
            symlink(str(target), '{}/README'.format(self.work_dir))
Example #5
def _make_moc_masks(mesh_short_name, logger, cores):
    gf = GeometricFeatures()

    mesh_filename = 'restart.nc'

    function, prefix, date = get_aggregator_by_name('MOC Basins')
    fcMask = function(gf)

    suffix = '{}{}'.format(prefix, date)

    geojson_filename = '{}.geojson'.format(suffix)
    mask_filename = '{}_{}.nc'.format(mesh_short_name, suffix)

    fcMask.to_geojson(geojson_filename)

    # these defaults may have been updated from config options -- pass them
    # along to the subprocess
    netcdf_format = mpas_tools.io.default_format
    netcdf_engine = mpas_tools.io.default_engine

    args = ['compute_mpas_region_masks',
            '-m', mesh_filename,
            '-g', geojson_filename,
            '-o', mask_filename,
            '-t', 'cell',
            '--process_count', '{}'.format(cores),
            '--format', netcdf_format,
            '--engine', netcdf_engine]
    check_call(args, logger=logger)

    mask_and_transect_filename = '{}_mocBasinsAndTransects{}.nc'.format(
        mesh_short_name, date)

    dsMesh = xarray.open_dataset(mesh_filename)
    dsMask = xarray.open_dataset(mask_filename)

    dsMasksAndTransects = add_moc_southern_boundary_transects(
        dsMask, dsMesh, logger=logger)

    write_netcdf(dsMasksAndTransects, mask_and_transect_filename,
                 char_dim_name='StrLen')

    # make links in output directories (both inputdata and diagnostics)
    output_dir = '../assembled_files/inputdata/ocn/mpas-o/{}'.format(
        mesh_short_name)
    symlink(
        '../../../../../diagnostics_files/{}'.format(
            mask_and_transect_filename),
        '{}/{}'.format(output_dir, mask_and_transect_filename))

    output_dir = '../assembled_files/diagnostics/mpas_analysis/' \
                 'region_masks'
    symlink(
        '../../../../diagnostics_files/{}'.format(
            mask_and_transect_filename),
        '{}/{}'.format(output_dir, mask_and_transect_filename))
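
A minimal sketch of a check_call helper consistent with how it is used in these examples, assuming it logs the command and then delegates to subprocess (the real helper likely also redirects the command's output to the logger):

import subprocess


def check_call(args, logger=None):
    if logger is not None:
        logger.info('running: {}'.format(' '.join(args)))
    # raises CalledProcessError on a non-zero exit code
    subprocess.check_call(args)
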
Example #6
    def run(self):
        """
        Run this step of the test case
        """

        section = self.config['make_diagnostics_files']
        mesh_name = section.get('mesh_name')
        mesh_filename = section.get('mesh_filename')
        cores = section.getint('cores')
        with_ice_shelf_cavities = section.getboolean('with_ice_shelf_cavities')

        symlink(os.path.join('..', mesh_filename), 'restart.nc')
        make_diagnostics_files(self.config, self.logger, mesh_name,
                               with_ice_shelf_cavities, cores)
Example #7
    def run(self):
        """
        Run this step of the test case
        """
        logger = self.logger

        with xarray.open_dataset('restart.nc') as ds:
            mesh_short_name = ds.attrs['MPAS_Mesh_Short_Name']
            mesh_prefix = ds.attrs['MPAS_Mesh_Prefix']
            prefix = 'MPAS_Mesh_{}'.format(mesh_prefix)
            creation_date = ds.attrs['{}_Version_Creation_Date'.format(prefix)]

        try:
            os.makedirs('../assembled_files/inputdata/ocn/mpas-o/{}'.format(
                mesh_short_name))
        except OSError:
            pass

        symlink('graph.info', 'mpas-o.graph.info.{}'.format(creation_date))

        nCells = sum(1 for _ in open('graph.info'))
        min_graph_size = int(nCells / 6000)
        max_graph_size = int(nCells / 100)
        logger.info('Creating graph files between {} and {}'.format(
            min_graph_size, max_graph_size))
        n_power2 = 2**np.arange(1, 21)
        n_multiples12 = 12 * np.arange(1, 9)

        n = n_power2
        for power10 in range(3):
            n = np.concatenate([n, 10**power10 * n_multiples12])

        for index in range(len(n)):
            if min_graph_size <= n[index] <= max_graph_size:
                args = [
                    'gpmetis', 'mpas-o.graph.info.{}'.format(creation_date),
                    '{}'.format(n[index])
                ]
                check_call(args, logger)

        # create link in assembled files directory
        files = glob('mpas-o.graph.info.*')
        dest_path = '../assembled_files/inputdata/ocn/mpas-o/{}'.format(
            mesh_short_name)
        for file in files:
            symlink('../../../../../ocean_graph_partition/{}'.format(file),
                    '{}/{}'.format(dest_path, file))
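
The candidate partition counts built above are powers of two plus multiples of 12 scaled by powers of ten; only counts within the mesh-dependent bounds are passed to gpmetis. A standalone illustration with a hypothetical cell count:

import numpy as np

n_power2 = 2**np.arange(1, 21)        # 2, 4, 8, ..., 1048576
n_multiples12 = 12 * np.arange(1, 9)  # 12, 24, ..., 96

n = n_power2
for power10 in range(3):
    n = np.concatenate([n, 10**power10 * n_multiples12])

n_cells = 240000  # hypothetical mesh size
valid = [count for count in np.unique(n)
         if n_cells / 6000 <= count <= n_cells / 100]
print(valid)  # 48, 60, 64, 72, ..., 1200, 2048, 2400
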
Example #8
    def run(self):
        """
        Run this step of the test case
        """
        with_ice_shelf_cavities = self.with_ice_shelf_cavities

        with xarray.open_dataset('restart.nc') as ds:
            mesh_short_name = ds.attrs['MPAS_Mesh_Short_Name']
            mesh_prefix = ds.attrs['MPAS_Mesh_Prefix']
            prefix = 'MPAS_Mesh_{}'.format(mesh_prefix)
            creation_date = ds.attrs['{}_Version_Creation_Date'.format(prefix)]

        try:
            os.makedirs(
                '../assembled_files/inputdata/ocn/mpas-seaice/{}'.format(
                    mesh_short_name))
        except OSError:
            pass

        dest_filename = 'mpassi.{}.{}.nc'.format(mesh_short_name,
                                                 creation_date)

        keep_vars = [
            'areaCell', 'cellsOnCell', 'edgesOnCell', 'fCell', 'indexToCellID',
            'latCell', 'lonCell', 'meshDensity', 'nEdgesOnCell',
            'verticesOnCell', 'xCell', 'yCell', 'zCell', 'angleEdge',
            'cellsOnEdge', 'dcEdge', 'dvEdge', 'edgesOnEdge', 'fEdge',
            'indexToEdgeID', 'latEdge', 'lonEdge', 'nEdgesOnEdge',
            'verticesOnEdge', 'weightsOnEdge', 'xEdge',
            'yEdge', 'zEdge', 'areaTriangle', 'cellsOnVertex', 'edgesOnVertex',
            'fVertex', 'indexToVertexID', 'kiteAreasOnVertex', 'latVertex',
            'lonVertex', 'xVertex', 'yVertex', 'zVertex'
        ]

        if with_ice_shelf_cavities:
            keep_vars.append('landIceMask')

        with xarray.open_dataset('restart.nc') as ds:
            ds.load()
            ds = ds[keep_vars]
            write_netcdf(ds, dest_filename)

        symlink(
            '../../../../../seaice_initial_condition/{}'.format(dest_filename),
            '../assembled_files/inputdata/ocn/mpas-seaice/{}/{}'.format(
                mesh_short_name, dest_filename))
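
Indexing a Dataset with a list of names, as done with keep_vars above, returns a new Dataset containing only those variables (plus any coordinates they depend on). A tiny illustration:

import xarray

with xarray.open_dataset('restart.nc') as ds:
    # a small subset, for illustration only
    ds_mesh = ds[['latCell', 'lonCell', 'areaCell']]
    print(list(ds_mesh.data_vars))
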
Example #9
    def run(self):
        """
        Run this step of the test case
        """
        with_ice_shelf_cavities = self.with_ice_shelf_cavities

        with xarray.open_dataset('restart.nc') as ds:
            mesh_short_name = ds.attrs['MPAS_Mesh_Short_Name']
            mesh_prefix = ds.attrs['MPAS_Mesh_Prefix']
            prefix = 'MPAS_Mesh_{}'.format(mesh_prefix)
            creation_date = ds.attrs['{}_Version_Creation_Date'.format(prefix)]

        try:
            os.makedirs('../assembled_files/inputdata/ocn/mpas-o/{}'.format(
                mesh_short_name))
        except OSError:
            pass

        if with_ice_shelf_cavities:
            nomask_str = '.nomask'
        else:
            nomask_str = ''

        scrip_filename = 'ocean.{}{}.scrip.{}.nc'.format(
            mesh_short_name, nomask_str, creation_date)

        scrip_from_mpas('restart.nc', scrip_filename)

        symlink('../../../../../scrip/{}'.format(scrip_filename),
                '../assembled_files/inputdata/ocn/mpas-o/{}/{}'.format(
                    mesh_short_name, scrip_filename))

        if with_ice_shelf_cavities:
            scrip_mask_filename = 'ocean.{}.mask.scrip.{}.nc'.format(
                mesh_short_name, creation_date)
            scrip_from_mpas('restart.nc', scrip_mask_filename,
                            useLandIceMask=True)

            symlink(
                '../../../../../scrip/{}'.format(
                    scrip_mask_filename),
                '../assembled_files/inputdata/ocn/mpas-o/{}/{}'.format(
                    mesh_short_name, scrip_mask_filename))
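
scrip_from_mpas converts an MPAS mesh file into the SCRIP format used by regridding tools; with useLandIceMask=True, cells under land ice are presumably masked out. A minimal use (the import path below is an assumption):

from mpas_tools.scrip.from_mpas import scrip_from_mpas

# write an unmasked SCRIP file from an MPAS restart/mesh file
scrip_from_mpas('restart.nc', 'ocean.scrip.nc')
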
Example #10
def setup_case(path, test_case, config_file, machine, machine_info, work_dir,
               baseline_dir, mpas_model_path, cached_steps):
    """
    Set up a test case

    Parameters
    ----------
    path : str
        Relative path for the test case to set up

    test_case : compass.TestCase
        A test case to set up

    config_file : str
        Configuration file with custom options for setting up and running test
        cases

    machine : str
        The name of one of the machines with defined config options, which can
        be listed with ``compass list --machines``

    machine_info : mache.MachineInfo
        Information about the machine, to be included in the config options
        and passed along to each step

    work_dir : str
        A directory that will serve as the base for creating case directories

    baseline_dir : str
        Location of baselines that can be compared to

    mpas_model_path : str
        The relative or absolute path to the root of a branch where the MPAS
        model has been built

    cached_steps : list of str
        Which steps (if any) should be cached.  If all steps should be cached,
        the first entry is "_all"
    """

    print('  {}'.format(path))

    config = configparser.ConfigParser(
        interpolation=configparser.ExtendedInterpolation())

    # start with default compass config options
    add_config(config, 'compass', 'default.cfg')

    # add the E3SM config options from mache
    merge_other_config(config, machine_info.config)
    # add the compass machine config file
    if machine is None:
        machine = 'default'
    add_config(config, 'compass.machines', '{}.cfg'.format(machine))

    # add the config options for the MPAS core
    mpas_core = test_case.mpas_core.name
    add_config(config, 'compass.{}'.format(mpas_core),
               '{}.cfg'.format(mpas_core))

    # add the config options for the test group (if defined)
    test_group = test_case.test_group.name
    add_config(config, 'compass.{}.tests.{}'.format(mpas_core, test_group),
               '{}.cfg'.format(test_group), exception=False)

    # add the config options for the test case (if defined)
    add_config(config, test_case.__module__,
               '{}.cfg'.format(test_case.name), exception=False)

    test_case_dir = os.path.join(work_dir, path)
    try:
        os.makedirs(test_case_dir)
    except OSError:
        pass
    test_case.work_dir = test_case_dir
    test_case.base_work_dir = work_dir

    # add the custom config file once before calling configure() in case we
    # need to use the config options from there
    if config_file is not None:
        config.read(config_file)

    # add config options specific to the test case
    test_case.config = config
    test_case.configure()

    # add the custom config file (again) last, so these options are the
    # defaults
    if config_file is not None:
        config.read(config_file)

    # add the baseline directory for this test case
    if baseline_dir is not None:
        test_case.baseline_dir = os.path.join(baseline_dir, path)

    # set the mpas_model path from the command line if provided
    if mpas_model_path is not None:
        config.set('paths', 'mpas_model', mpas_model_path)

    config.set('test_case', 'steps_to_run', ' '.join(test_case.steps_to_run))

    # make sure all paths in the paths, namelists and streams sections are
    # absolute paths
    ensure_absolute_paths(config)

    # write out the config file
    test_case_config = '{}.cfg'.format(test_case.name)
    test_case.config_filename = test_case_config
    with open(os.path.join(test_case_dir, test_case_config), 'w') as f:
        config.write(f)

    if len(cached_steps) > 0 and cached_steps[0] == '_all':
        cached_steps = list(test_case.steps.keys())
    if len(cached_steps) > 0:
        print_steps = ' '.join(cached_steps)
        print(f'    steps with cached outputs: {print_steps}')
    for step_name in cached_steps:
        test_case.steps[step_name].cached = True

    # iterate over steps
    for step in test_case.steps.values():
        # make the step directory if it doesn't exist
        step_dir = os.path.join(work_dir, step.path)
        try:
            os.makedirs(step_dir)
        except OSError:
            pass

        symlink(os.path.join(test_case_dir, test_case_config),
                os.path.join(step_dir, test_case_config))

        step.work_dir = step_dir
        step.base_work_dir = work_dir
        step.config_filename = test_case_config
        step.config = config

        # set up the step
        step.setup()

        # process input, output, namelist and streams files
        step.process_inputs_and_outputs()

    # wait until we've set up all the steps before pickling because steps may
    # need other steps to be set up
    for step in test_case.steps.values():

        # pickle the test case and step for use at runtime
        pickle_filename = os.path.join(step.work_dir, 'step.pickle')
        with open(pickle_filename, 'wb') as handle:
            pickle.dump((test_case, step), handle,
                        protocol=pickle.HIGHEST_PROTOCOL)

    # pickle the test case for use at runtime
    pickle_filename = os.path.join(test_case.work_dir, 'test_case.pickle')
    with open(pickle_filename, 'wb') as handle:
        pickle.dump(test_case, handle, protocol=pickle.HIGHEST_PROTOCOL)

    if 'LOAD_COMPASS_ENV' in os.environ:
        script_filename = os.environ['LOAD_COMPASS_ENV']
        # make a symlink to the script for loading the compass conda env.
        symlink(script_filename, os.path.join(test_case_dir,
                                              'load_compass_env.sh'))
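
The ExtendedInterpolation passed to ConfigParser above lets one option reference another as ${section:option}, which is presumably how compass config files compose paths. A self-contained illustration:

import configparser

config = configparser.ConfigParser(
    interpolation=configparser.ExtendedInterpolation())
config.read_string("""
[paths]
mpas_model = /path/to/model
executable = ${paths:mpas_model}/ocean_model
""")
print(config.get('paths', 'executable'))  # /path/to/model/ocean_model
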
Example #11
def setup_cases(tests=None, numbers=None, config_file=None, machine=None,
                work_dir=None, baseline_dir=None, mpas_model_path=None,
                suite_name='custom', cached=None):
    """
    Set up one or more test cases

    Parameters
    ----------
    tests : list of str, optional
        Relative paths for the test cases to set up

    numbers : list of str, optional
        Case numbers to set up, as listed from ``compass list``, optionally
        with a suffix ``c`` to indicate that all steps in that test case
        should be cached

    config_file : str, optional
        Configuration file with custom options for setting up and running test
        cases

    machine : str, optional
        The name of one of the machines with defined config options, which can
        be listed with ``compass list --machines``

    work_dir : str, optional
        A directory that will serve as the base for creating case directories

    baseline_dir : str, optional
        Location of baselines that can be compared to

    mpas_model_path : str, optional
        The relative or absolute path to the root of a branch where the MPAS
        model has been built

    suite_name : str, optional
        The name of the test suite if tests are being set up through a test
        suite or ``'custom'`` if not

    cached : list of list of str, optional
        For each test in ``tests``, which steps (if any) should be cached,
        or a list with "_all" as the first entry if all steps in the test case
        should be cached

    Returns
    -------
    test_cases : dict of compass.TestCase
        A dictionary of test cases, with the relative path in the work
        directory as keys
    """
    if machine is None and 'COMPASS_MACHINE' in os.environ:
        machine = os.environ['COMPASS_MACHINE']

    if machine is None:
        machine = discover_machine()

    if machine is None:
        machine_info = MachineInfo(machine='unknown')
    else:
        machine_info = MachineInfo(machine=machine)

    if config_file is None and machine is None:
        raise ValueError('At least one of config_file and machine is needed.')

    if tests is None and numbers is None:
        raise ValueError('At least one of tests or numbers is needed.')

    if cached is not None:
        if tests is None:
            warnings.warn('Ignoring "cached" argument because "tests" was '
                          'not provided')
        elif len(cached) != len(tests):
            raise ValueError('A list of cached steps must be provided for '
                             'each test in "tests"')

    if work_dir is None:
        work_dir = os.getcwd()
    work_dir = os.path.abspath(work_dir)

    mpas_cores = get_mpas_cores()

    all_test_cases = dict()
    for mpas_core in mpas_cores:
        for test_group in mpas_core.test_groups.values():
            for test_case in test_group.test_cases.values():
                all_test_cases[test_case.path] = test_case

    test_cases = dict()
    cached_steps = dict()
    if numbers is not None:
        keys = list(all_test_cases)
        for number in numbers:
            cache_all = False
            if number.endswith('c'):
                cache_all = True
                number = int(number[:-1])
            else:
                number = int(number)

            if number >= len(keys):
                raise ValueError('test number {} is out of range.  There are '
                                 'only {} tests.'.format(number, len(keys)))
            path = keys[number]
            if cache_all:
                cached_steps[path] = ['_all']
            else:
                cached_steps[path] = list()
            test_cases[path] = all_test_cases[path]

    if tests is not None:
        for index, path in enumerate(tests):
            if path not in all_test_cases:
                raise ValueError('Test case with path {} is not in '
                                 'test_cases'.format(path))
            if cached is not None:
                cached_steps[path] = cached[index]
            else:
                cached_steps[path] = list()
            test_cases[path] = all_test_cases[path]

    # get the MPAS core of the first test case.  We'll assume all tests are
    # for this core
    first_path = next(iter(test_cases))
    mpas_core = test_cases[first_path].mpas_core.name
    provenance.write(work_dir, test_cases, mpas_core=mpas_core,
                     config_filename=config_file,
                     mpas_model_path=mpas_model_path)

    print('Setting up test cases:')
    for path, test_case in test_cases.items():
        setup_case(path, test_case, config_file, machine, machine_info,
                   work_dir, baseline_dir, mpas_model_path,
                   cached_steps=cached_steps[path])

    test_suite = {'name': suite_name,
                  'test_cases': test_cases,
                  'work_dir': work_dir}

    # pickle the test or step dictionary for use at runtime
    pickle_file = os.path.join(test_suite['work_dir'],
                               '{}.pickle'.format(suite_name))
    with open(pickle_file, 'wb') as handle:
        pickle.dump(test_suite, handle, protocol=pickle.HIGHEST_PROTOCOL)

    if 'LOAD_COMPASS_ENV' in os.environ:
        script_filename = os.environ['LOAD_COMPASS_ENV']
        # make a symlink to the script for loading the compass conda env.
        symlink(script_filename, os.path.join(work_dir, 'load_compass_env.sh'))

    max_cores, max_of_min_cores = _get_required_cores(test_cases)

    print('target cores: {}'.format(max_cores))
    print('minimum cores: {}'.format(max_of_min_cores))

    return test_cases
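
The pickled suite written above can be read back at run time to recover the test-case objects; a minimal sketch of the reader side, assuming the dictionary layout shown:

import pickle

with open('custom.pickle', 'rb') as handle:
    test_suite = pickle.load(handle)

for path, test_case in test_suite['test_cases'].items():
    print(path, list(test_case.steps))
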
Example #12
def make_diagnostics_files(config, logger, mesh_short_name,
                           with_ice_shelf_cavities, cores):
    """
    Run this step of the test case

    Parameters
    ----------
    config : configparser.ConfigParser
        Configuration options for this test case

    logger : logging.Logger
        A logger for output from the step

    mesh_short_name : str
        The E3SM short name of the mesh

    with_ice_shelf_cavities : bool
        Whether the mesh has ice-shelf cavities

    cores : int
        The number of cores to use to build mapping files
    """

    for directory in [
            '../assembled_files/inputdata/ocn/mpas-o/{}'.format(
                mesh_short_name),
            '../assembled_files/diagnostics/mpas_analysis/region_masks',
            '../assembled_files/diagnostics/mpas_analysis/maps']:
        try:
            os.makedirs(directory)
        except OSError:
            pass
    _make_moc_masks(mesh_short_name, logger, cores)

    gf = GeometricFeatures()
    region_groups = ['Antarctic Regions', 'Arctic Ocean Regions',
                     'Arctic Sea Ice Regions', 'Ocean Basins',
                     'Ocean Subbasins', 'ISMIP6 Regions']

    if with_ice_shelf_cavities:
        region_groups.append('Ice Shelves')

    for region_group in region_groups:
        function, prefix, date = get_aggregator_by_name(region_group)
        suffix = '{}{}'.format(prefix, date)
        fcMask = function(gf)
        _make_region_masks(mesh_short_name, suffix=suffix, fcMask=fcMask,
                           logger=logger, cores=cores)

    transect_groups = ['Transport Transects']
    for transect_group in transect_groups:
        function, prefix, date = get_aggregator_by_name(transect_group)
        suffix = '{}{}'.format(prefix, date)
        fcMask = function(gf)
        _make_transect_masks(mesh_short_name, suffix=suffix, fcMask=fcMask,
                             logger=logger, cores=cores)

    _make_analysis_lat_lon_map(config, mesh_short_name, cores, logger)
    _make_analysis_polar_map(config, mesh_short_name,
                             projection='antarctic', cores=cores,
                             logger=logger)
    _make_analysis_polar_map(config, mesh_short_name, projection='arctic',
                             cores=cores, logger=logger)

    files = glob.glob('map_*')

    # make links in output directory
    output_dir = '../assembled_files/diagnostics/mpas_analysis/maps'
    for filename in files:
        symlink('../../../../diagnostics_files/{}'.format(filename),
                '{}/{}'.format(output_dir, filename))
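
For context, get_aggregator_by_name returns a function that builds a FeatureCollection for the named region or transect group, along with a prefix and date used to tag output files. The return signature is taken from the calls above; the import location is an assumption:

from geometric_features import GeometricFeatures
from geometric_features.aggregation import get_aggregator_by_name

gf = GeometricFeatures()
function, prefix, date = get_aggregator_by_name('Ocean Basins')
fcMask = function(gf)
fcMask.to_geojson('{}{}.geojson'.format(prefix, date))
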
Example #13
    def process_inputs_and_outputs(self):
        """
        Process the inputs to and outputs from a step added with
        :py:meth:`compass.Step.add_input_file` and
        :py:meth:`compass.Step.add_output_file`.  This includes downloading
        files, making symlinks, and converting relative paths to absolute
        paths.

        Also generates namelist and streams files
        """
        mpas_core = self.mpas_core.name
        step_dir = self.work_dir
        config = self.config

        # process the outputs first because cached outputs will add more inputs
        if self.cached:
            # forget about the inputs -- we won't use them, but we will add
            # the cached outputs as inputs
            self.input_data = list()
            for output in self.outputs:
                filename = os.path.join(self.path, output)
                if filename not in self.mpas_core.cached_files:
                    raise ValueError(f'The file {filename} has not been added '
                                     f'to the cache database')
                target = self.mpas_core.cached_files[filename]
                self.add_input_file(filename=output,
                                    target=target,
                                    database='compass_cache')

        inputs = []
        databases_with_downloads = set()
        for entry in self.input_data:
            filename = entry['filename']
            target = entry['target']
            database = entry['database']
            url = entry['url']
            work_dir_target = entry['work_dir_target']
            package = entry['package']
            copy = entry['copy']

            if filename == '<<<model>>>':
                model = self.config.get('executables', 'model')
                filename = os.path.basename(model)
                target = os.path.abspath(model)

            if package is not None:
                if target is None:
                    target = filename
                with path(package, target) as package_path:
                    target = str(package_path)

            if work_dir_target is not None:
                target = os.path.join(self.base_work_dir, work_dir_target)

            if target is not None:
                download_target = target
            else:
                download_target = filename

            download_path = None

            if database is not None:
                # we're downloading a file to a cache of a database (if it's
                # not already there)
                if url is None:
                    base_url = config.get('download', 'server_base_url')
                    core_path = config.get('download', 'core_path')
                    url = '{}/{}/{}'.format(base_url, core_path, database)

                    url = '{}/{}'.format(url, target)

                database_root = config.get(
                    'paths', '{}_database_root'.format(mpas_core))
                download_path = os.path.join(database_root, database,
                                             download_target)
                if not os.path.exists(download_path):
                    database_subdir = os.path.join(database_root, database)
                    databases_with_downloads.add(database_subdir)
            elif url is not None:
                download_path = download_target

            if url is not None:
                download_target = download(url, download_path, config)
                if target is not None:
                    # this is the absolute path that we presumably want
                    target = download_target

            if target is not None:
                filepath = os.path.join(step_dir, filename)
                if copy:
                    shutil.copy(target, filepath)
                else:
                    symlink(target, filepath)
                inputs.append(target)
            else:
                inputs.append(filename)

        if len(databases_with_downloads) > 0:
            self._fix_permissions(databases_with_downloads)

        # convert inputs and outputs to absolute paths
        self.inputs = [
            os.path.abspath(os.path.join(step_dir, filename))
            for filename in inputs
        ]

        self.outputs = [
            os.path.abspath(os.path.join(step_dir, filename))
            for filename in self.outputs
        ]

        self._generate_namelists()
        self._generate_streams()
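
The entry dicts consumed in the loop above imply that add_input_file records at least the following fields; this is a reconstruction from the keys that are read, not the real signature:

entry = {
    'filename': 'restart.nc',    # name of the link in the step directory
    'target': None,              # what the link should point to
    'database': None,            # server database to download from
    'url': None,                 # explicit download URL, if any
    'work_dir_target': None,     # target relative to the base work dir
    'package': None,             # python package that bundles the file
    'copy': False,               # copy the file instead of symlinking
}
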
Example #14
def adjust_ssh(variable, iteration_count, step):
    """
    Adjust the sea surface height or land-ice pressure to be dynamically
    consistent with one another.  A series of short model runs is performed,
    each followed by an update to the SSH or the land-ice pressure.

    Parameters
    ----------
    variable : {'ssh', 'landIcePressure'}
        The variable to adjust

    iteration_count : int
        The number of iterations of adjustment

    step : compass.Step
        the step for performing SSH or land-ice pressure adjustment
    """
    cores = step.cores
    config = step.config
    logger = step.logger
    out_filename = None

    if variable not in ['ssh', 'landIcePressure']:
        raise ValueError("Unknown variable to modify: {}".format(variable))

    step.update_namelist_pio('namelist.ocean')
    partition(cores, config, logger)

    for iterIndex in range(iteration_count):
        logger.info(" * Iteration {}/{}".format(iterIndex + 1,
                                                iteration_count))

        in_filename = 'adjusting_init{}.nc'.format(iterIndex)
        out_filename = 'adjusting_init{}.nc'.format(iterIndex + 1)
        symlink(in_filename, 'adjusting_init.nc')

        logger.info("   * Running forward model")
        run_model(step, update_pio=False, partition_graph=False)
        logger.info("   - Complete")

        logger.info("   * Updating SSH or land-ice pressure")

        with xarray.open_dataset(in_filename) as ds:

            # keep the data set with Time for output
            ds_out = ds

            ds = ds.isel(Time=0)

            on_a_sphere = ds.attrs['on_a_sphere'].lower() == 'yes'

            initSSH = ds.ssh
            if 'minLevelCell' in ds:
                minLevelCell = ds.minLevelCell - 1
            else:
                minLevelCell = xarray.zeros_like(ds.maxLevelCell)

            with xarray.open_dataset('output_ssh.nc') as ds_ssh:
                # get the last time entry
                ds_ssh = ds_ssh.isel(Time=ds_ssh.sizes['Time'] - 1)
                finalSSH = ds_ssh.ssh
                topDensity = ds_ssh.density.isel(nVertLevels=minLevelCell)

            mask = numpy.logical_and(ds.maxLevelCell > 0,
                                     ds.modifyLandIcePressureMask == 1)

            deltaSSH = mask * (finalSSH - initSSH)

            # then, modify the SSH or land-ice pressure
            if variable == 'ssh':
                ssh = finalSSH.expand_dims(dim='Time', axis=0)
                ds_out['ssh'] = ssh
                # also update the landIceDraft variable, which will be used to
                # compensate for the SSH due to land-ice pressure when
                # computing sea-surface tilt
                ds_out['landIceDraft'] = ssh
                # we also need to stretch layerThickness to be compatible with
                # the new SSH
                stretch = ((finalSSH + ds.bottomDepth) /
                           (initSSH + ds.bottomDepth))
                ds_out['layerThickness'] = ds_out.layerThickness * stretch
                landIcePressure = ds.landIcePressure.values
            else:
                # Moving the SSH up or down by deltaSSH would change the
                # land-ice pressure by density(SSH)*g*deltaSSH.  If deltaSSH
                # is positive (moving up), the land-ice pressure is too small;
                # if deltaSSH is negative (moving down), it is too large, so
                # the sign of the correction makes sense.
                gravity = constants['SHR_CONST_G']
                deltaLandIcePressure = topDensity * gravity * deltaSSH

                landIcePressure = numpy.maximum(
                    0.0, ds.landIcePressure + deltaLandIcePressure)

                ds_out['landIcePressure'] = \
                    landIcePressure.expand_dims(dim='Time', axis=0)

                finalSSH = initSSH

            write_netcdf(ds_out, out_filename)

            # Write the largest change in SSH and its lon/lat to a file
            with open('maxDeltaSSH_{:03d}.log'.format(iterIndex), 'w') as \
                    log_file:

                mask = landIcePressure > 0.
                iCell = numpy.abs(deltaSSH.where(mask)).argmax().values

                ds_cell = ds.isel(nCells=iCell)

                if on_a_sphere:
                    coords = 'lon/lat: {:f} {:f}'.format(
                        numpy.rad2deg(ds_cell.lonCell.values),
                        numpy.rad2deg(ds_cell.latCell.values))
                else:
                    coords = 'x/y: {:f} {:f}'.format(
                        1e-3 * ds_cell.xCell.values,
                        1e-3 * ds_cell.yCell.values)
                string = 'deltaSSHMax: {:g}, {}'.format(
                    deltaSSH.isel(nCells=iCell).values, coords)
                logger.info('     {}'.format(string))
                log_file.write('{}\n'.format(string))
                string = 'ssh: {:g}, landIcePressure: {:g}'.format(
                    finalSSH.isel(nCells=iCell).values,
                    landIcePressure.isel(nCells=iCell).values)
                logger.info('     {}'.format(string))
                log_file.write('{}\n'.format(string))

        logger.info("   - Complete\n")

    if out_filename is not None:
        shutil.copy(out_filename, 'adjusted_init.nc')
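
The pressure update in the land-ice branch follows deltaLandIcePressure = topDensity * g * deltaSSH: if the SSH must move up (positive deltaSSH), the land-ice pressure was too small. A standalone numeric check with illustrative values:

rho_top = 1026.0   # kg m^-3, a typical top-layer ocean density
g = 9.80616        # m s^-2, SHR_CONST_G in E3SM's shared constants
delta_ssh = 0.05   # m, required change in sea surface height

delta_p = rho_top * g * delta_ssh
print('{:.1f} Pa'.format(delta_p))  # 503.1 Pa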