Example #1
def _make_transect_masks(mesh_name, suffix, fcMask, logger, cores,
                         subdivision_threshold=10e3):
    mesh_filename = 'restart.nc'

    geojson_filename = '{}.geojson'.format(suffix)
    mask_filename = '{}_{}.nc'.format(mesh_name, suffix)

    fcMask.to_geojson(geojson_filename)

    # these defaults may have been updated from config options -- pass them
    # along to the subprocess
    netcdf_format = mpas_tools.io.default_format
    netcdf_engine = mpas_tools.io.default_engine

    args = ['compute_mpas_transect_masks',
            '-m', mesh_filename,
            '-g', geojson_filename,
            '-o', mask_filename,
            '-t', 'edge',
            '-s', '{}'.format(subdivision_threshold),
            '--process_count', '{}'.format(cores),
            '--add_edge_sign',
            '--format', netcdf_format,
            '--engine', netcdf_engine]
    check_call(args, logger=logger)

    # make links in output directory
    output_dir = '../assembled_files/diagnostics/mpas_analysis/' \
                 'region_masks'
    symlink('../../../../diagnostics_files/{}'.format(mask_filename),
            '{}/{}'.format(output_dir, mask_filename))
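
For context, a hypothetical caller might look like the following; the mesh name, core count, and the 'Transport Transects' aggregator name are illustrative assumptions, and a restart.nc mesh file is assumed to already be in the working directory.

import logging

from geometric_features import GeometricFeatures
from geometric_features.aggregation import get_aggregator_by_name

logger = logging.getLogger('make_diagnostics_files')
gf = GeometricFeatures()

# aggregate the transects into a single feature collection
function, prefix, date = get_aggregator_by_name('Transport Transects')
fcMask = function(gf)

_make_transect_masks('EC30to60E2r2', suffix='{}{}'.format(prefix, date),
                     fcMask=fcMask, logger=logger, cores=8)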
Example #2
    def run(self):
        """
        Run this step of the test case
       """
        logger = self.logger
        section = self.config['enthalpy_benchmark']
        nx = section.getint('nx')
        ny = section.getint('ny')
        dc = section.getfloat('dc')
        levels = section.get('levels')

        dsMesh = make_planar_hex_mesh(nx=nx, ny=ny, dc=dc, nonperiodic_x=True,
                                      nonperiodic_y=True)

        write_netcdf(dsMesh, 'grid.nc')

        dsMesh = cull(dsMesh, logger=logger)
        dsMesh = convert(dsMesh, logger=logger)
        write_netcdf(dsMesh, 'mpas_grid.nc')

        args = ['create_landice_grid_from_generic_MPAS_grid.py',
                '-i', 'mpas_grid.nc',
                '-o', 'landice_grid.nc',
                '-l', levels,
                '--thermal']

        check_call(args, logger)

        make_graph_file(mesh_filename='landice_grid.nc',
                        graph_filename='graph.info')

        _setup_initial_conditions(section, 'landice_grid.nc')
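
The step reads all of its sizing options from the [enthalpy_benchmark] config section. A minimal sketch of what it expects, using plain configparser and purely illustrative values:

from configparser import ConfigParser

config = ConfigParser()
config.read_string("""
[enthalpy_benchmark]
nx = 30
ny = 34
dc = 1000.0
levels = 10
""")
section = config['enthalpy_benchmark']
print(section.getint('nx'), section.getfloat('dc'), section.get('levels'))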
Example #3
def _make_moc_masks(mesh_short_name, logger, cores):
    gf = GeometricFeatures()

    mesh_filename = 'restart.nc'

    function, prefix, date = get_aggregator_by_name('MOC Basins')
    fcMask = function(gf)

    suffix = '{}{}'.format(prefix, date)

    geojson_filename = '{}.geojson'.format(suffix)
    mask_filename = '{}_{}.nc'.format(mesh_short_name, suffix)

    fcMask.to_geojson(geojson_filename)

    # these defaults may have been updated from config options -- pass them
    # along to the subprocess
    netcdf_format = mpas_tools.io.default_format
    netcdf_engine = mpas_tools.io.default_engine

    args = ['compute_mpas_region_masks',
            '-m', mesh_filename,
            '-g', geojson_filename,
            '-o', mask_filename,
            '-t', 'cell',
            '--process_count', '{}'.format(cores),
            '--format', netcdf_format,
            '--engine', netcdf_engine]
    check_call(args, logger=logger)

    mask_and_transect_filename = '{}_mocBasinsAndTransects{}.nc'.format(
        mesh_short_name, date)

    dsMesh = xarray.open_dataset(mesh_filename)
    dsMask = xarray.open_dataset(mask_filename)

    dsMasksAndTransects = add_moc_southern_boundary_transects(
        dsMask, dsMesh, logger=logger)

    write_netcdf(dsMasksAndTransects, mask_and_transect_filename,
                 char_dim_name='StrLen')

    # make links in output directories (both inputdata and diagnostics)
    output_dir = '../assembled_files/inputdata/ocn/mpas-o/{}'.format(
        mesh_short_name)
    symlink(
        '../../../../../diagnostics_files/{}'.format(
            mask_and_transect_filename),
        '{}/{}'.format(output_dir, mask_and_transect_filename))

    output_dir = '../assembled_files/diagnostics/mpas_analysis/' \
                 'region_masks'
    symlink(
        '../../../../diagnostics_files/{}'.format(
            mask_and_transect_filename),
        '{}/{}'.format(output_dir, mask_and_transect_filename))
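
The symlink() helper used above is assumed to behave like an idempotent os.symlink(); a minimal stand-in follows (the real compass helper may differ in details):

import os

def symlink(target, link_name):
    # replace any existing file or link so repeated runs succeed
    if os.path.islink(link_name) or os.path.exists(link_name):
        os.remove(link_name)
    os.symlink(target, link_name)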
Example #4
    def run(self):
        """
        Run this step of the test case
        """
        logger = self.logger

        with xarray.open_dataset('restart.nc') as ds:
            mesh_short_name = ds.attrs['MPAS_Mesh_Short_Name']
            mesh_prefix = ds.attrs['MPAS_Mesh_Prefix']
            prefix = 'MPAS_Mesh_{}'.format(mesh_prefix)
            creation_date = ds.attrs['{}_Version_Creation_Date'.format(prefix)]

        os.makedirs('../assembled_files/inputdata/ocn/mpas-o/{}'.format(
            mesh_short_name), exist_ok=True)

        symlink('graph.info', 'mpas-o.graph.info.{}'.format(creation_date))

        with open('graph.info') as f:
            nCells = sum(1 for _ in f)
        min_graph_size = int(nCells / 6000)
        max_graph_size = int(nCells / 100)
        logger.info('Creating graph files between {} and {}'.format(
            min_graph_size, max_graph_size))
        n_power2 = 2**np.arange(1, 21)
        n_multiples12 = 12 * np.arange(1, 9)

        n = n_power2
        for power10 in range(3):
            n = np.concatenate([n, 10**power10 * n_multiples12])

        for size in n:
            if min_graph_size <= size <= max_graph_size:
                args = [
                    'gpmetis', 'mpas-o.graph.info.{}'.format(creation_date),
                    '{}'.format(size)
                ]
                check_call(args, logger)

        # create link in assembled files directory
        files = glob('mpas-o.graph.info.*')
        dest_path = '../assembled_files/inputdata/ocn/mpas-o/{}'.format(
            mesh_short_name)
        for file in files:
            symlink('../../../../../ocean_graph_partition/{}'.format(file),
                    '{}/{}'.format(dest_path, file))
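
As a worked example of the partition-size logic above, assuming a hypothetical mesh with 235,160 cells: the candidate sizes are powers of two plus multiples of 12 scaled by powers of ten, clipped to the allowed range.

import numpy as np

nCells = 235160                        # hypothetical cell count
min_graph_size = int(nCells / 6000)    # 39
max_graph_size = int(nCells / 100)     # 2351

n = 2**np.arange(1, 21)
for power10 in range(3):
    n = np.concatenate([n, 10**power10 * 12 * np.arange(1, 9)])

sizes = sorted(int(v) for v in n if min_graph_size <= v <= max_graph_size)
print(sizes)
# [48, 60, 64, 72, 84, 96, 120, 128, 240, 256, 360, 480, 512, 600,
#  720, 840, 960, 1024, 1200, 2048]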
Example #5
    def run(self):
        """
        Run this step of the test case
       """
        logger = self.logger
        section = self.config['eismint2']

        nx = section.getint('nx')
        ny = section.getint('ny')
        dc = section.getfloat('dc')

        dsMesh = make_planar_hex_mesh(nx=nx,
                                      ny=ny,
                                      dc=dc,
                                      nonperiodic_x=False,
                                      nonperiodic_y=False)

        dsMesh = convert(dsMesh, logger=logger)
        write_netcdf(dsMesh, 'mpas_grid.nc')
        dsMesh.close()

        radius = section.get('radius')
        args = [
            'define_cullMask.py', '-f', 'mpas_grid.nc', '-m', 'radius', '-d',
            radius
        ]

        check_call(args, logger)

        dsMesh = xarray.open_dataset('mpas_grid.nc')
        dsMesh = cull(dsMesh, logger=logger)
        dsMesh = convert(dsMesh, logger=logger)
        write_netcdf(dsMesh, 'mpas_grid2.nc')

        levels = section.get('levels')
        args = [
            'create_landice_grid_from_generic_MPAS_grid.py', '-i',
            'mpas_grid2.nc', '-o', 'landice_grid.nc', '-l', levels,
            '--thermal', '--beta'
        ]

        check_call(args, logger)

        make_graph_file(mesh_filename='landice_grid.nc',
                        graph_filename='graph.info')
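
One detail worth noting: radius and levels are read with get() rather than getfloat()/getint() because check_call() hands them straight to a subprocess, and every command-line argument must already be a string. A minimal illustration with made-up values:

from configparser import ConfigParser

config = ConfigParser()
config.read_string("[eismint2]\nradius = 500000.\nlevels = 10\n")
section = config['eismint2']

radius = section.get('radius')   # '500000.', already a string
args = ['define_cullMask.py', '-f', 'mpas_grid.nc', '-m', 'radius',
        '-d', radius]
assert all(isinstance(arg, str) for arg in args)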
Example #6
    def run(self):
        """
        Run this step of the test case
       """
        mesh_type = self.mesh_type
        logger = self.logger
        config = self.config
        section = config['dome']

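        # the 2000m mesh is built from scratch below; other mesh types are
        # presumably expected to provide mpas_grid.nc from a previous step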
        if mesh_type == '2000m':
            nx = section.getint('nx')
            ny = section.getint('ny')
            dc = section.getfloat('dc')

            dsMesh = make_planar_hex_mesh(nx=nx,
                                          ny=ny,
                                          dc=dc,
                                          nonperiodic_x=True,
                                          nonperiodic_y=True)

            write_netcdf(dsMesh, 'grid.nc')

            dsMesh = cull(dsMesh, logger=logger)
            dsMesh = convert(dsMesh, logger=logger)
            write_netcdf(dsMesh, 'mpas_grid.nc')

        levels = section.get('levels')
        args = [
            'create_landice_grid_from_generic_MPAS_grid.py', '-i',
            'mpas_grid.nc', '-o', 'landice_grid.nc', '-l', levels
        ]

        check_call(args, logger)

        make_graph_file(mesh_filename='landice_grid.nc',
                        graph_filename='graph.info')

        _setup_dome_initial_conditions(config,
                                       logger,
                                       filename='landice_grid.nc')
Example #7
def partition(cores, config, logger, graph_file='graph.info'):
    """
    Partition the domain for the requested number of cores

    Parameters
    ----------
    cores : int
        The number of cores that the model should be run on

    config : configparser.ConfigParser
        Configuration options for the test case, used to get the partitioning
        executable

    logger : logging.Logger
        A logger for output from the step that is calling this function

    graph_file : str, optional
        The name of the graph file to partition

    """
    if cores > 1:
        executable = config.get('parallel', 'partition_executable')
        args = [executable, graph_file, '{}'.format(cores)]
        check_call(args, logger)
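
Hypothetical usage, assuming a config whose [parallel] section names the partitioning executable (gpmetis here) and that graph.info already exists in the working directory:

import configparser
import logging

config = configparser.ConfigParser()
config.read_string("[parallel]\npartition_executable = gpmetis\n")
logger = logging.getLogger('partition')

partition(cores=128, config=config, logger=logger)
# gpmetis writes the partition to graph.info.part.128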
Example #8
def jigsaw_driver(cellWidth,
                  x,
                  y,
                  on_sphere=True,
                  earth_radius=6371.0e3,
                  geom_points=None,
                  geom_edges=None,
                  logger=None):
    """
    A function for building a jigsaw mesh

    Parameters
    ----------
    cellWidth : ndarray
        The size of each cell in the resulting mesh as a function of space

    x, y : ndarray
        The x and y coordinates of each point in the cellWidth array (lon and
        lat for spherical mesh)

    on_sphere : bool, optional
        Whether this mesh is spherical or planar

    earth_radius : float, optional
        Earth radius in meters

    geom_points : ndarray, optional
        list of point coordinates for bounding polygon for planar mesh

    geom_edges : ndarray, optional
        list of edges between points in geom_points that define the bounding polygon

    logger : logging.Logger, optional
        A logger for the output if not stdout
    """
    # Authors
    # -------
    # Mark Petersen, Phillip Wolfram, Xylar Asay-Davis

    # setup files for JIGSAW
    opts = jigsawpy.jigsaw_jig_t()
    opts.geom_file = 'mesh.msh'
    opts.jcfg_file = 'mesh.jig'
    opts.mesh_file = 'mesh-MESH.msh'
    opts.hfun_file = 'mesh-HFUN.msh'

    # save HFUN data to file
    hmat = jigsawpy.jigsaw_msh_t()
    if on_sphere:
        hmat.mshID = 'ELLIPSOID-GRID'
        hmat.xgrid = numpy.radians(x)
        hmat.ygrid = numpy.radians(y)
    else:
        hmat.mshID = 'EUCLIDEAN-GRID'
        hmat.xgrid = x
        hmat.ygrid = y
    hmat.value = cellWidth
    jigsawpy.savemsh(opts.hfun_file, hmat)

    # define JIGSAW geometry
    geom = jigsawpy.jigsaw_msh_t()
    if on_sphere:
        geom.mshID = 'ELLIPSOID-MESH'
        geom.radii = earth_radius * 1e-3 * numpy.ones(3, float)
    else:
        geom.mshID = 'EUCLIDEAN-MESH'
        geom.vert2 = geom_points
        geom.edge2 = geom_edges
    jigsawpy.savemsh(opts.geom_file, geom)

    # build mesh via JIGSAW!
    opts.hfun_scal = 'absolute'
    opts.hfun_hmax = float("inf")
    opts.hfun_hmin = 0.0
    opts.mesh_dims = +2  # 2-dim. simplexes
    opts.optm_qlim = 0.9375
    opts.verbosity = +1

    savejig(opts.jcfg_file, opts)
    check_call(['jigsaw', opts.jcfg_file], logger=logger)
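
To inspect the triangulation JIGSAW writes, the mesh file can be read back with jigsawpy; a minimal sketch, assuming the jigsaw run above succeeded:

import jigsawpy

mesh = jigsawpy.jigsaw_msh_t()
jigsawpy.loadmsh('mesh-MESH.msh', mesh)

print(mesh.vert2['coord'].shape)   # (nVertices, 2) vertex coordinates
print(mesh.tria3['index'].shape)   # (nTriangles, 3) triangle connectivity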
Example #9
def run_model(step,
              update_pio=True,
              partition_graph=True,
              graph_file='graph.info',
              namelist=None,
              streams=None):
    """
    Run the model after determining the number of cores

    Parameters
    ----------
    step : compass.Step
        a step

    update_pio : bool, optional
        Whether to modify the namelist so the number of PIO tasks and the
        stride between them is consistent with the number of nodes and cores
        (one PIO task per node).

    partition_graph : bool, optional
        Whether to partition the domain for the requested number of cores.  If
        so, the partitioning executable is taken from the ``partition`` option
        of the ``[executables]`` config section.

    graph_file : str, optional
        The name of the graph file to partition

    namelist : str, optional
        The name of the namelist file, default is ``namelist.<core>``

    streams : str, optional
        The name of the streams file, default is ``streams.<core>``
    """
    mpas_core = step.mpas_core.name
    cores = step.cores
    threads = step.threads
    config = step.config
    logger = step.logger

    if namelist is None:
        namelist = 'namelist.{}'.format(mpas_core)

    if streams is None:
        streams = 'streams.{}'.format(mpas_core)

    if update_pio:
        step.update_namelist_pio(namelist)

    if partition_graph:
        partition(cores, config, logger, graph_file=graph_file)

    os.environ['OMP_NUM_THREADS'] = '{}'.format(threads)

    parallel_executable = config.get('parallel', 'parallel_executable')
    model = config.get('executables', 'model')
    model_basename = os.path.basename(model)

    # split the parallel executable into constituents in case it includes flags
    args = parallel_executable.split(' ')
    args.extend([
        '-n', '{}'.format(cores), './{}'.format(model_basename), '-n',
        namelist, '-s', streams
    ])

    check_call(args, logger)
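
As a worked example of the command assembled above, assuming a hypothetical setup where parallel_executable is 'srun', the model executable is 'ocean_model', and the step runs on 64 cores:

parallel_executable = 'srun'
cores = 64
model_basename = 'ocean_model'

args = parallel_executable.split(' ')
args.extend(['-n', '{}'.format(cores), './{}'.format(model_basename),
             '-n', 'namelist.ocean', '-s', 'streams.ocean'])
print(' '.join(args))
# srun -n 64 ./ocean_model -n namelist.ocean -s streams.ocean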
Example #10
    def run(self):
        """
        Run this step of the test case
        """
        logger = self.logger
        config = self.config
        section = config['humboldt']

        logger.info('calling build_cell_width')
        cell_width, x1, y1, geom_points, geom_edges = self.build_cell_width()
        logger.info('calling build_planar_mesh')
        build_planar_mesh(cell_width,
                          x1,
                          y1,
                          geom_points,
                          geom_edges,
                          logger=logger)
        dsMesh = xarray.open_dataset('base_mesh.nc')
        logger.info('culling mesh')
        dsMesh = cull(dsMesh, logger=logger)
        logger.info('converting to MPAS mesh')
        dsMesh = convert(dsMesh, logger=logger)
        logger.info('writing grid_converted.nc')
        write_netcdf(dsMesh, 'grid_converted.nc')
        # get the number of vertical levels from the config file
        levels = section.get('levels')
        logger.info('calling create_landice_grid_from_generic_MPAS_grid.py')
        args = [
            'create_landice_grid_from_generic_MPAS_grid.py', '-i',
            'grid_converted.nc', '-o', 'gis_1km_preCull.nc', '-l', levels,
            '-v', 'glimmer'
        ]
        check_call(args, logger=logger)

        # This step uses a subset of the whole Greenland dataset trimmed to
        # the region around Humboldt Glacier, to speed up interpolation.
        # This could also be replaced with the full Greenland Ice Sheet
        # dataset.
        logger.info('calling interpolate_to_mpasli_grid.py')
        args = [
            'interpolate_to_mpasli_grid.py', '-s',
            'humboldt_1km_2020_04_20.epsg3413.icesheetonly.nc', '-d',
            'gis_1km_preCull.nc', '-m', 'b', '-t'
        ]

        check_call(args, logger=logger)

        # This step is only necessary if you wish to cull a certain
        # distance from the ice margin, within the bounds defined by
        # the GeoJSON file.
        cullDistance = section.get('cullDistance')
        if float(cullDistance) > 0.:
            logger.info('calling define_cullMask.py')
            args = [
                'define_cullMask.py', '-f', 'gis_1km_preCull.nc',
                '-m', 'distance', '-d', cullDistance
            ]

            check_call(args, logger=logger)
        else:
            logger.info('cullDistance <= 0 in config file. '
                        'Will not cull by distance to margin. \n')

        # This step is only necessary because the GeoJSON region
        # is defined by lat-lon.
        logger.info('calling set_lat_lon_fields_in_planar_grid.py')
        args = [
            'set_lat_lon_fields_in_planar_grid.py', '-f', 'gis_1km_preCull.nc',
            '-p', 'gis-gimp'
        ]

        check_call(args, logger=logger)

        logger.info('calling MpasMaskCreator.x')
        args = [
            'MpasMaskCreator.x', 'gis_1km_preCull.nc', 'humboldt_mask.nc',
            '-f', 'Humboldt.geojson'
        ]

        check_call(args, logger=logger)

        logger.info('culling to geojson file')
        dsMesh = xarray.open_dataset('gis_1km_preCull.nc')
        humboldtMask = xarray.open_dataset('humboldt_mask.nc')
        dsMesh = cull(dsMesh, dsInverse=humboldtMask, logger=logger)
        write_netcdf(dsMesh, 'humboldt_culled.nc')

        logger.info('Marking horns for culling')
        args = ['mark_horns_for_culling.py', '-f', 'humboldt_culled.nc']
        check_call(args, logger=logger)

        logger.info('culling and converting')
        dsMesh = xarray.open_dataset('humboldt_culled.nc')
        dsMesh = cull(dsMesh, logger=logger)
        dsMesh = convert(dsMesh, logger=logger)
        write_netcdf(dsMesh, 'humboldt_dehorned.nc')

        logger.info('calling create_landice_grid_from_generic_MPAS_grid.py')
        args = [
            'create_landice_grid_from_generic_MPAS_grid.py', '-i',
            'humboldt_dehorned.nc', '-o', 'Humboldt_1to10km.nc', '-l', levels,
            '-v', 'glimmer', '--beta', '--thermal', '--obs', '--diri'
        ]

        check_call(args, logger=logger)

        logger.info('calling interpolate_to_mpasli_grid.py')
        args = [
            'interpolate_to_mpasli_grid.py', '-s',
            'humboldt_1km_2020_04_20.epsg3413.icesheetonly.nc', '-d',
            'Humboldt_1to10km.nc', '-m', 'b', '-t'
        ]
        check_call(args, logger=logger)

        logger.info('Marking domain boundaries dirichlet')
        args = [
            'mark_domain_boundaries_dirichlet.py', '-f', 'Humboldt_1to10km.nc'
        ]
        check_call(args, logger=logger)

        logger.info('calling set_lat_lon_fields_in_planar_grid.py')
        args = [
            'set_lat_lon_fields_in_planar_grid.py', '-f',
            'Humboldt_1to10km.nc', '-p', 'gis-gimp'
        ]
        check_call(args, logger=logger)

        logger.info('creating graph.info')
        make_graph_file(mesh_filename='Humboldt_1to10km.nc',
                        graph_filename='graph.info')
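
The step above uses both culling modes of mpas_tools.mesh.conversion.cull(); as a sketch of the distinction, assuming the usual semantics (dsMask drops the masked cells, dsInverse drops the unmasked ones):

import xarray
from mpas_tools.mesh.conversion import cull

dsMesh = xarray.open_dataset('gis_1km_preCull.nc')
dsRegion = xarray.open_dataset('humboldt_mask.nc')

dsInside = cull(dsMesh, dsInverse=dsRegion)  # keep cells inside the region
dsOutside = cull(dsMesh, dsMask=dsRegion)    # keep cells outside the region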
Example #11
def _cull_mesh_with_logging(logger, with_cavities, with_critical_passages,
                            custom_critical_passages, custom_land_blockages,
                            preserve_floodplain, use_progress_bar,
                            process_count):
    """ Cull the mesh once the logger is defined for sure """

    critical_passages = with_critical_passages or \
        (custom_critical_passages is not None)

    land_blockages = with_critical_passages or \
        (custom_land_blockages is not None)

    gf = GeometricFeatures()

    # start with the land coverage from Natural Earth
    fcLandCoverage = gf.read(componentName='natural_earth',
                             objectType='region',
                             featureNames=['Land Coverage'])

    # remove the region south of 60S so we can replace it based on ice-sheet
    # topography
    fcSouthMask = gf.read(componentName='ocean', objectType='region',
                          featureNames=['Global Ocean 90S to 60S'])

    fcLandCoverage = fcLandCoverage.difference(fcSouthMask)

    # Add "land" coverage from either the full ice sheet or just the grounded
    # part
    if with_cavities:
        fcAntarcticLand = gf.read(
            componentName='bedmachine', objectType='region',
            featureNames=['AntarcticGroundedIceCoverage'])
    else:
        fcAntarcticLand = gf.read(
            componentName='bedmachine', objectType='region',
            featureNames=['AntarcticIceCoverage'])

    fcLandCoverage.merge(fcAntarcticLand)

    # save the feature collection to a geojson file
    fcLandCoverage.to_geojson('land_coverage.geojson')

    # these defaults may have been updated from config options -- pass them
    # along to the subprocess
    netcdf_format = mpas_tools.io.default_format
    netcdf_engine = mpas_tools.io.default_engine

    # Create the land mask based on the land coverage, i.e. coastline data
    args = ['compute_mpas_region_masks',
            '-m', 'base_mesh.nc',
            '-g', 'land_coverage.geojson',
            '-o', 'land_mask.nc',
            '-t', 'cell',
            '--process_count', '{}'.format(process_count),
            '--format', netcdf_format,
            '--engine', netcdf_engine]
    check_call(args, logger=logger)

    dsBaseMesh = xarray.open_dataset('base_mesh.nc')
    dsLandMask = xarray.open_dataset('land_mask.nc')
    dsLandMask = add_land_locked_cells_to_mask(dsLandMask, dsBaseMesh,
                                               latitude_threshold=43.0,
                                               nSweeps=20)

    # create seed points for a flood fill of the ocean
    # use all points in the ocean directory, on the assumption that they are,
    # in fact, in the ocean
    fcSeed = gf.read(componentName='ocean', objectType='point',
                     tags=['seed_point'])

    if land_blockages:
        if with_critical_passages:
            # merge transects for critical land blockages into
            # critical_land_blockages.geojson
            fcCritBlockages = gf.read(
                componentName='ocean', objectType='transect',
                tags=['Critical_Land_Blockage'])
        else:
            fcCritBlockages = FeatureCollection()

        if custom_land_blockages is not None:
            fcCritBlockages.merge(read_feature_collection(
                custom_land_blockages))

        # create masks from the transects
        fcCritBlockages.to_geojson('critical_blockages.geojson')
        args = ['compute_mpas_transect_masks',
                '-m', 'base_mesh.nc',
                '-g', 'critical_blockages.geojson',
                '-o', 'critical_blockages.nc',
                '-t', 'cell',
                '-s', '10e3',
                '--process_count', '{}'.format(process_count),
                '--format', netcdf_format,
                '--engine', netcdf_engine]
        check_call(args, logger=logger)
        dsCritBlockMask = xarray.open_dataset('critical_blockages.nc')

        dsLandMask = add_critical_land_blockages(dsLandMask, dsCritBlockMask)

    fcCritPassages = FeatureCollection()
    dsPreserve = []

    if critical_passages:
        if with_critical_passages:
            # merge transects for critical passages into fcCritPassages
            fcCritPassages.merge(gf.read(componentName='ocean',
                                         objectType='transect',
                                         tags=['Critical_Passage']))

        if custom_critical_passages is not None:
            fcCritPassages.merge(read_feature_collection(
                custom_critical_passages))

        # create masks from the transects
        fcCritPassages.to_geojson('critical_passages.geojson')
        args = ['compute_mpas_transect_masks',
                '-m', 'base_mesh.nc',
                '-g', 'critical_passages.geojson',
                '-o', 'critical_passages.nc',
                '-t', 'cell', 'edge',
                '-s', '10e3',
                '--process_count', '{}'.format(process_count),
                '--format', netcdf_format,
                '--engine', netcdf_engine]
        check_call(args, logger=logger)
        dsCritPassMask = xarray.open_dataset('critical_passages.nc')

        # Alter critical passages to be at least two cells wide, to avoid sea
        # ice blockage
        dsCritPassMask = widen_transect_edge_masks(dsCritPassMask, dsBaseMesh,
                                                   latitude_threshold=43.0)

        dsPreserve.append(dsCritPassMask)

    if preserve_floodplain:
        dsPreserve.append(dsBaseMesh)

    # cull the mesh based on the land mask
    dsCulledMesh = cull(dsBaseMesh, dsMask=dsLandMask,
                        dsPreserve=dsPreserve, logger=logger)

    # create a mask for the flood fill seed points
    dsSeedMask = compute_mpas_flood_fill_mask(dsMesh=dsCulledMesh,
                                              fcSeed=fcSeed,
                                              logger=logger)

    # cull the mesh a second time using a flood fill from the seed points
    dsCulledMesh = cull(dsCulledMesh, dsInverse=dsSeedMask,
                        graphInfoFileName='culled_graph.info', logger=logger)
    write_netcdf(dsCulledMesh, 'culled_mesh.nc')

    if critical_passages:
        # make a new version of the critical passages mask on the culled mesh
        fcCritPassages.to_geojson('critical_passages.geojson')
        args = ['compute_mpas_transect_masks',
                '-m', 'culled_mesh.nc',
                '-g', 'critical_passages.geojson',
                '-o', 'critical_passages_mask_final.nc',
                '-t', 'cell',
                '-s', '10e3',
                '--process_count', '{}'.format(process_count),
                '--format', netcdf_format,
                '--engine', netcdf_engine]
        check_call(args, logger=logger)

    if with_cavities:
        fcAntarcticIce = gf.read(
            componentName='bedmachine', objectType='region',
            featureNames=['AntarcticIceCoverage'])

        fcAntarcticIce.to_geojson('ice_coverage.geojson')
        args = ['compute_mpas_region_masks',
                '-m', 'culled_mesh.nc',
                '-g', 'ice_coverage.geojson',
                '-o', 'ice_coverage.nc',
                '-t', 'cell',
                '--process_count', '{}'.format(process_count),
                '--format', netcdf_format,
                '--engine', netcdf_engine]
        check_call(args, logger=logger)
        dsMask = xarray.open_dataset('ice_coverage.nc')

        landIceMask = dsMask.regionCellMasks.isel(nRegions=0)
        dsLandIceMask = xarray.Dataset()
        dsLandIceMask['landIceMask'] = landIceMask

        write_netcdf(dsLandIceMask, 'land_ice_mask.nc')

        dsLandIceCulledMesh = cull(dsCulledMesh, dsMask=dsMask, logger=logger)
        write_netcdf(dsLandIceCulledMesh, 'no_ISC_culled_mesh.nc')

    extract_vtk(ignore_time=True, dimension_list=['maxEdges='],
                variable_list=['allOnCells'],
                filename_pattern='culled_mesh.nc',
                out_dir='culled_mesh_vtk',
                use_progress_bar=use_progress_bar)

    if with_cavities:
        extract_vtk(ignore_time=True, dimension_list=['maxEdges='],
                    variable_list=['allOnCells'],
                    filename_pattern='no_ISC_culled_mesh.nc',
                    out_dir='no_ISC_culled_mesh_vtk',
                    use_progress_bar=use_progress_bar)
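
A hypothetical driver for the function above; all flag values shown are illustrative, and base_mesh.nc is assumed to exist in the working directory:

import logging

logging.basicConfig(level=logging.INFO)
logger = logging.getLogger('cull_mesh')

_cull_mesh_with_logging(logger=logger,
                        with_cavities=True,
                        with_critical_passages=True,
                        custom_critical_passages=None,
                        custom_land_blockages=None,
                        preserve_floodplain=False,
                        use_progress_bar=True,
                        process_count=4)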
Example #12
def _setup_eismint2_initial_conditions(logger, experiment, filename):
    """
    Add the initial condition for the given EISMINT2 experiment to the given
    MPAS mesh file

    Parameters
    ----------
    logger : logging.Logger
        A logger for output from the step

    experiment : {'a', 'b', 'c', 'd', 'f', 'g'}
        The name of the experiment

    filename : str
        file to add the initial condition to

    """
    if experiment in ('a', 'b', 'c', 'd', 'f', 'g'):
        logger.info('Setting up EISMINT2 Experiment {}'.format(experiment))
    else:
        raise ValueError("Invalid experiment specified: {}.  Please specify "
                         "an experiment between 'a' and 'g', excluding "
                         "'e'".format(experiment))

    # Setup dictionaries of parameter values for each experiment
    # Mmax: Maximum SMB at center of domain (m a-1)
    # Sb: gradient of SMB with horizontal distance (m a-1 km-1)
    # Rel: radial distance from summit where SMB = 0 (km)
    # Tmin: surface temperature at summit (K)
    # ST: gradient of air temperature with horizontal distance (K km-1)
    # beta: basal traction coefficient (Pa m-1 a)
    #       Note: beta is the inverse of parameter B in Payne et al. (2000)
    exp_params = {'a': {'Mmax': 0.5, 'Sb': 10.0**-2, 'Rel': 450.0,
                        'Tmin': 238.15, 'ST': 1.67e-2, 'beta': 1.0e8},
                  'b': {'Mmax': 0.5, 'Sb': 10.0**-2, 'Rel': 450.0,
                        'Tmin': 243.15, 'ST': 1.67e-2, 'beta': 1.0e8},
                  'c': {'Mmax': 0.25, 'Sb': 10.0**-2, 'Rel': 425.0,
                        'Tmin': 238.15, 'ST': 1.67e-2, 'beta': 1.0e8},
                  'd': {'Mmax': 0.5, 'Sb': 10.0**-2, 'Rel': 425.0,
                        'Tmin': 238.15, 'ST': 1.67e-2, 'beta': 1.0e8},
                  'f': {'Mmax': 0.5, 'Sb': 10.0**-2, 'Rel': 450.0,
                        'Tmin': 223.15, 'ST': 1.67e-2, 'beta': 1.0e8},
                  'g': {'Mmax': 0.5, 'Sb': 10.0**-2, 'Rel': 450.0,
                        'Tmin': 238.15, 'ST': 1.67e-2, 'beta': 1.0e3}}
    xsummit = 750000.0
    ysummit = 750000.0
    rhoi = 910.0
    scyr = 3600.0 * 24.0 * 365.0

    # Some experiments start from scratch; others start from the steady
    # state of a previous experiment
    if experiment in ('a', 'f', 'g'):
        # we will build the mesh from scratch
        shutil.copyfile('landice_grid.nc', filename)
    else:
        # use the final state of experiment A
        args = ['ncks', '-O', '-d', 'Time,-1', 'experiment_a_output.nc',
                filename]
        check_call(args, logger)

    # Open the new input file, get needed dimensions & variables
    gridfile = NetCDFFile(filename, 'r+')
    nVertLevels = len(gridfile.dimensions['nVertLevels'])
    # Get variables
    xCell = gridfile.variables['xCell'][:]
    yCell = gridfile.variables['yCell'][:]
    xEdge = gridfile.variables['xEdge'][:]
    yEdge = gridfile.variables['yEdge'][:]
    xVertex = gridfile.variables['xVertex'][:]
    yVertex = gridfile.variables['yVertex'][:]

    # ===================
    # initial conditions
    # ===================
    # If starting from scratch, set up dimension variables and initial
    # condition variables
    if experiment in ('a', 'f', 'g'):
        # Find center of domain
        x0 = xCell[:].min() + 0.5 * (xCell[:].max() - xCell[:].min())
        y0 = yCell[:].min() + 0.5 * (yCell[:].max() - yCell[:].min())
        # Calculate distance of each cell center from dome center
        r = ((xCell[:] - x0)**2 + (yCell[:] - y0)**2)**0.5

        # Center the dome in the center of the cell that is closest to the
        # center of the domain.
        centerCellIndex = numpy.abs(r[:]).argmin()
        # EISMINT-2 puts the center of the domain at 750,750 km instead of 0,0.
        # Adjust to use that origin.

        xShift = -1.0 * xCell[centerCellIndex] + xsummit
        yShift = -1.0 * yCell[centerCellIndex] + ysummit
        xCell[:] = xCell[:] + xShift
        yCell[:] = yCell[:] + yShift
        xEdge[:] = xEdge[:] + xShift
        yEdge[:] = yEdge[:] + yShift
        xVertex[:] = xVertex[:] + xShift
        yVertex[:] = yVertex[:] + yShift
        gridfile.variables['xCell'][:] = xCell[:]
        gridfile.variables['yCell'][:] = yCell[:]
        gridfile.variables['xEdge'][:] = xEdge[:]
        gridfile.variables['yEdge'][:] = yEdge[:]
        gridfile.variables['xVertex'][:] = xVertex[:]
        gridfile.variables['yVertex'][:] = yVertex[:]

        # Assign initial condition variable values for EISMINT-2 experiment
        # Start with no ice
        gridfile.variables['thickness'][:] = 0.0
        # flat bed at sea level
        gridfile.variables['bedTopography'][:] = 0.0
        # constant, arbitrary temperature, degrees K (doesn't matter since
        # there is no ice initially)
        gridfile.variables['temperature'][:] = 273.15
        # Set up layerThicknessFractions
        gridfile.variables['layerThicknessFractions'][:] = 1.0 / nVertLevels
    else:
        StrLen = len(gridfile.dimensions['StrLen'])
        gridfile.variables['xtime'][0, :] = list(
            '000000-01-01_00:00:00'.ljust(StrLen, ' '))

    # Now update/set origin location and distance array
    r = ((xCell[:] - xsummit)**2 + (yCell[:] - ysummit)**2)**0.5

    # ===================
    # boundary conditions
    # ===================
    # Define values prescribed by Payne et al. 2000 paper.

    params = exp_params[experiment]
    logger.info("Parameters for this experiment: {}".format(params))

    # SMB field specified by EISMINT, constant in time for EISMINT2
    # It is a function of geographical position (not elevation)

    # maximum accumulation rate [m/yr] converted to [m/s]
    Mmax = params['Mmax'] / scyr
    # gradient of accumulation rate change with horizontal distance  [m/a/km]
    # converted to [m/s/m]
    Sb = params['Sb'] / scyr / 1000.0
    # accumulation rate at 0 position  [km] converted to [m]
    Rel = params['Rel'] * 1000.0

    SMB = numpy.minimum(Mmax, Sb * (Rel - r))  # [m ice/s]
    SMB = SMB * rhoi  # in kg/m2/s
    if 'sfcMassBal' in gridfile.variables:
        sfcMassBalVar = gridfile.variables['sfcMassBal']
    else:
        datatype = gridfile.variables[
            'xCell'].dtype  # Get the datatype for double precision float
        sfcMassBalVar = gridfile.createVariable('sfcMassBal', datatype,
                                                ('Time', 'nCells'))
    sfcMassBalVar[0, :] = SMB

    # Surface temperature

    # minimum surface air temperature [K]
    Tmin = params['Tmin']
    # gradient of air temperature change with horizontal distance [K/km]
    # converted to [K/m]
    ST = params['ST'] / 1000.0

    if 'surfaceAirTemperature' in gridfile.variables:
        surfaceAirTemperatureVar = gridfile.variables['surfaceAirTemperature']
    else:
        datatype = gridfile.variables[
            'xCell'].dtype  # Get the datatype for double precision float
        surfaceAirTemperatureVar = gridfile.createVariable(
            'surfaceAirTemperature', datatype, ('Time', 'nCells'))
    surfaceAirTemperatureVar[0, :] = Tmin + ST * r

    # beta
    beta = params['beta']
    if 'beta' in gridfile.variables:
        betaVar = gridfile.variables['beta']
    else:
        datatype = gridfile.variables[
            'xCell'].dtype  # Get the datatype for double precision float
        betaVar = gridfile.createVariable('beta', datatype, ('Time', 'nCells'))
    betaVar[0, :] = beta

    gridfile.close()
    logger.info('Successfully added initial conditions for EISMINT2, '
                'experiment {} to the file: {}'.format(experiment, filename))
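
As a worked check of the SMB profile above for experiment 'a' (parameter values taken from exp_params; distances in meters):

import numpy

scyr = 3600.0 * 24.0 * 365.0
Mmax = 0.5 / scyr            # max accumulation rate [m ice/s]
Sb = 1.0e-2 / scyr / 1000.0  # SMB gradient [m ice/s/m]
Rel = 450.0 * 1000.0         # radius where SMB crosses zero [m]

r = numpy.array([0.0, 200.0e3, 450.0e3, 600.0e3])
SMB = numpy.minimum(Mmax, Sb * (Rel - r))
print(SMB * scyr)  # [ 0.5  0.5  0.  -1.5] m/yr: capped at Mmax, zero at Rel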