Example #1
    def run(self):
        """
        Run this step of the test case
       """
        logger = self.logger
        section = self.config['enthalpy_benchmark']
        nx = section.getint('nx')
        ny = section.getint('ny')
        dc = section.getfloat('dc')
        levels = section.get('levels')

        dsMesh = make_planar_hex_mesh(nx=nx, ny=ny, dc=dc, nonperiodic_x=True,
                                      nonperiodic_y=True)

        write_netcdf(dsMesh, 'grid.nc')

        dsMesh = cull(dsMesh, logger=logger)
        dsMesh = convert(dsMesh, logger=logger)
        write_netcdf(dsMesh, 'mpas_grid.nc')

        args = ['create_landice_grid_from_generic_MPAS_grid.py',
                '-i', 'mpas_grid.nc',
                '-o', 'landice_grid.nc',
                '-l', levels,
                '--thermal']

        check_call(args, logger)

        make_graph_file(mesh_filename='landice_grid.nc',
                        graph_filename='graph.info')

        _setup_initial_conditions(section, 'landice_grid.nc')
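
These step excerpts omit their imports. As orientation, here is a minimal standalone sketch of the same planar-hex mesh workflow; the module paths follow MPAS-Tools conventions and the nx/ny/dc values are stand-ins rather than values from any real config file:

from mpas_tools.planar_hex import make_planar_hex_mesh
from mpas_tools.mesh.conversion import convert, cull
from mpas_tools.io import write_netcdf

# stand-in resolution; the steps above read nx, ny, dc from their config section
dsMesh = make_planar_hex_mesh(nx=30, ny=34, dc=2000.0,
                              nonperiodic_x=True, nonperiodic_y=True)
write_netcdf(dsMesh, 'grid.nc')

# cull() removes the cells flagged for removal (the padding that makes the mesh
# non-periodic) and convert() rebuilds a spec-compliant MPAS mesh; both wrap the
# compiled MPAS mesh-conversion tools shipped with the mpas_tools package
dsMesh = cull(dsMesh)
dsMesh = convert(dsMesh)
write_netcdf(dsMesh, 'mpas_grid.nc')
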
Example #2
    def run(self):
        """
        Run this step of the test case
        """
        config = self.config
        logger = self.logger

        section = config['gotm']
        nx = section.getint('nx')
        ny = section.getint('ny')
        dc = section.getfloat('dc')

        dsMesh = make_planar_hex_mesh(nx=nx, ny=ny, dc=dc, nonperiodic_x=False,
                                      nonperiodic_y=False)
        write_netcdf(dsMesh, 'grid.nc')

        dsMesh = cull(dsMesh, logger=logger)
        dsMesh = convert(dsMesh, graphInfoFileName='graph.info',
                         logger=logger)
        write_netcdf(dsMesh, 'mesh.nc')

        replacements = dict()
        replacements['config_periodic_planar_vert_levels'] = \
            config.get('gotm', 'vert_levels')
        replacements['config_periodic_planar_bottom_depth'] = \
            config.get('gotm', 'bottom_depth')
        self.update_namelist_at_runtime(options=replacements)

        run_model(self)
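
The section.getint / getfloat / get calls follow Python configparser semantics: get returns the raw string (handy for passing straight to command-line tools or namelist replacements), while getint / getfloat / getboolean parse it. A standalone illustration using only the standard library; the values shown are stand-ins:

import configparser

config = configparser.ConfigParser()
config.read_string("""
[gotm]
nx = 4
ny = 4
dc = 2500.0
vert_levels = 250
""")

section = config['gotm']
nx = section.getint('nx')            # -> 4 (int)
dc = section.getfloat('dc')          # -> 2500.0 (float)
levels = section.get('vert_levels')  # -> '250' (str)
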
Example #3
def test_conversion():
    dsMesh = xarray.open_dataset(
        'mesh_tools/mesh_conversion_tools/test/mesh.QU.1920km.151026.nc')
    dsMesh = convert(dsIn=dsMesh)
    write_netcdf(dsMesh, 'mesh.nc')

    dsMask = xarray.open_dataset(
        'mesh_tools/mesh_conversion_tools/test/land_mask_final.nc')
    dsCulled = cull(dsIn=dsMesh, dsMask=dsMask)
    write_netcdf(dsCulled, 'culled_mesh.nc')

    fcMask = read_feature_collection(
        'mesh_tools/mesh_conversion_tools/test/Arctic_Ocean.geojson')
    dsMask = mask(dsMesh=dsMesh, fcMask=fcMask)
    write_netcdf(dsMask, 'arctic_mask.nc')
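
For reference, the imports this test relies on, inferred from the calls above (the data paths in the test point into an MPAS-Tools repository checkout):

import xarray
from geometric_features import read_feature_collection
from mpas_tools.mesh.conversion import convert, cull, mask
from mpas_tools.io import write_netcdf
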
Example #4
    def run(self):
        """
        Run this step of the test case
       """
        logger = self.logger
        section = self.config['eismint2']

        nx = section.getint('nx')
        ny = section.getint('ny')
        dc = section.getfloat('dc')

        dsMesh = make_planar_hex_mesh(nx=nx,
                                      ny=ny,
                                      dc=dc,
                                      nonperiodic_x=False,
                                      nonperiodic_y=False)

        dsMesh = convert(dsMesh, logger=logger)
        write_netcdf(dsMesh, 'mpas_grid.nc')
        dsMesh.close()

        radius = section.get('radius')
        args = [
            'define_cullMask.py', '-f', 'mpas_grid.nc', '-m', 'radius', '-d',
            radius
        ]

        check_call(args, logger)

        dsMesh = xarray.open_dataset('mpas_grid.nc')
        dsMesh = cull(dsMesh, logger=logger)
        dsMesh = convert(dsMesh, logger=logger)
        write_netcdf(dsMesh, 'mpas_grid2.nc')

        levels = section.get('levels')
        args = [
            'create_landice_grid_from_generic_MPAS_grid.py', '-i',
            'mpas_grid2.nc', '-o', 'landice_grid.nc', '-l', levels,
            '--thermal', '--beta'
        ]

        check_call(args, logger)

        make_graph_file(mesh_filename='landice_grid.nc',
                        graph_filename='graph.info')
Example #5
    def run(self):
        """
        Run this step of the test case
        """

        config = self.config
        section = config['soma']
        options = dict(
            config_eos_linear_alpha=section.get('eos_linear_alpha'),
            config_soma_density_difference=section.get('density_difference'),
            config_soma_surface_temperature=section.get('surface_temperature'),
            config_soma_surface_salinity=section.get('surface_salinity'),
            config_soma_salinity_gradient=section.get('salinity_gradient'),
            config_soma_thermocline_depth=section.get('thermocline_depth'),
            config_soma_density_difference_linear=section.get(
                'density_difference_linear'),
            config_soma_phi=section.get('phi'),
            config_soma_shelf_depth=section.get('shelf_depth'),
            config_soma_bottom_depth=section.get('bottom_depth'))

        for out_name in ['namelist_mark_land.ocean', 'namelist.ocean']:
            self.update_namelist_at_runtime(options=options, out_name=out_name)
        ds_mesh = convert(xarray.open_dataset('base_mesh.nc'),
                          graphInfoFileName='base_graph.info',
                          logger=self.logger)
        write_netcdf(ds_mesh, 'mesh.nc')

        run_model(self,
                  namelist='namelist_mark_land.ocean',
                  streams='streams_mark_land.ocean',
                  graph_file='base_graph.info')

        ds_mesh = cull(xarray.open_dataset('masked_initial_state.nc'),
                       graphInfoFileName='graph.info',
                       logger=self.logger)
        write_netcdf(ds_mesh, 'culled_mesh.nc')

        run_model(self,
                  namelist='namelist.ocean',
                  streams='streams.ocean',
                  graph_file='graph.info')
Example #6
    def run(self):
        """
        Run this step of the test case
       """
        mesh_type = self.mesh_type
        logger = self.logger
        config = self.config
        section = config['dome']

        if mesh_type == '2000m':
            nx = section.getint('nx')
            ny = section.getint('ny')
            dc = section.getfloat('dc')

            dsMesh = make_planar_hex_mesh(nx=nx,
                                          ny=ny,
                                          dc=dc,
                                          nonperiodic_x=True,
                                          nonperiodic_y=True)

            write_netcdf(dsMesh, 'grid.nc')

            dsMesh = cull(dsMesh, logger=logger)
            dsMesh = convert(dsMesh, logger=logger)
            write_netcdf(dsMesh, 'mpas_grid.nc')

        levels = section.get('levels')
        args = [
            'create_landice_grid_from_generic_MPAS_grid.py', '-i',
            'mpas_grid.nc', '-o', 'landice_grid.nc', '-l', levels
        ]

        check_call(args, logger)

        make_graph_file(mesh_filename='landice_grid.nc',
                        graph_filename='graph.info')

        _setup_dome_initial_conditions(config,
                                       logger,
                                       filename='landice_grid.nc')
Example #7
    def run(self):
        """
        Run this step of the test case
        """
        config = self.config
        logger = self.logger

        section = config['ziso']
        nx = section.getint('nx')
        ny = section.getint('ny')
        dc = section.getfloat('dc')

        dsMesh = make_planar_hex_mesh(nx=nx, ny=ny, dc=dc, nonperiodic_x=False,
                                      nonperiodic_y=True)
        write_netcdf(dsMesh, 'base_mesh.nc')

        dsMesh = cull(dsMesh, logger=logger)
        dsMesh = convert(dsMesh, graphInfoFileName='culled_graph.info',
                         logger=logger)
        write_netcdf(dsMesh, 'culled_mesh.nc')

        ds = _write_initial_state(config, dsMesh, self.with_frazil)

        _write_forcing(config, ds.yCell, ds.zMid)
Example #8
    dsCritPassMask = conversion.mask(dsBaseMesh, fcMask=fcCritPassages)

    # Alter critical passages to be at least two cells wide, to avoid sea ice
    # blockage.
    dsCritPassMask = widen_transect_edge_masks(dsCritPassMask,
                                               dsBaseMesh,
                                               latitude_threshold=43.0)

    dsPreserve.append(dsCritPassMask)

if options.preserve_floodplain:
    dsPreserve.append(dsBaseMesh)

# cull the mesh based on the land mask
dsCulledMesh = conversion.cull(dsBaseMesh,
                               dsMask=dsLandMask,
                               dsPreserve=dsPreserve)

# create a mask for the flood fill seed points
dsSeedMask = conversion.mask(dsCulledMesh, fcSeed=fcSeed)

# cull the mesh a second time using a flood fill from the seed points
dsCulledMesh = conversion.cull(dsCulledMesh,
                               dsInverse=dsSeedMask,
                               graphInfoFileName='culled_graph.info')
write_netcdf(dsCulledMesh, 'culled_mesh.nc', format=netcdfFormat)

if critical_passages:
    # make a new version of the critical passages mask on the culled mesh
    dsCritPassMask = conversion.mask(dsCulledMesh, fcMask=fcCritPassages)
    write_netcdf(dsCritPassMask,
Example #9
    def run(self):
        """
        Run this step of the test case
        """
        config = self.config
        logger = self.logger

        replacements = dict()
        replacements['config_periodic_planar_vert_levels'] = \
            config.getfloat('vertical_grid', 'vert_levels')
        replacements['config_periodic_planar_bottom_depth'] = \
            config.getfloat('vertical_grid', 'bottom_depth')
        self.update_namelist_at_runtime(options=replacements)

        section = config['vertical_grid']
        vert_levels = section.getint('vert_levels')
        bottom_depth = section.getfloat('bottom_depth')

        section = config['internal_wave']
        nx = section.getint('nx')
        ny = section.getint('ny')
        dc = section.getfloat('dc')
        use_distances = section.getboolean('use_distances')
        amplitude_width_dist = section.getfloat('amplitude_width_dist')
        amplitude_width_frac = section.getfloat('amplitude_width_frac')
        bottom_temperature = section.getfloat('bottom_temperature')
        surface_temperature = section.getfloat('surface_temperature')
        temperature_difference = section.getfloat('temperature_difference')
        salinity = section.getfloat('salinity')

        logger.info(' * Make planar hex mesh')
        dsMesh = make_planar_hex_mesh(nx=nx,
                                      ny=ny,
                                      dc=dc,
                                      nonperiodic_x=False,
                                      nonperiodic_y=True)
        logger.info(' * Completed Make planar hex mesh')
        write_netcdf(dsMesh, 'base_mesh.nc')

        logger.info(' * Cull mesh')
        dsMesh = cull(dsMesh, logger=logger)
        logger.info(' * Convert mesh')
        dsMesh = convert(dsMesh,
                         graphInfoFileName='culled_graph.info',
                         logger=logger)
        logger.info(' * Completed Convert mesh')
        write_netcdf(dsMesh, 'culled_mesh.nc')

        ds = dsMesh.copy()
        yCell = ds.yCell

        ds['bottomDepth'] = bottom_depth * xarray.ones_like(yCell)
        ds['ssh'] = xarray.zeros_like(yCell)

        init_vertical_coord(config, ds)

        yMin = yCell.min().values
        yMax = yCell.max().values

        yMid = 0.5 * (yMin + yMax)

        if use_distances:
            perturbation_width = amplitude_width_dist
        else:
            perturbation_width = (yMax - yMin) * amplitude_width_frac

        # Set stratified temperature
        temp_vert = (bottom_temperature +
                     (surface_temperature - bottom_temperature) *
                     ((ds.refZMid + bottom_depth) / bottom_depth))

        depth_frac = xarray.zeros_like(temp_vert)
        refBottomDepth = ds['refBottomDepth']
        for k in range(1, vert_levels):
            depth_frac[k] = (refBottomDepth[k - 1] /
                             refBottomDepth[vert_levels - 1])

        # If cell is in the southern half, outside the sin width, subtract
        # temperature difference
        frac = xarray.where(
            numpy.abs(yCell - yMid) < perturbation_width,
            numpy.cos(0.5 * numpy.pi * (yCell - yMid) / perturbation_width) *
            numpy.sin(numpy.pi * depth_frac), 0.)

        temperature = temp_vert - temperature_difference * frac
        temperature = temperature.transpose('nCells', 'nVertLevels')
        temperature = temperature.expand_dims(dim='Time', axis=0)

        normalVelocity = xarray.zeros_like(ds.xEdge)
        normalVelocity, _ = xarray.broadcast(normalVelocity, ds.refBottomDepth)
        normalVelocity = normalVelocity.transpose('nEdges', 'nVertLevels')
        normalVelocity = normalVelocity.expand_dims(dim='Time', axis=0)

        ds['temperature'] = temperature
        ds['salinity'] = salinity * xarray.ones_like(temperature)
        ds['normalVelocity'] = normalVelocity

        write_netcdf(ds, 'ocean.nc')
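
The temperature field above is assembled through xarray's broadcasting by dimension name: temp_vert varies only along nVertLevels, frac along nCells (and nVertLevels), and arithmetic between them produces the full 2D field, which is then transposed to ('nCells', 'nVertLevels'). A tiny standalone illustration of the idiom with made-up values:

import numpy
import xarray

temp_vert = xarray.DataArray(numpy.linspace(20., 10., 5), dims='nVertLevels')
frac = xarray.DataArray(numpy.array([0., 0.5, 1.]), dims='nCells')

temperature = temp_vert - 1.2 * frac   # broadcasts to ('nVertLevels', 'nCells')
temperature = temperature.transpose('nCells', 'nVertLevels')
print(temperature.shape)               # (3, 5)
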
Example #10
    def run(self):
        """
        Run this step of the test case
        """
        config = self.config
        logger = self.logger

        section = config['ice_shelf_2d']
        nx = section.getint('nx')
        ny = section.getint('ny')
        dc = section.getfloat('dc')

        dsMesh = make_planar_hex_mesh(nx=nx, ny=ny, dc=dc, nonperiodic_x=False,
                                      nonperiodic_y=True)
        write_netcdf(dsMesh, 'base_mesh.nc')

        dsMesh = cull(dsMesh, logger=logger)
        dsMesh = convert(dsMesh, graphInfoFileName='culled_graph.info',
                         logger=logger)
        write_netcdf(dsMesh, 'culled_mesh.nc')

        bottom_depth = config.getfloat('vertical_grid', 'bottom_depth')

        section = config['ice_shelf_2d']
        temperature = section.getfloat('temperature')
        surface_salinity = section.getfloat('surface_salinity')
        bottom_salinity = section.getfloat('bottom_salinity')

        # points 1 and 2 are where angles on ice shelf are located.
        # point 3 is at the surface.
        # d variables are total water-column thickness below ice shelf
        y1 = section.getfloat('y1')
        y2 = section.getfloat('y2')
        y3 = y2 + section.getfloat('edge_width')
        d1 = section.getfloat('cavity_thickness')
        d2 = d1 + section.getfloat('slope_height')
        d3 = bottom_depth

        ds = dsMesh.copy()

        ds['bottomDepth'] = bottom_depth * xarray.ones_like(ds.xCell)

        yCell = ds.yCell

        column_thickness = xarray.where(
            yCell < y1, d1, d1 + (d2 - d1) * (yCell - y1) / (y2 - y1))
        column_thickness = xarray.where(
            yCell < y2, column_thickness,
            d2 + (d3 - d2) * (yCell - y2) / (y3 - y2))
        column_thickness = xarray.where(yCell < y3, column_thickness, d3)

        ds['ssh'] = -bottom_depth + column_thickness

        # set up the vertical coordinate
        init_vertical_coord(config, ds)

        modify_mask = xarray.where(yCell < y3, 1, 0).expand_dims(
            dim='Time', axis=0)
        landIceFraction = modify_mask.astype(float)
        landIceMask = modify_mask.copy()

        ref_density = constants['SHR_CONST_RHOSW']
        landIcePressure, landIceDraft = compute_land_ice_pressure_and_draft(
            ssh=ds.ssh, modify_mask=modify_mask, ref_density=ref_density)

        salinity = surface_salinity + ((bottom_salinity - surface_salinity) *
                                       (ds.zMid / (-bottom_depth)))
        salinity, _ = xarray.broadcast(salinity, ds.layerThickness)
        salinity = salinity.transpose('Time', 'nCells', 'nVertLevels')

        normalVelocity = xarray.zeros_like(ds.xEdge)
        normalVelocity, _ = xarray.broadcast(normalVelocity, ds.refBottomDepth)
        normalVelocity = normalVelocity.transpose('nEdges', 'nVertLevels')
        normalVelocity = normalVelocity.expand_dims(dim='Time', axis=0)

        ds['temperature'] = temperature * xarray.ones_like(ds.layerThickness)
        ds['salinity'] = salinity
        ds['normalVelocity'] = normalVelocity
        ds['fCell'] = xarray.zeros_like(ds.xCell)
        ds['fEdge'] = xarray.zeros_like(ds.xEdge)
        ds['fVertex'] = xarray.zeros_like(ds.xVertex)
        ds['modifyLandIcePressureMask'] = modify_mask
        ds['landIceFraction'] = landIceFraction
        ds['landIceMask'] = landIceMask
        ds['landIcePressure'] = landIcePressure
        ds['landIceDraft'] = landIceDraft

        write_netcdf(ds, 'initial_state.nc')
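
The nested xarray.where calls build a piecewise-linear water-column thickness in y: a constant d1 south of y1, a linear ramp to d2 at y2, a second ramp to the full depth d3 at y3, and constant beyond. A small standalone check of the same construction with made-up geometry:

import xarray

yCell = xarray.DataArray([0., 10., 20., 30., 40.], dims='nCells')
y1, y2, y3 = 10., 20., 30.
d1, d2, d3 = 100., 300., 700.

thickness = xarray.where(
    yCell < y1, d1, d1 + (d2 - d1) * (yCell - y1) / (y2 - y1))
thickness = xarray.where(
    yCell < y2, thickness, d2 + (d3 - d2) * (yCell - y2) / (y3 - y2))
thickness = xarray.where(yCell < y3, thickness, d3)

print(thickness.values)   # [100. 100. 300. 700. 700.]
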
Example #11
def saveesm(path,
            geom,
            mesh,
            preserve_floodplain=False,
            floodplain_elevation=20.0,
            do_inject_elevation=False,
            with_cavities=False,
            lat_threshold=43.00,
            with_critical_passages=True):
    """
    SAVEESM: export a jigsaw mesh obj. to MPAS-style output.

    1. Writes "mesh_triangles.nc" and "base_mesh.nc" files.
    2. (Optionally) injects elevation + floodplain data.
    3. Calls MPAS-Tools + Geometric-Data to cull mesh into 
       ocean/land partitions.
    4. Writes "culled_mesh.nc" (ocean) and "invert_mesh.nc"
       (land) MPAS-spec. output files.

    Data is written to "../path/out/" and/or "../path/tmp/".

    """
    # Authors: Darren Engwirda

    ttic = time.time()

    print("")
    print("Running MPAS mesh-tools...")

    inject_edge_tags(mesh)

    # adapted from BUILD_MESH.py

    if (geom.mshID.lower() == "ellipsoid-mesh"):
        print("Forming mesh_triangles.nc")
        jigsaw_mesh_to_netcdf(mesh=mesh,
                              on_sphere=True,
                              sphere_radius=np.mean(geom.radii) * 1e3,
                              output_name=os.path.join(path, "tmp",
                                                       "mesh_triangles.nc"))

    if (geom.mshID.lower() == "euclidean-mesh"):
        print("Forming mesh_triangles.nc")
        jigsaw_mesh_to_netcdf(mesh=mesh,
                              on_sphere=False,
                              output_name=os.path.join(path, "tmp",
                                                       "mesh_triangles.nc"))

    print("Forming base_mesh.nc")
    write_netcdf(convert(
        xarray.open_dataset(os.path.join(path, "tmp", "mesh_triangles.nc"))),
                 fileName=os.path.join(path, "out", "base_mesh.nc"))
    """
    if do_inject_elevation:
        print("Injecting cell elevations")
        inject_elevation(
            cell_elev=mesh.value,
            mesh_file=os.path.join(
                path, "out", "base_mesh.nc"))
    """

    if preserve_floodplain:
        print("Injecting floodplain flag")
        inject_preserve_floodplain(mesh_file=os.path.join(
            path, "out", "base_mesh.nc"),
                                   floodplain_elevation=floodplain_elevation)

    args = [
        "paraview_vtk_field_extractor.py", "--ignore_time", "-l", "-d",
        "maxEdges=0", "-v", "allOnCells", "-f",
        os.path.join(path, "out", "base_mesh.nc"), "-o",
        os.path.join(path, "out", "base_mesh_vtk")
    ]
    print("")
    print("running:", " ".join(args))
    subprocess.check_call(args, env=os.environ.copy())

    # adapted from CULL_MESH.py

    # required for compatibility with MPAS
    netcdfFormat = "NETCDF3_64BIT"

    gf = GeometricFeatures(cacheLocation="{}".format(
        os.path.join(HERE, "..", "data", "geometric_data")))

    # start with the land coverage from Natural Earth
    fcLandCoverage = gf.read(componentName="natural_earth",
                             objectType="region",
                             featureNames=["Land Coverage"])

    # remove the region south of 60S so we can replace
    # it based on ice-sheet topography
    fcSouthMask = gf.read(componentName="ocean",
                          objectType="region",
                          featureNames=["Global Ocean 90S to 60S"])

    fcLandCoverage = \
        fcLandCoverage.difference(fcSouthMask)

    # add land coverage from either the full ice sheet
    # or just the grounded part
    if with_cavities:
        fcAntarcticLand = gf.read(
            componentName="bedmap2",
            objectType="region",
            featureNames=["AntarcticGroundedIceCoverage"])
    else:
        fcAntarcticLand = gf.read(componentName="bedmap2",
                                  objectType="region",
                                  featureNames=["AntarcticIceCoverage"])

    fcLandCoverage.merge(fcAntarcticLand)

    # save the feature collection to a geojson file
    fcLandCoverage.to_geojson(
        os.path.join(path, "tmp", "land_coverage.geojson"))

    # Create the land mask based on the land coverage,
    # i.e. coastline data.
    dsBaseMesh = xarray.open_dataset(os.path.join(path, "out", "base_mesh.nc"))
    dsLandMask = mask(dsBaseMesh, fcMask=fcLandCoverage)

    dsLandMask = add_land_locked_cells_to_mask(
        dsLandMask, dsBaseMesh, latitude_threshold=lat_threshold, nSweeps=20)

    if with_critical_passages:
        # merge transects for critical passages into
        # critical_passages.geojson
        fcCritPassages = gf.read(componentName="ocean",
                                 objectType="transect",
                                 tags=["Critical_Passage"])

        # create masks from the transects
        dsCritPassMask = \
            mask(dsBaseMesh, fcMask=fcCritPassages)

        # Alter critical passages to be at least two
        # cells wide, to avoid sea ice blockage.
        dsCritPassMask = widen_transect_edge_masks(
            dsCritPassMask, dsBaseMesh, latitude_threshold=lat_threshold)

        dsLandMask = subtract_critical_passages(dsLandMask, dsCritPassMask)

        # merge transects for critical land blockages
        # into critical_land_blockages.geojson
        fcCritBlockages = gf.read(componentName="ocean",
                                  objectType="transect",
                                  tags=["Critical_Land_Blockage"])

        # create masks from the transects for critical
        # land blockages
        dsCritBlockMask = \
            mask(dsBaseMesh, fcMask=fcCritBlockages)

        dsLandMask = add_critical_land_blockages(dsLandMask, dsCritBlockMask)

    # create seed points for a flood fill of the ocean
    # use all points in the ocean directory, on the
    # assumption that they are, in fact *in* the ocean
    fcSeed = gf.read(componentName="ocean",
                     objectType="point",
                     tags=["seed_point"])

    # update the land mask to ensure all ocean cells really
    # are "reachable" from the rest of the global ocean
    dsLandMask = mask_reachable_ocean(dsMesh=dsBaseMesh,
                                      dsMask=dsLandMask,
                                      fcSeed=fcSeed)

    # cull the (ocean) mesh based on the land mask, and a
    # cull the (land) mesh using the inverse mask

    if preserve_floodplain:
        # with "preserve_floodplains", the (ocean) mesh will
        # contain overlap with the (land) mesh, otherwise the
        # two are "perfectly" disjoint
        dsCulledMesh = cull(dsBaseMesh,
                            dsMask=dsLandMask,
                            dsPreserve=dsBaseMesh,
                            graphInfoFileName=os.path.join(
                                path, "out", "culled_graph.info"))

        dsInvertMesh = cull(dsBaseMesh,
                            dsInverse=dsLandMask,
                            graphInfoFileName=os.path.join(
                                path, "out", "invert_graph.info"))

    else:
        dsCulledMesh = cull(dsBaseMesh,
                            dsMask=dsLandMask,
                            graphInfoFileName=os.path.join(
                                path, "out", "culled_graph.info"))

        dsInvertMesh = cull(dsBaseMesh,
                            dsInverse=dsLandMask,
                            graphInfoFileName=os.path.join(
                                path, "out", "invert_graph.info"))

    write_netcdf(dsCulledMesh, os.path.join(path, "out", "culled_mesh.nc"),
                 netcdfFormat)

    write_netcdf(dsInvertMesh, os.path.join(path, "out", "invert_mesh.nc"),
                 netcdfFormat)

    args = [
        "paraview_vtk_field_extractor.py", "--ignore_time", "-d", "maxEdges=",
        "-v", "allOnCells", "-f",
        os.path.join(path, "out", "culled_mesh.nc"), "-o",
        os.path.join(path, "out", "culled_mesh_vtk")
    ]
    print("")
    print("running", " ".join(args))
    subprocess.check_call(args, env=os.environ.copy())

    args = [
        "paraview_vtk_field_extractor.py", "--ignore_time", "-d", "maxEdges=",
        "-v", "allOnCells", "-f",
        os.path.join(path, "out", "invert_mesh.nc"), "-o",
        os.path.join(path, "out", "invert_mesh_vtk")
    ]
    print("running", " ".join(args))
    subprocess.check_call(args, env=os.environ.copy())

    ttoc = time.time()

    print("CPUSEC =", (ttoc - ttic))

    return
Example #12
    def run(self):
        """
        Run this step of the test case
        """
        logger = self.logger
        config = self.config
        section = config['humboldt']

        logger.info('calling build_cell_width')
        cell_width, x1, y1, geom_points, geom_edges = self.build_cell_width()
        logger.info('calling build_planar_mesh')
        build_planar_mesh(cell_width,
                          x1,
                          y1,
                          geom_points,
                          geom_edges,
                          logger=logger)
        dsMesh = xarray.open_dataset('base_mesh.nc')
        logger.info('culling mesh')
        dsMesh = cull(dsMesh, logger=logger)
        logger.info('converting to MPAS mesh')
        dsMesh = convert(dsMesh, logger=logger)
        logger.info('writing grid_converted.nc')
        write_netcdf(dsMesh, 'grid_converted.nc')
        # If no number of levels specified in config file, use 10
        levels = section.get('levels')
        logger.info('calling create_landice_grid_from_generic_MPAS_grid.py')
        args = [
            'create_landice_grid_from_generic_MPAS_grid.py', '-i',
            'grid_converted.nc', '-o', 'gis_1km_preCull.nc', '-l', levels,
            '-v', 'glimmer'
        ]
        check_call(args, logger=logger)

        # This step uses a subset of the whole Greenland dataset trimmed to
        # the region around Humboldt Glacier, to speed up interpolation.
        # This could also be replaced with the full Greenland Ice Sheet
        # dataset.
        logger.info('calling interpolate_to_mpasli_grid.py')
        args = [
            'interpolate_to_mpasli_grid.py', '-s',
            'humboldt_1km_2020_04_20.epsg3413.icesheetonly.nc', '-d',
            'gis_1km_preCull.nc', '-m', 'b', '-t'
        ]

        check_call(args, logger=logger)

        # This step is only necessary if you wish to cull a certain
        # distance from the ice margin, within the bounds defined by
        # the GeoJSON file.
        cullDistance = section.get('cullDistance')
        if float(cullDistance) > 0.:
            logger.info('calling define_cullMask.py')
            args = [
                'define_cullMask.py', '-f', 'gis_1km_preCull.nc', '-m',
                'distance', '-d', cullDistance
            ]

            check_call(args, logger=logger)
        else:
            logger.info('cullDistance <= 0 in config file. '
                        'Will not cull by distance to margin. \n')

        # This step is only necessary because the GeoJSON region
        # is defined by lat-lon.
        logger.info('calling set_lat_lon_fields_in_planar_grid.py')
        args = [
            'set_lat_lon_fields_in_planar_grid.py', '-f', 'gis_1km_preCull.nc',
            '-p', 'gis-gimp'
        ]

        check_call(args, logger=logger)

        logger.info('calling MpasMaskCreator.x')
        args = [
            'MpasMaskCreator.x', 'gis_1km_preCull.nc', 'humboldt_mask.nc',
            '-f', 'Humboldt.geojson'
        ]

        check_call(args, logger=logger)

        logger.info('culling to geojson file')
        dsMesh = xarray.open_dataset('gis_1km_preCull.nc')
        humboldtMask = xarray.open_dataset('humboldt_mask.nc')
        dsMesh = cull(dsMesh, dsInverse=humboldtMask, logger=logger)
        write_netcdf(dsMesh, 'humboldt_culled.nc')

        logger.info('Marking horns for culling')
        args = ['mark_horns_for_culling.py', '-f', 'humboldt_culled.nc']
        check_call(args, logger=logger)

        logger.info('culling and converting')
        dsMesh = xarray.open_dataset('humboldt_culled.nc')
        dsMesh = cull(dsMesh, logger=logger)
        dsMesh = convert(dsMesh, logger=logger)
        write_netcdf(dsMesh, 'humboldt_dehorned.nc')

        logger.info('calling create_landice_grid_from_generic_MPAS_grid.py')
        args = [
            'create_landice_grid_from_generic_MPAS_grid.py', '-i',
            'humboldt_dehorned.nc', '-o', 'Humboldt_1to10km.nc', '-l', levels,
            '-v', 'glimmer', '--beta', '--thermal', '--obs', '--diri'
        ]

        check_call(args, logger=logger)

        logger.info('calling interpolate_to_mpasli_grid.py')
        args = [
            'interpolate_to_mpasli_grid.py', '-s',
            'humboldt_1km_2020_04_20.epsg3413.icesheetonly.nc', '-d',
            'Humboldt_1to10km.nc', '-m', 'b', '-t'
        ]
        check_call(args, logger=logger)

        logger.info('Marking domain boundaries dirichlet')
        args = [
            'mark_domain_boundaries_dirichlet.py', '-f', 'Humboldt_1to10km.nc'
        ]
        check_call(args, logger=logger)

        logger.info('calling set_lat_lon_fields_in_planar_grid.py')
        args = [
            'set_lat_lon_fields_in_planar_grid.py', '-f',
            'Humboldt_1to10km.nc', '-p', 'gis-gimp'
        ]
        check_call(args, logger=logger)

        logger.info('creating graph.info')
        make_graph_file(mesh_filename='Humboldt_1to10km.nc',
                        graph_filename='graph.info')
Example #13
def _cull_mesh_with_logging(logger, with_cavities, with_critical_passages,
                            custom_critical_passages, custom_land_blockages,
                            preserve_floodplain, use_progress_bar,
                            process_count):
    """ Cull the mesh once the logger is defined for sure """

    critical_passages = with_critical_passages or \
        (custom_critical_passages is not None)

    land_blockages = with_critical_passages or \
        (custom_land_blockages is not None)

    gf = GeometricFeatures()

    # start with the land coverage from Natural Earth
    fcLandCoverage = gf.read(componentName='natural_earth',
                             objectType='region',
                             featureNames=['Land Coverage'])

    # remove the region south of 60S so we can replace it based on ice-sheet
    # topography
    fcSouthMask = gf.read(componentName='ocean', objectType='region',
                          featureNames=['Global Ocean 90S to 60S'])

    fcLandCoverage = fcLandCoverage.difference(fcSouthMask)

    # Add "land" coverage from either the full ice sheet or just the grounded
    # part
    if with_cavities:
        fcAntarcticLand = gf.read(
            componentName='bedmachine', objectType='region',
            featureNames=['AntarcticGroundedIceCoverage'])
    else:
        fcAntarcticLand = gf.read(
            componentName='bedmachine', objectType='region',
            featureNames=['AntarcticIceCoverage'])

    fcLandCoverage.merge(fcAntarcticLand)

    # save the feature collection to a geojson file
    fcLandCoverage.to_geojson('land_coverage.geojson')

    # these defaults may have been updated from config options -- pass them
    # along to the subprocess
    netcdf_format = mpas_tools.io.default_format
    netcdf_engine = mpas_tools.io.default_engine

    # Create the land mask based on the land coverage, i.e. coastline data
    args = ['compute_mpas_region_masks',
            '-m', 'base_mesh.nc',
            '-g', 'land_coverage.geojson',
            '-o', 'land_mask.nc',
            '-t', 'cell',
            '--process_count', '{}'.format(process_count),
            '--format', netcdf_format,
            '--engine', netcdf_engine]
    check_call(args, logger=logger)

    dsBaseMesh = xarray.open_dataset('base_mesh.nc')
    dsLandMask = xarray.open_dataset('land_mask.nc')
    dsLandMask = add_land_locked_cells_to_mask(dsLandMask, dsBaseMesh,
                                               latitude_threshold=43.0,
                                               nSweeps=20)

    # create seed points for a flood fill of the ocean
    # use all points in the ocean directory, on the assumption that they are,
    # in fact, in the ocean
    fcSeed = gf.read(componentName='ocean', objectType='point',
                     tags=['seed_point'])

    if land_blockages:
        if with_critical_passages:
            # merge transects for critical land blockages into
            # critical_land_blockages.geojson
            fcCritBlockages = gf.read(
                componentName='ocean', objectType='transect',
                tags=['Critical_Land_Blockage'])
        else:
            fcCritBlockages = FeatureCollection()

        if custom_land_blockages is not None:
            fcCritBlockages.merge(read_feature_collection(
                custom_land_blockages))

        # create masks from the transects
        fcCritBlockages.to_geojson('critical_blockages.geojson')
        args = ['compute_mpas_transect_masks',
                '-m', 'base_mesh.nc',
                '-g', 'critical_blockages.geojson',
                '-o', 'critical_blockages.nc',
                '-t', 'cell',
                '-s', '10e3',
                '--process_count', '{}'.format(process_count),
                '--format', netcdf_format,
                '--engine', netcdf_engine]
        check_call(args, logger=logger)
        dsCritBlockMask = xarray.open_dataset('critical_blockages.nc')

        dsLandMask = add_critical_land_blockages(dsLandMask, dsCritBlockMask)

    fcCritPassages = FeatureCollection()
    dsPreserve = []

    if critical_passages:
        if with_critical_passages:
            # merge transects for critical passages into fcCritPassages
            fcCritPassages.merge(gf.read(componentName='ocean',
                                         objectType='transect',
                                         tags=['Critical_Passage']))

        if custom_critical_passages is not None:
            fcCritPassages.merge(read_feature_collection(
                custom_critical_passages))

        # create masks from the transects
        fcCritPassages.to_geojson('critical_passages.geojson')
        args = ['compute_mpas_transect_masks',
                '-m', 'base_mesh.nc',
                '-g', 'critical_passages.geojson',
                '-o', 'critical_passages.nc',
                '-t', 'cell', 'edge',
                '-s', '10e3',
                '--process_count', '{}'.format(process_count),
                '--format', netcdf_format,
                '--engine', netcdf_engine]
        check_call(args, logger=logger)
        dsCritPassMask = xarray.open_dataset('critical_passages.nc')

        # Alter critical passages to be at least two cells wide, to avoid sea
        # ice blockage
        dsCritPassMask = widen_transect_edge_masks(dsCritPassMask, dsBaseMesh,
                                                   latitude_threshold=43.0)

        dsPreserve.append(dsCritPassMask)

    if preserve_floodplain:
        dsPreserve.append(dsBaseMesh)

    # cull the mesh based on the land mask
    dsCulledMesh = cull(dsBaseMesh, dsMask=dsLandMask,
                        dsPreserve=dsPreserve, logger=logger)

    # create a mask for the flood fill seed points
    dsSeedMask = compute_mpas_flood_fill_mask(dsMesh=dsCulledMesh,
                                              fcSeed=fcSeed,
                                              logger=logger)

    # cull the mesh a second time using a flood fill from the seed points
    dsCulledMesh = cull(dsCulledMesh, dsInverse=dsSeedMask,
                        graphInfoFileName='culled_graph.info', logger=logger)
    write_netcdf(dsCulledMesh, 'culled_mesh.nc')

    if critical_passages:
        # make a new version of the critical passages mask on the culled mesh
        fcCritPassages.to_geojson('critical_passages.geojson')
        args = ['compute_mpas_transect_masks',
                '-m', 'culled_mesh.nc',
                '-g', 'critical_passages.geojson',
                '-o', 'critical_passages_mask_final.nc',
                '-t', 'cell',
                '-s', '10e3',
                '--process_count', '{}'.format(process_count),
                '--format', netcdf_format,
                '--engine', netcdf_engine]
        check_call(args, logger=logger)

    if with_cavities:
        fcAntarcticIce = gf.read(
            componentName='bedmachine', objectType='region',
            featureNames=['AntarcticIceCoverage'])

        fcAntarcticIce.to_geojson('ice_coverage.geojson')
        args = ['compute_mpas_region_masks',
                '-m', 'culled_mesh.nc',
                '-g', 'ice_coverage.geojson',
                '-o', 'ice_coverage.nc',
                '-t', 'cell',
                '--process_count', '{}'.format(process_count),
                '--format', netcdf_format,
                '--engine', netcdf_engine]
        check_call(args, logger=logger)
        dsMask = xarray.open_dataset('ice_coverage.nc')

        landIceMask = dsMask.regionCellMasks.isel(nRegions=0)
        dsLandIceMask = xarray.Dataset()
        dsLandIceMask['landIceMask'] = landIceMask

        write_netcdf(dsLandIceMask, 'land_ice_mask.nc')

        dsLandIceCulledMesh = cull(dsCulledMesh, dsMask=dsMask, logger=logger)
        write_netcdf(dsLandIceCulledMesh, 'no_ISC_culled_mesh.nc')

    extract_vtk(ignore_time=True, dimension_list=['maxEdges='],
                variable_list=['allOnCells'],
                filename_pattern='culled_mesh.nc',
                out_dir='culled_mesh_vtk',
                use_progress_bar=use_progress_bar)

    if with_cavities:
        extract_vtk(ignore_time=True, dimension_list=['maxEdges='],
                    variable_list=['allOnCells'],
                    filename_pattern='no_ISC_culled_mesh.nc',
                    out_dir='no_ISC_culled_mesh_vtk',
                    use_progress_bar=use_progress_bar)
Example #14
    def run(self):
        """
        Run this step of the test case
        """
        config = self.config
        logger = self.logger

        section = config['baroclinic_channel']
        nx = section.getint('nx')
        ny = section.getint('ny')
        dc = section.getfloat('dc')

        dsMesh = make_planar_hex_mesh(nx=nx,
                                      ny=ny,
                                      dc=dc,
                                      nonperiodic_x=False,
                                      nonperiodic_y=True)
        write_netcdf(dsMesh, 'base_mesh.nc')

        dsMesh = cull(dsMesh, logger=logger)
        dsMesh = convert(dsMesh,
                         graphInfoFileName='culled_graph.info',
                         logger=logger)
        write_netcdf(dsMesh, 'culled_mesh.nc')

        section = config['baroclinic_channel']
        use_distances = section.getboolean('use_distances')
        gradient_width_dist = section.getfloat('gradient_width_dist')
        gradient_width_frac = section.getfloat('gradient_width_frac')
        bottom_temperature = section.getfloat('bottom_temperature')
        surface_temperature = section.getfloat('surface_temperature')
        temperature_difference = section.getfloat('temperature_difference')
        salinity = section.getfloat('salinity')
        coriolis_parameter = section.getfloat('coriolis_parameter')

        ds = dsMesh.copy()
        xCell = ds.xCell
        yCell = ds.yCell

        bottom_depth = config.getfloat('vertical_grid', 'bottom_depth')

        ds['bottomDepth'] = bottom_depth * xarray.ones_like(xCell)
        ds['ssh'] = xarray.zeros_like(xCell)

        init_vertical_coord(config, ds)

        xMin = xCell.min().values
        xMax = xCell.max().values
        yMin = yCell.min().values
        yMax = yCell.max().values

        yMid = 0.5 * (yMin + yMax)
        xPerturbMin = xMin + 4.0 * (xMax - xMin) / 6.0
        xPerturbMax = xMin + 5.0 * (xMax - xMin) / 6.0

        if use_distances:
            perturbationWidth = gradient_width_dist
        else:
            perturbationWidth = (yMax - yMin) * gradient_width_frac

        yOffset = perturbationWidth * numpy.sin(6.0 * numpy.pi *
                                                (xCell - xMin) / (xMax - xMin))

        temp_vert = (bottom_temperature +
                     (surface_temperature - bottom_temperature) *
                     ((ds.refZMid + bottom_depth) / bottom_depth))

        frac = xarray.where(yCell < yMid - yOffset, 1., 0.)

        mask = numpy.logical_and(yCell >= yMid - yOffset,
                                 yCell < yMid - yOffset + perturbationWidth)
        frac = xarray.where(
            mask, 1. - (yCell - (yMid - yOffset)) / perturbationWidth, frac)

        temperature = temp_vert - temperature_difference * frac
        temperature = temperature.transpose('nCells', 'nVertLevels')

        # Determine yOffset for 3rd crest in sin wave
        yOffset = 0.5 * perturbationWidth * numpy.sin(
            numpy.pi * (xCell - xPerturbMin) / (xPerturbMax - xPerturbMin))

        mask = numpy.logical_and(
            numpy.logical_and(
                yCell >= yMid - yOffset - 0.5 * perturbationWidth,
                yCell <= yMid - yOffset + 0.5 * perturbationWidth),
            numpy.logical_and(xCell >= xPerturbMin, xCell <= xPerturbMax))

        temperature = (temperature + mask * 0.3 *
                       (1. - ((yCell - (yMid - yOffset)) /
                              (0.5 * perturbationWidth))))

        temperature = temperature.expand_dims(dim='Time', axis=0)

        normalVelocity = xarray.zeros_like(ds.xEdge)
        normalVelocity, _ = xarray.broadcast(normalVelocity, ds.refBottomDepth)
        normalVelocity = normalVelocity.transpose('nEdges', 'nVertLevels')
        normalVelocity = normalVelocity.expand_dims(dim='Time', axis=0)

        ds['temperature'] = temperature
        ds['salinity'] = salinity * xarray.ones_like(temperature)
        ds['normalVelocity'] = normalVelocity
        ds['fCell'] = coriolis_parameter * xarray.ones_like(xCell)
        ds['fEdge'] = coriolis_parameter * xarray.ones_like(ds.xEdge)
        ds['fVertex'] = coriolis_parameter * xarray.ones_like(ds.xVertex)

        write_netcdf(ds, 'ocean.nc')
Example #15
    def run(self):
        """
        Run this step of the test case
        """
        config = self.config
        logger = self.logger

        section = config['isomip_plus']
        nx = section.getint('nx')
        ny = section.getint('ny')
        dc = section.getfloat('dc')
        filter_sigma = section.getfloat('topo_smoothing') * self.resolution
        min_ice_thickness = section.getfloat('min_ice_thickness')
        min_land_ice_fraction = section.getfloat('min_land_ice_fraction')
        draft_scaling = section.getfloat('draft_scaling')

        process_input_geometry('input_geometry.nc',
                               'input_geometry_processed.nc',
                               filterSigma=filter_sigma,
                               minIceThickness=min_ice_thickness,
                               scale=draft_scaling)

        dsMesh = make_planar_hex_mesh(nx=nx + 2,
                                      ny=ny + 2,
                                      dc=dc,
                                      nonperiodic_x=False,
                                      nonperiodic_y=False)
        translate(mesh=dsMesh, yOffset=-2 * dc)
        write_netcdf(dsMesh, 'base_mesh.nc')

        dsGeom = xarray.open_dataset('input_geometry_processed.nc')

        min_ocean_fraction = config.getfloat('isomip_plus',
                                             'min_ocean_fraction')

        dsMask = interpolate_ocean_mask(dsMesh, dsGeom, min_ocean_fraction)
        dsMesh = cull(dsMesh, dsInverse=dsMask, logger=logger)
        dsMesh.attrs['is_periodic'] = 'NO'

        dsMesh = convert(dsMesh,
                         graphInfoFileName='culled_graph.info',
                         logger=logger)
        write_netcdf(dsMesh, 'culled_mesh.nc')

        ds = interpolate_geom(dsMesh, dsGeom, min_ocean_fraction)

        for var in ['landIceFraction']:
            ds[var] = ds[var].expand_dims(dim='Time', axis=0)

        ds['landIceMask'] = \
            (ds.landIceFraction >= min_land_ice_fraction).astype(int)

        ref_density = constants['SHR_CONST_RHOSW']
        landIcePressure, landIceDraft = compute_land_ice_pressure_and_draft(
            ssh=ds.ssh, modify_mask=ds.ssh < 0., ref_density=ref_density)

        ds['landIcePressure'] = landIcePressure
        ds['landIceDraft'] = landIceDraft

        if self.time_varying_forcing:
            self._write_time_varying_forcing(ds_init=ds)

        ds['bottomDepth'] = -ds.bottomDepthObserved

        section = config['isomip_plus']

        min_column_thickness = section.getfloat('min_column_thickness')
        min_levels = section.getint('minimum_levels')

        interfaces = generate_1d_grid(config)

        # Deepen the bottom depth to maintain the minimum water-column
        # thickness
        min_depth = numpy.maximum(-ds.ssh + min_column_thickness,
                                  interfaces[min_levels + 1])
        ds['bottomDepth'] = numpy.maximum(ds.bottomDepth, min_depth)

        init_vertical_coord(config, ds)

        ds['modifyLandIcePressureMask'] = \
            (ds['landIceFraction'] > 0.01).astype(int)

        max_bottom_depth = -config.getfloat('vertical_grid', 'bottom_depth')
        frac = (0. - ds.zMid) / (0. - max_bottom_depth)

        # compute T, S
        init_top_temp = section.getfloat('init_top_temp')
        init_bot_temp = section.getfloat('init_bot_temp')
        init_top_sal = section.getfloat('init_top_sal')
        init_bot_sal = section.getfloat('init_bot_sal')
        ds['temperature'] = (1.0 - frac) * init_top_temp + frac * init_bot_temp
        ds['salinity'] = (1.0 - frac) * init_top_sal + frac * init_bot_sal

        # compute coriolis
        coriolis_parameter = section.getfloat('coriolis_parameter')

        ds['fCell'] = coriolis_parameter * xarray.ones_like(ds.xCell)
        ds['fEdge'] = coriolis_parameter * xarray.ones_like(ds.xEdge)
        ds['fVertex'] = coriolis_parameter * xarray.ones_like(ds.xVertex)

        normalVelocity = xarray.zeros_like(ds.xEdge)
        normalVelocity = normalVelocity.broadcast_like(ds.refBottomDepth)
        normalVelocity = normalVelocity.transpose('nEdges', 'nVertLevels')
        ds['normalVelocity'] = normalVelocity.expand_dims(dim='Time', axis=0)

        write_netcdf(ds, 'initial_state.nc')

        plot_folder = '{}/plots'.format(self.work_dir)
        if os.path.exists(plot_folder):
            shutil.rmtree(plot_folder)

        # plot a few fields
        section_y = config.getfloat('isomip_plus_viz', 'section_y')

        # show progress only if we're not writing to a log file
        show_progress = self.log_filename is None

        plotter = MoviePlotter(inFolder=self.work_dir,
                               streamfunctionFolder=self.work_dir,
                               outFolder=plot_folder,
                               expt=self.experiment,
                               sectionY=section_y,
                               dsMesh=ds,
                               ds=ds,
                               showProgress=show_progress)

        plotter.plot_3d_field_top_bot_section(ds.zMid,
                                              nameInTitle='zMid',
                                              prefix='zmid',
                                              units='m',
                                              vmin=-720.,
                                              vmax=0.,
                                              cmap='cmo.deep_r')

        plotter.plot_3d_field_top_bot_section(ds.temperature,
                                              nameInTitle='temperature',
                                              prefix='temp',
                                              units='C',
                                              vmin=-2.,
                                              vmax=1.,
                                              cmap='cmo.thermal')

        plotter.plot_3d_field_top_bot_section(ds.salinity,
                                              nameInTitle='salinity',
                                              prefix='salin',
                                              units='PSU',
                                              vmin=33.8,
                                              vmax=34.7,
                                              cmap='cmo.haline')

        # compute restoring
        dsForcing = xarray.Dataset()

        restore_top_temp = section.getfloat('restore_top_temp')
        restore_bot_temp = section.getfloat('restore_bot_temp')
        restore_top_sal = section.getfloat('restore_top_sal')
        restore_bot_sal = section.getfloat('restore_bot_sal')
        dsForcing['temperatureInteriorRestoringValue'] = \
            (1.0 - frac) * restore_top_temp + frac * restore_bot_temp
        dsForcing['salinityInteriorRestoringValue'] = \
            (1.0 - frac) * restore_top_sal + frac * restore_bot_sal

        restore_rate = section.getfloat('restore_rate')
        restore_xmin = section.getfloat('restore_xmin')
        restore_xmax = section.getfloat('restore_xmax')
        frac = numpy.maximum(
            (ds.xCell - restore_xmin) / (restore_xmax - restore_xmin), 0.)
        frac = frac.broadcast_like(dsForcing.temperatureInteriorRestoringValue)

        # convert from 1/days to 1/s
        dsForcing['temperatureInteriorRestoringRate'] = \
            frac * restore_rate / constants['SHR_CONST_CDAY']
        dsForcing['salinityInteriorRestoringRate'] = \
            dsForcing.temperatureInteriorRestoringRate

        # compute "evaporation"
        restore_evap_rate = section.getfloat('restore_evap_rate')

        mask = numpy.logical_and(ds.xCell >= restore_xmin,
                                 ds.xCell <= restore_xmax)
        mask = mask.expand_dims(dim='Time', axis=0)
        # convert to m/s, negative for evaporation rather than precipitation
        evap_rate = -restore_evap_rate / (constants['SHR_CONST_CDAY'] * 365)
        # PSU*m/s to kg/m^2/s
        sflux_factor = 1.
        # C*m/s to W/m^2
        hflux_factor = 1. / (ref_density * constants['SHR_CONST_CPSW'])
        dsForcing['evaporationFlux'] = mask * ref_density * evap_rate
        dsForcing['seaIceSalinityFlux'] = \
            mask*evap_rate*restore_top_sal/sflux_factor
        dsForcing['seaIceHeatFlux'] = \
            mask*evap_rate*restore_top_temp/hflux_factor

        write_netcdf(dsForcing, 'init_mode_forcing_data.nc')
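
The forcing fields above are converted to MKS units on the fly: the restoring rate goes from 1/day to 1/s by dividing by SHR_CONST_CDAY (86400 s per day), and the evaporation rate (presumably configured in m per year) becomes a negative water flux in m/s. A quick standalone check of that arithmetic with hypothetical rates:

SHR_CONST_CDAY = 86400.          # seconds per day (CIME shared constant)

restore_rate = 10.               # hypothetical restoring rate, in 1/day
print(restore_rate / SHR_CONST_CDAY)                  # ~1.157e-04  (1/s)

restore_evap_rate = 0.5          # hypothetical "evaporation" rate, in m/yr
print(-restore_evap_rate / (SHR_CONST_CDAY * 365))    # ~-1.59e-08  (m/s)
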