Example #1
def field2d(src_lon, src_lat, src_field, dest_lon, dest_lat, dest_mask=None,
            nx=0, ny=0, weight=10, threads=2, pmap=None):
    """
    Given a 2D field with time (dimensions [time, lat, lon]), interpolate
    onto a new grid and return the new field. This is a helper function
    for interpolating data stored in files, etc.

    Parameters
    ----------
    src_lon: numpy.ndarray
        longitude that field is on
    src_lat: numpy.ndarray
        latitude that field is on
    src_field: numpy.ndarray
        field to interpolate
    dest_lon: numpy.ndarray
        output longitudes to interpolate to
    dest_lat: numpy.ndarray
        output latitudes to interpolate to
    dest_mask: numpy.ndarray, optional
        mask to apply to interpolated data
    nx : float, optional:
        decorrelation length-scale for OA (same units as source data)
    ny : float, optional:
        decorrelation length-scale for OA (same units as source data)
    weight : int, optional:
        number of points to use in weighting matrix
    threads : int, optional:
        number of processing threads
    pmap : numpy.ndarray, optional:
        use the specified pmap rather than compute it

    Output
    ------
    ndarray:
        interpolated field on the destination grid
    pmap:
        the pmap used in the interpolation
    """
    if pmap is None:
        tmp, pmap = seapy.oasurf(src_lon, src_lat, src_lat,
                                 dest_lon, dest_lat, weight=weight, nx=nx, ny=ny)
    if dest_mask is None:
        dest_mask = np.ones(dest_lat.shape)
    records = np.arange(0, src_field.shape[0])
    maxrecs = np.maximum(1,
                         np.minimum(records.size,
                                    int(_max_memory /
                                        (dest_lon.nbytes + src_lon.nbytes))))
    for rn, recs in enumerate(seapy.chunker(records, maxrecs)):
        nfield = np.ma.array(Parallel(n_jobs=threads, verbose=2)
                             (delayed(__interp2_thread)(
                                 src_lon, src_lat, src_field[i, :, :],
                                 dest_lon, dest_lat,
                                 pmap, weight,
                                 nx, ny, dest_mask)
                              for i in recs), copy=False)
    return nfield, pmap
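A minimal usage sketch for field2d, assuming it is run inside the module that defines it (so that seapy, _max_memory and the joblib imports are already available); the grids, the src_sst field and all parameter values below are made up for illustration:

# Hypothetical usage sketch for field2d; grids, field and parameter values are made up.
import numpy as np

# Coarse source grid (2-D coordinate arrays) and five records of a [time, lat, lon] field
src_lon, src_lat = np.meshgrid(np.arange(140, 180, 1.0), np.arange(10, 40, 1.0))
src_sst = np.random.rand(5, *src_lon.shape)

# Finer destination grid
dest_lon, dest_lat = np.meshgrid(np.arange(150, 170, 0.25), np.arange(15, 35, 0.25))

# Interpolate every record; keep the returned pmap so later calls can skip recomputing it
new_sst, pmap = field2d(src_lon, src_lat, src_sst, dest_lon, dest_lat,
                        nx=2, ny=2, weight=9, threads=2)
new_sst2, _ = field2d(src_lon, src_lat, src_sst, dest_lon, dest_lat,
                      nx=2, ny=2, weight=9, threads=2, pmap=pmap)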
Example #2
    def convert_file(self, file, title="AVISO Obs"):
        """
        Load an AVISO file and convert it into an obs structure
        """
        # Load AVISO Data
        nc = seapy.netcdf(file)
        lonname = 'lon' if 'lon' in nc.variables.keys() else 'longitude'
        lon = nc.variables[lonname][:]
        latname = 'lat' if 'lat' in nc.variables.keys() else 'latitude'
        lat = nc.variables[latname][:]
        dat = np.squeeze(nc.variables["sla"][:])
        err = np.squeeze(nc.variables["err"][:])
        time = seapy.roms.get_time(
            nc, "time", records=[0], epoch=self.epoch)[0]
        nc.close()
        lon, lat = np.meshgrid(lon, lat)
        lat = lat.flatten()
        lon = lon.flatten()
        if not self.grid.east():
            lon[lon > 180] -= 360
        data = [seapy.roms.obs.raw_data("ZETA", "SSH_AVISO_MAP",
                                        dat.flatten(), err.flatten(), self.ssh_error)]
        # Grid it
        obs = seapy.roms.obs.gridder(self.grid, time, lon, lat, None,
                                     data, self.dt, title)

        # Apply the model mean ssh to the sla data
        if self.ssh_mean is not None:
            m, p = seapy.oasurf(self.grid.I, self.grid.J, self.ssh_mean,
                                obs.x, obs.y, nx=1, ny=1, weight=7)
            obs.value += m
        return obs
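The coordinate handling in the middle of this method (meshgrid, flattening, and the wrap from the 0-360 to the -180..180 longitude convention) does not depend on AVISO at all; a stand-alone sketch with made-up values, where grid_is_east stands in for self.grid.east():

# Stand-alone sketch of the coordinate handling above; values are made up.
import numpy as np

lon = np.array([358.0, 359.0, 0.0, 1.0])   # 0-360 convention as read from the file
lat = np.array([20.0, 21.0])

lon, lat = np.meshgrid(lon, lat)            # 2-D coordinate grids
lon = lon.flatten()                         # 1-D vectors, one entry per grid point
lat = lat.flatten()

grid_is_east = False                        # stands in for self.grid.east()
if not grid_is_east:
    lon[lon > 180] -= 360                   # wrap to the -180..180 convention
print(lon)                                  # longitudes are now in the -180..180 range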
Example #3
def __interp2_thread(rx, ry, data, zx, zy, pmap, weight, nx, ny, mask):
    """
    internal routine: 2D interpolation thread for parallel interpolation
    """
    data = np.ma.fix_invalid(data, copy=False)

    # Convolve the water over the land
    ksize = 2 * np.round(
        np.sqrt((nx / np.ma.median(np.ma.diff(rx)))**2 +
                (ny / np.ma.median(np.ma.diff(ry.T)))**2)) + 1
    if ksize < _ksize_range[0]:
        warn("nx or ny values are too small for stable OA, {:f}".format(ksize))
        ksize = _ksize_range[0]
    elif ksize > _ksize_range[1]:
        warn("nx or ny values are too large for stable OA, {:f}".format(ksize))
        ksize = _ksize_range[1]
    data = seapy.convolve_mask(data, ksize=ksize, copy=False)

    # Interpolate the field and return the result
    with timeout(minutes=30):
        res, pm = seapy.oasurf(rx, ry, data, zx, zy, pmap, weight, nx, ny)

    return np.ma.masked_where(np.logical_or(mask == 0,
                                            np.abs(res) > 9e4),
                              res,
                              copy=False)
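The kernel-size heuristic at the top of this thread depends only on the grid spacing and the decorrelation scales; a stand-alone sketch of that computation with illustrative numbers and an assumed _ksize_range (the module defines its own bounds):

# Stand-alone sketch of the smoothing-kernel size heuristic; all numbers are illustrative.
import numpy as np

rx, ry = np.meshgrid(np.arange(140, 150, 0.5), np.arange(10, 20, 0.5))
nx = ny = 2.0                         # decorrelation scales in the same units as rx, ry
_ksize_range = (3, 15)                # assumed bounds for this sketch only

ksize = 2 * np.round(np.sqrt((nx / np.median(np.diff(rx)))**2 +
                             (ny / np.median(np.diff(ry.T)))**2)) + 1
ksize = min(max(ksize, _ksize_range[0]), _ksize_range[1])
print(ksize)                          # 13.0 for these numbers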
Example #4
    def convert_file(self, file, title="AVISO Obs"):
        """
        Load an AVISO file and convert it into an obs structure
        """
        # Load AVISO Data
        nc = seapy.netcdf(file)
        lonname = 'lon' if 'lon' in nc.variables.keys() else 'longitude'
        lon = nc.variables[lonname][:]
        latname = 'lat' if 'lat' in nc.variables.keys() else 'latitude'
        lat = nc.variables[latname][:]
        dat = np.squeeze(nc.variables["sla"][:])
        err = np.squeeze(nc.variables["err"][:])
        time = netCDF4.num2date(nc.variables["time"][0],
                                nc.variables["time"].units) - self.epoch
        time = time.total_seconds() * seapy.secs2day
        nc.close()
        lon, lat = np.meshgrid(lon, lat)
        lat = lat.flatten()
        lon = lon.flatten()
        if not self.grid.east():
            lon[lon > 180] -= 360
        data = [seapy.roms.obs.raw_data("ZETA", "SSH_AVISO_MAP",
                                        dat.flatten(), err.flatten(), self.ssh_error)]
        # Grid it
        obs = seapy.roms.obs.gridder(self.grid, time, lon, lat, None,
                                     data, self.dt, title)

        # Apply the model mean ssh to the sla data
        if self.ssh_mean is not None:
            m, p = seapy.oasurf(self.grid.I, self.grid.J, self.ssh_mean,
                                obs.x, obs.y, nx=1, ny=1, weight=7)
            obs.value += m
        return obs
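The epoch conversion above (netCDF4.num2date minus self.epoch, scaled to days by seapy.secs2day) can be reproduced with plain datetime arithmetic; a small sketch with made-up dates:

# Stand-alone sketch of the epoch conversion above, written with plain datetime
# arithmetic instead of the seapy.secs2day constant; dates are made up.
import datetime

epoch = datetime.datetime(2000, 1, 1)           # stands in for self.epoch
obs_time = datetime.datetime(2012, 3, 15, 12)   # what netCDF4.num2date would return

delta = obs_time - epoch
time_days = delta.total_seconds() / 86400.0     # fractional days since the epoch
print(time_days)                                # 4457.5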
Example #5
    def convert_file(self, file, title="AVISO SLA Track Obs"):
        """
        Load an AVISO file and convert it into an obs structure
        """
        # Load AVISO Data
        nc = seapy.netcdf(file)
        lon = nc.variables["longitude"][:]
        lat = nc.variables["latitude"][:]
        slaname = 'SLA' if 'SLA' in nc.variables.keys() else 'sla_filtered'
        dat = nc.variables[slaname][:]
        time = seapy.roms.num2date(nc, "time", epoch=self.epoch)
        nc.close()

        # make them into vectors
        lat = lat.ravel()
        lon = lon.ravel()
        dat = dat.ravel()
        err = np.ones(dat.shape) * _aviso_sla_errors.get(self.provenance, 0.1)

        if not self.grid.east():
            lon[lon > 180] -= 360

        good = dat.nonzero()
        data = [seapy.roms.obs.raw_data("ZETA", self.provenance,
                                        dat[good], err[good], err[0])]
        # Grid it
        obs = seapy.roms.obs.gridder(self.grid, time, lon[good], lat[good], None,
                                     data, self.dt, title)

        # Apply the model mean ssh to the sla data
        if self.ssh_mean is not None and obs is not None:
            m, p = seapy.oasurf(self.grid.I, self.grid.J, self.ssh_mean,
                                obs.x, obs.y, nx=1, ny=1, weight=7)
            obs.value += m

        # Duplicate the observations before and after as per the repeat
        # time unless it is zero
        if self.repeat and obs:
            prior = obs.copy()
            after = obs.copy()
            prior.time -= self.repeat / 24
            after.time += self.repeat / 24
            obs.add(prior)
            obs.add(after)

        return obs
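The repeat handling at the end simply shifts copies of the observation set backward and forward by self.repeat hours expressed in days; the offset arithmetic, with illustrative numbers:

# Illustrative sketch of the repeat-window offsets above; numbers are made up.
repeat = 12.0                          # stands in for self.repeat, in hours
obs_time = 4457.5                      # observation time in days since the epoch

prior_time = obs_time - repeat / 24    # half a day earlier  -> 4457.0
after_time = obs_time + repeat / 24    # half a day later    -> 4458.0
print(prior_time, after_time)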
Example #6
    def convert_file(self, file, title="AVISO SLA Track Obs"):
        """
        Load an AVISO file and convert it into an obs structure
        """
        # Load AVISO Data
        nc = seapy.netcdf(file)
        lon = nc.variables["longitude"][:]
        lat = nc.variables["latitude"][:]
        slaname = 'SLA' if 'SLA' in nc.variables.keys() else 'sla_filtered'
        dat = nc.variables[slaname][:]
        time = seapy.roms.get_time(nc, "time", epoch=self.epoch)
        nc.close()

        # make them into vectors
        lat = lat.ravel()
        lon = lon.ravel()
        dat = dat.ravel()
        err = np.ones(dat.shape) * _aviso_sla_errors.get(self.provenance, 0.1)

        if not self.grid.east():
            lon[lon > 180] -= 360

        good = dat.nonzero()
        data = [seapy.roms.obs.raw_data("ZETA", self.provenance,
                                        dat[good], err[good], err[0])]
        # Grid it
        obs = seapy.roms.obs.gridder(self.grid, time, lon[good], lat[good], None,
                                     data, self.dt, title)

        # Apply the model mean ssh to the sla data
        if self.ssh_mean is not None and obs is not None:
            m, p = seapy.oasurf(self.grid.I, self.grid.J, self.ssh_mean,
                                obs.x, obs.y, nx=1, ny=1, weight=7)
            obs.value += m

        # Duplicate the observations before and after as per the repeat
        # time unless it is zero
        if self.repeat and obs:
            prior = obs.copy()
            after = obs.copy()
            prior.time -= self.repeat / 24
            after.time += self.repeat / 24
            obs.add(prior)
            obs.add(after)

        return obs
Example #7
def constant_depth(field, grid, depth, zeta=None, threads=-2):
    """
    Find the values of a 3-D field at a constant depth for all times given.

    Parameters
    ----------
    field : ndarray,
        ROMS 3-D field to interpolate onto a constant depth level. Can be a
        three- or four-dimensional array (first dimension assumed to be time).
    grid : seapy.model.grid or string or list,
        Grid that defines the depths and stretching for the field given
    depth : float,
        Depth (in meters) to find all values
    zeta : ndarray, optional,
        ROMS zeta field corresponding to field if you wish to apply the SSH
        correction to the depth calculations.
    threads : int, optional,
        Number of threads to use for processing

    Returns
    -------
    nfield : ndarray,
        Values from ROMS field on the given constant depth
    """

    # Make sure our inputs are all valid
    grid = seapy.model.asgrid(grid)
    if np.ndim(field) == 3:
        field = seapy.adddim(field)
    if zeta is not None and np.ndim(zeta) == 2:
        zeta = seapy.adddim(zeta)
    depth = depth if depth < 0 else -depth

    # Set up some arrays
    x, y = np.meshgrid(np.arange(field.shape[-1]), np.arange(field.shape[-2]))
    fz, pmap = seapy.oasurf(x, y, x, x, y, None, 5, 1, 1)
    fz = seapy.adddim(np.ones(x.shape)) * depth
    # Loop over all times, generate new field at depth
    nfield = np.ma.array(Parallel(n_jobs=threads, verbose=2)(
        delayed(__dinterp)(x, y, grid.depth_rho, np.squeeze(field[i, :, :, :]),
                           fz, pmap) for i in range(field.shape[0])),
                         copy=False)

    return nfield
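A minimal usage sketch for constant_depth, assuming it is run where the function and its helpers are defined and that a ROMS grid file and an averages file exist; the file names and the "temp" variable are hypothetical:

# Hypothetical usage sketch for constant_depth; file names and fields are made up.
import seapy

grid = seapy.model.asgrid("my_roms_grid.nc")    # hypothetical grid file
nc = seapy.netcdf("my_roms_avg.nc")             # hypothetical averages file
temp = nc.variables["temp"][:]                  # [time, s_rho, eta_rho, xi_rho]
nc.close()

# Temperature on the 100 m level for every record (positive depths are negated internally)
temp_100m = constant_depth(temp, grid, 100, threads=2)
print(temp_100m.shape)                          # expected (time, eta_rho, xi_rho)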
Example #8
def constant_depth(field, grid, depth, zeta=None, threads=-2):
    """
    Find the values of a 3-D field at a constant depth for all times given.

    Parameters
    ----------
    field : ndarray,
        ROMS 3-D field to interpolate onto a constant depth level. Can be a
        three- or four-dimensional array (first dimension assumed to be time).
    grid : seapy.model.grid or string or list,
        Grid that defines the depths and stretching for the field given
    depth : float,
        Depth (in meters) to find all values
    zeta : ndarray, optional,
        ROMS zeta field corresponding to field if you wish to apply the SSH
        correction to the depth calculations.
    threads : int, optional,
        Number of threads to use for processing

    Returns
    -------
    nfield : ndarray,
        Values from ROMS field on the given constant depth
    """

    # Make sure our inputs are all valid
    grid = seapy.model.asgrid(grid)
    if np.ndim(field) == 3:
        field = seapy.adddim(field)
    if zeta is not None and np.ndim(zeta) == 2:
        zeta = seapy.adddim(zeta)
    depth = depth if depth < 0 else -depth

    # Set up some arrays
    x, y = np.meshgrid(np.arange(field.shape[-1]), np.arange(field.shape[-2]))
    fz, pmap = seapy.oasurf(x, y, x, x, y, None, 5, 1, 1)
    fz = seapy.adddim(np.ones(x.shape)) * depth
    # Loop over all times, generate new field at depth
    nfield = np.ma.array(Parallel(n_jobs=threads, verbose=2)
                         (delayed(__dinterp)(x, y, grid.depth_rho,
                                             np.squeeze(field[i, :, :, :]), fz, pmap)
                          for i in range(field.shape[0])), copy=False)

    return nfield
Example #9
def __interp2_thread(rx, ry, data, zx, zy, pmap, weight, nx, ny, mask):
    """
    internal routine: 2D interpolation thread for parallel interpolation
    """
    data = np.ma.fix_invalid(data, copy=False)

    # Convolve the water over the land
    ksize = 2 * np.round(np.sqrt((nx / np.median(np.diff(rx)))**2 +
                                 (ny / np.median(np.diff(ry.T)))**2)) + 1
    if ksize < _ksize_range[0]:
        warn("nx or ny values are too small for stable OA, {:f}".format(ksize))
        ksize = _ksize_range[0]
    elif ksize > _ksize_range[1]:
        warn("nx or ny values are too large for stable OA, {:f}".format(ksize))
        ksize = _ksize_range[1]
    data = seapy.convolve_mask(data, ksize=ksize, copy=False)

    # Interpolate the field and return the result
    with timeout(minutes=30):
        res, pm = seapy.oasurf(rx, ry, data, zx, zy, pmap, weight, nx, ny)

    return np.ma.masked_where(np.logical_or(mask == 0, np.abs(res) > 9e4), res,
                              copy=False)
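The final masking step combines the destination land mask with a fill-value threshold; a stand-alone sketch of just that step on small made-up arrays:

# Stand-alone sketch of the final masking step above; arrays are made up.
import numpy as np

res = np.array([[1.2, 9.9e4, 0.8],
                [0.5, 2.0, 1.1]])     # interpolated values with one huge fill value
mask = np.array([[1, 1, 0],
                 [1, 1, 1]])          # destination mask: 0 marks land

out = np.ma.masked_where(np.logical_or(mask == 0, np.abs(res) > 9e4), res, copy=False)
print(out)                            # the land point and the 9.9e4 value come back masked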
Example #10
def __interp_grids(src_grid, child_grid, ncout, records=None,
                   threads=2, nx=0, ny=0, weight=10, vmap=None, z_mask=False,
                   pmap=None):
    """
    internal method:  Given a model file (average, history, etc.),
    interpolate the fields onto another gridded file.

    Parameters
    ----------
    src_grid : seapy.model.grid data source (History, Average, etc. file)
    child_grid : seapy.model.grid output data grid
    ncout : netcdf output file
    [records] : array of the record indices to interpolate
    [threads] : number of processing threads
    [nx] : decorrelation length in grid-cells for x
    [ny] : decorrelation length in grid-cells for y
    [vmap] : variable name mapping
    [z_mask] : mask out depths in z-grids
    [pmap] : use the specified pmap rather than compute it

    Returns
    -------
    pmap :
        the pmap used in the interpolation

    """
    # If we don't have a variable map, then do a one-to-one mapping
    if vmap is None:
        vmap = dict()
        for k in seapy.roms.fields:
            vmap[k] = k

    # Generate a file to store the pmap information
    sname = getattr(src_grid, 'name', None)
    cname = getattr(child_grid, 'name', None)
    pmap_file = None if any(v is None for v in (sname, cname)) else \
        sname + "_" + cname + "_pmap.npz"

    # Create or load the pmaps depending on if they exist
    if nx == 0:
        if hasattr(src_grid, "dm") and hasattr(child_grid, "dm"):
            nx = np.ceil(np.mean(src_grid.dm) / np.mean(child_grid.dm))
        else:
            nx = 5
    if ny == 0:
        if hasattr(src_grid, "dn") and hasattr(child_grid, "dn"):
            ny = np.ceil(np.mean(src_grid.dn) / np.mean(child_grid.dn))
        else:
            ny = 5

    if pmap is None:
        if pmap_file is not None and os.path.isfile(pmap_file):
            pmap = np.load(pmap_file)
        else:
            tmp = np.ma.masked_equal(src_grid.mask_rho, 0)
            tmp, pmaprho = seapy.oasurf(src_grid.lon_rho, src_grid.lat_rho,
                                        tmp, child_grid.lon_rho, child_grid.lat_rho,
                                        weight=weight, nx=nx, ny=ny)
            tmp = np.ma.masked_equal(src_grid.mask_u, 0)
            tmp, pmapu = seapy.oasurf(src_grid.lon_u, src_grid.lat_u,
                                      tmp, child_grid.lon_rho, child_grid.lat_rho,
                                      weight=weight, nx=nx, ny=ny)
            tmp = np.ma.masked_equal(src_grid.mask_v, 0)
            tmp, pmapv = seapy.oasurf(src_grid.lon_v, src_grid.lat_v,
                                      tmp, child_grid.lon_rho, child_grid.lat_rho,
                                      weight=weight, nx=nx, ny=ny)
            if pmap_file is not None:
                np.savez(pmap_file, pmaprho=pmaprho, pmapu=pmapu, pmapv=pmapv)
            pmap = {"pmaprho": pmaprho, "pmapu": pmapu, "pmapv": pmapv}

    # Get the time field
    ncsrc = seapy.netcdf(src_grid.filename)
    time = seapy.roms.get_timevar(ncsrc)

    # Interpolate the depths from the source to final grid
    src_depth = np.min(src_grid.depth_rho, 0)
    dst_depth = __interp2_thread(src_grid.lon_rho, src_grid.lat_rho, src_depth,
                                 child_grid.lon_rho, child_grid.lat_rho, pmap[
                                     "pmaprho"],
                                 weight, nx, ny, child_grid.mask_rho)
    # Interpolate the scalar fields
    records = np.arange(0, ncsrc.variables[time].shape[0]) \
        if records is None else np.atleast_1d(records)
    for src in vmap:
        dest = vmap[src]

        # Extra fields will probably be user tracers (biogeochemical)
        fld = seapy.roms.fields.get(dest, {"dims": 3})

        # Only interpolate the fields we want in the destination
        if (dest not in ncout.variables) or ("rotate" in fld):
            continue

        if fld["dims"] == 2:
            # Compute the max number of records to hold in memory
            maxrecs = np.maximum(1, np.minimum(len(records),
                                               int(_max_memory / (child_grid.lon_rho.nbytes +
                                                                  src_grid.lon_rho.nbytes))))
            for rn, recs in enumerate(seapy.chunker(records, maxrecs)):
                outr = np.s_[
                    rn * maxrecs:np.minimum((rn + 1) * maxrecs, len(records))]
                ndata = np.ma.array(Parallel(n_jobs=threads, verbose=2, max_nbytes=_max_memory)
                                    (delayed(__interp2_thread)(
                                     src_grid.lon_rho, src_grid.lat_rho,
                                     ncsrc.variables[src][i, :, :],
                                     child_grid.lon_rho, child_grid.lat_rho,
                                     pmap["pmaprho"], weight,
                                     nx, ny, child_grid.mask_rho)
                                     for i in recs), copy=False)
                ncout.variables[dest][outr, :, :] = ndata
                ncout.sync()
        else:
            maxrecs = np.maximum(1, np.minimum(
                len(records), int(_max_memory /
                                  (child_grid.lon_rho.nbytes *
                                   child_grid.n +
                                   src_grid.lon_rho.nbytes *
                                   src_grid.n))))
            for rn, recs in enumerate(seapy.chunker(records, maxrecs)):
                outr = np.s_[
                    rn * maxrecs:np.minimum((rn + 1) * maxrecs, len(records))]
                ndata = np.ma.array(Parallel(n_jobs=threads, verbose=2, max_nbytes=_max_memory)
                                    (delayed(__interp3_thread)(
                                        src_grid.lon_rho, src_grid.lat_rho,
                                        src_grid.depth_rho,
                                        ncsrc.variables[src][i, :, :, :],
                                        child_grid.lon_rho, child_grid.lat_rho,
                                        child_grid.depth_rho,
                                        pmap["pmaprho"], weight,
                                        nx, ny, child_grid.mask_rho,
                                        up_factor=_up_scaling.get(dest, 1.0),
                                        down_factor=_down_scaling.get(dest, 1.0))
                                     for i in recs), copy=False)
                if z_mask:
                    __mask_z_grid(ndata, dst_depth, child_grid.depth_rho)
                ncout.variables[dest][outr, :, :, :] = ndata
                ncout.sync()

    # Rotate and Interpolate the vector fields. First, determine which
    # are the "u" and the "v" vmap fields
    try:
        velmap = {
            "u": list(vmap.keys())[list(vmap.values()).index("u")],
            "v": list(vmap.keys())[list(vmap.values()).index("v")]}
    except:
        warn("velocity not present in source file")
        return

    srcangle = src_grid.angle if src_grid.cgrid else None
    dstangle = child_grid.angle if child_grid.cgrid else None
    maxrecs = np.minimum(len(records),
                         int(_max_memory /
                             (2 * (child_grid.lon_rho.nbytes *
                                   child_grid.n +
                                   src_grid.lon_rho.nbytes *
                                   src_grid.n))))
    for nr, recs in enumerate(seapy.chunker(records, maxrecs)):
        vel = Parallel(n_jobs=threads, verbose=2, max_nbytes=_max_memory)(delayed(__interp3_vel_thread)(
            src_grid.lon_rho, src_grid.lat_rho,
            src_grid.depth_rho, srcangle,
            ncsrc.variables[velmap["u"]][i, :, :, :],
            ncsrc.variables[velmap["v"]][i, :, :, :],
            child_grid.lon_rho, child_grid.lat_rho,
            child_grid.depth_rho, dstangle,
            pmap["pmaprho"], weight, nx, ny,
            child_grid.mask_rho) for i in recs)

        for j in range(len(vel)):
            vel_u = np.ma.array(vel[j][0], copy=False)
            vel_v = np.ma.array(vel[j][1], copy=False)
            if z_mask:
                __mask_z_grid(vel_u, dst_depth, child_grid.depth_rho)
                __mask_z_grid(vel_v, dst_depth, child_grid.depth_rho)

            if child_grid.cgrid:
                vel_u = seapy.model.rho2u(vel_u)
                vel_v = seapy.model.rho2v(vel_v)

            ncout.variables["u"][nr * maxrecs + j, :] = vel_u
            ncout.variables["v"][nr * maxrecs + j, :] = vel_v

            if "ubar" in ncout.variables:
                # Create ubar and vbar
                # depth = seapy.adddim(child_grid.depth_u, vel_u.shape[0])
                ncout.variables["ubar"][nr * maxrecs + j, :] = \
                    np.sum(vel_u * child_grid.depth_u, axis=0) /  \
                    np.sum(child_grid.depth_u, axis=0)

            if "vbar" in ncout.variables:
                # depth = seapy.adddim(child_grid.depth_v, vel_v.shape[0])
                ncout.variables["vbar"][nr * maxrecs + j, :] = \
                    np.sum(vel_v * child_grid.depth_v, axis=0) /  \
                    np.sum(child_grid.depth_v, axis=0)

            ncout.sync()

    # Return the pmap that was used
    return pmap
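The pmap caching in the middle of this routine is ordinary numpy .npz handling; a stand-alone sketch of saving and reloading the three maps, with made-up array contents and a hypothetical cache file name:

# Stand-alone sketch of the pmap caching used above; array contents are made up.
import os
import numpy as np

pmaprho = np.arange(12).reshape(3, 4)        # stand-ins for the real index/weight maps
pmapu = pmaprho + 100
pmapv = pmaprho + 200

pmap_file = "src_child_pmap.npz"             # hypothetical cache file name
np.savez(pmap_file, pmaprho=pmaprho, pmapu=pmapu, pmapv=pmapv)

if os.path.isfile(pmap_file):
    pmap = np.load(pmap_file)                # behaves like a dict of the saved arrays
    print(pmap["pmaprho"].shape)             # (3, 4)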
Example #11
def __interp_grids(src_grid, child_grid, ncsrc, ncout, records=None,
                   threads=2, nx=0, ny=0, weight=10, vmap=None, z_mask=False,
                   pmap=None):
    """
    internal method:  Given a model file (average, history, etc.),
    interpolate the fields onto another gridded file.

    Parameters
    ----------
    src_grid : seapy.model.grid data of source
    child_grid : seapy.model.grid output data grid
    ncsrc : netcdf input file  (History, Average, etc. file)
    ncout : netcdf output file
    [records] : array of the record indices to interpolate
    [threads] : number of processing threads
    [nx] : decorrelation length in grid-cells for x
    [ny] : decorrelation length in grid-cells for y
    [vmap] : variable name mapping
    [z_mask] : mask out depths in z-grids
    [pmap] : use the specified pmap rather than compute it

    Returns
    -------
    pmap :
        the pmap used in the interpolation

    """
    # If we don't have a variable map, then do a one-to-one mapping
    if vmap is None:
        vmap = dict()
        for k in seapy.roms.fields:
            vmap[k] = k

    # Generate a file to store the pmap information
    sname = getattr(src_grid, 'name', None)
    cname = getattr(child_grid, 'name', None)
    pmap_file = None if any(v is None for v in (sname, cname)) else \
        sname + "_" + cname + "_pmap.npz"

    # Create or load the pmaps depending on if they exist
    if nx == 0:
        if hasattr(src_grid, "dm") and hasattr(child_grid, "dm"):
            nx = np.ceil(np.mean(src_grid.dm) / np.mean(child_grid.dm))
        else:
            nx = 5
    if ny == 0:
        if hasattr(src_grid, "dn") and hasattr(child_grid, "dn"):
            ny = np.ceil(np.mean(src_grid.dn) / np.mean(child_grid.dn))
        else:
            ny = 5

    if pmap is None:
        if pmap_file is not None and os.path.isfile(pmap_file):
            pmap = np.load(pmap_file)
        else:
            tmp = np.ma.masked_equal(src_grid.mask_rho, 0)
            tmp, pmaprho = seapy.oasurf(src_grid.lon_rho, src_grid.lat_rho,
                                        tmp, child_grid.lon_rho, child_grid.lat_rho,
                                        weight=weight, nx=nx, ny=ny)
            tmp = np.ma.masked_equal(src_grid.mask_u, 0)
            tmp, pmapu = seapy.oasurf(src_grid.lon_u, src_grid.lat_u,
                                      tmp, child_grid.lon_rho, child_grid.lat_rho,
                                      weight=weight, nx=nx, ny=ny)
            tmp = np.ma.masked_equal(src_grid.mask_v, 0)
            tmp, pmapv = seapy.oasurf(src_grid.lon_v, src_grid.lat_v,
                                      tmp, child_grid.lon_rho, child_grid.lat_rho,
                                      weight=weight, nx=nx, ny=ny)
            if pmap_file is not None:
                np.savez(pmap_file, pmaprho=pmaprho, pmapu=pmapu, pmapv=pmapv)
            pmap = {"pmaprho": pmaprho, "pmapu": pmapu, "pmapv": pmapv}

    # Get the time field
    time = seapy.roms.get_timevar(ncsrc)

    # Interpolate the depths from the source to final grid
    src_depth = np.min(src_grid.depth_rho, 0)
    dst_depth = __interp2_thread(src_grid.lon_rho, src_grid.lat_rho, src_depth,
                                 child_grid.lon_rho, child_grid.lat_rho, pmap[
                                     "pmaprho"],
                                 weight, nx, ny, child_grid.mask_rho)
    # Interpolate the scalar fields
    records = np.arange(0, ncsrc.variables[time].shape[0]) \
        if records is None else np.atleast_1d(records)
    for src in vmap:
        dest = vmap[src]

        # Extra fields will probably be user tracers (biogeochemical)
        fld = seapy.roms.fields.get(dest, {"dims": 3})

        # Only interpolate the fields we want in the destination
        if (dest not in ncout.variables) or \
           (src not in ncsrc.variables) or \
           ("rotate" in fld):
            continue

        if fld["dims"] == 2:
            # Compute the max number of records to hold in memory
            maxrecs = np.maximum(1, np.minimum(len(records),
                                               int(_max_memory / (child_grid.lon_rho.nbytes +
                                                                  src_grid.lon_rho.nbytes))))
            for rn, recs in enumerate(seapy.chunker(records, maxrecs)):
                outr = np.s_[
                    rn * maxrecs:np.minimum((rn + 1) * maxrecs, len(records))]
                ndata = np.ma.array(Parallel(n_jobs=threads, verbose=2, max_nbytes=_max_memory)
                                    (delayed(__interp2_thread)(
                                     src_grid.lon_rho, src_grid.lat_rho,
                                     ncsrc.variables[src][i, :, :],
                                     child_grid.lon_rho, child_grid.lat_rho,
                                     pmap["pmaprho"], weight,
                                     nx, ny, child_grid.mask_rho)
                                     for i in recs), copy=False)
                ncout.variables[dest][outr, :, :] = ndata
                ncout.sync()
        else:
            maxrecs = np.maximum(1, np.minimum(
                len(records), int(_max_memory /
                                  (child_grid.lon_rho.nbytes *
                                   child_grid.n +
                                   src_grid.lon_rho.nbytes *
                                   src_grid.n))))
            for rn, recs in enumerate(seapy.chunker(records, maxrecs)):
                outr = np.s_[
                    rn * maxrecs:np.minimum((rn + 1) * maxrecs, len(records))]
                ndata = np.ma.array(Parallel(n_jobs=threads, verbose=2, max_nbytes=_max_memory)
                                    (delayed(__interp3_thread)(
                                        src_grid.lon_rho, src_grid.lat_rho,
                                        src_grid.depth_rho,
                                        ncsrc.variables[src][i, :, :, :],
                                        child_grid.lon_rho, child_grid.lat_rho,
                                        child_grid.depth_rho,
                                        pmap["pmaprho"], weight,
                                        nx, ny, child_grid.mask_rho,
                                        up_factor=_up_scaling.get(dest, 1.0),
                                        down_factor=_down_scaling.get(dest, 1.0))
                                     for i in recs), copy=False)
                if z_mask:
                    __mask_z_grid(ndata, dst_depth, child_grid.depth_rho)
                ncout.variables[dest][outr, :, :, :] = ndata
                ncout.sync()

    # Rotate and Interpolate the vector fields. First, determine which
    # are the "u" and the "v" vmap fields
    try:
        velmap = {
            "u": list(vmap.keys())[list(vmap.values()).index("u")],
            "v": list(vmap.keys())[list(vmap.values()).index("v")]}
    except:
        warn("velocity not present in source file")
        return

    srcangle = getattr(src_grid, 'angle', None)
    dstangle = getattr(child_grid, 'angle', None)
    maxrecs = np.minimum(len(records),
                         int(_max_memory /
                             (2 * (child_grid.lon_rho.nbytes *
                                   child_grid.n +
                                   src_grid.lon_rho.nbytes *
                                   src_grid.n))))
    for nr, recs in enumerate(seapy.chunker(records, maxrecs)):
        vel = Parallel(n_jobs=threads, verbose=2, max_nbytes=_max_memory)(delayed(__interp3_vel_thread)(
            src_grid.lon_rho, src_grid.lat_rho,
            src_grid.depth_rho, srcangle,
            ncsrc.variables[velmap["u"]][i, :, :, :],
            ncsrc.variables[velmap["v"]][i, :, :, :],
            child_grid.lon_rho, child_grid.lat_rho,
            child_grid.depth_rho, dstangle,
            pmap["pmaprho"], weight, nx, ny,
            child_grid.mask_rho) for i in recs)

        for j in range(len(vel)):
            vel_u = np.ma.array(vel[j][0], copy=False)
            vel_v = np.ma.array(vel[j][1], copy=False)
            if z_mask:
                __mask_z_grid(vel_u, dst_depth, child_grid.depth_rho)
                __mask_z_grid(vel_v, dst_depth, child_grid.depth_rho)

            if child_grid.cgrid:
                vel_u = seapy.model.rho2u(vel_u)
                vel_v = seapy.model.rho2v(vel_v)

            ncout.variables["u"][nr * maxrecs + j, :] = vel_u
            ncout.variables["v"][nr * maxrecs + j, :] = vel_v

            if "ubar" in ncout.variables:
                # Create ubar and vbar
                # depth = seapy.adddim(child_grid.depth_u, vel_u.shape[0])
                ncout.variables["ubar"][nr * maxrecs + j, :] = \
                    np.sum(vel_u * child_grid.depth_u, axis=0) /  \
                    np.sum(child_grid.depth_u, axis=0)

            if "vbar" in ncout.variables:
                # depth = seapy.adddim(child_grid.depth_v, vel_v.shape[0])
                ncout.variables["vbar"][nr * maxrecs + j, :] = \
                    np.sum(vel_v * child_grid.depth_v, axis=0) /  \
                    np.sum(child_grid.depth_v, axis=0)

            ncout.sync()

    # Return the pmap that was used
    return pmap
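The ubar/vbar computation at the end is a vertical average of the velocity weighted by the grid's depth arrays; a stand-alone sketch of that reduction on a single made-up water column:

# Stand-alone sketch of the depth-weighted vertical average used for ubar/vbar;
# the depth weights and velocities below are made up.
import numpy as np

depth_u = np.array([[[5.0]], [[10.0]], [[20.0]]])   # depth weights, shape (s_rho, eta, xi)
vel_u = np.array([[[0.3]], [[0.2]], [[0.1]]])       # u velocity on the same levels

ubar = np.sum(vel_u * depth_u, axis=0) / np.sum(depth_u, axis=0)
print(ubar)                                         # weighted column average, about 0.157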