Example #1
def sound_speed_teos10(lats, lons, z, t, SP):
    """ Compute sound speed from temperature, salinity, and depth 
        for a given latitude and longitude using the Thermodynamic 
        Equation of Seawater 2010.

        https://teos-10.github.io/GSW-Python/

        Args:
            lats: numpy array
                Latitudes (-90 to 90 degrees)
            lons: numpy array
                Longitudes (-180 to 180 degrees)
            z: numpy array
                Depths (meters)
            t: numpy array
                In-situ temperature (Celsius)
            SP: numpy array
                Practical Salinity (psu)

        Returns:
            c: numpy array
                Sound speed (m/s) 
    """
    p = gsw.p_from_z(z=-z, lat=lats)  # sea pressure (gsw uses negative z below sea surface)
    SA = gsw.SA_from_SP(SP, p, lons, lats)  # absolute salinity
    CT = gsw.CT_from_t(SA, t, p)  # conservative temperature
    c = gsw.density.sound_speed(SA=SA, CT=CT, p=p)
    return c
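
A minimal usage sketch for sound_speed_teos10 (illustrative values; assumes numpy and gsw are imported, as the snippet does implicitly):

import numpy as np
import gsw

# illustrative single-cast inputs (values are made up)
lats = np.array([45.0, 45.0])
lons = np.array([-30.0, -30.0])
z = np.array([10.0, 500.0])    # depth in meters, positive down
t = np.array([12.0, 6.0])      # in-situ temperature, Celsius
SP = np.array([35.1, 35.0])    # practical salinity

c = sound_speed_teos10(lats, lons, z, t, SP)  # values near 1500 m/s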
def get_ht(t, s, d, vh, y, y_loc, cp, rho0):
    '''
    Compute offshore heat transport, as defined in St-Laurent et al JPO 2012
    '''
    #cp = 1; rho0 = 1
    tmp = np.nonzero(y <= y_loc)[0][-1]
    # transport at h point
    # mask transport. > 0.
    vhh = 0.5 * (vh[:, tmp - 1, :] + vh[:, tmp, :])
    vhnew1 = np.ma.masked_where(vhh < 0.0, vhh)
    vhnew2 = np.ma.masked_where(vhh > 0.0, vhh)
    t_new = t[:, tmp, :]  # temp at cavity exit
    s_new = s[0, tmp, :]  # salinity at cavity exit
    d_new = d[0, tmp, :]  # depth at cavity exit
    p = gsw.p_from_z(d_new, -72.0) * 1.0e4  # in Pascal
    t_freeze = eos.tfreeze(s_new, p) + 273.0  # in K
    #t_freeze = eos.tfreeze(34.,p) + 273.0 # in K
    print('t_freeze min/max', t_freeze.min(), t_freeze.max())
    dt = (t_new - t_freeze)
    print('dt max/min', dt.max(), dt.min())
    Hout = (vhnew1 * cp * rho0 * dt)  # watts
    Hin = (vhnew2 * cp * rho0 * dt)  # watts
    print('Hout, Hin, difference:', Hout.sum(), Hin.sum(), Hout.sum() + Hin.sum())
    return Hout.sum(), Hin.sum()
Example #3
def common_thermodynamics(depth, lon, lat, SP, t):
    """Wrapper for various thermodynamic calculations.

    Assumes input data is 1D with size N or M, or 2D with size N*M,
    where M denotes profiles and N depths.

    Parameters
    ----------
        depth : numpy array
            Depth (m), size N.
        lon : numpy array
            Longitude, size M.
        lat : numpy array
            Latitude, size M.
        SP : numpy array
            Practical salinity, size N*M.
        t : numpy array
            Temperature, size N*M.

    """

    p = gsw.p_from_z(-depth, np.mean(lat))
    SA = gsw.SA_from_SP(SP, p[:, np.newaxis], lon[np.newaxis, :],
                        lat[np.newaxis, :])
    CT = gsw.CT_from_t(SA, t, p[:, np.newaxis])
    sig0 = gsw.pot_rho_t_exact(SA, t, p[:, np.newaxis], 0)
    N2, p_mid = gsw.Nsquared(SA, CT, p[:, np.newaxis], lat[np.newaxis, :])

    return p, SA, CT, sig0, p_mid, N2
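
A shape-oriented usage sketch (synthetic values; N=3 depths, M=2 profiles; numpy and gsw assumed imported):

import numpy as np
import gsw

depth = np.array([0.0, 50.0, 100.0])   # size N
lon = np.array([-30.0, -29.5])         # size M
lat = np.array([44.0, 44.5])           # size M
SP = np.full((3, 2), 35.0)             # size N*M
t = np.full((3, 2), 10.0)              # size N*M

p, SA, CT, sig0, p_mid, N2 = common_thermodynamics(depth, lon, lat, SP, t)
# p has size N; SA, CT and sig0 are N*M; N2 and p_mid are (N-1)*M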
Example #4
def compute_potdens(ds, saltname='SALT', tempname='THETA'):
    """ compute the potential density
    """
    import gsw
    # compute the Conservative Temperature from the model's potential temperature
    temp = ds[tempname].transpose(*('time', 'k', 'face', 'j', 'i'))
    salt = ds[saltname].transpose(*('time', 'k', 'face', 'j', 'i'))
    CT = gsw.CT_from_pt(salt, temp)
    z, lat = xr.broadcast(ds['Z'], ds['YC'])
    z = z.transpose(*('k', 'face', 'j', 'i'))
    lat = lat.transpose(*('k', 'face', 'j', 'i'))
    # compute pressure from depth
    p = gsw.p_from_z(z, lat)
    # compute in-situ temperature
    T = gsw.t_from_CT(salt, CT, p)
    # compute potential density
    rho = gsw.pot_rho_t_exact(salt, T, p, 0.)
    # create new dataarray
    darho = xr.full_like(temp, 0.)
    darho = darho.load().chunk({'time': 1, 'face': 1})
    darho.name = 'RHO'
    darho.attrs['long_name'] = 'Potential Density ref at 0m'
    darho.attrs['standard_name'] = 'RHO'
    darho.attrs['units'] = 'kg/m3'
    darho.values = rho
    # filter special value
    darho = darho.where(darho > 1000)
    darho = darho.assign_coords(XC=ds['XC'], YC=ds['YC'], Z=ds['Z'])
    return darho
Example #5
 def __init__(self, lon, lat, name=None):
     self._woa=True
     #
     self._tfile = 'woa13_decav_t00_01v2.nc'
     nc = Dataset(self._tfile,'r')
     #
     glon = nc.variables['lon'][:]
     glat = nc.variables['lat'][:]
     ilon = np.argmin(np.abs(lon-glon))
     ilat = np.argmin(np.abs(lat-glat))
     self.lon = glon[ilon]
     self.lat = glat[ilat]
     #
     self.z = -nc.variables['depth'][:]
     self.p = gsw.p_from_z(self.z,self.lat)
     #
     self.temp = nc.variables['t_an'][0,:,ilat,ilon]
     nc.close()
     #
     self._sfile = 'woa13_decav_s00_01v2.nc'
     nc = Dataset(self._sfile,'r')
     self.s = nc.variables['s_an'][0,:,ilat,ilon]
     nc.close()
     # derive absolute salinity and conservative temperature
     self.SA = gsw.SA_from_SP(self.s, self.p, self.lon, self.lat)
     self.CT = gsw.CT_from_t(self.SA, self.temp, self.p)
     # isopycnal displacement
     self.eta=0.
     #
     if name is None:
         self.name = 'WOA water profile at lon=%.0f, lat=%.0f'%(self.lon,self.lat)
Example #6
def raw_to_zref(d, zref):
    temp = d['TEMP']
    psal = d['PSAL']
    pres = d['PRES']
    temp_qc = d['TEMP_QC']
    psal_qc = d['PSAL_QC']
    pres_qc = d['PRES_QC']
    lon = d['LONGITUDE']
    lat = d['LATITUDE']

    klist, ierr = remove_bad_qc(d)
    if ierr == 0:
        Tis = temp[klist]
        SP = psal[klist]
        p = pres[klist]

        CT, SA, z = insitu_to_absolute(Tis, SP, p, lon, lat, zref)
        Ti, Si, dTidz, dSidz = interp_at_zref(CT, SA, z, zref, klist)
        pi = gsw.p_from_z(-zref, lat)
        #Ri = gsw.rho(Si, Ti, pi)
        Ri, alpha, beta = gsw.rho_alpha_beta(Si, Ti, pi)
        g = gsw.grav(lat, pi)
        BVF2i = g * (beta * dSidz - alpha * dTidz)
        flag = True
    else:
        zero = zref * 0.
        Ti, Si, Ri, BVF2i = zero, zero, zero, zero
        flag = False
    return {'CT': Ti, 'SA': Si, 'RHO': Ri, 'BVF2': BVF2i}, flag
Example #7
 def _load_from_woa(self, lon, lat, name):
     self._woa = True
     #
     self._tfile = 'woa18_A5B7_t00_01.nc'
     nc = Dataset(self._tfile, 'r')
     #
     glon = nc.variables['lon'][:]
     glat = nc.variables['lat'][:]
     ilon = np.argmin(np.abs(lon - glon))
     ilat = np.argmin(np.abs(lat - glat))
     self.lon = glon[ilon]
     self.lat = glat[ilat]
     #
     self.z = -nc.variables['depth'][:].data
     self.p = gsw.p_from_z(self.z, self.lat)
     #
     self.temp = nc.variables['t_an'][0, :, ilat, ilon]
     nc.close()
     #
     self._sfile = 'woa18_A5B7_s00_01.nc'
     nc = Dataset(self._sfile, 'r')
     self.s = nc.variables['s_an'][0, :, ilat, ilon]
     nc.close()
     #
     self._update_eos()
     #
     if name is None:
         self.name = 'WOA water profile at lon=%.0f, lat=%.0f' % (self.lon,
                                                                  self.lat)
Example #8
def _in_situ_to_potential_temperature(dsTemp, dsSalin):
    z = dsTemp.z.values
    lat = dsTemp.lat.values
    lon = dsTemp.lon.values

    nz = len(z)
    ny, nx = lat.shape

    dsPotTemp = dsTemp.drop('in_situ_temperature')
    pt = numpy.nan * numpy.ones((nz, ny, nx))
    for zIndex in range(nz):
        pressure = gsw.p_from_z(z[zIndex], lat)
        in_situ_temp = dsTemp.in_situ_temperature[zIndex, :, :].values
        salin = dsSalin.salinity[zIndex, :, :].values
        mask = numpy.isfinite(in_situ_temp)
        SA = gsw.SA_from_SP(salin[mask], pressure[mask], lon[mask], lat[mask])
        ptSlice = pt[zIndex, :, :]
        ptSlice[mask] = gsw.pt_from_t(SA,
                                      in_situ_temp[mask],
                                      pressure[mask],
                                      p_ref=0.)
        pt[zIndex, :, :] = ptSlice

    dsPotTemp['temperature'] = (('z', 'y', 'x'), pt)
    dsPotTemp['temperature'].attrs = dsTemp.in_situ_temperature.attrs

    return dsPotTemp
def compute_B0_MO_lenght(temp, salt, PRCmE, depth, t, y, args):
    '''
    Compute net surface buoyancy flux and Monin-Obukhov length scale
    '''
    tmp1 = np.nonzero(y <= args.ISL)[0][-1]
    tmp2 = np.nonzero(y <= args.cshelf_lenght)[0][-1]
    # constants
    rho_0 = 1028.0
    vonKar = 0.41
    g = 9.8
    Cp = 3974.0
    # get EoS coeffs
    p = gsw.p_from_z(depth, -70.0) * 1.0e4  # in Pascal
    beta = eos.beta_wright_eos(temp, salt, p) / rho_0
    alpha = eos.alpha_wright_eos(temp, salt, p) / rho_0
    #print 'depth, beta, alpha',depth.min(), depth.max(), beta.min(), beta.max(), alpha.min(), alpha.max()
    # load local data
    ustar = mask_bad_values(Dataset(args.sfc_file).variables['ustar'][t, :])
    sensible = mask_bad_values(
        Dataset(args.sfc_file).variables['sensible'][t, :])
    latent = mask_bad_values(Dataset(args.sfc_file).variables['latent'][t, :])
    shelf_area = Dataset(args.ice_shelf_file).variables['shelf_area'][0, :]
    # buoyancy flux
    B0 = -g * (alpha * (-(sensible + latent) / (rho_0 * Cp)) - beta *
               (PRCmE * salt / rho_0))
    B0_shelf = B0[tmp1:tmp2, :].mean()
    B0_IS = B0[0:tmp1, :].mean()
    # Monin-Obukhov Length
    l = ustar**3 / (vonKar * B0)
    # mask values outside cavity
    l[l == 0.0] = -1e+34
    #    l = np.ma.masked_where(depth == depth.max(), l)
    #    print 'Monin-Obukhov Length min/max',l.min(), l.max()

    return l, B0, B0.mean(), B0_shelf, B0_IS
Example #10
def comp_rhostar(Si, Ti, lat):
    pi = gsw.p_from_z(-zref, lat)
    cs = gsw.sound_speed(Si, Ti, pi)

    Ri = gsw.rho(Si, Ti, pi)
    g = gsw.grav(lat, pi[0])
    E = np.zeros((len(zref), ))
    #plt.plot(Ri, -zref)
    f = interpolate.interp1d(zref, cs)

    def e(x):
        return -g / f(x)**2

    if True:
        for k, z in enumerate(zref):
            if k == 0:
                r, E[k] = 0., 1.
            else:
                #r1,p = integrate.quad(e,zref[k-1],z,epsrel=1e-1)
                x = np.linspace(zref[k - 1], z, 10)
                dx = x[1] - x[0]
                r1 = integrate.trapz(e(x), dx=dx)
                r += r1
                E[k] = np.exp(r)
    return Ri * E, E
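
comp_rhostar depends on a module-level zref array (and on scipy's interpolate/integrate); a hedged usage sketch with an assumed zref:

import numpy as np
import gsw
from scipy import interpolate, integrate

zref = np.arange(0.0, 2000.0, 10.0)   # assumed module-level reference depths (m)
Si = np.full(zref.shape, 35.0)        # absolute salinity, g/kg (illustrative)
Ti = np.full(zref.shape, 5.0)         # conservative temperature, deg C

rhostar, E = comp_rhostar(Si, Ti, lat=45.0)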
def get_ht(t, s, d, vh, y, direction, y_loc):
    '''
    Compute heat transport, onshore (direction=1) or offshore (direction=2), as defined in St-Laurent et al JPO 2012
    '''
    cp = 3974.0  # heat capacity
    rho0 = 1028.0
    tmp = np.nonzero(y <= y_loc)[0][-1]
    if direction == 1:
        # mask transport. > 0.
        vhnew = np.ma.masked_where(vh[:, tmp, :] > 0.0, vh[:, tmp, :])
        t_new = np.ma.masked_where(vh[:, tmp, :] > 0.0, t[:, tmp, :])
        s_new = np.ma.masked_where(vh[:, tmp, :] > 0.0, s[:, tmp, :])
        d_new = np.ma.masked_where(vh[:, tmp, :] > 0.0, d[:, tmp, :])
    else:
        # mask transport. < 0.
        vhnew = np.ma.masked_where(vh[:, tmp, :] < 0.0, vh[:, tmp, :])
        t_new = np.ma.masked_where(vh[:, tmp, :] < 0.0, t[:, tmp, :])
        s_new = np.ma.masked_where(vh[:, tmp, :] < 0.0, s[:, tmp, :])
        d_new = np.ma.masked_where(vh[:, tmp, :] < 0.0, d[:, tmp, :])

    p = gsw.p_from_z(d_new, -70.0) * 1.0e4  # in Pascal
    t_freeze = eos.tfreeze(s_new, p)
    dt = (t_new - t_freeze)
    #print 't-tf min/max',dt.min(),dt.max()

    if direction == 1:
        oht = -(vhnew * cp * rho0 * dt).sum()  # watts
    else:
        oht = (vhnew * cp * rho0 * dt).sum()

    return oht
Example #12
def ctd_bincast(data, dz, zmin, zmax):
    """
    Depth-bin CTD time series.

    Parameters
    ----------
    data : xr.Dataset
        CTD time series
    dz : float
        Bin size [m]
    zmin : float
        Minimum bin depth center [m]
    zmax : float
        Maximum bin depth center [m]

    Returns
    -------
    data : xr.Dataset
        Depth-binned CTD profile
    """
    dz2 = dz / 2
    zbin = np.arange(zmin - dz2, zmax + dz + dz2, dz)
    zbinlabel = np.arange(zmin, zmax + dz, dz)

    # prepare dataset
    tmp = data.swap_dims({"time": "depth"})
    tmp = tmp.reset_coords()

    # need to bin time separately, not sure why
    btime = tmp.time.groupby_bins("depth",
                                  bins=zbin,
                                  labels=zbinlabel,
                                  right=True,
                                  include_lowest=True).mean()
    # bin all variables
    out = tmp.groupby_bins("depth",
                           bins=zbin,
                           labels=zbinlabel,
                           right=True,
                           include_lowest=True).mean()

    # organize
    out.coords["time"] = btime
    out = out.set_coords(["lon", "lat"])
    out = out.rename_dims({"depth_bins": "z"})
    out = out.rename({"depth_bins": "depth"})

    # copy attributes
    # get data variable names
    varnames = [k for k, v in data.data_vars.items()]
    for vari in varnames:
        out[vari].attrs = data[vari].attrs
    out["depth"].attrs = {"long_name": "depth", "units": "m"}
    out.attrs = data.attrs

    # recalculate pressure from depth bins
    out["p"] = (["z"], gsw.p_from_z(-1 * out.depth, out.lat))
    out.p.attrs = {"long_name": "pressure", "units": "dbar"}

    return out
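
A hedged usage sketch with a synthetic CTD time series; the variable and coordinate names ("depth", "lon", "lat", a "time" coordinate) follow what the function expects:

import numpy as np
import xarray as xr
import gsw

n = 100
data = xr.Dataset(
    {"temperature": ("time", 8.0 + 0.1 * np.random.randn(n))},
    coords={
        "time": ("time", np.arange(n, dtype="int64").astype("datetime64[s]")),
        "depth": ("time", np.linspace(0.0, 50.0, n)),
        "lon": -65.0,
        "lat": 50.0,
    },
)
binned = ctd_bincast(data, dz=5.0, zmin=2.5, zmax=47.5)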
def compute_pot_density(prefix, inGridName, inDir):
    config = MpasAnalysisConfigParser()
    config.read('mpas_analysis/config.default')

    outDescriptor = get_comparison_descriptor(config, 'antarctic')
    outGridName = outDescriptor.meshName
    description = 'Monthly potential density climatologies from ' \
                  '2005-2010 average of the Southern Ocean State ' \
                  'Estimate (SOSE)'
    botDescription = 'Monthly potential density climatologies at sea ' \
                     'floor from 2005-2010 average from SOSE'

    for gridName in [inGridName, outGridName]:
        outFileName = '{}_pot_den_{}.nc'.format(prefix, gridName)
        TFileName = '{}_pot_temp_{}.nc'.format(prefix, gridName)
        SFileName = '{}_salinity_{}.nc'.format(prefix, gridName)
        if not os.path.exists(outFileName):
            with xarray.open_dataset(TFileName) as dsT:
                with xarray.open_dataset(SFileName) as dsS:
                    dsPotDensity = dsT.drop(['theta', 'botTheta'])

                    lat, lon, z = xarray.broadcast(dsS.lat, dsS.lon, dsS.z)
                    pressure = gsw.p_from_z(z.values, lat.values)
                    SA = gsw.SA_from_SP(dsS.salinity.values, pressure,
                                        lon.values, lat.values)
                    CT = gsw.CT_from_pt(SA, dsT.theta.values)
                    dsPotDensity['potentialDensity'] = (dsS.salinity.dims,
                                                        gsw.rho(SA, CT, 0.))
                    dsPotDensity.potentialDensity.attrs['units'] = \
                        'kg m$^{-3}$'
                    dsPotDensity.potentialDensity.attrs['description'] = \
                        description

                    lat, lon, z = xarray.broadcast(dsS.lat, dsS.lon, dsS.zBot)
                    pressure = gsw.p_from_z(z.values, lat.values)
                    SA = gsw.SA_from_SP(dsS.botSalinity.values, pressure,
                                        lon.values, lat.values)
                    CT = gsw.CT_from_pt(SA, dsT.botTheta.values)
                    dsPotDensity['botPotentialDensity'] = \
                        (dsS.botSalinity.dims, gsw.rho(SA, CT, 0.))
                    dsPotDensity.botPotentialDensity.attrs['units'] = \
                        'kg m$^{-3}$'
                    dsPotDensity.botPotentialDensity.attrs['description'] = \
                        botDescription

                    write_netcdf(dsPotDensity, outFileName)
Example #14
def potential_to_in_situ_temperature(dsPotTemp, dsSalin):
    z = dsPotTemp.z.values
    lat = numpy.maximum(dsPotTemp.lat.values, -80.)
    lon = dsPotTemp.lon.values

    if len(lat.shape) == 1:
        lon, lat = numpy.meshgrid(lon, lat)

    nz = len(z)
    ny, nx = lat.shape

    if 'time' in dsPotTemp.dims:
        nt = dsPotTemp.sizes['time']
        T = numpy.nan * numpy.ones((nt, nz, ny, nx))
        for zIndex in range(nz):
            pressure = gsw.p_from_z(z[zIndex], lat)
            for tIndex in range(nt):
                pt = dsPotTemp.temperature[tIndex, zIndex, :, :].values
                salin = dsSalin.salinity[tIndex, zIndex, :, :].values
                mask = numpy.logical_and(numpy.isfinite(pt),
                                         numpy.isfinite(salin))
                SA = gsw.SA_from_SP(salin[mask], pressure[mask], lon[mask],
                                    lat[mask])
                TSlice = T[tIndex, zIndex, :, :]
                CT = gsw.CT_from_pt(SA, pt[mask])
                TSlice[mask] = gsw.t_from_CT(SA, CT, pressure[mask])
                T[tIndex, zIndex, :, :] = TSlice
    else:
        T = numpy.nan * numpy.ones((nz, ny, nx))
        for zIndex in range(nz):
            pressure = gsw.p_from_z(z[zIndex], lat)
            pt = dsPotTemp.temperature[zIndex, :, :].values
            salin = dsSalin.salinity[zIndex, :, :].values
            mask = numpy.logical_and(numpy.isfinite(pt), numpy.isfinite(salin))
            SA = gsw.SA_from_SP(salin[mask], pressure[mask], lon[mask],
                                lat[mask])
            TSlice = T[zIndex, :, :]
            CT = gsw.CT_from_pt(SA, pt[mask])
            TSlice[mask] = gsw.t_from_CT(SA, CT, pressure[mask])
            T[zIndex, :, :] = TSlice

    dsTemp = dsPotTemp.drop('temperature')
    dsTemp['temperature'] = (dsPotTemp.temperature.dims, T)
    dsTemp['temperature'].attrs = dsPotTemp.temperature.attrs

    return dsTemp
Example #15
def get_soundc(t, s, z, lon, lat):
    ''' compute sound velocity
    '''
    import gsw
    p = gsw.p_from_z(z, lat.mean())
    SA = gsw.SA_from_SP(s, p, lon, lat)
    CT = gsw.CT_from_pt(SA, t)
    # inputs are: SA (absolute salinity) and CT (conservative temperature)
    c = gsw.sound_speed(SA, CT, p)
    return c
Example #16
def test_pz_roundtrip():
    """
    The p_z conversion functions have Matlab-based checks that use
    only the first two arguments.
    Here we verify that the functions are also inverses when the optional
    arguments are used.
    """
    z = np.array([-10, -100, -1000, -5000], dtype=float)
    p = gsw.p_from_z(z, 30, 0.5, 0.25)
    zz = gsw.z_from_p(p, 30, 0.5, 0.25)
    assert_almost_equal(z, zz)
def _main(args):
    """Run the command line program."""

    temperature_cube, temperature_history = gio.combine_files(args.temperature_file, args.temperature_var, checks=True)
    salinity_cube, salinity_history = gio.combine_files(args.salinity_file, 'sea_water_salinity', checks=True)
   
    assert 'c' in str(temperature_cube.units).lower(), "Input temperature units must be in celsius"
#    if not 'C' in str(bigthetao_cube.units):
#        bigthetao_cube.data = bigthetao_cube.data - 273.15
#        data_median = np.ma.median(bigthetao_cube.data)
#        assert data_median < 100
#        assert data_median > -10
#        bigthetao_cube.units = 'C'

    target_shape = temperature_cube.shape[1:]
    depth = temperature_cube.coord('depth').points * -1
    broadcast_depth = uconv.broadcast_array(depth, 0, target_shape)
    broadcast_longitude = uconv.broadcast_array(temperature_cube.coord('longitude').points, [1, 2], target_shape)
    broadcast_latitude = uconv.broadcast_array(temperature_cube.coord('latitude').points, [1, 2], target_shape)
    pressure = gsw.p_from_z(broadcast_depth, broadcast_latitude)

    absolute_salinity = gsw.SA_from_SP(salinity_cube.data, pressure, broadcast_longitude, broadcast_latitude)
    if args.temperature_var == 'sea_water_conservative_temperature':
        conservative_temperature = temperature_cube.data
    elif args.temperature_var == 'sea_water_potential_temperature':  
        conservative_temperature = gsw.CT_from_pt(absolute_salinity, temperature_cube.data)
    else:
        raise ValueError('Invalid temperature variable')

    if args.coefficient == 'alpha':
        coefficient_data = gsw.alpha(absolute_salinity, conservative_temperature, pressure)
        var_name = 'alpha'
        standard_name = 'thermal_expansion_coefficient'
        long_name = 'thermal expansion coefficient'
        units = '1/K'
    elif args.coefficient == 'beta':
        coefficient_data = gsw.beta(absolute_salinity, conservative_temperature, pressure)
        var_name = 'beta'
        standard_name = 'saline_contraction_coefficient'
        long_name = 'saline contraction coefficient'
        units = 'kg/g'
    else:
        raise ValueError('Coefficient must be alpha or beta')

    iris.std_names.STD_NAMES[standard_name] = {'canonical_units': units}
    coefficient_cube = temperature_cube.copy()
    coefficient_cube.data = coefficient_data
    coefficient_cube.standard_name = standard_name    
    coefficient_cube.long_name = long_name
    coefficient_cube.var_name = var_name
    coefficient_cube.units = units

    coefficient_cube.attributes['history'] = cmdprov.new_log(git_repo=repo_dir)
    iris.save(coefficient_cube, args.outfile)
Example #18
def depth_to_pressure(z, lat):
    """Converts depths to pressures.
       
       z:    scalar or numpy array of depth (m).
       lat:  scalar or numpy array of latitude (deg)."""

    assert np.array(lat).size > 0 and np.array(z).size > 0, "No value provided for z or lat"

    p = gsw.p_from_z(-z, lat)

    return p
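
A one-line usage sketch (illustrative values):

import numpy as np

p = depth_to_pressure(np.array([10.0, 1000.0]), 45.0)  # roughly 10 and 1010 dbar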
Example #19
def depth_to_pressure(z, lat):
    """Converts depths to pressures.
       
       z:    scalar or numpy array of depth (m).
       lat:  scalar or numpy array of latitude (deg)."""

    assert np.array(lat).size > 0 and np.array(
        z).size > 0, 'No value provided for z or lat'

    p = gsw.p_from_z(-z, lat)

    return p
Example #20
def return_density(
    pt_values: np.ndarray,
    practical_salt_values: np.ndarray,
    lon_values: np.ndarray,
    lat_values: np.ndarray,
    z_values: np.ndarray,
) -> Tuple[np.ndarray, np.ndarray, np.ndarray]:
    """
    Wrapper around the gsw to make it work.

    Args:
        pt_values (np.array): Potential temperature.
        practical_salt_values (np.array): Salt values.
        lon_values (np.array): Longitude values.
        lat_values (np.array): Latitude values.
        z_values (np.array): Height values.

    Returns:
        Tuple[np.array, np.array, np.array]: rho_values, ct_values, pressure_values
    """

    lat_mesh, z_mesh = np.meshgrid(lat_values, z_values)

    # pylint: disable=no-value-for-parameter
    pressure_mesh = gsw.p_from_z(z_mesh, lat_mesh)
    pressure_values = np.zeros(np.shape(pt_values))
    lat_grid = np.zeros(np.shape(pt_values))
    lon_grid = np.zeros(np.shape(pt_values))

    # TODO these two loops could be vectorized

    for i in range(np.shape(pt_values)[2]):
        pressure_values[:, :, i] = pressure_mesh[:, :]
        lat_grid[:, :, i] = lat_mesh[:, :]

    for i in range(np.shape(pt_values)[0]):
        for j in range(np.shape(pt_values)[1]):
            lon_grid[i, j, :] = lon_values[:]

    absolute_salinity = gsw.SA_from_SP(
        practical_salt_values, pressure_values, lon_grid, lat_grid
    )
    ct_values = gsw.conversions.CT_from_pt(absolute_salinity, pt_values)
    rho_values = gsw.density.rho(absolute_salinity, ct_values, pressure_values)

    # print(np.shape(rho_values))

    return rho_values, ct_values, pressure_values
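
A hedged usage sketch; given the meshgrid and loops above, pt_values must be shaped (depth, lat, lon). The snippet assumes numpy, gsw and typing.Tuple are imported alongside the function:

import numpy as np
import gsw

z = np.array([-5.0, -50.0])               # heights, negative below the surface
lat = np.array([10.0, 12.0])
lon = np.array([140.0, 142.0, 144.0])
pt = np.full((z.size, lat.size, lon.size), 15.0)  # potential temperature
sp = np.full(pt.shape, 35.0)                      # practical salinity

rho, ct, p = return_density(pt, sp, lon, lat, z)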
Example #21
def add_SA_CT_PT(xray):
    if 'PRES' in list(xray.data_vars):
        PRES_out = xray['PRES']
    else:
        PRES_out = -gsw.p_from_z(xray['DEPTH'], xray.LATITUDE)  # p_from_z also requires latitude
    SA_out = gsw.SA_from_SP(xray['PSAL'], PRES_out, xray.LONGITUDE,
                            xray.LATITUDE)
    if 'PTMP' in list(xray.data_vars):
        PT_out = xray['PTMP']
    else:
        PT_out = gsw.pt0_from_t(SA_out, xray['TEMP'], PRES_out)
    CT_out = gsw.CT_from_pt(SA_out, PT_out)
    PD_out = gsw.sigma0(SA_out, CT_out)
    xray['ASAL'] = (('TIME', 'DEPTH'), SA_out)
    xray['PTMP'] = (('TIME', 'DEPTH'), PT_out)
    xray['CTMP'] = (('TIME', 'DEPTH'), CT_out)
    xray['PDEN'] = (('TIME', 'DEPTH'), PD_out)
Example #22
def teos10_insitu_dens(t, s, z, lat, lon):
    """
    Computes the insitu density from potential temperature and salinity using the
    Thermodynamic Equation of Seawater 2010 (TEOS-10; IOC, SCOR and IAPSO, 2010).
    http://www.teos-10.org/pubs/TEOS-10_Manual.pdf

    """

    depth = np.ones_like(t) * z[None, :, None]
    lat = np.ones_like(t) * lat[None, None, :]
    lon = np.ones_like(t) * lon[None, None, :]

    p = gsw.p_from_z(-depth, lat)
    SA = gsw.SA_from_SP(s, p, lon, lat)
    CT = gsw.CT_from_pt(SA, t)
    rho = gsw.rho(SA, CT, p)

    return rho
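
Given the broadcasting above, t and s are expected as (time, depth, horizontal) arrays with lat and lon along the last axis; a minimal sketch with synthetic values:

import numpy as np
import gsw

z = np.array([5.0, 100.0, 500.0])         # depths, m (positive down)
lat = np.array([-40.0, -35.0])
lon = np.array([160.0, 161.0])
t = np.full((1, z.size, lat.size), 10.0)  # potential temperature
s = np.full(t.shape, 35.0)                # practical salinity

rho = teos10_insitu_dens(t, s, z, lat, lon)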
Example #23
def pd_sa_ct_rho_sigmatheta(dataframe,
                            temperature,
                            salinity,
                            latitude=50.0,
                            longitude=-65.0,
                            z=None,
                            pressure=None):
    """
    Make common thermodynamic conversions of T-S data in dataframes.

    Parameters
    ----------
    dataframe: pandas.DataFrame
        In which to add columns.
    temperature, salinity: str
        Names of the in situ T and practical S columns.
    latitude: float or str
        User specified lat or column name.
    longitude: float or str
        User specified lon or column name.
    z: float or str
        User specified depth or column name.
    pressure: float or str
        User specified sea pressure or column name.

    Returns
    -------
    pandas.Dataframe:
        Input dataframe with added SA, CT, rho and ST columns.

    """
    # Ensure z, pressure specified as strings are in dataframe
    if isinstance(pressure, str) and pressure not in dataframe.keys():
        raise KeyError(
            'pressure must be a key of dataframe when specified as str.')
    if isinstance(z, str) and z not in dataframe.keys():
        raise KeyError('z must be a key of dataframe when specified as str.')

    # Ensure depth information is specified
    if z is None and pressure is None:
        raise TypeError('One of z or pressure must be float or str, got:', z,
                        pressure)

    # Determine pressure from depth column
    elif pressure is None and isinstance(z, str):
        pressure = gsw.p_from_z(-dataframe[z], latitude)

    # Determine pressure from depth value
    elif pressure is None and isinstance(z, (int, float)):
        pressure = gsw.p_from_z(-z, latitude)

    # Use pressure column
    else:
        pressure = dataframe[pressure]

    # Use dataframe column or value as geographical coordinate
    if isinstance(latitude, str):
        lat = dataframe[latitude]
    else:
        lat = latitude
    if isinstance(longitude, str):
        lon = dataframe[longitude]
    else:
        lon = longitude

    # Call to TEOS10
    with warnings.catch_warnings():
        warnings.simplefilter('ignore', RuntimeWarning)
        SA, CT, rho, sigma_theta = sa_ct_rho_sigmatheta(dataframe[temperature],
                                                        dataframe[salinity],
                                                        pressure,
                                                        latitude=lat,
                                                        longitude=lon)

    # Add columns to dataframe
    dataframe.loc[:, 'SA'] = SA
    dataframe.loc[:, 'CT'] = CT
    dataframe.loc[:, 'rho'] = rho
    dataframe.loc[:, 'ST'] = sigma_theta

    return dataframe
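
A hedged usage sketch (assumes the module's sa_ct_rho_sigmatheta helper, called above, is available):

import pandas as pd

df = pd.DataFrame({'T': [10.0, 8.0], 'SP': [32.0, 32.5], 'z': [5.0, 50.0]})
df = pd_sa_ct_rho_sigmatheta(df, 'T', 'SP', z='z')
# df now carries the added SA, CT, rho and ST columns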
def text_to_netcdf(inDir, outDir):
    inFileName = '{}/Antarctic_shelf_data.txt'.format(inDir)
    outFileName = '{}/Schmidtko_et_al_2014_bottom_PT_S_PD_' \
                  'SouthernOcean_0.25x0.125degree.nc'.format(outDir)

    if os.path.exists(outFileName):
        return

    # 1/4 x 1/8 degree grid cells
    cellsPerLon = 4
    cellsPerLat = 8

    obsFile = pandas.read_csv(inFileName, delim_whitespace=True)

    inLon = numpy.array(obsFile.iloc[:, 0])
    inLat = numpy.array(obsFile.iloc[:, 1])

    inZ = numpy.array(obsFile.iloc[:, 2])

    inCT = numpy.array(obsFile.iloc[:, 3])
    inCT_std = numpy.array(obsFile.iloc[:, 4])

    inSA = numpy.array(obsFile.iloc[:, 5])
    inSA_std = numpy.array(obsFile.iloc[:, 6])

    pressure = gsw.p_from_z(inZ, inLat)
    inS = gsw.SP_from_SA(inSA, pressure, inLon, inLat)
    inPT = gsw.pt_from_CT(inSA, inCT)
    inPD = gsw.rho(inSA, inCT, 0.)

    minLat = int(numpy.amin(inLat) * cellsPerLat) / cellsPerLat
    maxLat = int(numpy.amax(inLat) * cellsPerLat) / cellsPerLat
    deltaLat = 1. / cellsPerLat
    outLat = numpy.arange(minLat - deltaLat, maxLat + 2 * deltaLat, deltaLat)

    deltaLon = 1. / cellsPerLon
    outLon = numpy.arange(0., 360., deltaLon)

    xIndices = numpy.array(cellsPerLon * inLon + 0.5, int)
    yIndices = numpy.array(cellsPerLat * (inLat - outLat[0]) + 0.5, int)

    Lon, Lat = numpy.meshgrid(outLon, outLat)

    ds = xarray.Dataset()
    ds['lon'] = (('lon', ), outLon)
    ds.lon.attrs['units'] = 'degrees'
    ds.lon.attrs['description'] = 'longitude'

    ds['lat'] = (('lat', ), outLat)
    ds.lat.attrs['units'] = 'degrees'
    ds.lat.attrs['description'] = 'latitude'

    z = numpy.ma.masked_all(Lon.shape)
    z[yIndices, xIndices] = inZ
    ds['z'] = (('lat', 'lon'), z)
    ds.z.attrs['units'] = 'meters'
    ds.z.attrs['description'] = 'depth of the seafloor (positive up)'

    PT = numpy.ma.masked_all(Lon.shape)
    PT[yIndices, xIndices] = inPT
    ds['botTheta'] = (('lat', 'lon'), PT)
    ds.botTheta.attrs['units'] = r'$\degree$C'
    ds.botTheta.attrs['description'] = \
        'potential temperature at sea floor'

    PT_std = numpy.ma.masked_all(Lon.shape)
    # neglect difference between std of PT and CT
    PT_std[yIndices, xIndices] = inCT_std
    ds['botThetaStd'] = (('lat', 'lon'), PT_std)
    ds.botThetaStd.attrs['units'] = r'$\degree$C'
    ds.botThetaStd.attrs['description'] = \
        'standard deviation in potential temperature at sea floor'

    S = numpy.ma.masked_all(Lon.shape)
    S[yIndices, xIndices] = inS
    ds['botSalinity'] = (('lat', 'lon'), S)
    ds.botSalinity.attrs['units'] = 'PSU'
    ds.botSalinity.attrs['description'] = \
        'salinity at sea floor'

    S_std = numpy.ma.masked_all(Lon.shape)
    # neglect difference between std of S and SA
    S_std[yIndices, xIndices] = inSA_std
    ds['botSalinityStd'] = (('lat', 'lon'), S_std)
    ds.botSalinityStd.attrs['units'] = 'PSU'
    ds.botSalinityStd.attrs['description'] = \
        'standard deviation in salinity at sea floor'

    PD = numpy.ma.masked_all(Lon.shape)
    PD[yIndices, xIndices] = inPD
    ds['botPotentialDensity'] = (('lat', 'lon'), PD)
    ds.botPotentialDensity.attrs['units'] = 'kg m$^{-3}$'
    ds.botPotentialDensity.attrs['description'] = \
        'potential density at sea floor'

    write_netcdf(ds, outFileName)
def mld(S,thetao,depth_cube,latitude_deg):
	"""Compute the mixed layer depth.
	Parameters
	----------
	SA : array_like
		 Absolute Salinity  [g/kg]
	CT : array_like
		 Conservative Temperature [:math:`^\circ` C (ITS-90)]
	p : array_like
		sea pressure [dbar]
	criterion : str, optional
			   MLD Criteria
	Mixed layer depth criteria are:
	'temperature' : Computed based on constant temperature difference
	criterion, CT(0) - T[mld] = 0.5 degree C.
	'density' : computed based on the constant potential density difference
	criterion, pd[0] - pd[mld] = 0.125 in sigma units.
	`pdvar` : computed based on variable potential density criterion
	pd[0] - pd[mld] = var(T[0], S[0]), where var is a variable potential
	density difference which corresponds to constant temperature difference of
	0.5 degree C.
	Returns
	-------
	MLD : array_like
		  Mixed layer depth
	idx_mld : bool array
			  Boolean array in the shape of p with MLD index.
	Examples
	--------
	>>> import os
	>>> import gsw
	>>> import matplotlib.pyplot as plt
	>>> from oceans import mld
	>>> from gsw.utilities import Bunch
	>>> # Read data file with check value profiles
	>>> datadir = os.path.join(os.path.dirname(gsw.utilities.__file__), 'data')
	>>> cv = Bunch(np.load(os.path.join(datadir, 'gsw_cv_v3_0.npz')))
	>>> SA, CT, p = (cv.SA_chck_cast[:, 0], cv.CT_chck_cast[:, 0],
	...              cv.p_chck_cast[:, 0])
	>>> fig, (ax0, ax1, ax2) = plt.subplots(nrows=1, ncols=3, sharey=True)
	>>> l0 = ax0.plot(CT, -p, 'b.-')
	>>> MDL, idx = mld(SA, CT, p, criterion='temperature')
	>>> l1 = ax0.plot(CT[idx], -p[idx], 'ro')
	>>> l2 = ax1.plot(CT, -p, 'b.-')
	>>> MDL, idx = mld(SA, CT, p, criterion='density')
	>>> l3 = ax1.plot(CT[idx], -p[idx], 'ro')
	>>> l4 = ax2.plot(CT, -p, 'b.-')
	>>> MDL, idx = mld(SA, CT, p, criterion='pdvar')
	>>> l5 = ax2.plot(CT[idx], -p[idx], 'ro')
	>>> _ = ax2.set_ylim(-500, 0)
	References
	----------
	.. [1] Monterey, G., and S. Levitus, 1997: Seasonal variability of mixed
	layer depth for the World Ocean. NOAA Atlas, NESDIS 14, 100 pp.
	Washington, D.C.
	""" 
	#depth_cube.data = np.ma.masked_array(np.swapaxes(np.tile(depths,[360,180,1]),0,2))
	MLD_out = S.extract(iris.Constraint(depth = np.min(depth_cube.data)))
	MLD_out_data = MLD_out.data
	for i in range(np.shape(MLD_out)[0]):
		print('calculating mixed layer for year: ', i)
		thetao_tmp = thetao[i]
		S_tmp = S[i]
		depth_cube.data = np.abs(depth_cube.data)
		depth_cube = depth_cube * (-1.0)
		p = gsw.p_from_z(depth_cube.data,latitude_deg.data) # dbar
		SA = S_tmp.data*1.004715
		CT = gsw.CT_from_pt(SA,thetao_tmp.data - 273.15)
		SA, CT, p = map(np.asanyarray, (SA, CT, p))
		SA, CT, p = np.broadcast_arrays(SA, CT, p)
		SA, CT, p = map(ma.masked_invalid, (SA, CT, p))
		p_min, idx = p.min(axis = 0), p.argmin(axis = 0)
		sigma = SA.copy()
		to_mask = np.where(sigma == S.data.fill_value)
		sigma = gsw.rho(SA, CT, p_min) - 1000.
		sigma[to_mask] = np.NAN
		sig_diff = sigma[0,:,:].copy()
		sig_diff += 0.125 # Levitus (1982) density criteria
		sig_diff = np.tile(sig_diff,[np.shape(sigma)[0],1,1])
		idx_mld = sigma <= sig_diff
		# NEED TO SORT THIS BIT - COMPARE WITH OTHER AND FIX
		MLD = ma.masked_all_like(S_tmp.data)
		MLD[idx_mld] = depth_cube.data[idx_mld] * -1
		MLD_out_data[i,:,:] = np.ma.max(MLD,axis=0) 
	return MLD_out_data
Example #26
    def standardize(self, gps_prefix=None):

        df = self.data.copy()

        # Convert NMEA coordinates to decimal degrees
        for col in df.columns:
            # Ignore if the m_gps_lat and/or m_gps_lon value is the default masterdata value
            if col.endswith('_lat'):
                df[col] = df[col].map(lambda x: get_decimal_degrees(x)
                                      if x <= 9000 else np.nan)
            elif col.endswith('_lon'):
                df[col] = df[col].map(lambda x: get_decimal_degrees(x)
                                      if x < 18000 else np.nan)

        # Standardize 'time' to the 't' column
        for t in self.TIMESTAMP_SENSORS:
            if t in df.columns:
                df['t'] = pd.to_datetime(df[t], unit='s')
                break

        # Interpolate GPS coordinates
        if 'm_gps_lat' in df.columns and 'm_gps_lon' in df.columns:

            df['drv_m_gps_lat'] = df.m_gps_lat.copy()
            df['drv_m_gps_lon'] = df.m_gps_lon.copy()

            # Fill in data with nulls where value is the default masterdata value
            masterdatas = (df.drv_m_gps_lon >= 18000) | (df.drv_m_gps_lat >
                                                         9000)
            df.loc[masterdatas, 'drv_m_gps_lat'] = np.nan
            df.loc[masterdatas, 'drv_m_gps_lon'] = np.nan

            try:
                # Interpolate the filled in 'x' and 'y'
                y_interp, x_interp = interpolate_gps(masked_epoch(df.t),
                                                     df.drv_m_gps_lat,
                                                     df.drv_m_gps_lon)
            except (ValueError, IndexError):
                L.warning("Raw GPS values not found!")
                y_interp = np.empty(df.drv_m_gps_lat.size) * np.nan
                x_interp = np.empty(df.drv_m_gps_lon.size) * np.nan

            df['y'] = y_interp
            df['x'] = x_interp
        """
        ---- Option 1: Always calculate Z from pressure ----
        It's really a matter of data provider preference and varies from one provider to another.
        That being said, typically the sci_water_pressure or m_water_pressure variables, if present
        in the raw data files, will typically have more non-NaN values than m_depth.  For example,
        all MARACOOS gliders typically have both m_depth and sci_water_pressure contained in them.
        However, m_depth is typically heavily decimated while sci_water_pressure contains a more
        complete pressure record.  So, while we transmit both m_depth and sci_water_pressure, I
        calculate depth from pressure & (interpolated) latitude and use that as my NetCDF depth
        variable. - Kerfoot
        """
        # Search for a 'pressure' column
        for p in self.PRESSURE_SENSORS:
            if p in df.columns:
                # Convert bar to dbar here
                df['pressure'] = df[p].copy() * 10
                # Calculate depth from pressure and latitude
                # Negate the results so that increasing values denote increasing depths
                df['z'] = -z_from_p(df.pressure, df.y)
                break

        if 'z' not in df and 'pressure' not in df:
            # Search for a 'z' column
            for p in self.DEPTH_SENSORS:
                if p in df.columns:
                    df['z'] = df[p].copy()
                    # Calculate pressure from depth and latitude
                    # Negate the results so that increasing values denote increasing depth
                    df['pressure'] = -p_from_z(df.z, df.y)
                    break
        # End Option 1
        """
        ---- Option 2: Use raw pressure/depth data that was sent across ----
        # Standardize to the 'pressure' column
        for p in self.PRESSURE_SENSORS:
            if p in df.columns:
                # Convert bar to dbar here
                df['pressure'] = df[p].copy() * 10
                break

        # Standardize to the 'z' column
        for p in self.DEPTH_SENSORS:
            if p in df.columns:
                df['z'] = df[p].copy()
                break

        # Don't calculate Z from pressure if a metered depth column exists already
        if 'pressure' in df and 'z' not in df:
            # Calculate depth from pressure and latitude
            # Negate the results so that increasing values note increasing depths
            df['z'] = -z_from_p(df.pressure, df.y)

        if 'z' in df and 'pressure' not in df:
            # Calculate pressure from depth and latitude
            # Negate the results so that increasing values note increasing depth
            df['pressure'] = -p_from_z(df.z, df.y)
        # End Option 2
        """

        rename_columns = {
            'm_water_vx': 'u_orig',
            'm_water_vy': 'v_orig',
        }

        # These need to be standardize so we can compute salinity and density!
        for vname in self.TEMPERATURE_SENSORS:
            if vname in df.columns:
                rename_columns[vname] = 'temperature'
                break
        for vname in self.CONDUCTIVITY_SENSORS:
            if vname in df.columns:
                rename_columns[vname] = 'conductivity'
                break

        # Standardize columns
        df = df.rename(columns=rename_columns)

        # Compute additional columns
        df = self.compute(df)

        return df
Example #27
def main(args):
    """Parse one or more Slocum glider dba files and write time-series based NetCDF file(s)."""

    status = 0

    log_level = args.loglevel
    log_level = getattr(logging, log_level.upper())
    log_format = '%(asctime)s:%(module)s:%(levelname)s:%(message)s [line %(lineno)d]'
    logging.basicConfig(format=log_format, level=log_level)

    debug = args.debug
    config_path = args.config_path
    nc_files = args.nc_files
    drop_missing = args.drop
    profiles = args.profiles
    ngdac = args.ngdac
    nc_dest = args.output_path or os.path.realpath(os.curdir)
    clobber = args.clobber
    nc_format = args.nc_format

    if not os.path.isdir(config_path):
        logging.error('Invalid configuration path: {:}'.format(config_path))
        return 1

    if not nc_files:
        logging.error('No NAVOCEANO NetCDF files specified')
        return 1

    if not os.path.isdir(nc_dest):
        logging.error(
            'Invalid NetCDF destination specified: {:}'.format(nc_dest))
        return 1

    logging.info('Configuration path: {:}'.format(config_path))
    logging.info('NetCDF destination: {:}'.format(nc_dest))
    logging.info('Processing {:} dba files'.format(len(nc_files)))
    if profiles:
        logging.info('Writing profile-based NetCDFs')
    else:
        logging.info('Writing time-series NetCDFs')

    gdm = GliderDataModel(cfg_dir=config_path)
    logging.debug('{:}'.format(gdm))

    netcdf_count = 0
    for nc_file in nc_files:

        if nc_file.endswith('optics.nc'):
            logging.info('Skipping optics file: {:}'.format(nc_file))
            continue

        logging.info('Processing {:}'.format(nc_file))

        nc_path, nc_name = os.path.split(nc_file)
        fname, ext = os.path.splitext(nc_name)

        nc_df, pro_meta, nc_ds = load_navo_nc(nc_file)

        # NAVOCEANO NetCDF files do not contain pressure, so we need to calculate that from depth and latitude
        nc_df['pressure'] = p_from_z(-nc_df.depth, nc_df.latitude.mean())
        # NAVOCEANO NetCDF files do not contain density, so we need to calculate it
        nc_df['density'] = calculate_density(nc_df.temp, nc_df.pressure,
                                             nc_df.salinity, nc_df.latitude,
                                             nc_df.longitude)
        # Convert nc_df.scitime from a timedelta to a datetime64
        nc_df['scitime'] = pd.to_datetime(pd.Series(
            [td.total_seconds() for td in nc_df.scitime]),
                                          unit='s').values

        if nc_df.empty:
            continue

        gdm.data = nc_df
        gdm.profiles = pro_meta

        if debug:
            logging.info('{:}'.format(gdm))
            logging.info('debug switch set so no NetCDF creation')
            continue

        # dba_meta = build_dbas_data_frame(nc_file)
        # if dba_meta.empty:
        #     continue

        if not profiles:
            logging.info('Writing time-series...')
            ds = gdm.to_timeseries_dataset(drop_missing=drop_missing)

            # Update history attribute
            if 'history' not in ds.attrs:
                ds.attrs['history'] = ''
            new_history = '{:}: {:} {:}'.format(
                datetime.datetime.utcnow().strftime('%Y-%m-%dT%H:%M:%SZ'),
                sys.argv[0], nc_file)
            if ds.attrs['history'].strip():
                ds.attrs['history'] = '{:}\n{:}'.format(
                    ds.attrs['history'], new_history)
            else:
                ds.attrs['history'] = '{:}'.format(new_history)

            # Update the source global attribute
            ds.attrs['source'] = nc_file

            # Add the source_file variable
            source_file_attrs = nc_ds.attrs.copy()
            source_file_attrs['bytes'] = '{:}'.format(os.path.getsize(nc_file))
            ds['source_file'] = DataArray(source_file_attrs['filename_label'],
                                          attrs=source_file_attrs)

            netcdf_path = os.path.join(nc_dest, '{:}.nc'.format(fname))
            logging.info('Writing: {:}'.format(netcdf_path))
            ds.to_netcdf(netcdf_path)
            netcdf_count += 1
        else:
            logging.info('Writing profiles...')
            glider = os.path.basename(nc_file).split('_')[0]
            dbd_type = 'rt'

            for profile_time, profile_ds in gdm.iter_profiles(
                    drop_missing=drop_missing):
                netcdf_path = os.path.join(
                    nc_dest, '{:}_{:}_{:}.nc'.format(
                        glider, profile_time.strftime('%Y%m%dT%H%M%SZ'),
                        dbd_type))
                # Rename latitude and longitude to lat and lon
                profile_ds = profile_ds.rename({
                    'latitude': 'lat',
                    'longitude': 'lon'
                })

                # Set profile_lat and profile_lon
                profile_ds.profile_lat.values = profile_ds.lat.mean()
                profile_ds.profile_lon.values = profile_ds.lon.mean()

                if os.path.isfile(netcdf_path):
                    if not clobber:
                        logging.info('Ignoring existing NetCDF: {:}'.format(
                            netcdf_path))
                        continue
                    else:
                        logging.info('Clobbering existing NetCDF: {:}'.format(
                            netcdf_path))

                # Update history attribute
                if 'history' not in profile_ds.attrs:
                    profile_ds.attrs['history'] = ''

                new_history = '{:}: {:} --profiles {:}'.format(
                    datetime.datetime.utcnow().strftime('%Y-%m-%dT%H:%M:%SZ'),
                    sys.argv[0], nc_file)
                if profile_ds.attrs['history'].strip():
                    profile_ds.attrs['history'] = '{:}\n{:}'.format(
                        profile_ds.attrs['history'], new_history)
                else:
                    profile_ds.attrs['history'] = '{:}'.format(new_history)

                # Update the source global attribute
                profile_ds.attrs['source'] = nc_file

                # Add the source_file variable
                source_file_attrs = nc_ds.attrs.copy()
                source_file_attrs['bytes'] = '{:}'.format(
                    os.path.getsize(nc_file))
                profile_ds['source_file'] = DataArray(nc_name,
                                                      attrs=source_file_attrs)

                logging.info('Writing: {:}'.format(netcdf_path))
                profile_ds.to_netcdf(netcdf_path)

                netcdf_count += 1

    logging.info('{:} NetCDF files written'.format(netcdf_count))

    return status
if not isinstance(fdirs, list):
    fdirs = [fdirs]

fnames = []
for fdir in fdirs:
    ystr = int(fdir[-4:])
    if np.logical_or(ystr < START_YEAR, ystr > END_YEAR):
        continue
    fnamesi = glob(fdir + fdir_tail)
    fnamesi.sort()
    for f in fnamesi:
        fnames.append(f)
nt = len(fnames)  # Total number of files.

Tfmins = []
p = p_from_z(z, latu[msk].mean())  # lats closest to the isobath.
t = []
#
UQxm_circ_all = []
UQxe_circ_all = []
UQxm_100m_all = []
UQxe_100m_all = []
UQxm_100m_700m_all = []
UQxe_100m_700m_all = []
UQxm_700m_1000m_all = []
UQxe_700m_1000m_all = []
#
Ux_all = []
#
UQx_all = []
UQxm_all = []
#

# Load data
data = sp.io.loadmat('~/erics_python_plotting/data/temp_depth.mat')
depth = data['depth']
T_CTRL = data['T_CTRL']
T_A1B = data['T_A1B']
S_CTRL = data['S_CTRL']
S_A1B = data['S_A1B']

# Location
lon = 160
lat = -40

# Convert depth to pressure
p = gsw.p_from_z(-depth, lat)

# Convert practical salinity to absolute salinity
SA_CTRL = gsw.SA_from_SP(S_CTRL, p, lon, lat)
SA_A1B = gsw.SA_from_SP(S_A1B, p, lon, lat)

# Convert in-situ temperature to conservative temperature
TC_CTRL = gsw.CT_from_t(SA_CTRL, T_CTRL, p)
TC_A1B = gsw.CT_from_t(SA_A1B, T_A1B, p)

# Calculate density on a T-S grid
T_grid, S_grid = np.meshgrid(np.arange(0,35,0.05), np.arange(33,37,0.001))
rho = gsw.rho(S_grid, T_grid, 0)  # with p=0 this is potential density referenced to the surface, not in-situ density

# Plot
plt.figure()
Example #30
        #trim to the desired length
        utc = full_utc[start_index:stop_index]

        print("full time", full_utc[0], full_utc[-1])
        print("measurement time", start_time, stop_time)

        #print("\n\n\n\n\n")
        #print(full_utc)
        #print("\n\n\n\n\n")

        #unit conversion because of different standards for the conductivity
        if np.nanmean(conductivity) < 3:
            conductivity *= 10

        constant_pressure = gsw.p_from_z(-float(sensor_depth),
                                         54.32)  #latitude of the transect
        #print("pressure",constant_pressure)
        computed_salinity = SA_from_C(conductivity, temperature,
                                      constant_pressure, 20.6, 54.32)
        #print("comparison:",np.nanmean(salinity),np.nanmean(computed_salinity))
        #print("check:",np.nanmean(conductivity))

        # TODO: choose the start and stop times so that something comes out here
        print("TEST:", np.arange(0, full_utc.size)[full_utc == start_time])
        print("TEST:", np.arange(0, full_utc.size)[full_utc == stop_time])

        temperature = full_temperature[start_index:stop_index]
        utc = full_utc[start_index:stop_index]

        print("start", utc[0], start_time, utc[0] == start_time)
        print("stop", utc[-1], stop_time, utc[-1] == stop_time)
Example #31
def BV2(S, T, depth, lon, lat):
    p1 = gsw.p_from_z(depth, np.array(lat))
    sa = gsw.SA_from_SP(S, p1, lon, lat)
    ct = gsw.CT_from_pt(sa, T)
    N2, pOut = gsw.Nsquared(sa, ct, p1, lat)
    return N2, pOut
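
A minimal sketch calling BV2 on a short profile; depth is passed negative below the surface, matching the gsw.p_from_z convention:

import numpy as np
import gsw

depth = np.array([-5.0, -50.0, -100.0])   # m, negative down
S = np.array([34.0, 34.5, 34.8])          # practical salinity
T = np.array([10.0, 8.0, 6.0])            # potential temperature

N2, p_mid = BV2(S, T, depth, lon=-150.0, lat=40.0)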
Example #32
def wod_cast_n(rag_arr,
               n,
               var_names=['Temperature', 'Salinity'],
               anc_names=None,
               do_qc=True,
               do_teos10=True):
    """
    Get an individual cast from WOD ragged array.

    If do_qc is true, data are filtered keeping only those with
    quality flags of 0 or 1. Refused data are returned as NaN. Some
    profiles do not have quality flags. There are three possible cases
    and here are the meaning of the quality flags they produce.

    Profile quality flag missing
    -> Profile flag = -1
    -> value flags = -1

    Profile quality flag exists but is not accepted
    -> Profile flag = passed from original file
    -> Value flags = -2

    Profile quality flag exists and is accepted, but value flags are missing
    -> Profile flag = passed from original file
    -> Value flags = -3

    Parameters
    ----------
    rag_arr: xarray.Dataset or str
        Path to a WOD netCDF file containing a CTD ragged array, or the
        Dataset it has been read into.
    n: int
        Cast number to return as xarray Dataset.
    var_names: list of str
        Names of the variables to extract. Defaults to ['Temperature', 'Salinity'].
    anc_names: list of str
        Names of the ancillary data variables to extract. Defaults to None.
    do_qc: bool
        If True keep only data with WOD quality flags 0 or 1. Defaults to True.
        This also passes the WOD quality flags to the child cast.
    do_teos10: bool
        If True calculate CT, SA and sigma0 using the gsw package, implementing
        TEOS10. Defaults to True.

    Returns
    -------
    xarray.Dataset
        The isolated nth cast of the ragged array.
    """
    # Read netcdf or pass xarray
    if isinstance(rag_arr, str):
        dset = xr.open_dataset(rag_arr)
    elif isinstance(rag_arr, xr.Dataset):
        dset = rag_arr
    else:
        raise TypeError('rag_arr is not string or xarray Dataset')

    # Replace VAR_row_size NaN values with 0
    for variable in var_names + ['z']:
        field = '%s_row_size' % variable
        dset[field] = dset[field].where(isfinite(dset[field]), 0)

    # Get cast depth information
    depth_name = 'z'
    time_name = 'time'  # For ease if ever necessary
    lon_name = 'lon'  # to make more flexible
    lat_name = 'lat'

    c_strt = int(dset['%s_row_size' % depth_name][:n].values.sum())
    c_stop = c_strt + int(dset['%s_row_size' % depth_name][n].values)
    depth = dset[depth_name][c_strt:c_stop].values

    # Add requested variables
    cast = xr.Dataset(coords={depth_name: depth}, attrs=dset.attrs)

    # Default of TEOS10 switches
    has_temp = False
    has_sal = False

    # Loop over requested variables
    for variable in var_names:
        # Check this variable is not empty for this cast
        if dset['%s_row_size' % variable][n] > 0:
            # Get variable index values
            c_strt = int(dset['%s_row_size' % variable][:n].values.sum())
            c_stop = c_strt + int(dset['%s_row_size' % variable][n].values)

            # Assign
            cast[variable] = (depth_name, dset[variable][c_strt:c_stop])

            # Switches for TEOS10
            if 'Temperature' == variable:
                has_temp = True
            if 'Salinity' == variable:
                has_sal = True

            # Do quality control (keeps flags 0 and 1)
            if do_qc:
                # Logic switches
                pf_exists = '%s_WODprofileflag' % variable in dset.data_vars
                vl_exists = '%s_WODflag' % variable in dset.data_vars

                # Check quality flag exists for cast
                if pf_exists:
                    # Pass value of cast quality flag
                    cast.attrs['%s_WODprofileflag' %
                               variable] = dset['%s_WODprofileflag' %
                                                variable].values[n]

                    # Check quality flag is accepted for cast
                    if cast.attrs['%s_WODprofileflag' % variable] in [0, 1]:
                        # Check existence of observation flags
                        if vl_exists:
                            cast['%s_WODflag' %
                                 variable] = (depth_name,
                                              dset['%s_WODflag' %
                                                   variable][c_strt:c_stop])
                            condition = ((cast['%s_WODflag' % variable] == 0) |
                                         (cast['%s_WODflag' % variable] == 1))
                            cast[variable] = cast[variable].where(condition)

                        # Value flags do not exist
                        else:
                            print('Warning: No flags for variable %s' %
                                  variable)
                            cast[variable] *= nan
                            cast['%s_WODflag' %
                                 variable] = (depth_name,
                                              -3 * ones_like(depth, dtype=int))

                    # Profile quality flag is not accepted
                    else:
                        # Pass cast quality flag
                        cast[variable] *= nan
                        cast['%s_WODflag' %
                             variable] = (depth_name,
                                          -2 * ones_like(depth, dtype=int))

                # Profile quality flag does not exist
                else:
                    cast.attrs['%s_WODprofileflag' % variable] = -1
                    cast[variable] *= nan
                    cast['%s_WODflag' %
                         variable] = (depth_name,
                                      -1 * ones_like(depth, dtype=int))

        # Variable exists but profile is empty
        else:
            print('Warning: No data for variable %s' % variable)

    # Convert other coordinates to attributes
    for coord in [time_name, lon_name, lat_name]:
        cast.attrs[coord] = dset[coord].values[n]

    # Gather ancillary data if requested
    if anc_names is not None:
        for anc in anc_names:
            cast.attrs[anc] = dset[anc].values[n]

    # Use TEOS10 to get CT, SA and sigma0 if requested
    if has_temp and has_sal:
        has_rfields = (
            all(cast.attrs['%s_WODprofileflag' %
                           field] in [0, 1]  # Has the required fields
                for field in ['Temperature', 'Salinity']))
    else:
        has_rfields = False
    has_rattrs = (
        all(attr in cast.attrs  # Has the required attributes
            for attr in ['lon', 'lat']))

    if do_teos10 and has_rattrs and has_rfields:
        # Remove NaN values
        cast = cast.where(
            (isfinite(cast.Temperature)) & (isfinite(cast.Salinity)),
            drop=True)
        try:
            cast['Sea_Pres'] = (depth_name,
                                gsw.p_from_z(-cast.z.values, cast.lat))
            cast['SA'] = (depth_name,
                          gsw.SA_from_SP(cast.Salinity.values,
                                         cast.Sea_Pres.values, cast.lon,
                                         cast.lat))
            cast['CT'] = (depth_name,
                          gsw.CT_from_t(cast.SA.values,
                                        cast.Temperature.values,
                                        cast.Sea_Pres.values))
            cast['RHO'] = (depth_name,
                           gsw.rho(cast.SA.values, cast.CT.values,
                                   cast.Sea_Pres.values))
            cast['SIGMA_THETA'] = (depth_name,
                                   gsw.density.sigma0(cast.SA.values,
                                                      cast.CT.values))
        except Exception as err:
            print('Could not do TEOS-10 conversions:', err)

    # Output result
    return cast
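
# A minimal usage sketch of the TEOS-10 chain the cast function relies on
# (p_from_z -> SA_from_SP -> CT_from_t -> rho / sigma0), runnable on its own.
# The toy profile values below are illustrative assumptions, not data from
# the example above.
import numpy as np
import gsw

z = np.array([0.0, 100.0, 500.0, 1000.0])  # depth (m, positive down)
SP = np.array([35.0, 35.1, 34.9, 34.7])    # practical salinity (psu)
t = np.array([18.0, 14.0, 8.0, 4.0])       # in-situ temperature (deg C)
lon, lat = -30.0, 45.0

p = gsw.p_from_z(-z, lat)             # sea pressure (dbar)
SA = gsw.SA_from_SP(SP, p, lon, lat)  # absolute salinity (g/kg)
CT = gsw.CT_from_t(SA, t, p)          # conservative temperature (deg C)
rho = gsw.rho(SA, CT, p)              # in-situ density (kg/m^3)
sigma0 = gsw.density.sigma0(SA, CT)   # potential density anomaly (kg/m^3)
print(sigma0)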
Example #33
def driver(args):

    path_out = args.path_out
    file_out_unfilled = args.file_out_unfilled
    file_out_filled = args.file_out_filled

    # URL for NODC THREDDS server
    ncei_thredds_url = 'https://data.nodc.noaa.gov/thredds/dodsC/ncei/woa/'

    # Setup which WOA data set to download
    # 1 deg. WOA
    if args.resolution == '01':
        fext = '01'
        res = '1.00'
    elif args.resolution == '04':
        # 0.25 deg WOA
        fext = '04'
        res = '0.25'
    else:
        raise ValueError(
            'The resolution provided, {}, is not supported. Please use 01 or 04.'
            .format(args.resolution))

    decade = 'decav'
    date_mon = 1  # This is the month of the output IC file
    date_seas = range(13, 17)  # all four seasons

    # Get the data from NODC
    # Get monthly temperature & salinity
    base_url_t = ncei_thredds_url + '/temperature/' + decade + '/' + res + '/'
    file_root_t = 'woa18_' + decade + '_t'
    files_t = '{0:s}{1:s}{2:02d}_{3:s}.nc'.format(base_url_t, file_root_t,
                                                  date_mon, fext)
    print('Monthly temperature file:', files_t)
    dst_mon = xr.open_dataset(
        files_t,
        decode_times=False)  #,data_vars='minimal',combine='by_coords')

    base_url_s = ncei_thredds_url + '/salinity/' + decade + '/' + res + '/'
    file_root_s = 'woa18_' + decade + '_s'
    files_s = '{0:s}{1:s}{2:02d}_{3:s}.nc'.format(base_url_s, file_root_s,
                                                  date_mon, fext)
    print('Monthly salinity file:', files_s)
    dss_mon = xr.open_dataset(
        files_s,
        decode_times=False)  #,data_vars='minimal',combine='by_coords')

    # Merge monthly input data sets
    ds_mon = dst_mon.merge(dss_mon).squeeze()

    # Get seasonal temperature & salinity
    files_t = [
        '{0:s}{1:s}{2:02d}_{3:s}.nc'.format(base_url_t, file_root_t, date_str,
                                            fext) for date_str in date_seas
    ]
    print('Seasonal temperature files:', files_t)
    dst_seas = xr.open_mfdataset(files_t,
                                 decode_times=False,
                                 data_vars='minimal',
                                 combine='by_coords')

    files_s = [
        '{0:s}{1:s}{2:02d}_{3:s}.nc'.format(base_url_s, file_root_s, date_str,
                                            fext) for date_str in date_seas
    ]
    print('Seasonal salinity files:', files_s)
    dss_seas = xr.open_mfdataset(files_s,
                                 decode_times=False,
                                 data_vars='minimal',
                                 combine='by_coords')

    # Merge seasonal data sets
    # force download to overcome limitation of interpolation on chunked dims
    ds_seas = dst_seas.merge(dss_seas).compute()

    ###########################################################################
    # Extend Monthly data to deepest levels by interpolating from seasonal

    nseas = ds_seas['time'].size
    nmon = ds_mon['time'].size
    print('length seasonal file=', nseas)
    print('length monthly file=', nmon)

    time_target = ds_mon['time'].values
    print('target time of ic file=', time_target)

    nlon = np.shape(ds_mon['lon'])[0]
    nlat = np.shape(ds_mon['lat'])[0]
    print(' nlon=', nlon, ' nlat=', nlat)
    ndep_upper = np.shape(ds_mon['depth'])[0]
    ndep_full = np.shape(ds_seas['depth'])[0]
    print('# levels seasonal file=', ndep_full)
    print('# levels monthly file=', ndep_upper)

    # Check that the right depths are being extracted and concatenated
    #tmp_dep = xr.concat([ds_mon['depth'],ds_seas['depth'][ndep_upper:]],dim='depth')
    #print('# of levels in extended file =',np.shape(tmp_dep)[0])
    #print('concatenated full depth')
    #for k in range(0,ndep_full) :
    #    print('  k=',k,' z=',tmp_dep.values[k])

    # Pull out seasons -1 and N for a periodic boundary condition on the time interpolation.
    ds_seas_bef = ds_seas.isel(time=nseas - 1)
    ds_seas_aft = ds_seas.isel(time=0)

    #  Adjust times to be monotonic
    ds_seas_bef['time'] = ds_seas_bef['time'] - 12.
    ds_seas_aft['time'] = ds_seas_aft['time'] + 12.

    print('time before=', ds_seas_bef['time'].values, ' time after =',
          ds_seas_aft['time'].values)

    ###########################################################################
    # Create a data set with monthly data in upper ocean,
    #  interpolated seasonal data in deeper ocean

    ds_deep = xr.Dataset()
    for var in ('t_an', 's_an'):
        print('Starting ', var, ' ...')
        tmp_extend = xr.concat([
            ds_seas_bef[var][ndep_upper:, :, :],
            ds_seas[var][:, ndep_upper:, :, :],
            ds_seas_aft[var][ndep_upper:, :, :]
        ],
                               dim='time').compute()
        print(' ... extended seasonal data in time. shape=',
              np.shape(tmp_extend))
        print(' ... extended seasonal data times=', tmp_extend['time'].values)

        print(' ... interpolating to time ', time_target)
        tmp_interp = tmp_extend.interp(time=time_target)
        print('... finished interpolation to months. shape=',
              np.shape(tmp_interp))
        print(' ... shape of monthly upper ocean array=',
              np.shape(ds_mon[var]))

        ds_deep[var] = xr.concat([ds_mon[var], tmp_interp], dim='depth')
        print(
            '... finished inserting deep values into monthly arrays. shape = ',
            np.shape(ds_deep[var]))

    ###########################################################################
    # Compute pressure, reference salinity and potential temperature using TEOS-10

    z1d = -ds_deep['depth']
    lat1d = ds_deep['lat']
    lon1d = ds_deep['lon']

    z3d, lat3d, lon3d = xr.broadcast(z1d, lat1d, lon1d)

    p3d = gsw.p_from_z(z3d, lat3d)
    SR = gsw.SR_from_SP(ds_deep['s_an'].data)
    PT0 = gsw.conversions.pt0_from_t(SR, ds_deep['t_an'], p3d)

    ds_deep['theta0'] = xr.DataArray(PT0, dims=('depth', 'lat', 'lon'))
    for var in ('depth', 'lat', 'lon'):
        # assign_coords returns a new object, so the result must be reassigned
        ds_deep['theta0'] = ds_deep['theta0'].assign_coords({var: ds_seas[var]})

    ds_deep['theta0'].attrs = ds_seas['t_an'].attrs
    ds_deep['theta0'].attrs[
        'long_name'] = 'Potential temperature from objectively analyzed in situ temperature and salinity'
    ds_deep['theta0'].attrs[
        'standard_name'] = 'sea_water_potential_temperature'
    ds_deep['theta0'].encoding = ds_seas['t_an'].encoding

    ds_deep.to_netcdf(path_out + file_out_unfilled)
    ###########################################################################
    # Fill all land with values interpolated from nearest ocean

    mask_all_points = xr.DataArray(np.ones(
        (ndep_full, nlat, nlon)).astype('bool'),
                                   dims=('depth', 'lat', 'lon'),
                                   coords=(ds_seas['depth'], ds_seas['lat'],
                                           ds_seas['lon']))

    ds_fill = xr.Dataset()

    for var in ('s_an', 'theta0'):
        print('Starting ', var, ' ...')

        ds_fill[var] = fill.lateral_fill(ds_deep[var],
                                         mask_all_points,
                                         ltripole=False,
                                         tol=5.0e-3,
                                         use_sor=True,
                                         rc=1.88,
                                         max_iter=1000)

    ds_fill['depth'].attrs = ds_seas['depth'].attrs
    ds_fill['lat'].attrs = ds_seas['lat'].attrs
    ds_fill['lon'].attrs = ds_seas['lon'].attrs

    # Global attrs
    ds_fill.attrs['title'] = 'T and S from WOA filled over continents'
    ds_fill.attrs[
        'WOA_resolution'] = args.resolution + ', 01 (1 deg), 04 (0.25 deg)'
    ds_fill.attrs['author'] = args.author
    ds_fill.attrs['date'] = datetime.now().isoformat()
    ds_fill.attrs['created_using'] = (
        os.path.basename(__file__) + ' -path_out ' + path_out +
        ' -author ' + args.author + ' -resolution ' + args.resolution +
        ' -file_out_unfilled ' + file_out_unfilled +
        ' -file_out_filled ' + file_out_filled)
    ds_fill.attrs['url'] = os.path.basename(
        __file__) + ' can be found at https://github.com/NCAR/WOA_MOM6'
    ds_fill.attrs['git_hash'] = str(
        subprocess.check_output(["git", "describe", "--always"]).strip())
    # save
    ds_fill.to_netcdf(path_out + file_out_filled)
    return
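
# A minimal sketch of the periodic time-interpolation trick used in driver()
# above: copy the last and first seasonal records, shift their times by
# -12/+12 months so the time axis is monotonic, concatenate, then interpolate
# to the monthly targets. The four-point toy series and its mid-season times
# are illustrative assumptions; xarray's interp() requires scipy.
import numpy as np
import xarray as xr

seas = xr.DataArray([1.0, 2.0, 3.0, 2.0], dims='time',
                    coords={'time': [1.5, 4.5, 7.5, 10.5]})

before = seas.isel(time=-1)
after = seas.isel(time=0)
before['time'] = before['time'] - 12.0  # wrap the last season to before January
after['time'] = after['time'] + 12.0    # wrap the first season to after December

extended = xr.concat([before, seas, after], dim='time')
monthly = extended.interp(time=np.arange(0.5, 12.5))  # mid-month target times
print(monthly.values)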
Example #34
import numpy as np
import matplotlib.pyplot as plt
import xray
import gsw
woa = xray.open_dataset('http://iridl.ldeo.columbia.edu/SOURCES/.NOAA/.NODC/.WOA09/'
                        '.Grid-1x1/.Annual/dods')

p0 = -10.1325  # surface pressure (dbar); not used below
pr = 0         # reference pressure for potential density (dbar)

# Multiply the coords by 1 to get broadcastable DataArrays
S, T, Lat, Lon, dep = xray.broadcast_arrays(
    woa['salinity.s_an'][0], woa['temperature.t_an'][0],
    woa.lat * 1, woa.lon * 1, woa.depth * 1)

p = -1 * gsw.p_from_z(dep, Lat)  # dep is positive down; gsw expects z negative below the surface
SA = gsw.SA_from_SP(S, p, Lon, Lat)
TC = gsw.CT_from_t(SA, T, p)
rho = gsw.rho_CT_exact(SA, TC, pr)  # potential density (referenced to pr = 0 dbar)


rhoArr = xray.DataArray(rho, dims=['depth', 'lat', 'lon'],
                        coords=[woa.depth, woa.lat, woa.lon])
rhoArr1 = rhoArr.sel(lat=slice(30, 50), lon=slice(170.5, 170.5))    # Kuroshio Extension
rhoArr2 = rhoArr.sel(lat=slice(-72, -55), lon=slice(170.5, 170.5))  # Southern Ocean section (not plotted below)

plt.figure(figsize=(7, 7))
plt.contourf(rhoArr1.lat, rhoArr1.depth, rhoArr1.squeeze(dim=None), cmap='ocean')
plt.title('Potential Density of Kuroshio Extension')
plt.xlabel('lat')
plt.ylabel('depth (m)')
plt.ylim(3000, 0)  # invert the y-axis so the surface is at the top
cbar = plt.colorbar(orientation='vertical', fraction=0.046, pad=0.04)
cbar.ax.set_xlabel('rho (kg m^-3)')
plt.show()
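
# A minimal sketch, assuming a dataset `ds` whose variables s_an (practical
# salinity) and t_an (in-situ temperature) are dimensioned (depth, lat, lon),
# as in the WOA examples above. It reproduces the density calculation with
# current library names (xray.broadcast_arrays and gsw.rho_CT_exact come from
# older releases); `sigma0_from_woa` is a hypothetical helper.
import xarray as xr
import gsw

def sigma0_from_woa(ds):
    # Broadcast the 1-D coordinates to the 3-D grid of the data variables.
    z3d, lat3d, lon3d = xr.broadcast(-ds['depth'], ds['lat'], ds['lon'])
    p = gsw.p_from_z(z3d.values, lat3d.values)  # sea pressure (dbar)
    SA = gsw.SA_from_SP(ds['s_an'].values, p, lon3d.values, lat3d.values)
    CT = gsw.CT_from_t(SA, ds['t_an'].values, p)
    sig0 = gsw.sigma0(SA, CT)                   # potential density anomaly
    return xr.DataArray(sig0, dims=('depth', 'lat', 'lon'),
                        coords={'depth': ds['depth'],
                                'lat': ds['lat'],
                                'lon': ds['lon']})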