Example no. 1
def formatDf(file): # convert pressure to depth
    lat, lon, dt = castMeta(file)
    df = readCnv(file)
    df['depth'] = sw.dpth(df.index,lat)
    df = df.set_index(['cast'])
    df['lat'] = lat
    df['lon'] = lon
    return df, lat, lon, dt, df.index.unique()[0]
def check_prof_depth(nprof, presargo, latargo, depthmin):
    print('check if profile has a good depth : ')
    #convert pressure to depth
    depthargo = seawater.dpth(presargo, latargo)
    #look for the last level
    indzprof = np.max(np.where(~np.isnan(depthargo)))
    dmax = depthargo[indzprof]
    print('profile max depth is ' + str(dmax) + ' m')
    if dmax >= depthmin:
        check = 0
    else:
        check = 1
    return check
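A minimal usage sketch (not from the original source): a synthetic pressure profile with a trailing NaN, checked against a 900 m threshold. It assumes numpy and the seawater package are imported as the function above expects.

import numpy as np
import seawater

presargo = np.array([5., 100., 500., 1000., np.nan])   # synthetic pressure profile [dbar]
latargo = 45.0
# seawater.dpth(1000 dbar, 45N) is roughly 990 m, so a 900 m threshold passes (check == 0)
check = check_prof_depth(nprof=0, presargo=presargo, latargo=latargo, depthmin=900.)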
def get_MCOMS_Argo(filename, get_data=True, get_dives=None):
    Argo_nc = Dataset(filename, 'r')

    inds = np.arange(
        Argo_nc.dimensions['N_PROF'].size) if get_dives is None else get_dives

    inds = inds.astype('int')

    tt = [
        dt.datetime(1950, 1, 1) + dt.timedelta(days=t) -
        dt.datetime(2018, 1, 1)
        for t in Argo_nc.variables['JULD'][inds].filled(np.nan)
    ]
    time = np.array([t.days + t.seconds / 86400 for t in tt])

    # doesn't matter because no matches anyway
    #bad_inds = time>END_DATE
    #inds = inds[~bad_inds]
    #time = time[~bad_inds]

    lat = Argo_nc.variables['LATITUDE'][inds].filled(np.nan)
    lon = Argo_nc.variables['LONGITUDE'][inds].filled(np.nan)

    if get_data:
        depth = sw.dpth(Argo_nc.variables['PRES_ADJUSTED'][inds].filled(
            np.nan),
                        lat=lat[0])
        temp = Argo_nc.variables['TEMP_ADJUSTED'][inds].filled(np.nan)
        sal = Argo_nc.variables['PSAL_ADJUSTED'][inds].filled(np.nan)
        VSF = Argo_nc.variables['BBP700_ADJUSTED'][inds].filled(
            np.nan) / (1.142 * 2 * np.pi
                       )  # was changed from dividing by 1.1, need to re-run

    output = (
        time,
        lat,
        lon,
        inds,
    )
    if get_data:
        output += (
            temp,
            sal,
            depth,
            VSF,
        )

    return output
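A hedged usage sketch for the Argo reader above; 'argo_profiles.nc' is a placeholder filename, and the unpacking order follows the output tuple built in the function.

# hypothetical file name; with get_data=False only (time, lat, lon, inds) is returned
time, lat, lon, inds, temp, sal, depth, VSF = get_MCOMS_Argo('argo_profiles.nc', get_data=True)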
def get_HS(files, get_data=True, get_dives=None):
    SB_files = np.sort(glob.glob(files))

    if get_dives is not None:
        SB_files = SB_files[get_dives.astype('int')]

    lat = ()
    lon = ()
    t = ()
    sta = ()
    if get_data:
        T = ()
        S = ()
        z = ()
        VSF = ()
        VSF_sd = ()
    for i in range(len(SB_files)):
        SB = readSB(SB_files[i], no_warn=True)
        lat += (get_startlat(SB), )
        lon += (get_startlon(SB), )
        t += (get_starttime(SB), )
        sta += (i, )

        if get_data:
            T += (np.array(SB.data['wt']), )
            S += (np.array(SB.data['sal']), )
            p = np.array(SB.data['pressure'])
            z += (sw.dpth(p, lat=50.5), )
            # convert using chi factor for 142deg angle used here
            VSF += (np.array(SB.data['bbp700']) / (2 * np.pi * 1.18), )
            # convert using chi factor for 142deg angle used here
            VSF_sd += (np.array(SB.data['bbp700_sd']) / (2 * np.pi * 1.18), )

    output = (np.array(t, dtype='object'), np.array(lat, dtype='object'),
              np.array(lon, dtype='object'), np.array(sta, dtype='object'))

    if get_data:
        output += (
            np.array(T, dtype='object'),
            np.array(S, dtype='object'),
            np.array(z, dtype='object'),
            np.array(VSF, dtype='object'),
            np.array(VSF_sd, dtype='object'),
        )

    return output
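A similarly hedged call for the SeaBASS reader; the glob pattern is a placeholder, and each returned element is an object array with one entry per file.

t, lat, lon, sta, T, S, z, VSF, VSF_sd = get_HS('HS_casts/*.sb', get_data=True)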
def check_prof_depth(fileEN4, prof):
    print('check if profile has a good depth : ')
    #open the profile file and read the infos on pressure and latitude
    tfileEN4 = diren4 + fileEN4
    dsen4 = Dataset(tfileEN4, 'r')
    presen4 = dsen4.variables['PRES_ADJUSTED'][prof]
    laten4 = dsen4.variables['LATITUDE'][prof]
    #convert pressure to depth
    depthen4 = seawater.dpth(presen4, laten4)
    #look for the last level
    indzprof = np.max(np.where(~np.isnan(depthen4)))
    dmax = depthen4[indzprof - 1]
    print('profile max depth is ' + str(dmax) + ' m')
    if dmax >= depthmin:
        check = 0
    else:
        check = 1
    return check
Example no. 6
def csv2table(dir): # produce ctd and key dataframes from QC'd ctd data tables
    if os.path.isdir(dir):
        files = glob(dir + '*.csv')
    else:
        files = [dir]
    dfCtd,dfCtdKey = [pd.DataFrame() for i in range(2)]
    for file in files:
        dfCur = pd.read_csv(file,skiprows=[1])
        cast = []
        for name in dfCur.profile_id.values:
            cast.append(float(name.split('_')[0][-3:]))
        dfCur['cast'] = cast
        dfCur['depth'] = sw.dpth(dfCur.pressure,dfCur.latitude)
        dfCur = dfCur.rename(columns={"T_28": "temp", "S_41": "sal","latitude":"lat","longitude":"lon"})
        dfCtdCur = dfCur[["cast","temp","sal","depth","lat","lon"]]
        dfCtdCur = dfCtdCur.set_index('cast')
        dfCtd = dfCtd.append(dfCtdCur)

        dfKeyCur = dfCur[["cast","lat","lon","time"]]
        dfKeyCur.lon = dfKeyCur.lon-360
        dfKeyCur.time = pd.to_datetime(dfKeyCur.time)
        dfCtdKey = dfCtdKey.append(dfKeyCur.drop_duplicates())
    return dfCtd, dfCtdKey
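A usage sketch, assuming a directory of QC'd CTD CSV tables with the column names referenced above; the path is a placeholder and keeps the trailing slash because the glob pattern is built by plain concatenation.

dfCtd, dfCtdKey = csv2table('ctd_qc/')    # hypothetical directory of *.csv tables
print(dfCtd.columns.tolist())             # ['temp', 'sal', 'depth', 'lat', 'lon'], indexed by cast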
Example no. 7
def adiabatic_level_sw(P,
                       S,
                       T,
                       lat,
                       bin_width=100.,
                       order=1,
                       ret_coefs=False,
                       cap=None):
    """Generate smooth buoyancy frequency profile by applying the adiabatic
    levelling method of Bray and Fofonoff (1981). This function uses the older
    thermodynamic toolbox, 'seawater'.

    Parameters
    ----------
    P : 1-D ndarray
        Pressure [dbar]
    S : 1-D ndarray
        Practical salinity [-]
    T : 1-D ndarray
        Temperature [degrees C]
    lat : float
        Latitude [-90...+90]
    bin_width : float, optional
        Pressure bin width [dbar]
    order : int, optional
        Degree of polynomial fit. (DEGREES HIGHER THAN 1 NOT PROPERLY TESTED)
    ret_coefs : bool, optional
        Flag to return additional argument pcoefs. False by default.
    cap : optional
        Flag to change the procedure at ends of array where bins may be partially
        filled. None by default, meaning they are included. Can also specify
        'left', 'right' or 'both' to cap method before partial bins.

    Returns
    -------
    N2_ref : 1-D ndarray
        Reference buoyancy frequency [s-2]
    pcoefs : 2-D ndarray
        Fitting coefficients, returned only when the flag ret_coefs is set
        True.

    """
    valid = np.isfinite(P) & np.isfinite(S) & np.isfinite(T)
    valid = np.squeeze(np.argwhere(valid))
    P_, S_, T_ = P[valid], S[valid], T[valid]

    flip = False
    if (np.diff(P_) < 0).all():
        flip = True
        P_ = np.flipud(P_)
        S_ = np.flipud(S_)
        T_ = np.flipud(T_)
    elif (np.diff(P_) < 0).any():
        raise ValueError('P must be monotonically increasing/decreasing.')

    i1 = np.searchsorted(P_, P_ - bin_width / 2.)
    i2 = np.searchsorted(P_, P_ + bin_width / 2.)

    if cap is None:
        Nd = P_.size
    elif cap == 'both':
        icapl = i2[0]
        icapr = i1[-1]
    elif cap == 'left':
        icapl = i2[0]
        icapr = i2[-1]
    elif cap == 'right':
        icapl = i1[0]
        icapr = i1[-1]
    else:
        raise ValueError("The argument cap must be either None, 'both', 'left'"
                         " or 'right'")

    if cap is not None:
        i1 = i1[icapl:icapr]
        i2 = i2[icapl:icapr]
        valid = valid[icapl:icapr]
        Nd = icapr - icapl

    dimax = np.max(i2 - i1)

    Pb = np.full((dimax, Nd), np.nan)
    Sb = np.full((dimax, Nd), np.nan)
    Tb = np.full((dimax, Nd), np.nan)

    for i in range(Nd):
        imax = i2[i] - i1[i]
        Pb[:imax, i] = P_[i1[i]:i2[i]]
        Sb[:imax, i] = S_[i1[i]:i2[i]]
        Tb[:imax, i] = T_[i1[i]:i2[i]]

    Pbar = np.nanmean(Pb, axis=0)

    rho = sw.pden(Sb, Tb, Pb, Pbar)
    sv = 1. / rho

    rhobar = np.nanmean(rho, axis=0)

    p = np.full((order + 1, Nd), np.nan)

    for i in range(Nd):
        imax = i2[i] - i1[i]
        p[:, i] = np.polyfit(Pb[:imax, i], sv[:imax, i], order)

    g = sw.g(lat, -sw.dpth(Pbar, lat))
    # The factor 1e-4 is needed for conversion from dbar to Pa.
    N2 = -1e-4 * rhobar**2 * g**2 * p[order - 1, :]

    N2_ref = np.full_like(P, np.nan)
    pcoef = np.full((order + 1, P.size), np.nan)
    if flip:
        N2_ref[valid] = np.flipud(N2)
        pcoef[:, valid] = np.fliplr(p)
    else:
        N2_ref[valid] = N2
        pcoef[:, valid] = p

    if ret_coefs:
        return N2_ref, pcoef
    else:
        return N2_ref
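A minimal sketch of driving the adiabatic levelling routine with a synthetic, statically stable profile; the values are illustrative only and assume the same numpy/seawater imports the function relies on.

import numpy as np
import seawater as sw

P = np.arange(0., 2000., 10.)    # pressure [dbar], monotonically increasing
S = 34.5 + 2e-4 * P              # practical salinity
T = 15. * np.exp(-P / 500.)      # temperature [deg C]

N2_ref = adiabatic_level_sw(P, S, T, lat=45., bin_width=200.)
N2_ref, pcoefs = adiabatic_level_sw(P, S, T, lat=45., ret_coefs=True)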
Example no. 8
def main(uri: str, filename: str):
    """Import NAFC CTD

    :param str uri: Database URI
    :param str filename: NetCDF file, or directory of files
    """
    data.observational.init_db(uri, echo=False)
    data.observational.create_tables()

    datatype_map = {}

    if os.path.isdir(filename):
        filenames = sorted(glob.glob(os.path.join(filename, "*.nc")))
    else:
        filenames = [filename]

    for fname in filenames:
        print(fname)
        with xr.open_dataset(fname) as ds:
            if len(datatype_map) == 0:
                # Generate the DataTypes; only consider variables that have depth
                for var in filter(
                        lambda x, dataset=ds: 'level' in dataset[x].coords,
                    [d for d in ds.data_vars]):
                    dt = DataType.query.get(ds[var].standard_name)
                    if dt is None:
                        dt = DataType(key=ds[var].standard_name,
                                      name=ds[var].long_name,
                                      unit=ds[var].units)
                    datatype_map[var] = dt

                data.observational.db.session.add_all(datatype_map.values())

            # Query or generate the platform
            # The files I worked off of were not finalized -- in this case the
            # trip id also included the cast number, so I strip off the last 3
            # digits.
            unique_id = f"nafc_ctd_{ds.trip_id[:-3]}"
            p = Platform.query.filter(
                Platform.unique_id == unique_id).one_or_none()
            if p is None:
                p = Platform(type=Platform.Type.mission, unique_id=unique_id)
                p.attrs = {
                    'Institution': ds.institution,
                    'Trip ID': ds.trip_id[:-3],
                    'Ship Name': ds.shipname,
                }
                data.observational.db.session.add(p)

            # Generate the station
            s = Station(
                latitude=ds.latitude.values[0],
                longitude=ds.longitude.values[0],
                time=pd.Timestamp(ds.time.values[0]),
            )
            p.stations.append(s)
            data.observational.db.session.commit()

            ds['level'] = seawater.dpth(ds.level.values, ds.latitude[0].values)

            # Generate the samples
            for var, dt in datatype_map.items():
                da = ds[var].dropna('level')
                samples = [
                    Sample(value=d.item(),
                           depth=d.level.item(),
                           datatype_key=dt.key,
                           station_id=s.id) for d in da
                ]
                data.observational.db.session.bulk_save_objects(samples)

            data.observational.db.session.commit()
    depthmod2d = np.zeros([lx, ly])
    for j in np.arange(ly):
        for i in np.arange(lx):
            depthmod2d[j, i] = depthmod[np.min(
                np.where(maskmod[:, j, i].values < 1))].values

    # Read the profile file
    tfileargo = dirargo + file_prof
    dsargo = xr.open_dataset(tfileargo)
    latargo = dsargo['LATITUDE'][prof_prof]
    lonargo = dsargo['LONGITUDE'][prof_prof]
    dayargo = dsargo['JULD'][prof_prof]
    tempargo = dsargo['TEMP_ADJUSTED'][prof_prof]
    saltargo = dsargo['PSAL_ADJUSTED'][prof_prof]
    presargo = dsargo['PRES_ADJUSTED'][prof_prof]
    depthargo = seawater.dpth(presargo, latargo)

    # Find the last level and reduce the profiles
    indzprof = np.min(np.where(np.isnan(depthargo)))
    dmax = depthargo[indzprof - 1]
    obsred_dep = np.zeros(int(indzprof))
    obsred_temp = np.zeros(int(indzprof))
    obsred_salt = np.zeros(int(indzprof))
    for z in np.arange(int(indzprof)):
        obsred_dep[int(z)] = depthargo[int(z)]
        obsred_temp[int(z)] = tempargo[int(z)]
        obsred_salt[int(z)] = saltargo[int(z)]

    # Remove the NaN in the profiles
    indtempnan = np.where(np.isnan(obsred_temp))
    if len(indtempnan[0]) > 0:
Example no. 10
def main(uri: str, filename: str):
    """Import Argo Profiles

    :param str uri: Database URI
    :param str filename: Argo NetCDF Filename, or directory of files
    """
    data.observational.init_db(uri, echo=False)
    data.observational.create_tables()

    session = data.observational.db.session

    if os.path.isdir(filename):
        filenames = sorted(glob.glob(os.path.join(filename, "*.nc")))
    else:
        filenames = [filename]

    for fname in filenames:
        print(fname)
        with xr.open_dataset(fname) as ds:
            times = pd.to_datetime(ds.JULD.values)

            for f in META_FIELDS:
                META_FIELDS[f] = ds[f].values.astype(str)

            for prof in ds.N_PROF.values:
                plat_number = ds.PLATFORM_NUMBER.values.astype(str)[prof]
                unique_id = f"argo_{plat_number}"

                # Grab the platform from the db based on the unique id
                platform = (session.query(Platform).filter(
                    Platform.unique_id == unique_id,
                    Platform.type == Platform.Type.argo,
                ).first())
                if platform is None:
                    # ... or make a new platform
                    platform = Platform(type=Platform.Type.argo,
                                        unique_id=unique_id)
                    attrs = {}
                    for f in META_FIELDS:
                        attrs[ds[f].long_name] = META_FIELDS[f][prof].strip()

                    platform.attrs = attrs
                    session.add(platform)

                # Make a new Station
                station = Station(
                    time=times[prof],
                    latitude=ds.LATITUDE.values[prof],
                    longitude=ds.LONGITUDE.values[prof],
                )
                platform.stations.append(station)
                # We need to commit the station here so that it'll have an id
                session.commit()

                depth = seawater.dpth(ds.PRES[prof].dropna("N_LEVELS").values,
                                      ds.LATITUDE.values[prof])

                samples = []
                for variable in VARIABLES:
                    # First check our local cache for the DataType object, if
                    # that comes up empty, check the db, and failing that,
                    # create a new one from the variable's attributes
                    if variable not in datatype_map:
                        dt = DataType.query.get(ds[variable].standard_name)
                        if dt is None:
                            dt = DataType(
                                key=ds[variable].standard_name,
                                name=ds[variable].long_name,
                                unit=ds[variable].units,
                            )

                            data.observational.db.session.add(dt)
                            # Commit the DataType right away. This might lead
                            # to a few extra commits on the first import, but
                            # reduces overall complexity in having to
                            # 'remember' if we added a new one later.
                            data.observational.db.session.commit()
                            datatype_map[variable] = dt
                    else:
                        dt = datatype_map[variable]

                    values = ds[variable][prof].dropna("N_LEVELS").values

                    # Using station_id and datatype_key here instead of the
                    # actual objects so that we can use bulk_save_objects--this
                    # is much faster, but it doesn't follow any relationships.
                    samples = [
                        Sample(
                            depth=pair[0],
                            datatype_key=dt.key,
                            value=pair[1],
                            station_id=station.id,
                        ) for pair in zip(depth, values)
                    ]

                    data.observational.db.session.bulk_save_objects(samples)

                session.commit()
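A hedged invocation sketch for the Argo importer above; the database URI and data directory are placeholders, and the module-level META_FIELDS, VARIABLES and datatype_map globals referenced in the function are assumed to be defined.

main("postgresql://user:password@localhost/observations", "/data/argo/profiles/")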
def main(uri: str, filename: str):
    """Import Glider NetCDF

    :param str uri: Database URI
    :param str filename: Glider Filename, or directory of NetCDF files
    """
    data.observational.init_db(uri, echo=False)
    data.observational.create_tables()

    if os.path.isdir(filename):
        filenames = sorted(glob.glob(os.path.join(filename, "*.nc")))
    else:
        filenames = [filename]

    datatype_map = {}
    for fname in filenames:
        print(fname)
        with xr.open_dataset(fname) as ds:
            variables = [v for v in VARIABLES if v in ds.variables]
            df = ds[['TIME', 'LATITUDE', 'LONGITUDE', 'PRES',
                     *variables]].to_dataframe().reset_index().dropna()

            df['DEPTH'] = seawater.dpth(df.PRES, df.LATITUDE)

            for variable in variables:
                if variable not in datatype_map:
                    dt = DataType.query.get(ds[variable].standard_name)
                    if dt is None:
                        dt = DataType(key=ds[variable].standard_name,
                                      name=ds[variable].long_name,
                                      unit=ds[variable].units)
                        data.observational.db.session.add(dt)

                    datatype_map[variable] = dt

            data.observational.db.session.commit()

            p = Platform(type=Platform.Type.glider,
                         unique_id=f"glider_{ds.deployment_label}")
            attrs = {
                'Glider Platform': ds.platform_code,
                'WMO': ds.wmo_platform_code,
                'Deployment': ds.deployment_label,
                'Institution': ds.institution,
                'Contact': ds.contact,
            }
            p.attrs = attrs
            data.observational.db.session.add(p)
            data.observational.db.session.commit()

            stations = [
                Station(
                    platform_id=p.id,
                    time=row.TIME,
                    latitude=row.LATITUDE,
                    longitude=row.LONGITUDE,
                ) for idx, row in df.iterrows()
            ]

            # Using return_defaults=True here so that the stations will get
            # updated with id's. It's slower, but it means that we can just
            # put all the station ids into a pandas series to use when
            # constructing the samples.
            data.observational.db.session.bulk_save_objects(
                stations, return_defaults=True)
            df["STATION_ID"] = [s.id for s in stations]

            samples = [[
                Sample(station_id=row.STATION_ID,
                       depth=row.DEPTH,
                       value=row[variable],
                       datatype_key=datatype_map[variable].key)
                for variable in variables
            ] for idx, row in df.iterrows()]
            data.observational.db.session.bulk_save_objects(
                [item for sublist in samples for item in sublist])
            data.observational.db.session.commit()

        data.observational.db.session.commit()
Example no. 12
    def arrays_to_netcdf(ncfile, lon, lat, t, p, T, S):
        """
        Write the arrays into a single netCDF file `ncfile`
        with a structure similar to SOCIB files
        Inputs:
          lon, lat, time, pressure, T and S are numpy ndarrays
          (arrays of arrays), one array per profile
        """

        with netCDF4.Dataset(ncfile, "w", format="NETCDF4") as nc:

            ndepth = len(p)

            # Dimensions
            time = nc.createDimension("time", None)         # unlimited
            depth = nc.createDimension("depth", ndepth)

            # Variables and attributes
            time = nc.createVariable("time", "f8",("time",), fill_value=np.nan)
            time.standard_name = "time"
            time.units = "days since 01-01-01 00:00:00"
            time.axis = "T"
            time.calendar = "gregorian"

            DEPTH = nc.createVariable("DEPTH", "f8",("time", "depth"))
            DEPTH.ancillary_variables = "QC_DEPTH"
            DEPTH.axis = "Z"
            DEPTH.long_name = "Depth coordinate"
            DEPTH.positive = "down"
            DEPTH.reference_datum = "geographical coordinates, WGS84 projection"
            DEPTH.standard_name = "depth"
            DEPTH.units = "m"

            LON = nc.createVariable("LON", "f4",("time",))
            LON.standard_name = "longitude"
            LON.long_name = "Longitude"
            LON.units = "degrees_east"
            LON.ancillary_variables = "QC_LON"
            LON.axis = "X"
            LON.valid_min = -180.
            LON.valid_max = 180.
            LON.reference_datum = "geographical coordinates, WGS84 projection"

            LAT = nc.createVariable("LAT", "f4",("time",))
            LAT.standard_name = "latitude"
            LAT.long_name = "Latitude"
            LAT.units = "degrees_north"
            LAT.ancillary_variables = "QC_LAT"
            LAT.axis = "Y"
            LAT.valid_min = -90.
            LAT.valid_max = 90.
            LAT.reference_datum = "geographical coordinates, WGS84 projection"

            WTR_PRE = nc.createVariable("WTR_PRE", "f8",("time", "depth"))
            WTR_PRE.ancillary_variables = "QC_WTR_PRE"
            WTR_PRE.coordinates = "time LAT LON DEPTH"
            WTR_PRE.long_name = "Sea water pressure"
            WTR_PRE.observation_type = "measured"
            WTR_PRE.original_units = "dbar"
            WTR_PRE.precision = "0.1"
            WTR_PRE.resolution = "0.1"
            WTR_PRE.standard_name = "sea_water_pressure"
            WTR_PRE.units = "dbar"

            WTR_TEM = nc.createVariable("WTR_TEM", "f8",("time", "depth"))
            WTR_TEM.ancillary_variables = "QC_WTR_TEM"
            WTR_TEM.coordinates = "time LAT LON DEPTH"
            WTR_TEM.long_name = "Sea water temperature"
            WTR_TEM.observation_type = "measured"
            WTR_TEM.original_units = "C"
            WTR_TEM.precision = "0.001"
            WTR_TEM.resolution = "0.001"
            WTR_TEM.standard_name = "sea_water_temperature"
            WTR_TEM.units = "C"

            SALT = nc.createVariable("SALT", "f8",("time", "depth"))
            SALT.ancillary_variables = "QC_SALT"
            SALT.coordinates = "time LAT LON DEPTH"
            SALT.long_name = "Sea water salinity"
            SALT.observation_type = "derived"
            SALT.original_units = "psu"
            SALT.precision = "0.001"
            SALT.resolution = "0.001"
            SALT.standard_name = "sea_water_salinity"
            SALT.units = "psu"

            # Add values to the variables
            LON[:] = lon
            LAT[:] = lat
            # Remove 365 days because of reference year
            time[:] = t - 365

            for i, Pprofile in enumerate(p):
                npoints = len(Pprofile)
                if npoints > 0:
                    WTR_PRE[i,:npoints] = Pprofile
                    # Convert pressure to depth
                    depth = seawater.dpth(Pprofile, lat[i])
                    DEPTH[i,:npoints] = depth

            for i, Tprofile in enumerate(T):
                npoints = len(Tprofile)
                WTR_TEM[i,:npoints] = Tprofile

            for i, Sprofile in enumerate(S):
                npoints = len(Sprofile)
                SALT[i,:npoints] = Sprofile
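A hedged usage sketch with three short synthetic profiles. Note that the depth dimension above is sized as ndepth = len(p) (the number of profiles), so each profile in this sketch is kept no longer than that; the output filename is hypothetical.

import numpy as np

lon = np.array([2.1, 2.3, 2.5])
lat = np.array([39.5, 39.6, 39.7])
t = np.array([734000., 734001., 734002.])   # days since 0001-01-01; the function subtracts 365 for the reference year
p = [np.array([1., 50., 100.]), np.array([1., 75.]), np.array([1., 60., 120.])]       # pressure [dbar]
T = [np.array([15., 14., 13.]), np.array([15.5, 14.2]), np.array([15.2, 14.1, 13.4])]
S = [np.array([37.1, 37.2, 37.3]), np.array([37.0, 37.2]), np.array([37.1, 37.2, 37.4])]

arrays_to_netcdf("socib_like_profiles.nc", lon, lat, t, p, T, S)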
def test(fileout='python-test.txt'):
    r"""Copy of the Matlab test.

    Modifications: Phil Morgan
                   03-12-12. Lindsay Pender, Converted to ITS-90.
    """
    f = open(fileout, 'w')
    asterisks = '*' * 76

    f.write(asterisks)
    f.write('\n    TEST REPORT    ')
    f.write('\n')
    f.write('\n SEA WATER LIBRARY %s' % sw.__version__)
    f.write('\n')
    # Show some info about this Python.
    f.write('\npython version: %s' % sys.version)
    f.write('\n on %s computer %s' % (uname()[0], uname()[-1]))
    f.write('\n')
    f.write('\n')
    f.write(asctime(localtime()))
    f.write('\n')
    f.write('\n')

    # Test main module  ptmp.
    f.write('\n%s' % asterisks)
    f.write('\n**  TESTING MODULE: ptmp')
    f.write('\n**  and SUB-MODULE: adtg')
    f.write('\n%s' % asterisks)
    f.write('\n')
    f.write('\n')

    # Test 1 - data from Unesco 1983 p45.
    T = np.array([[0, 0, 0, 0, 0, 0], [10, 10, 10, 10, 10, 10],
                  [20, 20, 20, 20, 20, 20], [30, 30, 30, 30, 30, 30],
                  [40, 40, 40, 40, 40, 40]])

    T = T / 1.00024

    S = np.array([[25, 25, 25, 35, 35, 35], [25, 25, 25, 35, 35, 35],
                  [25, 25, 25, 35, 35, 35], [25, 25, 25, 35, 35, 35],
                  [25, 25, 25, 35, 35, 35]])

    P = np.array([[0, 5000, 10000, 0, 5000, 10000],
                  [0, 5000, 10000, 0, 5000, 10000],
                  [0, 5000, 10000, 0, 5000, 10000],
                  [0, 5000, 10000, 0, 5000, 10000],
                  [0, 5000, 10000, 0, 5000, 10000]])

    Pr = np.array([0, 0, 0, 0, 0, 0])

    UN_ptmp = np.array([[0, -0.3061, -0.9667, 0, -0.3856, -1.0974],
                        [10, 9.3531, 8.4684, 10, 9.2906, 8.3643],
                        [20, 19.0438, 17.9426, 20, 18.9985, 17.8654],
                        [30, 28.7512, 27.4353, 30, 28.7231, 27.3851],
                        [40, 38.4607, 36.9254, 40, 38.4498, 36.9023]])

    PT = sw.ptmp(S, T, P, Pr) * 1.00024

    # Display results.
    f.write('\n')
    f.write('\n%s' % asterisks)
    f.write('\nComparison of accepted values from UNESCO 1983 ')
    f.write('\n (Unesco Tech. Paper in Marine Sci. No. 44, p45)')
    f.write('\n%s' % asterisks)
    f.write('\n')

    m, n = S.shape  # TODO: so many loops there must be a better way.
    for icol in range(0, n):
        f.write('\n   Sal  Temp  Press     PTMP       ptmp')
        f.write('\n  (psu)  (C)   (db)     (C)          (C)\n')
        result = np.vstack(
            (S[:, icol], T[:, icol], P[:, icol], UN_ptmp[:, icol], PT[:,
                                                                      icol]))
        for iline in range(0, m):
            f.write(" %4.0f  %4.0f   %5.0f   %8.4f  %11.5f\n" %
                    tuple(result[:, iline]))

    # Test main module svan.
    f.write('\n%s' % asterisks)
    f.write('\n**  TESTING MODULE: svan')
    f.write('\n**  and SUB-MODULE: dens dens0 smow seck pden ptmp')
    f.write('\n%s' % asterisks)

    # Test data FROM: Unesco Tech. Paper in Marine Sci. No. 44, p22.
    s = np.array([0, 0, 0, 0, 35, 35, 35, 35])
    p = np.array([0, 10000, 0, 10000, 0, 10000, 0, 10000])
    t = np.array([0, 0, 30, 30, 0, 0, 30, 30]) / 1.00024

    UN_svan = np.array(
        [2749.54, 2288.61, 3170.58, 3147.85, 0.0, 0.00, 607.14, 916.34])

    SVAN = sw.svan(s, t, p)

    # DISPLAY RESULTS
    f.write('\n')
    f.write('\n%s' % asterisks)
    f.write('\n')
    f.write('\nComparison of accepted values from UNESCO 1983')
    f.write('\n (Unesco Tech. Paper in Marine Sci. No. 44, p22)')
    f.write('\n%s' % asterisks)
    f.write('\n')
    f.write('\n   Sal  Temp  Press        SVAN        svan')
    f.write('\n  (psu)  (C)   (db)    (1e-8*m3/kg)  (1e-8*m3/kg)\n')
    result = np.vstack([s, t, p, UN_svan, 1e+8 * SVAN])
    for iline in range(0, len(SVAN)):
        f.write(" %4.0f  %4.0f   %5.0f   %11.2f    %11.3f\n" %
                tuple(result[:, iline]))

    # Test main module salt.
    f.write('\n%s' % asterisks)
    f.write('\n**  TESTING MODULE: salt')
    f.write('\n**  and SUB-MODULE: salrt salrp sals')
    f.write('\n%s' % asterisks)
    f.write('\n')

    # Test 1 - data from Unesco 1983 p9.
    R = np.array([1, 1.2, 0.65])  # cndr = R.
    T = np.array([15, 20, 5]) / 1.00024
    P = np.array([0, 2000, 1500])
    #Rt   = np.array([  1, 1.0568875, 0.81705885])
    UN_S = np.array([35, 37.245628, 27.995347])

    S = sw.salt(R, T, P)

    # Display results.
    f.write('\n')
    f.write('\n%s' % asterisks)
    f.write('\nComparison of accepted values from UNESCO 1983 ')
    f.write('\n(Unesco Tech. Paper in Marine Sci. No. 44, p9)')
    f.write('\n%s' % asterisks)
    f.write('\n')

    f.write('\n   Temp    Press       R              S           salt')
    f.write('\n   (C)     (db)    (no units)       (psu)          (psu)\n')
    table = np.vstack([T, P, R, UN_S, S])
    m, n = table.shape
    for iline in range(0, n):
        f.write(" %4.0f       %4.0f  %8.2f      %11.6f  %14.7f\n" %
                tuple(table[:, iline]))

    # Test main module cndr.
    f.write('\n%s' % asterisks)
    f.write('\n**  TESTING MODULE: cndr')
    f.write('\n**  and SUB-MODULE: salds')
    f.write('\n%s' % asterisks)

    # Test 1 - data from Unesco 1983 p9.
    T = np.array([0, 10, 0, 10, 10, 30]) / 1.00024
    P = np.array([0, 0, 1000, 1000, 0, 0])
    S = np.array([25, 25, 25, 25, 40, 40])
    UN_R = np.array(
        [0.498088, 0.654990, 0.506244, 0.662975, 1.000073, 1.529967])
    R = sw.cndr(S, T, P)

    # Display results.
    f.write('\n')
    f.write('\n%s' % asterisks)
    f.write('\nComparison of accepted values from UNESCO 1983 ')
    f.write('\n (Unesco Tech. Paper in Marine Sci. No. 44, p14)')
    f.write('\n%s' % asterisks)
    f.write('\n')
    f.write('\n')

    f.write('\n   Temp    Press       S            cndr         cndr')
    f.write('\n   (C)     (db)      (psu)        (no units)    (no units)\n')
    table = np.vstack([T, P, S, UN_R, R])
    m, n = table.shape
    for iline in range(0, n):
        f.write(" %4.0f       %4.0f   %8.6f   %11.6f  %14.8f\n" %
                tuple(table[:, iline]))

    # Test main module depth.
    f.write('\n%s' % asterisks)
    f.write('\n**  TESTING MODULE: depth')
    f.write('\n%s' % asterisks)

    # Test data - matrix "pressure", vector "lat"  Unesco 1983 data p30.
    lat = np.array([0, 30, 45, 90])
    P = np.array([[500, 500, 500, 500], [5000, 5000, 5000, 5000],
                  [10000, 10000, 10000, 10000]])

    UN_dpth = np.array([[496.65, 496.00, 495.34, 494.03],
                        [4915.04, 4908.56, 4902.08, 4889.13],
                        [9725.47, 9712.65, 9699.84, 9674.23]])

    dpth = sw.dpth(P, lat)

    # Display results.
    f.write('\n')
    f.write('\n%s' % asterisks)
    f.write('\nComparison of accepted values from Unesco 1983 ')
    f.write('\n(Unesco Tech. Paper in Marine Sci. No. 44, p28)')
    f.write('\n%s' % asterisks)
    f.write('\n')

    f.write('\n')
    for irow in range(0, 3):
        f.write('\n    Lat       Press     DPTH      dpth')
        f.write('\n  (degree)    (db)     (meter)    (meter)\n')
        table = np.vstack([lat, P[irow, :], UN_dpth[irow, :], dpth[irow, :]])
        m, n = table.shape
        for iline in range(0, n):
            f.write("  %6.3f     %6.0f   %8.2f   %8.3f\n" %
                    tuple(table[:, iline]))

    # Test main module fp.
    f.write('\n%s' % asterisks)
    f.write('\n**  TESTING MODULE: fp')
    f.write('\n%s' % asterisks)

    # Test 1 - UNESCO data p.30.
    S = np.array([[5, 10, 15, 20, 25, 30, 35, 40],
                  [5, 10, 15, 20, 25, 30, 35, 40]])

    P = np.array([[0, 0, 0, 0, 0, 0, 0, 0],
                  [500, 500, 500, 500, 500, 500, 500, 500]])

    UN_fp = np.array(
        [[-0.274, -0.542, -0.812, -1.083, -1.358, -1.638, -1.922, -2.212],
         [-0.650, -0.919, -1.188, -1.460, -1.735, -2.014, -2.299, -2.589]])

    FP = sw.fp(S, P)

    # Display results.
    f.write('\n')
    f.write('\n%s' % asterisks)
    f.write('\nComparison of accepted values from UNESCO 1983 ')
    f.write('\n (Unesco Tech. Paper in Marine Sci. No. 44, p30)')
    f.write('\n%s' % asterisks)
    f.write('\n')

    f.write('\n')
    for irow in range(0, 2):
        f.write('\n   Sal   Press      fp        fp')
        f.write('\n  (psu)   (db)      (C)        (C)\n')
        table = np.vstack(
            [S[irow, :], P[irow, :], UN_fp[irow, :], FP[irow, :]])
        m, n = table.shape
        for iline in range(0, n):
            f.write(" %4.0f   %5.0f   %8.3f  %11.4f\n" %
                    tuple(table[:, iline]))

    # Test main module cp.
    f.write('\n%s' % asterisks)
    f.write('\n**  TESTING MODULE: cp')
    f.write('\n%s' % asterisks)

    # Test 1.
    # Data from Pond and Pickard Intro. Dynamical Oceanography 2nd ed. 1986
    T = np.array([[0, 0, 0, 0, 0, 0], [10, 10, 10, 10, 10, 10],
                  [20, 20, 20, 20, 20, 20], [30, 30, 30, 30, 30, 30],
                  [40, 40, 40, 40, 40, 40]]) / 1.00024

    S = np.array([[25, 25, 25, 35, 35, 35], [25, 25, 25, 35, 35, 35],
                  [25, 25, 25, 35, 35, 35], [25, 25, 25, 35, 35, 35],
                  [25, 25, 25, 35, 35, 35]])

    P = np.array([[0, 5000, 10000, 0, 5000, 10000],
                  [0, 5000, 10000, 0, 5000, 10000],
                  [0, 5000, 10000, 0, 5000, 10000],
                  [0, 5000, 10000, 0, 5000, 10000],
                  [0, 5000, 10000, 0, 5000, 10000]])

    UN_cp = np.array([[4048.4, 3896.3, 3807.7, 3986.5, 3849.3, 3769.1],
                      [4041.8, 3919.6, 3842.3, 3986.3, 3874.7, 3804.4],
                      [4044.8, 3938.6, 3866.7, 3993.9, 3895.0, 3828.3],
                      [4049.1, 3952.0, 3883.0, 4000.7, 3909.2, 3844.3],
                      [4051.2, 3966.1, 3905.9, 4003.5, 3923.9, 3868.3]])

    CP = sw.cp(S, T, P)

    # Display results.
    f.write('\n')
    f.write('\n%s' % asterisks)
    f.write('\nComparison of accepted values from UNESCO 1983 ')
    f.write('\n (Unesco Tech. Paper in Marine Sci. No. 44, p37)')
    f.write('\n%s' % asterisks)
    f.write('\n')

    m, n = S.shape
    f.write('\n')
    for icol in range(0, n):
        f.write('\n   Sal  Temp  Press      Cp        cp')
        f.write('\n  (psu)  (C)   (db)    (J/kg.C)   (J/kg.C)\n')
        result = np.vstack(
            [S[:, icol], T[:, icol], P[:, icol], UN_cp[:, icol], CP[:, icol]])
        for iline in range(0, m):
            f.write(" %4.0f  %4.0f   %5.0f   %8.1f  %11.2f\n" %
                    tuple(result[:, iline]))

    # Test main module svel.
    f.write('\n%s' % asterisks)
    f.write('\n**  TESTING MODULE: svel')
    f.write('\n%s' % asterisks)

    # Test 1.
    # Data from Pond and Pickard Intro. Dynamical Oceanography 2nd ed. 1986
    T = np.array([[0, 0, 0, 0, 0, 0], [10, 10, 10, 10, 10, 10],
                  [20, 20, 20, 20, 20, 20], [30, 30, 30, 30, 30, 30],
                  [40, 40, 40, 40, 40, 40]]) / 1.00024

    S = np.array([[25, 25, 25, 35, 35, 35], [25, 25, 25, 35, 35, 35],
                  [25, 25, 25, 35, 35, 35], [25, 25, 25, 35, 35, 35],
                  [25, 25, 25, 35, 35, 35]])

    P = np.array([[0, 5000, 10000, 0, 5000, 10000],
                  [0, 5000, 10000, 0, 5000, 10000],
                  [0, 5000, 10000, 0, 5000, 10000],
                  [0, 5000, 10000, 0, 5000, 10000],
                  [0, 5000, 10000, 0, 5000, 10000]])

    UN_svel = np.array([[1435.8, 1520.4, 1610.4, 1449.1, 1534.0, 1623.2],
                        [1477.7, 1561.3, 1647.4, 1489.8, 1573.4, 1659.0],
                        [1510.3, 1593.6, 1676.8, 1521.5, 1604.5, 1687.2],
                        [1535.2, 1619.0, 1700.6, 1545.6, 1629.0, 1710.1],
                        [1553.4, 1638.0, 1719.2, 1563.2, 1647.3, 1727.8]])

    SVEL = sw.svel(S, T, P)

    # Display results.
    f.write('\n')
    f.write('\n%s' % asterisks)
    f.write('\nComparison of accepted values from UNESCO 1983 ')
    f.write('\n (Unesco Tech. Paper in Marine Sci. No. 44, p50)')
    f.write('\n%s' % asterisks)
    f.write('\n')

    m, n = SVEL.shape
    f.write('\n')
    for icol in range(0, n):
        f.write('\n   Sal  Temp  Press     SVEL       svel')
        f.write('\n  (psu)  (C)   (db)     (m/s)       (m/s)\n')

        result = np.vstack([
            S[:, icol], T[:, icol], P[:, icol], UN_svel[:, icol], SVEL[:, icol]
        ])
        for iline in range(0, m):
            f.write(" %4.0f  %4.0f   %5.0f   %8.1f  %11.3f\n" %
                    tuple(result[:, iline]))

    # Test submodules alpha beta aonb.
    f.write('\n%s' % asterisks)
    f.write('\n**  and SUB-MODULE: alpha beta aonb')
    f.write('\n%s' % asterisks)

    # Data from McDougall 1987.
    s = 40
    PT = 10
    p = 4000
    beta_lit = 0.72088e-03
    aonb_lit = 0.34763
    alpha_lit = aonb_lit * beta_lit

    BETA = sw.beta(s, PT, p, pt=True)
    ALPHA = sw.alpha(s, PT, p, pt=True)
    AONB = sw.aonb(s, PT, p, pt=True)

    # Display results.
    f.write('\n')
    f.write('\n%s' % asterisks)
    f.write('\nComparison of accepted values from MCDOUGALL 1987 ')
    f.write('\n%s' % asterisks)
    f.write('\n')
    f.write('\n')

    f.write('\n   Sal  Temp  Press     BETA       beta')
    f.write('\n  (psu)  (C)   (db)   (psu^-1)     (psu^-1)\n')
    table = np.hstack([s, PT, p, beta_lit, BETA])
    f.write(" %4.0f  %4.0f   %5.0f   %11.4e  %11.5e\n" % tuple(table))

    f.write('\n   Sal  Temp  Press     AONB       aonb')
    f.write('\n  (psu)  (C)   (db)   (psu C^-1)   (psu C^-1)\n')
    table = np.hstack([s, PT, p, aonb_lit, AONB])
    f.write(" %4.0f  %4.0f   %5.0f   %8.5f  %11.6f\n" % tuple(table))

    f.write('\n   Sal  Temp  Press     ALPHA       alpha')
    f.write('\n  (psu)  (C)   (db)    (psu^-1)     (psu^-1)\n')
    table = np.hstack([s, PT, p, alpha_lit, ALPHA])
    f.write(" %4.0f  %4.0f   %5.0f   %11.4e  %11.4e\n" % tuple(table))

    # Test main moduleS  satO2 satN2 satAr.
    f.write('\n%s' % asterisks)
    f.write('\n**  TESTING MODULE: satO2 satN2 satAr')
    f.write('\n%s' % asterisks)
    f.write('\n')

    # Data from Weiss 1970.
    T = np.array([[-1, -1], [10, 10], [20, 20], [40, 40]]) / 1.00024

    S = np.array([[20, 40], [20, 40], [20, 40], [20, 40]])

    lit_O2 = np.array([[9.162, 7.984], [6.950, 6.121], [5.644, 5.015],
                       [4.050, 3.656]])

    lit_N2 = np.array([[16.28, 14.01], [12.64, 11.01], [10.47, 9.21],
                       [7.78, 6.95]])

    lit_Ar = np.array([[0.4456, 0.3877], [0.3397, 0.2989], [0.2766, 0.2457],
                       [0.1986, 0.1794]])

    O2 = sw.satO2(S, T)
    N2 = sw.satN2(S, T)
    Ar = sw.satAr(S, T)

    # Display results.
    f.write('\n')
    f.write('\n%s' % asterisks)
    f.write('\nComparison of accepted values from Weiss, R.F. 1970 ')
    f.write('\n"The solubility of nitrogen, oxygen and argon in water')
    f.write('\n and seawater." Deep-Sea Research., 1970, Vol 17, pp721-735.')
    f.write('\n%s' % asterisks)
    f.write('\n')

    m, n = S.shape
    f.write('\n')
    for icol in range(0, n):
        f.write('\n   Sal  Temp      O2         satO2')
        f.write('\n  (psu)  (C)      (ml/l)     (ml/l)\n')
        result = np.vstack(
            [S[:, icol], T[:, icol], lit_O2[:, icol], O2[:, icol]])
        for iline in range(0, m):
            f.write(" %4.0f  %4.0f    %8.2f   %9.3f\n" %
                    tuple(result[:, iline]))

    for icol in range(0, n):
        f.write('\n   Sal  Temp      N2         satN2')
        f.write('\n  (psu)  (C)      (ml/l)     (ml/l)\n')
        result = np.vstack(
            [S[:, icol], T[:, icol], lit_N2[:, icol], N2[:, icol]])
        for iline in range(0, m):
            f.write(" %4.0f  %4.0f    %8.2f  %9.3f\n" %
                    tuple(result[:, iline]))

    for icol in range(0, n):
        f.write('\n   Sal  Temp      Ar         satAr')
        f.write('\n  (psu)  (C)      (ml/l)     (ml/l)\n')
        result = np.vstack(
            [S[:, icol], T[:, icol], lit_Ar[:, icol], Ar[:, icol]])
        for iline in range(0, m):
            f.write(" %4.0f  %4.0f     %8.4f  %9.4f\n" %
                    tuple(result[:, iline]))
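The same comparison can also be spot-checked programmatically; a minimal sketch for the depth module using the UNESCO 1983 values quoted above, with a loose tolerance.

import numpy as np
import seawater as sw

lat = np.array([0, 30, 45, 90])
P = np.array([[500] * 4, [5000] * 4, [10000] * 4])
UN_dpth = np.array([[496.65, 496.00, 495.34, 494.03],
                    [4915.04, 4908.56, 4902.08, 4889.13],
                    [9725.47, 9712.65, 9699.84, 9674.23]])
assert np.allclose(sw.dpth(P, lat), UN_dpth, atol=0.1)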
Example no. 14
def test(fileout='python-test.txt'):
    r"""Copy of the Matlab test.

    Modifications: Phil Morgan
                   03-12-12. Lindsay Pender, Converted to ITS-90.
    """
    f = open(fileout, 'w')
    asterisks = '*' * 76

    f.write(asterisks)
    f.write('\n    TEST REPORT    ')
    f.write('\n')
    f.write('\n SEA WATER LIBRARY %s' % sw.__version__)
    f.write('\n')
    # Show some info about this Python.
    f.write('\npython version: %s' % sys.version)
    f.write('\n on %s computer %s' % (uname()[0], uname()[-1]))
    f.write('\n')
    f.write('\n')
    f.write(asctime(localtime()))
    f.write('\n')
    f.write('\n')

    # Test main module  ptmp.
    f.write('\n%s' % asterisks)
    f.write('\n**  TESTING MODULE: ptmp')
    f.write('\n**  and SUB-MODULE: adtg')
    f.write('\n%s' % asterisks)
    f.write('\n')
    f.write('\n')

    # Test 1 - data from Unesco 1983 p45.
    T = np.array([[0,  0,  0,  0,  0,  0],
                  [10, 10, 10, 10, 10, 10],
                  [20, 20, 20, 20, 20, 20],
                  [30, 30, 30, 30, 30, 30],
                  [40, 40, 40, 40, 40, 40]])

    T = T / 1.00024

    S = np.array([[25, 25, 25, 35, 35, 35],
                  [25, 25, 25, 35, 35, 35],
                  [25, 25, 25, 35, 35, 35],
                  [25, 25, 25, 35, 35, 35],
                  [25, 25, 25, 35, 35, 35]])

    P = np.array([[0, 5000, 10000, 0, 5000, 10000],
                  [0, 5000, 10000, 0, 5000, 10000],
                  [0, 5000, 10000, 0, 5000, 10000],
                  [0, 5000, 10000, 0, 5000, 10000],
                  [0, 5000, 10000, 0, 5000, 10000]])

    Pr = np.array([0, 0, 0, 0, 0, 0])

    UN_ptmp = np.array([[0, -0.3061, -0.9667,  0, -0.3856, -1.0974],
                        [10,  9.3531,  8.4684, 10,  9.2906,  8.3643],
                        [20, 19.0438, 17.9426, 20, 18.9985, 17.8654],
                        [30, 28.7512, 27.4353, 30, 28.7231, 27.3851],
                        [40, 38.4607, 36.9254, 40, 38.4498, 36.9023]])

    PT = sw.ptmp(S, T, P, Pr) * 1.00024

    # Display results.
    f.write('\n')
    f.write('\n%s' % asterisks)
    f.write('\nComparison of accepted values from UNESCO 1983 ')
    f.write('\n (Unesco Tech. Paper in Marine Sci. No. 44, p45)')
    f.write('\n%s' % asterisks)
    f.write('\n')

    m, n = S.shape  # TODO: so many loops there must be a better way.
    for icol in range(0, n):
        f.write('\n   Sal  Temp  Press     PTMP       ptmp')
        f.write('\n  (psu)  (C)   (db)     (C)          (C)\n')
        result = np.vstack((S[:, icol], T[:, icol], P[:, icol],
                            UN_ptmp[:, icol], PT[:, icol]))
        for iline in range(0, m):
            f.write(" %4.0f  %4.0f   %5.0f   %8.4f  %11.5f\n" %
                    tuple(result[:, iline]))

    # Test main module svan.
    f.write('\n%s' % asterisks)
    f.write('\n**  TESTING MODULE: svan')
    f.write('\n**  and SUB-MODULE: dens dens0 smow seck pden ptmp')
    f.write('\n%s' % asterisks)

    # Test data FROM: Unesco Tech. Paper in Marine Sci. No. 44, p22.
    s = np.array([0,     0,  0,     0, 35,    35, 35,   35])
    p = np.array([0, 10000,  0, 10000,  0, 10000,  0, 10000])
    t = np.array([0,     0, 30,    30,  0,     0, 30,    30]) / 1.00024

    UN_svan = np.array([2749.54, 2288.61, 3170.58, 3147.85,
                        0.0,    0.00,  607.14,  916.34])

    SVAN = sw.svan(s, t, p)

    # DISPLAY RESULTS
    f.write('\n')
    f.write('\n%s' % asterisks)
    f.write('\n')
    f.write('\nComparison of accepted values from UNESCO 1983')
    f.write('\n (Unesco Tech. Paper in Marine Sci. No. 44, p22)')
    f.write('\n%s' % asterisks)
    f.write('\n')
    f.write('\n   Sal  Temp  Press        SVAN        svan')
    f.write('\n  (psu)  (C)   (db)    (1e-8*m3/kg)  (1e-8*m3/kg)\n')
    result = np.vstack([s, t, p, UN_svan, 1e+8 * SVAN])
    for iline in range(0, len(SVAN)):
        f.write(" %4.0f  %4.0f   %5.0f   %11.2f    %11.3f\n" %
                tuple(result[:, iline]))

    # Test main module salt.
    f.write('\n%s' % asterisks)
    f.write('\n**  TESTING MODULE: salt')
    f.write('\n**  and SUB-MODULE: salrt salrp sals')
    f.write('\n%s' % asterisks)
    f.write('\n')

    # Test 1 - data from Unesco 1983 p9.
    R = np.array([1, 1.2, 0.65])  # cndr = R.
    T = np.array([15, 20, 5]) / 1.00024
    P = np.array([0, 2000, 1500])
    #Rt   = np.array([  1, 1.0568875, 0.81705885])
    UN_S = np.array([35, 37.245628,  27.995347])

    S = sw.salt(R, T, P)

    # Display results.
    f.write('\n')
    f.write('\n%s' % asterisks)
    f.write('\nComparison of accepted values from UNESCO 1983 ')
    f.write('\n(Unesco Tech. Paper in Marine Sci. No. 44, p9)')
    f.write('\n%s' % asterisks)
    f.write('\n')

    f.write('\n   Temp    Press       R              S           salt')
    f.write('\n   (C)     (db)    (no units)       (psu)          (psu)\n')
    table = np.vstack([T, P, R, UN_S, S])
    m, n = table.shape
    for iline in range(0, n):
        f.write(" %4.0f       %4.0f  %8.2f      %11.6f  %14.7f\n" %
                tuple(table[:, iline]))

    # Test main module cndr.
    f.write('\n%s' % asterisks)
    f.write('\n**  TESTING MODULE: cndr')
    f.write('\n**  and SUB-MODULE: salds')
    f.write('\n%s' % asterisks)

    # Test 1 - data from Unesco 1983 p9.
    T = np.array([0, 10, 0, 10, 10, 30]) / 1.00024
    P = np.array([0,  0, 1000, 1000, 0, 0])
    S = np.array([25, 25, 25, 25, 40, 40])
    UN_R = np.array([0.498088, 0.654990, 0.506244, 0.662975, 1.000073,
                     1.529967])
    R = sw.cndr(S, T, P)

    # Display results.
    f.write('\n')
    f.write('\n%s' % asterisks)
    f.write('\nComparison of accepted values from UNESCO 1983 ')
    f.write('\n (Unesco Tech. Paper in Marine Sci. No. 44, p14)')
    f.write('\n%s' % asterisks)
    f.write('\n')
    f.write('\n')

    f.write('\n   Temp    Press       S            cndr         cndr')
    f.write('\n   (C)     (db)      (psu)        (no units)    (no units)\n')
    table = np.vstack([T, P, S, UN_R, R])
    m, n = table.shape
    for iline in range(0, n):
        f.write(" %4.0f       %4.0f   %8.6f   %11.6f  %14.8f\n" %
                tuple(table[:, iline]))

    # Test main module depth.
    f.write('\n%s' % asterisks)
    f.write('\n**  TESTING MODULE: depth')
    f.write('\n%s' % asterisks)

    # Test data - matrix "pressure", vector "lat"  Unesco 1983 data p30.
    lat = np.array([0, 30, 45, 90])
    P = np.array([[500,   500,   500,  500],
                  [5000,  5000,  5000, 5000],
                  [10000, 10000, 10000, 10000]])

    UN_dpth = np.array([[496.65,  496.00,  495.34,  494.03],
                        [4915.04, 4908.56, 4902.08, 4889.13],
                        [9725.47, 9712.65, 9699.84, 9674.23]])

    dpth = sw.dpth(P, lat)

    # Display results.
    f.write('\n')
    f.write('\n%s' % asterisks)
    f.write('\nComparison of accepted values from Unesco 1983 ')
    f.write('\n(Unesco Tech. Paper in Marine Sci. No. 44, p28)')
    f.write('\n%s' % asterisks)
    f.write('\n')

    f.write('\n')
    for irow in range(0, 3):
        f.write('\n    Lat       Press     DPTH      dpth')
        f.write('\n  (degree)    (db)     (meter)    (meter)\n')
        table = np.vstack([lat, P[irow, :], UN_dpth[irow, :], dpth[irow, :]])
        m, n = table.shape
        for iline in range(0, n):
            f.write("  %6.3f     %6.0f   %8.2f   %8.3f\n" %
                    tuple(table[:, iline]))

    # Test main module fp.
    f.write('\n%s' % asterisks)
    f.write('\n**  TESTING MODULE: fp')
    f.write('\n%s' % asterisks)

    # Test 1 - UNESCO data p.30.
    S = np.array([[5, 10, 15, 20, 25, 30, 35, 40],
                  [5, 10, 15, 20, 25, 30, 35, 40]])

    P = np.array([[0,   0,   0,   0,   0,   0,   0,   0],
                  [500, 500, 500, 500, 500, 500, 500, 500]])

    UN_fp = np.array([[-0.274, -0.542, -0.812, -1.083, -1.358, -1.638, -1.922,
                       -2.212], [-0.650, -0.919, -1.188, -1.460, -1.735,
                                 -2.014, -2.299, -2.589]])

    FP = sw.fp(S, P)

    # Display results.
    f.write('\n')
    f.write('\n%s' % asterisks)
    f.write('\nComparison of accepted values from UNESCO 1983 ')
    f.write('\n (Unesco Tech. Paper in Marine Sci. No. 44, p30)')
    f.write('\n%s' % asterisks)
    f.write('\n')

    f.write('\n')
    for irow in range(0, 2):
        f.write('\n   Sal   Press      fp        fp')
        f.write('\n  (psu)   (db)      (C)        (C)\n')
        table = np.vstack([S[irow, :], P[irow, :], UN_fp[irow, :],
                           FP[irow, :]])
        m, n = table.shape
        for iline in range(0, n):
            f.write(" %4.0f   %5.0f   %8.3f  %11.4f\n" %
                    tuple(table[:, iline]))

    # Test main module cp.
    f.write('\n%s' % asterisks)
    f.write('\n**  TESTING MODULE: cp')
    f.write('\n%s' % asterisks)

    # Test 1.
    # Data from Pond and Pickard Intro. Dynamical Oceanography 2nd ed. 1986
    T = np.array([[0,  0,  0,  0,  0,  0],
                  [10, 10, 10, 10, 10, 10],
                  [20, 20, 20, 20, 20, 20],
                  [30, 30, 30, 30, 30, 30],
                  [40, 40, 40, 40, 40, 40]]) / 1.00024

    S = np.array([[25, 25, 25, 35, 35, 35],
                  [25, 25, 25, 35, 35, 35],
                  [25, 25, 25, 35, 35, 35],
                  [25, 25, 25, 35, 35, 35],
                  [25, 25, 25, 35, 35, 35]])

    P = np.array([[0, 5000, 10000, 0, 5000, 10000],
                  [0, 5000, 10000, 0, 5000, 10000],
                  [0, 5000, 10000, 0, 5000, 10000],
                  [0, 5000, 10000, 0, 5000, 10000],
                  [0, 5000, 10000, 0, 5000, 10000]])

    UN_cp = np.array([[4048.4,  3896.3,  3807.7,  3986.5,  3849.3,  3769.1],
                      [4041.8,  3919.6,  3842.3,  3986.3,  3874.7,  3804.4],
                      [4044.8,  3938.6,  3866.7,  3993.9,  3895.0,  3828.3],
                      [4049.1,  3952.0,  3883.0,  4000.7,  3909.2,  3844.3],
                      [4051.2,  3966.1,  3905.9,  4003.5,  3923.9,  3868.3]])

    CP = sw.cp(S, T, P)

    # Display results.
    f.write('\n')
    f.write('\n%s' % asterisks)
    f.write('\nComparison of accepted values from UNESCO 1983 ')
    f.write('\n (Unesco Tech. Paper in Marine Sci. No. 44, p37)')
    f.write('\n%s' % asterisks)
    f.write('\n')

    m, n = S.shape
    f.write('\n')
    for icol in range(0, n):
        f.write('\n   Sal  Temp  Press      Cp        cp')
        f.write('\n  (psu)  (C)   (db)    (J/kg.C)   (J/kg.C)\n')
        result = np.vstack([S[:, icol], T[:, icol], P[:, icol],
                            UN_cp[:, icol], CP[:, icol]])
        for iline in range(0, m):
            f.write(" %4.0f  %4.0f   %5.0f   %8.1f  %11.2f\n" %
                    tuple(result[:, iline]))

    # Test main module svel.
    f.write('\n%s' % asterisks)
    f.write('\n**  TESTING MODULE: svel')
    f.write('\n%s' % asterisks)

    # Test 1.
    # Data from Pond and Pickard Intro. Dynamical Oceanography 2nd ed. 1986
    T = np.array([[0,  0,  0,  0,  0,  0],
                  [10, 10, 10, 10, 10, 10],
                  [20, 20, 20, 20, 20, 20],
                  [30, 30, 30, 30, 30, 30],
                  [40, 40, 40, 40, 40, 40]]) / 1.00024

    S = np.array([[25, 25, 25, 35, 35, 35],
                  [25, 25, 25, 35, 35, 35],
                  [25, 25, 25, 35, 35, 35],
                  [25, 25, 25, 35, 35, 35],
                  [25, 25, 25, 35, 35, 35]])

    P = np.array([[0, 5000, 10000, 0, 5000, 10000],
                  [0, 5000, 10000, 0, 5000, 10000],
                  [0, 5000, 10000, 0, 5000, 10000],
                  [0, 5000, 10000, 0, 5000, 10000],
                  [0, 5000, 10000, 0, 5000, 10000]])

    UN_svel = np.array([[1435.8, 1520.4, 1610.4, 1449.1, 1534.0, 1623.2],
                        [1477.7, 1561.3, 1647.4, 1489.8, 1573.4, 1659.0],
                        [1510.3, 1593.6, 1676.8, 1521.5, 1604.5, 1687.2],
                        [1535.2, 1619.0, 1700.6, 1545.6, 1629.0, 1710.1],
                        [1553.4, 1638.0, 1719.2, 1563.2, 1647.3, 1727.8]])

    SVEL = sw.svel(S, T, P)

    # Display results.
    f.write('\n')
    f.write('\n%s' % asterisks)
    f.write('\nComparison of accepted values from UNESCO 1983 ')
    f.write('\n (Unesco Tech. Paper in Marine Sci. No. 44, p50)')
    f.write('\n%s' % asterisks)
    f.write('\n')

    m, n = SVEL.shape
    f.write('\n')
    for icol in range(0, n):
        f.write('\n   Sal  Temp  Press     SVEL       svel')
        f.write('\n  (psu)  (C)   (db)     (m/s)       (m/s)\n')

        result = np.vstack([S[:, icol], T[:, icol], P[:, icol],
                            UN_svel[:, icol], SVEL[:, icol]])
        for iline in range(0, m):
            f.write(" %4.0f  %4.0f   %5.0f   %8.1f  %11.3f\n" %
                    tuple(result[:, iline]))

    # Test submodules alpha beta aonb.
    f.write('\n%s' % asterisks)
    f.write('\n**  and SUB-MODULE: alpha beta aonb')
    f.write('\n%s' % asterisks)

    # Data from McDougall 1987.
    s = 40
    PT = 10
    p = 4000
    beta_lit = 0.72088e-03
    aonb_lit = 0.34763
    alpha_lit = aonb_lit * beta_lit

    BETA = sw.beta(s, PT, p, pt=True)
    ALPHA = sw.alpha(s, PT, p, pt=True)
    AONB = sw.aonb(s, PT, p, pt=True)

    # Display results.
    f.write('\n')
    f.write('\n%s' % asterisks)
    f.write('\nComparison of accepted values from MCDOUGALL 1987 ')
    f.write('\n%s' % asterisks)
    f.write('\n')
    f.write('\n')

    f.write('\n   Sal  Temp  Press     BETA       beta')
    f.write('\n  (psu)  (C)   (db)   (psu^-1)     (psu^-1)\n')
    table = np.hstack([s, PT, p, beta_lit, BETA])
    f.write(" %4.0f  %4.0f   %5.0f   %11.4e  %11.5e\n" %
            tuple(table))

    f.write('\n   Sal  Temp  Press     AONB       aonb')
    f.write('\n  (psu)  (C)   (db)   (psu C^-1)   (psu C^-1)\n')
    table = np.hstack([s, PT, p, aonb_lit, AONB])
    f.write(" %4.0f  %4.0f   %5.0f   %8.5f  %11.6f\n" %
            tuple(table))

    f.write('\n   Sal  Temp  Press     ALPHA       alpha')
    f.write('\n  (psu)  (C)   (db)    (psu^-1)     (psu^-1)\n')
    table = np.hstack([s, PT, p, alpha_lit, ALPHA])
    f.write(" %4.0f  %4.0f   %5.0f   %11.4e  %11.4e\n" %
            tuple(table))

    # Test main moduleS  satO2 satN2 satAr.
    f.write('\n%s' % asterisks)
    f.write('\n**  TESTING MODULE: satO2 satN2 satAr')
    f.write('\n%s' % asterisks)
    f.write('\n')

    # Data from Weiss 1970.
    T = np.array([[-1, -1],
                  [10, 10],
                  [20, 20],
                  [40, 40]]) / 1.00024

    S = np.array([[20, 40],
                  [20, 40],
                  [20, 40],
                  [20, 40]])

    lit_O2 = np.array([[9.162, 7.984],
                       [6.950, 6.121],
                       [5.644, 5.015],
                       [4.050, 3.656]])

    lit_N2 = np.array([[16.28, 14.01],
                       [12.64, 11.01],
                       [10.47,  9.21],
                       [7.78,  6.95]])

    lit_Ar = np.array([[0.4456, 0.3877],
                       [0.3397, 0.2989],
                       [0.2766, 0.2457],
                       [0.1986, 0.1794]])

    O2 = sw.satO2(S, T)
    N2 = sw.satN2(S, T)
    Ar = sw.satAr(S, T)

    # Display results.
    f.write('\n')
    f.write('\n%s' % asterisks)
    f.write('\nComparison of accepted values from Weiss, R.F. 1970 ')
    f.write('\n"The solubility of nitrogen, oxygen and argon in water')
    f.write('\n and seawater." Deep-Sea Research., 1970, Vol 17, pp721-735.')
    f.write('\n%s' % asterisks)
    f.write('\n')

    m, n = S.shape
    f.write('\n')
    for icol in range(0, n):
        f.write('\n   Sal  Temp      O2         satO2')
        f.write('\n  (psu)  (C)      (ml/l)     (ml/l)\n')
        result = np.vstack([S[:, icol], T[:, icol],
                            lit_O2[:, icol], O2[:, icol]])
        for iline in range(0, m):
            f.write(" %4.0f  %4.0f    %8.2f   %9.3f\n" %
                    tuple(result[:, iline]))

    for icol in range(0, n):
        f.write('\n   Sal  Temp      N2         satN2')
        f.write('\n  (psu)  (C)      (ml/l)     (ml/l)\n')
        result = np.vstack([S[:, icol], T[:, icol],
                            lit_N2[:, icol], N2[:, icol]])
        for iline in range(0, m):
            f.write(" %4.0f  %4.0f    %8.2f  %9.3f\n" %
                    tuple(result[:, iline]))

    for icol in range(0, n):
        f.write('\n   Sal  Temp      Ar         satAr')
        f.write('\n  (psu)  (C)      (ml/l)     (ml/l)\n')
        result = np.vstack([S[:, icol], T[:, icol],
                            lit_Ar[:, icol], Ar[:, icol]])
        for iline in range(0, m):
            f.write(" %4.0f  %4.0f     %8.4f  %9.4f\n" %
                    tuple(result[:, iline]))
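As a standalone illustration of the alpha = aonb * beta consistency that the test above checks against McDougall (1987), here is a minimal sketch (assuming the seawater package is importable as sw, as elsewhere in this listing):

import numpy as np
import seawater as sw

# McDougall (1987) check point used in the test above: S = 40, PT = 10 degC, p = 4000 db
S, PT, p = 40, 10, 4000
ALPHA = sw.alpha(S, PT, p, pt=True)  # thermal expansion coefficient (1/degC)
BETA = sw.beta(S, PT, p, pt=True)    # saline contraction coefficient (1/psu)
AONB = sw.aonb(S, PT, p, pt=True)    # ratio ALPHA / BETA (psu/degC)

# the three routines should be mutually consistent: ALPHA == AONB * BETA
assert np.isclose(ALPHA, AONB * BETA, rtol=1e-3)
print(ALPHA, BETA, AONB)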
Example n. 15
def convert_ios_dict(dat):
    '''
    convert_ios_dict

    Converts an ios_ctd.ios_read dictionary object to a standardized
    dictionary object whose keys are given by the global STANDARD_KEYS list.
    '''
    global STANDARD_KEYS, IOS_DAT_CONV_KEYS

    new_dat = {}

    # initialize the new object
    for key in STANDARD_KEYS:
        new_dat[key] = None

    # code to convert ios data here

    # longitude/latitude are floats, no need to convert
    new_dat['Longitude'] = dat['Longitude']
    new_dat['Latitude'] = dat['Latitude']

    # Day/Month/Year determined by parsing Date string
    date_str = dat['Date']
    dt_obj = dateutil.parser.parse(date_str)
    new_dat['Day'] = dt_obj.day
    new_dat['Month'] = dt_obj.month
    new_dat['Year'] = dt_obj.year

    # ID is the complicated one, need to compare to available NOAA data
    # -- this is necessary to determine whether two casts are repeated
    # -- 17 character unique identifier
    # FMT: YR   MNTH DAY  LONG      LAT
    #      4chr 2chr 2chr 3chr.2chr 2chr.2chr
    # e.g: 20100915125314850
    #     - cast from Sept 15, 2010
    #     - lon: 125.31, lat 48.50
    # NOTE: doesn't differentiate between N/S or E/W for lat/lon
    new_dat['ID'] = '%04d%02d%02d%03.2f%02.2f' % (
        new_dat['Year'], new_dat['Month'], new_dat['Day'],
        np.abs(new_dat['Longitude']), np.abs(new_dat['Latitude']))
    # remove the decimal point from lat and lon
    new_dat['ID'] = new_dat['ID'].replace('.', '')

    for count, var in enumerate(dat['Variables']):
        # convert from pressure to depth
        if 'Pressure' in var['Name']:
            # need to use Seawater package to convert
            # from dbar to depth
            new_dat['Depth'] = []
            for ii in dat['DATA'][count]:
                new_dat['Depth'].append(SW.dpth(ii, new_dat['Latitude']))
            continue
        # otherwise, use the conversion keys to set data
        for key, val in IOS_DAT_CONV_KEYS.items():
            # make sure we haven't already written this variable
            # -- happens on occasion when Temperature:Primary and
            #    Temperature:Secondary are both available
            if var['Name'] == key and val in new_dat:
                if new_dat[val] is None or len(new_dat[val]) == 0:
                    new_dat[val] = dat['DATA'][count]
                break

    # archaic error handling for now: the keys are pre-initialized to None,
    # so check the values rather than key membership
    if new_dat.get('Temperature') is None:
        print('> ERROR :: TEMP ENTRY NOT FOUND')
        return None

    if new_dat.get('Salinity') is None:
        print('> ERROR :: SALN ENTRY NOT FOUND')
        return None

    if new_dat.get('Depth') is None:
        print('> ERROR :: PRESS ENTRY NOT FOUND')
        return None

    return new_dat
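A side note on the per-sample loop in convert_ios_dict: sw.dpth also accepts array input (as the other snippets in this listing use it), so a whole pressure column can be converted in one call. A minimal standalone sketch with made-up values (pressure_db and latitude are hypothetical):

import numpy as np
import seawater as SW

pressure_db = np.array([0.0, 10.0, 100.0, 1000.0])  # hypothetical pressures (dbar)
latitude = 48.5                                      # hypothetical latitude (deg N)
depth_m = SW.dpth(pressure_db, latitude)             # depths in metres, same shape as input
print(depth_m)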
def get_BB9_RR(files, get_data=True, get_dives=None, wavelength=700):
    SB_files = np.sort(glob.glob(files))
    sta = np.arange(len(SB_files))

    if get_dives is not None:
        SB_files = SB_files[get_dives.astype('int')]
        sta = sta[get_dives.astype('int')]

    lat = ()
    lon = ()
    t = ()
    if get_data:
        T = ()
        S = ()
        z = ()
        VSF = ()
        VSF_sd = ()

    for i in range(len(SB_files)):
        SB = readSB(SB_files[i], no_warn=True)

        if 'bbp650' not in SB.data.keys():
            # sometimes this isn't recorded for some reason...do not use these
            continue

        lat += (get_startlat(SB), )
        lon += (get_startlon(SB), )
        t += (get_starttime(SB), )

        if get_data:
            T += (np.array(SB.data['wt']), )
            S += (np.array(SB.data['sal']), )
            p = np.array(SB.data['pressure'])
            z += (sw.dpth(p, lat=50.5), )
            if wavelength == 470:
                wvs = [440, 488]
            elif wavelength == 700:
                wvs = [650, 715]
            else:
                raise ValueError(
                    'cannot have wavelength other than 470 or 700 right now')
            VSF1 = np.array(SB.data['bbp%03d' % wvs[0]]) / (2 * np.pi * 1.076)
            VSF1_sd = np.array(
                SB.data['bbp%03d_sd' % wvs[0]]) / (2 * np.pi * 1.076)
            VSF2 = np.array(SB.data['bbp%03d' % wvs[1]]) / (2 * np.pi * 1.076)
            VSF2_sd = np.array(
                SB.data['bbp%03d_sd' % wvs[1]]) / (2 * np.pi * 1.076)
            VSF += (((wvs[1] - wavelength) * VSF1 +
                     (wavelength - wvs[0]) * VSF2) / (wvs[1] - wvs[0]), )
            VSF_sd += (((wvs[1] - wavelength) * VSF1_sd +
                        (wavelength - wvs[0]) * VSF2_sd) / (wvs[1] - wvs[0]), )

    output = np.array(t, dtype='object'), np.array(
        lat, dtype='object'), np.array(lon, dtype='object'), sta
    if get_data:
        output += (
            np.array(T, dtype='object'),
            np.array(S, dtype='object'),
            np.array(z, dtype='object'),
            np.array(VSF, dtype='object'),
            np.array(VSF_sd, dtype='object'),
        )

    return output
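The wavelength interpolation inside get_BB9_RR can be illustrated in isolation. Below is a minimal sketch with made-up bbp values; the 1.076 chi factor and the 650/715 nm channel wavelengths are taken from the function above:

import numpy as np

chi = 1.076                         # chi factor used in get_BB9_RR above
wv0, wv1, target = 650, 715, 700    # neighbouring channel wavelengths and target (nm)

bbp_650 = np.array([2.0e-3, 1.5e-3])  # hypothetical particulate backscattering (m^-1)
bbp_715 = np.array([1.6e-3, 1.2e-3])

VSF_650 = bbp_650 / (2 * np.pi * chi)  # bbp -> VSF, as in the function above
VSF_715 = bbp_715 / (2 * np.pi * chi)

# linear interpolation in wavelength, mirroring the expression in get_BB9_RR
VSF_700 = ((wv1 - target) * VSF_650 + (target - wv0) * VSF_715) / (wv1 - wv0)
print(VSF_700)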
def get_FLBB_LF(filename,
                get_data=True,
                get_dives=None,
                remove_seawater=True,
                VSF_dark=0,
                remove_deep_vals=True):
    LF = readSB(filename, no_warn=True)
    LF_time = get_time(LF)
    LF_lon = np.array(LF.data['lon'])
    LF_lat = np.array(LF.data['lat'])

    LF_dep = np.array(sw.dpth(np.array(LF.data['pressure']), lat=50.5))

    if get_data:
        LF_temp = np.array(LF.data['wt'])
        LF_sal = np.array(LF.data['sal'])
        LF_VSF = np.array(LF.data['vsf700'])
        if remove_seawater:
            VSF_sw = betasw_wetlabs(700, 20, 140, 41, LF_temp, LF_sal,
                                    np.array(LF.data['pressure']))
            LF_VSF -= VSF_sw

    begin_dive = np.where((np.diff(LF_time) > .01)
                          & (np.diff(LF_time) < .4))[0]
    end_dive = np.array([
        np.where((np.diff(LF_dep[i:i + 1000]) < -1)
                 & (LF_dep[i + 1:i + 1000] > 150))[0][0] + i
        for i in begin_dive
    ])

    if get_dives is None:
        inds = np.arange(len(begin_dive))
    else:
        inds = get_dives

    LF_lon_dive = np.array(LF_lon[begin_dive][inds])
    LF_lat_dive = np.array(LF_lat[begin_dive][inds])
    LF_time_dive = np.array(LF_time[begin_dive][inds])

    if get_data:
        LF_temp_dive = np.array(
            [LF_temp[b:e] for b, e in zip(begin_dive[inds], end_dive[inds])],
            dtype='object')
        LF_sal_dive = np.array(
            [LF_sal[b:e] for b, e in zip(begin_dive[inds], end_dive[inds])],
            dtype='object')
        LF_VSF_dive = np.array(
            [LF_VSF[b:e] for b, e in zip(begin_dive[inds], end_dive[inds])],
            dtype='object')
        LF_dep_dive = np.array(
            [LF_dep[b:e] for b, e in zip(begin_dive[inds], end_dive[inds])],
            dtype='object')

        if remove_deep_vals:
            z_inds = [(d < 100) for d in LF_dep_dive]
            LF_temp_dive = np.array(
                [T[i] for T, i in zip(LF_temp_dive, z_inds)], dtype='object')
            LF_sal_dive = np.array([S[i] for S, i in zip(LF_sal_dive, z_inds)],
                                   dtype='object')
            LF_VSF_dive = np.array([V[i] for V, i in zip(LF_VSF_dive, z_inds)],
                                   dtype='object')
            LF_dep_dive = np.array([z[i] for z, i in zip(LF_dep_dive, z_inds)],
                                   dtype='object')

    # drop profiles beyond the time of the main EXPORTS experiment
    bad_inds = LF_time_dive > END_DATE
    LF_time_dive = LF_time_dive[~bad_inds]
    LF_lat_dive = LF_lat_dive[~bad_inds]
    LF_lon_dive = LF_lon_dive[~bad_inds]
    inds = inds[~bad_inds]

    if get_data:
        LF_temp_dive = LF_temp_dive[~bad_inds]
        LF_sal_dive = LF_sal_dive[~bad_inds]
        LF_dep_dive = LF_dep_dive[~bad_inds]
        LF_VSF_dive = LF_VSF_dive[~bad_inds]

    output = (
        LF_time_dive,
        LF_lat_dive,
        LF_lon_dive,
        inds,
    )

    if get_data:
        output += (
            LF_temp_dive,
            LF_sal_dive,
            LF_dep_dive,
            LF_VSF_dive,
        )

    return output
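The dive segmentation in get_FLBB_LF starts from gaps in the timestamp series. A minimal standalone sketch of that heuristic with made-up times (in days):

import numpy as np

# a new dive begins wherever consecutive timestamps are separated by a gap of
# between 0.01 and 0.4 days, as in get_FLBB_LF above
time_days = np.array([0.00, 0.001, 0.002, 0.05, 0.051, 0.052, 0.60, 0.601])
gaps = np.diff(time_days)
begin_dive = np.where((gaps > .01) & (gaps < .4))[0]
print(begin_dive)  # -> [2]: the gap between 0.002 and 0.05 marks a new dive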
def main(uri: str, filename: str):
    """Import Seal Profiles

    :param str uri: Database URI
    :param str filename: Seal NetCDF Filename, or directory of files
    """
    data.observational.init_db(uri, echo=False)
    data.observational.create_tables()

    if os.path.isdir(filename):
        filenames = sorted(glob.glob(os.path.join(filename, "*.nc")))
    else:
        filenames = [filename]

    for fname in filenames:
        print(fname)
        # We're only loading Temperature and Salinity from these files, so
        # we'll just make sure the DataTypes are in the db now.
        if DataType.query.get("sea_water_temperature") is None:
            dt = DataType(
                key="sea_water_temperature",
                name="Water Temperature",
                unit="degree_Celsius",
            )
            data.observational.db.session.add(dt)

        if DataType.query.get("sea_water_salinity") is None:
            dt = DataType(key="sea_water_salinity",
                          name="Water Salinity",
                          unit="PSU")
            data.observational.db.session.add(dt)

        data.observational.db.session.commit()

        with xr.open_dataset(fname) as ds:
            ds["TIME"] = ds.JULD.to_index().to_datetimeindex()
            ds["TIME"] = ds.TIME.swap_dims({"TIME": "N_PROF"})
            depth = seawater.dpth(
                ds.PRES_ADJUSTED,
                np.tile(ds.LATITUDE, (ds.PRES.shape[1], 1)).transpose(),
            )
            ds["DEPTH"] = (["N_PROF", "N_LEVELS"], depth)

            # This is a single platform, so we can construct it here.
            p = Platform(type=Platform.Type.animal,
                         unique_id=ds.reference_file_name)
            p.attrs = {
                "Principle Investigator": ds.pi_name,
                "Platform Code": ds.platform_code,
                "Species": ds.species,
            }

            data.observational.db.session.add(p)
            data.observational.db.session.commit()

            # Generate Stations
            df = ds[["LATITUDE", "LONGITUDE", "TIME"]].to_dataframe()
            stations = [
                Station(
                    platform_id=p.id,
                    latitude=row.LATITUDE,
                    longitude=row.LONGITUDE,
                    time=row.TIME,
                ) for idx, row in df.iterrows()
            ]

            # Using return_defaults=True here so that the stations will get
            # updated with id's. It's slower, but it means that we can just
            # put all the station ids into a pandas series to use when
            # constructing the samples.
            data.observational.db.session.bulk_save_objects(
                stations, return_defaults=True)
            df["STATION_ID"] = [s.id for s in stations]

            # Generate Samples
            df_samp = (ds[["TEMP_ADJUSTED", "PSAL_ADJUSTED",
                           "DEPTH"]].to_dataframe().reorder_levels(
                               ["N_PROF", "N_LEVELS"]))

            samples = [[
                Sample(
                    station_id=df.STATION_ID[idx[0]],
                    datatype_key="sea_water_temperature",
                    value=row.TEMP_ADJUSTED,
                    depth=row.DEPTH,
                ),
                Sample(
                    station_id=df.STATION_ID[idx[0]],
                    datatype_key="sea_water_salinity",
                    value=row.PSAL_ADJUSTED,
                    depth=row.DEPTH,
                ),
            ] for idx, row in df_samp.iterrows()]
            samples = [item for sublist in samples for item in sublist]
            samples = [s for s in samples if not pd.isna(s.value)]

            data.observational.db.session.bulk_save_objects(samples)
            data.observational.db.session.commit()
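A hypothetical invocation of main, with a placeholder database URI and data directory:

if __name__ == "__main__":
    # placeholder URI and path; point these at a real database and the
    # directory of seal NetCDF files before running
    main("sqlite:///observations.db", "/path/to/seal_profiles")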
    d, ly, lx = maskmod.shape
    depthmod2d = np.zeros([ly, lx])
    for j in np.arange(ly):
        for i in np.arange(lx):
            depthmod2d[j, i] = depthmod[np.min(np.where(maskmod[:, j, i] < 1))]

    # Read the profile file
    tfileEN4 = diren4 + file_prof
    dsen4 = Dataset(tfileEN4)
    laten4 = dsen4.variables['LATITUDE'][prof_prof]
    lonen4 = dsen4.variables['LONGITUDE'][prof_prof]
    dayen4 = dsen4.variables['JULD'][prof_prof]
    tempen4 = dsen4.variables['TEMP_ADJUSTED'][prof_prof]
    salten4 = dsen4.variables['PSAL_ADJUSTED'][prof_prof]
    presen4 = dsen4.variables['PRES_ADJUSTED'][prof_prof]
    depthen4 = seawater.dpth(presen4, laten4)

    # Find the last level and reduce the profiles
    indzprof = np.max(np.where(np.isnan(depthen4) == False))
    dmax = depthen4[indzprof - 1]
    obsred_dep = np.zeros(int(indzprof))
    obsred_temp = np.zeros(int(indzprof))
    obsred_salt = np.zeros(int(indzprof))
    for z in np.arange(int(indzprof)):
        obsred_dep[int(z)] = depthen4[int(z)]
        obsred_temp[int(z)] = tempen4[int(z)]
        obsred_salt[int(z)] = salten4[int(z)]

    # Remove the NaN in the profiles
    indtempnan = np.where(np.isnan(obsred_temp) == True)
    if len(indtempnan[0]) > 0:
Example n. 20
def depth_combined(self, CTDPRS, DEPTH, LATITUDE):
    # depth = DEPTH
    depth_from_pres = -1 * sw.dpth(CTDPRS, LATITUDE)
    # depth[np.isnan(depth)] = depth_from_pres[np.isnan(depth)]
    return depth_from_pres
Example n. 21
                o2_mlL = float((element_list_1[-1]))
                mol_volume_o2 = 44.66
                sw_density = float(element_list_1[11])
                o2_µmol = (o2_mlL * mol_volume_o2 / sw_density * 1000)
                mn_o2.append(o2_µmol)
                #mn_o2.append(o2_µmol + (-9.196871644229978) + (-0.007655474201452)*o2_µmol +\
                #(0.011910402128384)*m + (-0.000021367427539)*(m*m) +\
                #(-0.000326456036203)*(m*o2_µmol))     # o2 conversion ml to µmol
                mn_net.append(net)
                mn_index.append(net)
                mn_haul.append(mn_filename)
                volume = float(element_list_1[3])
                mn_volume.append(volume)
                mn_salinity.append(float(element_list_1[9]))
                mn_temp.append(float(element_list_1[7]))
                depth = sw.dpth(m, float(lat))  # calculating depth
                mn_depth.append(depth)

        line_counter += 1

    #creating dataframe with mean value
    mn_dataframe=pd.DataFrame({"haul": mn_haul, "net": mn_net,\
                               "volume": mn_volume,"pressure": mn_pressure, "depth":mn_depth, "o2": mn_o2,\
                               "salinity": mn_salinity, "temp": mn_temp})

    # set negative oxygen values to zero
    mn_dataframe.loc[mn_dataframe['o2'] < 0, 'o2'] = 0

    # creating columns net opening, closing and delta value to calculate the integrated biomass and abundance
    mn_data_mins = mn_dataframe.groupby([
        "haul",