An array of pandas Interval objects.

    Returns
    -------
    mids : 1D numpy array
        Midpoints of the intervals.

    """
    # pandas Interval exposes its arithmetic midpoint as `.mid`.
    return np.array([v.mid for v in intervals])


# %% [markdown]
# Load datasets.

# %%
# NOTE(review): every dataset below follows the same recipe — open the netcdf
# file, promote lon/lat to coordinates, then convert the raw time variable
# (presumably POSIX seconds, per the helper's name — confirm against utils)
# to numpy datetime64 so xarray treats it as a proper time axis.
up = xr.open_dataset("../proc/downstream_deep_upward_2018_enu.nc")
up = up.set_coords(["lon", "lat"])
up["time"] = utils.POSIX_to_datetime(up.time.values).astype(np.datetime64)

do = xr.open_dataset("../proc/downstream_deep_down_2018_enu.nc")
do = do.set_coords(["lon", "lat"])
do["time"] = utils.POSIX_to_datetime(do.time.values).astype(np.datetime64)

sbe51 = xr.open_dataset("../proc/downstream_deep_SBE37_10551_2018.nc")
sbe51 = sbe51.set_coords(["lon", "lat"])
sbe51["time"] = utils.POSIX_to_datetime(sbe51.time.values).astype(np.datetime64)

sbe52 = xr.open_dataset("../proc/downstream_deep_SBE37_10552_2018.nc")
sbe52 = sbe52.set_coords(["lon", "lat"])
sbe52["time"] = utils.POSIX_to_datetime(sbe52.time.values).astype(np.datetime64)

# NOTE(review): sbe53's time conversion is not visible in this chunk —
# verify it happens in the continuation, matching the datasets above.
sbe53 = xr.open_dataset("../proc/downstream_deep_SBE37_10553_2018.nc")
sbe53 = sbe53.set_coords(["lon", "lat"])
An array of pandas Interval objects.

    Returns
    -------
    mids : 1D numpy array
        Midpoints of the intervals.

    """
    # pandas Interval exposes its arithmetic midpoint as `.mid`.
    return np.array([v.mid for v in intervals])


# %% [markdown]
# Load datasets and do some basic conversion of times and variables.

# %%
sV = xr.open_dataset("../proc/ABLE_sentinel_2018_enu.nc")
sV = sV.set_coords(["lon", "lat"])
# Raw time is presumably POSIX seconds (per the helper's name — confirm
# against utils); convert to numpy datetime64 for a proper xarray time axis.
sV["time"] = utils.POSIX_to_datetime(sV.time.values).astype(np.datetime64)
# Project the instrument position to UTM easting/northing; the zone
# number/letter returned by utm.from_latlon are deliberately discarded.
x, y, *_ = utm.from_latlon(sV.lat, sV.lon)
sV = sV.assign_coords({"x": x, "y": y})

virt = xr.open_dataset("../proc/ABLE_sentinel_RBRvirtuoso_2018.nc")
virt = virt.set_coords(["lon", "lat"])
virt["time"] = utils.POSIX_to_datetime(virt.time.values).astype(np.datetime64)

sbe = xr.open_dataset("../proc/ABLE_sentinel_SBE37_2018.nc")
sbe = sbe.set_coords(["lon", "lat"])
sbe["time"] = utils.POSIX_to_datetime(sbe.time.values).astype(np.datetime64)

# %% [markdown]
# Define some parameters and simple thresholds for processing.
# language: python
# name: lcpp-dev
# ---

# %%
import xarray as xr
import matplotlib.pyplot as plt

import utils

# %% [markdown]
# ## Downstream deep upward looking

# %%
# Quick-look at the downstream deep upward-looking ADCP record.
ddu = xr.open_dataset("../proc/downstream_deep_upward_2018_enu.nc")  #, time=slice(9900, -2300)).sel(distance=slice(0, 75))
ddu["time"] = utils.POSIX_to_datetime(ddu.time.values)

# %%
ddu

# %%
# Horizontal velocity panels share one symmetric colour scale.
hvel_style = dict(vmin=-0.5, vmax=0.5, cmap="coolwarm")
ddu.u.plot(**hvel_style)

# %%
ddu.v.plot(**hvel_style)

# %%
ddu.a1.plot()

# %%
ddu.g1.plot()
# Paired ADCP and CTD file lists; adp1..adp3 / ctd1..ctd3 are defined
# earlier in the file (outside this chunk).
adps = [adp1, adp2, adp3]
ctds = [ctd1, ctd2, ctd3]

# %% [markdown]
# # Processing Loop

# %%
for i, (adp, ctd) in enumerate(zip(adps, ctds)):
    print(f"Loading {adp}")
    print(f"Loading {ctd}")
    # Load data
    a1 = xr.open_dataset(adp)
    a1 = a1.set_coords(["lon", "lat"])
    # Raw time is presumably POSIX seconds (per the helper's name — confirm
    # against utils); convert to numpy datetime64.
    a1["time"] = utils.POSIX_to_datetime(a1.time.values).astype(np.datetime64)
    # UTM easting/northing; zone number/letter are discarded.
    x, y, *_ = utm.from_latlon(a1.lat, a1.lon)
    a1 = a1.assign_coords({"x": x, "y": y})
    c1 = xr.open_dataset(ctd)
    # Some CTD files already carry datetime64 time, which makes the helper
    # raise TypeError — treat that as "already converted" and move on.
    try:
        c1["time"] = utils.POSIX_to_datetime(c1.time.values).astype(
            np.datetime64)
    except TypeError:
        print("No need to convert time")
    # Estimate CTD depth
    # gsw.z_from_p: height from sea pressure (negative below the surface).
    # NOTE(review): this assignment is truncated at the end of this chunk;
    # the tuple continues beyond the visible source.
    c1["z"] = (
        c1.p.dims,
        gsw.z_from_p(c1.p, c1.lat).data,
# # First run the R notebook: `test_R_adp.ipynb`
# # This generates the netcdf files.

# %%
import xarray as xr
import matplotlib.pyplot as plt
import numpy as np

import utils
import scipy.stats as stats
from scipy.ndimage import label, gaussian_filter

# %%
# Times arrive undecoded; convert POSIX seconds to datetime64 ourselves.
enu = xr.open_dataset("enu.nc", decode_times=False)
enu["time"] = utils.POSIX_to_datetime(enu.time.values).astype(np.datetime64)

# %%
# Shared colour scales: one for horizontal velocity, one for vertical.
hvel_kwargs = {"vmin": -0.3, "vmax": 0.3, "cmap": "coolwarm"}
vvel_kwargs = {"vmin": -0.1, "vmax": 0.1, "cmap": "coolwarm"}

fig, axs = plt.subplots(4, 1, sharex=True, figsize=(22, 14))
# u, v, w panels, each with the appropriate velocity colour scale.
panels = [(enu.u, hvel_kwargs), (enu.v, hvel_kwargs), (enu.w, vvel_kwargs)]
for axis, (field, style) in zip(axs, panels):
    field.plot(ax=axis, **style)
# Error-velocity magnitude goes in the bottom panel.
np.abs(enu.err).plot(ax=axs[3], vmin=0, vmax=0.2)
# Suppress per-panel x labels; the shared axis makes them redundant.
for axis in axs:
    axis.set_xlabel("")

# %%