def get_argo_data(extent, t0, t1):
    """Return the most recent position of each ARGO float in a region.

    Queries active ARGO floats inside ``extent`` between times ``t0`` and
    ``t1``, keeps only the latest profile per platform, and packs the
    result into lightweight named tuples.

    Parameters
    ----------
    extent : sequence
        Bounding box passed through to ``active_argo_floats``
        (presumably [lonmin, lonmax, latmin, latmax] — confirm against callee).
    t0, t1 : datetime-like
        Start and end of the search window.

    Returns
    -------
    list of Argo(name, time, lon, lat) namedtuples; empty list when the
    query returns no data.
    """
    # Written by Mike Smith
    Argo = namedtuple('Argo', ['name', 'time', 'lon', 'lat'])
    argo_floats = []

    data = active_argo_floats(extent, t0, t1)
    if not data.empty:
        # One row per platform: the profile with the latest timestamp.
        most_recent = data.loc[data.groupby('platform_number')
                               ['time (UTC)'].idxmax()]
        # Renamed from `float`, which shadowed the builtin.
        # NOTE(review): `_2`, `_4`, `_5` are positional itertuples fields —
        # they assume a fixed column order in the frame; verify against
        # the columns returned by active_argo_floats.
        for row in most_recent.itertuples():
            argo_floats.append(Argo(
                row.platform_number,
                pd.to_datetime(row._2).strftime('%Y-%m-%dT%H:%M:%S'),
                row._4,
                row._5,
            ))
    return argo_floats
# Open every RTOFS output file as one lazily-loaded dataset and normalize
# the coordinate names to the lon/lat/time/depth convention used elsewhere.
rtofs = xr.open_mfdataset(rtofs_files)
rtofs = rtofs.rename({
    'Longitude': 'lon',
    'Latitude': 'lat',
    'MT': 'time',
    'Depth': 'depth'
})

# for r in regions:
#     extent = regions[r]['lonlat']
floats = ['4903354', '4902350']
extent = regions['Gulf of Mexico']['lonlat']
# NOTE(review): floats[1] passes only the single ID '4902350' to the query,
# not the whole list — confirm this is intentional.
floats = active_argo_floats(bbox=extent,
                            time_start=argo_search_start,
                            time_end=argo_search_end,
                            floats=floats[1])

# Iterate per platform, then per profile time, filtering each profile.
for argo in floats.platform_number.unique():
    temp = floats[floats['platform_number'] == argo]
    for t_float in temp['time (UTC)'].unique():
        temp_float_time = temp[temp['time (UTC)'] == t_float]
        # Drop salinity values more than 3 standard deviations from the
        # profile mean.
        filtered = temp_float_time[(np.abs(
            stats.zscore(temp_float_time['psal (PSU)'])) < 3)]  # filter salinity
        # Last reported position of this profile.
        x = filtered['longitude (degrees_east)'].unique()[-1]
        y = filtered['latitude (degrees_north)'].unique()[-1]
        t0 = dt.datetime(  # NOTE(review): source is truncated mid-call here
# NOTE(review): continuation of a DataFrame construction that begins before
# this chunk — `columns=` closes that call.
columns=['lonmin', 'lonmax', 'latmin', 'latmax'])

# One bounding box that covers every regional extent, so ARGO/glider data
# can be downloaded with a single query instead of one per region.
global_extent = [
    extent_df.lonmin.min(), extent_df.lonmax.max(),
    extent_df.latmin.min(), extent_df.latmax.max()
]
# Pad the search window backwards by `search_hours` before the first time.
search_start = ranges[0] - dt.timedelta(hours=search_hours)
search_end = ranges[-1]

if argo:
    print(
        f'Download ARGO Data within {global_extent} between {search_start} and {search_end}'
    )
    argo_data = active_argo_floats(global_extent, search_start, search_end)
else:
    # Empty frame keeps downstream code uniform when ARGO is disabled.
    argo_data = pd.DataFrame()

if gliders:
    print(
        f'Downloading glider data within {global_extent} between {search_start} and {search_end}'
    )
    glider_data = active_gliders(global_extent, search_start, search_end)
else:
    glider_data = pd.DataFrame()

# Loop through regions
for region in regions.items():
    # region is a (name, config) tuple.
    extent = region[1]['lonlat']
    print(f'Region: {region[0]}, Extent: {extent}')
# NOTE(review): continuation of a rename mapping begun before this chunk.
'Depth': 'depth'
})
lat = ds.lat.data
lon = ds.lon.data
# Search window: `search_hours` hours ending at the dataset's first time step.
t0 = pd.to_datetime(ds.time.data[0] - np.timedelta64(search_hours, 'h'))
t1 = pd.to_datetime(ds.time.data[0])
kwargs['t0'] = t0

# Loop through regions
for region in regions.items():
    # region is a (name, config) tuple.
    extent = region[1]['lonlat']
    if argo:
        kwargs['argo'] = active_argo_floats(extent, t0, t1)
    if gliders:
        kwargs['gliders'] = active_gliders(extent, t0, t1)
    if bathy:
        # Bathymetry subset padded by 1 degree on every side of the extent.
        kwargs['bathy'] = bathy.sel(
            lon=slice(extent[0] - 1, extent[1] + 1),
            lat=slice(extent[2] - 1, extent[3] + 1))
    if region[1]['currents']['bool']:
        kwargs['currents'] = region[1]['currents']
    # kwargs['transform'] = projection
    # Widen the extent by 1 degree in each direction for plotting.
    extent = np.add(extent, [-1, 1, -1, 1]).tolist()
t1 = pd.to_datetime(now) # cmems_salinity = grab_cmems('global-analysis-forecast-phy-001-024-3dinst-so', user, pw) # cmems_temp = grab_cmems('global-analysis-forecast-phy-001-024-3dinst-thetao', user, pw) # cmems = xr.merge([cmems_salinity, cmems_temp]) # cmems = cmems.sel(depth=slice(0, 400), time=slice(t0, t1),) # Loop through regions for region, values in regions.items(): extent = values['lonlat'] print(f'Region: {region}, Extent: {extent}') temp_save_dir = os.path.join(save_dir, '_'.join(region.split(' ')).lower()) os.makedirs(temp_save_dir, exist_ok=True) floats = active_argo_floats(extent, t0, t1) temp_df = floats.loc[floats.groupby('platform_number') ['time (UTC)'].idxmax()] region_limits = regions[region] vargs = {} vargs['vmin'] = values['temperature'][0]['limits'][0] vargs['vmax'] = values['temperature'][0]['limits'][1] vargs['transform'] = ccrs.PlateCarree() vargs['cmap'] = cmocean.cm.thermal vargs['levels'] = np.arange(vargs['vmin'], vargs['vmax'], values['temperature'][0]['limits'][2]) vargs['argo'] = temp_df
# NOTE(review): closes a rename/construction begun before this chunk.
})
# Restrict RTOFS to the upper water column (0..depth).
rtofs = rtofs.sel(depth=slice(0, depth))
# cmems_salinity = grab_cmems('global-analysis-forecast-phy-001-024-3dinst-so', user, pw)
# cmems_temp = grab_cmems('global-analysis-forecast-phy-001-024-3dinst-thetao', user, pw)
# cmems = xr.merge([cmems_salinity, cmems_temp])
# cmems = cmems.sel(depth=slice(0, 400))

# floats = ['4902350', '6902851', '4902915', '4901719', '4903356', '4903238', '4903238', '4901719']
# floats = ['4901719']
# floats = ['4901716', '6902851', '4902351', '4902915', '4902915', '4902114', '4902441', '4902928']
# floats = ['4903248', '4903253', '4903259', '6902855', '4902916']

# Query each requested float ID individually over the search window.
for f in floats:
    argos = active_argo_floats(time_start=argo_search_start,
                               time_end=argo_search_end,
                               floats=f)
    if not argos.empty:
        for argo in argos.platform_number.unique():
            temp = argos[argos['platform_number'] == argo]
            for t_float in temp['time (UTC)'].unique():
                temp_float_time = temp[temp['time (UTC)'] == t_float]
                # Drop salinity values more than 3 standard deviations out.
                filtered = temp_float_time[(np.abs(
                    stats.zscore(temp_float_time['psal (PSU)'])) < 3)]  # filter salinity
                # Keep only the upper 400 dbar of the profile.
                filtered = filtered[filtered['pres (decibar)'] <= 400]
                # Last reported position of this profile.
                x = filtered['longitude (degrees_east)'].unique()[-1]
                y = filtered['latitude (degrees_north)'].unique()[-1]