import matplotlib.pyplot as plt
import scipy.stats as stats
from matplotlib.lines import Line2D

# Local RTOFS data location and plot output directory (developer machine).
url = '/Users/mikesmith/Documents/github/rucool/hurricanes/data/rtofs/'
save_dir = '/Users/mikesmith/Documents/github/rucool/hurricanes/plots/argo_profile_model_comparisons/'

gofs_url = 'https://tds.hycom.org/thredds/dodsC/GLBy0.08/expt_93.0'

# Argo surfacing search window and plot settings.
days_to_check_for_argo_surfacing = 15
days_pre_surfacing = 1
days_post_surfacing = 2
dpi = 150
figsize = (16, 12)

regions = limits_regions('rtofs', ['gom', 'carib'])

argo_search_end = dt.date.today()
argo_search_start = argo_search_end - dt.timedelta(
    days=days_to_check_for_argo_surfacing)

# Dates in ascending order, from (search end - window - 15 extra days)
# through one day past the search end.
date_list = [
    argo_search_end + dt.timedelta(days=offset)
    for offset in range(-(days_to_check_for_argo_surfacing + 15), 2)
]

# Line styles and transparencies used to distinguish successive profiles.
line = ('-', '--', '-.', ':')
alpha = (1, .75, .5, .25)
# --- Example (Esempio) n. 2 --- (scraped-source separator; not code)
# Output directory for transect plots (developer machine path).
save_dir = '/Users/mikesmith/Documents/github/rucool/hurricanes/plots/transects/'
# save_dir = '/www/web/rucool/hurricane/model_comparisons/transects/gofs/'
model = 'gofs'
# Spacing (degrees) between interpolated points along each transect.
transect_spacing = 0.05 / 20

# How many days back from today to cover.
days = 1

# Today's date; 'ranges' below spans (today - days) through tomorrow.
today = dt.date.today()
# 6-hourly timestamps over the plotting window, endpoints inclusive.
ranges = pd.date_range(today - dt.timedelta(days=days),
                       today + dt.timedelta(days=1),
                       freq='6H')
# pd.date_range(yesterday, today, periods=4)

# NOTE(review): this rebinds the name 'transects' from the helper function to
# the dict it returns, shadowing the callable for the rest of the script.
transects = transects()
limits = limits_regions()

# Open the GOFS GLBy0.08 expt_93.0 dataset lazily over OPeNDAP; 'tau'
# (forecast lead time) is dropped so only the valid-time coordinate is used.
with xr.open_dataset('https://tds.hycom.org/thredds/dodsC/GLBy0.08/expt_93.0',
                     drop_variables='tau') as ds:
    # Normalize variable/coordinate names to this project's conventions.
    ds = rename_model_variables(ds, model)
    # Keep only the requested timestamps the model actually covers.
    dst = ds.sel(time=ranges[ranges < pd.to_datetime(ds.time.max().data)])

    for transect in transects.keys():
        # Human-readable title, e.g. 'some_transect' -> 'Some Transect'.
        transect_str = " ".join(transect.split("_")).title()
        # Transect endpoints: extent = [lon1, lat1, lon2, lat2].
        x1 = transects[transect]['extent'][0]
        y1 = transects[transect]['extent'][1]
        x2 = transects[transect]['extent'][2]
        y2 = transects[transect]['extent'][3]

        # Make directories
        # NOTE(review): the statement below is truncated in this chunk — the
        # remaining os.path.join arguments are not visible here.
        save_dir_transect_temp = os.path.join(save_dir, transect, model,
# Testing Inputs
# url = '/Users/mikesmith/Documents/github/rucool/hurricanes/data/rtofs/'
# save_dir = '/Users/mikesmith/Documents/github/rucool/hurricanes/plots/surface_maps_comparison'
# bathymetry = '/Users/mikesmith/Documents/github/rucool/hurricanes/data/bathymetry/GEBCO_2014_2D_-100.0_0.0_-10.0_50.0.nc'

# 0 -> plot today only.
days = 0
# Map projection for drawing; data coordinates are lon/lat (PlateCarree).
projection = dict(map=ccrs.Mercator(), data=ccrs.PlateCarree())
argo = True
gliders = True
dpi = 150
search_hours = 24 * 5  # Hours back from timestamp to search for drifters/gliders

gofs_url = 'https://tds.hycom.org/thredds/dodsC/GLBy0.08/expt_93.0'

regions = limits_regions(
    'gofs', ['yucatan', 'usvi', 'mab', 'gom', 'carib', 'wind', 'sab'])

# initialize keyword arguments for map plot
kwargs = dict()
kwargs['transform'] = projection
kwargs['save_dir'] = save_dir
kwargs['dpi'] = dpi

# NOTE(review): 'bathymetry' is only assigned in the commented-out testing
# inputs above — as written this raises NameError unless it is defined
# earlier in the full file; confirm against the original script.
if bathymetry:
    bathy = xr.open_dataset(bathymetry)

# Get today and yesterday dates
today = dt.date.today()

# days + 1 dates, today first, counting backwards.
date_list = [today - dt.timedelta(days=x) for x in range(days + 1)]
# NOTE(review): the statement below is truncated in this chunk — its
# completion is not visible here.
rtofs_files = [
# url = '/Users/mikesmith/Documents/github/rucool/hurricanes/data/rtofs/'
# save_dir = '/Users/mikesmith/Documents/github/rucool/hurricanes/plots/surface_maps/'
# bathymetry = '/Users/mikesmith/Documents/github/rucool/hurricanes/data/bathymetry/GEBCO_2014_2D_-100.0_0.0_-10.0_50.0.nc'

# Production (server) paths.
url = '/home/hurricaneadm/data/rtofs/'
save_dir = '/www/web/rucool/hurricane/model_comparisons/realtime/surface_maps/'
bathymetry = '/home/hurricaneadm/data/bathymetry/GEBCO_2014_2D_-100.0_0.0_-10.0_50.0.nc'

# Number of days of model output to plot.
days = 2
# Map projection for drawing; data coordinates are lon/lat (PlateCarree).
projection = dict(map=ccrs.Mercator(), data=ccrs.PlateCarree())
argo = False
gliders = False
dpi = 150
search_hours = 24 * 5  # Hours back from timestamp to search for drifters/gliders

regions = limits_regions('rtofs', ['north_atlantic'])

# initialize keyword arguments for map plot
kwargs = dict()
kwargs['model'] = 'rtofs'
kwargs['transform'] = projection
kwargs['save_dir'] = save_dir
kwargs['dpi'] = dpi

# Non-empty bathymetry path -> load the GEBCO grid once for reuse.
if bathymetry:
    bathy = xr.open_dataset(bathymetry)

# Get today and yesterday dates
today = dt.date.today()
# NOTE(review): range(days) yields 'days' dates (today .. today-(days-1));
# other fragments in this file use range(days + 1) — confirm intent.
date_list = [today - dt.timedelta(days=x) for x in range(days)]
# NOTE(review): the statement below is truncated in this chunk — its
# completion is not visible here.
rtofs_files = [
import cmocean
import matplotlib.patheffects as path_effects
# from scripts.harvest.grab_cmems import copernicusmarine_datastore as grab_cmems

# Server-side RTOFS data location and plot output directory.
url = '/home/hurricaneadm/data/rtofs/'
save_dir = '/www/web/rucool/hurricane/model_comparisons/realtime/argo_profile_to_model_comparisons/'

# url = '/Users/mikesmith/Documents/github/rucool/hurricanes/data/rtofs/'
# save_dir = '/Users/mikesmith/Documents/github/rucool/hurricanes/plots/argo_profile_model_comparisons/'
# user = '******'
# pw = 'password'

# Number of past days of RTOFS output to gather (in addition to today).
days = 8
dpi = 150

regions = limits_regions('rtofs', ['mab', 'gom', 'carib', 'wind', 'sab'])

# initialize keyword arguments for map plot
kwargs = dict()
kwargs['model'] = 'rtofs'
kwargs['save_dir'] = save_dir
kwargs['dpi'] = dpi

# Today plus the previous `days` dates, most recent first.
date_list = [dt.date.today() - dt.timedelta(days=n) for n in range(days + 1)]

# Every per-cycle RTOFS netCDF under rtofs.YYYYMMDD/ for those dates,
# flattened into one sorted list of paths.
rtofs_files = []
for day in date_list:
    rtofs_files.extend(
        glob(os.path.join(url, day.strftime('rtofs.%Y%m%d'), '*.nc')))
rtofs_files = sorted(rtofs_files)
import os
import numpy as np
from scripts.harvest import grab_cmems
from hurricanes.limits import limits_regions

# COPERNICUS MARINE ENVIRONMENT MONITORING SERVICE (CMEMS)
# ncCOP_global = '/home/lgarzio/cmems/global-analysis-forecast-phy-001-024_1565877333169.nc'  # on server
# ncCOP_global = '/Users/garzio/Documents/rucool/hurricane_glider_project/CMEMS/global-analysis-forecast-phy-001-024_1565877333169.nc'  # on local machine

# Output root and download depth limit (m); credentials redacted in this copy.
out = '/Users/garzio/Documents/rucool/hurricane_glider_project/CMEMS'
maxdepth = 500
username = '******'
password = '******'
# Midnight today (naive datetime) and the following midnight.
today = dt.datetime.combine(dt.date.today(), dt.datetime.min.time())
tomorrow = today + dt.timedelta(days=1)
regions = limits_regions('gofs')

# add Atlantic Ocean limits
regions['Atlantic'] = dict()
regions['Atlantic'].update(lonlat=[-60, -10, 8, 45])
regions['Atlantic'].update(code='atl')
regions['Gulf of Mexico'].update(code='gom')
regions['South Atlantic Bight'].update(code='sab')
regions['Mid Atlantic Bight'].update(code='mab')
regions['Caribbean'].update(code='carib')

# Daily output folder, e.g. <out>/20210917.
outdir = os.path.join(out, today.strftime('%Y%m%d'))
os.makedirs(outdir, exist_ok=True)

# NOTE(review): this loop is truncated in this chunk.  Also, the regions are
# given a 'code' key above, not 'name' — confirm limits_regions() supplies a
# 'name' key, otherwise this comparison raises KeyError or never matches.
for region in regions.items():
    if region[1]['name'] == 'carib':
# --- Example (Esempio) n. 7 --- (scraped-source separator; not code)
def forecast_storm_region(forecast_track):
    """Identify the pre-defined plotting region(s) a forecast track crosses.

    Parameters
    ----------
    forecast_track : dict
        Mapping with equal-length 'lon' and 'lat' sequences of forecast
        positions (decimal degrees, longitudes negative west).

    Returns
    -------
    dict
        region name -> region definition ('lonlat' extent, 'salinity' and
        'temperature' plot limits, short 'code').  When no region matches
        and the track extends east of 52W, a fallback 'Atlantic' entry is
        returned; when more than one region matches, a synthetic
        'full_track' entry spanning all matched regions is added.
    """
    regions = limits_regions(regions=['mab', 'gom', 'carib', 'sab'])

    # add shortened codes for the regions
    regions['Gulf of Mexico'].update(code='gom')
    regions['South Atlantic Bight'].update(code='sab')
    regions['Mid Atlantic Bight'].update(code='mab')
    regions['Caribbean'].update(code='carib')

    storm_region = dict()
    salinity_lims = {'values': [], 'interval': []}
    temp_lims = {'values': [], 'interval': []}
    lons = []
    lats = []
    for name, region in regions.items():
        lon_min, lon_max, lat_min, lat_max = region['lonlat'][0:4]
        # check if the forecast track passes through this region
        for ilon, ilat in zip(forecast_track['lon'], forecast_track['lat']):
            if lon_min <= ilon <= lon_max and lat_min <= ilat <= lat_max:
                storm_region[name] = region
                # Record this region's limits/extent so a combined
                # 'full_track' region can be derived below.
                salinity_lims['values'].append(
                    region['salinity'][0]['limits'][0:2])
                salinity_lims['interval'].append(
                    region['salinity'][0]['limits'][-1])
                temp_lims['values'].append(
                    region['temperature'][0]['limits'][0:2])
                temp_lims['interval'].append(
                    region['temperature'][0]['limits'][-1])
                lons.append(region['lonlat'][0:2])
                lats.append(region['lonlat'][2:4])
                # One intersecting point is enough; the original appended
                # the same limits once per in-region track point, which was
                # redundant (the min/max reductions below are unaffected).
                break

    if len(storm_region) < 1:
        if np.max(forecast_track['lon']) > -52:
            # Atlantic Ocean limits
            extent = [-60, -10, 8, 45]
            salinity = [dict(depth=0, limits=[34, 37])]
            sea_water_temperature = [dict(depth=0, limits=[22, 29])]

            storm_region['Atlantic'] = dict(
                code='atl',
                lonlat=extent,
                salinity=salinity,
                temperature=sea_water_temperature,
            )

    if len(storm_region) > 1:
        # set limits for a map of the entire track, spanning every matched
        # region's extent and the tightest shared color limits
        storm_region['full_track'] = {
            'lonlat': [
                np.nanmin(lons),
                np.nanmax(lons),
                np.nanmin(lats),
                np.nanmax(lats)
            ],
            'salinity': [{
                'depth': 0,
                'limits': [
                    np.nanmin(salinity_lims['values']),
                    np.nanmax(salinity_lims['values']),
                    np.nanmin(salinity_lims['interval'])
                ]
            }],
            'temperature': [{
                'depth': 0,
                'limits': [
                    np.nanmin(temp_lims['values']),
                    np.nanmax(temp_lims['values']),
                    np.nanmin(temp_lims['interval'])
                ]
            }],
            'code': 'full'
        }

    return storm_region
# --- Example (Esempio) n. 8 --- (scraped-source separator; not code)
# CMEMS credentials (redacted in this copy).
# user = '******'
# pw='MariaCMEMS2018'

gofs_url = 'https://tds.hycom.org/thredds/dodsC/GLBy0.08/expt_93.0'

# Argo float WMO id(s) to compare against the models.
# floats = ['4903248', '4903253', '4903259', '6902855', '4902916']
floats = ['4903356']
t1 = dt.datetime(2021, 9, 17)
days_to_check_for_argo_surfacing = 15
days_pre_surfacing = 1
days_post_surfacing = 2
depth = 400
dpi = 150
figsize = (20, 14)

regions = limits_regions('rtofs', ['gom'])

argo_search_end = t1 + dt.timedelta(days=1)
argo_search_start = argo_search_end - dt.timedelta(
    days=days_to_check_for_argo_surfacing)

# Dates in ascending order, from (search end - window - 20 extra days)
# through one day past the search end.
date_list = [
    argo_search_end + dt.timedelta(days=step)
    for step in range(-(days_to_check_for_argo_surfacing + 20), 2)
]

# Line styles and transparencies used to distinguish successive profiles.
line = ('-', '--', '-.', ':')
alpha = (1, .75, .5, .25)