import mod_HYCOM_utils as MHU
import mod_reading as mr       # assumed module name, based on the mr/MR alias used below (file_list, nc_getinfo)
import fns_plotting as Fplt    # assumed module name, based on the Fplt/FP alias used below (start_HYCOM_map)
import datetime as DTM
from matplotlib import pyplot as plt
from matplotlib import gridspec as GrdSpc
from mpl_toolkits.axes_grid1 import make_axes_locatable
import os, sys
import numpy as np

PLOT_MOD = False
PLOT_OBS = False
year = 2006

# model data
print('Load model daily files...\n')
ddir = '/work/timill/RealTime_Models/TP4a0.12/expt_01.4/data'
fli1 = mr.file_list(ddir, 'DAILY', '.a')

# monthly averaged conc
print('Load OSISAF monthly files...\n')
osidir = '/work/shared/nersc/OSISAF/'
osipat = 'osisaf-nh_aggregated_ice_concentration_nh_polstere-100'
fli2 = mr.file_list(osidir, osipat, '.nc')

# basemap
gs = GrdSpc.GridSpec(1, 1)
bmap = Fplt.start_HYCOM_map('Arctic')
fig = plt.figure()
ax = fig.add_subplot(gs[0, 0])

dto0 = DTM.datetime(year, 1, 1)
dto0_, idx0 = fli2.nearestDate(dto0)
# =========================================================================
# Example 2
    fobj = MR.nc_getinfo(ncfil)
    vobj = 'fice'
    idx = 2
else:
    # file_object_list
    pattern = 'archv'
    if 1:
        # list of binary files
        ddir = FCdir + '/bin'
        ext = '.a'
    elif 1:
        # list of netcdf files
        ddir = FCdir + '/netcdf'
        ext = '.nc'

    fobj = MR.file_list(ddir, pattern, ext)
    vobj = 'fice'

# 2nd variable to plot: ice velocity as a quiver plot
vobj2 = MR.make_plot_options('uice', vec_opt=3, layer=0, conv_fac=1,
                             wave_mask=False, ice_mask=True, dir_from=True)
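# NB: parameter meanings are assumed from their names and the comment above:
# vec_opt=3 appears to request a quiver plot, ice_mask masks ice-free cells,
# and dir_from gives the direction the ice is moving from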

# =========================================================================
if 0:
    # use imshow for a fast plot
    fobj.imshow(vobj, time_index=idx, date_label=2)
elif 0:
    # use plot_var for a projected plot
    fobj.plot_var(vobj, time_index=idx, date_label=2)
elif 0:
    # plot pair of variables
# =========================================================================
# Example 3
dto = DTM.datetime.strptime(cdate + "12", fmt + "%H")

print('\nObservation date')
print(dto)
print('\n')

FCdays = 6  # length of the ice-only forecasts, in days
for ndays in range(FCdays):
    # 0,...,5 (days which have daily average files)
    fcdate = dto - DTM.timedelta(ndays + .5)
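    # 12:00 obs time minus (ndays + 0.5) days gives 00:00 on the forecast start
    # date ndays before the observation date (assumed interpretation)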
    fcdir = FCdir + '/' + fcdate.strftime(fmt) + '/bin'
    # print(fcdate)
    # print(fcdir)

    if os.path.exists(fcdir):
        hi = mr.file_list(fcdir, 'DAILY', '.a')
        # for i,DT in enumerate(hi.datetimes):
        #    print(i)
        #    print(DT)

        if dto in hi.datetimes:
            # ========================================================
            # find index corresponding to observation date
            idx = hi.datetimes.index(dto)

            # call the AOD routine
            odir = outdir + '/FC' + str(ndays) + 'days'
            if not os.path.exists(odir):
                # only run if the output sub-dir doesn't exist yet
                # (don't overwrite results from an earlier run)
                hi.areas_of_disagreement(time_index=idx,\
                   obs_type='OSISAF',obs_path=None,\
# =========================================================================
# Example 4
def average_area_model(cdate, vertices, fcdir0, outdir='.'):
    """
    Average MIZ area for a given date (cdate: 'YYYYMMDD' string).
    Use binary files since they are more likely to be there.
    Returns None if nothing is there.
    """

    fcdir = fcdir0 + '/' + cdate + '/'
    if not os.path.exists(fcdir):
        return
    lst = os.listdir(fcdir)
    if cdate in lst:
        # check if need to add another cdate
        fcdir += cdate + '/'

    lst = os.listdir(fcdir)
    if 'bin' in lst:
        bindir = fcdir + 'bin'
    else:
        bindir = fcdir + 'binaries'

    # make file_list object from binary files
    # - treat in same way as multi-record netcdf
    fli = mr.file_list(bindir, 'archv_wav', '.a')
    if fli.number_of_time_records == 0:
        return

    # loop over 6-h intervals:
    daily_areas = []
    for hr in range(0, 24, 6):
        dto = datetime.datetime(int(cdate[:4]), int(cdate[4:6]),
                                int(cdate[6:8]), hr)

        #print(dto)
        if dto in fli.datetimes:
            idx = fli.datetimes.index(dto)
            out = fli.MIZmap(no_width=True,
                             vertices=vertices,
                             time_index=idx,
                             outdir=outdir)
            # out   = fli.MIZmap(vertices=vertices,time_index=idx,outdir=outdir)
            # sys.exit()
            #
            tfil = out[list(out.keys())[0]]  # first output file (list() for Python 3 compatibility)
            pil = mc.single_file(tfil)

            tot_area = 0
            for pio in pil.poly_info_objects:
                # pio.area       - approximate area (Euclidean, after projection using NP as center)
                # pio.ll_coords  - list of coordinates of the polygon boundary
                lon, lat = np.array(pio.ll_coords).transpose()
                area = GS.area_polygon_ellipsoid(lon, lat)
                # print(area)
                tot_area += area

            print('\nTot area: ' + str(tot_area) + '\n')
            daily_areas.append(tot_area)

    if len(daily_areas) == 0:
        return
    else:
        # take daily average
        TotArea = np.mean(daily_areas)
        print('\nAvg tot area: ' + str(TotArea) + '\n')
        return TotArea
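
# Minimal usage sketch (the date, vertices and path below are placeholders, not from the source):
#   vertices = [(-20., 74.), (15., 74.), (15., 80.), (-20., 80.)]  # lon/lat corners of a test region
#   area = average_area_model('20150302', vertices, '/path/to/forecasts', outdir='.')
#   if area is not None:
#       print('daily-mean MIZ area [m^2]: ' + str(area))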
# =========================================================================
# Example 5
ODLmap = pyproj.Proj(ODLsrs)  #same as srs="+init=EPSG:3413"
bmap = FP.start_HYCOM_map('Arctic')

if 0:
    # ease2 projection (North)
    # cf https://nsidc.org/data/ease/versions.html
    ease2 = pyproj.Proj("+init=EPSG:6931")
    # ease1 = pyproj.Proj("+init=EPSG:3408") # works

# directories
if 0:
    #hexagon
    odir = '/work/shared/nersc/msc/cersat/'  # path to observations
    mdir = '/work/timill/RealTime_Models/TP4a0.12/expt_01.5/data'  # path to model data
    flist = mr.file_list(mdir, 'DAILY', '.a')
else:
    mdir = '/mnt/sda1/work/Model-outputs/thickness_comp_ifremer/TP4/2015_060'
    odir = '/mnt/sda1/work/Model-outputs/thickness_comp_ifremer/cersat'
    flist = mr.file_list(mdir, 'DAILY', '.a', gridpath=mdir + '/../topo')

if 1:
    olist = ['cs2_smos_ice_thickness_20150302_20150308.nc']
else:
    olist = os.listdir(odir)

# mlon,mlat   = flist.get_corners()
Mgrid = flist.create_ESMF_grid()
Mfld = create_field(Mgrid, 'hice')
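# ESMF grid and field holding the model ice thickness ('hice'), presumably for
# regridding onto the observation grid (create_field is a helper not shown here)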

ofil = olist[-1]
# =========================================================================
# Example 6
        ss = str(its) + ' | ' + str(len(wlist))
        fid.write(ss)
        fid.close()

        # place to save results
        outdir = mdir2 + '/' + wdir + '/analysis'
        if not os.path.exists(outdir):
            os.mkdir(outdir)

        outdir += '/MIZmap'
        if not os.path.exists(outdir):
            os.mkdir(outdir)
        else:
            continue

        flist = MR.file_list(mdir2 + '/' + wdir + '/' + subdir, 'archv_wav',
                             '.a')
        step = 0.5
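        # step is presumably the interval between processed records, in days (i.e. 12-hourly)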
        start_date = flist.datetimes[2].strftime('%Y%m%dT%H%M%SZ')
        end_date = flist.datetimes[-1].strftime('%Y%m%dT%H%M%SZ')
        # end_date    = flist.datetimes[4].strftime('%Y%m%dT%H%M%SZ')

        regions = ['gre', 'bar', 'lab', 'beau']
        out = flist.MIZmap_all(end_date=end_date, start_date=start_date,
                               step=step, outdir=outdir, regions=regions,
                               plotting=plotting)

        if 1:
            # do 1 week at a time (can do all in parallel)
            print('\nExit python\n')
            sys.exit()

print('\nExit python\n')
# =========================================================================
# Example 7
        ss = str(its) + ' | ' + str(len(wlist))
        fid.write(ss)
        fid.close()

        # place to save results
        outdir = mdir2 + '/' + wdir + '/analysis'
        if not os.path.exists(outdir):
            os.mkdir(outdir)
        else:
            continue

        outdir += '/AODs'
        if not os.path.exists(outdir):
            os.mkdir(outdir)

        flist = MR.file_list(mdir2 + '/' + wdir + '/' + subdir, 'DAILY', '.a')

        regions = ['gre', 'bar', 'lab', 'beau']
        obs_type = 'OSISAF'
        out = flist.AODs_all(outdir=outdir,
                             regions=regions,
                             obs_type=obs_type,
                             plotting=plotting)

        # ============================================
        # weekly average of conc anomalies
        cdates = []
        nlist = []
        wt = 1. / 7
        anom_av = 0.
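        # wt: weight of each daily anomaly in the 7-day mean; anom_av will
        # accumulate the weighted anomalies (assumed from the variable names)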
# =========================================================================
# Example 8
if len(opts) == 1:
    ddir = rootdir + '/' + ddirs[opts[0]]
else:
    print('Usage: ' + me + ' run_name')
    print('options for run_name')

    for s in ddirs:
        d_dir = rootdir + '/' + ddirs[s]
        print(s + ' = ' + d_dir)

    print('\n')
    raise ValueError('\n\nNot enough inputs to ' + me + '\n')

# ddir  = sys.argv[1]
print('sort files in ' + ddir + '...')
fli = mr.file_list(ddir, 'TP4archv_wav', '.a')
# print(fli.datetimes)

if 0:
    # just do a few times for testing
    start_date = DTM.datetime.strptime('20050831', '%Y%m%d')  # +TDEL(0.5)
    end_date = DTM.datetime.strptime('20050901', '%Y%m%d')
    plotting = True
    show = False
else:
    # do all times
    start_date = None
    end_date = None
    plotting = False
    show = False
if not os.path.exists(outdir):
   os.mkdir(outdir)

hmax     = 4
po       = FP.plot_object()
shorts   = ['BackThick'              ,'CS2'                   ,'SMOS'             ,'AnThick'              ,'AnThickErr']
clabs    = ['Background Thickness, m','Cryosat-2 thickness, m','SMOS thickness, m','Analysis Thickness, m','Analysis Thickness Error, m']
vlist    = ['background_thickness'   ,'cs2_thickness'         ,'smos_thickness'   ,'analysis_thickness'   ,'analysis_thickness_err']
ts_data  = {'RMSE_both_ice':[],'Bias_both_ice':[],\
            'RMSE_either_ice':[],'Bias_either_ice':[]}
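# shorts/clabs/vlist are parallel lists: short names, colorbar labels and netcdf
# variable names for the fields to compare; ts_data will (presumably) collect
# time series of thickness RMSE/bias over points where both, or either, of the
# model and observations report ice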

for i,ofil in enumerate(olist):
   # =======================================================================
   # get arrays for later
   print(mdir+'/'+Mdir[i])
   flist = mr.file_list(mdir+'/'+Mdir[i],'DAILY','.a',gridpath=gridpath)
   nci   = mr.nc_getinfo(odir+'/'+ofil,lonlat_file=odir+'/'+lonlat_file)
   # nci.plot_var('analysis_thickness',clim=[0,hmax])
   DOCB  = False
   if i==0:
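       # lon/lat and projected coordinates only need to be computed once,
       # since the obs and model grids do not change between files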
       olon,olat    = nci.get_lonlat()
       oX,oY        = ODLmap(olon,olat)
       mlon,mlat    = flist.get_lonlat()
       mX,mY        = ODLmap(mlon,mlat)
       DOCB       = True
   hobs = nci.get_var('analysis_thickness')
   # =======================================================================


   # =======================================================================
   if PLOT_EG: