Example no. 1
def main(intopo):
    # Read plon,plat
    gfile = abf.ABFileGrid("regional.grid", "r")
    plon = gfile.read_field("plon")
    plat = gfile.read_field("plat")
    gfile.close()

    # Read input bathymetry
    bfile = abf.ABFileBathy(intopo,
                            "r",
                            idm=gfile.idm,
                            jdm=gfile.jdm,
                            mask=True)
    in_depth_m = bfile.read_field("depth")
    bfile.close()

    # Print to CICE mask files
    kmt = numpy.where(~in_depth_m.mask, 1., 0.)
    modeltools.cice.io.write_netcdf_kmt(kmt, "cice_kmt.nc")
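A minimal command-line wrapper for the routine above might look like the sketch below; the positional "intopo" argument and the argparse setup are assumptions for illustration, not part of the original script.

# Hypothetical driver, assuming main() is defined in the same module as above.
if __name__ == "__main__":
    import argparse
    parser = argparse.ArgumentParser(
        description="Write a CICE kmt (land/ocean) mask from a HYCOM bathymetry")
    parser.add_argument("intopo", help="HYCOM bathymetry file (.a/.b pair, e.g. regional.depth)")
    args = parser.parse_args()
    main(args.intopo)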
def spatiomean(fldin,regi_mask):
  #  fldin=[]
    ab = abf.ABFileGrid("regional.grid","r")
    pplon=ab.read_field("plon")
    pplat=ab.read_field("plat")
    scppx=ab.read_field("scpx")
    scppy=ab.read_field("scpy")
    abdpth = abf.ABFileBathy('regional.depth',"r",idm=ab.idm,jdm=ab.jdm)
    mdpth=abdpth.read_field('depth')
    maskdd=mdpth.data
    maskdd[maskdd>1e29]=np.nan
    #fldin[fldin>1e29]=np.nan
    #scppx[np.isnan(maskdd)]=np.nan
    #scppy[np.isnan(maskdd)]=np.nan
###     # mask for specific region
###     #Nordic=False
###     if Nordmask:
###        print 'Compute for Nordic------>>>>>>-'
###        maskdd[pplat>80]=np.nan
###        maskdd[pplat<55]=np.nan
###        maskdd[pplon>60]=np.nan
###        maskdd[pplon<-60]=np.nan
###        #Norid
###        #fldin[np.isnan(maskdd)]=np.nan
###        #scppx[np.isnan(maskdd)]=np.nan
###        #scppy[np.isnan(maskdd)]=np.nan
###     #--
    numer=fldin*scppx*scppy
    denum=scppx*scppy
    numer[np.isnan(regi_mask)]=np.nan
    denum[np.isnan(regi_mask)]=np.nan
    fldin_avg=np.nansum(numer)/np.nansum(denum)
    print('np.nansum(numer)=', np.nansum(numer))
    print('np.nansum(denum)=', np.nansum(denum))
    print('fldin_avg=', fldin_avg)
    #direct mean
    return fldin_avg
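At its core, spatiomean computes an area-weighted average, with scpx*scpy as the grid-cell area and NaN marking masked points. The self-contained sketch below reproduces that formula on small synthetic arrays (the field values, spacings and mask are made up for illustration).

import numpy as np

fld  = np.array([[1., 2., 3.], [4., 5., 6.], [7., 8., 9.]])  # synthetic field
scpx = np.full((3, 3), 2.0)      # grid spacing in x [m]
scpy = np.full((3, 3), 3.0)      # grid spacing in y [m]
regi_mask = np.ones((3, 3))
regi_mask[1, 1] = np.nan         # one masked (land / out-of-region) point

numer = fld * scpx * scpy        # field weighted by cell area
denum = scpx * scpy              # area of each cell
numer[np.isnan(regi_mask)] = np.nan
denum[np.isnan(regi_mask)] = np.nan
fldin_avg = np.nansum(numer) / np.nansum(denum)
print("area-weighted mean =", fldin_avg)   # 5.0 for this example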
Example no. 3
def main(myfiles,
         fieldname,
         idm=None,
         jdm=None,
         clim=None,
         filetype="archive",
         window=None,
         cmap="jet",
         datetime1=None,
         datetime2=None,
         vector="",
         tokml=False,
         masklim=None,
         filename2='',
         dpi=180):

    cmap = matplotlib.pyplot.get_cmap(cmap)  # honour the cmap argument (default "jet")
    if tokml:
        ab = abf.ABFileGrid("regional.grid", "r")
        plon = ab.read_field("plon")
        plat = ab.read_field("plat")
        ab.close()

    ab = abf.ABFileGrid("regional.grid", "r")
    plon = ab.read_field("plon")
    plat = ab.read_field("plat")
    scpx = ab.read_field("scpx")
    scpy = ab.read_field("scpy")
    target_lonlats = [plon, plat]
    abdpth = abf.ABFileBathy('regional.depth', "r", idm=ab.idm, jdm=ab.jdm)
    mdpth = abdpth.read_field('depth')
    maskd = mdpth.data
    maskd[maskd > 1e29] = np.nan
    #Region_mask=True
    Region_mask = False
    if Region_mask:
        maskd[plat > 80] = np.nan
        maskd[plat < 50] = np.nan
        maskd[plon > 60] = np.nan
        maskd[plon < -50] = np.nan

    Nordic_mask = maskd

    proj = ccrs.Stereographic(central_latitude=90.0, central_longitude=-40.0)
    pxy = proj.transform_points(ccrs.PlateCarree(), plon, plat)
    px = pxy[:, :, 0]
    py = pxy[:, :, 1]
    x, y = np.meshgrid(np.arange(plon.shape[1]), np.arange(plon.shape[0]))

    if vector:
        logger.info("Vector component 1:%s" % fieldname)
        logger.info("Vector component 2:%s" % vector)

    #---------------
    fieldlevel = 0
    Err_map = 1
    #freezp=-2.5
    freezp = -1.8
    #Point_tid = True
    Point_tid = False
    if Point_tid:
        ix = 1394
        jy = 267
    #---------------
    # compute for TP6 files
    # Initialise the accumulator: NaN over land, 0.0 over ocean
    # (a fresh array, so the bathymetry mask maskd is left untouched)
    sum_fld1 = np.where(np.isnan(maskd), np.nan, 0.0)
    Clim_arr = np.zeros((plon.shape[0], plon.shape[1], 12))
    figure = matplotlib.pyplot.figure(figsize=(8, 8))
    ax = figure.add_subplot(111)
    onemm = 9.806
    counter = 0
    file_count = 0
    dt_cnl = np.zeros(len(myfiles))
    diff_dt_cnl = np.zeros(len(myfiles))
    rmse_dt_cnl = np.zeros(len(myfiles))
    Labl1 = myfiles[0][:28]
    #Labl1="CNTL: New prsbas=0"
    yyyy1 = myfiles[0][-14:-10]
    print("myfiles[0]=", myfiles[0])
    print("yyyy1=", yyyy1)
    base = datetime.datetime(int(yyyy1), 1, 15)
    tid = np.array(
        [base + relativedelta(months=i) for i in range(len(myfiles))])
    logger.info("Processing the first set of files (myfiles)")
    if "salin" in fieldname:
        fieldname = "salin01"
    for ncfile0 in myfiles:
        logger.info("Now processing  %s" % ncfile0)
        fh = Dataset(ncfile0, mode='r')
        fld_arr = fh.variables[fieldname][:]
        if "srfhgt" in fieldname:
            #convert to "m"
            fld_arr = fld_arr / 9.806
        print("fld_arr.shpe", fld_arr.shape)
        tot = fld_arr.shape[0]
        fh.close()
        for ii in range(tot):
            fld = fld_arr[ii, :, :]
            print('mn,mx=', fld.min(), fld.max(), 'count=', counter)
            dt_cnl[counter] = np.nanmean(fld)
            if Point_tid:
                dt_cnl[counter] = fld[jy, ix]
            print("fld.shape", fld.shape)
            print("Nordic_mask.shape", Nordic_mask.shape)
            counter = counter + 1
            sum_fld1 = sum_fld1 + fld
            del fld
        # End i_intloop
    print('Computing the average of file_count=', file_count, 'counter=',
          counter)
    # next experiment
    if filename2:
        dt_2 = np.zeros(len(filename2))
        diff_dt_2 = np.zeros(len(filename2))
        rmse_dt_2 = np.zeros(len(filename2))
        yyyy2 = filename2[0][-14:-10]
        print("filename2[0]=", filename2[0])
        print("yyy1=", yyyy2)
        tid_2 = np.array([
            datetime.datetime(int(yyyy2), 1, 15) + relativedelta(months=i)
            for i in range(len(filename2))
        ])
        Labl2 = filename2[0][:28]
        counter = 0
        file_count = 0
        sum_fld1 = np.where(np.isnan(maskd), np.nan, 0.0)
        logger.info("Processing the second set of files (filename2)")
        for ncfil in filename2:
            logger.info("Now processing  %s" % ncfil)
            fh = Dataset(ncfil, mode='r')
            fld_arr = fh.variables[fieldname][:]
            if "srfhgt" in fieldname:
                fld_arr = fld_arr / 9.806
            print("fld_arr.shpe", fld_arr.shape)
            tot = fld_arr.shape[0]
            fh.close()
            for ii in range(tot):
                fld = fld_arr[ii, :, :]
                #fld=np.ma.masked_where(fld<freezp,fld)
                print('mn,mx=', fld.min(), fld.max(), 'count=', counter)
                dt_2[counter] = np.nanmean(fld)
                if Point_tid:
                    dt_2[counter] = fld[jy, ix]
                counter = counter + 1
                sum_fld1 = sum_fld1 + fld
                del fld

    #---------------------------------------
    figure, ax = plt.subplots()
    years = YearLocator()  # every year
    months = MonthLocator()  # every month
    yearsFmt = DateFormatter('%Y')
    #ax=figure.add_subplot(111)
    nplts = 1
    ax.plot_date(tid, dt_cnl, '-o', color='g', ms=3, label=Labl1)
    if filename2:
        ax.plot_date(tid_2, dt_2, '-v', color='blue', ms=3, label=Labl2)
    ax.xaxis.set_major_locator(years)
    ax.xaxis.set_major_formatter(yearsFmt)
    ax.xaxis.set_minor_locator(months)
    ax.autoscale_view()

    # format the coords message box
    def price(x):
        return '$%1.2f' % x

    ax.fmt_xdata = DateFormatter('%Y-%m-%d')
    ax.fmt_ydata = price
    ax.grid(True)
    figure.autofmt_xdate()
    legend = plt.legend(loc='upper right', fontsize=8)
    if Point_tid:
        plt.title("Point:(lon,lat)=(" + str(plon[jy, ix]) + ',' +
                  str(plat[jy, ix]) + "): %s(%d)" % (fieldname, fieldlevel))
    else:
        plt.title("Area-averaged: %s(%d)" % (fieldname, fieldlevel))
    #plt.xlabel('days')
    if "srfhgt" in fieldname:
        plt.ylabel("%s[m]" % (fieldname))
    else:
        plt.ylabel("%s(%d)" % (fieldname, fieldlevel))
    ts_fil = "Time_series_cntl%s_%02d_%02d" % (fieldname, fieldlevel,
                                               len(myfiles))
    if Region_mask:
        ts_fil = 'Region_' + ts_fil
    if Point_tid:
        ts_fil = 'Point_ix' + str(ix) + 'jy' + str(jy) + ts_fil
    figure.canvas.print_figure(ts_fil, bbox_inches='tight', dpi=dpi)
    logger.info("Successfull printing:  %s" % ts_fil)
Example no. 4
def main(infile_coarse,infile_fine,
      ncells_linear=20,
      ncells_exact=3,
      check_consistency=False,
      bathy_threshold=0.) :

   #bathy_threshold=0. # TODO
   logger.info("Bathy threshold is %12.4f"%bathy_threshold)

   # Read plon,plat
   gfile=abf.ABFileGrid("regional.grid","r")
   plon=gfile.read_field("plon")
   plat=gfile.read_field("plat")
   gfile.close()

   # Read input bathymetry - fine version
   m=re.match( "^(.*)(\.[ab])", infile_fine)
   if m : infile_fine=m.group(1)
   bfile=abf.ABFileBathy(infile_fine,"r",idm=gfile.idm,jdm=gfile.jdm)
   fine_depth_m=bfile.read_field("depth")
   fine_depth_m=numpy.ma.masked_where(fine_depth_m<=bathy_threshold,fine_depth_m)
   fine_depth=numpy.ma.filled(fine_depth_m,bathy_threshold)
   bfile.close()

   # Read input bathymetry - coarse version
   m=re.match( "^(.*)(\.[ab])", infile_coarse)
   if m : infile_coarse=m.group(1)
   bfile=abf.ABFileBathy(infile_coarse,"r",idm=gfile.idm,jdm=gfile.jdm)
   coarse_depth_m=bfile.read_field("depth")
   coarse_depth_m=numpy.ma.masked_where(coarse_depth_m<=bathy_threshold,coarse_depth_m)
   coarse_depth=numpy.ma.filled(coarse_depth_m,bathy_threshold)
   bfile.close()

   # create relaxation mask (rmu)
   tmp=numpy.linspace(0.,1.,ncells_linear)
   tmp=numpy.concatenate((numpy.zeros((ncells_exact,)),tmp)) # i.e. the first ncells_exact cells will match the outer bathymetry exactly
   ncells=ncells_linear+ncells_exact
   rmu=numpy.ones(coarse_depth.shape)
   rmu[:,0:ncells] = numpy.minimum(tmp,rmu[:,0:ncells])
   rmu[0:ncells,:] = numpy.minimum(tmp,rmu[0:ncells,:].transpose()).transpose()
   rmu[:,-ncells:] = numpy.minimum(tmp[::-1],rmu[:,-ncells:])
   rmu[-ncells:,:] = numpy.minimum(tmp[::-1],rmu[-ncells:,:].transpose()).transpose()

   ## Only allow points where both models are defined in the boundary zone
   rmumask=fine_depth_m.mask
   rmumask[:,0:ncells] = numpy.logical_or(rmumask[:,0:ncells],coarse_depth_m.mask[:,0:ncells])
   rmumask[0:ncells,:] = numpy.logical_or(rmumask[0:ncells,:],coarse_depth_m.mask[0:ncells,:])
   rmumask[:,-ncells:] = numpy.logical_or(rmumask[:,-ncells:],coarse_depth_m.mask[:,-ncells:])
   rmumask[-ncells:,:] = numpy.logical_or(rmumask[-ncells:,:],coarse_depth_m.mask[-ncells:,:])

   
   figure = matplotlib.pyplot.figure(figsize=(8,8))
   ax=figure.add_subplot(111)
   P=ax.pcolormesh(rmu)
   figure.colorbar(P)#,norm=matplotlib.colors.LogNorm(vmin=mask.min(), vmax=mask.max()))
   figure.canvas.print_figure("tst.png")


   # Modify bathy in mask region
   newbathy = (1.-rmu) * coarse_depth + rmu * fine_depth
   newbathy[rmumask] = bathy_threshold
   newbathy[:,0]=bathy_threshold
   newbathy[:,-1]=bathy_threshold
   newbathy[0,:]=bathy_threshold
   newbathy[-1,:]=bathy_threshold
   #print newbathy.min(),newbathy.max()



   # Make call to consistency routine
   if check_consistency :
      logger.info("Passing merged bathymetry to consistency check ")
      import hycom_bathy_consistency # Normally in same dir as this python routine, so ok
      newbathy=hycom_bathy_consistency.main("",[],[],
            remove_isolated_basins=True,
            remove_one_neighbour_cells=True,
            remove_islets=True,
            remove_inconsistent_nesting_zone=True,
            inbathy=numpy.ma.masked_where(newbathy<=bathy_threshold,newbathy),
            write_to_file=False)


   # Mask data where depth below threshold
   newbathy_m=numpy.ma.masked_where(newbathy<=bathy_threshold,newbathy)

   # Create netcdf file with all  stages for analysis
   logger.info("Writing bathymetry to diagnostic file bathy_merged.nc")
   ncid = netCDF4.Dataset("bathy_merged.nc","w")
   ncid.createDimension("idm",newbathy.shape[1])
   ncid.createDimension("jdm",newbathy.shape[0])
   ncid.createVariable("lon","f8",("jdm","idm"))
   ncid.createVariable("lat","f8",("jdm","idm"))
   ncid.createVariable("coarse","f8",("jdm","idm"))
   ncid.createVariable("coarse_masked","f8",("jdm","idm"))
   ncid.createVariable("fine","f8",("jdm","idm"))
   ncid.createVariable("fine_masked","f8",("jdm","idm"))
   ncid.createVariable("final","f8",("jdm","idm"))
   ncid.createVariable("final_masked","f8",("jdm","idm"))
   ncid.createVariable("rmu","f8",("jdm","idm"))
   ncid.createVariable("modified","f8",("jdm","idm"))
   ncid.variables["lon"][:]=plon
   ncid.variables["lat"][:]=plat
   ncid.variables["coarse"][:]=coarse_depth
   ncid.variables["coarse_masked"][:]=coarse_depth_m
   ncid.variables["fine"][:]=fine_depth
   ncid.variables["fine_masked"][:]=fine_depth_m
   ncid.variables["final"][:]=newbathy
   ncid.variables["final_masked"][:]=newbathy_m
   modmask=newbathy-fine_depth
   ncid.variables["modified"][:] = modmask
   ncid.variables["rmu"][:] = rmu
   ncid.close()
   
   logger.info("Writing bathymetry plot to file newbathy.png")
   figure = matplotlib.pyplot.figure(figsize=(8,8))
   ax=figure.add_subplot(111)
   P=ax.pcolormesh(newbathy)
   figure.colorbar(P,norm=matplotlib.colors.LogNorm(vmin=newbathy.min(), vmax=newbathy.max()))
   I,J=numpy.where(numpy.abs(modmask)>.1)
   ax.scatter(J,I,20,"r")
   figure.canvas.print_figure("newbathy.png")



   # Print to HYCOM
   abf.write_bathymetry("MERGED",0,newbathy,bathy_threshold)
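The merge itself is the convex combination newbathy = (1-rmu)*coarse_depth + rmu*fine_depth, with rmu held at 0 over the first ncells_exact cells (so the boundary matches the coarse, outer bathymetry exactly) and then ramping linearly to 1 in the interior. A one-dimensional sketch of the ramp and blend, with made-up cell counts and depths, is:

import numpy as np

ncells_exact, ncells_linear = 3, 5
ramp = np.concatenate((np.zeros(ncells_exact), np.linspace(0., 1., ncells_linear)))
rmu = np.ones(12)
rmu[:ramp.size] = np.minimum(ramp, rmu[:ramp.size])

coarse = np.full(12, 100.)    # outer-model depth [m]
fine = np.full(12, 300.)      # inner-model depth [m]
newbathy = (1. - rmu) * coarse + rmu * fine
print(np.round(newbathy, 1))  # 100 m at the edge, ramping to 300 m in the interior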
   class WindowParseAction(argparse.Action) :
     def __call__(self, parser, args, values, option_string=None):
       tmp = values.split(",")
       tmp = [int(elem) for elem in tmp[0:4]]
       setattr(args, self.dest, tmp)

   parser = argparse.ArgumentParser(description='')
   parser.add_argument('--filename', help="",nargs='+')
   
   args = parser.parse_args()
   print(args.filename)
   #Example
   #python ./interpolate_sstncof2TP5.py --filename ../ncof_sst/ncof_sst_20*.nc
   
   gfile = abf.ABFileGrid("regional.grid","r")
   plon=gfile.read_field("plon")
   plat=gfile.read_field("plat")
   gfile.close()
   dpfile = abf.ABFileBathy("regional.depth","r",idm=gfile.idm,jdm=gfile.jdm)
   depth=dpfile.read_field("depth")
   dpfile.close()
   target_lon=plon
   target_lat=plat
   target_lonlats = [target_lon,target_lat]

   # compute for TPZ files
   counter=0
   file_count=0
   if args.filename:
      for ncfile0 in args.filename :
def main(lon1,
         lat1,
         lon2,
         lat2,
         variable,
         files,
         filetype="archive",
         clim=None,
         sectionid="",
         ijspace=False,
         xaxis="distance",
         section_map=False,
         dens=False,
         dpi=180):

    logger.info("Filetype is %s" % filetype)
    gfile = abf.ABFileGrid("regional.grid", "r")
    plon = gfile.read_field("plon")
    plat = gfile.read_field("plat")

    # Set up section info
    if ijspace:
        sec = gridxsec.SectionIJSpace([lon1, lon2], [lat1, lat2], plon, plat)
    else:
        sec = gridxsec.Section([lon1, lon2], [lat1, lat2], plon, plat)
    I, J = sec.grid_indexes
    dist = sec.distance
    print('dist.shape=', dist.shape)
    slon = sec.longitude
    slat = sec.latitude

    logger.info("Min max I-index (starts from 0):%d %d" % (I.min(), I.max()))
    logger.info("Min max J-index (starts from 0):%d %d" % (J.min(), J.max()))
    #
    #
    if section_map:
        ll_lon = slon.min() - 10.
        ur_lon = slon.max() + 10.
        ll_lat = np.maximum(-90., slat.min() - 10.)
        ur_lat = np.minimum(90., slat.max() + 10.)

        proj = ccrs.Stereographic(central_latitude=90.0,
                                  central_longitude=-40.0)
        pxy = proj.transform_points(ccrs.PlateCarree(), plon, plat)
        px = pxy[:, :, 0]
        py = pxy[:, :, 1]
        x, y = np.meshgrid(np.arange(slon.shape[0]), np.arange(slat.shape[0]))

        figure = plt.figure(figsize=(10, 8))
        ax = figure.add_subplot(111)

        ax = plt.axes(projection=ccrs.PlateCarree())
        ax.set_extent([-179, 179, 53, 85], ccrs.PlateCarree())
        ax.add_feature(cfeature.GSHHSFeature('auto', edgecolor='grey'))
        ax.add_feature(cfeature.GSHHSFeature('auto', facecolor='grey'))
        ax.gridlines()
        ax.plot(slon, slat, "r-", lw=1)

        pos = ax.get_position()
        asp = pos.height / pos.width
        w = figure.get_figwidth()
        h = asp * w
        figure.set_figheight(h)
        if sectionid:
            figure.canvas.print_figure("map_%s.png" % sectionid, dpi=dpi)
        else:
            figure.canvas.print_figure("map.png", dpi=dpi)

    # Get layer thickness variable used in hycom
    dpname = modeltools.hycom.layer_thickness_variable[filetype]
    logger.info("Filetype %s: layer thickness variable is %s" %
                (filetype, dpname))

    if xaxis == "distance":
        x = dist / 1000.
        xlab = "Distance along section[km]"
    elif xaxis == "i":
        x = I
        xlab = "i-index"
    elif xaxis == "j":
        x = J
        xlab = "j-index"
    elif xaxis == "lon":
        x = slon
        xlab = "longitude"
    elif xaxis == "lat":
        x = slat
        xlab = "latitude"
    else:
        logger.warning("xaxis must be i,j,lo,lat or distance")
        x = dist / 1000.
        xlab = "Distance along section[km]"

    # Loop over archive files
    figure = plt.figure()
    ax = figure.add_subplot(111)
    pos = ax.get_position()
    for fcnt, myfile0 in enumerate(files):

        # Remove [ab] ending if present
        m = re.match("(.*)\.[ab]", myfile0)
        if m:
            myfile = m.group(1)
        else:
            myfile = myfile0

        # Add more filetypes if needed. By def we assume archive
        if filetype == "archive":
            i_abfile = abf.ABFileArchv(myfile, "r")
        elif filetype == "restart":
            i_abfile = abf.ABFileRestart(myfile,
                                         "r",
                                         idm=gfile.idm,
                                         jdm=gfile.jdm)
        else:
            raise NotImplementedError("Filetype %s not implemented" % filetype)

        # kdm assumed to be max level in ab file
        kdm = max(i_abfile.fieldlevels)

        # Set up interface and data arrays
        xx = np.zeros((kdm + 1, I.size))
        intfsec = np.zeros((kdm + 1, I.size))
        datasec = np.zeros((kdm + 1, I.size))
        if dens:
            datasec_sal = np.zeros((kdm + 1, I.size))
            sigma_sec = np.zeros((kdm + 1, I.size))

        # Loop over layers in file.
        logger.info("File %s" % (myfile))
        for k in range(kdm):
            logger.debug("File %s, layer %03d/%03d" % (myfile, k, kdm))

            # Get 2D fields
            dp2d = i_abfile.read_field(dpname, k + 1)
            data2d = i_abfile.read_field(variable, k + 1)
            dp2d = np.ma.filled(dp2d, 0.) / modeltools.hycom.onem
            data2d = np.ma.filled(data2d, 1e30)

            # Place data into section arrays
            intfsec[k + 1, :] = intfsec[k, :] + dp2d[J, I]
            if k == 0: datasec[k, :] = data2d[J, I]
            datasec[k + 1, :] = data2d[J, I]

            if dens:
                data2d_sal = i_abfile.read_field('salin', k + 1)
                data2d_sal = np.ma.filled(data2d_sal, 1e30)
                datasec_sal[k + 1, :] = data2d_sal[J, I]

        i_maxd = np.argmax(np.abs(intfsec[kdm, :]))
        for k in range(kdm + 1):
            xx[k, :] = x[:]

        datasec = np.ma.masked_where(datasec > 0.5 * 1e30, datasec)
        print("datasec min, max=", datasec.min(), datasec.max())
        if dens:
            datasec_sal = np.ma.masked_where(datasec_sal > 0.5 * 1e30,
                                             datasec_sal)
            print("datasec_sal min, max=", datasec_sal.min(),
                  datasec_sal.max())
            sigma_sec = mod_hyc2plot.sig(datasec, datasec_sal)
            sigma_sec = np.ma.masked_where(sigma_sec < 0.0, sigma_sec)
            datasec = sigma_sec
        # Set up section plot
        datasec = np.ma.masked_where(datasec > 0.5 * 1e30, datasec)
        print("min, max=", datasec.min(), datasec.max())
        if clim is None:
            clim = [datasec.min(), datasec.max()]
            #clim=[0.0,13]
        print("clim=", clim[0], clim[1])
        if clim is not None:
            lvls = MaxNLocator(nbins=70).tick_values(clim[0], clim[1])
        mf = 'sawtooth_fc100.txt'
        LinDic = mod_hyc2plot.cmap_dict(mf)
        my_cmap = matplotlib.colors.LinearSegmentedColormap(
            'my_colormap', LinDic)
        cmap = my_cmap
        norm = BoundaryNorm(lvls, ncolors=cmap.N, clip=True)
        P = ax.contourf(xx, -intfsec, datasec, cmap=cmap, levels=lvls)

        # Plot layer interfaces
        for k in range(1, kdm + 1):
            if k % 100 == 0:
                PL = ax.plot(x, -intfsec[k, :], "-", color="k")
                textx = x[i_maxd]
                texty = -0.5 * (intfsec[k - 1, i_maxd] + intfsec[k, i_maxd])
                ax.text(textx,
                        texty,
                        str(k),
                        verticalalignment="center",
                        horizontalalignment="center",
                        fontsize=6)
            elif k % 5 == 0:
                PL = ax.plot(x, -intfsec[k, :], "--", color="k", linewidth=0.5)
                textx = x[i_maxd]
                texty = -0.5 * (intfsec[k - 1, i_maxd] + intfsec[k, i_maxd])
                ax.text(textx,
                        texty,
                        str(k),
                        verticalalignment="center",
                        horizontalalignment="center",
                        fontsize=6)
            else:
                if k > 2 and k % 2 == 0:
                    PL = ax.plot(x,
                                 -intfsec[k, :],
                                 "-",
                                 color=".5",
                                 linewidth=0.5)
                    textx = x[i_maxd]
                    texty = -0.5 * (intfsec[k - 1, i_maxd] +
                                    intfsec[k, i_maxd])
                    ax.text(textx,
                            texty,
                            str(k),
                            verticalalignment="center",
                            horizontalalignment="center",
                            fontsize=6)
                else:
                    continue
        # Print figure
        ax.set_facecolor('xkcd:gray')
        aspect = 90
        pad_fraction = 0.25
        divider = make_axes_locatable(ax)
        width = axes_size.AxesY(ax, aspect=1. / aspect)
        pad = axes_size.Fraction(pad_fraction, width)
        cax = divider.append_axes("right", size=width, pad=pad)
        cb = ax.figure.colorbar(P, cax=cax)
        if clim is not None: P.set_clim(clim)
        if dens:
            ax.set_title('[P. density ]: ' + myfile)
        else:
            ax.set_title('[' + variable + ']: ' + myfile)

        ax.set_ylabel('Depth [m]')
        ax.set_xlabel(xlab)

        # Print in different y-lims
        suff = os.path.basename(myfile)
        if sectionid: suff = suff + "_" + sectionid
        if dens: variable = "dens"
        figure.canvas.print_figure("sec_%s_full_%s.png" % (variable, suff),
                                   dpi=dpi)
        ax.set_ylim(-1000, 0)
        figure.canvas.print_figure("sec_%s_1000m_%s.png" % (variable, suff),
                                   dpi=dpi)

        # Close input file
        i_abfile.close()

        #
        ax.clear()
        cb.remove()
def main(lon1,lat1,lon2,lat2,variable,files,filetype="archive",clim=None,sectionid="",
      ijspace=False,xaxis="distance",section_map=False,ncfiles="",dpi=180) :
   #TP4Grd='/cluster/work/users/aal069/TP4a0.12/mfile/'
   logger.info("Filetype is %s"% filetype)
   gfile = abf.ABFileGrid("regional.grid","r")
   plon=gfile.read_field("plon")
   plat=gfile.read_field("plat")


   # Set up section info
   if ijspace :
      sec = gridxsec.SectionIJSpace([lon1,lon2],[lat1,lat2],plon,plat)
   else  :
      sec = gridxsec.Section([lon1,lon2],[lat1,lat2],plon,plat)
   I,J=sec.grid_indexes
   dist=sec.distance
   print('dist.shape=',dist.shape)
   slon=sec.longitude
   slat=sec.latitude
   # In testing
   #J,I,slon,slat,case,dist=sec.find_intersection(qlon,qlat)
   #print I,J
   #raise NameError,"test"

   logger.info("Min max I-index (starts from 0):%d %d"%(I.min(),I.max()))
   logger.info("Min max J-index (starts from 0):%d %d"%(J.min(),J.max()))
   #
   #
   if section_map :
      ll_lon=slon.min()-10.
      ur_lon=slon.max()+10.
      ll_lat=np.maximum(-90.,slat.min()-10.)
      ur_lat=np.minimum(90. ,slat.max()+10.)

      proj=ccrs.Stereographic(central_latitude=90.0,central_longitude=-40.0)
      #pxy = proj.transform_points(ccrs.PlateCarree(), plon, plat)
      #px=pxy[:,:,0]
      #py=pxy[:,:,1]
      #x,y=np.meshgrid(np.arange(slon.shape[0]),np.arange(slat.shape[0]))
        
      figure =plt.figure(figsize=(8,8))
      ax=figure.add_subplot(111,projection=proj)
      #ax = plt.axes(projection=ccrs.PlateCarree())
      ax.set_extent([-179, 179, 53, 85],ccrs.PlateCarree())
      #ax = plt.axes(projection=ccrs.Stereographic())
      ax.add_feature(cfeature.GSHHSFeature('auto', edgecolor='grey'))
      ax.add_feature(cfeature.GSHHSFeature('auto', facecolor='grey'))
      ax.gridlines()
      #ax.coastlines(resolution='110m')
      ax.plot(slon,slat,"r-",lw=1,transform=ccrs.PlateCarree())
       
      pos = ax.get_position()
      asp=pos.height/pos.width
      w=figure.get_figwidth()
      h=asp*w
      figure.set_figheight(h)
      if sectionid :
         figure.canvas.print_figure("map_%s.png"%sectionid,dpi=dpi,bbox_inches='tight')
      else :
         figure.canvas.print_figure("map.png",dpi=dpi,bbox_inches='tight')

   # Get layer thickness variable used in hycom
   dpname = modeltools.hycom.layer_thickness_variable[filetype]
   logger.info("Filetype %s: layer thickness variable is %s"%(filetype,dpname))


   if xaxis == "distance" :
      x=dist/1000.
      xlab="Distance along section[km]"
   elif xaxis == "i" :
      x=I
      xlab="i-index"
   elif xaxis == "j" :
      x=J
      xlab="j-index"
   elif xaxis == "lon" :
      x=slon
      xlab="longitude"
   elif xaxis == "lat" :
      x=slat
      xlab="latitude"
   else :
      logger.warning("xaxis must be i,j,lo,lat or distance")
      x=dist/1000.
      xlab="Distance along section[km]"

   # get kdm from the first file:
   # Remove [ab] ending if present
   print('first file', files[0])
   m=re.match(r"(.*)\.[ab]",files[0])
   myf=m.group(1) if m else files[0]
   print('myf=',myf)
   fi_abfile = abf.ABFileArchv(myf,"r")
   kdm=max(fi_abfile.fieldlevels)

   # Loop over archive files
   figure = plt.figure()
   ax=figure.add_subplot(111)
   pos = ax.get_position()
   count_sum=0
   intfsec_sum=np.zeros((kdm+1,I.size))
   datasec_sum=np.zeros((kdm+1,I.size))
   for fcnt,myfile0 in enumerate(files) :
      count_sum=count_sum+1
      print('count_sum==', count_sum)
      print('fcnt=', fcnt)
      print('myfile0=', myfile0)
      # Remove [ab] ending if present
      m=re.match("(.*)\.[ab]",myfile0)
      if m :
         myfile=m.group(1)
      else :
         myfile=myfile0

      # Add more filetypes if needed. By def we assume archive
      if filetype == "archive" :
         i_abfile = abf.ABFileArchv(myfile,"r")
      elif filetype == "restart" :
         i_abfile = abf.ABFileRestart(myfile,"r",idm=gfile.idm,jdm=gfile.jdm)
      else :
         raise NotImplementedError("Filetype %s not implemented"%filetype)
      # kdm assumed to be max level in ab file
      kdm=max(i_abfile.fieldlevels)

      # Set up interface and data arrays
      
      xx=np.zeros((kdm+1,I.size))
      intfsec=np.zeros((kdm+1,I.size))
      datasec=np.zeros((kdm+1,I.size))
      # Loop over layers in file. 
      logger.info("File %s"%(myfile))
      for k in range(kdm) :
         logger.debug("File %s, layer %03d/%03d"%(myfile,k,kdm))

         # Get 2D fields
         dp2d=i_abfile.read_field(dpname,k+1)
         data2d=i_abfile.read_field(variable,k+1)
         #print('---mn,mx  data=',  data2d.min(),data2d.max())
         if (k%kdm==49):
            print("---Reach bottom layer" )
         dp2d=np.ma.filled(dp2d,0.)/modeltools.hycom.onem
         data2d=np.ma.filled(data2d,1e30)
         # Place data into section arrays
         intfsec[k+1,:] = intfsec[k,:] + dp2d[J,I]
         if k==0 : datasec[k,:] = data2d[J,I]
         datasec[k+1,:] = data2d[J,I]
      

      intfsec_sum=intfsec_sum + intfsec
      datasec_sum=datasec_sum + datasec
      #print 'prs_intafce=', np.transpose(intfsec[:,15]) 
      i_abfile.close()

      # end loop over files
      
   intfsec_avg=intfsec_sum/count_sum
   datasec_avg=datasec_sum/count_sum

   if ncfiles :
      MLDGS_sum=np.zeros((1,I.size))
      count_sum=0
      for fcnt,ncfile in enumerate(ncfiles) :
         count_sum=count_sum+1
         print('ncfile count_sum==', count_sum)
         print('ncfile fcnt=', fcnt)
         print('ncfilefile=', ncfile)
         MLDGS=np.zeros((1,I.size))
         ncfile0 = netCDF4.Dataset(ncfile,'r')
         MLD_2D  = ncfile0.variables['GS_MLD'][:]
         #MLD_2D  = ncfile0.variables['mlp'][:]
         MLDGS[0,:]=MLD_2D[0,J,I]
         MLDGS_sum= MLDGS_sum + MLDGS
         ncfile0.close()
      # end loop over files
      MLDGS_avg=MLDGS_sum/count_sum
   #
   #-----------------------------------------------------------------
   # read from clim mld TP5netcdf
   if ncfiles :
      if 'TP2' in files[0]:
         fh=netCDF4.Dataset('mld_dr003_l3_modif_Interp_TP2grd.nc')
      else:
         fh=netCDF4.Dataset('mld_dr003_l3_modif_Interp_TP5grd.nc')
      fhmldintrp = fh.variables['TP5mld'][:]
      fh.close()
      #fhMLDintrp_sum=np.zeros((760,800))
      MLDclim_sum=np.zeros((1,I.size))
      cunt_sum=0
      for ii in range(12) :
          cunt_sum=cunt_sum +1
          MLDclim=np.zeros((1,I.size))
          MLDclim[0,:]=fhmldintrp[ii,J,I]

          MLDclim_sum= MLDclim_sum + MLDclim
          print('clim count_sum==', cunt_sum)
      MLDclim_avg=MLDclim_sum/cunt_sum
   #-----------------------------------------------------------------   
   i_maxd=np.argmax(np.abs(intfsec_avg[kdm,:]))
   #print i_maxd
   for k in range(kdm+1) :
      xx[k,:] = x[:]
   # Set up section plot
   #datasec = np.ma.masked_where(datasec==1e30,datasec)
   datasec_avg = np.ma.masked_where(datasec_avg>0.5*1e30,datasec_avg)
   #print datasec.min(),datasec.max()
   #P=ax.pcolormesh(dist/1000.,-intfsec,datasec)
   #print i_maxd
   for k in range(kdm+1) :
      xx[k,:] = x[:]
   
   if clim is not None :
      lvls = MaxNLocator(nbins=30).tick_values(clim[0], clim[1])
   else :
      lvls = MaxNLocator(nbins=30).tick_values(datasec_avg.min(), datasec_avg.max())
   #print('levels=', lvls)
   mf='sawtooth_0-1.txt'
   LinDic=mod_hyc2plot.cmap_dict(mf)
   my_cmap = matplotlib.colors.LinearSegmentedColormap('my_colormap',LinDic)
   cmap=my_cmap
   #cmap = matplotlib.pyplot.get_cmap('gist_rainbow_r')
   norm = BoundaryNorm(lvls, ncolors=cmap.N, clip=True)
   print('x.shape=' ,      x.shape)
   print('x.min,xmax=' ,  x.min(),x.max())
   print('xx.shape=' ,      xx.shape)
   print('xx.min,xxmax=' ,  xx.min(),xx.max())
   print('intfsec_avg.shape=', intfsec_avg.shape)
   print('datasec_avg.shape=', datasec_avg.shape)
   #P=ax.pcolormesh(x,-intfsec,datasec,cmap=cmap)
   P=ax.contourf(xx,-intfsec_avg,datasec_avg,extend='both',cmap=cmap,levels=lvls)
   if 'sal' in variable:
      P1=ax.contour(xx,-intfsec_avg,datasec_avg,levels=[32.0,33.0,34.0,35.0,35.5],
          colors=('k',),linestyles=('-',),linewidths=(1.5,))
   else:
      P1=ax.contour(xx,-intfsec_avg,datasec_avg,levels=[-1,0.0,2.0],
          colors=('k',),linestyles=('-',),linewidths=(1.5,))
   matplotlib.pyplot.clabel(P1, fmt = '%2.1d', colors = 'k', fontsize=10) #contour line labels
   # Plot layer interfaces
   for k in range(1,kdm+1) :
      if k%100 == 0 : 
         PL=ax.plot(x,-intfsec_avg[k,:],"-",color="k")
      elif k%5 == 0 and k <= 10: 
         PL=ax.plot(x,-intfsec_avg[k,:],"--",color="k", linewidth=0.5)
         textx = x[i_maxd]
         texty = -0.5*(intfsec_avg[k-1,i_maxd] + intfsec_avg[k,i_maxd])
         ax.text(textx,texty,str(k),verticalalignment="center",horizontalalignment="center",fontsize=6)
      elif k%2 and k > 10 : 
         PL=ax.plot(x,-intfsec_avg[k,:],"--",color="k", linewidth=0.5)
         textx = x[i_maxd]
         texty = -0.5*(intfsec_avg[k-1,i_maxd] + intfsec_avg[k,i_maxd])
         ax.text(textx,texty,str(k),verticalalignment="center",horizontalalignment="center",fontsize=6)
   if ncfiles :
      PL=ax.plot(x,-MLDGS_avg[0,:],"-",color="w", linewidth=1.50)
      PL=ax.plot(x,-MLDclim_avg[0,:],"--",color="r", linewidth=1.50)
###    else :
###       PL=ax.plot(x,-intfsec_avg[k,:],"-",color=".5")
   # Print figure and remove white space.
   aspect = 50
   pad_fraction = 0.25
   divider = make_axes_locatable(ax)
   width = axes_size.AxesY(ax, aspect=1./aspect)
   pad = axes_size.Fraction(pad_fraction, width)
   cax = divider.append_axes("right", size=width, pad=pad)
   cb=ax.figure.colorbar(P,cax=cax,extend='both')
   #cb=ax.figure.colorbar(P,extend='both')
   if clim is not None : P.set_clim(clim)
   #cb=ax.figure.colorbar(P,extend='both')
   ax.set_title(variable+':'+myfile+'AVG-')
   ax.set_ylabel('Depth [m]')
   ax.set_xlabel(xlab)
   #ax.set_position(pos)
   #matplotlib.pyplot.tight_layout()

   # Print in different y-lims 
   suff=os.path.basename(myfile)
   if sectionid : suff=suff+"_"+sectionid
   figure.canvas.print_figure("sec_AVG_%s_full_%s.png"%(variable,suff),dpi=dpi)
   #ax.set_ylim(-1000,0)
   if 'Fram' in sectionid or 'Svin' in sectionid:
      print('sectionid=', sectionid)
      ax.set_ylim(-600,0)
      figure.canvas.print_figure("sec_AVG_%s_600m_%s.png"%(variable,suff),dpi=dpi)
   else:
      #ax.set_ylim(-2500,0)
      #figure.canvas.print_figure("sec_AVG_%s_2500m_%s.png"%(variable,suff),dpi=dpi)
      ax.set_ylim(-3000,0)
      figure.canvas.print_figure("sec_AVG_%s_3000m_%s.png"%(variable,suff),dpi=dpi)

   # Close input file
   #i_abfile.close()
   #
   ax.clear()
   cb.remove()
Example no. 8
def main(path, source_file):
    print(path)
    print(source_file)
    nc = NetCDFFile(source_file, "r")
    wetNOy = nc.variables["WDEP_NOy"][:, :]
    wetNHx = nc.variables["WDEP_NHx"][:, :]
    dryNOy = nc.variables["DDEP_NOy_m2Grid"][:, :]
    dryNHx = nc.variables["DDEP_NHx_m2Grid"][:, :]
    lat = nc.variables["lat"][:]
    lon = nc.variables["lon"][:]
    Nread = wetNOy + wetNHx + dryNOy + dryNHx

    abgrid = abf.ABFileGrid(path + "../../../topo/regional.grid", "r")
    plon = abgrid.read_field("plon")
    plat = abgrid.read_field("plat")
    jdm, idm = plon.shape

    xi, yi = np.meshgrid(lon, lat)
    N = griddata((xi.flatten(), yi.flatten()),
                 Nread.flatten(), (plon, plat),
                 method='linear')
    N[np.isnan(N)] = 0.
    N = N / 365. / 86400.  # year --> second
    N = N / 14.01 * 6.625 * 12.01  # mgN m-2 s-1 --> mgC m-2 s-1


    outfile=abf.ABFileRiver(path + "ECO_no3_new.a","w",idm=idm,jdm=jdm,\
                   cline1='River nitrate fluxes + Atmospheric N deposition',\
                   cline2='mgC m-2 s-1')
    outfile.write_header()
    Nriver = abf.AFile(idm, jdm, path + "ECO_no3.a", "r")

    for month in range(12):
        river = Nriver.read_record(month)
        outfile.write_field(river + N, None, "river nitrate", month + 1)

    Nriver.close()
    outfile.close()

    origAfile = path + "ECO_no3.a"
    origBfile = path + "ECO_no3.b"
    oldAfile = path + "ECO_no3_noATM.a"
    oldBfile = path + "ECO_no3_noATM.b"
    newAfile = path + "ECO_no3_new.a"
    newBfile = path + "ECO_no3_new.b"

    os.rename(origAfile, oldAfile
              )  # river without atmospheric deposition is kept as noATM file.
    os.rename(origBfile, oldBfile
              )  # river without atmospheric deposition is kept as noATM file.
    os.rename(
        newAfile, origAfile
    )  # river with atmospheric deposition is renamed to the original file used by hycom
    os.rename(
        newBfile, origBfile
    )  # river with atmospheric deposition is renamed to the original file used by hycom

    # according to Okin et al, 2011 - doi:10.1029/2010GB003858
    # for North Atlantic (northern section)
    # Ndep = 7.4 TgN/yr
    # Pdep = 0.02 TgP/yr
    # ratio --> ( 7.4 / 14.01 * 6.625 ) / ( 0.02 / 31. * 106. ) = 51.2
    P = N / 51.2

    outfile=abf.ABFileRiver(path + "ECO_pho_new.a","w",idm=idm,jdm=jdm,\
                   cline1='River phosphate fluxes + Atmospheric P deposition',\
                   cline2='mgC m-2 s-1')
    outfile.write_header()
    Priver = abf.AFile(idm, jdm, path + "ECO_pho.a", "r")

    for month in range(12):
        river = Priver.read_record(month)
        outfile.write_field(river + P, None, "river phosphate", month + 1)

    Priver.close()
    outfile.close()

    origAfile = path + "ECO_pho.a"
    origBfile = path + "ECO_pho.b"
    oldAfile = path + "ECO_pho_noATM.a"
    oldBfile = path + "ECO_pho_noATM.b"
    newAfile = path + "ECO_pho_new.a"
    newBfile = path + "ECO_pho_new.b"

    os.rename(origAfile, oldAfile
              )  # river without atmospheric deposition is kept as noATM file.
    os.rename(origBfile, oldBfile
              )  # river without atmospheric deposition is kept as noATM file.
    os.rename(
        newAfile, origAfile
    )  # river with atmospheric deposition is renamed to the original file used by hycom
    os.rename(
        newBfile, origBfile
    )  # river with atmospheric deposition is renamed to the original file used by hycom
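The two scalings applied to the deposition field above convert an annual nitrogen flux into a carbon-equivalent flux per second: dividing by 365*86400 takes yr-1 to s-1, and the factor 6.625*12.01/14.01 converts mgN to mgC at the Redfield C:N ratio of 106:16 = 6.625. A quick stand-alone check with a made-up deposition value:

ndep = 100.0                            # assumed annual deposition [mgN m-2 yr-1]
flux_n = ndep / 365. / 86400.           # [mgN m-2 s-1]
flux_c = flux_n / 14.01 * 6.625 * 12.01 # [mgC m-2 s-1]
print("%.3e mgC m-2 s-1" % flux_c)      # ~1.8e-05 for this example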
def main():

    # Read plon,plat
    gfile = abf.ABFileGrid("regional.grid", "r")
    plon = gfile.read_field("plon")
    plat = gfile.read_field("plat")
    gfile.close()

    # Read input bathymetry
    bfile = abf.ABFileBathy("regional.depth",
                            "r",
                            idm=gfile.idm,
                            jdm=gfile.jdm,
                            mask=True)
    in_depth_m = bfile.read_field("depth")
    bfile.close()
    #in_depth=numpy.ma.filled(in_depth_m,bathy_threshold)

    # Starting point  (State 1 for Atlantic)
    kapref = numpy.ones(plat.shape) * 1.0
    print(kapref.min(), kapref.max())

    # Find regions north of the northern limit (given as lon/lat segment endpoints)
    for segment in range(len(northern_limit_longitudes)):

        ind1 = segment
        ind2 = (segment + 1) % len(northern_limit_longitudes)

        lo1 = northern_limit_longitudes[ind1]
        la1 = northern_limit_latitudes[ind1]
        lo2 = northern_limit_longitudes[ind2]
        la2 = northern_limit_latitudes[ind2]

        tmp1 = numpy.mod(plon + 360 - lo1, 360.)
        tmp2 = numpy.mod(lo2 + 360 - lo1, 360.)
        J = tmp1 <= tmp2
        #print numpy.count_nonzero(J)

        # Linear weights and latitude in selected points
        w2 = tmp1 / tmp2
        w1 = 1. - w2
        la = la2 * w2 + la1 * w1

        kapref[J] = numpy.where(plat[J] > la[J], 2.0, kapref[J])

    import scipy.ndimage
    kapref = scipy.ndimage.gaussian_filter(kapref, sigma=20)

    #print in_depth_m.min(),type(in_depth_m)
    kaprefplot = numpy.ma.masked_where(in_depth_m.mask, kapref)
    figure = matplotlib.pyplot.figure()
    ax = figure.add_subplot(111)
    P = ax.pcolormesh(kaprefplot)
    figure.colorbar(P, ax=ax)
    figure.canvas.print_figure("kapref.png")

    af = abf.AFile(plon.shape[1], plon.shape[0], "tbaric.a", "w")
    hmin, hmax = af.writerecord(kapref, None, record=0)
    af.close()
    bf = open("tbaric.b", "w")
    bf.write("tbaric.b\n")
    bf.write("\n")
    bf.write("\n")
    bf.write("\n")
    bf.write("i/jdm =  %5d %5d\n" % (plon.shape[1], plon.shape[0]))
    bf.write("tbaric: range = %14.6e%14.6e\n" % (hmin, hmax))
    bf.close()
def main(infile, rmu_width, rmu_efold, dpi=180):

    bathy_threshold = 0.  # TODO

    # Read plon,plat
    gfile = abf.ABFileGrid("regional.grid", "r")
    plon = gfile.read_field("plon")
    plat = gfile.read_field("plat")
    gfile.close()

    # Read input bathymetry
    m = re.match("^(.*)(\.[ab])", infile)
    if m: infile = m.group(1)
    bfile = abf.ABFileBathy(infile,
                            "r",
                            idm=gfile.idm,
                            jdm=gfile.jdm,
                            mask=True)
    in_depth_m = bfile.read_field("depth")
    bfile.close()
    in_depth = numpy.ma.filled(in_depth_m, bathy_threshold)

    #print in_depth.min(),in_depth.max()
    ip = ~in_depth_m.mask
    ip = numpy.copy(ip)
    iu = numpy.copy(ip)
    iv = numpy.copy(ip)
    iu[:, 1:] = numpy.logical_and(ip[:, 1:], ip[:, :-1])
    iv[1:, :] = numpy.logical_and(ip[1:, :], ip[:-1, :])

    ifports = []
    ilports = []
    jfports = []
    jlports = []
    kdports = []

    process_south = True
    process_north = True
    process_west = True
    process_east = True

    fatal = False

    rmumask = numpy.zeros(in_depth.shape)
    labels = numpy.zeros(in_depth.shape)

    # Test the ocean mask in the 2nd grid cell from the edge. If ocean, mark it as a nesting boundary.
    # NB: All diagnostic output uses "Fortran" indices (starting from 1) - that's why we add 1
    # here and there when writing to ports.input.

    for kdport in [1, 2, 3, 4]:
        t_ifports, t_ilports, t_jfports, t_jlports, t_kdports, t_labels, = port_setup(
            kdport, in_depth_m)
        labels[t_labels > 0] = t_labels[t_labels > 0] + labels.max()
        ifports.extend(t_ifports)
        ilports.extend(t_ilports)
        jfports.extend(t_jfports)
        jlports.extend(t_jlports)
        kdports.extend(t_kdports)

    # Build mask
    for i in range(len(ifports)):
        rmumask = relaxation_mask(rmumask, ifports[i], ilports[i], jfports[i],
                                  jlports[i], kdports[i], rmu_width)
    #print rmumask.min(),rmumask.max()
    rmumask = numpy.minimum(rmumask, 1.) * 1. / (rmu_efold * 86400.)
    rmumask_m = numpy.ma.masked_where(in_depth_m.mask, rmumask)

    # Check consistency
    fatal = False
    for i in range(len(ifports)):
        fatal = fatal or check_consistency(ifports[i], ilports[i], jfports[i],
                                           jlports[i], kdports[i], iu, iv,
                                           i + 1)

    # Open port output file
    logger.info("Writing to ports.input.tmp")
    fid = open("ports.input.tmp", "w")
    fid.write("%6d  'nports' = Number of ports \n" % len(kdports))
    for i in range(len(kdports)):
        write_port_location(fid, kdports[i], ifports[i] + 1, ilports[i] + 1,
                            jfports[i] + 1, jlports[i] + 1)
    fid.close()

    # Write rmu file
    rmufile = abf.ABFileRmu(
        "rmu",
        "w",
        cline1="Relaxation mask",
        cline2=
        "Relaxation mask created by topo_ports.py. rel zone width=%d, efold time=%d days"
        % (rmu_width, rmu_efold),
        mask=True)
    rmufile.write_field(rmumask, in_depth_m.mask, "rmu")
    rmufile.close()

    # Plot rmu with pcolormesh
    figure = matplotlib.pyplot.figure(figsize=(8, 8))
    ax = figure.add_subplot(111)
    cmap = matplotlib.pyplot.get_cmap("Greys_r")
    cmap2 = matplotlib.pyplot.get_cmap("jet")
    ax.add_patch(
        matplotlib.patches.Rectangle((1, 1),
                                     in_depth.shape[1],
                                     in_depth.shape[0],
                                     color=".5",
                                     alpha=.5))
    P = ax.pcolormesh(in_depth_m, cmap=cmap)
    P = ax.pcolormesh(rmumask_m, cmap=cmap2)
    CB = ax.figure.colorbar(P)
    figure.canvas.print_figure("rmu.png", dpi=dpi)

    # Plot ports with pcolormesh
    figure = matplotlib.pyplot.figure(figsize=(8, 8))
    ax = figure.add_subplot(111)
    cmap = matplotlib.pyplot.get_cmap("Greys_r")
    ax.add_patch(
        matplotlib.patches.Rectangle((1, 1),
                                     in_depth.shape[1],
                                     in_depth.shape[0],
                                     color=".5",
                                     alpha=.5))
    P = ax.pcolormesh(in_depth_m, cmap=cmap)
    I, J = numpy.where(labels > 0)
    S = ax.scatter(J, I, 50, labels[I, J], edgecolor='none')
    CB = ax.figure.colorbar(S)
    ax.set_xlim(0, in_depth.shape[1])
    ax.set_ylim(0, in_depth.shape[0])
    CB.ax.set_title("Port number")
    logger.info("Writing to ports_all.png")
    figure.canvas.print_figure("ports_all.png", dpi=dpi)

    # Port diagnostics plot
    figure2 = matplotlib.pyplot.figure(figsize=(8, 8))
    ax2 = figure2.add_subplot(111)
    cmap = matplotlib.pyplot.get_cmap("Greys_r")
    P = ax2.pcolormesh(in_depth_m,
                       cmap=cmap,
                       edgecolor=".4",
                       alpha=.5,
                       linewidth=.05)
    # NB: ax.hold() was removed in matplotlib 2.x; overplotting is the default, so no call is needed.
    Ps = []
    Ls = []
    for i in range(len(kdports)):

        iwidth = ilports[i] - ifports[i] + 1
        jwidth = jlports[i] - jfports[i] + 1
        #print ifports[i],jfports[i],iwidth,jwidth
        d = 1
        if kdports[i] == 1:
            xy = (ifports[i], jfports[i])
            jwidth = d
            c = "r"
        elif kdports[i] == 2:
            xy = (ifports[i], jfports[i])
            jwidth = d
            c = "g"
        elif kdports[i] == 3:
            xy = (ifports[i], jfports[i])
            iwidth = d
            c = "b"
        elif kdports[i] == 4:
            xy = (ifports[i], jfports[i])
            iwidth = d
            c = "m"

        figure.clf()
        ax = figure.add_subplot(111)
        P = ax.pcolormesh(in_depth_m,
                          cmap=cmap,
                          edgecolor=".4",
                          alpha=.5,
                          linewidth=.05)
        ax.add_patch(
            matplotlib.patches.Rectangle(xy, iwidth, jwidth, color=c,
                                         alpha=.5))
        ax.grid()
        ax.set_xlim(xy[0] - 20, xy[0] + iwidth + 20)
        ax.set_ylim(xy[1] - 20, xy[1] + jwidth + 20)
        ax.set_title("Port number %d - kdport=%d" % (i + 1, kdports[i]))

        R = ax2.add_patch(
            matplotlib.patches.Rectangle(xy, iwidth, jwidth, color=c,
                                         alpha=.5))
        Ps.append(R)
        Ls.append("Port %d" % (i + 1))

        fname = "port_%03d.png" % (i + 1)
        logger.info("Writing Diagnostics to %s" % fname)
        figure.canvas.print_figure(fname, bbox_inches='tight', dpi=dpi)

    fname = "ports_all_2.png"
    logger.info("Writing Diagnostics to %s" % fname)
    ax2.legend(Ps, Ls)
    figure2.canvas.print_figure(fname, bbox_inches='tight', dpi=dpi)

    if fatal:
        logger.error(
            "Errors were encountered - see errors above, and consult diag files. You may need to modify your topo file"
        )
        raise NameError("fatal exit")
    return rmumask, rmumask_m
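The values stored in the rmu file are inverse relaxation time scales: numpy.minimum(rmumask, 1.)/(rmu_efold*86400.) is 1/(e-folding time) in s-1, so a boundary cell where the mask reaches 1 relaxes on exactly rmu_efold days, and the time scale grows as the mask tapers towards the interior. A small numerical check (the linear taper below is only a stand-in for the shape produced by relaxation_mask, and the width/e-folding values are assumptions):

import numpy as np

rmu_width, rmu_efold = 10, 20.0                           # assumed values
taper = np.linspace(1., 0., rmu_width, endpoint=False)    # 1 at the boundary, decaying inwards
rmu = np.minimum(taper, 1.) / (rmu_efold * 86400.)        # [1/s]
print(1. / rmu[0] / 86400.)                               # -> 20.0 days at the outermost cell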
def main(lon1,lat1,lon2,lat2,variable,files,filetype="archive",clim=None,sectionid="",
      ijspace=False,xaxis="distance",section_map=False,dpi=180) :
   TP4Grd='/cluster/work/users/aal069/TP4a0.12/mfile/'
   logger.info("Filetype is %s"% filetype)
   gfile = abf.ABFileGrid("regional.grid","r")
   plon=gfile.read_field("plon")
   plat=gfile.read_field("plat")


   # Set up section info
   if ijspace :
      sec = gridxsec.SectionIJSpace([lon1,lon2],[lat1,lat2],plon,plat)
   else  :
      sec = gridxsec.Section([lon1,lon2],[lat1,lat2],plon,plat)
   I,J=sec.grid_indexes
   dist=sec.distance
   print('dist.shape=',dist.shape)
   slon=sec.longitude
   slat=sec.latitude
   # In testing
   #J,I,slon,slat,case,dist=sec.find_intersection(qlon,qlat)
   #print I,J
   #raise NameError,"test"

   logger.info("Min max I-index (starts from 0):%d %d"%(I.min(),I.max()))
   logger.info("Min max J-index (starts from 0):%d %d"%(J.min(),J.max()))
   #
   #
   if section_map :
      ll_lon=slon.min()-10.
      ur_lon=slon.max()+10.
      ll_lat=np.maximum(-90.,slat.min()-10.)
      ur_lat=np.minimum(90. ,slat.max()+10.)

      proj=ccrs.Stereographic(central_latitude=90.0,central_longitude=-40.0)
      #pxy = proj.transform_points(ccrs.PlateCarree(), plon, plat)
      #px=pxy[:,:,0]
      #py=pxy[:,:,1]
      #x,y=np.meshgrid(np.arange(slon.shape[0]),np.arange(slat.shape[0]))
        
      figure =plt.figure(figsize=(8,8))
      ax=figure.add_subplot(111,projection=proj)
      #ax = plt.axes(projection=ccrs.PlateCarree())
      ax.set_extent([-179, 179, 53, 85],ccrs.PlateCarree())
      #ax = plt.axes(projection=ccrs.Stereographic())
      ax.add_feature(cfeature.GSHHSFeature('auto', edgecolor='grey'))
      ax.add_feature(cfeature.GSHHSFeature('auto', facecolor='grey'))
      ax.gridlines()
      #ax.coastlines(resolution='110m')
      ax.plot(slon,slat,"r-",lw=1,transform=ccrs.PlateCarree())
      
      pos = ax.get_position()
      asp=pos.height/pos.width
      w=figure.get_figwidth()
      h=asp*w
      figure.set_figheight(h)
      if sectionid :
         figure.canvas.print_figure("map_%s.png"%sectionid,dpi=dpi,bbox_inches='tight')
      else :
         figure.canvas.print_figure("map.png",dpi=dpi,bbox_inches='tight')

   # Get layer thickness variable used in hycom
   dpname = modeltools.hycom.layer_thickness_variable[filetype]
   logger.info("Filetype %s: layer thickness variable is %s"%(filetype,dpname))


   if xaxis == "distance" :
      x=dist/1000.
      xlab="Distance along section[km]"
   elif xaxis == "i" :
      x=I
      xlab="i-index"
   elif xaxis == "j" :
      x=J
      xlab="j-index"
   elif xaxis == "lon" :
      x=slon
      xlab="longitude"
   elif xaxis == "lat" :
      x=slat
      xlab="latitude"
   else :
      logger.warning("xaxis must be i,j,lo,lat or distance")
      x=dist/1000.
      xlab="Distance along section[km]"

   # get kdm from the first file:
   # Remove [ab] ending if present
   myfile0=files[0]
   print( 'myfile0', myfile0)

   m=re.match("(.*)\.[ab]",myfile0)
   print('m=',m.group(1))
   if m :
      myfile=m.group(1)
   else :
      myfile=myfile0
   dta_afile = abf.AFile(gfile.idm,gfile.jdm,myfile0,"r")
   #intfl='../../../relax/010/relax_int.a'
   intfl=myfile[:-3] + 'int.a'
   int_afile = abf.AFile(gfile.idm,gfile.jdm,intfl,"r")
   #
   lyr=1
   #record_num,xmn,xmx=dta_afile.get_record_number(variable,lyr)
   #print 'record_num, variable, layer ===', record_num-1, variable, lyr
   #print 'mn,mx=',xmn,xmx
   # for record in record_num :
   record_num=1
   record_var=record_num-1 
   fld = dta_afile.read_record(record_var)
   print('mn,mx  data=',fld.min(),fld.max())
   #np.testing.assert_approx_equal(xmn,fld.min(),significant=8)
   #np.testing.assert_approx_equal(xmx,fld.max(),significant=8)
   # pressure interface
   #record_num,xmn,xmx=int_afile.get_record_number('int',lyr)
   #print 'record_num, variable, layer ===', record_num-1, 'int', lyr
   #print 'mn,mx=',xmn,xmx
   # for record in record_num :
   record_prs=record_num-1 
   fld = int_afile.read_record(record_prs)
   print('mn,mx  intface=',fld.min(),fld.max())
   #np.testing.assert_approx_equal(xmn,fld.min(),significant=8)
   #np.testing.assert_approx_equal(xmx,fld.max(),significant=8)
   #
   #kdm=max(fi_abfile.fieldlevels)
   kdm=50
   # Loop over archive files
   figure = plt.figure()
   ax=figure.add_subplot(111)
   pos = ax.get_position()
   count_sum=0
   intfsec_sum=np.zeros((kdm+1,I.size))
   datasec_sum=np.zeros((kdm+1,I.size))
   #
   for mnth in range(12) :
      count_sum=count_sum+1

      logger.info("Reading data and presure interface for month %s"%(mnth+1))
      record_var_pnt=record_var + mnth*kdm
      record_prs_pnt=record_prs + mnth*kdm
      print('pointing at record num:  record_var_pnt', record_var_pnt)
      print('pointing at record num:  record_prs_pnt', record_prs_pnt)
      # Set up interface and data arrays
      xx=np.zeros((kdm+1,I.size))
      intfsec=np.zeros((kdm+1,I.size))
      datasec=np.zeros((kdm+1,I.size))
      # Loop over layers in file. 
      logger.info("File %s"%(myfile))
      logger.info("intfac_File %s"%(intfl))
      #loop over 50 records
      for k in range(kdm) :
         logger.debug("File %s, layer %03d/%03d"%(myfile,k,kdm))
         r_var=record_var_pnt+k
         r_prs=record_prs_pnt+k
         # Get 2D fields
         dp2d = int_afile.read_record(r_prs)
         data2d=dta_afile.read_record(r_var)
         #print('reading rec num: r_var,r_dp=', r_var, r_prs,' ','data.min,data.max=',data2d.min(),data2d.max())
         print('data: month,layer, range', (mnth+1),'',(r_var)%kdm,data2d.min(),data2d.max())
         #dp2d=i_abfile.read_field(dpname,k+1)
         #data2d=i_abfile.read_field(variable,k+1)
         if ((r_var)%kdm==49):
            print('mn,mx  intface=',dp2d.min(),dp2d.max())
            print('mn,mx  data=',  data2d.min(),data2d.max())
            print( "Reach bottom layer" )
         dp2d=np.ma.filled(dp2d,0.)/modeltools.hycom.onem
         data2d=np.ma.filled(data2d,1e30)

         # Place data into section arrays
         #intfsec[k+1,:] = intfsec[k,:] + dp2d[J,I]
         #print("data2d.shape=",data2d.shape)
         #print("data2d[J,I].size=",data2d[J,I].size)
         intfsec[k+1,:] = dp2d[J,I]
         if k==0 : datasec[k,:] = data2d[J,I]
         datasec[k+1,:] = data2d[J,I]
      

      intfsec_sum=intfsec_sum + intfsec
      datasec_sum=datasec_sum + datasec
      #print 'prs_intafce=', np.transpose(intfsec[:,15]) 
     
   dta_afile.close()
   int_afile.close()
      # end loop over files

   print ('count_sum=',count_sum)
   intfsec_avg=intfsec_sum/count_sum
   datasec_avg=datasec_sum/count_sum
   #
   i_maxd=np.argmax(np.abs(intfsec_avg[kdm,:]))
   #print i_maxd
   for k in range(kdm+1) :
      xx[k,:] = x[:]
   # Set up section plot
   #datasec = np.ma.masked_where(datasec==1e30,datasec)
   datasec_avg = np.ma.masked_where(datasec_avg>0.5*1e30,datasec_avg)
   #print datasec.min(),datasec.max()
   #P=ax.pcolormesh(dist/1000.,-intfsec,datasec)
   #print i_maxd
   for k in range(kdm+1) :
      xx[k,:] = x[:]
   
   if clim is not None :
      lvls = MaxNLocator(nbins=30).tick_values(clim[0], clim[1])
   else :
      lvls = MaxNLocator(nbins=30).tick_values(datasec_avg.min(), datasec_avg.max())
   #print('levels=', lvls)
   mf='sawtooth_0-1.txt'
   LinDic=mod_hyc2plot.cmap_dict(mf)
   my_cmap = matplotlib.colors.LinearSegmentedColormap('my_colormap',LinDic)
   cmap=my_cmap
   #cmap = plt.get_cmap('gist_rainbow_r')
   norm = BoundaryNorm(lvls, ncolors=cmap.N, clip=True)
   print('x.shape=' ,      x.shape)
   print('x.min,xmax=' ,  x.min(),x.max())
   print('xx.shape=' ,      xx.shape)
   print('xx.min,xxmax=' ,  xx.min(),xx.max())
   print('intfsec_avg.shape=', intfsec_avg.shape)
   print('datasec_avg.shape=', datasec_avg.shape)
   #P=ax.pcolormesh(x,-intfsec,datasec,cmap=cmap)
   P=ax.contourf(xx,-intfsec_avg,datasec_avg,extend='both',cmap=cmap,levels=lvls)
   if 'sal' in variable:
      P1=ax.contour(xx,-intfsec_avg,datasec_avg,levels=[32.0,33.0,34.0,35.0,35.5],
          colors=('k',),linestyles=('-',),linewidths=(1.5,))
   else:
      P1=ax.contour(xx,-intfsec_avg,datasec_avg,levels=[-1.0,0.0,2.0],
          colors=('k',),linestyles=('-',),linewidths=(1.5,))
   plt.clabel(P1, fmt = '%2.1d', colors = 'k', fontsize=10) #contour line labels
   # Plot layer interfaces
   for k in range(1,kdm+1) :
      if k%100 == 0 : 
         PL=ax.plot(x,-intfsec_avg[k,:],"-",color="k")
      elif k%5 == 0 and k <= 10: 
         PL=ax.plot(x,-intfsec_avg[k,:],"--",color="k", linewidth=0.5)
         textx = x[i_maxd]
         texty = -0.5*(intfsec_avg[k-1,i_maxd] + intfsec_avg[k,i_maxd])
         ax.text(textx,texty,str(k),verticalalignment="center",horizontalalignment="center",fontsize=6)
      elif k%2 == 0 and k > 10: 
         PL=ax.plot(x,-intfsec_avg[k,:],"--",color="k", linewidth=0.5)
         textx = x[i_maxd]
         texty = -0.5*(intfsec_avg[k-1,i_maxd] + intfsec_avg[k,i_maxd])
         ax.text(textx,texty,str(k),verticalalignment="center",horizontalalignment="center",fontsize=6)
###    else :
###       PL=ax.plot(x,-intfsec_avg[k,:],"-",color=".5")
   # Print figure and remove white space.
   aspect = 50
   pad_fraction = 0.25
   divider = make_axes_locatable(ax)
   width = axes_size.AxesY(ax, aspect=1./aspect)
   pad = axes_size.Fraction(pad_fraction, width)
   cax = divider.append_axes("right", size=width, pad=pad)
   cb=ax.figure.colorbar(P,cax=cax,extend='both')
   #cb=ax.figure.colorbar(P,extend='both')
   if clim is not None : P.set_clim(clim)
   #cb=ax.figure.colorbar(P,extend='both')
   ax.set_title(variable+':'+myfile+' (AVG)')
   ax.set_ylabel('Depth [m]')
   ax.set_xlabel(xlab)
   #ax.set_position(pos)
   #plt.tight_layout()

   # Print in different y-lims 
   suff=os.path.basename(myfile)
   if sectionid : suff=suff+"_"+sectionid
   figure.canvas.print_figure("sec_AVG_%s_full_%s.png"%(variable,suff),dpi=dpi)
   if 'Fram' in sectionid or 'Svin' in sectionid:
      ax.set_ylim(-600,0)
      figure.canvas.print_figure("sec_AVG_%s_600m_%s.png"%(variable,suff),dpi=dpi)
   else :
      ax.set_ylim(-3000,0)
      figure.canvas.print_figure("sec_AVG_%s_3000m_%s.png"%(variable,suff),dpi=dpi)
      #ax.set_ylim(-600,0)
      #figure.canvas.print_figure("sec_AVG_%s_600m_%s.png"%(variable,suff),dpi=dpi)

   # Close input file
   #i_abfile.close()
   #
   ax.clear()
   cb.remove()
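The averaging above simply accumulates interface depths and layer data over all input files and divides by the file count. As a stand-alone illustration of the same idea, here is a minimal NumPy sketch; it is not tied to the abfile API and every name in it is made up:

import numpy as np

def average_sections(section_list, fill=1e30):
    # Average a list of (kdm+1, npts) section arrays over files,
    # masking points flagged with the fill value (as done above).
    arr = np.array(section_list)
    arr = np.ma.masked_where(arr > 0.5 * fill, arr)
    return arr.mean(axis=0)

# toy usage: three fake "files", 4 interfaces, 5 points along the section
sections = [np.random.rand(4, 5) for _ in range(3)]
sections[0][2, 3] = 1e30                   # one undefined point
print(average_sections(sections).shape)    # (4, 5)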
def main(infile,
         blo,
         bla,
         remove_isolated_basins=True,
         remove_one_neighbour_cells=True,
         remove_islets=True,
         remove_inconsistent_nesting_zone=True,
         inbathy=None,
         write_to_file=True,
         bathy_threshold=0.):

    logger.info("Bathy threshold is %12.4f" % bathy_threshold)

    # Read plon,plat
    gfile = abf.ABFileGrid("regional.grid", "r")
    plon = gfile.read_field("plon")
    plat = gfile.read_field("plat")
    gfile.close()

    # Read input bathymetry
    if inbathy is not None:
        in_depth_m = inbathy
    else:
        bfile = abf.ABFileBathy(infile,
                                "r",
                                idm=gfile.idm,
                                jdm=gfile.jdm,
                                mask=True)
        in_depth_m = bfile.read_field("depth")
        #print "in_depth_m type, min, max:",type(in_depth_m),in_depth_m.min(),in_depth_m.max()
        bfile.close()

    # Modify basin
    in_depth = numpy.ma.filled(in_depth_m, bathy_threshold)
    depth = numpy.copy(in_depth)
    logger.info("depth min max: %f8.0 %f8.0" % (depth.min(), depth.max()))
    it = 1
    while it == 1 or numpy.count_nonzero(numpy.abs(depth - depth_old)) > 0:
        depth_old = numpy.copy(depth)
        logger.info("Basin modifications ... pass %d" % (it))
        if remove_isolated_basins:
            depth = modeltools.tools.remove_isolated_basins(
                plon, plat, depth, blo, bla, threshold=bathy_threshold)
        if remove_islets:
            depth = modeltools.tools.remove_islets(depth,
                                                   threshold=bathy_threshold)
        if remove_one_neighbour_cells:
            depth = modeltools.tools.remove_one_neighbour_cells(
                depth, threshold=bathy_threshold)
        if remove_inconsistent_nesting_zone:
            depth = modeltools.tools.remove_inconsistent_nesting_zone(
                depth, threshold=bathy_threshold)
        logger.info("Modified %d points " %
                    numpy.count_nonzero(depth - depth_old))
        it += 1
    logger.info("Modifications finished after %d iterations " % (it - 1))
    w5 = numpy.copy(depth)

    w5[:, 0] = bathy_threshold
    w5[:, -1] = bathy_threshold
    w5[0, :] = bathy_threshold
    w5[-1, :] = bathy_threshold
    #print "w5 type min max",type(w5),w5.min(),w5.max()

    # Mask data where depth below threshold
    w5_m = numpy.ma.masked_where(w5 <= bathy_threshold, w5)

    # Create netcdf file with all  stages for analysis
    logger.info("Writing bathymetry to file bathy_consistency.nc")
    ncid = netCDF4.Dataset("bathy_consistency.nc", "w")
    ncid.createDimension("idm", w5.shape[1])
    ncid.createDimension("jdm", w5.shape[0])
    ncid.createVariable("lon", "f8", ("jdm", "idm"))
    ncid.createVariable("lat", "f8", ("jdm", "idm"))
    ncid.createVariable("old", "f8", ("jdm", "idm"))
    ncid.createVariable("old_masked", "f8", ("jdm", "idm"))
    ncid.createVariable("new", "f8", ("jdm", "idm"))
    ncid.createVariable("new_masked", "f8", ("jdm", "idm"))
    ncid.createVariable("modified", "i4", ("jdm", "idm"))
    ncid.variables["lon"][:] = plon
    ncid.variables["lat"][:] = plat
    ncid.variables["old"][:] = in_depth
    ncid.variables["old_masked"][:] = in_depth_m
    ncid.variables["new"][:] = w5
    ncid.variables["new_masked"][:] = w5_m
    modmask = numpy.abs(in_depth - depth) > .1
    ncid.variables["modified"][:] = modmask.astype("i4")
    ncid.close()

    logger.info("Writing bathymetry plot to file newbathy.png")
    figure = matplotlib.pyplot.figure(figsize=(8, 8))
    ax = figure.add_subplot(111)
    P = ax.pcolormesh(w5_m,
                      norm=matplotlib.colors.LogNorm(vmin=w5_m.min(),
                                                     vmax=w5_m.max()))
    figure.colorbar(P)
    I, J = numpy.where(modmask)
    ax.scatter(J, I, 20, "r")
    figure.canvas.print_figure("newbathy.png")

    # Print to HYCOM and CICE bathymetry files
    if write_to_file:
        abf.write_bathymetry("CONSISTENT", 0, w5, bathy_threshold)

    return w5
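A minimal driver for the consistency routine above might look like the sketch below. It assumes regional.grid and the input bathymetry sit in the working directory; the seed coordinates are hypothetical values that must point into the ocean basin you want to keep:

# Hypothetical driver; adjust the bathymetry name and seed points to your setup.
if __name__ == "__main__":
    seed_lon = [-30.0]   # assumed seed longitude inside the ocean basin
    seed_lat = [70.0]    # assumed seed latitude inside the ocean basin
    new_depth = main("regional.depth", seed_lon, seed_lat,
                     remove_isolated_basins=True,
                     remove_islets=True,
                     write_to_file=False)   # skip writing HYCOM/CICE files while testing
    print("consistent bathymetry, shape:", new_depth.shape)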
Esempio n. 13
0
def main(myfiles,
         fieldname,
         fieldlevel,
         idm=None,
         jdm=None,
         clim=None,
         filetype="archive",
         window=None,
         cmap="jet",
         datetime1=None,
         datetime2=None,
         vector="",
         tokml=False,
         masklim=None,
         filename2='',
         dpi=180):

    cmap = matplotlib.pyplot.get_cmap("jet")

    ab = abf.ABFileGrid("regional.grid", "r")
    plon = ab.read_field("plon")
    plat = ab.read_field("plat")
    scpx = ab.read_field("scpx")
    scpy = ab.read_field("scpy")
    target_lonlats = [plon, plat]
    abdpth = abf.ABFileBathy('regional.depth', "r", idm=ab.idm, jdm=ab.jdm)
    mdpth = abdpth.read_field('depth')
    maskd = mdpth.data
    maskd[maskd > 1e29] = np.nan
    #Region_mask=True
    Region_mask = False
    if Region_mask:
        maskd[plat > 80] = np.nan
        maskd[plat < 50] = np.nan
        maskd[plon > 60] = np.nan
        maskd[plon < -50] = np.nan

    Nordic_mask = maskd

    proj = ccrs.Stereographic(central_latitude=90.0, central_longitude=-40.0)
    pxy = proj.transform_points(ccrs.PlateCarree(), plon, plat)
    px = pxy[:, :, 0]
    py = pxy[:, :, 1]
    x, y = np.meshgrid(np.arange(plon.shape[1]), np.arange(plon.shape[0]))

    if vector:
        logger.info("Vector component 1:%s" % fieldname)
        logger.info("Vector component 2:%s" % vector)

    #---------------first read and compute clim
    Err_map = 1
    freezp = -1.8
    sum_fld1 = maskd
    sum_fld1[~np.isnan(sum_fld1)] = 0.0
    Clim_arr = np.zeros((plon.shape[0], plon.shape[1], 12))
    #---------------
    # compute for TP6 files
    #-----------------------------------------
    #---------------------------------------------------------------------------------
    #---------------------------------------------------------------------------------
    # filename2
    onemm = 9.806
    counter = 0
    file_count = 0
    sum_fld1 = maskd
    sum_fld1[~np.isnan(sum_fld1)] = 0.0
    dt_cnl = np.zeros(len(myfiles))
    diff_dt_cnl = np.zeros(len(myfiles))
    rmse_dt_cnl = np.zeros(len(myfiles))

    Labl1 = "CNTL SST"
    Labl1 = myfiles[0][:28]
    yyyy1 = myfiles[0][-9:-5]
    if "archv." in myfiles[0]:
        yyyy1 = myfiles[0][-13:-9]
    print("myfiles[0]=", myfiles[0])
    print("yyy1=", yyyy1)
    if filename2:
        dt_2 = np.zeros(len(filename2))
        diff_dt_2 = np.zeros(len(filename2))
        rmse_dt_2 = np.zeros(len(filename2))
        tid_2=np.array([datetime.datetime(int(yyyy1), 1, 15) \
           + relativedelta(months=i) for i in range(len(filename2))])
        Labl2 = filename2[0][:28]
        counter = 0
        file_count = 0
        sum_fld1 = maskd
        sum_fld1[~np.isnan(sum_fld1)] = 0.0
        if "srfhgt" in fieldname:
            fieldname = "srfhgt"
        elif "temp" in fieldname:
            fieldname = "temp"
        for fil0 in filename2:
            logger.info("Now processing  %s" % fil0)
            n_intloop,ab,ab2,rdtimes = open_file(fil0,filetype,fieldname,fieldlevel,\
                 datetime1=datetime1,datetime2=datetime2,vector=vector,idm=idm,jdm=jdm)
            # Intloop used to read more fields in one file. Only for forcing for now
            for i_intloop in range(n_intloop):
                # Read ab file of different types
                if filetype == "archive":
                    fld1 = ab.read_field(fieldname, fieldlevel)
                elif filetype == "forcing":
                    fld1 = ab.read_field(fieldname, rdtimes[i_intloop])
                    if vector:
                        fld2 = ab2.read_field(vector, rdtimes[i_intloop])
                    logger.info("Processing time %.2f" % rdtimes[i_intloop])
                else:
                    raise NotImplementedError("Filetype %s not implemented" %
                                              filetype)
                # Create scalar field for vectors
                print('---------mn,mx  data=', fld1.min(), fld1.max())
                #if "srfhgt" in fieldname:
                #   fld1= fld1/9.806
                print("fld1.shpe", fld1.shape)
                print('mn,mx=', fld1.min(), fld1.max(), 'count=', counter)
                dt_2[counter] = np.nanmean(fld1)
                counter = counter + 1
                sum_fld1 = sum_fld1 + fld1
                del fld1
    #---------------------------------------------------------------------------------
    #---------------------------------------------------------------------------------
    base = datetime.datetime(int(yyyy1), 1, 15)
    tid = np.array(
        [base + relativedelta(months=i) for i in range(len(myfiles))])
    if "archv." in myfiles[0]:
        tid = np.array(
            [base + relativedelta(days=i) for i in range(len(myfiles))])
    nmexp = 1
    if filename2:
        nmexp = nmexp + 1
    print(
        'processing data from No runs ==##############>>>>>>>>>>>>>>>>>>>>>>>',
        nmexp)
    whole_domain = True
    whole_domain = False
    #
    counter = 0
    file_count = 0
    sum_fld1 = maskd
    sum_fld1[~np.isnan(sum_fld1)] = 0.0
    logger.info(
        ">>>>>--------------------------Processing the first files=  myfiles")
    for myfile0 in myfiles:
        logger.info("Now processing  %s" % myfile0)
        n_intloop,ab,ab2,rdtimes = open_file(myfile0,filetype,fieldname,fieldlevel,\
             datetime1=datetime1,datetime2=datetime2,vector=vector,idm=idm,jdm=jdm)
        # Intloop used to read more fields in one file. Only for forcing for now
        for i_intloop in range(n_intloop):
            # Read ab file of different types
            if filetype == "archive":
                fld1 = ab.read_field(fieldname, fieldlevel)
                if ('temp' in fieldname) and whole_domain:
                    vert_fld_sum = 0
                    for lvl in range(50):
                        print('lvl=', lvl, fieldlevel)
                        fld_lvl = ab.read_field(fieldname, lvl + 1)
                        vert_fld_sum = vert_fld_sum + np.nanmean(fld_lvl)
                    vert_fld_avg = vert_fld_sum / 50.0

            elif filetype == "forcing":
                fld1 = ab.read_field(fieldname, rdtimes[i_intloop])
                if vector: fld2 = ab2.read_field(vector, rdtimes[i_intloop])
                logger.info("Processing time %.2f" % rdtimes[i_intloop])
            else:
                raise NotImplementedError("Filetype %s not implemented" %
                                          filetype)
            # Create scalar field for vectors
            print('---------mn,mx  data=', fld1.min(), fld1.max())
            #if "srfhgt" in fieldname:
            #   fld1= fld1/9.806
            print("fld1.shpe", fld1.shape)
            print('mn,mx=', fld1.min(), fld1.max(), 'count=', counter)
            if ('temp' in fieldname) and whole_domain:
                dt_cnl[counter] = vert_fld_avg
            else:
                dt_cnl[counter] = np.nanmean(fld1)
            counter = counter + 1
            sum_fld1 = sum_fld1 + fld1
            del fld1
            # End i_intloop
        print('Computing the average of file_counter= ', file_count,
              'counter=', counter)
    #---------------------------------------
    #---------------------------------------
    #plot_climatology
    Clim_arr = np.zeros((plon.shape[0], plon.shape[1], 12))
    if 'tem' in fieldname:
        counter = 0
        rlxfile0 = "/cluster/work/users/achoth/TP5a0.06/relax/050/relax_tem.a"
        rlx_afile = abf.AFile(ab.idm, ab.jdm, rlxfile0, "r")
        lyr = fieldlevel
        record_num = lyr
        record_var = record_num - 1
        fld = rlx_afile.read_record(record_var)
        print('mn,mx  data=', fld.min(), fld.max())
        kdm = 50
        dt_clim = np.zeros(12)
        for mnth in range(12):
            fld1 = rlx_afile.read_record(mnth * kdm + lyr - 1)
            logger.debug("File %s, record_var/mnth*kdm %03d/%03d" %
                         (rlxfile0, record_var, mnth * kdm))
            print('record, mn,mx  data=', kdm * mnth, fld1.min(), fld1.max())
            # Intloop used to read more fields in one file. Only for forcing for now
            dt_clim[mnth] = np.nanmean(fld1)
            #Clim_arr[:,:,mnth]=fld1[:,:]
            counter = counter + 1
            print('counter=', counter)
            del fld1
        #
        tid_clim = np.array(
            [base + relativedelta(months=i) for i in range(12)])
        #figure, ax = matplotlib.pyplot.figure()
        rpt = len(dt_cnl) / 12
        dt_clim_cat = dt_clim
        for ii in range(int(rpt - 1)):
            print("concatenate ")
            dt_clim_cat = np.concatenate([dt_clim_cat, dt_clim])
    #
    #---------------------------------------
    #---------------------------------------
    figure, ax = plt.subplots()
    years = YearLocator()  # every year
    months = MonthLocator()  # every month
    yearsFmt = DateFormatter('%Y')
    #ax=figure.add_subplot(111)
    nplts = 1
    ax.plot_date(tid, dt_cnl, '-o', color='g', ms=3, label=Labl1)
    if 'tem' in fieldname:
        ax.plot_date(tid[0:len(dt_cnl)],
                     dt_clim_cat[:],
                     ':',
                     color='black',
                     label='Phc-Clim.')
    if filename2:
        ax.plot_date(tid_2, dt_2, '-v', color='orange', ms=3, label=Labl2)
    ax.xaxis.set_major_locator(years)
    ax.xaxis.set_major_formatter(yearsFmt)
    ax.xaxis.set_minor_locator(months)
    ax.autoscale_view()

    # format the coords message box
    def price(x):
        return '$%1.2f' % x

    ax.fmt_xdata = DateFormatter('%Y-%m-%d')
    ax.fmt_ydata = price
    ax.grid(True)
    figure.autofmt_xdate()
    legend = plt.legend(loc='upper left', fontsize=8)
    plt.title("Area-averaged: %s(%d)" % (fieldname, fieldlevel))
    plt.ylabel("%s(%d)" % (fieldname, fieldlevel))

    if "k.e" in fieldname:
        fieldname = "KE"
    if "u-vel" in fieldname:
        fieldname = "u-vel"
    ts_fil = "Time_series_cntl%s_%02d_%02d" % (fieldname, fieldlevel, counter)
    if Region_mask:
        ts_fil = 'Region_' + ts_fil
    figure.canvas.print_figure(ts_fil, bbox_inches='tight', dpi=dpi)
    logger.info("Successfull printing:  %s" % ts_fil)
Esempio n. 14
0
def main(myfiles,
         fieldname,
         fieldlevel,
         idm=None,
         jdm=None,
         clim=None,
         filetype="archive",
         window=None,
         cmap="jet",
         datetime1=None,
         datetime2=None,
         vector="",
         tokml=False,
         exptid="",
         masklim=None,
         dpi=180):

    cmap = plt.get_cmap("jet")
    LinDic = mod_hyc2plot.cmap_dict('sawtooth_fc100.txt')
    my_cmap = matplotlib.colors.LinearSegmentedColormap('my_colormap', LinDic)
    cmap = my_cmap

    if vector:
        cmap = cmocean.cm.speed

    if tokml:
        ab = abf.ABFileGrid("regional.grid", "r")
        plon = ab.read_field("plon")
        plat = ab.read_field("plat")
        ab.close()

    ab = abf.ABFileGrid("regional.grid", "r")
    plon = ab.read_field("plon")
    plat = ab.read_field("plat")
    ab.close()

    proj = ccrs.Stereographic(central_latitude=90.0, central_longitude=-40.0)
    pxy = proj.transform_points(ccrs.PlateCarree(), plon, plat)
    px = pxy[:, :, 0]
    py = pxy[:, :, 1]
    x, y = np.meshgrid(np.arange(plon.shape[1]), np.arange(plon.shape[0]))

    if vector:
        logger.info("Vector component 1:%s" % fieldname)
        logger.info("Vector component 2:%s" % vector)

    figure = plt.figure(figsize=(8, 8))
    ax = figure.add_subplot(111)
    ax.set_facecolor('xkcd:gray')
    onemm = 9.806
    counter = 0
    sum_fld = np.zeros(plon.shape)
    sumf1 = np.zeros(plon.shape)
    sumf2 = np.zeros(plon.shape)
    file_count = 0
    for myfile0 in myfiles:

        # Open files, and return some useful stuff.
        # ab2 is used in case of vector
        # rdtimes is used for plotting forcing fields
        n_intloop, ab, ab2, rdtimes = open_file(myfile0,
                                                filetype,
                                                fieldname,
                                                fieldlevel,
                                                datetime1=datetime1,
                                                datetime2=datetime2,
                                                vector=vector,
                                                idm=idm,
                                                jdm=jdm)
        # Intloop used to read more fields in one file. Only for forcing for now
        for i_intloop in range(n_intloop):

            # Read ab file of different types
            if filetype == "archive":
                fld1 = ab.read_field(fieldname, fieldlevel)
                if vector: fld2 = ab.read_field(vector, fieldlevel)
            elif filetype == "regional.depth":
                fld1 = ab.read_field(fieldname)
            elif filetype == "forcing":
                fld1 = ab.read_field(fieldname, rdtimes[i_intloop])
                if vector: fld2 = ab2.read_field(vector, rdtimes[i_intloop])
                logger.info("Processing time %.2f" % rdtimes[i_intloop])
            else:
                raise NotImplementedError("Filetype %s not implemented" %
                                          filetype)

            if not window:
                J, I = np.meshgrid(np.arange(fld1.shape[0]),
                                   np.arange(fld1.shape[1]))
            else:
                J, I = np.meshgrid(np.arange(window[1], window[3]),
                                   np.arange(window[0], window[2]))

            print('mnfld1,mxfld1=', fld1.min(), fld1.max())
            # Create scalar field for vectors
            if vector:
                fld = np.sqrt(fld1**2 + fld2**2)
                print('mnfld2,mxfld2=', fld2.min(), fld2.max())
            else:
                fld = fld1

            # Apply mask if requested
            if masklim:
                fld = np.ma.masked_where(fld <= masklim[0], fld)
                fld = np.ma.masked_where(fld >= masklim[1], fld)

            sum_fld = sum_fld + fld
            counter = counter + 1
            if vector:
                sumf1 = sumf1 + fld1
                sumf2 = sumf2 + fld2
        file_count = file_count + 1
        # End i_intloop
    print('Computing the average of file_counter= ', file_count, 'counter=',
          counter)
    if file_count > 0:
        fld_Avg = sum_fld / file_count
        print('mn_Avg_fld,mx_Avg_fld=', fld_Avg.min(), fld_Avg.max())
    if vector:
        f1_avg = sumf1 / file_count
        f2_avg = sumf2 / file_count
        print('mn_avg_fld1,mx_avg_fld1=', f1_avg.min(), f1_avg.max())
        print('mn_avg_fld2,mx_avg_fld2=', f2_avg.min(), f2_avg.max())

    if fieldname == 'k.e.':
        P = plt.pcolormesh(x[J, I],
                           y[J, I],
                           np.log10(fld_Avg[J, I]),
                           cmap=cmap,
                           shading='auto')
    elif fieldname == 'srfhgt':
        P = plt.pcolormesh(x[J, I],
                           y[J, I], (fld_Avg[J, I] / onemm),
                           cmap=cmap,
                           shading='auto')
    else:
        P = plt.pcolormesh(x[J, I],
                           y[J, I],
                           fld_Avg[J, I],
                           cmap=cmap,
                           shading='auto')

    if 'temp' in fieldname:
        P1=plt.contour(x[J,I],y[J,I],fld_Avg[J,I],levels=[-1.,1,4.0,8], \
                   colors=('w',),linestyles=('-',),linewidths=(1.5,))
        plt.clabel(P1, fmt='%2.1d', colors='w',
                   fontsize=10)  #contour line labels

    if vector:
        skip = 10
        logger.info("ploting quiver .......>>> %s" % vector)
        I2 = I[::skip, ::skip]
        J2 = J[::skip, ::skip]
        plt.quiver(x[J2, I2], y[J2, I2], f1_avg[J2, I2], f2_avg[J2, I2])

    ##
    # Print figure and remove white space.
    ax.set_facecolor('xkcd:gray')
    aspect = 40
    pad_fraction = 0.25
    divider = make_axes_locatable(ax)
    width = axes_size.AxesY(ax, aspect=1. / aspect)
    pad = axes_size.Fraction(pad_fraction, width)
    cax = divider.append_axes("right", size=width, pad=pad)
    if vector:
        cb = ax.figure.colorbar(P, cax=cax, extend='max')
    else:
        cb = ax.figure.colorbar(P, cax=cax, extend='both')
    if clim is not None: P.set_clim(clim)
    ax.set_title('Avg ' + "%s:%s(%d)" % (myfile0, fieldname, fieldlevel))
    # Print figure.
    fnamepng_template = exptid + "Avg_%s_%d_%03d_Avg.png"
    fnamepng = fnamepng_template % (fieldname, fieldlevel, counter)
    logger.info("output in  %s" % fnamepng)
    figure.canvas.print_figure(fnamepng, bbox_inches='tight', dpi=dpi)
    ax.clear()
    cb.remove()
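For reference, the vector branch above thins the velocity field by keeping every skip-th point before calling quiver. The same subsampling in isolation, with random arrays standing in for the averaged HYCOM components:

import numpy as np
import matplotlib.pyplot as plt

ny, nx, skip = 100, 120, 10
x, y = np.meshgrid(np.arange(nx), np.arange(ny))
u = np.random.randn(ny, nx)      # stand-in for the averaged u component
v = np.random.randn(ny, nx)      # stand-in for the averaged v component
speed = np.sqrt(u**2 + v**2)     # scalar field, as in the vector branch above

fig, ax = plt.subplots()
P = ax.pcolormesh(x, y, speed, shading="auto")
ax.quiver(x[::skip, ::skip], y[::skip, ::skip],
          u[::skip, ::skip], v[::skip, ::skip])
fig.colorbar(P, ax=ax)
fig.savefig("quiver_demo.png", dpi=90)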
def main(myfiles,
         fieldname,
         fieldlevel,
         idm=None,
         jdm=None,
         clim=None,
         filetype="archive",
         window=None,
         cmap="jet",
         datetime1=None,
         datetime2=None,
         vector="",
         tokml=False,
         masklim=None,
         filename2='',
         filename5='',
         dpi=180):

    LinDic = mod_hyc2plot.cmap_dict('sawtooth_fc100.txt')
    cmap = matplotlib.colors.LinearSegmentedColormap('my_colormap', LinDic)
    if tokml:
        ab = abf.ABFileGrid("regional.grid", "r")
        plon = ab.read_field("plon")
        plat = ab.read_field("plat")
        ab.close()

    ab = abf.ABFileGrid("regional.grid", "r")
    plon = ab.read_field("plon")
    plat = ab.read_field("plat")
    scpx = ab.read_field("scpx")
    scpy = ab.read_field("scpy")
    target_lonlats = [plon, plat]
    abdpth = abf.ABFileBathy('regional.depth', "r", idm=ab.idm, jdm=ab.jdm)
    mdpth = abdpth.read_field('depth')
    maskd = mdpth.data
    maskd[maskd > 1e29] = np.nan
    Region_mask = True
    Region_mask = False
    if Region_mask:
        maskd[plat > 70] = np.nan
        #maskd[plat<50]=np.nan
        maskd[plon > 20] = np.nan
        maskd[plon < -30] = np.nan

    Nordic_mask = maskd

    proj = ccrs.Stereographic(central_latitude=90.0, central_longitude=-40.0)
    pxy = proj.transform_points(ccrs.PlateCarree(), plon, plat)
    px = pxy[:, :, 0]
    py = pxy[:, :, 1]
    x, y = np.meshgrid(np.arange(plon.shape[1]), np.arange(plon.shape[0]))

    if vector:
        logger.info("Vector component 1:%s" % fieldname)
        logger.info("Vector component 2:%s" % vector)

    #---------------first read and compute clim
    Err_map = 1
    sum_fld1 = maskd
    sum_fld1[~np.isnan(sum_fld1)] = 0.0
    Clim_arr = np.zeros((plon.shape[0], plon.shape[1], 12))
    # Climatology fields are only available for temperature and salinity
    if 'tem' in fieldname or 'sal' in fieldname:
        counter = 0
        if 'tem' in fieldname:
            rlxfile0 = "/cluster/work/users/achoth/TP5a0.06/relax/050/relax_tem.a"
        if 'sal' in fieldname:
            rlxfile0 = "/cluster/work/users/achoth/TP5a0.06/relax/050/relax_sal.a"
        rlx_afile = abf.AFile(ab.idm, ab.jdm, rlxfile0, "r")
        lyr = fieldlevel
        record_num = 1
        record_var = record_num - 1
        fld = rlx_afile.read_record(record_var)
        print('mn,mx  data=', fld.min(), fld.max())
        kdm = 50
        dt_clim = np.zeros(12)
        for mnth in range(12):
            fld1 = rlx_afile.read_record(mnth * kdm + lyr - 1)
            print('record, mn,mx  data=', kdm * mnth, fld1.min(), fld1.max())
            # Intloop used to read more fields in one file. Only for forcing for now
            dt_clim[mnth] = mod_hyc2plot.spatiomean(fld1, maskd)
            sum_fld1 = sum_fld1 + fld1
            Clim_arr[:, :, mnth] = fld1[:, :]
            counter = counter + 1
            print('counter=', counter)
            del fld1
        Clim_Avg = sum_fld1 / counter
        del sum_fld1

    #---------------filename
    figure = matplotlib.pyplot.figure(figsize=(8, 8))
    ax = figure.add_subplot(111)
    onemm = 9.806
    counter = 0
    file_count = 0
    sum_fld1 = maskd
    sum_fld1[~np.isnan(sum_fld1)] = 0.0
    dt_cnl = np.zeros(len(myfiles))
    diff_dt_cnl = np.zeros(len(myfiles))
    rmse_dt_cnl = np.zeros(len(myfiles))
    Labl1 = "Model: " + fieldname
    if "SPRBAS_0" in myfiles[0]:
        Labl1 = "CNTL: prsbas=0 "
    if filename2:
        dt_2 = np.zeros(len(filename2))
        diff_dt_2 = np.zeros(len(filename2))
        rmse_dt_2 = np.zeros(len(filename2))
        yyyy1 = filename2[0][-9:-5]
        print("filename2[0]="), filename2[0][-9:-5]
        print("filename2[0]="), filename2[0]
        print("yyy1="), yyyy1
        tid_2=np.array([datetime.datetime(int(yyyy1), 1, 15) \
           + relativedelta(months=i) for i in range(len(filename2))])
        Labl2 = "filename2"
        Labl2 = "Corrected"
        if "erai" in filename2[0]:
            Labl2 = "CNTL: prsbas=1e5 "

    yyyy1cnt = myfiles[0][-9:-5]
    print("myfiles[0][-9:-5]=", myfiles[0][-9:-5])
    print("myfiles[0]=", myfiles[0])
    print("yyyy1cnt=", yyyy1cnt)
    base = datetime.datetime(int(yyyy1cnt), 1, 15)
    tid = np.array(
        [base + relativedelta(months=i) for i in range(len(myfiles))])

    nmexp = 1
    if filename2:
        nmexp = nmexp + 1
    print(
        'processing data from No runs ==##############>>>>>>>>>>>>>>>>>>>>>>>',
        nmexp)
    for iii in range(nmexp):
        counter = 0
        file_count = 0
        sum_fld1 = maskd
        sum_fld1[~np.isnan(sum_fld1)] = 0.0
        if iii == 1 and filename2:
            myfiles = filename2
        logger.info(
            ">>>>>--------------------------Processing file set %d<<<<" % iii)
        for myfile0 in myfiles:
            # Open files, and return some useful stuff.
            # ab2 is used in case of vector
            # rdtimes is used for plotting forcing fields
            n_intloop,ab,ab2,rdtimes = open_file(myfile0,filetype,fieldname,fieldlevel,\
                  datetime1=datetime1,datetime2=datetime2,vector=vector,idm=idm,jdm=jdm)
            # Intloop used to read more fields in one file. Only for forcing for now
            for i_intloop in range(n_intloop):
                # Read ab file of different types
                if filetype == "archive":
                    fld1 = ab.read_field(fieldname, fieldlevel)
                elif filetype == "forcing":
                    fld1 = ab.read_field(fieldname, rdtimes[i_intloop])
                    if vector:
                        fld2 = ab2.read_field(vector, rdtimes[i_intloop])
                    logger.info("Processing time %.2f" % rdtimes[i_intloop])
                else:
                    raise NotImplementedError("Filetype %s not implemented" %
                                              filetype)
                if not window:
                    J, I = np.meshgrid(np.arange(fld1.shape[0]),
                                       np.arange(fld1.shape[1]))
                else:
                    J, I = np.meshgrid(np.arange(window[1], window[3]),
                                       np.arange(window[0], window[2]))
                # Create scalar field for vectors
                if vector:
                    fld = np.sqrt(fld1**2 + fld2**2)
                else:
                    fld = fld1
                print('---------mn,mx  data=', fld.min(), fld.max())
                sum_fld1 = sum_fld1 + fld
                cindx = np.remainder(counter, 12)
                print("counter"), counter, print("cindx="), cindx
                if iii == 0:
                    dt_cnl[counter] = mod_hyc2plot.spatiomean(fld, Nordic_mask)
                    diff_dt_cnl[counter] = mod_hyc2plot.spatiomean(
                        fld[:, :] - Clim_arr[:, :, cindx], Nordic_mask)
                    rmse_dt_cnl[counter] = np.sqrt(
                        mod_hyc2plot.spatiomean(
                            (fld[:, :] - Clim_arr[:, :, cindx])**2,
                            Nordic_mask))
                    Labl = Labl1
                if iii == 1 and filename2:
                    dt_2[counter] = mod_hyc2plot.spatiomean(fld, Nordic_mask)
                    diff_dt_2[counter] = mod_hyc2plot.spatiomean(
                        fld[:, :] - Clim_arr[:, :, cindx], Nordic_mask)
                    rmse_dt_2[counter] = np.sqrt(
                        mod_hyc2plot.spatiomean(
                            (fld[:, :] - Clim_arr[:, :, cindx])**2,
                            Nordic_mask))
                    Labl = Labl2
                # Apply mask if requested
                counter = counter + 1
                file_count = file_count + 1
                del fld
            # End i_intloop
        print('Computing the average of file_counter=', file_count,
              'counter=', counter, 'cindx=', cindx)
        if file_count > 0:
            fld_Avg = sum_fld1 / file_count
        if Err_map:
            cmap = cmocean.cm.balance
            fld_diff = fld_Avg - Clim_Avg

        if fieldname == 'k.e.':
            P = ax.pcolormesh(x[J, I],
                              y[J, I],
                              np.log10(fld_Avg[J, I]),
                              cmap=cmap)
        elif fieldname == 'srfhgt':
            P = ax.pcolormesh(x[J, I],
                              y[J, I], (fld_Avg[J, I] / onemm),
                              cmap=cmap)
        else:
            P = ax.pcolormesh(x[J, I], y[J, I], fld_diff[J, I], cmap=cmap)
        if 'temp' in fieldname:
            P1 = ax.contour(x[J, I],
                            y[J, I],
                            fld_diff[J, I],
                            levels=[-1., 1, 4.0, 8],
                            colors=('w', ),
                            linestyles=('-', ),
                            linewidths=(1.5, ))
            matplotlib.pyplot.clabel(P1, fmt='%2.1d', colors='w',
                                     fontsize=10)  #contour line labels

        # Print figure.
        aspect = 40
        pad_fraction = 0.25
        divider = make_axes_locatable(ax)
        width = axes_size.AxesY(ax, aspect=1. / aspect)
        pad = axes_size.Fraction(pad_fraction, width)
        cax = divider.append_axes("right", size=width, pad=pad)
        cb = ax.figure.colorbar(P, cax=cax, extend='both')
        if clim is not None: P.set_clim(clim)
        ax.set_title("Diff:%s(%d)" % (fieldname, fieldlevel) +
                     ' :( Model - Clim )')
        # Print figure.
        fnamepng_template = myfiles[0][-20:-5].replace(
            "/", '') + "_Avg_TP6_%s_%d_%03d_iii%03d_Avg.png"
        if Region_mask:
            fnamepng_template='Region_'+yyyy1cnt+myfiles[0][1:11].replace("/",'') \
                  +"Avg_TP5_%s_%d_%03d_iii%03d_Avg.png"
        fnamepng = fnamepng_template % (fieldname, fieldlevel, counter, iii)
        logger.info("output in  %s" % fnamepng)
        figure.canvas.print_figure(fnamepng, bbox_inches='tight', dpi=dpi)
        ax.clear()
        cb.remove()
        datmen = np.nanmean(fld_diff)
        spatiodatmen = mod_hyc2plot.spatiomean(fld_diff, Nordic_mask)
        print('-----------mean diff data, spatio=', datmen, spatiodatmen)
        del sum_fld1
        #---------------------------------------

    print('tid len=', tid.shape)
    if filename2:
        print('dt_2 shape=', dt_2.shape)
    tid_clim = np.array([base + relativedelta(months=i) for i in range(12)])
    figure, ax = plt.subplots()
    rpt = len(dt_cnl) / 12
    dt_clim_cat = dt_clim
    for ii in range(int(rpt - 1)):
        print("concatenate ")
        dt_clim_cat = np.concatenate([dt_clim_cat, dt_clim])

    years = YearLocator()  # every year
    months = MonthLocator()  # every month
    yearsFmt = DateFormatter('%Y')
    nplts = 1
    ax.plot_date(tid, dt_cnl, '-o', color='g', ms=3, label=Labl1)
    if filename2:
        ax.plot_date(tid_2, dt_2, '-v', color='orange', ms=3, label=Labl2)
        nplts = nplts + 1
    if filename5:
        # Note: tid_5, dt_5 and Labl5 are expected to be prepared elsewhere;
        # this branch is not populated in the code above.
        ax.plot_date(tid_5, dt_5, '--', color='m', label=Labl5)
        nplts = nplts + 1
    if 'tem' in fieldname or 'sal' in fieldname:
        ax.plot_date(tid[0:len(dt_cnl)],
                     dt_clim_cat[:],
                     ':',
                     color='black',
                     label='Clim:' + fieldname)
        nplts = nplts + 1
    ax.xaxis.set_major_locator(years)
    ax.xaxis.set_major_formatter(yearsFmt)
    ax.xaxis.set_minor_locator(months)
    ax.autoscale_view()

    # format the coords message box
    def price(x):
        return '$%1.2f' % x

    ax.fmt_xdata = DateFormatter('%Y-%m-%d')
    ax.fmt_ydata = price
    ax.grid(True)
    figure.autofmt_xdate()
    legend = plt.legend(loc='upper left', fontsize=8)
    plt.title("Area-averaged: %s(%d)" % (fieldname, fieldlevel))
    plt.ylabel("%s(%d)" % (fieldname, fieldlevel))
    ts_fil = "time_series_cntl_flx_%s_%02d_%02d" % (fieldname, fieldlevel,
                                                    nplts)
    if Region_mask:
        ts_fil = 'Region_' + ts_fil
    figure.canvas.print_figure(ts_fil, bbox_inches='tight', dpi=dpi)
    logger.info("Successfull printing:  %s" % ts_fil)

    #-----------------
    # plot short  mean error
    figure, ax = plt.subplots()
    print("diff_dt_cnl[:]="), diff_dt_cnl[:]
    nplts = 1
    ll = -1 * len(tid)
    if filename2:
        ll = -1 * len(tid_2)
    ax.plot_date(tid[ll:], diff_dt_cnl[ll:], '-o', color='g', ms=3)
    if filename2:
        ax.plot_date(tid_2, diff_dt_2, '-v', color='orange', ms=3, label=Labl2)
        nplts = nplts + 1

    ax.xaxis.set_major_locator(years)
    ax.xaxis.set_major_formatter(yearsFmt)
    ax.xaxis.set_minor_locator(months)
    ax.autoscale_view()

    # format the coords message box
    def price(x):
        return '$%1.2f' % x

    ax.fmt_xdata = DateFormatter('%Y-%m-%d')
    ax.fmt_ydata = price
    ax.grid(True)
    figure.autofmt_xdate()
    legend = plt.legend(loc='upper left', fontsize=8)
    plt.title("Mean diff:Model-Clim: %s(%d)" % (fieldname, fieldlevel))
    plt.ylabel("diff:%s(%d)" % (fieldname, fieldlevel))
    ts_fil = 'Mdiff' + "ST_cntl_flx_%s_%02d_%02d" % (fieldname, fieldlevel,
                                                     nplts)
    if Region_mask:
        ts_fil = 'Region_Mdiff' + "ST_cntl_flx_%s_%02d_%02d" % (
            fieldname, fieldlevel, nplts)
    figure.canvas.print_figure(ts_fil, bbox_inches='tight', dpi=dpi)
    logger.info("Successfull printing:  %s" % ts_fil)

    # plot rooot mean square RMSE  error
    figure, ax = plt.subplots()
    nplts = 1
    ll = -1 * len(tid)
    if filename2:
        ll = -1 * len(tid_2)
    ax.plot_date(tid[ll:], rmse_dt_cnl[ll:], '-o', color='g', ms=3)
    if filename2:
        ax.plot_date(tid_2, rmse_dt_2, '-v', color='orange', ms=3, label=Labl2)
        nplts = nplts + 1

    ax.xaxis.set_major_locator(years)
    ax.xaxis.set_major_formatter(yearsFmt)
    ax.xaxis.set_minor_locator(months)
    ax.autoscale_view()

    # format the coords message box
    def price(x):
        return '$%1.2f' % x

    ax.fmt_xdata = DateFormatter('%Y-%m-%d')
    ax.fmt_ydata = price
    ax.grid(True)
    figure.autofmt_xdate()
    legend = plt.legend(loc='upper left', fontsize=8)
    plt.title("RMSE: (Model-Clim) %s(%d)" % (fieldname, fieldlevel))
    plt.ylabel("RMSE:%s(%d)" % (fieldname, fieldlevel))
    ts_fil = 'RMSE' + "ST_cntl_flx_%s_%02d_%02d" % (fieldname, fieldlevel,
                                                    nplts)
    if Region_mask:
        ts_fil = 'Region_RMSE' + "ST2007_cntl_flx_%s_%02d_%02d" % (
            fieldname, fieldlevel, nplts)
    figure.canvas.print_figure(ts_fil, bbox_inches='tight', dpi=dpi)
    logger.info("Successfull printing:  %s" % ts_fil)
    logger.info(
        "End --------------------------------------------------------------- printing:  %s"
        % ts_fil)
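The RMSE curves above are area-weighted means of the squared model-minus-climatology field, with grid-cell areas scpx*scpy as weights and NaNs marking excluded points; in the code that weighting is done by mod_hyc2plot.spatiomean. A stand-alone sketch of the same computation:

import numpy as np

def area_weighted_rmse(model, clim, scpx, scpy, mask):
    # Area-weighted RMSE of (model - clim) over the unmasked region;
    # NaNs in `mask` exclude grid points, scpx*scpy are cell areas.
    w = scpx * scpy
    diff2 = (model - clim) ** 2
    diff2 = np.where(np.isnan(mask), np.nan, diff2)
    w = np.where(np.isnan(mask), np.nan, w)
    return np.sqrt(np.nansum(diff2 * w) / np.nansum(w))

# toy usage on a 3x3 grid with one masked cell
model = np.arange(9, dtype=float).reshape(3, 3)
clim = np.ones((3, 3))
scpx = scpy = np.full((3, 3), 2.0)
mask = np.zeros((3, 3))
mask[0, 0] = np.nan
print(area_weighted_rmse(model, clim, scpx, scpy, mask))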
def main(lon1,
         lat1,
         lon2,
         lat2,
         variable,
         files,
         filetype="archive",
         clim=None,
         sectionid="",
         ijspace=False,
         xaxis="distance",
         section_map=False,
         dpi=180):

    logger.info("Filetype is %s" % filetype)
    gfile = abf.ABFileGrid("regional.grid", "r")
    plon = gfile.read_field("plon")
    plat = gfile.read_field("plat")

    # Set up section info
    if ijspace:
        sec = gridxsec.SectionIJSpace([lon1, lon2], [lat1, lat2], plon, plat)
    else:
        sec = gridxsec.Section([lon1, lon2], [lat1, lat2], plon, plat)
    I, J = sec.grid_indexes
    dist = sec.distance
    slon = sec.longitude
    slat = sec.latitude
    print(slon.shape)
    print(slat.shape)

    logger.info("Min max I-index (starts from 0):%d %d" % (I.min(), I.max()))
    logger.info("Min max J-index (starts from 0):%d %d" % (J.min(), J.max()))

    if section_map:
        ll_lon = slon.min() - 10.
        ur_lon = slon.max() + 10.
        ll_lat = np.maximum(-90., slat.min() - 10.)
        ur_lat = np.minimum(90., slat.max() + 10.)

        proj = ccrs.Stereographic(central_latitude=90.0,
                                  central_longitude=-40.0)
        pxy = proj.transform_points(ccrs.PlateCarree(), plon, plat)
        px = pxy[:, :, 0]
        py = pxy[:, :, 1]
        x, y = np.meshgrid(np.arange(slon.shape[0]), np.arange(slat.shape[0]))

        figure = plt.figure(figsize=(10, 8))
        ax = figure.add_subplot(111)

        ax = plt.axes(projection=ccrs.PlateCarree())
        ax.set_extent([-179, 179, 53, 85], ccrs.PlateCarree())
        ax.add_feature(cfeature.GSHHSFeature('auto', edgecolor='grey'))
        ax.add_feature(cfeature.GSHHSFeature('auto', facecolor='grey'))
        ax.gridlines()
        ax.plot(slon, slat, "r-", lw=1)

        pos = ax.get_position()
        asp = pos.height / pos.width
        w = figure.get_figwidth()
        h = asp * w
        figure.set_figheight(h)
        if sectionid:
            figure.canvas.print_figure("map_%s.png" % sectionid, dpi=dpi)
        else:
            figure.canvas.print_figure("map.png", dpi=dpi)

    # Get layer thickness variable used in hycom
    dpname = modeltools.hycom.layer_thickness_variable[filetype]
    logger.info("Filetype %s: layer thickness variable is %s" %
                (filetype, dpname))

    if xaxis == "distance":
        x = dist / 1000.
        xlab = "Distance along section[km]"
    elif xaxis == "i":
        x = I
        xlab = "i-index"
    elif xaxis == "j":
        x = J
        xlab = "j-index"
    elif xaxis == "lon":
        x = slon
        xlab = "longitude"
    elif xaxis == "lat":
        x = slat
        xlab = "latitude"
    else:
        logger.warning("xaxis must be i,j,lo,lat or distance")
        x = dist / 1000.
        xlab = "Distance along section[km]"

    # Loop over archive files
    figure = plt.figure()
    ax = figure.add_subplot(111)
    pos = ax.get_position()
    for fcnt, myfile0 in enumerate(files):

        # Remove [ab] ending if present
        m = re.match(r"(.*)\.[ab]", myfile0)
        if m:
            myfile = m.group(1)
        else:
            myfile = myfile0

        # Add more filetypes if needed. By def we assume archive
        if filetype == "archive":
            i_abfile = abf.ABFileArchv(myfile, "r")
        elif filetype == "restart":
            i_abfile = abf.ABFileRestart(myfile,
                                         "r",
                                         idm=gfile.idm,
                                         jdm=gfile.jdm)
        else:
            raise NotImplementedError("Filetype %s not implemented" % filetype)

        # kdm assumed to be max level in ab file
        kdm = max(i_abfile.fieldlevels)

        # Set up interface and data arrays
        intfsec = np.zeros((kdm + 1, I.size))
        datasec = np.zeros((kdm + 1, I.size))

        # Loop over layers in file.
        logger.info("File %s" % (myfile))
        for k in range(kdm):
            logger.debug("File %s, layer %03d/%03d" % (myfile, k, kdm))

            # Get 2D fields
            dp2d = i_abfile.read_field(dpname, k + 1)
            data2d = i_abfile.read_field(variable, k + 1)
            dp2d = np.ma.filled(dp2d, 0.) / modeltools.hycom.onem
            data2d = np.ma.filled(data2d, 1e30)

            # Place data into section arrays
            intfsec[k + 1, :] = intfsec[k, :] + dp2d[J, I]
            if k == 0: datasec[k, :] = data2d[J, I]
            datasec[k + 1, :] = data2d[J, I]

        i_maxd = np.argmax(np.abs(intfsec[kdm, :]))

        # Set up section plot
        datasec = np.ma.masked_where(datasec > 0.5 * 1e30, datasec)
        P = plt.pcolormesh(x, -intfsec, datasec, cmap="jet", shading='auto')
        if clim is not None: P.set_clim(clim)

        # Plot layer interfaces
        for k in range(1, kdm + 1):
            if k % 10 == 0:
                PL = ax.plot(x, -intfsec[k, :], "--", color="k", lw=.5)
            elif k % 5 == 0:
                PL = ax.plot(x, -intfsec[k, :], "--", color="k", lw=.5)
            else:
                PL = ax.plot(x, -intfsec[k, :], "--", color=".5", lw=.5)

            textx = x[i_maxd]
            texty = -0.5 * (intfsec[k - 1, i_maxd] + intfsec[k, i_maxd])
            ax.text(textx,
                    texty,
                    str(k),
                    verticalalignment="center",
                    horizontalalignment="center",
                    fontsize=6)
        cb = ax.figure.colorbar(P)
        ax.set_title(myfile)
        ax.set_ylabel(variable)
        ax.set_xlabel(xlab)

        # Print in different y-lims
        suff = os.path.basename(myfile)
        if sectionid: suff = suff + "_" + sectionid
        figure.canvas.print_figure("sec_%s_full_%s.png" % (variable, suff),
                                   dpi=dpi)
        ax.set_ylim(-1000, 0)
        figure.canvas.print_figure("sec_%s_1000m_%s.png" % (variable, suff),
                                   dpi=dpi)
        ax.set_ylim(-300, 0)
        figure.canvas.print_figure("sec_%s_300m_%s.png" % (variable, suff),
                                   dpi=dpi)

        # Close input file
        i_abfile.close()

        #
        ax.clear()
        cb.remove()
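The section plot above reconstructs interface depths by accumulating layer thicknesses downward (intfsec[k+1,:] = intfsec[k,:] + dp2d[J,I]). A minimal sketch of that accumulation on synthetic thicknesses:

import numpy as np

kdm, npts = 5, 8                     # layers and points along the section
dp = np.full((kdm, npts), 10.0)      # synthetic layer thicknesses in metres

intfsec = np.zeros((kdm + 1, npts))  # interface depths, surface at row 0
for k in range(kdm):
    intfsec[k + 1, :] = intfsec[k, :] + dp[k, :]

print(intfsec[:, 0])                 # [ 0. 10. 20. 30. 40. 50.]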