Example No. 1
    def test_basic(self):
        file = self.f

        # check inp2xsel
        xsel = Nio.inp2xsel(file, 'PT', 'time|i9 zc|i0 yc|i0 xc|i10:20:2')
        xsel = Nio.inp2xsel(file, 'PT', 'time|3600')

        xc_orig = file.variables['xc'][:]
        pt_orig = file.variables['PT'][:]
        if verbose: print 'xc: ', xc_orig
        if verbose: print 'pt.shape: ', pt_orig.shape
        if verbose: print

        xsel_list = (5, slice(5,8), slice(None), slice(None,None,4))
        for xsel in xsel_list:
            if verbose: print 'xsel: ', xsel
            xc = file.variables['xc'][xsel]
            if verbose: print 'xc[xsel]: ', xc
            if verbose: print
            assert_equal(xc, xc_orig[xsel])

        ptsel_list = ((1,1,1,1), (1,slice(None),0,0), (1,3,slice(5,8)), (slice(None),3,slice(None),1))
        for ptsel in ptsel_list:
            if verbose: print 'ptsel: ', ptsel
            pt = file.variables['PT'][ptsel]
            if verbose: print 'pt[ptsel].shape: ', pt.shape
            if verbose: print
            assert_equal(pt.shape, pt_orig[ptsel].shape)

        file.close()
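For reference, a hedged sketch of the extended selection strings exercised above, reusing a selection from this test against a file like the one do_setup builds below (the file name is illustrative; 'i' prefixes select by index rather than coordinate value):

import Nio
f = Nio.open_file('test.nc', 'r')
pt = f.variables['PT']['time|i0 zc|i0 yc|i0 xc|i10:20:2']
print pt.shape    # expect (5,): scalar selections drop axes; x keeps indices 10,12,14,16,18
f.close()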
Example No. 2
def arg_parse(varname=None,domain=None,input_file=None,list_var_names=False):
    wrf_dom = 3
    if input_file is not None:
        ncfile = Nio.open_file(input_file,format='grib')
    else:
        ncfile = Nio.open_file(find_inputfile(wrf_dom),format='')

    if list_var_names:
        print_var_summary(ncfile)

    if varname is not None and len(varname) != 0:
        for var in varname:
            print_var_summary(ncfile,varname=var)

    return
Example No. 3
def write_2d_output(fname,lat,lon,p):
    
    Ny,Nx=p.shape
    ncf=Nio.open_file(fname,mode="w",format="nc")
    ncf.create_dimension("lat",Ny)
    ncf.create_dimension("lon",Nx)

    varname="XLAT"
    ncf.create_variable(varname,'f',('lat','lon'))
    ncf.variables[varname][:]=lat.astype('f')
    ncf.variables[varname].units="degrees"
    ncf.variables[varname].description="Latitude"

    varname="XLONG"
    ncf.create_variable(varname,'f',('lat','lon'))
    ncf.variables[varname][:]=lon.astype('f')
    ncf.variables[varname].units="degrees"
    ncf.variables[varname].description="Longitude"

    varname="precipitation"
    ncf.create_variable(varname,'f',('lat','lon'))
    ncf.variables[varname][:]=p.astype('f')
    ncf.variables[varname].units="kg/m^2/s"
    ncf.variables[varname].description="precipitation rate"
    
    ncf.close()
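A hedged usage sketch for write_2d_output with synthetic coordinates (shapes and values are illustrative only):

import numpy as np
ny, nx = 4, 5
lon, lat = np.meshgrid(np.linspace(-110., -100., nx), np.linspace(35., 40., ny))
p = np.zeros((ny, nx))    # precipitation rate, kg/m^2/s
write_2d_output("precip.nc", lat, lon, p)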
Example No. 4
def main():
    files = sorted(glob.glob("3kmgoshen.hdf[0123456789]?????"))

    trajectory_points = [ [ (1, 0, 0) ],
                          [ (10, 0, 0) ],
                          [ (20, 0, 0) ],
                          [ (30, 0, 0) ],
                          [ (40, 0, 0) ] ]

    grid_spacing = 1000
    time_spacing = 300

    for file_name in files:
        hdf = nio.open_file(file_name, mode='r', format='hdf')

        u_grid = hdf.variables['u'][:]
        v_grid = hdf.variables['v'][:]

        hdf.close()

        for trajectory in trajectory_points:
            last_z, last_x, last_y = trajectory[-1]
            point_u = interpolate(u_grid, last_x, last_y, last_z)
            point_v = interpolate(v_grid, last_x, last_y, last_z)

            new_x = last_x + time_spacing * point_u / grid_spacing
            new_y = last_y + time_spacing * point_v / grid_spacing

            if new_x > u_grid.shape[1] - 1 or new_x < 0 or new_y > u_grid.shape[2] - 1 or new_y < 0:
                print "Parcel out of bounds ..."
            else:
                trajectory.append((last_z, new_x, new_y))

    print trajectory_points
    return
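The interpolate helper is not shown in this snippet; a hypothetical nearest-grid-point stand-in that is consistent with how it is called above (the real version presumably interpolates between grid points):

def interpolate(grid, x, y, z):
    # Hypothetical: grid is indexed (z, x, y) in fractional grid coordinates,
    # matching the bounds checks on grid.shape[1] and grid.shape[2] above.
    return grid[int(round(z)), int(round(x)), int(round(y))]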
Example No. 5
def main():
    base_path = "/caps2/tsupinie/1kmf-control/"
    temp = goshen_1km_temporal(start=14400, end=14400)
    grid = goshen_1km_grid()
    n_ens_members = 40

    np.seterr(all='ignore')

    ens = loadEnsemble(base_path, [ 11 ], temp.getTimes(), ([ 'pt', 'p' ], computeDensity))
    ens = ens[0, 0]

    zs = decompressVariable(nio.open_file("%s/ena001.hdfgrdbas" % base_path, mode='r', format='hdf').variables['zp'])
    xs, ys = grid.getXY()
    xs = xs[np.newaxis, ...].repeat(zs.shape[0], axis=0)
    ys = ys[np.newaxis, ...].repeat(zs.shape[0], axis=0)

    eff_buoy = effectiveBuoyancy(ens, (zs, ys, xs), plane={'z':10})
    print eff_buoy

    pylab.figure()
    pylab.contourf(xs[0], ys[0], eff_buoy[0], cmap=matplotlib.cm.get_cmap('RdBu_r'))
    pylab.colorbar()

    grid.drawPolitical()

    pylab.suptitle("Effective Buoyancy")
    pylab.savefig("eff_buoy.png")
    pylab.close()
    return
Example No. 6
    def makeNioFile(self, variableName):
        filename = '/tmp/good_%s.nc' % variableName
        f = nio.open_file(filename, 'w')
        f.create_dimension('test_dimension', 1)
        f.create_variable(variableName,'l',('test_dimension',))
        f.close()
        return filename
Example No. 7
def do_setup(filename):
    if os.path.exists(filename): os.remove(filename)
    f = Nio.open_file(filename, 'c')
    (nx, ny, nz, nt,ns) = (21, 21, 12, 10,1)
    (dx, dy, dz, dt) = (1000., 1000., 400., 3600.)
    f.create_dimension('xc', nx)
    f.create_dimension('yc', ny)
    f.create_dimension('zc', nz)
    f.create_dimension('time', nt)
    f.create_dimension('single', ns)
    f.Conventions = 'CF-1.0'
    f.source = 'ARPS'

    var = f.create_variable('xc', 'f', ('xc',))
    setattr(var, 'axis', 'X')
    var = f.create_variable('yc', 'f', ('yc',))
    setattr(var, 'axis', 'Y')
    var = f.create_variable('zc', 'f', ('zc',))
    setattr(var, 'axis', 'Z')
    var = f.create_variable('time', 'f', ('time',))
    setattr(var, 'axis', 'T')
    setattr(var, 'units', 'seconds since 2007-03-21 06:00:00')
    var = f.create_variable('PT', 'f', ('time', 'zc', 'yc', 'xc'))
    var = f.create_variable('PTS', 'f', ('single','time', 'zc', 'yc', 'xc'))
    var = f.create_variable('ZP', 'f', ('zc', 'yc', 'xc'))
    var = f.create_variable('TOPO', 'f', ('yc', 'xc'))
    var = f.create_variable('lon', 'f', ('yc','xc'))
    var = f.create_variable('lat', 'f', ('yc','xc'))

    xc = N.arange(nx, dtype='float32')*dx
    yc = N.arange(ny, dtype='float32')*dy
    zc = N.arange(nz, dtype='float32')*dz
    f.variables['xc'][:] = xc
    f.variables['yc'][:] = yc
    f.variables['zc'][:] = zc
    f.variables['time'][:] = N.arange(nt, dtype='float32')*dt
    a = N.arange(nt*nz*ny*nx,dtype = 'float32')
    #a = N.array(N.random.randn(nt,nz,ny,nx), dtype='float32')
    a = a.reshape(nt,nz,ny,nx)
    #print a.shape
    mask = N.zeros(a.shape,N.bool_)
    mask[:,3,:,:] = 1
    # tests adding a fill value

    am = ma.array(a,mask=mask)
    f.variables['PT'][:] = am[:]
    f.variables['PTS'][:] = am[:]
    #if verbose: print f.variables['PT']
    H = 5000.
    topo = 1000*N.cos(2*N.pi*(xc-10000.)/20000.)+1000.
    zp = zc[:,N.newaxis]*(1-topo[N.newaxis,:]/H) + topo[N.newaxis,:]
    topof = N.zeros((ny, nx), dtype='float32')
    topof[:,:] = topo[N.newaxis,:]
    zpf = N.zeros((nz,ny,nx), dtype='float32')
    zpf[:] = zp[:,N.newaxis,:]
    f.variables['ZP'][:] = zpf
    f.variables['TOPO'][:] = topof
    f.variables['lon'][:] = N.cos(0.1)*xc[N.newaxis,:] - N.sin(0.1)*yc[:,N.newaxis]
    f.variables['lat'][:] = N.sin(0.1)*xc[N.newaxis,:] + N.cos(0.1)*yc[:,N.newaxis]
    f.close()
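A hedged sketch of reading back the masked level written above, assuming PyNIO's default behavior of returning a masked array when the variable carries a _FillValue attribute (file name illustrative):

f = Nio.open_file('test.nc', 'r')
pt = f.variables['PT'][:]
print ma.isMA(pt)    # expected True: level zc=3 was written masked
f.close()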
Example No. 8
def setup_nc_file(filename,lat,lon,nt,ny,nx):
    """setup a netcdf file for output"""
    
    ncfile=Nio.open_file(filename,mode="w",format="nc")
    ncfile.create_dimension("time",nt)
    ncfile.create_dimension("lat",ny)
    ncfile.create_dimension("lon",nx)

    timev=ncfile.create_variable("time","f",("time",))
    times=np.arange(nt)/48.0
    timev[:]=times.astype("f")
    timev.__setattr__("longname","Time")
    timev.__setattr__("units","Days from "+start_date)
    
    latv=ncfile.create_variable("lat","f",("lat","lon"))
    latv[:]=lat.astype("f")
    latv.__setattr__("longname","Latitude")
    latv.__setattr__("units","degrees")
    
    lonv=ncfile.create_variable("lon","f",("lat","lon"))
    lonv[:]=lon.astype("f")
    lonv.__setattr__("longname","Longitude")
    lonv.__setattr__("units","degrees")
    
    return ncfile
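A hedged usage sketch; note that setup_nc_file reads a module-level start_date global, which is assumed here:

import numpy as np
start_date = "2015-06-01"    # assumed global used in the time units attribute
nt, ny, nx = 48, 4, 5
lon, lat = np.meshgrid(np.arange(nx, dtype='f'), np.arange(ny, dtype='f'))
ncfile = setup_nc_file("out.nc", lat, lon, nt, ny, nx)
ncfile.close()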
Example No. 9
def read_nc(filename,var="data",proj=None,returnNCvar=False):
    '''read a netCDF file and return the specified variable

    output is a structure:
        data: raw data as an array
        proj: string representation of the projection information
        atts: data attribute dictionary (if any)
    if returnNCvar==True, the Nio file is not closed and the Nio
        representation of the variable is returned instead of being read into
        memory immediately.
    '''
    d=Nio.open_file(filename, mode='r',format="nc")
    outputdata=None
    attributes=None  # avoids a NameError at return time when var is None
    if var != None:
        data=d.variables[var]
        attributes=d.variables[var].__dict__
        if returnNCvar:
            outputdata=data
        else:
            outputdata=data[:]
    outputproj=None
    if proj!=None:
        projection=d.variables[proj]
        outputproj=str(projection)
    
    
    if returnNCvar:
        return Bunch(data=outputdata,proj=outputproj,ncfile=d,atts=attributes)
    d.close()
    return Bunch(data=outputdata,proj=outputproj,atts=attributes)
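A hedged usage sketch showing both modes of read_nc (file and variable names are illustrative):

res = read_nc("precip.nc", var="precipitation")
print res.data.shape, res.atts.get("units")

lazy = read_nc("precip.nc", var="precipitation", returnNCvar=True)
subset = lazy.data[0:10]    # reads only a slice from disk
lazy.ncfile.close()         # in this mode the caller must close the file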
Example No. 10
    def test_topo(self):
        file = self.f

        # basic case
        cstr_list = ('time|i0 zc|ZP|2500 yc|i5 xc|:', \
                'time|i0 zc|ZP|2500m yc|i5 xc|:', \
                'time|i0 zc|ZP|1500m yc|i5 xc|:', \
                'time|i0 zc|ZP|1000,1500m yc|i5.5 xc|:')
        results = ((21,), (21,), (21,), (2,21))

        for (cstr, res) in zip(cstr_list, results):
            if verbose: print cstr
            print "in test_topo"
            xsel = Nio.inp2xsel(file, 'PT', cstr)
            pt = file.variables['PT'][cstr]
            #pt = file.variables['ZP'][:]
            if verbose: print pt.shape
            if verbose: 
                if ma.isMA(pt):
                    print N.asarray(pt.filled())
                else:
                    print pt
            assert_equal(pt.shape, res)

        # ERROR:
        #cstr = 'xc|10k yc|i5.5:8:0.5i zc|ZP|2.5,3.5 time|i0:6:3'
        #if verbose: print cstr
        #pt = file.variables['PT'][cstr]
        #if verbose: print pt.shape

        file.close()
Example No. 11
    def split(self, num):

        if type(num) is not int:
            raise TypeError, "number to split must be an integer"

        if self._open_file is False:

            if self.trange is not None:
                time_len = self.trange[1]-self.trange[0]
                tstart = self.trange[0]
            else:
                f = Nio.open_file(self.pathname)
                time_len = f.dimensions['time']
                f.close()
                tstart = 0

            chunk_size = time_len//num
            remainder = time_len%num

            tranges = [(tstart+i*chunk_size+remainder*i//num, \
                    tstart+(i+1)*chunk_size+remainder*(i+1)//num) \
                    for i in range(num)]

            return [RectGrid(self.pathname, self.varname, trange=it) \
                    for it in tranges]

        else:
            raise RuntimeError, "RectGrid must not be initialized before running split()"
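A hedged usage sketch (RectGrid's constructor signature is inferred from the recursive call above; file and variable names are illustrative):

grid = RectGrid("tas.nc", "tas")
subgrids = grid.split(4)    # four RectGrids with contiguous time ranges
print [g.trange for g in subgrids]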
Example No. 12
def do_setup_nocrd(filename):
    if os.path.exists(filename): os.remove(filename)
    f = Nio.open_file(filename, 'c')
    (nx, ny, nz, nt) = (20, 25, 5, 10)
    (dx, dy, dz, dt) = (1000., 1000., 800., 3600.)
    f.create_dimension('xc', nx)
    f.create_dimension('yc', ny)
    f.create_dimension('zc', nz)
    f.create_dimension('time', nt)
    f.Conventions = 'CF-1.0'
    f.source = 'ARPS'

    var = f.create_variable('time', 'f', ('time',))
    setattr(var, 'axis', 'T')
    setattr(var, 'units', 'seconds since 2007-03-21 06:00:00')
    var = f.create_variable('PT', 'f', ('time', 'zc', 'yc', 'xc'))
    var = f.create_variable('ZP', 'f', ('zc', 'yc', 'xc'))

    xc = N.arange(nx, dtype='float32')*dx
    yc = N.arange(ny, dtype='float32')*dy
    f.variables['time'][:] = N.arange(nt, dtype='float32')*dt
    #a = N.array(N.random.randn(nt,nz,ny,nx), dtype='float32')
    a = N.arange(nt*nz*ny*nx,dtype = 'float32')
    a = a.reshape(nt,nz,ny,nx)
    f.variables['PT'][:] = a
    a = N.zeros((nz,ny,nx))
    a[:] = N.arange(nz)[:,N.newaxis,N.newaxis]
    f.variables['ZP'][:] = N.array(a, dtype='float32')
    f.close()
Example No. 13
def findHeights(grdbas, bounds):
    hdf = nio.open_file(grdbas, mode='r', format='hdf')

    bounds_x, bounds_y = bounds
    column_x = (bounds_x.start + bounds_x.stop) / 2
    column_y = (bounds_y.start + bounds_y.stop) / 2

    return hdf.variables['zp'][:, column_y, column_x]
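A hedged usage sketch; the grdbas file name and bounds are illustrative:

heights = findHeights("ena001.hdfgrdbas", (slice(90, 210), slice(40, 160)))
print heights.shape    # one zp column at the midpoint of the bounds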
Example No. 14
def do_setup(filename):
    if os.path.exists(filename):
        os.remove(filename)
    f = Nio.open_file(filename, "c")
    (nx, ny, nz, nt) = (21, 21, 12, 10)
    (dx, dy, dz, dt) = (1000.0, 1000.0, 400.0, 3600.0)
    f.create_dimension("xc", nx)
    f.create_dimension("yc", ny)
    f.create_dimension("zc", nz)
    f.create_dimension("time", nt)
    f.Conventions = "CF-1.0"
    f.source = "ARPS"

    var = f.create_variable("xc", "f", ("xc",))
    setattr(var, "axis", "X")
    var = f.create_variable("yc", "f", ("yc",))
    setattr(var, "axis", "Y")
    var = f.create_variable("zc", "f", ("zc",))
    setattr(var, "axis", "Z")
    var = f.create_variable("time", "f", ("time",))
    setattr(var, "axis", "T")
    setattr(var, "units", "seconds since 2007-03-21 06:00:00")
    var = f.create_variable("PT", "f", ("time", "zc", "yc", "xc"))
    var = f.create_variable("ZP", "f", ("zc", "yc", "xc"))
    var = f.create_variable("TOPO", "f", ("yc", "xc"))
    var = f.create_variable("lon", "f", ("yc", "xc"))
    var = f.create_variable("lat", "f", ("yc", "xc"))

    xc = N.arange(nx, dtype="float32") * dx
    yc = N.arange(ny, dtype="float32") * dy
    zc = N.arange(nz, dtype="float32") * dz
    f.variables["xc"][:] = xc
    f.variables["yc"][:] = yc
    f.variables["zc"][:] = zc
    f.variables["time"][:] = N.arange(nt, dtype="float32") * dt
    a = N.arange(nt * nz * ny * nx, dtype="float32")
    # a = N.array(N.random.randn(nt,nz,ny,nx), dtype='float32')
    a = a.reshape(nt, nz, ny, nx)
    print a.shape
    mask = N.zeros(a.shape, N.bool_)
    mask[:, 3, :, :] = 1
    # tests adding a fill value

    am = ma.array(a, mask=mask)
    f.variables["PT"][:] = am[:]
    # if verbose: print f.variables['PT']
    H = 5000.0
    topo = 1000 * N.cos(2 * N.pi * (xc - 10000.0) / 20000.0) + 1000.0
    zp = zc[:, N.newaxis] * (1 - topo[N.newaxis, :] / H) + topo[N.newaxis, :]
    topof = N.zeros((ny, nx), dtype="float32")
    topof[:, :] = topo[N.newaxis, :]
    zpf = N.zeros((nz, ny, nx), dtype="float32")
    zpf[:] = zp[:, N.newaxis, :]
    f.variables["ZP"][:] = zpf
    f.variables["TOPO"][:] = topof
    f.variables["lon"][:] = N.cos(0.1) * xc[N.newaxis, :] - N.sin(0.1) * yc[:, N.newaxis]
    f.variables["lat"][:] = N.sin(0.1) * xc[N.newaxis, :] + N.cos(0.1) * yc[:, N.newaxis]
    f.close()
Example No. 15
def main():
    files = glob("1kmgoshen/1kmgoshen.hdf0*")
    hdf_grdbas = nio.open_file("1kmgoshen/1kmgoshen.hdfgrdbas", mode='r', format='hdf')

#   topo, topo_lats, topo_lons = load_topo("e10g", (6000, 10800), ((0., 50.), (-180., -90.)), ((36., 46.), (-114., -101.)))

    for file in files:
        time_sec = file[-6:]
        hdf_data = nio.open_file(file, mode='r', format='hdf')
        hydrostatic, mass_cons, thermal_wind_u, thermal_wind_v = computeBalances(hdf_data, hdf_grdbas)
        plot_map(hydrostatic[1], (1000, 1000), "xy", r"Hydrostatic Imbalance (Pa m$^{-1}$)", "hydrostatic_t%s.png" % time_sec) #, topo=(topo, topo_lats, topo_lons))
        plot_map(mass_cons[1], (1000, 1000), "xy", r"Mass Conservation Imbalance (m s$^{-2}$)", "mass_cons_t%s.png" % time_sec) #, topo=(topo, topo_lats, topo_lons))
        plot_map(thermal_wind_u[1], (1000, 1000), "xy", r"Thermal Wind $u$ Imbalance (m s$^{-2}$)", "thermal_wind_u_t%s.png" % time_sec) #, topo=(topo, topo_lats, topo_lons))
        plot_map(thermal_wind_v[1], (1000, 1000), "xy", r"Thermal Wind $v$ Imbalance (m s$^{-2}$)", "thermal_wind_v_t%s.png" % time_sec) #, topo=(topo, topo_lats, topo_lons))
        hdf_data.close()

    hdf_grdbas.close()
    return
Example No. 16
def main():
    print "Testing"
    import Nio as nio
    data = nio.open_file('/home/wrfuser/hootpy/data/wrfout_d01_PLEV.nc')
#   print mslp(data.variables['PSFC'][:],data.variables['HGT'][:],data.variables['T2'][:],data.variables['Q2'][:])
    dewp = dewpoint(data.variables['T'][0,:],data.variables['QVAPOR'][0,:],data.variables['P'][:]*100)
    print dewp.max()
    print dewp.min()
    print dewp.mean()
Example No. 17
def merge(ts):
    """
    Process an hour's worth of stage4 data into the hourly RE
    """

    # Load up the 12z 24h total, this is what we base our deltas on
    fp = "/mesonet/ARCHIVE/data/%s/stage4/ST4.%s.24h.grib" % (
      ts.strftime("%Y/%m/%d"), ts.strftime("%Y%m%d%H") )

    grib = Nio.open_file(fp, 'r')
    # Rough subsample, since the whole enchilada is too much
    lats = numpy.ravel( grib.variables["g5_lat_0"][200:-100:5,300:900:5] )
    lons = numpy.ravel( grib.variables["g5_lon_1"][200:-100:5,300:900:5] )
    vals = numpy.ravel( grib.variables["A_PCP_GDS5_SFC_acc24h"][200:-100:5,300:900:5] )
    res = Ngl.natgrid(lons, lats, vals, iemre.XAXIS, iemre.YAXIS)
    stage4 = res.transpose()
    # Prevent Large numbers, negative numbers
    stage4 = numpy.where( stage4 < 10000., stage4, 0.)
    stage4 = numpy.where( stage4 < 0., 0., stage4)

    # Open up our RE file
    nc = netCDF4.Dataset("/mesonet/data/iemre/%s_mw_hourly.nc" % (ts.year,),'a')
    ts0 = ts + mx.DateTime.RelativeDateTime(days=-1)
    jan1 = mx.DateTime.DateTime(ts.year, 1, 1, 0, 0)
    offset0 = int(( ts0 - jan1).hours)
    offset1 = int(( ts -  jan1).hours)
    if offset0 < 0:
        offset0 = 0
    iemre2 = numpy.sum(nc.variables["p01m"][offset0:offset1,:,:], axis=0)
    
    iemre2 = numpy.where( iemre2 > 0., iemre2, 0.00024)
    iemre2 = numpy.where( iemre2 < 10000., iemre2, 0.00024)
    print "Stage IV 24h [Avg %5.2f Max %5.2f]  IEMRE Hourly [Avg %5.2f Max: %5.2f]" % (
                    numpy.average(stage4), numpy.max(stage4), 
                    numpy.average(iemre2), numpy.max(iemre2) )
    multiplier = stage4 / iemre2
    print "Multiplier MIN: %5.2f  AVG: %5.2f  MAX: %5.2f" % (
                    numpy.min(multiplier), numpy.average(multiplier),numpy.max(multiplier))
    for offset in range(offset0, offset1):
        data  = nc.variables["p01m"][offset,:,:]
        
        # Keep data within reason
        data = numpy.where( data > 10000., 0., data)
        adjust = numpy.where( data > 0, data, 0.00001) * multiplier
        adjust = numpy.where( adjust > 250.0, 0, adjust)
        nc.variables["p01m"][offset,:,:] = numpy.where( adjust < 0.01, 0, adjust)
        ts = jan1 + mx.DateTime.RelativeDateTime(hours=offset)
        print "%s IEMRE %5.2f %5.2f Adjusted %5.2f %5.2f" % (ts.strftime("%Y-%m-%d %H"), 
                                    numpy.average(data), numpy.max(data),
                                    numpy.average(nc.variables["p01m"][offset]),
                                    numpy.max(nc.variables["p01m"][offset]))
    nc.sync()
    iemre2 = numpy.sum(nc.variables["p01m"][offset0:offset1,:,:], axis=0)
    print "Stage IV 24h [Avg %5.2f Max %5.2f]  IEMRE Hourly [Avg %5.2f Max: %5.2f]" % (
                    numpy.average(stage4), numpy.max(stage4), 
                    numpy.average(iemre2), numpy.max(iemre2) )
    nc.close()
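The adjustment in merge is easiest to see with numbers; a small worked sketch of the per-cell scaling:

# If Stage IV reports 24.0 mm over 24 h in a cell but the hourly IEMRE
# values sum to 12.0 mm, each hourly value is scaled by 24.0 / 12.0 = 2.0.
stage4_total = 24.0
iemre_total = 12.0
multiplier = stage4_total / iemre_total
hourly = [0.0, 3.0, 6.0, 3.0]
adjusted = [h * multiplier for h in hourly]    # now sums to 24.0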
Example No. 18
    def open_file(self, file_name, mode=None):
        """
        open_file() [public]
        Purpose:    Opens a file with the given mode.
        Parameters:    file_name [type=string]
                        Name of the file to open.
                    mode [type=string]
                        How to open the file (e.g. 'r' for reading, 'rw' for reading and writing, etc.)
                        Passing None defaults to opening for reading only.
        Returns:    [nothing]
        """
        # Set the mode default
        if mode is None: mode = 'r'

        # If the file is already open, close it.
        if self._df is not None:
            self.close()

        self._file_name = file_name

        # Figure out whether we're reading, writing, or both.
        self._read = (mode.find('r') > -1)
        self._write = (mode.find('w') > -1 or mode == 'r+')

#       if self._write:
#           warning("DataIO: Writing is currently not supported.  Opening as read-only.")
#           self._write = False

        if reader.__name__ == "Nio":
            # If the reader is PyNIO open it with PyNIO's open command
            self._df = reader.open_file(file_name, mode=mode)
        elif reader.__name__ == "scipy.io.netcdf":
            # If the reader is scipy, open it with scipy's open command
            if self._read:
                self._df = reader.netcdf_file(file_name, 'r')
            elif self._write:
                self._df = reader.netcdf_file(file_name, 'w')
        elif reader.__name__ == "Dataset":
            # If the reader is netCDF4, open it with that open command
            if self._read:
                self._df = reader(file_name, mode='r')
            elif self._write:
                self._df = reader(file_name, mode='a')
        return
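This method branches on a module-level reader object; a hedged sketch of how that global is presumably bound (the assignment shown is an assumption, not code from the source):

# Exactly one of these is chosen at import time; open_file() then
# dispatches on reader.__name__.
import Nio as reader                       # PyNIO backend
#from scipy.io import netcdf as reader    # scipy backend
#from netCDF4 import Dataset as reader    # netCDF4 backend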
Example No. 19
def getAxes(base_path, agl=True, z_coord_type=""):
    grdbas_file = _buildEnsGrdbas(0)
    hdf_grdbas = nio.open_file("%s/%s" % (base_path, grdbas_file), mode='r', format='hdf')
    axes = dict([ (ax[:1], decompressVariable(hdf_grdbas.variables[ax])) for ax in ['x', 'y', "zp%s" % z_coord_type] ])
    if agl:
        axes['z'], axes['z_MSL'] = _makeZCoordsAGL(axes['z'])
    else:
        axes['z_AGL'], axes['z'] = _makeZCoordsAGL(axes['z'])

    return axes
Example No. 20
def save_graham_data_to_netcdf(
    netcdf_file_path, resolution_min=5, shape=(4320, 2160), lower_left_point=GeoPoint(-180.0, -90.0)
):
    opt = "c"
    if os.path.isfile(netcdf_file_path):
        os.remove(netcdf_file_path)

    file = Nio.open_file(netcdf_file_path, opt)
    save_graham_data_to_obj(file, resolution_min=resolution_min, the_shape=shape, lower_left_point=lower_left_point)
    file.close()
Example No. 21
def main():
    _epoch_time = datetime(1970, 1, 1, 0, 0, 0)
    _initial_time = datetime(2009, 6, 5, 18, 0, 0) - _epoch_time
    _initial_time = (_initial_time.microseconds + (_initial_time.seconds + _initial_time.days * 24 * 3600) * 1e6) / 1e6
    _target_times = [ 1800, 3600, 5400, 7200, 9000, 10800, 11100, 11400, 11700, 12000, 12300, 12600, 12900, 13200, 13500, 13800, 14100, 14400,
        14700, 15000, 15300, 15600, 15900, 16200, 16500, 16800, 17100, 17400, 17700, 18000 ]

    inflow_wd_lbound, inflow_wd_ubound = (100, 240)

#   bounds = (0, slice(90, 210), slice(40, 160))
#   bounds = (0, slice(100, 180), slice(90, 170))
    bounds = (0, slice(115, 140), slice(120, 145))
    rev_bounds = [ 0 ]
    rev_bounds.extend(bounds[2:0:-1])
    rev_bounds = tuple(rev_bounds)

    refl_base = "hdf/KCYS/1km/goshen.hdfrefl2d"
    refl_times = np.array([ int(f[-6:]) for f in glob.glob("%s??????" % refl_base) ])
    refl_keep_times = []
    refl_data = {}

    for tt in _target_times:
        idx = np.argmin(np.abs(refl_times - tt))
        if refl_times[idx] > tt and idx > 0:
            idx -= 1

        file_name = "%s%06d" % (refl_base, refl_times[idx])
        hdf = nio.open_file(file_name, mode='r', format='hdf')
        refl_keep_times.append(refl_times[idx])
        refl_data[refl_times[idx]] = hdf.variables['refl2d'][rev_bounds]

    _proj = setupMapProjection(goshen_1km_proj, goshen_1km_gs, bounds=bounds[1:])
#   _proj['resolution'] = 'h' 
    map = Basemap(**_proj)

    ttu_sticknet_obs = cPickle.load(open("ttu_sticknet.pkl", 'r'))
    psu_straka_obs = cPickle.load(open("psu_straka_mesonet.pkl", 'r'))

    all_obs = loadObs(['ttu_sticknet.pkl', 'psu_straka_mesonet.pkl'], [ _epoch_time + timedelta(seconds=(_initial_time + t)) for t in _target_times ],  map, (goshen_1km_proj['width'], goshen_1km_proj['height']), round_time=True)
    print all_obs

#   partitioned_obs = gatherObservations(all_obs, [ _initial_time + t for t in _target_times ])
    for time, refl_time in zip([ _initial_time + t for t in _target_times], refl_keep_times):
        time_str = (_epoch_time + timedelta(seconds=time)).strftime("%d %B %Y %H%M UTC")

        plot_obs = all_obs[np.where(all_obs['time'] == time)]

        inflow_idxs = np.where((plot_obs['wind_dir'] >= inflow_wd_lbound) & (plot_obs['wind_dir'] <= inflow_wd_ubound))[0]
        outflow_idxs = np.array([ idx for idx in range(plot_obs['id'].shape[0]) if idx not in inflow_idxs ])

        title = "All MM observations at %s" % time_str
        file_name = "mm_obs_%06d.png" % (time - _initial_time)

        plotObservations(plot_obs, map, title, file_name, refl=refl_data[refl_time])
    return
Example No. 22
    def make5VariableNioFile(self):
        filename = '/tmp/5_variables.nc'
        f = nio.open_file(filename, 'w')
        f.create_dimension('dimension_one', 1)
        f.create_variable('one', 'l', ('dimension_one',))
        f.create_variable('two', 'l', ('dimension_one',))
        f.create_variable('three', 'l', ('dimension_one',))
        f.create_variable('four', 'l', ('dimension_one',))
        f.create_variable('five', 'l', ('dimension_one',))
        f.close()
        return filename
Example No. 23
def load_reflectivity_vars(file_name, vars, ens_member):
    hdf = nio.open_file(file_name, mode='r', format='hdf')

    vars['pt'][ens_member] = hdf.variables['pt'][12]
    vars['p'][ens_member] = hdf.variables['p'][12]
    vars['qr'][ens_member] = np.maximum(hdf.variables['qr'][12], np.zeros(hdf.variables['qr'][12].shape))
    vars['qs'][ens_member] = np.maximum(hdf.variables['qs'][12], np.zeros(hdf.variables['qs'][12].shape))
    vars['qh'][ens_member] = np.maximum(hdf.variables['qh'][12], np.zeros(hdf.variables['qh'][12].shape))

    hdf.close()
    return
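A hedged usage sketch; the dimensions and file name are illustrative, and each vars entry holds one 2-D slab (model level 12) per ensemble member:

import numpy as np
n_members, ny, nx = 40, 255, 255
vars = dict((v, np.empty((n_members, ny, nx)))
            for v in ['pt', 'p', 'qr', 'qs', 'qh'])
load_reflectivity_vars("ena001.hdf014400", vars, 0)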
Example No. 24
def loadGrdbas(grdbas_file, agl):
    grdbas = nio.open_file(grdbas_file, mode='r', format='hdf')
    z_coords = decompressVariable(grdbas.variables['zp'])

    x_coords = decompressVariable(grdbas.variables['x'])
    y_coords = decompressVariable(grdbas.variables['y'])

    if agl:
        z_coords = _makeZCoordsAGL(z_coords)

    return z_coords, y_coords, x_coords
Example No. 25
    def load_data(self):
        """
        Loads data from MRMS GRIB2 files and handles compression duties if files are compressed.
        """
        data = []
        loaded_dates = []
        loaded_indices = []
        for t, timestamp in enumerate(self.all_dates):
            date_str = timestamp.date().strftime("%Y%m%d")
            full_path = self.path_start + date_str + "/"
            if self.variable in os.listdir(full_path):
                full_path += self.variable + "/"
                data_files = sorted(os.listdir(full_path))
                file_dates = pd.to_datetime([d.split("_")[-1][0:13] for d in data_files])
                if timestamp in file_dates:
                    data_file = data_files[np.where(timestamp==file_dates)[0][0]]
                    print(full_path + data_file)
                    if data_file[-2:] == "gz":
                        subprocess.call(["gunzip", full_path + data_file])
                        file_obj = Nio.open_file(full_path + data_file[:-3])
                    else:
                        file_obj = Nio.open_file(full_path + data_file)
                    var_name = sorted(file_obj.variables.keys())[0]
                    data.append(file_obj.variables[var_name][:])
                    if self.lon is None:
                        self.lon = file_obj.variables["lon_0"][:]
                        # Translates longitude values from 0:360 to -180:180
                        if np.count_nonzero(self.lon > 180) > 0:
                            self.lon -= 360
                        self.lat = file_obj.variables["lat_0"][:]
                    file_obj.close()
                    if data_file[-2:] == "gz":
                        subprocess.call(["gzip", full_path + data_file[:-3]])
                    else:
                        subprocess.call(["gzip", full_path + data_file])
                    loaded_dates.append(timestamp)
                    loaded_indices.append(t)
        if len(loaded_dates) > 0:
            self.loaded_dates = pd.DatetimeIndex(loaded_dates)
            self.data = np.ones((self.all_dates.shape[0], data[0].shape[0], data[0].shape[1])) * -9999
            self.data[loaded_indices] = np.array(data)
Example No. 26
    def __init__(self, filename,nx,ny,nz=None,var=None,dtype='f'):
        history = 'Created : ' + time.ctime() + '\nby:'+os.environ['USER']+" using NC_writer Class"
        self.NCfile=Nio.open_file(filename,mode='w',format="nc",history=history)
        self.NCfile.create_dimension('time', 0)
        self.NCfile.create_dimension('lat', ny)
        self.ny=ny
        self.NCfile.create_dimension('lon', nx)
        self.nx=nx
        if nz:
            self.NCfile.create_dimension('level',nz)
            self.nz=nz
        self.NCfile.create_variable('time','l',('time',))
        if var: self.addVar(var,dtype=dtype)
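A hedged usage sketch (addVar is another method of this class, not shown in the snippet):

writer = NC_writer("out.nc", nx=100, ny=80, nz=20, var="T", dtype='f')
writer.NCfile.close()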
Example No. 27
def doit(ts):
    """
    Generate hourly plot of stage4 data
    """
    gmtnow = mx.DateTime.gmt()
    routes = "a"
    if (gmtnow - ts).hours < 2:
        routes = "ac"

    fp = "/mesonet/ARCHIVE/data/%s/stage4/ST4.%s.01h.grib" % (
                        ts.strftime("%Y/%m/%d"), ts.strftime("%Y%m%d%H") )
    if not os.path.isfile(fp):
        print 'Missing stage4 %s' % (fp,)
        return

    grib = Nio.open_file(fp)
    lats = grib.variables["g5_lat_0"][:]
    lons = grib.variables["g5_lon_1"][:]
    vals = grib.variables["A_PCP_GDS5_SFC_acc1h"][:] / 25.4

    cfg = {
     'wkColorMap': 'BlAqGrYeOrRe',
     'nglSpreadColorStart': -1,
     'nglSpreadColorEnd'  : 2,
     '_MaskZero'          : True,
     'lbTitleString'      : "[inch]",
     '_valid'    : 'Hour Ending %s' % (ts.localtime().strftime("%d %B %Y %I %p %Z"),),
     '_title'    : "StageIV 1 Hour Precipitation [inch]",
     }

    tmpfp = iemplot.simple_grid_fill(lons, lats, vals, cfg)
    pqstr = "plot %s %s00 iowa_stage4_1h.png iowa_stage4_1h_%s.png png" % (
                            routes, ts.strftime("%Y%m%d%H"), ts.strftime("%H"))
    iemplot.postprocess(tmpfp, pqstr)

    # Plot Midwest
    cfg = {
     'wkColorMap': 'BlAqGrYeOrRe',
     'nglSpreadColorStart': -1,
     'nglSpreadColorEnd'  : 2,
     '_MaskZero'          : True,
     '_midwest'           : True,
     'lbTitleString'      : "[inch]",
     '_valid'    : 'Hour Ending %s' % (ts.localtime().strftime("%d %B %Y %I %p %Z"),),
     '_title'    : "StageIV 1 Hour Precipitation [inch]",
     }

    tmpfp = iemplot.simple_grid_fill(lons, lats, vals, cfg)
    pqstr = "plot %s %s00 midwest_stage4_1h.png midwest_stage4_1h_%s.png png" % (
      routes, ts.strftime("%Y%m%d%H"),ts.strftime("%H") )
    iemplot.postprocess(tmpfp, pqstr)
Example No. 28
    def _init(self):

        if self._open_file is False:
            self._open_file = True
            f = Nio.open_file(self.pathname)

            # Dimension of var is time, lat, lon
            self._var = f.variables[self.varname]
            self._time = f.variables['time']
            self._lat = f.variables['lat']
            self._lon = f.variables['lon']
            self.time = None
            self.lat = None
            self.lon = None
Example No. 29
def main():
    base_path = "/caps1/tsupinie/1km-control-no-ua"
    files = glob.glob("%s/ena???.hdf0*" % base_path)

    for file in files:
        hdf = nio.open_file(file, mode='r', format='hdf')

        for var in ['u', 'v', 'w', 'pt', 'p', 'qv']:
            if var not in hdf.variables:
                print "%s incomplete ... " % file
                break

        hdf.close()
    return
Example No. 30
def do_simulated_plot(file_name, valid_time, sec_string, obs=None, date_tag=True):
    path_name, file_base = os.path.split(file_name)
    ena_string = file_base.split(".")[0]

    if date_tag:
        tag = path_name.split("/")[-1].split("-")[-1]
        id_string = "-%s" % tag
    else:
        id_string = ""

    hdf = nio.open_file(file_name, mode='r', format='hdf')

    vars = {}
    for var in ['pt', 'p', 'qr', 'qs', 'qh', 'w']:
        vars[var] = hdf.variables[var][12]

        if vars[var].min() == -32768 or vars[var].max() == 32767:
            dindex = (12, slice(None), slice(None))
            vars[var] = decompressVariable(hdf.variables[var], dindex=dindex)

        if var in ['qr', 'qs', 'qh']:
            vars[var] = np.maximum(vars[var], np.zeros(vars[var].shape))

    reflectivity = computeReflectivity(**vars)

    reflectivity_thresh = np.maximum(reflectivity, np.zeros(reflectivity.shape))

    w_levels = range(-20, 0, 2)
    w_levels.extend(range(2, 22, 2))

    refl_title = "Base Reflectivity Valid %s" % valid_time.strftime("%d %b %Y %H%M UTC")
    refl_img_file_name = "%s%s.bref.%s.png" % (ena_string, id_string, sec_string)
    ptprt_title = r"$\theta$ Valid %s" % valid_time.strftime("%d %b %Y %H%M UTC")
    ptprt_img_file_name = "%s%s.pt.%s.png" % (ena_string, id_string, sec_string)

#   vars = {}
    for var in ['pt', 'u', 'v']:
        vars[var] = hdf.variables[var][2]

        if vars[var].min() == -32768 or vars[var].max() == 32767:
            dindex = (2, slice(None), slice(None))
            vars[var] = decompressVariable(hdf.variables[var], dindex=dindex)

#   print pt.min(), pt.max()

    bounds = (slice(100, 130), slice(100, 130))

    plot_map(vars['pt'], 1000, ptprt_title, ptprt_img_file_name, color_bar='pt', vectors=(vars['u'], vars['v']), obs=obs)
    plot_map(reflectivity, 1000, refl_title, refl_img_file_name, color_bar='refl', aux_field=(w_levels, vars['w']))
    return
Example No. 31
#!/usr/bin/python
# -*- coding: utf-8 -*-
"""
author:     yetao.lu
date:       2017/12/28
description:
"""
import Nio, datetime
inputfile = '/Users/yetao.lu/Documents/mosdata/sfc_20150601_12.grib'
inputfile2 = '/Users/yetao.lu/Documents/mosdata/pl_20150601_12.grib'
file = Nio.open_file(inputfile, 'r')
names = file.variables.keys()
fnames = file.variables.values()
t = getattr(file.variables[names[1]], 'initial_time')
odatetime = datetime.datetime.strptime(t, '%m/%d/%Y (%H:%M)')


#print odatetime
# for attrib in file.variables[names[1]].attributes.keys():
#     print attrib
#     t=getattr(file.variables[names[1]],'initial_time')
#     print  t
# tempArray=file.variables['2T_GDS0_SFC']
# tempa=tempArray[:]
# for i in range(len(tempArray)):
#     latlonArray=tempArray[i]
#     for j in range(len(latlonArray)):
#         lonarray=latlonArray[j]
#         for k in range(len(lonarray)):
#             perlonarray=lonarray[k]
#             print k,perlonarray[0],perlonarray[len(perlonarray)-1]
Example No. 32
from __future__ import print_function, division
import numpy
import Nio
import time, os

#open a file
fn = "vlen.h5"
file = Nio.open_file(fn, "r")

print("file:")
print(file)

print(file.variables['vlen_var'])
var = file.variables['vlen_var'][:]

print("var:")
print(var)

file.close()

Example No. 33
#
#     http://mailman.ucar.edu/mailman/listinfo/pyngl-talk
#
from __future__ import print_function
import numpy, Nio, Ngl, sys, os

#---Read data
filename = "b.e12.B1850C5CN.ne30_g16.init.ch.027.cam.h0.0001-01.nc"
if (not os.path.exists(filename)):
    print(
        "You do not have the necessary '{}' file to run this example.".format(
            filename))
    print("See the comments at the top of this script for more information.")
    sys.exit()

a = Nio.open_file(filename)
vname = "TS"
data = a.variables[vname]
lat = a.variables["lat"][:]  # 1D array (48602 cells)
lon = a.variables["lon"][:]  # ditto

ncells = data.shape[1]
print("There are {} cells in the {} variable".format(ncells, vname))

wks_type = "png"
wks = Ngl.open_wks(wks_type, "./output/ngl_report/camse1")

#---Set some plot options
res = Ngl.Resources()

# Contour options
Example No. 34
    '../ncarg/data/hdf/avhrr.hdf',
    '../ncarg/data/hdfeos/MOD04_L2.A2001066.0000.004.2003078090622.hdfeos',
    '../ncarg/data/hdfeos5/OMI-Aura_L3-OMAERUVd_2010m0131_v003-2010m0202t014811.he5',
    '../ncarg/data/hdf5/num-types.h5', '../ncarg/data/shapefile/states.shp'
]

formats = [
    'grib', 'netcdf', 'grib2', 'hdf4', 'hdfeos', 'hdfeos5', 'hdf5', 'shapefile'
]

format_dict = dict(zip(formats, testfiles))

supported_formats = Nio.__formats__
for format in format_dict:
    if supported_formats.has_key(format) and not supported_formats[format]:
        print '==========================='
        print 'Optional format %s is not enabled in this version of PyNIO' % (
            format, )
        print '==========================='
        continue
    try:
        print '==========================='
        print 'Format %s: opening and printing contents' % (format, )
        print '==========================='
        f = Nio.open_file(format_dict[format])
        print f
    except:
        print '==========================='
        print 'Format %s: failed to open and/or print  contents' % (format, )
        print '==========================='
Example No. 35
#Latorg = 41.3209371228     #Desired Origin Lat
#Lonorg =-70.53690039    #Desired Origin Lon
#Kentland Farm
Latorg = 37.19636
Lonorg = -80.57834

tol = 0.0125  #How close to desired origin do you want to get
filename = "20170904_00_00.grib2"
gridspacing = 3

#gridpointsI = 317 #Choose an odd number so you will have the desired origin
#gridpointsJ = 317 #Choose an odd number so you will have the desired origin
gridpointsI = 99  #Choose an odd number so you will have the desired origin
gridpointsJ = 101  #Choose an odd number so you will have the desired origin
#Open Grib File
file = Nio.open_file(filename, "r")
names = file.variables.keys()

for i in range(len(names)):
    #print "\n" + names[i]
    if names[i][:8] == "lv_ISBL0":
        print "\n" + names[i]
        print file.variables[names[i]].dimensions
        for attrib in file.variables[names[i]].attributes.keys():
            print attrib + " has value ", getattr(file.variables[names[i]],
                                                  attrib)
        a = file.variables[names[i]][:]
        print a[41]

    if names[i] == "UGRD_P0_L100_GLC0":
        print "\n" + names[i]
Example No. 36
        break
#print stationlist
allpath = '/moji/meteo/cluster/data/MOS/2015/'
# walk the directory tree for files
inputfile = ''
inputfile2 = ''
#filesdict={}
for rootpath, dirs, files in os.walk(allpath):
    for file in files:
        if file[:3] == 'sfc' and file[-5:] == '.grib' and (string.find(
                file, '2014') == -1):
            inputfile = os.path.join(rootpath, file)
            inputfile2 = inputfile.replace('sfc', 'pl')
            #filesdict[inputfile]=inputfile2
            #print inputfile,inputfile2
            sfcfile = Nio.open_file(inputfile, 'r')
            plfile = Nio.open_file(inputfile2, 'r')
            # the argument 0 selects the forecast at lead-time index 0
            GetStationsAndOnetimesFromEC(0, sfc_varinames, sfcfile,
                                         pl_varinames, plfile, inputfile)
            # csvfile1='/home/wlan_dev/data2.csv'
            # filewrite=open(csvfile1,'w')
            # #print len(alllist)
            # for i in range(len(stationsVlist)):
            #     for j in range(len(stationsVlist[i])):
            #         if j==len(stationsVlist[i])-1:
            #             filewrite.write(str((stationsVlist[i])[j]))
            #         else:
            #             filewrite.write(str((stationsVlist[i])[j])+',')
            #     filewrite.write('\n')
# file = Nio.open_file(inputfile, 'r')
Example No. 37
def getTrainfeatureFromECfile(ecfile_sfc,ecfile_pl,sfc_varinames,pl_varinames,ff,trainlist):
    sfc_file=Nio.open_file(ecfile_sfc,'r')
    pl_file=Nio.open_file(ecfile_pl,'r')
    # loop over the station list:
    for i in range(len(stationlist)):
        # build a lookup key from the EC file name to fetch the observed temperature; the date in the file name is the initial time plus the forecast lead time
        ecnameArray=ecfile_sfc.split('_')
        # initial (model start) time
        origindatetime=datetime.datetime.strptime((ecnameArray[1]+ecnameArray[2][:2]),'%Y%m%d%H')
        # EC forecast valid time, which is also the time of the matching observation
        fdatetime=origindatetime+datetime.timedelta(hours=ff)
        fdatestring=datetime.datetime.strftime(fdatetime,'%Y%m%d%H%M%S')
        # key of station id + time, used to fetch the observation matching this forecast
        kid=stationlist[i][0]+'_'+fdatestring
        # observed temperature for this station and time, from the observation dict
        trainlabel=stationdict.get(kid)
        vstring=[]
        # compute the array index from the forecast lead time
        if ff <= 144:
            j = ff / 3
        else:
            j = (ff - 144) / 6 + 48
        levelArray=pl_file.variables['lv_ISBL1']
        # check that the observation is valid (not 999999 and not None); if so, sample the 16 surrounding grid points and append everything to the training sample
        if trainlabel != None and trainlabel != 999999:
            latitude=float(stationlist[i][1])
            longitude=float(stationlist[i][2])
            alti=float(stationlist[i][3])
            # take the index of the upper-left grid point rather than the nearest point; all 16 surrounding points are sampled
            indexlat=int((60-latitude)/0.1)
            indexlon=int((longitude-60)/0.1)
            for m in range(len(sfc_varinames)):
                variArray=sfc_file.variables[sfc_varinames[m]]
                latlonArray=variArray[j]
                vstring.append(latlonArray[indexlat][indexlon])
                vstring.append(latlonArray[indexlat][indexlon + 1])
                vstring.append(latlonArray[indexlat + 1][indexlon + 1])
                vstring.append(latlonArray[indexlat + 1][indexlon])
                vstring.append(latlonArray[indexlat - 1][indexlon - 1])
                vstring.append(latlonArray[indexlat - 1][indexlon])
                vstring.append(latlonArray[indexlat - 1][indexlon + 1])
                vstring.append(latlonArray[indexlat - 1][indexlon + 2])
                vstring.append(latlonArray[indexlat][indexlon + 2])
                vstring.append(latlonArray[indexlat + 1][indexlon + 2])
                vstring.append(latlonArray[indexlat + 2][indexlon + 2])
                vstring.append(latlonArray[indexlat + 2][indexlon + 1])
                vstring.append(latlonArray[indexlat + 2][indexlon])
                vstring.append(latlonArray[indexlat + 2][indexlon - 1])
                vstring.append(latlonArray[indexlat + 1][indexlon - 1])
                vstring.append(latlonArray[indexlat][indexlon - 1])
            for n in range(len(pl_varinames)):
                pl_variArray=pl_file.variables[pl_varinames[n]]
                phaArray=pl_variArray[j]
                for k in range(len(phaArray)):
                    llArray=phaArray[k]
                    pha=levelArray[k]
                    if pha==500 or pha==850:
                        if llArray[indexlat][indexlon]=='NaN' or llArray[indexlat][indexlon]==None:
                            logger.info(pl_file)
                        vstring.append(llArray[indexlat][indexlon])
                        vstring.append(llArray[indexlat][indexlon + 1])
                        vstring.append(llArray[indexlat + 1][indexlon + 1])
                        vstring.append(llArray[indexlat + 1][indexlon])
                        vstring.append(llArray[indexlat - 1][indexlon - 1])
                        vstring.append(llArray[indexlat - 1][indexlon])
                        vstring.append(llArray[indexlat - 1][indexlon + 1])
                        vstring.append(llArray[indexlat - 1][indexlon + 2])
                        vstring.append(llArray[indexlat][indexlon + 2])
                        vstring.append(llArray[indexlat + 1][indexlon + 2])
                        vstring.append(llArray[indexlat + 2][indexlon + 2])
                        vstring.append(llArray[indexlat + 2][indexlon + 1])
                        vstring.append(llArray[indexlat + 2][indexlon])
                        vstring.append(llArray[indexlat + 2][indexlon - 1])
                        vstring.append(llArray[indexlat + 1][indexlon - 1])
                        vstring.append(llArray[indexlat][indexlon - 1])
            # station latitude/longitude
            vstring.append(latitude)
            vstring.append(longitude)
            vstring.append(alti)
            # station elevation: use the precomputed elevations of the 16 points around the station
            demlist=demdict[stationlist[i][0]]
            for u in range(1,len(demlist),1):
                vstring.append(float(demlist[u]))
            # the last column holds the observed temperature (the label)
            vstring.append(trainlabel)
        # vstring is the predictor list for one station; append it to the training set
        if vstring != []:
            trainlist.append(vstring)
    sfc_file.close()
    pl_file.close()
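The unrolled appends above sample a 4x4 neighborhood around (indexlat, indexlon); a hedged equivalent that gathers the same 16 points with loops (visit order differs from the original):

def stencil16(arr, i, j):
    # All cells in rows i-1..i+2 and columns j-1..j+2.
    return [arr[m][n] for m in range(i - 1, i + 3)
                      for n in range(j - 1, j + 3)]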
Example No. 38
#    NetCDF file
#
#  Notes: raises an exception if PyNIO is built without GRIB2 support
#     

import numpy 
import Nio
import time,os


#
#  Read a GRIB2 file from the example data directory
#
dirc = '../ncarg/data/grib2/'
fname = "wafsgfs_L_t06z_intdsk60.grib2"
f = Nio.open_file(dirc + fname)

#
# Print the input file contents
#
# print f

#
# If the output file already exists, remove it
#
os.system("rm -f " + fname + ".nc")

#
# Set the PreFill option to False to improve writing performance
#
opt = Nio.options()
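The snippet stops after creating the options object; a hedged sketch of how the PreFill option is typically applied when creating the NetCDF copy, following PyNIO's documented options mechanism:

opt.PreFill = False
fout = Nio.open_file(fname + ".nc", "c", options=opt)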
Example No. 39
    def test(self):
        try:
            from netCDF4 import Dataset as NetCDF
        except:
            pass

        try:
            import Nio
        except:
            pass

        timeidx = 0 if not multi else None
        pat = os.path.join(dir, pattern)
        wrf_files = glob.glob(pat)
        wrf_files.sort()

        wrfin = []
        for fname in wrf_files:
            if not pynio:
                f = NetCDF(fname)
                try:
                    f.set_always_mask(False)
                except:
                    pass
                wrfin.append(f)
            else:
                if not fname.endswith(".nc"):
                    _fname = fname + ".nc"
                else:
                    _fname = fname
                f = Nio.open_file(_fname)
                wrfin.append(f)

        if (varname == "interplevel"):
            ref_ht_500 = _get_refvals(referent, "z_500", multi)
            ref_p_5000 = _get_refvals(referent, "p_5000", multi)
            ref_ht_multi = _get_refvals(referent, "z_multi", multi)
            ref_p_multi = _get_refvals(referent, "p_multi", multi)

            ref_ht2_500 = _get_refvals(referent, "z2_500", multi)
            ref_p2_5000 = _get_refvals(referent, "p2_5000", multi)
            ref_ht2_multi = _get_refvals(referent, "z2_multi", multi)
            ref_p2_multi = _get_refvals(referent, "p2_multi", multi)

            ref_p_lev2d = _get_refvals(referent, "p_lev2d", multi)

            hts = getvar(wrfin, "z", timeidx=timeidx)
            p = getvar(wrfin, "pressure", timeidx=timeidx)
            wspd_wdir = getvar(wrfin, "wspd_wdir", timeidx=timeidx)

            # Make sure the numpy versions work first
            hts_500 = interplevel(to_np(hts), to_np(p), 500)
            hts_500 = interplevel(hts, p, 500)

            # Note: the '*2*' versions in the reference are testing
            # against the new version of interplevel in NCL
            nt.assert_allclose(to_np(hts_500), ref_ht_500)
            nt.assert_allclose(to_np(hts_500), ref_ht2_500)

            # Make sure the numpy versions work first
            p_5000 = interplevel(to_np(p), to_np(hts), 5000)
            p_5000 = interplevel(p, hts, 5000)

            nt.assert_allclose(to_np(p_5000), ref_p_5000)
            nt.assert_allclose(to_np(p_5000), ref_p2_5000)

            hts_multi = interplevel(to_np(hts), to_np(p),
                                    [1000., 850., 500., 250.])
            hts_multi = interplevel(hts, p, [1000., 850., 500., 250.])

            nt.assert_allclose(to_np(hts_multi), ref_ht_multi)
            nt.assert_allclose(to_np(hts_multi), ref_ht2_multi)

            p_multi = interplevel(to_np(p), to_np(hts),
                                  [500., 2500., 5000., 10000.])
            p_multi = interplevel(p, hts, [500., 2500., 5000., 10000.])

            nt.assert_allclose(to_np(p_multi), ref_p_multi)
            nt.assert_allclose(to_np(p_multi), ref_p2_multi)

            pblh = getvar(wrfin, "PBLH", timeidx=timeidx)
            p_lev2d = interplevel(to_np(p), to_np(hts), to_np(pblh))
            p_lev2d = interplevel(p, hts, pblh)
            nt.assert_allclose(to_np(p_lev2d), ref_p_lev2d)

            # Just make sure these run below
            wspd_wdir_500 = interplevel(to_np(wspd_wdir), to_np(p), 500)
            wspd_wdir_500 = interplevel(wspd_wdir, p, 500)
            #print(wspd_wdir_500)

            wspd_wdir_multi = interplevel(to_np(wspd_wdir), to_np(p),
                                          [1000, 500, 250])
            wspd_wdir_multi = interplevel(wspd_wdir, p, [1000, 500, 250])

            wspd_wdir_pblh = interplevel(to_np(wspd_wdir), to_np(hts), pblh)
            wspd_wdir_pblh = interplevel(wspd_wdir, hts, pblh)

            if multi:
                wspd_wdir_pblh_2 = interplevel(to_np(wspd_wdir), to_np(hts),
                                               pblh[0, :])
                wspd_wdir_pblh_2 = interplevel(wspd_wdir, hts, pblh[0, :])

                # Since PBLH doesn't change in this case, it should match
                # the 0 time from previous computation. Note that this
                # only works when the data has 1 time step that is repeated.
                # If you use a different case with multiple times,
                # this will probably fail.
                nt.assert_allclose(to_np(wspd_wdir_pblh_2[:, 0, :]),
                                   to_np(wspd_wdir_pblh[:, 0, :]))

                nt.assert_allclose(to_np(wspd_wdir_pblh_2[:, -1, :]),
                                   to_np(wspd_wdir_pblh[:, 0, :]))

        elif (varname == "vertcross"):
            ref_ht_cross = _get_refvals(referent, "ht_cross", multi)
            ref_p_cross = _get_refvals(referent, "p_cross", multi)
            ref_ht_vertcross1 = _get_refvals(referent, "ht_vertcross1", multi)
            ref_ht_vertcross2 = _get_refvals(referent, "ht_vertcross2", multi)
            ref_ht_vertcross3 = _get_refvals(referent, "ht_vertcross3", multi)

            hts = getvar(wrfin, "z", timeidx=timeidx)
            p = getvar(wrfin, "pressure", timeidx=timeidx)

            pivot_point = CoordPair(hts.shape[-1] / 2, hts.shape[-2] / 2)

            # Beginning in wrf-python 1.3, users can select number of levels.
            # Originally, for pressure, dz was 10, so let's back calculate
            # the number of levels.
            p_max = np.floor(np.amax(p) / 10) * 10  # bottom value
            p_min = np.ceil(np.amin(p) / 10) * 10  # top value

            p_autolevels = int((p_max - p_min) / 10)

            # Make sure the numpy versions work first

            ht_cross = vertcross(to_np(hts),
                                 to_np(p),
                                 pivot_point=pivot_point,
                                 angle=90.,
                                 autolevels=p_autolevels)

            ht_cross = vertcross(hts,
                                 p,
                                 pivot_point=pivot_point,
                                 angle=90.,
                                 autolevels=p_autolevels)

            nt.assert_allclose(to_np(ht_cross), ref_ht_cross, rtol=.01)

            lats = hts.coords["XLAT"]
            lons = hts.coords["XLONG"]

            # Test the manual projection method with lat/lon
            # Only do this for the non-multi case, since the domain
            # might be moving
            if not multi:
                if lats.ndim > 2:  # moving nest
                    lats = lats[0, :]
                    lons = lons[0, :]

                ll_point = ll_points(lats, lons)

                pivot = CoordPair(lat=lats[int(lats.shape[-2] / 2),
                                           int(lats.shape[-1] / 2)],
                                  lon=lons[int(lons.shape[-2] / 2),
                                           int(lons.shape[-1] / 2)])

                v1 = vertcross(hts,
                               p,
                               wrfin=wrfin,
                               pivot_point=pivot_point,
                               angle=90.0)
                v2 = vertcross(hts,
                               p,
                               projection=hts.attrs["projection"],
                               ll_point=ll_point,
                               pivot_point=pivot_point,
                               angle=90.)

                nt.assert_allclose(to_np(v1), to_np(v2), rtol=.01)

            # Test opposite

            p_cross1 = vertcross(p, hts, pivot_point=pivot_point, angle=90.0)

            nt.assert_allclose(to_np(p_cross1), ref_p_cross, rtol=.01)
            # Test point to point
            start_point = CoordPair(0, hts.shape[-2] / 2)
            end_point = CoordPair(-1, hts.shape[-2] / 2)

            p_cross2 = vertcross(p,
                                 hts,
                                 start_point=start_point,
                                 end_point=end_point)

            nt.assert_allclose(to_np(p_cross1), to_np(p_cross2))

            # Check the new vertcross routine
            pivot_point = CoordPair(hts.shape[-1] / 2, hts.shape[-2] / 2)
            ht_cross = vertcross(hts,
                                 p,
                                 pivot_point=pivot_point,
                                 angle=90.,
                                 latlon=True)

            nt.assert_allclose(to_np(ht_cross),
                               to_np(ref_ht_vertcross1),
                               atol=.01)

            levels = [1000., 850., 700., 500., 250.]
            ht_cross = vertcross(hts,
                                 p,
                                 pivot_point=pivot_point,
                                 angle=90.,
                                 levels=levels,
                                 latlon=True)

            nt.assert_allclose(to_np(ht_cross),
                               to_np(ref_ht_vertcross2),
                               atol=.01)

            idxs = (0, slice(None)) if lats.ndim > 2 else (slice(None), )

            start_lat = np.amin(
                lats[idxs]) + .25 * (np.amax(lats[idxs]) - np.amin(lats[idxs]))
            end_lat = np.amin(
                lats[idxs]) + .65 * (np.amax(lats[idxs]) - np.amin(lats[idxs]))

            start_lon = np.amin(
                lons[idxs]) + .25 * (np.amax(lons[idxs]) - np.amin(lons[idxs]))
            end_lon = np.amin(
                lons[idxs]) + .65 * (np.amax(lons[idxs]) - np.amin(lons[idxs]))

            start_point = CoordPair(lat=start_lat, lon=start_lon)
            end_point = CoordPair(lat=end_lat, lon=end_lon)

            ll_point = ll_points(lats[idxs], lons[idxs])

            ht_cross = vertcross(hts,
                                 p,
                                 start_point=start_point,
                                 end_point=end_point,
                                 projection=hts.attrs["projection"],
                                 ll_point=ll_point,
                                 latlon=True,
                                 autolevels=1000)

            nt.assert_allclose(to_np(ht_cross),
                               to_np(ref_ht_vertcross3),
                               rtol=.01)

            if multi:
                ntimes = hts.shape[0]

                for t in range(ntimes):
                    hts = getvar(wrfin, "z", timeidx=t)
                    p = getvar(wrfin, "pressure", timeidx=t)

                    ht_cross = vertcross(hts,
                                         p,
                                         start_point=start_point,
                                         end_point=end_point,
                                         wrfin=wrfin,
                                         timeidx=t,
                                         latlon=True,
                                         autolevels=1000)

                    refname = "ht_vertcross_t{}".format(t)
                    ref_ht_vertcross = _get_refvals(referent, refname, False)

                    nt.assert_allclose(to_np(ht_cross),
                                       to_np(ref_ht_vertcross),
                                       rtol=.02)

        elif (varname == "interpline"):

            ref_t2_line = _get_refvals(referent, "t2_line", multi)
            ref_t2_line2 = _get_refvals(referent, "t2_line2", multi)
            ref_t2_line3 = _get_refvals(referent, "t2_line3", multi)

            t2 = getvar(wrfin, "T2", timeidx=timeidx)
            pivot_point = CoordPair(t2.shape[-1] / 2, t2.shape[-2] / 2)

            # Make sure the numpy version works
            t2_line1 = interpline(to_np(t2),
                                  pivot_point=pivot_point,
                                  angle=90.0)
            t2_line1 = interpline(t2, pivot_point=pivot_point, angle=90.0)

            nt.assert_allclose(to_np(t2_line1), ref_t2_line)

            # Test the new NCL wrf_user_interplevel result
            nt.assert_allclose(to_np(t2_line1), ref_t2_line2)

            # Test the manual projection method with lat/lon
            lats = t2.coords["XLAT"]
            lons = t2.coords["XLONG"]
            if multi:
                if lats.ndim > 2:  # moving nest
                    lats = lats[0, :]
                    lons = lons[0, :]

            ll_point = ll_points(lats, lons)

            pivot = CoordPair(lat=lats[int(lats.shape[-2] / 2),
                                       int(lats.shape[-1] / 2)],
                              lon=lons[int(lons.shape[-2] / 2),
                                       int(lons.shape[-1] / 2)])

            l1 = interpline(t2,
                            wrfin=wrfin,
                            pivot_point=pivot_point,
                            angle=90.0)

            l2 = interpline(t2,
                            projection=t2.attrs["projection"],
                            ll_point=ll_point,
                            pivot_point=pivot,
                            angle=90.)
            nt.assert_allclose(to_np(l1), to_np(l2), rtol=.01)

            # Test point to point
            start_point = CoordPair(0, t2.shape[-2] / 2)
            end_point = CoordPair(-1, t2.shape[-2] / 2)

            t2_line2 = interpline(t2,
                                  start_point=start_point,
                                  end_point=end_point)

            nt.assert_allclose(to_np(t2_line1), to_np(t2_line2))

            # Now test the start/end with lat/lon points

            start_lat = float(
                np.amin(lats) + .25 * (np.amax(lats) - np.amin(lats)))
            end_lat = float(
                np.amin(lats) + .65 * (np.amax(lats) - np.amin(lats)))

            start_lon = float(
                np.amin(lons) + .25 * (np.amax(lons) - np.amin(lons)))
            end_lon = float(
                np.amin(lons) + .65 * (np.amax(lons) - np.amin(lons)))

            start_point = CoordPair(lat=start_lat, lon=start_lon)
            end_point = CoordPair(lat=end_lat, lon=end_lon)

            t2_line3 = interpline(t2,
                                  wrfin=wrfin,
                                  timeidx=0,
                                  start_point=start_point,
                                  end_point=end_point,
                                  latlon=True)

            nt.assert_allclose(to_np(t2_line3), ref_t2_line3, rtol=.01)

            # Test all time steps
            if multi:
                refnc = NetCDF(referent)
                ntimes = t2.shape[0]

                for t in range(ntimes):
                    t2 = getvar(wrfin, "T2", timeidx=t)

                    line = interpline(t2,
                                      wrfin=wrfin,
                                      timeidx=t,
                                      start_point=start_point,
                                      end_point=end_point,
                                      latlon=True)

                    refname = "t2_line_t{}".format(t)
                    refline = refnc.variables[refname][:]

                    nt.assert_allclose(to_np(line), to_np(refline), rtol=.005)

                refnc.close()

            # Test NCL's single-time case
            if not multi:
                refnc = NetCDF(referent)
                ref_t2_line4 = refnc.variables["t2_line4"][:]

                t2 = getvar(wrfin, "T2", timeidx=0)
                line = interpline(t2,
                                  wrfin=wrfin,
                                  timeidx=0,
                                  start_point=start_point,
                                  end_point=end_point,
                                  latlon=True)

                nt.assert_allclose(to_np(line), to_np(ref_t2_line4), rtol=.005)
                refnc.close()

        elif (varname == "vinterp"):
            # Tk to theta
            fld_tk_theta = _get_refvals(referent, "fld_tk_theta", multi)
            fld_tk_theta = np.squeeze(fld_tk_theta)

            tk = getvar(wrfin, "temp", timeidx=timeidx, units="k")

            interp_levels = [200, 300, 500, 1000]

            # Make sure the numpy version works
            field = vinterp(wrfin,
                            field=to_np(tk),
                            vert_coord="theta",
                            interp_levels=interp_levels,
                            extrapolate=True,
                            field_type="tk",
                            timeidx=timeidx,
                            log_p=True)

            field = vinterp(wrfin,
                            field=tk,
                            vert_coord="theta",
                            interp_levels=interp_levels,
                            extrapolate=True,
                            field_type="tk",
                            timeidx=timeidx,
                            log_p=True)

            tol = 5 / 100.
            atol = 0.0001

            field = np.squeeze(field)
            #print (np.amax(np.abs(to_np(field) - fld_tk_theta)))
            nt.assert_allclose(to_np(field), fld_tk_theta, tol, atol)

            # Tk to theta-e
            fld_tk_theta_e = _get_refvals(referent, "fld_tk_theta_e", multi)
            fld_tk_theta_e = np.squeeze(fld_tk_theta_e)

            interp_levels = [200, 300, 500, 1000]

            field = vinterp(wrfin,
                            field=tk,
                            vert_coord="theta-e",
                            interp_levels=interp_levels,
                            extrapolate=True,
                            field_type="tk",
                            timeidx=timeidx,
                            log_p=True)

            tol = 3 / 100.
            atol = 50.0001

            field = np.squeeze(field)
            #print (np.amax(np.abs(to_np(field) - fld_tk_theta_e)/fld_tk_theta_e)*100)
            nt.assert_allclose(to_np(field), fld_tk_theta_e, tol, atol)

            # Tk to pressure
            fld_tk_pres = _get_refvals(referent, "fld_tk_pres", multi)
            fld_tk_pres = np.squeeze(fld_tk_pres)

            interp_levels = [850, 500]

            field = vinterp(wrfin,
                            field=tk,
                            vert_coord="pressure",
                            interp_levels=interp_levels,
                            extrapolate=True,
                            field_type="tk",
                            timeidx=timeidx,
                            log_p=True)

            field = np.squeeze(field)

            #print (np.amax(np.abs(to_np(field) - fld_tk_pres)))
            nt.assert_allclose(to_np(field), fld_tk_pres, tol, atol)

            # Tk to geoht_msl
            fld_tk_ght_msl = _get_refvals(referent, "fld_tk_ght_msl", multi)
            fld_tk_ght_msl = np.squeeze(fld_tk_ght_msl)
            interp_levels = [1, 2]

            field = vinterp(wrfin,
                            field=tk,
                            vert_coord="ght_msl",
                            interp_levels=interp_levels,
                            extrapolate=True,
                            field_type="tk",
                            timeidx=timeidx,
                            log_p=True)

            field = np.squeeze(field)
            #print (np.amax(np.abs(to_np(field) - fld_tk_ght_msl)))
            nt.assert_allclose(to_np(field), fld_tk_ght_msl, tol, atol)

            # Tk to geoht_agl
            fld_tk_ght_agl = _get_refvals(referent, "fld_tk_ght_agl", multi)
            fld_tk_ght_agl = np.squeeze(fld_tk_ght_agl)
            interp_levels = [1, 2]

            field = vinterp(wrfin,
                            field=tk,
                            vert_coord="ght_agl",
                            interp_levels=interp_levels,
                            extrapolate=True,
                            field_type="tk",
                            timeidx=timeidx,
                            log_p=True)

            field = np.squeeze(field)
            #print (np.amax(np.abs(to_np(field) - fld_tk_ght_agl)))
            nt.assert_allclose(to_np(field), fld_tk_ght_agl, tol, atol)

            # Hgt to pressure
            fld_ht_pres = _get_refvals(referent, "fld_ht_pres", multi)
            fld_ht_pres = np.squeeze(fld_ht_pres)

            z = getvar(wrfin, "height", timeidx=timeidx, units="m")
            interp_levels = [500, 50]
            field = vinterp(wrfin,
                            field=z,
                            vert_coord="pressure",
                            interp_levels=interp_levels,
                            extrapolate=True,
                            field_type="ght",
                            timeidx=timeidx,
                            log_p=True)

            field = np.squeeze(field)
            #print (np.amax(np.abs(to_np(field) - fld_ht_pres)))
            nt.assert_allclose(to_np(field), fld_ht_pres, tol, atol)

            # Pressure to theta
            fld_pres_theta = _get_refvals(referent, "fld_pres_theta", multi)
            fld_pres_theta = np.squeeze(fld_pres_theta)

            p = getvar(wrfin, "pressure", timeidx=timeidx)
            interp_levels = [200, 300, 500, 1000]
            field = vinterp(wrfin,
                            field=p,
                            vert_coord="theta",
                            interp_levels=interp_levels,
                            extrapolate=True,
                            field_type="pressure",
                            timeidx=timeidx,
                            log_p=True)

            field = np.squeeze(field)
            #print (np.amax(np.abs(to_np(field) - fld_pres_theta)))
            nt.assert_allclose(to_np(field), fld_pres_theta, tol, atol)

            # Theta-e to pres
            fld_thetae_pres = _get_refvals(referent, "fld_thetae_pres", multi)
            fld_thetae_pres = np.squeeze(fld_thetae_pres)

            eth = getvar(wrfin, "eth", timeidx=timeidx)
            interp_levels = [850, 500, 5]
            field = vinterp(wrfin,
                            field=eth,
                            vert_coord="pressure",
                            interp_levels=interp_levels,
                            extrapolate=True,
                            field_type="theta-e",
                            timeidx=timeidx,
                            log_p=True)

            field = np.squeeze(field)
            #print (np.amax(np.abs(to_np(field) - fld_thetae_pres)))
            nt.assert_allclose(to_np(field), fld_thetae_pres, tol, atol)
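
A minimal, self-contained sketch of the vinterp call pattern exercised above may help readers try it outside the test harness; the file name wrfout_d01.nc is a placeholder, not a path from the original tests.

from netCDF4 import Dataset
from wrf import getvar, vinterp, to_np

wrfin = Dataset("wrfout_d01.nc")          # hypothetical WRF output file
tk = getvar(wrfin, "temp", timeidx=0, units="k")
field = vinterp(wrfin,
                field=tk,
                vert_coord="pressure",    # interpolate onto pressure levels
                interp_levels=[850, 500], # hPa
                extrapolate=True,
                field_type="tk",
                timeidx=0,
                log_p=True)
print(to_np(field).shape)
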
Ejemplo n.º 40
0
    def test(self):

        try:
            from netCDF4 import Dataset as NetCDF
        except ImportError:
            pass

        try:
            import Nio
        except ImportError:
            pass

        timeidx = 0 if not multi else None
        pat = os.path.join(dir, pattern)
        wrf_files = glob.glob(pat)
        wrf_files.sort()

        wrfin = []
        for fname in wrf_files:
            if not pynio:
                f = NetCDF(fname)
                try:
                    f.set_always_mask(False)
                except AttributeError:
                    pass
                wrfin.append(f)
            else:
                if not fname.endswith(".nc"):
                    _fname = fname + ".nc"
                else:
                    _fname = fname
                f = Nio.open_file(_fname)
                wrfin.append(f)

        refnc = NetCDF(referent)
        try:
            refnc.set_auto_mask(False)
        except AttributeError:
            pass

        # These have a left index that defines the product type
        multiproduct = varname in ("uvmet", "uvmet10", "cape_2d", "cape_3d",
                                   "cfrac")
        multi2d = ("uvmet10", "cape_2d", "cfrac")
        multi3d = ("uvmet", "cape_3d")

        # These varnames don't have NCL functions to test against
        ignore_referent = ("zstag", "geopt_stag")

        if varname not in ignore_referent:
            if not multi:
                if varname in multi2d:
                    ref_vals = refnc.variables[varname][..., 0, :, :]
                elif varname in multi3d:
                    ref_vals = refnc.variables[varname][..., 0, :, :, :]
                else:
                    ref_vals = refnc.variables[varname][0, :]
            else:
                ref_vals = refnc.variables[varname][:]

        if (varname == "tc"):
            my_vals = getvar(wrfin, "temp", timeidx=timeidx, units="c")
            tol = 1 / 100.
            atol = .1  # Note:  NCL uses 273.16 as conversion for some reason
            nt.assert_allclose(to_np(my_vals), ref_vals, tol, atol)
        elif (varname == "height_agl"):
            # Change the vert_type to height_agl when NCL gets updated.
            my_vals = getvar(wrfin, "z", timeidx=timeidx, msl=False)
            tol = 1 / 100.
            atol = .1
            nt.assert_allclose(to_np(my_vals), ref_vals, tol, atol)
        elif (varname == "cfrac"):
            my_vals = getvar(wrfin, "cfrac", timeidx=timeidx)
            tol = 1 / 100.
            atol = .1
            nt.assert_allclose(to_np(my_vals), ref_vals, tol, atol)
        elif (varname == "pw"):
            my_vals = getvar(wrfin, "pw", timeidx=timeidx)
            tol = .5 / 100.0
            atol = 0  # NCL uses different constants and a different
            # hand-rolled virtual temperature in this method
            try:
                nt.assert_allclose(to_np(my_vals), ref_vals, tol, atol)
            except AssertionError:
                print(np.amax(np.abs(to_np(my_vals) - ref_vals)))
                raise
        elif (varname == "cape_2d"):
            cape_2d = getvar(wrfin, varname, timeidx=timeidx)
            tol = 0 / 100.
            atol = 200.0
            # Only compare CAPE values until the F90 changes are merged
            # back into NCL.  The modifications to R and CP change TK
            # enough that non-lifting parcels could lift, causing wildly
            # different LCL values.
            nt.assert_allclose(to_np(cape_2d[0, :]), ref_vals[0, :], tol, atol)
        elif (varname == "cape_3d"):
            cape_3d = getvar(wrfin, varname, timeidx=timeidx)
            # Changing the R and CP constants, while keeping TK within
            # 2%, can lead to some big changes in CAPE.  Tolerances have
            # been set wide for comparison with the original NCL.  Change
            # back when the F90 code is merged back into NCL.
            tol = 0 / 100.
            atol = 200.0

            #print np.amax(np.abs(to_np(cape_3d[0,:]) - ref_vals[0,:]))
            nt.assert_allclose(to_np(cape_3d), ref_vals, tol, atol)
        elif (varname == "zstag" or varname == "geopt_stag"):
            v = getvar(wrfin, varname, timeidx=timeidx)
            # For now, only make sure it runs without crashing since no NCL
            # to compare with yet.
        else:
            my_vals = getvar(wrfin, varname, timeidx=timeidx)
            tol = 2 / 100.
            atol = 0.1
            #print (np.amax(np.abs(to_np(my_vals) - ref_vals)))
            try:
                nt.assert_allclose(to_np(my_vals), ref_vals, tol, atol)
            except AssertionError:
                absdiff = np.abs(to_np(my_vals) - ref_vals)
                maxdiff = np.amax(absdiff)
                print(maxdiff)
                print(np.argwhere(absdiff == maxdiff))

                raise
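
The test above relies on passing a sequence of open files to getvar; a hedged sketch of that multi-file pattern, with a hypothetical wrfout_d01* glob, looks like this:

import glob
from netCDF4 import Dataset
from wrf import getvar, ALL_TIMES

wrfin = [Dataset(f) for f in sorted(glob.glob("wrfout_d01*"))]
# timeidx=ALL_TIMES concatenates every time step across the file sequence
slp = getvar(wrfin, "slp", timeidx=ALL_TIMES)
print(slp.shape)
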
Ejemplo n.º 41
0
    def test(self):
        try:
            from netCDF4 import Dataset as NetCDF
        except ImportError:
            pass

        try:
            import Nio
        except ImportError:
            pass

        timeidx = 0 if not multi else None
        pat = os.path.join(dir, pattern)
        wrf_files = glob.glob(pat)
        wrf_files.sort()

        refnc = NetCDF(referent)
        try:
            refnc.set_always_mask(False)
        except AttributeError:
            pass

        wrfin = []
        for fname in wrf_files:
            if not pynio:
                f = NetCDF(fname)
                try:
                    f.set_auto_mask(False)
                except AttributeError:
                    pass
                wrfin.append(f)
            else:
                if not fname.endswith(".nc"):
                    _fname = fname + ".nc"
                else:
                    _fname = fname
                f = Nio.open_file(_fname)
                wrfin.append(f)

        if testid == "xy":

            # Lats/Lons taken from NCL script, just hard-coding for now
            lats = [22.0, 25.0, 27.0]
            lons = [-90.0, -87.5, -83.75]

            # Just call with a single lat/lon
            if single:
                timeidx = 8
                ref_vals = refnc.variables["xy2"][:]

                xy = ll_to_xy(wrfin,
                              lats[0],
                              lons[0],
                              timeidx=timeidx,
                              as_int=True)
                ref = ref_vals[:, 0]

                nt.assert_allclose(to_np(xy), ref)

                # Next make sure the 'proj' version works
                projparams = extract_proj_params(wrfin, timeidx=timeidx)
                xy_proj = ll_to_xy_proj(lats[0],
                                        lons[0],
                                        as_int=True,
                                        **projparams)

                nt.assert_allclose(to_np(xy_proj), to_np(xy))

            else:
                ref_vals = refnc.variables["xy1"][:]
                xy = ll_to_xy(wrfin, lats, lons, timeidx=None, as_int=False)

                ref = ref_vals[:]

                nt.assert_allclose(to_np(xy), ref)

                if xy.ndim > 2:
                    # Moving nest
                    is_moving = True
                    numtimes = xy.shape[-2]
                else:
                    is_moving = False
                    numtimes = 1

                for tidx in range(numtimes):

                    # Next make sure the 'proj' version works
                    projparams = extract_proj_params(wrfin, timeidx=tidx)
                    xy_proj = ll_to_xy_proj(lats,
                                            lons,
                                            as_int=False,
                                            **projparams)

                    if is_moving:
                        idxs = (slice(None), tidx, slice(None))
                    else:
                        idxs = (slice(None), )

                    nt.assert_allclose(to_np(xy_proj), to_np(xy[idxs]))

        else:
            # i_s, j_s taken from NCL script, just hard-coding for now
            # NCL uses 1-based indexing for this, so need to subtract 1
            x_s = np.asarray([10, 50, 90], int)
            y_s = np.asarray([10, 50, 90], int)

            if single:
                timeidx = 8
                ref_vals = refnc.variables["ll2"][:]
                ll = xy_to_ll(wrfin, x_s[0], y_s[0], timeidx=timeidx)
                ref = ref_vals[::-1, 0]

                nt.assert_allclose(to_np(ll), ref)

                # Next make sure the 'proj' version works
                projparams = extract_proj_params(wrfin, timeidx=8)
                ll_proj = xy_to_ll_proj(x_s[0], y_s[0], **projparams)

                nt.assert_allclose(to_np(ll_proj), to_np(ll))

            else:
                ref_vals = refnc.variables["ll1"][:]
                ll = xy_to_ll(wrfin, x_s, y_s, timeidx=None)
                ref = ref_vals[::-1, :]

                nt.assert_allclose(to_np(ll), ref)

                if ll.ndim > 2:
                    # Moving nest
                    is_moving = True
                    numtimes = ll.shape[-2]
                else:
                    is_moving = False
                    numtimes = 1

                for tidx in range(numtimes):
                    # Next make sure the 'proj' version works
                    projparams = extract_proj_params(wrfin, timeidx=tidx)
                    ll_proj = xy_to_ll_proj(x_s, y_s, **projparams)

                    if is_moving:
                        idxs = (slice(None), tidx, slice(None))
                    else:
                        idxs = (slice(None), )

                    nt.assert_allclose(to_np(ll_proj), to_np(ll[idxs]))
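
For reference, a small round-trip sketch of the ll_to_xy / xy_to_ll pair tested above; the file name is a placeholder, and the lat/lon values mirror the hard-coded ones in the test.

from netCDF4 import Dataset
from wrf import ll_to_xy, xy_to_ll, to_np

wrfin = Dataset("wrfout_d01.nc")                    # hypothetical file
xy = ll_to_xy(wrfin, 22.0, -90.0, timeidx=0, as_int=True)
ll = xy_to_ll(wrfin, to_np(xy)[0], to_np(xy)[1], timeidx=0)
print(to_np(xy), to_np(ll))                         # grid indices, then lat/lon
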
Ejemplo n.º 42
0
    def open_new_file(self, file_name,
                      var_names=['X'],
                      long_names=[None],
                      units_names=['None'],
                      dtypes=['float32'],
                      ### dtypes=['float64'],
                      time_units='minutes',
                      comment=''):
          
        #--------------------------------------------------
        # Try to import the Nio module from PyNIO package
        #--------------------------------------------------
        Nio = self.import_nio()
        if not(Nio): return False

        #----------------------------
        # Does file already exist ?
        #----------------------------
        file_name = file_utils.check_overwrite( file_name )
        
        #---------------------------------------
        # Check and store the time series info
        #---------------------------------------
        self.format     = 'ncts'
        self.file_name  = file_name
        self.time_index = 0
        if (long_names[0] is None):
            long_names = var_names
        #-------------------------------------------
        # We may not need to save these in self.
        # I don't think they're used anywhere yet.
        #-------------------------------------------
        self.var_names   = var_names           
        self.long_names  = long_names
        self.units_names = units_names
        self.time_units  = time_units
        self.dtypes      = dtypes

        #---------------------------------------------
        # Create array of Nio type codes from dtypes
        #---------------------------------------------
        nio_type_map   = self.get_nio_type_map()
        nio_type_codes = []
        if (len(dtypes) == len(var_names)):
            for dtype in dtypes:
               nio_type_code = nio_type_map[ dtype.lower() ]
               nio_type_codes.append( nio_type_code )
        else:
            dtype = dtypes[0]
            nio_type_code = nio_type_map[ dtype.lower() ]
            for k in xrange(len(var_names)):
                nio_type_codes.append( nio_type_code )                
        self.nio_type_codes = nio_type_codes
            
        #-------------------------------------
        # Open a new netCDF file for writing
        #-------------------------------------
        # Sample output from time.asctime():
        #     "Thu Oct  8 17:10:18 2009"
        #-------------------------------------
        opt = Nio.options()
        opt.PreFill = False            # (for efficiency)
        opt.HeaderReserveSpace = 4000  # (4000 bytes, for efficiency)
        history = "Created using PyNIO " + Nio.__version__ + " on "
        history = history + time.asctime() + ". " 
        history = history + comment

        try:
            ncts_unit = Nio.open_file(file_name, mode="w",
                                      options=opt, history=history )
            OK = True
        except:
            OK = False
            return OK
        
        #------------------------------------------------
        # Create an unlimited time dimension (via None)
        #------------------------------------------------
        # Without using "int()" for length, we get this:
        #     TypeError: size must be None or integer
        #------------------------------------------------
        ncts_unit.create_dimension("time", None)

        #-------------------------
        # Create a time variable
        #---------------------------------------------------
        # ('d' = float64; must match in add_values_at_IDs())
        #---------------------------------------------------
        # NB! Can't use "time" vs. "tvar" here unless we
        #     add "import time" inside this function.
        #---------------------------------------------------
        tvar = ncts_unit.create_variable('time', 'd', ("time",))
        ncts_unit.variables['time'].units = time_units
        
        #-----------------------------------
        # Create variables using var_names
        #-----------------------------------
        # Returns "var" as a PyNIO object
        #---------------------------------------------------
        # NB! The 3rd argument here (dimension), must be a
        #     tuple.  If there is only one dimension, then
        #     we need to add a comma, as shown.
        #---------------------------------------------------
        for k in xrange(len(var_names)):
            var_name = var_names[k]
            var = ncts_unit.create_variable(var_name, nio_type_codes[k],
                                            ("time",))
        
            #------------------------------------
            # Create attributes of the variable
            #------------------------------------
            ncts_unit.variables[var_name].long_name = long_names[k]
            ncts_unit.variables[var_name].units     = units_names[k]        

            #----------------------------------
            # Specify a "nodata" fill value ?
            #----------------------------------
            var._FillValue = -9999.0    ## Does this jibe with PreFill above ??
            
        self.ncts_unit = ncts_unit
        return OK
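
open_new_file() only sets the file up; a hedged sketch of how values might later be appended along the unlimited time dimension follows. The helper name and values are illustrative, not from the original class.

def add_values(ncts_unit, time_index, time_value, values_by_name):
    # write one record along the unlimited "time" dimension
    ncts_unit.variables['time'][time_index] = time_value
    for name, value in values_by_name.items():
        ncts_unit.variables[name][time_index] = value

# e.g.:  add_values(self.ncts_unit, self.time_index, 60.0, {'X': 1.5})
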
Ejemplo n.º 43
0
    "=======================================================================")
print(Nio.__doc__)
print(
    "=======================================================================")

print("The NioOptions constructor options docstring (Nio.options.__doc__):")

print(
    "=======================================================================")
print(Nio.options.__doc__)
print(
    "=======================================================================")

# create an NioOptions object

opt = Nio.options()

print("The NioOptions class docstring (opt.__doc__):")

print(
    "=======================================================================")
print(opt.__doc__)
print(
    "=======================================================================")

print("The NioFile constructor open_file docstring (Nio.open_file.__doc__):")

print(
    "=======================================================================")
print(Nio.open_file.__doc__)
print(
Ejemplo n.º 44
0
   - read netCDF file
   - retrieve variable information

  2018-08-21  kmf
"""
from __future__ import print_function
import Ngl,Nio

print("")

#--  data file name
fname  = "./rectilinear_grid_3D.nc"

#--  open file
f = Nio.open_file(fname, "r")

#-- get the sizes of all dimensions in the same order as the names
dims = f.dimensions.values()
print("--> Dimensions:              "+ str(dims))

#-- retrieve the dimension names of the file
dimnames = f.dimensions.keys()
print("--> Dimension names of file: "+ str(dimnames))

#-- get only the variable names, not the dimension names
varnames = f.variables.keys()
print ("--> Variable names:         "+ str(varnames))

var_list = [i for i in varnames if i not in dimnames]
print ("--> Variables:              "+ str(var_list))
Ejemplo n.º 45
0
#    o  Reading an existing color map and subsetting it.
#
#  Output:
#    This example produces two visualizations:
#      1.  Fully opaque vectors
#      2.  Partially opaque vectors
#
from __future__ import print_function
import os, numpy
import Ngl, Nio

#
# Create some dummy data for the contour plot.
#
dirc = Ngl.pynglpath("data")
f = Nio.open_file(os.path.join(dirc, "cdf", "uv300.nc"))
u = f.variables["U"][1, :, :]
v = f.variables["V"][1, :, :]
lat = f.variables["lat"][:]
lon = f.variables["lon"][:]
spd = numpy.sqrt(u**2 + v**2)

wks_type = "png"
wks = Ngl.open_wks(wks_type, "newcolor3")

cnres = Ngl.Resources()
cnres.nglDraw = False
cnres.nglFrame = False

cmap = Ngl.read_colormap_file("WhiteBlueGreenYellowRed")
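
The listing is cut off here; judging by the header comments, the continuation subsets the palette and sets vector opacity. A sketch under those assumptions (the resource values are guesses, not the original script's):

cnres.cnFillPalette = cmap[15:]   # assumed subset of the color map
vcres = Ngl.Resources()
vcres.nglDraw = False
vcres.nglFrame = False
vcres.vcGlyphOpacityF = 0.3       # assumed partial opacity for the vectors
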
Ejemplo n.º 46
0
def modellearn(ll):
    allpath = '/Users/yetao.lu/Documents/testdata'
    # walk the data files
    inputfile = ''
    inputfile2 = ''
    #filesdict={}
    for rootpath, dirs, files in os.walk(allpath):
        for file in files:
            if file[:3] == 'sfc' and file[-5:] == '.grib' and (string.find(file,'2014')==-1):
                inputfile = os.path.join(rootpath, file)
                inputfile2=inputfile.replace('sfc','pl')
                #filesdict[inputfile]=inputfile2
                #print inputfile,inputfile2
                sfcfile=Nio.open_file(inputfile,'r')
                plfile=Nio.open_file(inputfile2,'r')
                print sfcfile.variables.keys()
                print plfile.variables.keys()
                # argument 0 means the forecast at the 0th lead time
                GetStationsAndOnetimesFromEC(0,sfc_varinames,sfcfile,pl_varinames,plfile,inputfile)
                # csvfile1='/Users/yetao.lu/Desktop/mos/data.csv'
                # filewrite=open(csvfile1,'w')
                # #print len(alllist)
                # for i in range(len(stationsVlist)):
                #     for j in range(len(stationsVlist[i])):
                #         if j==len(stationsVlist[i])-1:
                #             filewrite.write(str((stationsVlist[i])[j]))
                #         else:
                #             filewrite.write(str((stationsVlist[i])[j])+',')
                #     filewrite.write('\n')
    # file = Nio.open_file(inputfile, 'r')
    # names = file.variables.keys()
    # fnames = file.variables.values()
    # t = getattr(file.variables[names[1]], 'initial_time')
    # odatetime = datetime.datetime.strptime(t, '%m/%d/%Y (%H:%M)')
    print len(stationsVlist),len(trainlebellist)
    stationArray=numpy.array(stationsVlist)
    trainlebelArray=numpy.array(trainlebellist)
    # normalize the data before training
    print stationArray
    #print stationArray.shape,trainlebelArray.shape
    # savecsvfile="/Users/yetao.lu/Documents/testdata/a.csv"
    # cf=open(savecsvfile,'w')
    # for i in range(len(stationsVlist)):
    #     for j in range(len(stationsVlist[i])):
    #         cf.write(stationsVlist[i][j]+',')
    #     for j in range(len(trainlebellist[i])):
    #         cf.write(trainlebellist[i][j])
    #     cf.write('\n')
    # cf.close()
    # xgb_train=xgboost.DMatrix(stationArray,label=trainlebelArray)
    # xgb_val=xgboost.DMatrix(stationArray,label=trainlebelArray)
    #xgboost
    x_train,x_test,y_train,y_test=train_test_split(stationArray,trainlebelArray,test_size=0.33,random_state=7)
    y_origin=x_test[:,0]
    xgbtrain=xgboost.DMatrix(x_train,label=y_train)
    xgbtest=xgboost.DMatrix(x_test,label=y_test)
    xgbtrain.save_binary('train.buffer')
    print len(stationArray),len(trainlebelArray) ,len(x_train),len(x_test),len(y_train),len(y_test)
    #print xgbtest
    # report training and validation error rates
    watchlist=[(xgbtrain,'train'),(xgbtest,'eval')]
    params={
    'booster':'gbtree',
    'objective': 'reg:linear', # linear regression
    'gamma':0.1,  # controls post-pruning; larger is more conservative (typically 0.1 or 0.2)
    'max_depth':12, # depth of each tree; deeper overfits more easily
    'lambda':2,  # L2 regularization on the weights; larger values resist overfitting
    'subsample':0.7, # randomly subsample the training rows
    'colsample_bytree':0.7, # column subsampling when growing each tree
    'min_child_weight':3,
    # Defaults to 1: the minimum sum of h (the hessian) allowed in each leaf.
    # For 0-1 classification with unbalanced classes and h around 0.01,
    # min_child_weight = 1 means a leaf must contain at least 100 samples.
    # This strongly affects the result; smaller values overfit more easily.
    'silent':0 ,# 1 suppresses run-time output; best left at 0
    'eta': 0.1, # acts like a learning rate
    'seed':1000,
    'nthread':3,# number of CPU threads
    #'eval_metric': 'auc'
    'scale_pos_weight':1,
    'tree_method':'auto'
    }
    plst=list(params.items())
    num_rounds=5000
    
    # with a large round count, early_stopping_rounds stops training once the eval metric has not improved within that many rounds
    model=xgboost.train(plst,xgbtrain,num_rounds,watchlist,early_stopping_rounds=800)
    
    #print model,watchlist
    preds=model.predict(xgbtest,ntree_limit=model.best_iteration)
    # write the predictions to a file; there are many ways, use whatever is convenient
    # numpy.savetxt('submission_xgb_MultiSoftmax.csv',numpy.c_[range(1,len(test)+1),preds],
    #                 delimiter=',',header='ImageId,Label',comments='',fmt='%d')
    
    #print preds
    # accuracy
    print y_test.dtype,preds.dtype
    y_test=y_test.astype('float32')
    mse=mean_squared_error(y_test,preds)
    print("MSE: %.4f" % mse)
    n=0
    for x,y in zip(y_test,preds):
        if abs(x-y)<4:
            n=n+1
    accuracy=float(n)/float(len(y_test))
    print ("accuracy: %.4f" % accuracy)
    y_origin=y_origin-273.15
    mse0=mean_squared_error(y_test,y_origin)
    print("MSE: %.4f" % mse0)
    n=0
    for x,y in zip(y_test,y_origin):
        if abs(x-y)<7:
            n=n+1
    accuracy1=float(n)/float(len(y_test))
    print ("accuracy: %.4f" % accuracy1)
    testfile='/Users/yetao.lu/Desktop/mos/test'+str(ll)+'.csv'
    predsfile='/Users/yetao.lu/Desktop/mos/preds'+str(ll)+'.csv'
    originfile='/Users/yetao.lu/Desktop/mos/origin'+str(ll)+'.csv'
    testfw=open(testfile,'w')
    predsfw=open(predsfile,'w')
    originfw=open(originfile,'w')
    for u in y_test:
        testfw.write(str(u)+',')
    testfw.close()
    for o in preds:
        predsfw.write(str(o)+',')
    predsfw.close()
    for q in y_origin:
        originfw.write(str(q)+',')
    originfw.close()
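
Because the GRIB inputs above are local, a self-contained miniature of the same xgboost regression workflow, using random data so it actually runs, may be useful; every name and value here is illustrative.

import numpy
import xgboost
from sklearn.model_selection import train_test_split
from sklearn.metrics import mean_squared_error

X = numpy.random.rand(200, 7)                         # fake features
y = X[:, 0] * 10.0 + numpy.random.randn(200)          # fake target
x_train, x_test, y_train, y_test = train_test_split(
    X, y, test_size=0.33, random_state=7)
dtrain = xgboost.DMatrix(x_train, label=y_train)
dtest = xgboost.DMatrix(x_test, label=y_test)
params = {'booster': 'gbtree',
          'objective': 'reg:linear',   # 'reg:squarederror' on newer xgboost
          'eta': 0.1, 'max_depth': 4}
model = xgboost.train(list(params.items()), dtrain, 100,
                      [(dtrain, 'train'), (dtest, 'eval')],
                      early_stopping_rounds=10)
print(mean_squared_error(y_test, model.predict(dtest)))
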
Ejemplo n.º 47
0
from __future__ import print_function, division
import numpy as np
import Nio
import time, os

#
# Creating a file
#
init_time = time.clock()
ncfile = 'test-large.nc'
if (os.path.exists(ncfile)):
    os.system("/bin/rm -f " + ncfile)
opt = Nio.options()
opt.Format = "LargeFile"
opt.PreFill = False
file = Nio.open_file(ncfile, 'w', options=opt)

file.title = "Testing large files and dimensions"

file.create_dimension('big', 2500000000)

bigvar = file.create_variable('bigvar', "b", ('big', ))
print("created bigvar")
# note it is incredibly slow to write a scalar to a large file variable
# so create a temporary variable x that will get assigned in steps

x = np.empty(1000000, dtype='int8')
#print x
x[:] = 42
t = list(range(0, 2500000000, 1000000))
ii = 0
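
The snippet is truncated at this point; a plausible continuation, which is an assumption rather than the original code, would write x into bigvar in 1,000,000-element slabs and close the file:

for i in t:
    bigvar[i:i + 1000000] = x[:]   # one slab per step
file.close()
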
Ejemplo n.º 48
0
if latbl == lattr or lonbl == lontr:
    sys.exit('lat and lon values must be different')
else:
    if latbl < lattr:
        latbl, lattr = lattr, latbl
    if lonbl > lontr:
        lonbl, lontr = lontr, lonbl

# read in analysis files

a_fili = "analysis_gfs_4_%s_%s00_000.nc" % (init_dt[:8], init_dt[8:10])

# read pressure levels from analysis file

analysis = nio.open_file(diri + a_fili)

level_dim = analysis.variables["HGT_P0_L100_GLL0"].dimensions[0]

levs_p1 = analysis.variables[level_dim]
levs_p = ['{:.0f}'.format(x) for x in levs_p1[:] / 100.0]
del levs_p1

# identify level index

lev_index = levs_p.index(lev_hPa)

# read in lat

lat1 = analysis.variables["lat_0"]
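
The example is cut off here; a hedged continuation might read the longitudes the same way and pull out the chosen pressure level ("lon_0" is assumed by analogy with "lat_0"):

lon1 = analysis.variables["lon_0"]
hgt = analysis.variables["HGT_P0_L100_GLL0"][lev_index, :, :]
print(hgt.shape)
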
Ejemplo n.º 49
0
#    http://www.ncl.ucar.edu/Document/Manuals/NCL_User_Guide/Data/
#
'''
  Transition Guide Python Example: 	TRANS_contour_fill_on_map.py

  - drawing contour fill plot
  - drawing a map
  
  18-09-04  kmf
'''
from __future__ import print_function
import numpy as np
import Ngl, Nio

#--  open file and read variables
f = Nio.open_file("../read_data/rectilinear_grid_3D.nc", "r")
var = f.variables["t"][0, 0, :, :]
lat = f.variables["lat"][:]
lon = f.variables["lon"][:]

#-- start the graphics
wks = Ngl.open_wks("png", "plot_TRANS_contour_fill_on_map_py")

#-- resource settings
res = Ngl.Resources()
res.nglFrame = False

res.cnFillOn = True
res.cnFillPalette = "NCL_default"
res.cnLineLabelsOn = False
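
The listing stops during resource setup; finishing the plot would look roughly like the following sketch (the coordinate resources and draw calls are assumptions, not the original script):

res.sfXArray = lon                 # place the data on its coordinates
res.sfYArray = lat
plot = Ngl.contour_map(wks, var, res)
Ngl.frame(wks)                     # needed because nglFrame is False
Ngl.end()
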
Ejemplo n.º 50
0
import numpy
import Nio
import time, os

opt = Nio.options()
opt.Format = 'NetCDF4'

print opt.Format

#create a file
hatt = "Created at " + time.ctime(time.time())
fn = "pynio_created.nc"
if (os.path.isfile(fn)):
    os.remove(fn)
file = Nio.open_file(fn, options=opt, history=hatt, mode="w")

#create global attributes
file.source = "Nio created NetCDF4 file"
#setattr(file, 'source', "Nio test NetCDF file")
file.history = "Created " + time.ctime(time.time())

#Create some groups.
forecast = file.create_group("forecast")
analysis = file.create_group("analysis")

fmdl1 = forecast.create_group("model1")
fmdl2 = forecast.create_group("model2")
amdl1 = analysis.create_group("model1")
amdl2 = analysis.create_group("model2")
#print file.groups
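
Truncated here; presumably each group then receives its own dimensions and variables. A sketch, assuming the group objects mirror the NioFile API (an unverified assumption):

fmdl1.create_dimension('time', None)
tvar = fmdl1.create_variable('T', 'f', ('time',))
file.close()
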
Ejemplo n.º 51
0
    return name


#
#  Creating a NetCDF file named "test-types.nc".  If there is already
#  a file with that name, delete it first.
#
if (os.path.exists("test-types.nc")):
    os.system("/bin/rm -f test-types.nc")

#
#  Specify a global history attribute and open a NetCDF file
#  for writing.
#
hatt = "Created " + time.ctime(time.time()) + " by " + getUserName()
file = Nio.open_file("test-types.nc", "w", None, hatt)

#
#  Create some global attributes.
#
file.title = "Nio test NetCDF file"
file.series = [1, 2, 3, 4, 5, 6]
file.version = 45

#
#  Create some dimensions.
#
file.create_dimension("array", 3)
#file.create_dimension("strlen",    6)
file.create_dimension("strlen", 10)
file.create_dimension("dim1", 2)
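
The example is cut short; the "strlen" dimension above suggests a character-array variable. One hedged guess at the continuation (the variable name and values are illustrative):

import numpy
strvar = file.create_variable('strvar', 'S1', ('array', 'strlen'))
strvar[:] = numpy.array([list('abcdefghij')] * 3, dtype='S1')
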
Ejemplo n.º 52
0
def modelprocess(stationdict, stationlist, ll):
    sys.stdout = open(os.path.join(outpath, 't_t' + str(ll) + '.out'), 'w')
    allpath = '/moji/meteo/cluster/data/MOS/'
    sfc_varinames = [
        '2T_GDS0_SFC', '2D_GDS0_SFC',
        '10U_GDS0_SFC', '10V_GDS0_SFC', 'TCC_GDS0_SFC', 'LCC_GDS0_SFC'
    ]
    pl_varinames = ['R_GDS0_ISBL']
    # walk the data files
    dict01 = {}
    stationsVlist = []
    trainlebellist = []
    dictlist = []
    # iterate over all of the files
    for rootpath, dirs, files in os.walk(allpath):
        print allpath
        for file in files:
            if file[:3] == 'sfc' and file[-5:] == '.grib' and (string.find(
                    file, '2014') == -1):
                inputfile = os.path.join(rootpath, file)
                inputfile2 = inputfile.replace('sfc', 'pl')
                sfcfile = Nio.open_file(inputfile, 'r')
                plfile = Nio.open_file(inputfile2, 'r')
                # the lead-time argument selects the forecast time step; this
                # builds just one file's list of 2000 stations
                print sfcfile, plfile
                GetStationsAndOnetimesFromEC(ll, sfc_varinames, sfcfile,
                                             pl_varinames, plfile, inputfile,
                                             stationsVlist, trainlebellist,
                                             stationdict, stationlist, dict01,
                                             dictlist)
    stationArray = numpy.array(stationsVlist)
    trainlebelArray = numpy.array(trainlebellist)
    dictArray = numpy.array(dictlist)
    a_train, a_test = train_test_split(stationArray,
                                       test_size=0.33,
                                       random_state=7)
    #print len(a_train),len(a_test),len(a_train)+len(a_test)
    # standardize the data before training
    x_scaled = preprocessing.scale(stationArray)
    stationArray = x_scaled
    #xgboost
    x_train, x_test, y_train, y_test, z_train, z_test = train_test_split(
        stationArray,
        trainlebelArray,
        dictArray,
        test_size=0.33,
        random_state=7)
    xgbtrain = xgboost.DMatrix(x_train, label=y_train)
    xgbtest = xgboost.DMatrix(x_test, label=y_test)
    #xgbtrain.save_binary('train.buffer')
    #print len(x_train),len(x_test),len(y_train),len(y_test)
    #print xgbtest
    # report training and validation error rates
    watchlist = [(xgbtrain, 'xgbtrain'), (xgbtest, 'xgbeval')]
    params = {
        'booster': 'gbtree',
        'objective': 'reg:linear',  # linear regression
        'gamma': 0.2,  # controls post-pruning; larger is more conservative (typically 0.1 or 0.2)
        'max_depth': 12,  # depth of each tree; deeper overfits more easily
        'lambda': 2,  # L2 regularization on the weights; larger values resist overfitting
        'subsample': 0.7,  # randomly subsample the training rows
        'colsample_bytree': 0.7,  # column subsampling when growing each tree
        'min_child_weight': 3,
        # Defaults to 1: the minimum sum of h (the hessian) allowed in each
        # leaf.  For 0-1 classification with unbalanced classes and h around
        # 0.01, min_child_weight = 1 means a leaf must contain at least 100
        # samples.  This strongly affects the result; smaller values overfit
        # more easily.
        'silent': 0,  # 1 suppresses run-time output; best left at 0
        'eta': 0.01,  # acts like a learning rate
        'seed': 1000,
        'nthread': 3,  # number of CPU threads
        #'eval_metric': 'auc'
        'scale_pos_weight': 1
    }
    plst = list(params.items())
    num_rounds = 50000
    # with a large round count, early_stopping_rounds stops training once the eval metric has not improved within that many rounds
    model = xgboost.train(plst,
                          xgbtrain,
                          num_rounds,
                          watchlist,
                          early_stopping_rounds=800)
    #print model,watchlist
    preds = model.predict(xgbtest, ntree_limit=model.best_iteration)
    # write the predictions to a file; there are many ways, use whatever is convenient
    # numpy.savetxt('submission_xgb_MultiSoftmax.csv',numpy.c_[range(1,len(test)+1),preds],
    #                 delimiter=',',header='ImageId,Label',comments='',fmt='%d')
    #print preds
    #print y_test.dtype,preds.dtype
    y_test = y_test.astype('float32')
    mse = mean_squared_error(y_test, preds)
    rmse = math.sqrt(mse)
    mae = mean_absolute_error(y_test, preds, multioutput='uniform_average')
    print("训练后MSE: %.4f" % mse)
    print("训练后RMSE: %.4f" % rmse)
    print("训练后MAE: %.4f" % mae)
    #气温差2度的准确率
    n = 0
    for x, y in zip(y_test, preds):
        if abs(x - y) < 2:
            n = n + 1
    accuracy2_after = float(n) / float(len(y_test))
    print("训练后2度的accuracy: %.4f" % accuracy2_after)
    n = 0
    for x, y in zip(y_test, preds):
        if abs(x - y) < 3:
            n = n + 1
    accuracy3_after = float(n) / float(len(y_test))
    print("训练后3度的accuracy: %.4f" % accuracy3_after)
    #和EC中原始数据对比获取均方误差
    y_origin = a_test[:, 0]
    #print y_origin
    y_origin = y_origin - 273.15
    #print y_origin
    mse0 = mean_squared_error(y_test, y_origin)
    rmse0 = math.sqrt(mse0)
    mae0 = mean_absolute_error(y_test, y_origin, multioutput='uniform_average')
    print("训练前MSE: %.4f" % mse0)
    print("训练前RMSE: %.4f" % rmse0)
    print("训练前MAE: %.4f" % mae0)
    n = 0
    for x, y in zip(y_test, y_origin):
        if abs(x - y) < 2:
            n = n + 1
    accuracy2_before = float(n) / float(len(y_test))
    print("训练前2度的accuracy: %.4f" % accuracy2_before)
    n = 0
    for x, y in zip(y_test, y_origin):
        if abs(x - y) < 3:
            n = n + 1
    accuracy3_before = float(n) / float(len(y_test))
    print("训练前3度的accuracy: %.4f" % accuracy3_before)
    print mse0, rmse0, mae0, accuracy2_before, accuracy3_before, mse, rmse, mae, accuracy2_after, accuracy3_after, len(
        a_train), len(a_test)
    model.save_model(os.path.join(outpath, 'temperature' + str(ll) + '.model'))
    testfile = os.path.join(outpath, 't_test' + str(ll) + '.csv')
    predsfile = os.path.join(outpath, 't_preds' + str(ll) + '.csv')
    originfile = os.path.join(outpath, 't_origin' + str(ll) + '.csv')
    testfw = open(testfile, 'w')
    predsfw = open(predsfile, 'w')
    originfw = open(originfile, 'w')
    for u in y_test:
        testfw.write(str(u) + ',')
    testfw.close()
    for o in preds:
        predsfw.write(str(o) + ',')
    predsfw.close()
    for q in y_origin:
        originfw.write(str(q) + ',')
    originfw.close()
    kidfile = os.path.join(outpath, 'kid' + str(ll) + '.csv')
    kidfw = open(kidfile, 'w')
    for r in z_test:
        kidfw.write(str(r) + ',')
    kidfw.close()

    del stationArray
    del trainlebelArray
    del a_test
    del a_train
    del x_test
    del x_train
    del y_origin
    del y_test
    del y_train
    del xgbtest
    del xgbtrain
    del x_scaled
    del plfile
    del sfcfile
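
A short follow-on sketch: reloading the model saved above for later inference. The feature count is made up, and outpath and ll are assumed to be in scope as in modelprocess().

import numpy
import xgboost

bst = xgboost.Booster()
bst.load_model(os.path.join(outpath, 'temperature' + str(ll) + '.model'))
sample = numpy.random.rand(5, 7)          # placeholder feature rows
print(bst.predict(xgboost.DMatrix(sample)))
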
Ejemplo n.º 53
0
def main(argv):

    print 'Running pyEnsSum!'

    # Get command line stuff and store in a dictionary
    s = 'tag= compset= esize= tslice= res= sumfile= indir= mach= verbose jsonfile= mpi_enable maxnorm gmonly'
    optkeys = s.split()
    try:
        opts, args = getopt.getopt(argv, "h", optkeys)
    except getopt.GetoptError:
        pyEnsLib.EnsSum_usage()
        sys.exit(2)

    # Put command line options in a dictionary - also set defaults
    opts_dict = {}

    # Defaults
    opts_dict['tag'] = 'cesm1_2_0'
    opts_dict['compset'] = 'FC5'
    opts_dict['mach'] = 'yellowstone'
    opts_dict['esize'] = 151
    opts_dict['tslice'] = 0
    opts_dict['res'] = 'ne30_ne30'
    opts_dict['sumfile'] = 'ens.summary.nc'
    opts_dict['indir'] = './'
    opts_dict['jsonfile'] = ''
    opts_dict['verbose'] = True
    opts_dict['mpi_enable'] = False
    opts_dict['maxnorm'] = False
    opts_dict['gmonly'] = False

    # This creates the dictionary of input arguments
    opts_dict = pyEnsLib.getopt_parseconfig(opts, optkeys, 'Ec', opts_dict)

    verbose = opts_dict['verbose']

    st = opts_dict['esize']
    esize = int(st)

    if (verbose == True):
        print opts_dict
        print 'Ensemble size for summary = ', esize

    # Now find file names in indir
    input_dir = opts_dict['indir']
    # The var list that will be excluded
    ex_varlist = []

    # Create a mpi simplecomm object
    if opts_dict['mpi_enable']:
        me = simplecomm.create_comm()
    else:
        me = simplecomm.create_comm(not opts_dict['mpi_enable'])
    if me.get_rank() == 0:
        if opts_dict['jsonfile']:
            # Read in the excluded var list
            ex_varlist = pyEnsLib.read_jsonlist(opts_dict['jsonfile'])

    # Broadcast the excluded var list to each processor
    if opts_dict['mpi_enable']:
        ex_varlist = me.partition(ex_varlist, func=Duplicate(), involved=True)

    in_files = []
    if (os.path.exists(input_dir)):
        # Get the list of files
        in_files_temp = os.listdir(input_dir)
        in_files = sorted(in_files_temp)
        # Make sure we have enough
        num_files = len(in_files)
        if (verbose == True):
            print 'Number of files in input directory = ', num_files
        if (num_files < esize):
            print 'Number of files in input directory (',num_files,\
                ') is less than specified ensemble size of ', esize
            sys.exit(2)
        if (num_files > esize):
            print 'NOTE: Number of files in ', input_dir, \
                'is greater than specified ensemble size of ', esize ,\
                '\nwill just use the first ',  esize, 'files'
    else:
        print 'Input directory: ', input_dir, ' not found'
        sys.exit(2)

    # Open the files in the input directory
    o_files = []
    for onefile in in_files[0:esize]:
        if (os.path.isfile(input_dir + '/' + onefile)):
            o_files.append(Nio.open_file(input_dir + '/' + onefile, "r"))
        else:
            print "COULD NOT LOCATE FILE " + input_dir + onefile + "! EXITING...."
            sys.exit()

    # Store dimensions of the input fields
    if (verbose == True):
        print "Getting spatial dimensions"
    nlev = -1
    ncol = -1
    nlat = -1
    nlon = -1

    # Look at first file and get dims
    input_dims = o_files[0].dimensions
    ndims = len(input_dims)

    for key in input_dims:
        if key == "lev":
            nlev = input_dims["lev"]
        elif key == "ncol":
            ncol = input_dims["ncol"]
        elif key == "nlon":
            nlon = input_dims["nlon"]
        elif key == "nlat":
            nlat = input_dims["nlat"]

    if (nlev == -1):
        print "COULD NOT LOCATE valid dimension lev => EXITING...."
        sys.exit()

    if ((ncol == -1) and ((nlat == -1) or (nlon == -1))):
        print "Need either lat/lon or ncol  => EXITING...."
        sys.exit()

    # Check if this is SE or FV data
    if (ncol != -1):
        is_SE = True
    else:
        is_SE = False

    # Make sure all files have the same dimensions
    if (verbose == True):
        print "Checking dimensions across files...."
        print 'lev = ', nlev
        if (is_SE == True):
            print 'ncol = ', ncol
        else:
            print 'nlat = ', nlat
            print 'nlon = ', nlon

    for count, this_file in enumerate(o_files):
        input_dims = this_file.dimensions
        if (is_SE == True):
            if (nlev != int(input_dims["lev"])
                    or (ncol != int(input_dims["ncol"]))):
                print "Dimension mismatch between ", in_files[
                    0], 'and', in_files[0], '!!!'
                sys.exit()
        else:
            if ( nlev != int(input_dims["lev"]) or ( nlat != int(input_dims["nlat"]))\
                  or ( nlon != int(input_dims["nlon"]))):
                print "Dimension mismatch between ", in_files[
                    0], 'and', in_files[0], '!!!'
                sys.exit()

    # Get 2d vars, 3d vars and all vars (For now include all variables)
    vars_dict = o_files[0].variables
    # Remove the excluded variables (specified in json file) from variable dictionary
    if ex_varlist:
        for i in ex_varlist:
            del vars_dict[i]
    num_vars = len(vars_dict)
    if (verbose == True):
        print 'Number of variables (including metadata) found =  ', num_vars
    str_size = 0

    d2_var_names = []
    d3_var_names = []
    num_2d = 0
    num_3d = 0

    # Which are 2d, which are 3d and max str_size
    for k, v in vars_dict.iteritems():
        var = k
        vd = v.dimensions  # all the variable's dimensions (names)
        vr = v.rank  # num dimension
        vs = v.shape  # dim values
        is_2d = False
        is_3d = False
        if (is_SE == True):  # (time, lev, ncol) or (time, ncol)
            if ((vr == 2) and (vs[1] == ncol)):
                is_2d = True
                num_2d += 1
            elif ((vr == 3) and (vs[2] == ncol and vs[1] == nlev)):
                is_3d = True
                num_3d += 1
        else:  # (time, lev, nlat, nlon) or (time, nlat, nlon)
            if ((vr == 3) and (vs[1] == nlat and vs[2] == nlon)):
                is_2d = True
                num_2d += 1
            elif ((vr == 4)
                  and (vs[2] == nlat and vs[3] == nlon and vs[1] == nlev)):
                is_3d = True
                num_3d += 1
        if (is_3d == True):
            str_size = max(str_size, len(k))
            d3_var_names.append(k)
        elif (is_2d == True):
            str_size = max(str_size, len(k))
            d2_var_names.append(k)

    # Now sort these and combine (this sorts caps first, then lower case -
    # which is what we want)
    d2_var_names.sort()
    d3_var_names.sort()

    # All vars is 3d vars first (sorted), the 2d vars
    all_var_names = list(d3_var_names)
    all_var_names += d2_var_names
    n_all_var_names = len(all_var_names)

    if (verbose == True):
        print 'num vars = ', n_all_var_names, '(3d = ', num_3d, ' and 2d = ', num_2d, ")"

    # Create new summary ensemble file
    this_sumfile = opts_dict["sumfile"]
    if (verbose == True):
        print "Creating ", this_sumfile, "  ..."
    if (me.get_rank() == 0):
        if os.path.exists(this_sumfile):
            os.unlink(this_sumfile)

        opt = Nio.options()
        opt.PreFill = False
        opt.Format = 'NetCDF4Classic'
        nc_sumfile = Nio.open_file(this_sumfile, 'w', options=opt)

        # Set dimensions
        if (verbose == True):
            print "Setting dimensions ....."
        if (is_SE == True):
            nc_sumfile.create_dimension('ncol', ncol)
        else:
            nc_sumfile.create_dimension('nlat', nlat)
            nc_sumfile.create_dimension('nlon', nlon)
        nc_sumfile.create_dimension('nlev', nlev)
        nc_sumfile.create_dimension('ens_size', esize)
        nc_sumfile.create_dimension('nvars', num_3d + num_2d)
        nc_sumfile.create_dimension('nvars3d', num_3d)
        nc_sumfile.create_dimension('nvars2d', num_2d)
        nc_sumfile.create_dimension('str_size', str_size)

        # Set global attributes
        now = time.strftime("%c")
        if (verbose == True):
            print "Setting global attributes ....."
        setattr(nc_sumfile, 'creation_date', now)
        setattr(nc_sumfile, 'title', 'CAM verification ensemble summary file')
        setattr(nc_sumfile, 'tag', opts_dict["tag"])
        setattr(nc_sumfile, 'compset', opts_dict["compset"])
        setattr(nc_sumfile, 'resolution', opts_dict["res"])
        setattr(nc_sumfile, 'machine', opts_dict["mach"])

        # Create variables
        if (verbose == True):
            print "Creating variables ....."
        v_lev = nc_sumfile.create_variable("lev", 'f', ('nlev', ))
        v_vars = nc_sumfile.create_variable("vars", 'S1',
                                            ('nvars', 'str_size'))
        v_var3d = nc_sumfile.create_variable("var3d", 'S1',
                                             ('nvars3d', 'str_size'))
        v_var2d = nc_sumfile.create_variable("var2d", 'S1',
                                             ('nvars2d', 'str_size'))
        if not opts_dict['gmonly']:
            if (is_SE == True):
                v_ens_avg3d = nc_sumfile.create_variable(
                    "ens_avg3d", 'f', ('nvars3d', 'nlev', 'ncol'))
                v_ens_stddev3d = nc_sumfile.create_variable(
                    "ens_stddev3d", 'f', ('nvars3d', 'nlev', 'ncol'))
                v_ens_avg2d = nc_sumfile.create_variable(
                    "ens_avg2d", 'f', ('nvars2d', 'ncol'))
                v_ens_stddev2d = nc_sumfile.create_variable(
                    "ens_stddev2d", 'f', ('nvars2d', 'ncol'))
            else:
                v_ens_avg3d = nc_sumfile.create_variable(
                    "ens_avg3d", 'f', ('nvars3d', 'nlev', 'nlat', 'nlon'))
                v_ens_stddev3d = nc_sumfile.create_variable(
                    "ens_stddev3d", 'f', ('nvars3d', 'nlev', 'nlat', 'nlon'))
                v_ens_avg2d = nc_sumfile.create_variable(
                    "ens_avg2d", 'f', ('nvars2d', 'nlat', 'nlon'))
                v_ens_stddev2d = nc_sumfile.create_variable(
                    "ens_stddev2d", 'f', ('nvars2d', 'nlat', 'nlon'))

            v_RMSZ = nc_sumfile.create_variable("RMSZ", 'f',
                                                ('nvars', 'ens_size'))
        v_gm = nc_sumfile.create_variable("global_mean", 'f',
                                          ('nvars', 'ens_size'))
        v_loadings_gm = nc_sumfile.create_variable('loadings_gm', 'f',
                                                   ('nvars', 'nvars'))
        v_mu_gm = nc_sumfile.create_variable('mu_gm', 'f', ('nvars', ))
        v_sigma_gm = nc_sumfile.create_variable('sigma_gm', 'f', ('nvars', ))
        v_sigma_scores_gm = nc_sumfile.create_variable('sigma_scores_gm', 'f',
                                                       ('nvars', ))

        # Assign vars, var3d and var2d
        if (verbose == True):
            print "Assigning vars, var3d, and var2d ....."

        eq_all_var_names = []
        eq_d3_var_names = []
        eq_d2_var_names = []

        l_eq = len(all_var_names)
        for i in range(l_eq):
            tt = list(all_var_names[i])
            l_tt = len(tt)
            if (l_tt < str_size):
                extra = list(' ') * (str_size - l_tt)
                tt.extend(extra)
            eq_all_var_names.append(tt)

        l_eq = len(d3_var_names)
        for i in range(l_eq):
            tt = list(d3_var_names[i])
            l_tt = len(tt)
            if (l_tt < str_size):
                extra = list(' ') * (str_size - l_tt)
                tt.extend(extra)
            eq_d3_var_names.append(tt)

        l_eq = len(d2_var_names)
        for i in range(l_eq):
            tt = list(d2_var_names[i])
            l_tt = len(tt)
            if (l_tt < str_size):
                extra = list(' ') * (str_size - l_tt)
                tt.extend(extra)
            eq_d2_var_names.append(tt)

        v_vars[:] = eq_all_var_names[:]
        v_var3d[:] = eq_d3_var_names[:]
        v_var2d[:] = eq_d2_var_names[:]

        # Time-invariant metadata
        if (verbose == True):
            print "Assigning time invariant metadata ....."
        lev_data = vars_dict["lev"]
        v_lev[:] = lev_data[:]

    # Form ensembles, each missing one member; compute RMSZs and global means
    # for each variable.  We also do the max norm (currently done in pyStats).
    tslice = opts_dict['tslice']

    # Partition the var list
    var3_list_loc = me.partition(d3_var_names,
                                 func=EqualStride(),
                                 involved=True)
    var2_list_loc = me.partition(d2_var_names,
                                 func=EqualStride(),
                                 involved=True)

    # Calculate global means #
    if (verbose == True):
        print "Calculating global means ....."
    gm3d, gm2d = pyEnsLib.generate_global_mean_for_summary(
        o_files, var3_list_loc, var2_list_loc, tslice, is_SE, verbose)

    # Calculate RMSZ scores
    if (verbose == True):
        print "Calculating RMSZ scores ....."
    if not opts_dict['gmonly']:
        zscore3d, zscore2d, ens_avg3d, ens_stddev3d, ens_avg2d, ens_stddev2d = pyEnsLib.calc_rmsz(
            o_files, o_files[0], var3_list_loc, var2_list_loc, tslice, is_SE,
            verbose)

    # Calculate max norm ensemble
    if opts_dict['maxnorm']:
        if verbose:
            print "Calculating max norm of ensembles ....."
        pyEnsLib.calculate_maxnormens(opts_dict, var3_list_loc)
        pyEnsLib.calculate_maxnormens(opts_dict, var2_list_loc)

    if opts_dict['mpi_enable']:
        # Gather the 3d variable results from all processors to the master processor
        slice_index = get_stride_list(len(d3_var_names), me)

        # Gather global means 3d results
        gm3d = gather_npArray(gm3d, me, slice_index,
                              (len(d3_var_names), len(o_files)))

        if not opts_dict['gmonly']:
            # Gather zscore3d results
            zscore3d = gather_npArray(zscore3d, me, slice_index,
                                      (len(d3_var_names), len(o_files)))

            # Gather ens_avg3d and ens_stddev3d results
            shape_tuple3d = get_shape(ens_avg3d.shape, len(d3_var_names),
                                      me.get_rank())
            ens_avg3d = gather_npArray(ens_avg3d, me, slice_index,
                                       shape_tuple3d)
            ens_stddev3d = gather_npArray(ens_stddev3d, me, slice_index,
                                          shape_tuple3d)

        # Gather 2d variable results from all processors to the master processor
        slice_index = get_stride_list(len(d2_var_names), me)

        # Gather global means 2d results
        gm2d = gather_npArray(gm2d, me, slice_index,
                              (len(d2_var_names), len(o_files)))

        if not opts_dict['gmonly']:
            # Gather zscore2d results
            zscore2d = gather_npArray(zscore2d, me, slice_index,
                                      (len(d2_var_names), len(o_files)))

            # Gather ens_avg2d and ens_stddev2d results
            shape_tuple2d = get_shape(ens_avg2d.shape, len(d2_var_names),
                                      me.get_rank())
            ens_avg2d = gather_npArray(ens_avg2d, me, slice_index,
                                       shape_tuple2d)
            ens_stddev2d = gather_npArray(ens_stddev2d, me, slice_index,
                                          shape_tuple2d)

    # Assign to file:
    if me.get_rank() == 0:
        gmall = np.concatenate((gm3d, gm2d), axis=0)
        mu_gm, sigma_gm, standardized_global_mean, loadings_gm, scores_gm = pyEnsLib.pre_PCA(
            gmall)
        if not opts_dict['gmonly']:
            Zscoreall = np.concatenate((zscore3d, zscore2d), axis=0)
            v_RMSZ[:, :] = Zscoreall[:, :]
        v_gm[:, :] = gmall[:, :]
        v_mu_gm[:] = mu_gm[:]
        v_sigma_gm[:] = sigma_gm[:].astype(np.float32)
        v_loadings_gm[:, :] = loadings_gm[:, :]
        v_sigma_scores_gm[:] = scores_gm[:]
        if not opts_dict['gmonly']:
            if is_SE:
                v_ens_avg3d[:, :, :] = ens_avg3d[:, :, :]
                v_ens_stddev3d[:, :, :] = ens_stddev3d[:, :, :]
                v_ens_avg2d[:, :] = ens_avg2d[:, :]
                v_ens_stddev2d[:, :] = ens_stddev2d[:, :]
            else:
                v_ens_avg3d[:, :, :, :] = ens_avg3d[:, :, :, :]
                v_ens_stddev3d[:, :, :, :] = ens_stddev3d[:, :, :, :]
                v_ens_avg2d[:, :, :] = ens_avg2d[:, :, :]
                v_ens_stddev2d[:, :, :] = ens_stddev2d[:, :, :]
        print "All Done"
Ejemplo n.º 54
0
    def __init__(self, filename, mode='r'):
        import Nio
        self.ds = Nio.open_file(filename, mode=mode)
Ejemplo n.º 55
0
import os, time
import Nio

def getUserName():
    import pwd
    # Look up the current user's entry in the password database; field 4
    # (gecos) holds the full name. (Header reconstructed; the original
    # snippet begins mid-function.)
    pwd_entry = pwd.getpwuid(os.getuid())
    raw_name = pwd_entry[4]
    name = raw_name.split(",")[0].strip()
    if name == '':
        name = pwd_entry[0]

    return name


#
# Creating a file
#
ncfile = 'test.nc'
if os.path.exists(ncfile):
    os.remove(ncfile)
file = Nio.open_file(
    ncfile, 'w', None,
    'Created ' + time.ctime(time.time()) + ' by ' + getUserName())
file.title = "Just some useless junk"
#if "series" in file.__dict__:
#    del file.__dict__['series']

file.series = [1, 2, 3, 4, 5, 6]

file.version = 45
#del file.version

file.create_dimension('xyz', 3)
file.create_dimension('n', 20)
file.create_dimension('t', None)  # unlimited dimension

foo = file.create_variable('foo', "i", ('n', 'xyz'))
Ejemplo n.º 56
0
        txres.txJust = just_strs[i]
        Ngl.text_ndc(wks, lon_labels[i], xndc[i], yndc[i], txres)

    return


#----------------------------------------------------------------------
# Main code
#----------------------------------------------------------------------

# Open file and get variable. The lat/lon variables will be
# generated using fspan.  This data came from another dataset
# that had lat/lon on the file, but lat/lon was nothing more
# than equally-spaced values which we can regenerate exactly.
#
f = Nio.open_file(os.path.join(Ngl.pynglpath("data"), "cdf", "hgt.nc"), "r")

hgt = f.variables["HGT"][:, :, :]
lat = Ngl.fspan(-90, 90, 73)
lon = Ngl.fspan(0, 357.5, 144)

# Add a cyclic point in the longitude dimension.
hgt0, lon = Ngl.add_cyclic(hgt[0, :, :], lon)

#
# Start graphics.
#
wks_type = "png"
wks = Ngl.open_wks(wks_type, "spaghetti")
Ngl.define_colormap(wks, "default")  # Change color map.
Ejemplo n.º 57
0
def calculateRasterTodict2(inputfile, inputfile2, longitude, latitude):
    file = Nio.open_file(inputfile, 'r')
    names = file.variables.keys()
    varinames = [
        'SP_GDS0_SFC', 'Z_GDS0_SFC', 'TCC_GDS0_SFC', 'SD_GDS0_SFC',
        '10V_GDS0_SFC', 'TP_GDS0_SFC', '2D_GDS0_SFC', 'MSL_GDS0_SFC',
        'HCC_GDS0_SFC', 'CP_GDS0_SFC', 'MCC_GDS0_SFC', '10U_GDS0_SFC',
        '2T_GDS0_SFC', 'SSTK_GDS0_SFC', 'LCC_GDS0_SFC', 'SKT_GDS0_SFC'
    ]
    # First compute the grid-point indices corresponding to the given longitude/latitude
    indexlat = int((60 - latitude) / 0.125)
    indexlon = int((longitude - 60) / 0.125)
    # Then take the indices of the 16 surrounding points in order: [indexlat,indexlon+1], [indexlat+1,indexlon+1], [indexlat+1,indexlon], ... (clockwise)
    timeArray = file.variables['forecast_time0']
    featuredic = {}
    for i in range(len(varinames)):
        parray = file.variables[varinames[i]]
        print parray.shape
        for j in range(len(parray)):
            arraylist = []
            #print j,'65'
            hh = int(timeArray[j])
            pdatetime = odatetime + datetime.timedelta(hours=hh)
            timestr = datetime.datetime.strftime(pdatetime, '%Y%m%d %H:%M:%S')
            varikey = varinames[i] + '_' + timestr
            latlonArray = parray[j]
            arraylist.append(latlonArray[indexlat][indexlon])
            arraylist.append(latlonArray[indexlat][indexlon + 1])
            arraylist.append(latlonArray[indexlat + 1][indexlon + 1])
            arraylist.append(latlonArray[indexlat + 1][indexlon])
            arraylist.append(latlonArray[indexlat - 1][indexlon - 1])
            arraylist.append(latlonArray[indexlat - 1][indexlon])
            arraylist.append(latlonArray[indexlat - 1][indexlon + 1])
            arraylist.append(latlonArray[indexlat - 1][indexlon + 2])
            arraylist.append(latlonArray[indexlat][indexlon + 2])
            arraylist.append(latlonArray[indexlat + 1][indexlon + 2])
            arraylist.append(latlonArray[indexlat + 2][indexlon + 2])
            arraylist.append(latlonArray[indexlat + 2][indexlon + 1])
            arraylist.append(latlonArray[indexlat + 2][indexlon])
            arraylist.append(latlonArray[indexlat + 2][indexlon - 1])
            arraylist.append(latlonArray[indexlat + 1][indexlon - 1])
            arraylist.append(latlonArray[indexlat][indexlon - 1])
            featuredic[varikey] = arraylist
    # Read the upper-air file to get relative humidity at 500 hPa and 850 hPa
    gribfile = Nio.open_file(inputfile2, 'r')
    names2 = gribfile.variables.keys()
    print names2
    variablenames = ['R_GDS0_ISBL']
    tt = getattr(gribfile.variables[names2[1]], 'initial_time')
    oodatetime = datetime.datetime.strptime(tt, '%m/%d/%Y (%H:%M)')
    timeArray = gribfile.variables['forecast_time0']
    levelArray = gribfile.variables['lv_ISBL1']
    #print levelArray
    for i in range(len(variablenames)):
        variableArray = gribfile.variables[variablenames[i]]
        for j in range(len(variableArray)):
            pArray = variableArray[j]
            hh = int(timeArray[j])
            pdatetime = oodatetime + datetime.timedelta(hours=hh)
            timestring = datetime.datetime.strftime(pdatetime,
                                                    '%Y%m%d %H:%M:%S')
            for k in range(len(pArray)):
                phalist = []
                phaArray = pArray[k]
                llArray = phaArray
                pha = levelArray[k]
                fkey = str(variablenames[i]) + '_' + timestring + '_' + str(
                    pha) + 'hpa'
                phalist.append(llArray[indexlat][indexlon])
                phalist.append(llArray[indexlat][indexlon + 1])
                phalist.append(llArray[indexlat + 1][indexlon + 1])
                phalist.append(llArray[indexlat + 1][indexlon])
                phalist.append(llArray[indexlat - 1][indexlon - 1])
                phalist.append(llArray[indexlat - 1][indexlon])
                phalist.append(llArray[indexlat - 1][indexlon + 1])
                phalist.append(llArray[indexlat - 1][indexlon + 2])
                phalist.append(llArray[indexlat][indexlon + 2])
                phalist.append(llArray[indexlat + 1][indexlon + 2])
                phalist.append(llArray[indexlat + 2][indexlon + 2])
                phalist.append(llArray[indexlat + 2][indexlon + 1])
                phalist.append(llArray[indexlat + 2][indexlon])
                phalist.append(llArray[indexlat + 2][indexlon - 1])
                phalist.append(llArray[indexlat + 1][indexlon - 1])
                phalist.append(llArray[indexlat][indexlon - 1])
                featuredic[fkey] = phalist
    print featuredic
Ejemplo n.º 58
0
def calculateRaster2(inputfile, inputfile2, longitude, latitude):
    file = Nio.open_file(inputfile, 'r')
    names = file.variables.keys()
    #varinames1=['SP_GDS0_SFC', 'Z_GDS0_SFC', 'TCC_GDS0_SFC', 'SD_GDS0_SFC', '10V_GDS0_SFC', 'TP_GDS0_SFC', '2D_GDS0_SFC', 'MSL_GDS0_SFC', 'HCC_GDS0_SFC', 'CP_GDS0_SFC', 'MCC_GDS0_SFC', '10U_GDS0_SFC', '2T_GDS0_SFC', 'SSTK_GDS0_SFC', 'LCC_GDS0_SFC', 'SKT_GDS0_SFC']
    varinames = [
        '2T_GDS0_SFC', '2D_GDS0_SFC', '10U_GDS0_SFC', '10V_GDS0_SFC',
        'TCC_GDS0_SFC', 'LCC_GDS0_SFC', 'HCC_GDS0_SFC', 'MCC_GDS0_SFC',
        'TP_GDS0_SFC', 'CP_GDS0_SFC'
    ]
    # First compute the grid-point indices corresponding to the given longitude/latitude
    indexlat = int((60 - latitude) / 0.125)
    indexlon = int((longitude - 60) / 0.125)
    # Then take the indices of the 16 surrounding points in order: [indexlat,indexlon+1], [indexlat+1,indexlon+1], [indexlat+1,indexlon], ... (clockwise)
    vstring = []
    timeArray = file.variables['forecast_time0']
    # for t in range(len(timeArray)):
    #     hh=int(timeArray[t])
    #     pdatetime=odatetime+datetime.timedelta(hours=hh)
    for i in range(len(varinames)):
        vstring.append(varinames[i])
        parray = file.variables[varinames[i]]
        for j in range(len(parray)):
            hh = int(timeArray[j])
            pdatetime = odatetime + datetime.timedelta(hours=hh)
            vstring.append(pdatetime)
            latlonArray = parray[j]
            vstring.append(latlonArray[indexlat][indexlon])
            vstring.append(latlonArray[indexlat][indexlon + 1])
            vstring.append(latlonArray[indexlat + 1][indexlon + 1])
            vstring.append(latlonArray[indexlat + 1][indexlon])
            vstring.append(latlonArray[indexlat - 1][indexlon - 1])
            vstring.append(latlonArray[indexlat - 1][indexlon])
            vstring.append(latlonArray[indexlat - 1][indexlon + 1])
            vstring.append(latlonArray[indexlat - 1][indexlon + 2])
            vstring.append(latlonArray[indexlat][indexlon + 2])
            vstring.append(latlonArray[indexlat + 1][indexlon + 2])
            vstring.append(latlonArray[indexlat + 2][indexlon + 2])
            vstring.append(latlonArray[indexlat + 2][indexlon + 1])
            vstring.append(latlonArray[indexlat + 2][indexlon])
            vstring.append(latlonArray[indexlat + 2][indexlon - 1])
            vstring.append(latlonArray[indexlat + 1][indexlon - 1])
            vstring.append(latlonArray[indexlat][indexlon - 1])
    # Read the upper-air file to get relative humidity at 500 hPa and 850 hPa
    gribfile = Nio.open_file(inputfile2, 'r')
    names2 = gribfile.variables.keys()
    print names2
    variablenames = ['R_GDS0_ISBL']
    tt = getattr(gribfile.variables[names2[1]], 'initial_time')
    oodatetime = datetime.datetime.strptime(tt, '%m/%d/%Y (%H:%M)')
    timeArray = gribfile.variables['forecast_time0']
    levelArray = gribfile.variables['lv_ISBL1']
    print levelArray
    for i in range(len(variablenames)):
        variableArray = gribfile.variables[variablenames[i]]
        vstring.append(variablenames[i])
        for j in range(len(variableArray)):
            pArray = variableArray[j]
            hh = int(timeArray[j])
            pdatetime = oodatetime + datetime.timedelta(hours=hh)
            vstring.append(pdatetime)
            for k in range(len(pArray)):
                phaArray = pArray[k]
                llArray = phaArray
                pha = levelArray[k]
                vstring.append(str(pha) + 'hpa')
                vstring.append(llArray[indexlat][indexlon])
                vstring.append(llArray[indexlat][indexlon + 1])
                vstring.append(llArray[indexlat + 1][indexlon + 1])
                vstring.append(llArray[indexlat + 1][indexlon])
                vstring.append(llArray[indexlat - 1][indexlon - 1])
                vstring.append(llArray[indexlat - 1][indexlon])
                vstring.append(llArray[indexlat - 1][indexlon + 1])
                vstring.append(llArray[indexlat - 1][indexlon + 2])
                vstring.append(llArray[indexlat][indexlon + 2])
                vstring.append(llArray[indexlat + 1][indexlon + 2])
                vstring.append(llArray[indexlat + 2][indexlon + 2])
                vstring.append(llArray[indexlat + 2][indexlon + 1])
                vstring.append(llArray[indexlat + 2][indexlon])
                vstring.append(llArray[indexlat + 2][indexlon - 1])
                vstring.append(llArray[indexlat + 1][indexlon - 1])
                vstring.append(llArray[indexlat][indexlon - 1])
    # Loop over vstring and build a comma-separated description
    vdescribe = ''
    for p in range(len(vstring)):
        vdescribe = vdescribe + ',' + str(vstring[p])
    print vdescribe
    del vstring
    del latlonArray
Ejemplo n.º 59
0
#
#  Import os for path handling and Nio for reading netCDF files.
#
import os
import Nio

#
#  Import Ngl support functions.
#
import Ngl

#
#  Open the netCDF file containing the climate divisions polygons.
#
dirc = Ngl.pynglpath("data")
ncdf = Nio.open_file(os.path.join(dirc,"cdf","climdiv_polygons.nc"))

#
#  State names for the contiguous U.S. states.
#
statenames = ["AL","AR","AZ","CA","CO","CT","DE","FL","GA","IA","ID","IL", \
              "IN","KS","KY","LA","MA","MD","ME","MI","MN","MO","MS","MT", \
              "NC","ND","NE","NH","NJ","NM","NV","NY","OH","OK","OR","PA", \
              "RI","SC","SD","TN","TX","UT","VA","VT","WA","WI","WV","WY"]

# 
#  Climate divisions in each state.
#
ncds = [8,9,7,7,5,3,2,6,9,9,10,9,9,9,4,9,3,8,3,10,9,6,10,7, \
        8,9,8,2,3,8,4,10,10,9,9,10,1,7,9,4,10,7,7,3,10,9,6,10]
Ejemplo n.º 60
0
import os, sys
import numpy
import Ngl, Nio
from utils import *

dirc = os.path.expandvars(os.path.join("$NCARGTEST", "nclscripts", "cdf_files"))
pm = numpy.zeros([2, 27, 54, 86], 'f')
qvm = numpy.zeros([2, 27, 54, 86], 'f')
tdm_out = numpy.zeros([2, 27, 54, 86], 'f')

#
# Do individual tests first, but collect data to do multiple
# dimension test later.
#
for i in range(2):
    a = Nio.open_file(os.path.join(dirc, "wrf_td") + str(i) + ".nc")
    p = a.variables["p"][:]
    td_out = a.variables["td"][:]

    #
    # The below is just a test to make sure that the wrf_slp
    # wrapper code for setting qv < 0 to 0 is working. I take
    # any values equal to zero, and randomly set them to
    # random negative values.
    #
    qv1d = numpy.ravel(a.variables["qv"][:])
    # Take the indices of the zero-valued entries and assign different
    # negative values to overlapping subsets of them.
    ii = numpy.where(numpy.equal(qv1d, 0.))[0]
    qv1d[ii[::2]] = -1
    qv1d[ii[1::2]] = -100
    qv1d[ii[2::2]] = -3000.234

    qv = numpy.reshape(qv1d, a.variables["qv"][:].shape)