Example #1
def grid_wind(rs):
    """
    Grid winds based on u and v components
    @param rs array of dicts
    @return uwnd, vwnd
    """
    lats = []
    lons = []
    udata = []
    vdata = []
    for row in rs:
        if row['sknt'] is None or row['drct'] is None:
            continue
        # knots to m/s
        u, v = mesonet.uv(row['sknt'] * 0.514, row['drct'])
        if v is not None:
            lats.append(  nt.sts[row['station']]['lat'] )
            lons.append(  nt.sts[row['station']]['lon'] )
            vdata.append( v )
            udata.append( u )
            
    if len(vdata) < 4:
        print "No wind data at all for time: %s" % (ts,)   
        return None
    
    ugrid = Ngl.natgrid(lons, lats, udata, iemre.XAXIS, iemre.YAXIS)
    vgrid = Ngl.natgrid(lons, lats, vdata, iemre.XAXIS, iemre.YAXIS)
    if ugrid is not None:
        ugt = ugrid.transpose()
        vgt = vgrid.transpose()
        return ugt, vgt
    else:
        return None, None
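
Example #1, like most of the snippets that follow, uses the same three-step pattern: collect station longitudes, latitudes and values into parallel lists, call Ngl.natgrid to produce a natural-neighbor analysis on the target axes, and transpose the result, since natgrid returns the grid indexed (x, y) while the netCDF variables in these scripts are laid out (y, x). Below is a minimal, self-contained sketch of that pattern; the station coordinates, values and axes are invented for illustration and come from none of the projects above.

import numpy
import Ngl

# Hypothetical station observations (not real data)
lons = [-96.7, -95.0, -93.6, -92.4, -91.1, -94.2]
lats = [42.0, 41.5, 41.7, 42.5, 41.3, 43.1]
vals = [12.0, 14.5, 13.2, 11.8, 15.0, 10.4]

# Target analysis axes, west-to-east and south-to-north
XAXIS = numpy.arange(-97.0, -90.0, 0.25)
YAXIS = numpy.arange(40.0, 44.0, 0.25)

# natgrid returns an array shaped (len(XAXIS), len(YAXIS)); transposing puts
# it in the (y, x) order used when storing into netCDF variables.
grid = Ngl.natgrid(lons, lats, vals, XAXIS, YAXIS)
analysis = grid.transpose()
print(analysis.shape)
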
Example #2
def estimate_hilo( ts ):
    """
    Estimate the High and Low Temperature based on gridded data
    """
    # Query Obs
    highs = []
    lows = []
    lats = []
    lons = []
    icursor.execute("""
        SELECT x(s.geom) as lon, y(s.geom) as lat, max_tmpf, min_tmpf
        from summary_%s c, stations s
        WHERE day = '%s'
        and s.network in ('AWOS', 'IA_ASOS', 'MN_ASOS', 'WI_ASOS', 'IL_ASOS',
            'MO_ASOS', 'KS_ASOS', 'NE_ASOS', 'SD_ASOS', 'ND_ASOS', 'KY_ASOS',
            'MI_ASOS', 'OH_ASOS')
        and c.iemid = s.iemid
        and max_tmpf > -90 and min_tmpf < 90""" % (
        ts.year, ts.strftime("%Y-%m-%d")))
    for row in icursor:
        lats.append( row['lat'] )
        lons.append( row['lon'] )
        highs.append( row['max_tmpf'] )
        lows.append( row['min_tmpf'] )

    # Create the analysis
    highA = Ngl.natgrid(lons, lats, highs, iemre.XAXIS, iemre.YAXIS)
    lowA = Ngl.natgrid(lons, lats, lows, iemre.XAXIS, iemre.YAXIS)

    for id in nt.sts.keys():
        nt.sts[id]['high'] = highA[nt.sts[id]['gridi'], nt.sts[id]['gridj']]
        nt.sts[id]['low'] = lowA[nt.sts[id]['gridi'], nt.sts[id]['gridj']]
Example #3
def estimate_snow( ts ):
    """
    Estimate the Snow
    """
    # Query Obs
    snowd = []
    snow = []
    lats = []
    lons = []
    icursor.execute("""
       SELECT x(s.geom) as lon, y(s.geom) as lat, snow, snowd
       from summary_%s c, stations s WHERE day = '%s' and 
       s.network in ('IA_COOP', 'MN_COOP', 'WI_COOP', 'IL_COOP', 'MO_COOP',
        'KS_COOP', 'NE_COOP', 'SD_COOP', 'ND_COOP', 'KY_COOP', 'MI_COOP',
        'OH_COOP') and c.iemid = s.iemid 
       and snowd >= 0""" % (ts.year, ts.strftime("%Y-%m-%d")))
    for row in icursor:
        lats.append( row['lat'] )
        lons.append( row['lon'] )
        snow.append( row['snow'] )
        snowd.append( row['snowd'] )

    if len(lats) < 5: # No data!
        for id in nt.sts.keys():
            nt.sts[id]['snow'] = 0
            nt.sts[id]['snowd'] = 0
        return


    # Create the analysis
    snowA = Ngl.natgrid(lons, lats, snow, iemre.XAXIS, iemre.YAXIS)
    snowdA = Ngl.natgrid(lons, lats, snowd, iemre.XAXIS, iemre.YAXIS)

    for id in nt.sts.keys():
        snowfall = snowA[nt.sts[id]['gridi'], nt.sts[id]['gridj']]
        snowdepth = snowdA[nt.sts[id]['gridi'], nt.sts[id]['gridj']]
        if snowfall > 0 and snowfall < 0.1:
            nt.sts[id]['snow'] = 0.0001
        elif snowfall < 0:
            nt.sts[id]['snow'] = 0
        elif numpy.isnan(snowfall):
            nt.sts[id]['snow'] = 0
        else:
            nt.sts[id]['snow'] = snowfall
        if snowdepth > 0 and snowdepth < 0.1:
            nt.sts[id]['snowd'] = 0.0001
        elif snowdepth < 0:
            nt.sts[id]['snowd'] = 0
        elif numpy.isnan(snowdepth):
            nt.sts[id]['snowd'] = 0
        else:
            nt.sts[id]['snowd'] = snowdepth
Example #4
def load_soilt(data):
    soil_obs = []
    lats = [] 
    lons = []
    valid = 'YESTERDAY'
    if mx.DateTime.now().hour < 7:
        valid = '%s' % ((mx.DateTime.now() - mx.DateTime.RelativeDateTime(days=2)).strftime("%Y-%m-%d"), )
    rs = isuag.query("""SELECT station, c30 from daily WHERE 
         valid = '%s'""" % (valid,) ).dictresult()
    for i in range(len(rs)):
        stid = rs[i]['station']
        if not nt.sts.has_key(stid):
            continue
        soil_obs.append( rs[i]['c30'] )
        lats.append( nt.sts[stid]['lat'] )
        lons.append( nt.sts[stid]['lon'] )
    if len(lons) == 0:
        print 'No ISUAG Data for %s' % (valid,)
        sys.exit()
    numxout = 40
    numyout = 40
    xmin    = min(lons) - 1.
    ymin    = min(lats) - 1.
    xmax    = max(lons) + 1.
    ymax    = max(lats) + 1.
    xc      = (xmax-xmin)/(numxout-1)
    yc      = (ymax-ymin)/(numyout-1)

    xo = xmin + xc* numpy.arange(0,numxout)
    yo = ymin + yc* numpy.arange(0,numyout)

    analysis = Ngl.natgrid(lons, lats, soil_obs, list(xo), list(yo))
    for id in data.keys():
        data[id]['soilt'] = sampler(xo,yo,analysis, data[id]['lon'], data[id]['lat'])
Example #5
def merge(ts):
    """
    Process an hour's worth of stage4 data into the hourly RE
    """

    # Load up the 12z 24h total, this is what we base our deltas on
    fp = "/mesonet/ARCHIVE/data/%s/stage4/ST4.%s.24h.grib" % (
      ts.strftime("%Y/%m/%d"), ts.strftime("%Y%m%d%H") )

    grib = Nio.open_file(fp, 'r')
    # Rough subsample, since the whole enchilada is too much
    lats = numpy.ravel( grib.variables["g5_lat_0"][200:-100:5,300:900:5] )
    lons = numpy.ravel( grib.variables["g5_lon_1"][200:-100:5,300:900:5] )
    vals = numpy.ravel( grib.variables["A_PCP_GDS5_SFC_acc24h"][200:-100:5,300:900:5] )
    res = Ngl.natgrid(lons, lats, vals, iemre.XAXIS, iemre.YAXIS)
    stage4 = res.transpose()
    # Prevent Large numbers, negative numbers
    stage4 = numpy.where( stage4 < 10000., stage4, 0.)
    stage4 = numpy.where( stage4 < 0., 0., stage4)

    # Open up our RE file
    nc = netCDF4.Dataset("/mesonet/data/iemre/%s_mw_hourly.nc" % (ts.year,),'a')
    ts0 = ts + mx.DateTime.RelativeDateTime(days=-1)
    jan1 = mx.DateTime.DateTime(ts.year, 1, 1, 0, 0)
    offset0 = int(( ts0 - jan1).hours)
    offset1 = int(( ts -  jan1).hours)
    if offset0 < 0:
        offset0 = 0
    iemre2 = numpy.sum(nc.variables["p01m"][offset0:offset1,:,:], axis=0)
    
    iemre2 = numpy.where( iemre2 > 0., iemre2, 0.00024)
    iemre2 = numpy.where( iemre2 < 10000., iemre2, 0.00024)
    print "Stage IV 24h [Avg %5.2f Max %5.2f]  IEMRE Hourly [Avg %5.2f Max: %5.2f]" % (
                    numpy.average(stage4), numpy.max(stage4), 
                    numpy.average(iemre2), numpy.max(iemre2) )
    multiplier = stage4 / iemre2
    print "Multiplier MIN: %5.2f  AVG: %5.2f  MAX: %5.2f" % (
                    numpy.min(multiplier), numpy.average(multiplier),numpy.max(multiplier))
    for offset in range(offset0, offset1):
        data  = nc.variables["p01m"][offset,:,:]
        
        # Keep data within reason
        data = numpy.where( data > 10000., 0., data)
        adjust = numpy.where( data > 0, data, 0.00001) * multiplier
        adjust = numpy.where( adjust > 250.0, 0, adjust)
        nc.variables["p01m"][offset,:,:] = numpy.where( adjust < 0.01, 0, adjust)
        ts = jan1 + mx.DateTime.RelativeDateTime(hours=offset)
        print "%s IEMRE %5.2f %5.2f Adjusted %5.2f %5.2f" % (ts.strftime("%Y-%m-%d %H"), 
                                    numpy.average(data), numpy.max(data),
                                    numpy.average(nc.variables["p01m"][offset]),
                                    numpy.max(nc.variables["p01m"][offset]))
    nc.sync()
    iemre2 = numpy.sum(nc.variables["p01m"][offset0:offset1,:,:], axis=0)
    print "Stage IV 24h [Avg %5.2f Max %5.2f]  IEMRE Hourly [Avg %5.2f Max: %5.2f]" % (
                    numpy.average(stage4), numpy.max(stage4), 
                    numpy.average(iemre2), numpy.max(iemre2) )
    nc.close()
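
The heart of merge() above is a per-cell rescaling: the hourly p01m fields are summed over the 24 hours, a multiplier of the Stage IV 24h total over that sum is formed (with a small floor so the division never blows up), and each hour is multiplied by it so the hours re-total to the Stage IV analysis. A toy NumPy sketch of just that step, with invented arrays standing in for the gridded fields:

import numpy

# Invented 2x2 grid: three hourly fields and a 24h "truth" total
hourly = numpy.array([[[1.0, 0.0], [2.0, 1.0]],
                      [[1.0, 0.0], [1.0, 1.0]],
                      [[0.0, 0.0], [1.0, 2.0]]])
target_total = numpy.array([[3.0, 0.5], [8.0, 2.0]])

# Floor the hourly sum so cells with no precipitation do not divide by zero
hourly_sum = numpy.where(hourly.sum(axis=0) > 0., hourly.sum(axis=0), 0.00024)
multiplier = target_total / hourly_sum
adjusted = hourly * multiplier      # every hour in a cell scaled the same way
print(adjusted.sum(axis=0))         # ~= target_total wherever hourly_sum > 0
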
Example #6
def grid_wind(nc, ts, rs):
    lats = []
    lons = []
    vals = []
    for i in range(len(rs)):
        if rs[i]['max_sknt'] is not None:
            lats.append(  locs[rs[i]['station']]['lat'] )
            lons.append(  locs[rs[i]['station']]['lon'] )
            vals.append( rs[i]['max_sknt'] * 0.514 ) # knots to mps
    if len(vals) < 4:
        print "No WIND data at all for time: %s" % (ts,)   
        return
    grid = Ngl.natgrid(lons, lats, vals, XAXIS, YAXIS)
    grid = Ngl.natgrid(lons, lats, vals, XAXIS, YAXIS)
    offset = int((ts - BASE).hours )
    if grid is not None:
        gt = grid.transpose()
        nc.variables['smps'][offset,:,:] = numpy.where(gt < 0., 0., gt)
    else:
        print "WIND gridding failed, len vals %s" % (len(vals),)
Example #7
def natgrid_interp(data_in, lats_in, lons_in, lats_out, lons_out, valid_range=None, wrapping_overlap_interval=10.):
    IM_i = len(lons_in)
    JM_i = len(lats_in)
    IM_o = len(lons_out)
    JM_o = len(lats_out)
    lons_in_interval1 = np.zeros(IM_i)
    lons_in_interval2 = np.zeros(IM_i)
    lons_out_interval1 = np.zeros(IM_o)
    lons_out_interval2 = np.zeros(IM_o)
    for i in range(IM_i):
        lons_in_interval1[i] = Ngl.normalize_angle(lons_in[i], 0)
        lons_in_interval2[i] = Ngl.normalize_angle(lons_in[i], 1)
    for i in range(IM_o):
        lons_out_interval1[i] = Ngl.normalize_angle(lons_out[i], 0)
    if valid_range is None:
        valid_range = [data_in.min(), data_in.max()]
    x_in_interval1 = np.tile(lons_in_interval1,JM_i)
    y_in = np.repeat(lats_in, IM_i)
    z_in = data_in.flatten()
    if lons_in_interval1.max() - lons_in_interval1.min() > 180. and lons_in_interval2.max() - lons_in_interval2.min() > 180.:
        wrap = True
        ## span of lons is greater than 180; assume that it is a global run and therefore needs to be wrapped
        logical_wrapping = np.logical_and(lons_in_interval2[:] > -wrapping_overlap_interval, lons_in_interval2[:] < 0.)
        lons_wrapped = lons_in_interval2[logical_wrapping]
        data_in_wrapped = data_in[:,logical_wrapping]
        IM_wrapped = len(lons_wrapped)
        lons_wrapped_vector = np.tile(lons_wrapped,JM_i)
        lats_wrapped_vector = np.repeat(lats_in, IM_wrapped)
        data_in_wrapped_vector = data_in_wrapped.flatten()
        ### and add them together
        x_in_interval1 = np.concatenate((x_in_interval1, lons_wrapped_vector))
        y_in = np.concatenate((y_in, lats_wrapped_vector))
        z_in = np.concatenate((z_in, data_in_wrapped_vector))
        #
        logical_wrapping = np.logical_and(lons_in_interval2[:] < wrapping_overlap_interval, lons_in_interval2[:] > 0.)
        lons_wrapped = lons_in_interval1[logical_wrapping] + 360.
        data_in_wrapped = data_in[:,logical_wrapping]
        IM_wrapped = len(lons_wrapped)
        lons_wrapped_vector = np.tile(lons_wrapped,JM_i)
        lats_wrapped_vector = np.repeat(lats_in, IM_wrapped)
        data_in_wrapped_vector = data_in_wrapped.flatten()
        ### and add them together
        x_in_interval1 = np.concatenate((x_in_interval1, lons_wrapped_vector))
        y_in = np.concatenate((y_in, lats_wrapped_vector))
        z_in = np.concatenate((z_in, data_in_wrapped_vector))
        # ### reorder output lons
        # lons_out_interval1_unsorted = lons_out_interval1
        # lons_out_interval1 = lons_out_interval1[lons_out_interval1.argsort()]
    output1 = Ngl.natgrid(x_in_interval1, y_in, z_in, lons_out_interval1, lats_out).transpose()
    output_masked = np.ma.masked_array(output1, mask=np.logical_or(output1 < min(valid_range), output1 > max(valid_range)))
    return output_masked
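
A hedged usage sketch for natgrid_interp: a coarse, synthetic global field regridded onto a finer grid. The input field and both grids are invented, and the sketch assumes the module already has numpy imported as np and Ngl available, as the function body does.

lats_in = np.linspace(-87.5, 87.5, 36)
lons_in = np.linspace(0., 355., 72)        # 0..360 convention; span > 180 triggers the wrap branch
data_in = np.cos(np.radians(lats_in))[:, None] * np.ones((36, 72))

lats_out = np.linspace(-89., 89., 90)
lons_out = np.linspace(0., 358., 180)      # kept on 0..360 so the output axis stays monotonic

regridded = natgrid_interp(data_in, lats_in, lons_in, lats_out, lons_out)
print(regridded.shape)                     # (len(lats_out), len(lons_out))
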
Example #8
def grid_high(nc, ts, rs):
    lats = []
    lons = []
    vals = []
    for i in range(len(rs)):
        if rs[i]['high'] is not None:
            lats.append(locs[rs[i]['station']]['lat'])
            lons.append(locs[rs[i]['station']]['lon'])
            vals.append(mesonet.f2k(rs[i]['high']))
    grid = Ngl.natgrid(lons, lats, vals, XAXIS, YAXIS)
    offset = int((ts - BASE).days)
    if grid is not None:
        nc.variables['high'][offset, :, :] = grid.transpose()
    else:
        print "HIGH gridding failed, len vals %s" % (len(vals), )
Example #9
def grid_p01m(nc, ts, rs):
    lats = []
    lons = []
    vals = []
    for i in range(len(rs)):
        lats.append(  locs[rs[i]['station']]['lat'] )
        lons.append(  locs[rs[i]['station']]['lon'] )
        vals.append( rs[i]['max_p01m'] )
    grid = Ngl.natgrid(lons, lats, vals, XAXIS, YAXIS)
    offset = int((ts - BASE).hours )
    if grid is not None:
        gt = grid.transpose()
        nc.variables['p01m'][offset,:,:] = numpy.where(gt > 0., gt, 0.)
    else:
        print "P01M gridding failed, len vals %s" % (len(vals),)
Example #10
def grid_high(nc, ts, rs):
    lats = []
    lons = []
    vals = []
    for i in range(len(rs)):
        if rs[i]['high'] is not None:
            lats.append(  locs[rs[i]['station']]['lat'] )
            lons.append(  locs[rs[i]['station']]['lon'] )
            vals.append( mesonet.f2k( rs[i]['high'] ) )
    grid = Ngl.natgrid(lons, lats, vals, XAXIS, YAXIS)
    offset = int((ts - BASE).days ) 
    if grid is not None:
        nc.variables['high'][offset,:,:] = grid.transpose()
    else:
        print "HIGH gridding failed, len vals %s" % (len(vals),)
Example #11
def grid_tmpf(nc, ts, rs):
    lats = []
    lons = []
    vals = []
    for i in range(len(rs)):
        if rs[i]['max_tmpf'] is not None:
            lats.append(  locs[rs[i]['station']]['lat'] )
            lons.append(  locs[rs[i]['station']]['lon'] )
            vals.append( mesonet.f2k( rs[i]['max_tmpf'] ) )
    if len(vals) < 4:
        print "No TMPF data at all for time: %s" % (ts,)   
        return
    grid = Ngl.natgrid(lons, lats, vals, XAXIS, YAXIS)
    offset = int((ts - BASE).hours ) 
    if grid is not None:
        nc.variables['tmpk'][offset,:,:] = grid.transpose()
    else:
        print "TMPK gridding failed, len vals %s" % (len(vals),)
Example #12
def grid_relh(nc, ts, rs):
    lats = []
    lons = []
    vals = []
    for i in range(len(rs)):
        if rs[i]['max_tmpf'] is not None and rs[i]['max_dwpf'] is not None:
            lats.append(  locs[rs[i]['station']]['lat'] )
            lons.append(  locs[rs[i]['station']]['lon'] )
            vals.append( mesonet.relh( rs[i]['max_tmpf'], rs[i]['max_dwpf'] ) )
    if len(vals) < 4:
        print "No RELH data at all for time: %s" % (ts,)   
        return
    grid = Ngl.natgrid(lons, lats, vals, XAXIS, YAXIS)
    offset = int((ts - BASE).hours )
    if grid is not None:
        gt = grid.transpose()
        gt = numpy.where(gt < 100.1, gt, 100.)
        nc.variables['relh'][offset,:,:] = numpy.where(gt < 12., 12., gt)
    else:
        print "RELH gridding failed, len vals %s" % (len(vals),)
Example #13
def grid_skyc(rs):
    lats = []
    lons = []
    vals = []
    for row in rs:
        v =  max(row['max_skyc1'], row['max_skyc2'], row['max_skyc3'])
        if v is not None:
            lats.append(  nt.sts[row['station']]['lat'] )
            lons.append(  nt.sts[row['station']]['lon'] )
            vals.append( float(v) )
    if len(vals) < 4:
        print "No SKYC data at all for time: %s" % (ts,)   
        return None
    grid = Ngl.natgrid(lons, lats, vals, iemre.XAXIS, iemre.YAXIS)
    if grid is not None:
        gt = grid.transpose()
        gt = numpy.where(gt > 0., gt, 0.0)
        return numpy.where(gt > 100., 100., gt)
    else:
        return None
Example #14
def merge(ts):
    """
    Process an hour's worth of stage4 data into the hourly RE
    """

    fp = "/mesonet/ARCHIVE/data/%s/stage4/ST4.%s.01h.grib" % (
      ts.strftime("%Y/%m/%d"), ts.strftime("%Y%m%d%H") )
    if os.path.isfile(fp):
        grib = Nio.open_file(fp, 'r')
        # Rough subsample, since the whole enchilada is too much
        lats = numpy.ravel( grib.variables["g5_lat_0"][200:-100:5,300:900:5] )
        lons = numpy.ravel( grib.variables["g5_lon_1"][200:-100:5,300:900:5] )
        vals = numpy.ravel( grib.variables["A_PCP_GDS5_SFC_acc1h"][200:-100:5,300:900:5] )
        # Clip large values
        vals = numpy.where( vals > 250., 0, vals)
        #print 'STAGE4 MIN: %5.2f AVG: %5.2f MAX: %5.2f' % (numpy.min(vals), numpy.average(vals),
        #                                           numpy.max(vals))
        res = Ngl.natgrid(lons, lats, vals, iemre.XAXIS, iemre.YAXIS)
        grib.close()
        del grib
    else:
        print 'Missing stage4 %s' % (fp,)
        res = numpy.zeros( (iemre.NX, iemre.NY))

    # Lets clip bad data
    res = numpy.where(res < 0, 0., res)
    # 10 inches per hour is bad data
    res = numpy.where(res > 250., 0., res)

    # Print out some debugging information for now
    #print '%s MIN: %5.2f AVG: %5.2f MAX: %5.2f' % (ts, numpy.min(res), numpy.average(res),
    #                                               numpy.max(res))
    # Open up our RE file
    nc = netCDF4.Dataset("/mnt/mesonet/data/iemre/%s_mw_hourly.nc" % (
                                                            ts.year,),'a')

    offset = int(( ts - (ts + mx.DateTime.RelativeDateTime(month=1,day=1,hour=0))).hours) - 1
    nc.variables["p01m"][offset,:,:] = res.transpose()

    nc.close()
    del nc
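
The offset arithmetic in this second merge() is easy to misread: ts + RelativeDateTime(month=1, day=1, hour=0) pins the timestamp back to 00 UTC on 1 January, so the expression is simply "hours since the start of the year, minus one", i.e. the hourly index of the preceding hour. An equivalent check with the standard-library datetime module (illustrative only; the script itself stays on mx.DateTime):

import datetime

ts = datetime.datetime(2011, 3, 2, 6)                  # an arbitrary whole hour
jan1 = ts.replace(month=1, day=1, hour=0, minute=0, second=0, microsecond=0)
offset = int((ts - jan1).total_seconds() // 3600) - 1  # hours into the year, minus one
print(offset)                                          # 1445 for this example
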
Example #15
def generic_gridder(rs, idx):
    """
    Generic gridding algorithm for easy variables
    """
    lats = []
    lons = []
    vals = []
    for row in rs:
        if row[idx] is not None:
            lats.append( nt.sts[row['station']]['lat'] )
            lons.append( nt.sts[row['station']]['lon'] )
            vals.append( row[idx]  )
    if len(vals) < 4:
        print "Only %s observations found for %s, won't grid" % (len(vals),
               idx)
        return None
    grid = Ngl.natgrid(lons, lats, vals, iemre.XAXIS, iemre.YAXIS)
    if grid is not None:
        return grid.transpose()
    else:
        return None
Example #16
def grid_iowa(lons, lats, vals):
    """
    Convenience routine to do a simple grid for Iowa
    @return numpy grid of values and plot res
    """
    delx = (IA_EAST - IA_WEST) / (IA_NX - 1)
    dely = (IA_NORTH - IA_SOUTH) / (IA_NY - 1)
    # Create axis
    xaxis = IA_WEST + delx * numpy.arange(0, IA_NX)
    yaxis = IA_SOUTH + dely * numpy.arange(0, IA_NY)
    # Create the analysis
    analysis = Ngl.natgrid(lons, lats, vals, xaxis, yaxis)

    # Setup res
    res = iowa2()

    res.sfXCStartV = min(xaxis)
    res.sfXCEndV   = max(xaxis)
    res.sfYCStartV = min(yaxis)
    res.sfYCEndV   = max(yaxis)

    return analysis, res
Example #17
def grid_conus(lons, lats, vals):
    """
    Convenience routine to do a simple grid for CONUS
    @return numpy grid of values and plot res
    """
    delx = (CONUS_EAST - CONUS_WEST) / (CONUS_NX - 1)
    dely = (CONUS_NORTH - CONUS_SOUTH) / (CONUS_NY - 1)
    # Create axis
    xaxis = CONUS_WEST + delx * numpy.arange(0, CONUS_NX)
    yaxis = CONUS_SOUTH + dely * numpy.arange(0, CONUS_NY)
    # Create the analysis
    analysis = Ngl.natgrid(lons, lats, vals, xaxis, yaxis)

    # Setup res
    res = conus()

    res.sfXCStartV = min(xaxis)
    res.sfXCEndV   = max(xaxis)
    res.sfYCStartV = min(yaxis)
    res.sfYCEndV   = max(yaxis)
    
    return analysis, res
Example #18
def grid_northeast(lons, lats, vals):
    """
    Convenience routine to do a simple grid for the Northeast
    @return numpy grid of values and plot res
    """
    delx = (NE_EAST - NE_WEST) / (NE_NX - 1)
    dely = (NE_NORTH - NE_SOUTH) / (NE_NY - 1)
    # Create axis
    xaxis = NE_WEST + delx * numpy.arange(0, NE_NX)
    yaxis = NE_SOUTH + dely * numpy.arange(0, NE_NY)
    # Create the analysis
    analysis = Ngl.natgrid(lons, lats, vals, xaxis, yaxis)

    # Setup res
    res = northeast()

    res.sfXCStartV = min(xaxis)
    res.sfXCEndV   = max(xaxis)
    res.sfYCStartV = min(yaxis)
    res.sfYCEndV   = max(yaxis)

    return analysis, res
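
grid_iowa, grid_conus and grid_northeast differ only in their corner constants, grid dimensions and resource factory, so a single parameterized helper would cover all three. A hedged sketch of that consolidation, reusing only names that already appear in these snippets (numpy, Ngl, the *_WEST/_EAST/_SOUTH/_NORTH/_NX/_NY constants and the iowa2/conus/northeast resource constructors):

def grid_region(lons, lats, vals, west, east, south, north, nx, ny, res_factory):
    """Natural-neighbor analysis over a simple lat/lon box."""
    delx = (east - west) / (nx - 1)
    dely = (north - south) / (ny - 1)
    xaxis = west + delx * numpy.arange(0, nx)
    yaxis = south + dely * numpy.arange(0, ny)
    analysis = Ngl.natgrid(lons, lats, vals, xaxis, yaxis)

    res = res_factory()
    res.sfXCStartV = min(xaxis)
    res.sfXCEndV = max(xaxis)
    res.sfYCStartV = min(yaxis)
    res.sfYCEndV = max(yaxis)
    return analysis, res

# grid_region(lons, lats, vals, IA_WEST, IA_EAST, IA_SOUTH, IA_NORTH,
#             IA_NX, IA_NY, iowa2) would reproduce grid_iowa(lons, lats, vals)
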
Example #19
def grid_skyc(nc, ts, rs):
    lats = []
    lons = []
    vals = []
    for i in range(len(rs)):
        v =  max(rs[i]['max_skyc1'], rs[i]['max_skyc2'], rs[i]['max_skyc3'])
        if v is not None:
            lats.append(  locs[rs[i]['station']]['lat'] )
            lons.append(  locs[rs[i]['station']]['lon'] )
            vals.append( float(v) )
    if len(vals) < 4:
        print "No SKYC data at all for time: %s" % (ts,)   
        return
    grid = Ngl.natgrid(lons, lats, vals, XAXIS, YAXIS)
    offset = int((ts - BASE).hours )
    if grid is not None:
        gt = grid.transpose()
        gt = numpy.where(gt > 0., gt, 0.0)
        nc.variables['skyc'][offset,:,:] = numpy.where(gt > 100., 100., gt)
    else:
        print "SKYC gridding failed, len vals %s" % (len(vals),)
        print vals
Example #20
def generic_gridder(rs, idx):
    """
    Generic gridding algorithm for easy variables
    """
    lats = []
    lons = []
    vals = []
    for i in range(len(rs)):
        if rs[i][idx] is not None and locs.has_key(rs[i]['station']):
            lats.append(  locs[rs[i]['station']]['lat'] + (random.random() * .01)) 
            lons.append(  locs[rs[i]['station']]['lon'] )
            vals.append( rs[i][idx]  )
    if len(vals) < 4:
        print "Only %s observations found for %s, won't grid" % (len(vals),
               idx)
        return None
    grid = Ngl.natgrid(lons, lats, vals, iemre.XAXIS, iemre.YAXIS)
    if grid is not None:
        # Only report the extrema once we know the analysis succeeded
        print len(rs), idx, numpy.max(grid), numpy.min(grid)
        return grid.transpose()
    else:
        return None
Example #21
def generic_gridder(rs, idx):
    """
    Generic gridding algorithm for easy variables
    """
    lats = []
    lons = []
    vals = []
    for row in rs:
        stid = row['station']
        if row[idx] is not None and locs.has_key(stid):
            lats.append(  locs[stid]['lat'] + (random.random() * 0.01) )
            lons.append(  locs[stid]['lon'] )
            vals.append( row[idx]  )
    if len(vals) < 4:
        print "Only %s observations found for %s, won't grid" % (len(vals),
               idx)
        return None
    grid = Ngl.natgrid(lons, lats, vals, iemre.XAXIS, iemre.YAXIS)
    if grid is not None:
        return grid.transpose()
    else:
        return None
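
The small random jitter added to the latitudes in this and several other snippets (#20, #23, #33) is presumably there to keep two stations from landing on exactly the same coordinate, which the natural-neighbor triangulation in natgrid generally cannot handle. A hedged alternative sketch that drops exact duplicates explicitly instead of perturbing every point; the rounding tolerance is an arbitrary choice:

def dedup_points(lons, lats, vals):
    """Keep the first value seen at each unique (lon, lat) pair."""
    seen = set()
    out_lons, out_lats, out_vals = [], [], []
    for lon, lat, val in zip(lons, lats, vals):
        key = (round(lon, 4), round(lat, 4))
        if key in seen:
            continue
        seen.add(key)
        out_lons.append(lon)
        out_lats.append(lat)
        out_vals.append(val)
    return out_lons, out_lats, out_vals

# lons, lats, vals = dedup_points(lons, lats, vals)
# grid = Ngl.natgrid(lons, lats, vals, iemre.XAXIS, iemre.YAXIS)
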
Example #22
#
x = numpy.array(seismic[:, 0], 'f')
y = numpy.array(seismic[:, 1], 'f')
z = numpy.array(seismic[:, 2], 'f') - 785.

#
#  Define output grid for the calls to "natgrid".
#
numxout, numyout = 20, 20
xmin, xmax, ymin, ymax = min(x), max(x), min(y), max(y)
xc = (xmax - xmin) / (numxout - 1)
yc = (ymax - ymin) / (numyout - 1)
xo = xmin + xc * numpy.arange(0, numxout)
yo = ymin + yc * numpy.arange(0, numyout)

zo = Ngl.natgrid(x, y, z, xo, yo)  # Interpolate, allow negative values.

#
#  Define a color map and open four different types of workstations.
#
cmap = numpy.array([  [1.00, 0.00, 0.00], [1.00, 0.00, 0.40], \
                      [1.00, 0.00, 0.80], [1.00, 0.20, 1.00], \
                      [1.00, 0.60, 1.00], [0.60, 0.80, 1.00], \
                      [0.20, 0.80, 1.00], [0.20, 0.80, 0.60], \
                      [0.20, 0.80, 0.00], [0.20, 0.40, 0.00], \
                      [0.20, 0.45, 0.40], [0.20, 0.40, 0.80], \
                      [0.60, 0.40, 0.80], [0.60, 0.80, 0.80], \
                      [0.60, 0.80, 0.40], [1.00, 0.60, 0.80]],'f')
wks_type = "png"
wks = Ngl.open_wks(wks_type, "natgrid1")
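
One plausible way to finish this natgrid1.py excerpt is to hand the interpolated field to Ngl.contour; the resources below are illustrative choices, not necessarily the ones used in the full original script.

res = Ngl.Resources()
res.sfXArray = xo                  # coordinates of the analysis grid
res.sfYArray = yo
res.cnFillOn = True                # filled contours

# natgrid returns the field indexed (x, y); contour expects (y, x)
plot = Ngl.contour(wks, numpy.transpose(zo), res)
Ngl.end()
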
Example #23
import random
import sys

# two levels, lat_98 , lon_98
grbs = pygrib.open('flx.ft06.2046010100.grb')
print grbs[6]['values']
print grbs[7]['values']
lats, lons = grbs[6].latlons()
lats = lats[:, 0]
lons = lons[0, :]

# Our final values
emm5 = mm5_class.mm5('MMOUT_DOMAIN1_46')
edata = numpy.ravel(emm5.get_field('soil_m_1', 0)["values"])
edata4 = numpy.ravel(emm5.get_field('soil_m_4', 0)["values"])
elats = numpy.ravel(emm5.get_field('latitcrs', 0)["values"])
elons = numpy.ravel(emm5.get_field('longicrs', 0)["values"])
for i in range(len(elats)):
    elats[i] += (random.random() * 0.01)

newdata = Ngl.natgrid(elons, elats, edata, lons, lats)
grbs[6]['values'] = newdata
newdata = Ngl.natgrid(elons, elats, edata4, lons, lats)
grbs[7]['values'] = newdata

o = open('flx.ft06.2046010100.grb-new', 'wb')
grbs.rewind()
for grb in grbs:
    o.write(grb.tostring())
o.close()
Example #24
File: bug.py Project: akrherz/MSDOT
import Ngl
import numpy

vals = [278.14, 280.87, 280.87]
lats = [31.39, 33.21, 33.57]
lons = [-92.29, -87.62, -86.75]

XAXIS = numpy.arange(-92., -88.25, 0.25)
YAXIS = numpy.arange(30., 25., 0.25)

grid = Ngl.natgrid(lons, lats, vals, XAXIS, YAXIS)
grid = Ngl.natgrid(lons, lats, vals, XAXIS, YAXIS)
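
Worth noting about this snippet: numpy.arange(30., 25., 0.25) returns an empty array, because the start is greater than the stop while the step is positive, so YAXIS here has length zero (which may well be the problem the file name refers to, though that is a guess). A small illustration of how an ascending or descending axis would normally be built:

import numpy

print(len(numpy.arange(30., 25., 0.25)))        # 0 -- start > stop with a positive step
ascending = numpy.arange(25., 30.25, 0.25)      # 25.0, 25.25, ..., 30.0
descending = numpy.arange(30., 24.75, -0.25)    # 30.0, 29.75, ..., 25.0
also_ascending = numpy.linspace(25., 30., 21)   # endpoint-inclusive alternative
print(len(ascending), len(descending), len(also_ascending))   # 21 21 21
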
Example #25
y = numpy.array(seismic[:, 1], 'f')
z = numpy.array(seismic[:, 2], 'f')

numxout = 20  # Define output grid for call to "natgrid".
numyout = 20
xmin = min(x)
ymin = min(y)
xmax = max(x)
ymax = max(y)

xc = (xmax - xmin) / (numxout - 1)
yc = (ymax - ymin) / (numyout - 1)

xo = xmin + xc * numpy.arange(0, numxout)
yo = ymin + yc * numpy.arange(0, numyout)
zo = Ngl.natgrid(x, y, z, xo, yo)  # Interpolate.

#
#  Define a color map and open four different types of workstations.
#
cmap = numpy.array([[1.00, 0.00, 0.00], [1.00, 0.00, 0.40], \
                    [1.00, 0.00, 0.80], [1.00, 0.20, 1.00], \
                    [1.00, 0.60, 1.00], [0.60, 0.80, 1.00], \
                    [0.20, 0.80, 1.00], [0.20, 0.80, 0.60], \
                    [0.20, 0.80, 0.00], [0.20, 0.40, 0.00], \
                    [0.20, 0.45, 0.40], [0.20, 0.40, 0.80], \
                    [0.60, 0.40, 0.80], [0.60, 0.80, 0.80], \
                    [0.60, 0.80, 0.40], [1.00, 0.60, 0.80]],'f')

xwks = Ngl.open_wks("x11", "ngl08p")  # Send graphics to an X11 window
cgmwks = Ngl.open_wks("ncgm", "ngl08p")  # Send graphics to an NCGM file
Example #26
  vsm.append( float(rs[i]['vsm']) )

# Lets grid!
numxout = 35
numyout = 30
xmin    = min(lons) - 0.25
ymin    = min(lats) - 0.25
xmax    = max(lons) + 0.25
ymax    = max(lats) + 0.25

xc      = (xmax-xmin)/(numxout-1)
yc      = (ymax-ymin)/(numyout-1)

xo = xmin + xc*Numeric.arange(0,numxout)
yo = ymin + yc*Numeric.arange(0,numyout)
g_s10cm = Ngl.natgrid(lons, lats, s10cm, xo, yo)
g_s20cm = Ngl.natgrid(lons, lats, s20cm, xo, yo)
g_vsm = Ngl.natgrid(lons, lats, vsm, xo, yo)

# Write NetCDF
nc = NetCDFFile('iem_soilm.nc', 'w')
nc.createDimension('latitude', numyout)
nc.createDimension('longitude', numxout)

la = nc.createVariable('latitude', Numeric.Float, ('latitude',) )
la.units = 'degrees_north'
lo = nc.createVariable('longitude', Numeric.Float, ('longitude',) )
lo.units = 'degrees_east'

nc_s10cm = nc.createVariable('s10cm', Numeric.Float, ('longitude','latitude') )
nc_s10cm.units = 'millimeters'
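
A hedged sketch of how this NetCDF write might be finished, assuming the NetCDFFile variables accept slice assignment (as in the old Scientific.IO.NetCDF API this snippet appears to use): fill the coordinate variables from the analysis axes, store the gridded field, and close the file.

la[:] = yo               # latitudes of the analysis grid
lo[:] = xo               # longitudes of the analysis grid
nc_s10cm[:] = g_s10cm    # natgrid output is already ordered (longitude, latitude)
nc.close()
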
Example #27
def process_period(start_time_string,
                   end_time_string,
                   outfileprefix,
                   force=False):
    nc_fn = outfileprefix + ".nc"
    print("Processing %s to %s output to %s" %
          (start_time_string, end_time_string, nc_fn))

    if (not force) and os.path.exists(nc_fn):
        print("File %s exists - skipping" % nc_fn)
        return

    # pick interpolation method (natural neighbor or linear is recommended):
    #   'nearest' = nearest neighbor
    #   'linear' = linear
    #   'cubic' = cubic spline
    #   'natural' = natural neighbor
    interp_method = 'natural'

    # specify comment string (one line only, i.e., no '\n') for *.amu/*.amv files
    commentstring = 'Prepared by Allie King, SFEI, times are in PST (ignore the +00:00), adapted by Rusty Holleman'

    # specify properties of the wind grid -- this one was used for CASCaDE and sfb_dfm
    bounds = [340000, 610000, 3980000, 4294000]
    dx = 1500.
    dy = 1500.

    #--------------------------------------------------------------------------------------#
    # Main Program
    #--------------------------------------------------------------------------------------#

    n_cols = int(round(1 + (bounds[1] - bounds[0]) / dx))
    n_rows = int(round(1 + (bounds[3] - bounds[2]) / dy))
    x_llcorner = bounds[0]
    y_llcorner = bounds[2]

    start_date = np.datetime64(start_time_string)
    end_date = np.datetime64(end_time_string)

    # specify directory containing the compiled wind observation data and station
    # coordinates (SFB_hourly_U10_2011.csv, SFB_hourly_V10_2011.csv, etc...)
    windobspath = os.path.join(basedir, 'Compiled_Hourly_10m_Winds/data')

    # convert start and end time to datetime object
    start_dt = utils.to_datetime(start_date)
    end_dt = utils.to_datetime(end_date)

    # create a meshgrid corresponding to the CASCaDE wind grid
    x_urcorner = x_llcorner + dx * (n_cols - 1)
    y_urcorner = y_llcorner + dy * (n_rows - 1)
    x = np.linspace(x_llcorner, x_urcorner, n_cols)
    # RH: orient y the usual way, not the arcinfo/dfm wind way (i.e. remove flipud)
    y = np.linspace(y_llcorner, y_urcorner, n_rows)
    xg, yg = np.meshgrid(x, y)

    # read the observed wind data
    tz_offset = dt.timedelta(hours=8)
    try:
        # start_time,end_time are in UTC, so remove the offset when requesting data
        # from wlib which expects PST
        time_days, station_names, U10_obs = wlib.read_10m_wind_data_from_csv(
            os.path.join(windobspath, 'SFB_hourly_U10_'), start_dt - tz_offset,
            end_dt - tz_offset)
        time_days, station_names, V10_obs = wlib.read_10m_wind_data_from_csv(
            os.path.join(windobspath, 'SFB_hourly_V10_'), start_dt - tz_offset,
            end_dt - tz_offset)
    except FileNotFoundError:
        print("Okay - probably beyond the SFEI data")
        U10_obs = V10_obs = None

    if U10_obs is not None:
        # note that time_days is just decimal days after start, so it doesn't need to be adjusted for timezone.
        # read the coordinates of the wind observation stations
        df = pd.read_csv(os.path.join(windobspath, 'station_coordinates.txt'))
        station_names_check = df['Station Organization-Name'].values
        x_obs = df['x (m - UTM Zone 10N)'].values
        y_obs = df['y (m - UTM Zone 10N)'].values
        Nstations = len(df)
        for snum in range(Nstations):
            if not station_names[snum] == station_names_check[snum]:
                raise ValueError(
                    'station names in station_coordinates.txt must match headers '
                    'in SFB_hourly_U10_YEAR.csv and SFB_hourly_V10_YEAR.csv files'
                )
    else:
        x_obs = np.zeros(0, np.float64)
        y_obs = np.zeros(0, np.float64)
        Nstations = 0

        # Fabricate time_days
        all_times = []
        t = start_dt
        interval = dt.timedelta(hours=1)
        while t <= end_dt:
            all_times.append(t)
            t = t + interval
        all_dt64 = np.array([utils.to_dt64(t) for t in all_times])
        time_days = (all_dt64 - all_dt64[0]) / np.timedelta64(1, 's') / 86400.

    # zip the x, y coordinates for use in the griddata interpolation
    points = np.column_stack((x_obs, y_obs))

    # loop through all times, at each time step find all the non-nan data, and
    # interpolate it onto the model grid, then compile the data from all times
    # into a dimension-3 matrix. keep track of which stations were non-nan ('good')
    # at each time step in the matrix igood
    coamps_ds = None  # handled on demand below
    coamps_xy = None  # ditto
    # drops COAMPS data points within buffer dist of a good observation
    buffer_dist = 30e3

    for it in range(len(time_days)):
        if it % 10 == 0:
            print("%d/%d steps" % (it, len(time_days)))

        #-- augment with COAMPS output
        target_time = start_date + np.timedelta64(int(time_days[it] * 86400),
                                                  's')
        if (coamps_ds is None) or (target_time > coamps_ds.time.values[-1]):
            coamps_ds = coamps.coamps_dataset(bounds,
                                              target_time,
                                              target_time +
                                              np.timedelta64(1, 'D'),
                                              cache_dir=cache_dir,
                                              fields=['wnd_utru', 'wnd_vtru'])
            # reduce dataset size -- out in the ocean really don't need too many points
            coamps_ds = coamps_ds.isel(x=slice(None, None, 2),
                                       y=slice(None, None, 2))

            coamps_X, coamps_Y = np.meshgrid(coamps_ds.x.values,
                                             coamps_ds.y.values)
            coamps_xy = np.c_[coamps_X.ravel(), coamps_Y.ravel()]
            print("COAMPS shape: ", coamps_X.shape)

            # seems that the coamps dataset is not entirely consistent in its shape?
            # not sure what's going on, but best to redefine this each time to be
            # sure.
            @memoize.memoize()
            def mask_near_point(xy):
                dists = utils.dist(xy, coamps_xy)
                return (dists > buffer_dist)

        coamps_time_idx = utils.nearest(coamps_ds.time, target_time)
        coamps_sub = coamps_ds.isel(time=coamps_time_idx)

        # Which coamps points are far enough from good observations.  there are
        # also some time where coamps data is missing
        # mask=np.ones(len(coamps_xy),np.bool8)
        mask = np.isfinite(coamps_sub.wind_u.values.ravel())

        # find all non-nan data at this time step
        if U10_obs is not None:
            igood = np.logical_and(~np.isnan(U10_obs[it, :]),
                                   ~np.isnan(V10_obs[it, :]))
            obs_xy = np.c_[x_obs[igood], y_obs[igood]]

            for xy in obs_xy:
                mask = mask & mask_near_point(xy)

            input_xy = np.concatenate([obs_xy, coamps_xy[mask]])
            input_U = np.concatenate(
                [U10_obs[it, igood],
                 coamps_sub.wind_u.values.ravel()[mask]])
            input_V = np.concatenate(
                [V10_obs[it, igood],
                 coamps_sub.wind_v.values.ravel()[mask]])
        else:
            # No SFEI data --
            input_xy = coamps_xy[mask]
            input_U = coamps_sub.wind_u.values.ravel()[mask]
            input_V = coamps_sub.wind_v.values.ravel()[mask]

        if np.any(np.isnan(input_U)) or np.any(np.isnan(input_V)):
            import pdb
            pdb.set_trace()

        Ngood = len(input_xy)

        # set the interpolation method to be used in this time step: interp_method_1.
        # ideally, this would just be the user-defined interpolation method:
        # interp_method. however, if we do not have enough non-nan data to use the
        # user-defined method this time step, temporarily revert to the nearest
        # neighbor method
        if interp_method in ('natural', 'linear', 'cubic'):
            if Ngood >= 4:
                interp_method_1 = interp_method
            else:
                interp_method_1 = 'nearest'
        elif interp_method == 'nearest':
            interp_method_1 = 'nearest'

        # if natural neighbor method, interpolate using the pyngl package
        if interp_method_1 == 'natural':
            U10g = np.transpose(
                ngl.natgrid(input_xy[:, 0], input_xy[:, 1], input_U, xg[0, :],
                            yg[:, 0]))
            V10g = np.transpose(
                ngl.natgrid(input_xy[:, 0], input_xy[:, 1], input_V, xg[0, :],
                            yg[:, 0]))

        # for other interpolation methods use the scipy package
        else:
            U10g = griddata(input_xy,
                            input_U, (xg, yg),
                            method=interp_method_1)
            V10g = griddata(input_xy,
                            input_V, (xg, yg),
                            method=interp_method_1)

            # since griddata interpolation fills all data outside range with nan, use
            # the nearest neighbor method to extrapolate
            U10g_nn = griddata(input_xy, input_U, (xg, yg), method='nearest')
            V10g_nn = griddata(input_xy, input_V, (xg, yg), method='nearest')
            ind = np.isnan(U10g)
            U10g[ind] = U10g_nn[ind]
            ind = np.isnan(V10g)
            V10g[ind] = V10g_nn[ind]

        # compile results together over time
        # igood_all not updated for COAMPS, omit here.
        if it == 0:
            U10g_all = np.expand_dims(U10g, axis=0)
            V10g_all = np.expand_dims(V10g, axis=0)
            # igood_all = np.expand_dims(igood,axis=0)
        else:
            U10g_all = np.append(U10g_all,
                                 np.expand_dims(U10g, axis=0),
                                 axis=0)
            V10g_all = np.append(V10g_all,
                                 np.expand_dims(V10g, axis=0),
                                 axis=0)
            # igood_all = np.append(igood_all, np.expand_dims(igood,axis=0), axis=0)

    ##

    # Write netcdf:
    ds = xr.Dataset()

    ds['time'] = ('time', ), start_date + (time_days * 86400).astype(
        np.int32) * np.timedelta64(1, 's')
    ds['x'] = ('x', ), x
    ds['y'] = ('y', ), y
    ds['wind_u'] = ('time', 'y', 'x'), U10g_all
    ds['wind_v'] = ('time', 'y', 'x'), V10g_all

    os.path.exists(nc_fn) and os.unlink(nc_fn)
    ds.to_netcdf(nc_fn)
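
The per-time-step method selection inside process_period (fall back to nearest neighbor whenever fewer than four usable points exist, since the natural, linear and cubic methods need more support) is compact enough to pull into a helper. A hedged refactor sketch; the threshold of four mirrors the code above:

def pick_interp_method(requested, n_good, minimum=4):
    """Return the interpolation method to use for one time step."""
    if requested in ('natural', 'linear', 'cubic') and n_good >= minimum:
        return requested
    return 'nearest'

# e.g. interp_method_1 = pick_interp_method(interp_method, Ngood)
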
Example #28
File: natgrid1.py Project: yyr/pyngl
#
x = numpy.array(seismic[:,0],'f')
y = numpy.array(seismic[:,1],'f')
z = numpy.array(seismic[:,2],'f')-785.

#
#  Define output grid for the calls to "natgrid".
#
numxout, numyout = 20, 20 
xmin, xmax, ymin, ymax   = min(x), max(x), min(y), max(y)
xc      = (xmax-xmin)/(numxout-1)
yc      = (ymax-ymin)/(numyout-1)
xo = xmin + xc*numpy.arange(0,numxout)
yo = ymin + yc*numpy.arange(0,numyout)

zo = Ngl.natgrid(x, y, z, xo, yo)   # Interpolate, allow negative values.

#
#  Define a color map and open four different types of workstations.
#
cmap = numpy.array([[1.00, 1.00, 1.00], [0.00, 0.00, 0.00], \
                      [1.00, 0.00, 0.00], [1.00, 0.00, 0.40], \
                      [1.00, 0.00, 0.80], [1.00, 0.20, 1.00], \
                      [1.00, 0.60, 1.00], [0.60, 0.80, 1.00], \
                      [0.20, 0.80, 1.00], [0.20, 0.80, 0.60], \
                      [0.20, 0.80, 0.00], [0.20, 0.40, 0.00], \
                      [0.20, 0.45, 0.40], [0.20, 0.40, 0.80], \
                      [0.60, 0.40, 0.80], [0.60, 0.80, 0.80], \
                      [0.60, 0.80, 0.40], [1.00, 0.60, 0.80]],'f')
rlist = Ngl.Resources()
rlist.wkColorMap = cmap
Example #29
    if float(tokens[0]) > 19.9:
        continue
    if float(tokens[0]) < 4:
        print tokens
        continue
    vals.append( float( tokens[0] ) )
    lats.append( float( tokens[2] ) )
    lons.append( float( tokens[1] ) )
    plotvals.append( tokens[3] )
delx = (iemplot.MW_EAST - iemplot.MW_WEST) / (iemplot.MW_NX - 1)
dely = (iemplot.MW_NORTH - iemplot.MW_SOUTH) / (iemplot.MW_NY - 1)
# Create axis
xaxis = iemplot.MW_WEST + delx * numpy.arange(0, iemplot.MW_NX)
yaxis = iemplot.MW_SOUTH + dely * numpy.arange(0, iemplot.MW_NY)

obs = Ngl.natgrid(lons, lats, vals, xaxis, yaxis)

IEM = iemdb.connect('coop', bypass=True)
icursor = IEM.cursor()

vals2 = []
lats2 = []
lons2 = []
icursor.execute("""
  select id, x(geom), y(geom), s.sum from
    (select station, sum(snow) from climate51
     where station ~* '^IA'
     and (valid > '2000-10-01' or valid < '2000-02-17')
     GROUP by station) as s
  JOIN stations t on (t.id = s.station)
  WHERE s.sum > 0 and t.network = 'IACLIMATE'
  and t.id NOT IN ('IA2999', 'IA7147', 'IA4705', 'IA3509', 'IA6800', 'IA1233',
    'IA7312', 'IA3517', 'IA7678', 'IA6940', 'IA4049', 'IA8706', 'IA1354',
    'IA2203', 'IA5769', 'IA3980')
  ORDER by sum ASC
""")
for row in icursor:
  vals2.append( row[3] )
  lats2.append( row[2] )
  lons2.append( row[1] )
Example #30
File: ngl08p.py Project: akrherz/me
y = Numeric.array(seismic[:,1],Numeric.Float0)
z = Numeric.array(seismic[:,2],Numeric.Float0)

numxout = 20     # Define output grid for call to "natgrid".
numyout = 20
xmin    = min(x)
ymin    = min(y)
xmax    = max(x)
ymax    = max(y)

xc      = (xmax-xmin)/(numxout-1)
yc      = (ymax-ymin)/(numyout-1)

xo = xmin + xc*Numeric.arange(0,numxout)
yo = ymin + yc*Numeric.arange(0,numyout)
zo = Ngl.natgrid(x, y, z, xo, yo)   # Interpolate.

#
#  Define a color map and open four different types of workstations.
#
cmap = Numeric.array([[1.00, 1.00, 1.00], [0.00, 0.00, 0.00], \
                      [1.00, 0.00, 0.00], [1.00, 0.00, 0.40], \
                      [1.00, 0.00, 0.80], [1.00, 0.20, 1.00], \
                      [1.00, 0.60, 1.00], [0.60, 0.80, 1.00], \
                      [0.20, 0.80, 1.00], [0.20, 0.80, 0.60], \
                      [0.20, 0.80, 0.00], [0.20, 0.40, 0.00], \
                      [0.20, 0.45, 0.40], [0.20, 0.40, 0.80], \
                      [0.60, 0.40, 0.80], [0.60, 0.80, 0.80], \
                      [0.60, 0.80, 0.40], [1.00, 0.60, 0.80]],Numeric.Float0)
rlist = Ngl.Resources()
rlist.wkColorMap = cmap
Example #31
File: bug.py Project: akrherz/MSDOT
import Ngl
import numpy

vals = [278.14, 280.87, 280.87]
lats = [31.39, 33.21, 33.57]
lons = [-92.29, -87.62, -86.75]

XAXIS = numpy.arange(-92., -88.25, 0.25)
YAXIS = numpy.arange(30.,25., 0.25)

grid = Ngl.natgrid(lons, lats, vals, XAXIS, YAXIS)
grid = Ngl.natgrid(lons, lats, vals, XAXIS, YAXIS)
Example #32
def natgrid_interp(data_in,
                   lats_in,
                   lons_in,
                   lats_out,
                   lons_out,
                   valid_range=None,
                   wrapping_overlap_interval=10.):
    IM_i = len(lons_in)
    JM_i = len(lats_in)
    IM_o = len(lons_out)
    JM_o = len(lats_out)
    lons_in_interval1 = np.zeros(IM_i)
    lons_in_interval2 = np.zeros(IM_i)
    lons_out_interval1 = np.zeros(IM_o)
    lons_out_interval2 = np.zeros(IM_o)
    for i in range(IM_i):
        lons_in_interval1[i] = Ngl.normalize_angle(lons_in[i], 0)
        lons_in_interval2[i] = Ngl.normalize_angle(lons_in[i], 1)
    for i in range(IM_o):
        lons_out_interval1[i] = Ngl.normalize_angle(lons_out[i], 0)
    if valid_range is None:
        valid_range = [data_in.min(), data_in.max()]
    x_in_interval1 = np.tile(lons_in_interval1, JM_i)
    y_in = np.repeat(lats_in, IM_i)
    z_in = data_in.flatten()
    if lons_in_interval1.max() - lons_in_interval1.min(
    ) > 180. and lons_in_interval2.max() - lons_in_interval2.min() > 180.:
        wrap = True
        ## span of lons is greater than 180; assume that it is a global run and therefore needs to be wrapped
        logical_wrapping = np.logical_and(
            lons_in_interval2[:] > -wrapping_overlap_interval,
            lons_in_interval2[:] < 0.)
        lons_wrapped = lons_in_interval2[logical_wrapping]
        data_in_wrapped = data_in[:, logical_wrapping]
        IM_wrapped = len(lons_wrapped)
        lons_wrapped_vector = np.tile(lons_wrapped, JM_i)
        lats_wrapped_vector = np.repeat(lats_in, IM_wrapped)
        data_in_wrapped_vector = data_in_wrapped.flatten()
        ### and add them together
        x_in_interval1 = np.concatenate((x_in_interval1, lons_wrapped_vector))
        y_in = np.concatenate((y_in, lats_wrapped_vector))
        z_in = np.concatenate((z_in, data_in_wrapped_vector))
        #
        logical_wrapping = np.logical_and(
            lons_in_interval2[:] < wrapping_overlap_interval,
            lons_in_interval2[:] > 0.)
        lons_wrapped = lons_in_interval1[logical_wrapping] + 360.
        data_in_wrapped = data_in[:, logical_wrapping]
        IM_wrapped = len(lons_wrapped)
        lons_wrapped_vector = np.tile(lons_wrapped, JM_i)
        lats_wrapped_vector = np.repeat(lats_in, IM_wrapped)
        data_in_wrapped_vector = data_in_wrapped.flatten()
        ### and add them together
        x_in_interval1 = np.concatenate((x_in_interval1, lons_wrapped_vector))
        y_in = np.concatenate((y_in, lats_wrapped_vector))
        z_in = np.concatenate((z_in, data_in_wrapped_vector))
        # ### reorder output lons
        # lons_out_interval1_unsorted = lons_out_interval1
        # lons_out_interval1 = lons_out_interval1[lons_out_interval1.argsort()]
    output1 = Ngl.natgrid(x_in_interval1, y_in, z_in, lons_out_interval1,
                          lats_out).transpose()
    output_masked = np.ma.masked_array(output1,
                                       mask=np.logical_or(
                                           output1 < min(valid_range),
                                           output1 > max(valid_range)))
    return output_masked
Example #33
File: convert.py Project: akrherz/pccsp
import sys

# two levels, lat_98 , lon_98
grbs = pygrib.open('flx.ft06.2046010100.grb')
print grbs[6]['values']
print grbs[7]['values']
lats, lons = grbs[6].latlons()
lats = lats[:,0]
lons = lons[0,:]


# Our final values
emm5 = mm5_class.mm5('MMOUT_DOMAIN1_46')
edata = numpy.ravel(emm5.get_field('soil_m_1',0)["values"])
edata4 = numpy.ravel(emm5.get_field('soil_m_4',0)["values"])
elats = numpy.ravel(emm5.get_field('latitcrs',0)["values"])
elons = numpy.ravel(emm5.get_field('longicrs',0)["values"])
for i in range(len(elats)):
  elats[i] += (random.random()  * 0.01)

newdata = Ngl.natgrid(elons, elats, edata, lons, lats)
grbs[6]['values'] = newdata
newdata = Ngl.natgrid(elons, elats, edata4, lons, lats)
grbs[7]['values'] = newdata

o = open('flx.ft06.2046010100.grb-new', 'wb')
grbs.rewind()
for grb in grbs:
  o.write( grb.tostring() )
o.close()
Example #34
    """ % (row[0].year, str(tuple(ids))[1:-1], row[0], row[0])
    acursor.execute(sql)
    if acursor.rowcount < 4:
        print 'ASOS Missing!', row[0], row[1]
        continue
        
    for row2 in acursor:
        u,v = uv(row2[1] * 0.514, row2[2])
        lats.append( stations[row2[0]]['lat'] + random.random() * 0.01)
        lons.append( stations[row2[0]]['lon'] + random.random() * 0.01)
        U.append( u )
        V.append( v )
        
    xaxis = numpy.arange(row[4], row[3], 0.25)
    yaxis = numpy.arange(row[6], row[5], 0.25)
    Ugrid = Ngl.natgrid(lons, lats, U, xaxis, yaxis)
    Vgrid = Ngl.natgrid(lons, lats, V, xaxis, yaxis)
    avgU = numpy.average(Ugrid)
    avgV = numpy.average(Vgrid)
    avgDir = dir22(avgU,avgV)
    reports.append(r)
    dirs.append(  math.radians(avgDir) )
    sknts.append( ((avgU ** 2) + (avgV ** 2))**0.5)
    print '%s,%s,%s' % (r, math.radians(avgDir), ((avgU ** 2) + (avgV ** 2))**0.5)

reports = numpy.array(reports)

dirs = numpy.array( dirs )
import matplotlib.pyplot as plt

fig = plt.figure()
Example #36
lats = numpy.array( lats )
lons = numpy.array( lons )

numxout = 40
numyout = 40
xmin    = min(lons) - 0.5
ymin    = min(lats) - 0.5
xmax    = max(lons) + 0.5
ymax    = max(lats) + 0.5

xc      = (xmax-xmin)/(numxout-1)
yc      = (ymax-ymin)/(numyout-1)

xo = xmin + xc*numpy.arange(0,numxout)
yo = ymin + yc*numpy.arange(0,numyout)
zavg = Ngl.natgrid(lons, lats, nrain, xo, yo)
#zavg = nine_smooth2D(zavg)


# Compute obs!
lats = None
lons = None
interval = mx.DateTime.RelativeDateTime(days=1)
now = t0
while (now <= ts):
    fp = "/mesonet/wepp/data/rainfall/netcdf/daily/%s_rain.nc" % (now.strftime("%Y/%m/%Y%m%d") ,) 
    if not os.path.isfile(fp):
        print fp
        now += interval
        continue
    nc = netCDF4.Dataset(fp)