# Example 1
def driver_vortexLoc(data, timeInds, pt_ll, cClose, cellsOnCell, nEdgesOnCell, latCell, lonCell, isMin):
  
  cCloseList = []
  valList = []
  for tInd in timeInds:
    #use theta on 2 pvu
    output_data.print_xtime(data, tInd)
    
    epv_ht, theta_trop = compare.calc_height_theta_2PVU(data, tInd)
    #theta_trop = data.variables['temperature_500hPa'][tInd,:]

    #radius = 1000.e3
    radius = 500.e3
    cClose = findLocalExtremum(pt_ll, cClose, radius, cellsOnCell, nEdgesOnCell, latCell, lonCell,
                                 theta_trop, isMin)
    pt_ll[0] = latCell[cClose]; pt_ll[1] = lonCell[cClose]
    print "{0} {1} {2}".format(pt_ll[0], pt_ll[1], theta_trop[cClose])
    cCloseList.append(cClose)
    valList.append(theta_trop[cClose])
    
    #estimate cyclone region properties
    #tInd=0
    #epv_ht, theta_trop = compare.calc_height_theta_2PVU(data, tInd)
    rShoot = calc_objectRadius_shoot(cClose, cellsOnCell, nEdgesOnCell, latCell, lonCell, theta_trop, isMin)
    cycloneCells = gather_regionGrow_nbrValues(cClose, cellsOnCell, nEdgesOnCell, theta_trop, isMin)
    print "Radius shoot and cells in cyclone region: ", rShoot, cycloneCells
    
  return (cCloseList, pt_ll, valList)
# Example 2
def test_run_hgt():
  """Plot the mean MPAS 500 hPa geopotential-height anomaly vs. a long-term mean.

  Reads a monthly long-term-mean height climatology, time-averages MPAS
  500 hPa heights (converted to geopotential height) over every matched
  output file, differences the two on the lat/lon grid, and plots the
  file-averaged anomaly. All input paths are hard-coded to specific hosts.
  """
  #long term mean
  fname_mean = '/data01/forONR/hgt.mon.1981-2010.ltm.nc'
  lev_500 = 5 #level of 500hPa
  month = 8 #august — NOTE(review): assumes 0-based month index into the climatology; confirm (8 would be September if 0-based)

  dataMean = netCDF4.Dataset(fname_mean,'r')
  hgtMean = dataMean.variables['hgt'][month,lev_500,:,:] #lat,lon
  #(disabled) 3-month window average of the climatology:
  #hgtMean += dataMean.variables['hgt'][month-1,lev_500,:,:]
  #hgtMean += dataMean.variables['hgt'][month-2,lev_500,:,:]
  #hgtMean /= 3.
  latMean = dataMean.variables['lat'][:]*np.pi/180. #stored in degrees! convert to radians
  lonMean = dataMean.variables['lon'][:]*np.pi/180.
  dataMean.close()

  nlat = len(latMean)
  nlon = len(lonMean)
  #accumulates the per-file anomaly; averaged over files below
  var = np.zeros((nlat,nlon), dtype=float)

  #mpas files
  #fnames = []
  fnames = sorted(glob.glob('/arctic1/nick/cases/vduda/x4/x4.t.output.2006-08-*'))
  fnames.extend(sorted(glob.glob('/arctic1/nick/cases/vduda/x4.t.output.2006-08-15*')))
  #fnames = sorted(glob.glob('/arctic1/nick/cases/vduda/x4/*.output.2006-08*'))
  counter = 0 #number of files accumulated into var
  for iFile, fpath in enumerate(fnames):
    data = output_data.open_netcdf_data(fpath)
    nEdgesOnCell = data.variables['nEdgesOnCell'][:];
    cellsOnCell = data.variables['cellsOnCell'][:]-1; #file is 1-based; shift to 0-based indices
    latCell = data.variables['latCell'][:];
    lonCell = data.variables['lonCell'][:];
    nTimes = len(data.dimensions['Time'])
    #nTimes = 1 #3

    nCells = len(latCell)
    geop_mpas = np.zeros(nCells,dtype=float)

    for iTime in xrange(nTimes):
      #get average value for mesh over these times so can even anomaly different meshes together
      output_data.print_xtime(data, iTime)
      hgt_mpas = data.variables['height_500hPa'][iTime,:]
      #average here to avoid summing to large number over many times
      geop_mpas += mpas_hgt2geopotential(nCells, hgt_mpas, latCell)/nTimes

    #map each lat/lon grid point to an MPAS cell, then accumulate the difference field
    llMap = makeMap_ll2mpas(latMean, lonMean, latCell, lonCell, nEdgesOnCell, cellsOnCell)
    var += calc_diff_field(hgtMean, geop_mpas, llMap, nlat, nlon)
    #print var
    counter = counter+1

    data.close()

  #NOTE(review): divides by zero if no files matched the globs (counter == 0)
  var /= counter
  #var = hgtMean
  # add wrap-around point in longitude.
  var, lonMean = addcyclic(var, lonMean)
  #np.savez appends '.npz', so this actually writes 'tmp.dat.npz'
  np.savez('tmp.dat',var,latMean,lonMean)
  #meshgrid(latMean, lonMean) yields (nlon, nlat)-shaped arrays, hence the transpose of var below
  lats,lons = np.meshgrid(latMean, lonMean)
  lats *= 180./np.pi; lons *= 180./np.pi #back to degrees for plotting
  plot_anomaly_ll(lats, lons, np.transpose(var))