import glob
import datetime as dt

import numpy as np
import netCDF4
import matplotlib.pyplot as plt
import matplotlib.dates as mdates
from mpl_toolkits.basemap import Basemap, addcyclic

#local modules from this codebase (assumed importable from here)
import output_data
import conn
import vars
import cfsr
import cfsrStats
import mpasStats

def run_vortexLoc():
    #track a cyclone center through a sequence of MPAS output files
    #fnames = cfsrFiles2()
    #fnames = ['/arctic1/nick/cases/v1.0/x4/2week/tiedtke/x4.tiedtke.output.2006-07-24_12.00.00.nc',
    #          '/arctic1/nick/cases/v1.0/x4/2week/tiedtke/x4.tiedtke.output.2006-07-31_12.00.00.nc']
    #fnames = ['/arctic1/nick/cases/v1.0/x4/longer/x4.kf.output.2006-08-15_00.00.00.nc']
    #fnames = sorted(glob.glob('/arctic1/nick/cases/cfsr/output/x4.cfsr.output.2006-07-*'))[-1::-1] #track backwards
    #fnames = ['/arctic1/nick/cases/cfsr/output/x4.cfsr.output.2006-08-15_00.00.00.nc']
    fnames = sorted(glob.glob('/arctic1/nick/cases/vduda/x4/x4.t.output.2006-07-*'))
    #fnames = ['/arctic1/nick/cases/vduda/x7/x7.kf.output.2006-08-07_18.00.00.nc']
    #fnames = sorted(glob.glob('/arctic1/nick/cases/vduda/x4.t.output.2006-09-*'))

    pt_ll = np.empty(2, dtype='float')
    #pt_ll[0] = 90.*np.pi/180.; pt_ll[1] = 0.0
    pt_ll[0] = 87.05*np.pi/180.; pt_ll[1] = 18.86*np.pi/180. #for 20060801 start time
    #pt_ll[0] = 85.38*np.pi/180.; pt_ll[1] = 352.97*np.pi/180. #for 2006080312 start time
    #pt_ll[0] = 86.1*np.pi/180.; pt_ll[1] = 137.*np.pi/180. #for 2006080718
    #pt_ll[0] = 82.5*np.pi/180.; pt_ll[1] = 41.95*np.pi/180. #for 2006081500
    #pt_ll[0] = 71.55*np.pi/180.; pt_ll[1] = 153.1*np.pi/180. #for 2006072412
    #pt_ll[0] = 81.05*np.pi/180.; pt_ll[1] = 19.4*np.pi/180. #for 2006091900
    #pt_ll[0] = 1.41; pt_ll[1] = 0.27

    #initial guess of cClose. The guess just seeds the search, so any cell works.
    cClose = 71791
    #cClose = 131735 #x4 mesh for 20060801 start time
    #cClose = 102439 #x7 mesh for 20060801 start time
    c0IsMin = True

    #load in mesh data
    fpath = fnames[0]
    data = output_data.open_netcdf_data(fpath)
    nEdgesOnCell = data.variables['nEdgesOnCell'][:]
    cellsOnCell = data.variables['cellsOnCell'][:]-1
    latCell = data.variables['latCell'][:]
    lonCell = data.variables['lonCell'][:]
    data.close()

    cellList = []
    minValList = []
    for iFile, fpath in enumerate(fnames):
        data = output_data.open_netcdf_data(fpath)
        #timeInds = xrange(0,28,4)
        #timeInds = [0]
        nTimes = len(data.dimensions['Time'])
        #timeInds = xrange(0,nTimes,1)
        timeInds = xrange(26,nTimes,1)
        #timeInds = xrange(nTimes-1,-1,-1)

        #search for cyclone center
        cClose = conn.findOwner_horizNbrs_latLon(pt_ll, cClose, latCell, lonCell, nEdgesOnCell, cellsOnCell)
        cCloseList, pt_ll, minVals = driver_vortexLoc(data, timeInds, pt_ll, cClose, cellsOnCell,
                                                      nEdgesOnCell, latCell, lonCell, c0IsMin)
        cClose = cCloseList[-1]
        cellList.extend(cCloseList)
        minValList.extend(minVals)
        data.close()

    print cellList
    print minValList
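
#For reference, a minimal sketch of the kind of local neighbor-walk search that
#conn.findOwner_horizNbrs_latLon performs (an illustration only, not the actual
#implementation in conn): starting from the seed cell, hop to whichever horizontal
#neighbor is closer to the target point until no neighbor improves.
def _sketch_findOwner_walk(pt_ll, cSeed, latCell, lonCell, nEdgesOnCell, cellsOnCell):
    def distSq(c):
        #crude lat/lon metric for the sketch; a real search would use
        #great-circle distance and handle the longitude wraparound
        return (latCell[c]-pt_ll[0])**2 + (lonCell[c]-pt_ll[1])**2
    c = cSeed
    while True:
        nbrs = cellsOnCell[c, 0:nEdgesOnCell[c]] #0-based neighbor indices
        cBest = min(nbrs, key=distSq)
        if distSq(cBest) >= distSq(c):
            return c #no neighbor is closer, so c "owns" the point
        c = cBest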
def example(ncNameFile):
    '''
    The input file lists each output file on its own line.
    Paraview has a hard time animating the 0-time-step .nc files (maybe an issue
    with the MPAS reader?). We can get around it by creating vtk files of the
    fields of interest named fnameN.vtk, where N is an integer that indicates time.
    Store the names of the files we want, in order, in a file.
    '''
    #ncNameFile = 'ncNames.txt'
    fp = open(ncNameFile, 'r')

    #base name of the output files
    outNameBase = 'june'

    i = 0
    for line in fp:
        ncfname = line.rstrip('\n') #strip the trailing newline
        if (i == 0):
            #reference field for orientation
            vtkfname = outNameBase+'_ref'+'.vtk'
            data = output_data.open_netcdf_data(ncfname)

            #header and mesh info
            fvtk = output_data.write_vtk_header_polydata(vtkfname, ncfname)
            nNodes = output_data.write_vtk_xyzNodes(fvtk, data)
            nCells = output_data.write_vtk_polygons(fvtk, data)
            fvtk.write('\nCELL_DATA '+str(nCells)+'\n')
            output_data.write_vtk_staticGeoFields(fvtk, data, nCells)
            fvtk.close()
            data.close()

        vtkfname = outNameBase+str(i)+'.vtk'
        data = output_data.open_netcdf_data(ncfname)

        #header and mesh info
        fvtk = output_data.write_vtk_header_polydata(vtkfname, ncfname)
        nNodes = output_data.write_vtk_xyzNodes(fvtk, data)
        nCells = output_data.write_vtk_polygons(fvtk, data)

        #write cell data
        fvtk.write('\nCELL_DATA '+str(nCells)+'\n')
        time = 0
        vLevel = 18
        output_data.write_vtk_cellCenterVelocity(fvtk, data, time, vLevel, nCells)
        output_data.write_vtk_var_timeLevelCells(fvtk, 'pv_cell', data, vLevel, time, nCells)
        i = i+1

        #close files
        data.close()
        fvtk.close()
    fp.close()
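
#A small, hypothetical helper for building the name file the converter above reads:
#glob the output files so the entries are already sorted in time, one path per line.
#(The glob pattern is borrowed from the other drivers in this module.)
def make_ncNameFile(ncNameFile='ncNames.txt'):
    fnames = sorted(glob.glob('/arctic1/nick/cases/vduda/x4/x4.t.output.2006-07-*'))
    fp = open(ncNameFile, 'w')
    for fname in fnames:
        fp.write(fname+'\n')
    fp.close()
    #then run the converter above on ncNameFile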
def example():
    # file properties
    ncfname = "/arctic1/nick/cases/cfsr/output.2006-08-07_12.00.00.nc"
    # ncfname = '/home/nickszap/research/mpas/output.2010-10-23_00:00:00.nc' #input file
    vtkfname = "plotTest.vtk"  # output file

    data = output_data.open_netcdf_data(ncfname)

    # open the output vtk file and write header. -------------------
    fvtk = output_data.write_vtk_header_polydata(vtkfname, ncfname)

    # write nodes and cells
    nNodes = output_data.write_vtk_xyzNodes(fvtk, data)
    nCells = output_data.write_vtk_polygons(fvtk, data)
    nLevels = len(data.dimensions["nVertLevels"])

    # write some cell data --------------------
    fvtk.write("\nCELL_DATA " + str(nCells) + "\n")
    output_data.write_vtk_staticGeoFields(fvtk, data, nCells)

    # time dependent stuff goes in different files
    time = 0
    output_data.write_vtk_pressureHeights(fvtk, data, nCells, time, nLevels, 50000)  # 500mb = 50,000 Pa

    # write some node data

    # close the .nc and vtk files
    data.close()
    fvtk.close()
def driver_tracers():
    '''
    Given an .nc (MPAS NETCDF) file and an initial location for a tracer,
    advect that tracer with the wind velocity through all time steps.
    '''
    #file properties
    ncfname = '/home/nickszap/research/mpas/output.2010-10-23_00:00:00.nc' #input file
    data = output_data.open_netcdf_data(ncfname)

    tSteps = len(data.dimensions['Time'])
    vertLevels = len(data.dimensions['nVertLevels'])

    r = np.empty(3)
    r[0] = 50.; r[1] = 50.; r[2] = 5000.
    dt = 6.*60.*60. #could parse xtime in the file for the actual timestep

    #loop through time steps
    vel = np.empty(3)
    for i in range(tSteps):
        (hCell, vCell) = findOwner_coord(r, data, vertLevels)
        vel[0] = data.variables['uReconstructX'][i,hCell,vCell]
        vel[1] = data.variables['uReconstructY'][i,hCell,vCell]
        vel[2] = data.variables['uReconstructZ'][i,hCell,vCell]
        r = integrate(r, vel, dt)
        print r
    data.close()
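
#integrate() is referenced above but not defined in this section. A minimal
#forward-Euler sketch of what it presumably does (one explicit step of dr/dt = vel
#with the velocity frozen over the step); the author's integrator may be higher order.
def integrate(r, vel, dt):
    #advance the tracer position by one step of size dt
    return r + vel*dt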
def driver_ncGeo2vtk(ncfname):
    '''
    Given an .nc (MPAS NETCDF) file, output the surface geography data in classic
    vtk format. The MPAS NetCDF reader in Paraview loads only the "critical"
    netcdf variables, i.e. those with time and vertLevels dimensions. Rather than
    edit that reader, we can create a file of the vars we care about on the SCVT
    mesh rather than the dual. I think they have to use the dual for the volume
    mesh since the elements need to be of a supported VTK type (e.g. prism, hex, ...).
    '''
    #file properties
    #ncfname = '/home/nickszap/research/mpas/output.2010-10-23_00:00:00.nc' #input file
    vtkfname = 'geoTest.vtk' #output file

    data = output_data.open_netcdf_data(ncfname)

    #open the output vtk file and write header.
    fvtk = output_data.write_vtk_header_polydata(vtkfname, ncfname)

    #write nodes and cells
    nNodes = output_data.write_vtk_xyzNodes(fvtk, data)
    nCells = output_data.write_vtk_polygons(fvtk, data)
    vLevels = len(data.dimensions['nVertLevels'])
    nTimes = len(data.dimensions['Time'])

    #write some geographic cell data
    fvtk.write('\nCELL_DATA '+str(nCells)+'\n')
    output_data.write_vtk_staticGeoFields(fvtk, data, nCells)
    output_data.write_vtk_timeGeoFields(fvtk, data, nCells, nTimes)

    #write node data (none for geog)

    #close the .nc and vtk files
    data.close()
    fvtk.close()
def example_2pvu():
    # fpath = '/arctic1/nick/cases/v1.0/x4/august/kf/v1.1/x4.kf.output.2006-08-01_00.00.00.nc'
    fpath = "/arctic1/nick/cases/v1.0/x4/august/tiedtke/v1.1/x4.t.output.2006-08-08_00.00.00.nc"
    # fpath = '/arctic1/nick/cases/v1.0/x4/august/kf/v1.1/x4.kf.output.2006-08-08_00.00.00.nc'
    # fnames = searchFiles()
    # for iFile, fpath in enumerate(fnames):
    data = output_data.open_netcdf_data(fpath)

    for timeInd in xrange(0, 28, 4):
        # for timeInd in [0]:
        # open the output vtk file and write header, nodes, and cells
        vtkfname = "x4_t_2006-08-01_1day." + str(28 + timeInd) + ".vtk"
        # vtkfname = 'x4_cfsr_2006-07-25_1day.'+str(iFile)+'.vtk'
        fvtk = output_data.write_vtk_header_polydata(vtkfname, fpath)
        # fvtk = open(vtkfname,'w'); nCells = 163842
        nNodes = output_data.write_vtk_xyzNodes(fvtk, data)
        nCells = output_data.write_vtk_polygons(fvtk, data)

        # write some cell data
        fvtk.write("\nCELL_DATA " + str(nCells) + "\n")

        # calc values
        # timeInd = 0
        epv_ht, theta_trop = calc_height_theta_2PVU(data, timeInd)
        output_data.write_levelData_float("ht_2pvu", fvtk, epv_ht, nCells)
        output_data.write_levelData_float("theta_2pvu", fvtk, theta_trop, nCells)

        fvtk.close()
    data.close()
def test_run_hgt():
    #long term mean
    fname_mean = '/data01/forONR/hgt.mon.1981-2010.ltm.nc'
    lev_500 = 5 #level index of 500hPa
    month = 8 #august
    dataMean = netCDF4.Dataset(fname_mean, 'r')
    hgtMean = dataMean.variables['hgt'][month,lev_500,:,:] #lat,lon
    #hgtMean += dataMean.variables['hgt'][month-1,lev_500,:,:]
    #hgtMean += dataMean.variables['hgt'][month-2,lev_500,:,:]
    #hgtMean /= 3.
    latMean = dataMean.variables['lat'][:]*np.pi/180. #stored in degrees!
    lonMean = dataMean.variables['lon'][:]*np.pi/180.
    dataMean.close()

    nlat = len(latMean)
    nlon = len(lonMean)
    var = np.zeros((nlat,nlon), dtype=float)

    #mpas files
    #fnames = []
    fnames = sorted(glob.glob('/arctic1/nick/cases/vduda/x4/x4.t.output.2006-08-*'))
    fnames.extend(sorted(glob.glob('/arctic1/nick/cases/vduda/x4.t.output.2006-08-15*')))
    #fnames = sorted(glob.glob('/arctic1/nick/cases/vduda/x4/*.output.2006-08*'))

    counter = 0
    for iFile, fpath in enumerate(fnames):
        data = output_data.open_netcdf_data(fpath)
        nEdgesOnCell = data.variables['nEdgesOnCell'][:]
        cellsOnCell = data.variables['cellsOnCell'][:]-1
        latCell = data.variables['latCell'][:]
        lonCell = data.variables['lonCell'][:]
        nTimes = len(data.dimensions['Time'])
        #nTimes = 1 #3
        nCells = len(latCell)

        geop_mpas = np.zeros(nCells, dtype=float)
        for iTime in xrange(nTimes):
            #average the field over these times so anomalies from different meshes
            #can be combined
            output_data.print_xtime(data, iTime)
            hgt_mpas = data.variables['height_500hPa'][iTime,:]
            #average here to avoid summing to a large number over many times
            geop_mpas += mpas_hgt2geopotential(nCells, hgt_mpas, latCell)/nTimes

        llMap = makeMap_ll2mpas(latMean, lonMean, latCell, lonCell, nEdgesOnCell, cellsOnCell)
        var += calc_diff_field(hgtMean, geop_mpas, llMap, nlat, nlon)
        #print var
        counter = counter+1
        data.close()
    var /= counter
    #var = hgtMean

    #add wrap-around point in longitude.
    var, lonMean = addcyclic(var, lonMean)
    np.savez('tmp.dat', var, latMean, lonMean)

    lats, lons = np.meshgrid(latMean, lonMean)
    lats *= 180./np.pi; lons *= 180./np.pi
    plot_anomaly_ll(lats, lons, np.transpose(var))
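
#mpas_hgt2geopotential() is referenced above but not defined in this section. A
#plausible sketch under common approximations: convert geometric height to
#geopotential height using latitude-dependent surface gravity (WGS-84 Somigliana
#formula) and the analytic integral of inverse-square gravity decay with height,
#Z = (g(lat)/g0) * r*h/(r+h). The constants and exact formula in the real routine
#may differ.
def _sketch_hgt2geopotential(nCells, hgt, latCell):
    g0 = 9.80665 #m/s^2, standard gravity
    rEarth = 6371.e3 #m, mean radius
    #normal gravity at the surface vs latitude (WGS-84 values)
    sin2 = np.sin(latCell)**2
    g_lat = 9.7803253359*(1.+0.00193185265241*sin2)/np.sqrt(1.-0.00669437999013*sin2)
    return (g_lat/g0)*rEarth*hgt/(rEarth+hgt)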
def example():
    #file properties
    ana_fname = '/home/nickszap/research/mpas/output.2010-10-23_00:00:00.nc' #analysis file
    sim_fname = '/home/nickszap/research/mpas/output.2010-10-23_00:00:00.nc' #simulation file
    vtkfname = 'fieldDiff.vtk' #output file

    #read in reference and simulation files
    anaData = output_data.open_netcdf_data(ana_fname)
    simData = output_data.open_netcdf_data(sim_fname)

    #assuming same mesh between ana and sim
    nTimes = len(anaData.dimensions['Time'])
    nCells = len(anaData.dimensions['nCells'])
    nVert = len(anaData.dimensions['nVertLevels'])

    #I don't know how to write a 3d or 4d field into the .nc format, so we'll have
    #to hold off on that.
    #Take differences of individual fields (3d, height levels, pressure levels, ...)
    #and generate output (horizontal and vertical surfaces, field statistics, ...).
    tSim = 0 #index into time steps of simulation corresponding to analysis
    tAna = 0
    fieldKeys = ['pv_cell','theta','rho']
    for k in fieldKeys:
        #index is [time,cell,vLevel]
        varAna = anaData.variables[k][tAna,:,:]
        varSim = simData.variables[k][tSim,:,:]
        diff = varSim-varAna

        avg = np.sum(diff)/(nCells*nVert)
        print 'Variable %s has sim-ana mean= %s for simInd %s\n' %(k, avg, tSim)

        rms = np.sum(diff*diff)/(nCells*nVert) #square and mean
        rms = np.sqrt(rms) #root
        print 'Variable %s has sim-ana RMS= %s for simInd %s\n' %(k, rms, tSim)

    anaData.close()
    simData.close()
def derivedSfcs(ncfname, vtkfname):
    #write some derived surfaces to file
    data = output_data.open_netcdf_data(ncfname)

    #header info
    fvtk = output_data.write_vtk_header_polydata(vtkfname, ncfname)
    nNodes = output_data.write_vtk_xyzNodes(fvtk, data)
    nCells = output_data.write_vtk_polygons(fvtk, data)
    nLevels = len(data.dimensions['nVertLevels'])

    #cell data
    fvtk.write('\nCELL_DATA '+str(nCells)+'\n')

    #geo for reference
    output_data.write_vtk_staticGeoFields(fvtk, data, nCells)
    time = 0

    #500 mb
    output_data.write_vtk_pressureHeights(fvtk, data, nCells, time, nLevels, 50000.)

    #theta on dynamic tropopause
    pv = np.empty((nCells,nLevels), dtype=float)
    for hcell in range(nCells):
        for l in range(nLevels):
            pv[hcell,l] = vars.calc_ertelPV(data, 'theta', time, hcell, l, nLevels)

    pvuVal = 2.
    thetaVal = np.empty(nCells)
    for hcell in range(nCells):
        (l,dl) = output_data.calcIndexOfValue(pvuVal, pv[hcell,:], nLevels)
        thetaVal[hcell] = output_data.calcValueOfIndex(l, dl, data.variables['theta'][time,hcell,:])
    output_data.write_levelData_float('theta_pv', fvtk, thetaVal, nCells)

    #slp
    slp = np.empty(nCells)
    for hcell in range(nCells):
        slp[hcell] = vars.calc_slp(data, hcell, nLevels, time)
    output_data.write_levelData_float('slp', fvtk, slp, nCells)

    #close the files
    fvtk.close()
    data.close()
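
#calcIndexOfValue/calcValueOfIndex split a 1d interpolation in two: first find the
#bracketing level l and fractional offset dl where a column crosses the target
#value, then evaluate any other column variable at that fractional index. A minimal
#sketch of the idea (the output_data versions may handle masks, non-monotonic
#columns, and edge cases differently):
def _sketch_indexOfValue(val0, vals, nLevels):
    #return (l, dl) such that vals crosses val0 between levels l and l+1
    for l in xrange(nLevels-1):
        if (vals[l]-val0)*(vals[l+1]-val0) <= 0.: #sign change brackets the value
            dl = (val0-vals[l])/(vals[l+1]-vals[l])
            return (l, dl)
    return (-1, 0.) #value not crossed in this column

def _sketch_valueOfIndex(l, dl, vals):
    if (l < 0):
        return np.nan #no crossing was found
    return vals[l] + dl*(vals[l+1]-vals[l]) #linear interpolation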
def example_vars():
    # for every 6 hours of the IC CFSR data,
    # write a vtk file with slp, 500mb heights, and theta on the 2 pvu surface
    t0 = dt.datetime(2006, 6, 1, 0)
    # tf = dt.datetime(2006,9,29,18)
    tf = dt.datetime(2006, 6, 1, 13)
    h6 = dt.timedelta(hours=6)

    cfsrPath = "/arctic1/nick/cases/cfsr/"
    vtkBase = "cfsrIC."

    i = 1
    t = t0
    while t <= tf:  # t is incremented after this check, at the bottom of the loop
        # open the .nc data file for this datetime
        tString = cfsr.form_cfsrTimeString(t)
        ncName = "vert_sfc." + tString + ".nc"  # initial condition netcdf file
        ncName = cfsrPath + ncName
        data = output_data.open_netcdf_data(ncName)

        # open the output vtk file and write header. -------------------
        vtkfname = vtkBase + str(i - 1) + ".vtk"
        fvtk = output_data.write_vtk_header_polydata(vtkfname, ncName)

        # write nodes and cells
        nNodes = output_data.write_vtk_xyzNodes(fvtk, data)
        nCells = output_data.write_vtk_polygons(fvtk, data)
        nLevels = len(data.dimensions["nVertLevels"])

        # write some cell data --------------------
        fvtk.write("\nCELL_DATA " + str(nCells) + "\n")
        timeInd = 0

        data.close()
        fvtk.close()

        # increment day
        t = t0 + i * h6
        i = i + 1
def plotPoints_example():
    fpath = '/arctic1/nick/cases/cfsr/output/x4.cfsr.output.2006-08-01_00.00.00.nc'
    data = output_data.open_netcdf_data(fpath)
    latCell = data.variables['latCell'][:]
    lonCell = data.variables['lonCell'][:]
    data.close()

    plt.figure()
    map = Basemap(projection='ortho', lon_0=0, lat_0=90, resolution='l')
    '''
    cellInds_cfsr = [56355, 28662, 11263, 131726, 84718, 84714, 46582, 105453, 114078, 25622,
                     37141, 6428, 50655, 160926, 144268, 156040, 98770, 42664, 137842, 24214,
                     151512, 2568, 96729, 11246, 131490, 19940, 163620, 119379, 136917, 104557,
                     36439, 150649, 21865, 30589, 119446, 71817, 10022, 131487, 84705, 156231,
                     84691, 5142, 151475, 79538, 28657, 79527, 162909, 151551, 160897, 119355,
                     151474, 114095, 151473, 41958, 131519, 131551, 131556, 3564]
    cellInds_t = [56355, 14136, 62441, 105460, 62444, 131720, 84715, 84718, 46589, 58118,
                  103293, 119276, 119271, 38013, 131725, 20012, 131711, 96797, 1630, 20010,
                  79513, 79515, 151526, 131702, 41980, 71822, 144522, 137231, 14763]
    cellInds_kf = [56355, 14136, 62441, 105460, 62444, 50736, 84714, 84713, 46589, 58118,
                   10538, 119277, 15864, 84711, 131725, 20012, 50734, 50738, 131727, 119266,
                   38009, 71828, 10022, 33361, 8554, 137254, 66155, 4203, 137296]
    '''
    cellInds_cfsr = cfsrStats.x4_cfsr_july_cells
    lonp = lonCell[cellInds_cfsr]*180./np.pi
    latp = latCell[cellInds_cfsr]*180./np.pi
    xcfsr, ycfsr = map(lonp, latp)

    #cells_guess = [40419,26537,10552,1634,7338,20030,437,2896,11920,22053,19965,19961,28617]
    cells_guess = mpasStats.x4_t_0724_cells
    cells_f500 = [138657, 149403, 38051, 28687, 131782, 131766, 71977, 58328, 22007, 11933, 14811, 72391, 103406]
    cells_f1000 = [138657, 149403, 38051, 28687, 131782, 131766, 71977, 58328, 33409, 149420, 5194, 93157, 16019]

    lonp = lonCell[cells_guess]*180./np.pi
    latp = latCell[cells_guess]*180./np.pi
    xcfsr_g, ycfsr_g = map(lonp, latp)

    lonp = lonCell[cells_f500]*180./np.pi
    latp = latCell[cells_f500]*180./np.pi
    xcfsr_f500, ycfsr_f500 = map(lonp, latp)

    lonp = lonCell[cells_f1000]*180./np.pi
    latp = latCell[cells_f1000]*180./np.pi
    xcfsr_f1000, ycfsr_f1000 = map(lonp, latp)
    '''
    lonp = lonCell[cellInds_t]*180./np.pi
    latp = latCell[cellInds_t]*180./np.pi
    xtiedtke,ytiedtke = map(lonp, latp)

    lonp = lonCell[cellInds_kf]*180./np.pi
    latp = latCell[cellInds_kf]*180./np.pi
    xkf,ykf = map(lonp, latp)
    '''
    map.drawcoastlines()
    map.drawmapboundary()
    #map.scatter(x,y, picker=5)
    #map.scatter(x,y)
    map.plot(xcfsr, ycfsr, 'b*--', label='back')
    map.plot(xcfsr_g, ycfsr_g, 'go-', label='07-24_12') #label='user')
    map.plot(xcfsr_f500, ycfsr_f500, 'gs:', label='f,r500km')
    map.plot(xcfsr_f1000, ycfsr_f1000, 'r*-.', label='f,r1000km')
    plt.legend()
    '''
    legend = plt.legend('back', 'user', 'forward')
    for label in legend.get_texts():
        label.set_fontsize('medium')
    '''
    #map.plot(xcfsr,ycfsr,'b*--', xtiedtke,ytiedtke, 'go-.', xkf,ykf, 'rs:') #, markersize=10)
    #plt.legend('cfsr', 'tiedtke', 'kf')
    plt.show()
def example_mpas_horiz_tri():
    #plot triangulation on a map projection
    #ncfname = '/home/nickszap/research/mpas/output.2010-10-23_00:00:00.nc' #input file
    ncfname = '/arctic1/nick/cases/163842/r2614/output.163842.2006-07-08_00.00.00.nc'
    data = output_data.open_netcdf_data(ncfname)

    nCells = len(data.dimensions['nCells'])
    nVertices = len(data.dimensions['nVertices'])
    nLevels = len(data.dimensions['nVertLevels'])
    lat = data.variables['latCell'][:]*180./np.pi #in degrees
    lon = data.variables['lonCell'][:]*180./np.pi

    level = 0; time = 15
    var = data.variables['theta'][time,:,level]
    minVar = np.amin(var); maxVar = np.amax(var)
    '''
    #we can triangulate the map projection coords.
    #Although the needed connectivity information is in the mesh,
    #I don't feel like accumulating the edges into triangles, so we'll compute a
    #convex hull that we "know" will be the same as the dual of the Voronoi diagram.
    xc = data.variables['xCell'][:]
    yc = data.variables['yCell'][:]
    zc = data.variables['zCell'][:]
    coords = np.array([xc,yc,zc]).transpose()
    triang = Delaunay(coords)
    '''
    '''
    #create the triangulation data structure since I've had issues with scipy's delaunay and such.
    #We can't triangulate all of the projected x,y since opposing sides of the earth will overlay each other
    tris = recoverTriangles(data, nCells)
    print "Triangulation finished\n"

    nContours = 50
    #matplotlib.pyplot.tricontour
    #plt.tripcolor(x, y, tris, facecolors=zfaces, edgecolors='k')
    plt.tripcolor(lon, lat, tris, var)
    plt.colorbar()
    plt.show()
    '''
    #map = Basemap(projection='ortho',lon_0=-105,lat_0=40, resolution='l')
    map = Basemap(projection='ortho', lon_0=-100, lat_0=60, resolution='l')
    x, y = map(lon, lat)

    plt.figure(1)
    map.drawcoastlines()
    map.drawmapboundary()
    map.pcolor(x, y, var, tri=True, shading='flat', edgecolors='none', cmap=plt.cm.jet) #cmap=plt.cm.hot_r) #,vmin=100,vmax=1000)
    #map.contour(x,y,var,10,tri=True,shading='flat',edgecolors='none',cmap=plt.cm.jet)
    #map.contourf(x,y,var,10,tri=True,shading='flat',edgecolors='none',cmap=plt.cm.jet)
    plt.colorbar()

    if (0):
        plt.figure(3)
        map.drawcoastlines()
        map.drawmapboundary()
        map.contour(x, y, var, 10, tri=True, shading='flat', edgecolors='none', cmap=plt.cm.jet)
        plt.colorbar()
    plt.show()
    '''
    map = Basemap(projection='ortho',lat_0=45,lon_0=-100,resolution='l')
    x, y = map(lon,lat)
    map.drawcoastlines(linewidth=0.25)
    map.drawcountries(linewidth=0.25)
    #cs = map.contour(lon,lat, 50, nContours, latlon=True, tri=True)
    #cs = map.contour(x,y, var, nContours, latlon=False, tri=True, triangulation=triang)
    #cs = map.pcolor(x,y, var, tri=True,triangulation=triang)
    plt.colorbar(cs)
    plt.title('Test plot')
    plt.show()
    '''
    data.close()
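
#The connectivity that the commented-out blocks above want is actually already in
#the mesh: MPAS files store cellsOnVertex, the (up to) 3 cells around each dual-mesh
#vertex, which are exactly the Delaunay triangles dual to the Voronoi cells. A
#sketch, assuming a standard MPAS output file:
def _sketch_recoverTriangles(data):
    tris = data.variables['cellsOnVertex'][:]-1 #0-based cell indices, shape (nVertices,3)
    #drop triangles that reference a missing cell (index < 0 on mesh boundaries)
    return tris[np.all(tris >= 0, axis=1), :]
#These triangles could feed matplotlib.tri.Triangulation(lon, lat, tris); the same
#caveat applies that projected coords from opposite sides of the earth will overlay.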
# analytic test function of position with a version switch; the signature below is
# assumed, since only the body of this fragment survives.
def calc_testFunction(xyz, fVersion):
    # xyz gets errors of 1.6e-5.
    if fVersion == 3:
        fac = 1.0e6
        xyz = xyz / fac
        val = xyz[0] * xyz[1] * xyz[2]
        fac = fac * fac * fac
        deriv = (xyz[1] * xyz[2] / fac, xyz[0] * xyz[2] / fac, xyz[1] * xyz[0] / fac)
        return (val, deriv)

    # sinusoids: rms errors of (dval/dxyz): 0.00063592 0.00068587 0.000641 (refLen=300km)
    #            8.51791496e-05 8.49776813e-05 6.93804044e-05 (refLen=1200km)
    if fVersion == 4:
        refLen = 60.0e3 * 20.0
        scale = 2.0 * np.pi / refLen
        val = np.sin(scale * xyz[0]) + np.cos(scale * xyz[1]) + 3.0 * np.sin(scale * xyz[2])
        deriv = (scale * np.cos(scale * xyz[0]),
                 -scale * np.sin(scale * xyz[1]),
                 scale * 3.0 * np.cos(scale * xyz[2]))
        return (val, deriv)


if __name__ == "__main__":
    # test out the least squares
    fpath = "/arctic1/nick/cases/40962/output.40962.2006-06-01_00.00.00.nc"
    data = output_data.open_netcdf_data(fpath)

    nLevels = len(data.dimensions["nVertLevels"])
    nCells = len(data.dimensions["nCells"])

    nCellsRMS = 20
    nLevelsRMS = 8
    testLSTSQ(data, nCellsRMS, nLevelsRMS)
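
#testLSTSQ is not shown in this section. For context, a minimal sketch of the kind
#of least-squares gradient it presumably exercises against the analytic derivatives
#above: fit val ~ val0 + g.(x-x0) over a cell and its neighbors, solving the
#overdetermined system with numpy. Names and the exact stencil are assumptions.
def _sketch_lstsq_gradient(xyzCell, valCell, c0, nbrs):
    #rows are position offsets of the neighbors from cell c0; rhs is value differences
    A = xyzCell[nbrs,:] - xyzCell[c0,:]
    b = valCell[nbrs] - valCell[c0]
    g, res, rank, sv = np.linalg.lstsq(A, b)
    return g #approximate (d/dx, d/dy, d/dz) at cell c0
#e.g. nbrs = cellsOnCell[c0, 0:nEdgesOnCell[c0]] for the horizontal stencil.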
def plotTracks_dist_time():
    #how to show longitude is a vexing question since it wraps -180,180 or 360,0...
    rEarth = 6371. #km

    fpath = '/arctic1/nick/cases/cfsr/output/x4.cfsr.output.2006-08-01_00.00.00.nc'
    data = output_data.open_netcdf_data(fpath)
    latCell_x4 = data.variables['latCell'][:]
    lonCell_x4 = data.variables['lonCell'][:]
    data.close()

    fpath = '/arctic1/nick/cases/vduda/x7/x7.kf.output.2006-08-07_18.00.00.nc'
    data = output_data.open_netcdf_data(fpath)
    latCell_x7 = data.variables['latCell'][:]
    lonCell_x7 = data.variables['lonCell'][:]
    data.close()

    #cfsr
    cells_cfsr = cfsrStats.x4_cfsr_cell
    lat_cfsr = latCell_x4[cells_cfsr]
    lon_cfsr = lonCell_x4[cells_cfsr]
    tBase = dt.datetime(2006,8,1,0)
    nSteps = len(cells_cfsr)
    dateList_cfsr = [tBase + dt.timedelta(hours=12*x) for x in range(0,nSteps)]

    plt.figure()
    plt.gca().xaxis.set_major_formatter(mdates.DateFormatter('%m/%d'))
    plt.gca().xaxis.set_major_locator(mdates.DayLocator(interval=4))
    plt.gca().xaxis.set_minor_locator(mdates.DayLocator(interval=1))

    #2006-08-01
    cells_x = mpasStats.x4_t_cells
    lat_x = latCell_x4[cells_x]
    lon_x = lonCell_x4[cells_x]
    tBase = dt.datetime(2006,8,1,0)
    nSteps = len(cells_x)
    dateList_x = [tBase + dt.timedelta(hours=6*x) for x in range(0,nSteps)]
    ind_cfsr, ind_x, dates_common = get_commonIndices_time(dateList_cfsr, dateList_x)
    vals = calc_distSphere_multiple(rEarth, lat_x[ind_x], lon_x[ind_x], lat_cfsr[ind_cfsr], lon_cfsr[ind_cfsr])
    plt.plot(dates_common, vals, 'b', label='x4_t')

    cells_x = mpasStats.x4_kf_cells
    lat_x = latCell_x4[cells_x]
    lon_x = lonCell_x4[cells_x]
    tBase = dt.datetime(2006,8,1,0)
    nSteps = len(cells_x)
    dateList_x = [tBase + dt.timedelta(hours=6*x) for x in range(0,nSteps)]
    ind_cfsr, ind_x, dates_common = get_commonIndices_time(dateList_cfsr, dateList_x)
    vals = calc_distSphere_multiple(rEarth, lat_x[ind_x], lon_x[ind_x], lat_cfsr[ind_cfsr], lon_cfsr[ind_cfsr])
    plt.plot(dates_common, vals, 'g', label='x4_kf')

    cells_x = mpasStats.x7_t_cells
    lat_x = latCell_x7[cells_x]
    lon_x = lonCell_x7[cells_x]
    tBase = dt.datetime(2006,8,1,0)
    nSteps = len(cells_x)
    dateList_x = [tBase + dt.timedelta(hours=6*x) for x in range(0,nSteps)]
    ind_cfsr, ind_x, dates_common = get_commonIndices_time(dateList_cfsr, dateList_x)
    vals = calc_distSphere_multiple(rEarth, lat_x[ind_x], lon_x[ind_x], lat_cfsr[ind_cfsr], lon_cfsr[ind_cfsr])
    plt.plot(dates_common, vals, 'r.-', label='x7_t')

    cells_x = mpasStats.x7_kf_cells
    lat_x = latCell_x7[cells_x]
    lon_x = lonCell_x7[cells_x]
    tBase = dt.datetime(2006,8,1,0)
    nSteps = len(cells_x)
    dateList_x = [tBase + dt.timedelta(hours=6*x) for x in range(0,nSteps)]
    ind_cfsr, ind_x, dates_common = get_commonIndices_time(dateList_cfsr, dateList_x)
    vals = calc_distSphere_multiple(rEarth, lat_x[ind_x], lon_x[ind_x], lat_cfsr[ind_cfsr], lon_cfsr[ind_cfsr])
    plt.plot(dates_common, vals, 'k--', label='x7_kf')

    #x7 2006-08-07_18
    cells_x = mpasStats.x7_kf_0807_cells
    lat_x = latCell_x7[cells_x]
    lon_x = lonCell_x7[cells_x]
    tBase = dt.datetime(2006,8,7,18)
    nSteps = len(cells_x)
    dateList_x = [tBase + dt.timedelta(hours=6*x) for x in range(0,nSteps)]
    ind_cfsr, ind_x, dates_common = get_commonIndices_time(dateList_cfsr, dateList_x)
    vals = calc_distSphere_multiple(rEarth, lat_x[ind_x], lon_x[ind_x], lat_cfsr[ind_cfsr], lon_cfsr[ind_cfsr])
    plt.plot(dates_common, vals, 'k--')

    #x7 2006-08-03_12
    cells_x = mpasStats.x7_t_0803_cells
    lat_x = latCell_x7[cells_x]
    lon_x = lonCell_x7[cells_x]
    tBase = dt.datetime(2006,8,3,12)
    nSteps = len(cells_x)
    dateList_x = [tBase + dt.timedelta(hours=6*x) for x in range(0,nSteps)]
    ind_cfsr, ind_x, dates_common = get_commonIndices_time(dateList_cfsr, dateList_x)
    vals = calc_distSphere_multiple(rEarth, lat_x[ind_x], lon_x[ind_x], lat_cfsr[ind_cfsr], lon_cfsr[ind_cfsr])
    plt.plot(dates_common, vals, 'r.-')

    #x4_t 2006081500
    cells_x = mpasStats.x4_t_0815_cells
    lat_x = latCell_x4[cells_x]
    lon_x = lonCell_x4[cells_x]
    tBase = dt.datetime(2006,8,15,0)
    nSteps = len(cells_x)
    dateList_x = [tBase + dt.timedelta(hours=6*x) for x in range(0,nSteps)]
    ind_cfsr, ind_x, dates_common = get_commonIndices_time(dateList_cfsr, dateList_x)
    vals = calc_distSphere_multiple(rEarth, lat_x[ind_x], lon_x[ind_x], lat_cfsr[ind_cfsr], lon_cfsr[ind_cfsr])
    plt.plot(dates_common, vals, 'b')

    #all cfsr ------------------------
    #cells
    cells_cfsr = np.array(cfsrStats.x4_cfsr_july_cells+cfsrStats.x4_cfsr_cell+cfsrStats.x4_cfsr_0919_cells)
    lat_cfsr = latCell_x4[cells_cfsr]
    lon_cfsr = lonCell_x4[cells_cfsr]
    tBase = dt.datetime(2006,7,24,12)
    nSteps = len(cells_cfsr)-len(cfsrStats.x4_cfsr_0919_cells)
    dateList_c_all = [tBase + dt.timedelta(hours=12*x) for x in range(0,nSteps)]
    tBase = dt.datetime(2006,9,19,0)
    nSteps = len(cfsrStats.x4_cfsr_0919_cells)
    dateList_cfsr = dateList_c_all + [tBase + dt.timedelta(hours=12*x) for x in range(0,nSteps)]

    #x4_t 2006072412
    cells_x = mpasStats.x4_t_0724_cells
    lat_x = latCell_x4[cells_x]
    lon_x = lonCell_x4[cells_x]
    nSteps = len(cells_x)
    tBase = dt.datetime(2006,7,24,12)
    dateList_x = [tBase + dt.timedelta(hours=6*x) for x in range(0,nSteps)]
    ind_cfsr, ind_x, dates_common = get_commonIndices_time(dateList_cfsr, dateList_x)
    vals = calc_distSphere_multiple(rEarth, lat_x[ind_x], lon_x[ind_x], lat_cfsr[ind_cfsr], lon_cfsr[ind_cfsr])
    plt.plot(dates_common, vals, 'b')

    #x4_t_0919_vals
    cells_x = mpasStats.x4_t_0919_cells
    lat_x = latCell_x4[cells_x]
    lon_x = lonCell_x4[cells_x]
    nSteps = len(cells_x)
    tBase = dt.datetime(2006,9,19,0)
    dateList_x = [tBase + dt.timedelta(hours=6*x) for x in range(0,nSteps)]
    ind_cfsr, ind_x, dates_common = get_commonIndices_time(dateList_cfsr, dateList_x)
    vals = calc_distSphere_multiple(rEarth, lat_x[ind_x], lon_x[ind_x], lat_cfsr[ind_cfsr], lon_cfsr[ind_cfsr])
    plt.plot(dates_common, vals, 'b')

    plt.ylabel('$|\Delta x|$'+', km')
    plt.gcf().autofmt_xdate()
    #plotOrder = ['x4_t', 'x4_kf', 'x7_t', 'x7_kf']
    #plt.legend(plotOrder)
    #plt.legend(loc=3) #bottom left
    plt.legend()
    plt.show()
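
#calc_distSphere_multiple is referenced above but not defined in this section. A
#minimal haversine sketch of what it presumably computes, elementwise over numpy
#arrays of latitudes/longitudes in radians (the actual routine may differ):
def _sketch_distSphere_multiple(r, lat1, lon1, lat2, lon2):
    dlat = lat2-lat1; dlon = lon2-lon1
    a = np.sin(dlat/2.)**2 + np.cos(lat1)*np.cos(lat2)*np.sin(dlon/2.)**2
    return 2.*r*np.arcsin(np.sqrt(a)) #great-circle distance in the units of r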
def plotTracks_lat_time():
    #how to show longitude is a vexing question since it wraps -180,180 or 360,0...
    fpath = '/arctic1/nick/cases/cfsr/output/x4.cfsr.output.2006-08-01_00.00.00.nc'
    data = output_data.open_netcdf_data(fpath)
    latCell_x4 = data.variables['latCell'][:]*180./np.pi
    data.close()

    fpath = '/arctic1/nick/cases/vduda/x7/x7.kf.output.2006-08-07_18.00.00.nc'
    data = output_data.open_netcdf_data(fpath)
    latCell_x7 = data.variables['latCell'][:]*180./np.pi
    data.close()

    #cfsr
    cells_cfsr = cfsrStats.x4_cfsr_cell
    lat_cfsr = latCell_x4[cells_cfsr]
    tBase = dt.datetime(2006,8,1,0)
    nSteps = len(cells_cfsr)
    dateList_cfsr = [tBase + dt.timedelta(hours=12*x) for x in range(0,nSteps)]

    plt.figure()
    plt.gca().xaxis.set_major_formatter(mdates.DateFormatter('%m/%d'))
    plt.gca().xaxis.set_major_locator(mdates.DayLocator(interval=2))
    plt.gca().xaxis.set_minor_locator(mdates.DayLocator(interval=1))

    #2006-08-01
    cells_x = mpasStats.x4_t_cells
    lat_x = latCell_x4[cells_x]
    tBase = dt.datetime(2006,8,1,0)
    nSteps = len(cells_x)
    dateList_x = [tBase + dt.timedelta(hours=6*x) for x in range(0,nSteps)]
    ind_cfsr, ind_x, dates_common = get_commonIndices_time(dateList_cfsr, dateList_x)
    plt.plot(dates_common, lat_x[ind_x]-lat_cfsr[ind_cfsr], 'b', label='x4_t')

    cells_x = mpasStats.x4_kf_cells
    lat_x = latCell_x4[cells_x]
    tBase = dt.datetime(2006,8,1,0)
    nSteps = len(cells_x)
    dateList_x = [tBase + dt.timedelta(hours=6*x) for x in range(0,nSteps)]
    ind_cfsr, ind_x, dates_common = get_commonIndices_time(dateList_cfsr, dateList_x)
    plt.plot(dates_common, lat_x[ind_x]-lat_cfsr[ind_cfsr], 'g', label='x4_kf')

    cells_x = mpasStats.x7_t_cells
    lat_x = latCell_x7[cells_x]
    tBase = dt.datetime(2006,8,1,0)
    nSteps = len(cells_x)
    dateList_x = [tBase + dt.timedelta(hours=6*x) for x in range(0,nSteps)]
    ind_cfsr, ind_x, dates_common = get_commonIndices_time(dateList_cfsr, dateList_x)
    plt.plot(dates_common, lat_x[ind_x]-lat_cfsr[ind_cfsr], 'r.-', label='x7_t')

    cells_x = mpasStats.x7_kf_cells
    lat_x = latCell_x7[cells_x]
    tBase = dt.datetime(2006,8,1,0)
    nSteps = len(cells_x)
    dateList_x = [tBase + dt.timedelta(hours=6*x) for x in range(0,nSteps)]
    ind_cfsr, ind_x, dates_common = get_commonIndices_time(dateList_cfsr, dateList_x)
    plt.plot(dates_common, lat_x[ind_x]-lat_cfsr[ind_cfsr], 'k--', label='x7_kf')

    #x7 2006-08-07_18
    cells_x = mpasStats.x7_kf_0807_cells
    lat_x = latCell_x7[cells_x]
    tBase = dt.datetime(2006,8,7,18)
    nSteps = len(cells_x)
    dateList_x = [tBase + dt.timedelta(hours=6*x) for x in range(0,nSteps)]
    ind_cfsr, ind_x, dates_common = get_commonIndices_time(dateList_cfsr, dateList_x)
    plt.plot(dates_common, lat_x[ind_x]-lat_cfsr[ind_cfsr], 'k--')

    #x7 2006-08-03_12
    cells_x = mpasStats.x7_t_0803_cells
    lat_x = latCell_x7[cells_x]
    tBase = dt.datetime(2006,8,3,12)
    nSteps = len(cells_x)
    dateList_x = [tBase + dt.timedelta(hours=6*x) for x in range(0,nSteps)]
    ind_cfsr, ind_x, dates_common = get_commonIndices_time(dateList_cfsr, dateList_x)
    plt.plot(dates_common, lat_x[ind_x]-lat_cfsr[ind_cfsr], 'r.-')

    #x4_t 2006081500
    cells_x = mpasStats.x4_t_0815_cells
    lat_x = latCell_x4[cells_x]
    tBase = dt.datetime(2006,8,15,0)
    nSteps = len(cells_x)
    dateList_x = [tBase + dt.timedelta(hours=6*x) for x in range(0,nSteps)]
    ind_cfsr, ind_x, dates_common = get_commonIndices_time(dateList_cfsr, dateList_x)
    plt.plot(dates_common, lat_x[ind_x]-lat_cfsr[ind_cfsr], 'b')

    plt.ylabel('$\Delta\phi_{min}$'+', '+'$\deg$')
    plt.gcf().autofmt_xdate()
    #plotOrder = ['x4_t', 'x4_kf', 'x7_t', 'x7_kf']
    #plt.legend(plotOrder)
    plt.legend(loc=3)
    plt.show()