def plotwithopts(ifile, method, vars, options=defaultoption):
    from PseudoNetCDF.sci_var import getvarpnc
    from PseudoNetCDF.pncgen import pncgen
    import matplotlib.pyplot as plt
    # dummy assignment so that flake8 sees plt as used;
    # plt is loaded into the environment for exec
    varkey = plt
    exec(options.pre_txt)
    for varkey in vars:
        figpath = eval(method)(ifile=ifile, varkey=varkey, options=options,
                               before=options.before_txt,
                               after=options.after_txt)
        # _coordkeys is the module-level list of coordinate keys
        # ('TFLAG', 'time', 'latitude', 'longitude', 'latitude_bounds',
        #  'longitude_bounds')
        pncgen(getvarpnc(ifile, list(vars) + list(_coordkeys)),
               figpath + '.nc', verbose=0)
    exec(options.post_txt)
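# Usage sketch (hypothetical, not part of the library): assumes an options
# namespace that provides the pre_txt/post_txt/before_txt/after_txt hooks
# that plotwithopts exec's, and that `method` names a plotting function
# resolvable by eval in this module. The file path and variable name are
# illustrative only.
def _example_plotwithopts():
    """Hypothetical demonstration of calling plotwithopts directly."""
    from types import SimpleNamespace
    from PseudoNetCDF import pncopen
    opts = SimpleNamespace(pre_txt='pass', post_txt='pass',
                           before_txt='pass', after_txt='pass')
    # hypothetical IOAPI concentration file
    ifile = pncopen('/path/to/CCTM_ACONC.nc', format='ioapi')
    # 'plot' must be a function name that eval can resolve here; the named
    # plot function may require additional option attributes beyond these
    plotwithopts(ifile, 'plot', ['O3'], options=opts)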
def get_files(self, date):
    """
    Put date into file_templates and return open files, where all files
    have been vertically interpolated and only variables that will be
    used are present

    date - date to use for files
    """
    from PseudoNetCDF.sci_var import extract, slice_dim, getvarpnc
    # from PseudoNetCDF.cmaqfiles.profile import bcon_profile, icon_profile
    import gc
    # make quick references to instance variables
    sources = self._sources
    verbose = self._verbose
    coordstr = self._coordstr
    # Fill file path templates with date using the time
    # function provided
    file_paths = [(r, eval(tsf)(date, p))
                  for r, p, tsf in self._config['file_templates']]
    # Return cached files if appropriate
    if file_paths == self.last_file_paths:
        return self.last_file_objs
    # If coordstr is not none, this is a data extraction call and the
    # status should be updated
    if coordstr is not None:
        status('')
        status('-' * 40)
        status("Getting files for " + str(date))
        status('-' * 40)
        status('')
    # For each file, use the reader (r) to open the path (p)
    for fi, (r, p) in enumerate(file_paths):
        print(fi, r, p, eval(r))
        # If last path is this path, no need to update
        lp = self.last_file_paths[fi]
        nf = self.last_file_objs[fi]
        if p != lp:
            if verbose > 0:
                timeit('GET_FILE %s' % p, True)
            # Close old file to prevent memory leaks
            nf.close()
            if verbose > 1:
                status('Opening %s with %s' % (p, r), show=False)
            onf = nf = eval(r)(p)
            if coordstr is None:
                # Coordstr is None, so this call is just for some meta-data
                with warnings.catch_warnings():
                    warnings.simplefilter("ignore")
                    nf = getvarpnc(onf, ('time TFLAG tau0 tau1 latitude '
                                         'longitude latitude_bounds '
                                         'longitude_bounds PRES').split())
                nf = onf
                if ('PERIM' in onf.dimensions.keys()
                        and not isinstance(onf, bcon_profile)):
                    nf.createDimension('PERIM',
                                       len(onf.dimensions['PERIM']))
                    nf.createDimension('LAY', len(onf.dimensions['LAY']))
                    nf.NCOLS = onf.NCOLS
                    nf.NROWS = onf.NROWS
            else:
                # If coordstr is not None, this is a real data call
                #
                # Real data calls require vertical interpolation

                # Calculate the vertical coordinate of the input file

                # BCON and ICON file processing
                if isinstance(nf, (bcon_profile, icon_profile)):
                    nf = nf.interptosigma(self.vert_out, sources)
                    metf = [f for f in self.last_file_objs
                            if 'PERIM' in f.dimensions][0]
                    # profile files need to be converted to METBDY
                    # coordinates by repeating boundaries
                    nf = profile_to_ftype(nf, ncols=metf.NCOLS,
                                          nrows=metf.NROWS,
                                          ftype=metf.FTYPE)
                # GEOS-Chem BPCH processing (TPCORE, ND49 BPCH or ND49)
                elif isinstance(nf, (bpch, ND49NC)):
                    # Only extract groups that are used in mappings, and
                    # only extract variables in those groups that are used
                    nf = nf.tooutcoords(coordstr, self.vert_out,
                                        self.vgtop, sources)
                elif isinstance(nf, (METBDY3D, METCRO3D)):
                    # Assuming METBDY and METCRO3D are target coordinates
                    pass
                else:
                    raise IOError('Unknown type %s; add type to readers'
                                  % type(nf))
            if verbose > 0:
                timeit('GET_FILE %s' % p, False)
        self.last_file_objs[fi] = nf
    self.last_file_paths = file_paths
    return self.last_file_objs
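# Configuration sketch (hypothetical): get_files expects
# self._config['file_templates'] to hold (reader_name, path_template,
# time_formatter_name) triples, and it resolves both names with eval, so in
# real use they must be importable in its module namespace. The reader name,
# template path, and helper below are illustrative only.
def _example_file_templates():
    """Hypothetical file_templates entry and the path it would produce."""
    import datetime

    def strftime_path(date, path_template):
        # fills an strftime-style template with the requested date
        return date.strftime(path_template)

    file_templates = [
        ('ncf', '/data/geoschem/ND49_%Y%m%d.bpch.nc', 'strftime_path'),
    ]
    date = datetime.datetime(2016, 1, 1)
    r, p, tsf = file_templates[0]
    # get_files would do eval(tsf)(date, p); calling the helper directly here
    return (r, strftime_path(date, p))
    # -> ('ncf', '/data/geoschem/ND49_20160101.bpch.nc')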
    print(
        'WARNING: %s is newer than %s; not updating. Use -O to overwrite'
        % (csvpath, outpath))
    if os.path.getmtime(__file__) > os.path.getmtime(outpath):
        print(
            'WARNING: script is newer than %s; not updating. '
            'Use -O to overwrite' % (outpath,))
        continue
    csvfile = pd.read_csv(csvpath)
    unique_vars = np.unique(
        np.char.replace([v for v in csvfile['POLNAME'].values],
                        'BEN', 'BENZENE')).tolist()
    aggdata = csvfile.pivot_table(index=('POLNAME', 'ROW', 'COL'),
                                  aggfunc=np.sum)
    oldpolname = None
    outfile = getvarpnc(Dataset(templatepath, 'r+'),
                        ['TFLAG'] + unique_vars)
    add_ioapi_from_ioapi(outfile)
    # outfile.variables['TFLAG'][:, :, 0] = np.arange(0, 25)[:, None]
    # outfile.variables['TFLAG'][:, :, 0] = 2014001
    print('Working on: ' + csvpath)
    for (polname, rowi, coli), groupdata in aggdata.iterrows():
        if args.verbose > 2:
            print(polname)
        if polname != oldpolname:
            if oldpolname is not None:
                if args.verbose > 0:
                    print('Writing out ' + polname + ' to ' + var.long_name)
                var[0:26, 0, :, :] = temp[0:26, 0, :, :]
            if args.verbose > 0:
                print('Starting ' + polname)
            if polname == 'BEN':
                var = outfile.variables['BENZENE']
            elif polname != 'CO2':
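# Aggregation sketch (hypothetical data): demonstrates the pivot_table call
# above, where duplicate (POLNAME, ROW, COL) records are summed before being
# written into the gridded output variable. Column names mirror the CSV
# expected above; the 'EMIS' values are made up for illustration.
def _example_aggregate():
    """Sum duplicate cell records per pollutant, as the CSV loop does."""
    import numpy as np
    import pandas as pd
    csvfile = pd.DataFrame({
        'POLNAME': ['BEN', 'BEN', 'CO'],
        'ROW': [1, 1, 2],
        'COL': [3, 3, 4],
        'EMIS': [0.5, 0.25, 1.0],
    })
    aggdata = csvfile.pivot_table(index=('POLNAME', 'ROW', 'COL'),
                                  aggfunc=np.sum)
    # ('BEN', 1, 3) now holds 0.75; iterate the same way the script does
    for (polname, rowi, coli), groupdata in aggdata.iterrows():
        print(polname, rowi, coli, groupdata['EMIS'])
    return aggdata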
def makemaps(args):
    ifiles = args.ifiles
    cbar = None
    ifile = ifiles[0]
    if args.iter != []:
        ifile, = ifiles
        ifiles = []
        for dimk in args.iter:
            ifiles += [
                slice_dim(getvarpnc(ifile, None), '%s,%d' % (dimk, i))
                for i in range(len(ifile.dimensions[dimk]))
            ]
    ax = plt.gca()
    map = getmap(ifile, resolution=args.resolution)
    if args.coastlines:
        map.drawcoastlines(ax=ax)
    if args.countries:
        map.drawcountries(ax=ax)
    if args.states:
        map.drawstates(ax=ax)
    if args.counties:
        map.drawcounties(ax=ax)
    for si, shapefile in enumerate(args.shapefiles):
        shapeopts = shapefile.split(',')
        shapepath = shapeopts[0]
        shapeoptdict = eval('dict(' + ','.join(shapeopts[1:]) + ')')
        shapename = os.path.basename(shapepath)[:-3] + str(si)
        map.readshapefile(shapepath, shapename, ax=ax, **shapeoptdict)
    args.map = map
    fig = plt.gcf()
    if len(args.figure_keywords) > 0:
        plt.setp(fig, **args.figure_keywords)
    ax = plt.gca()
    if len(args.axes_keywords) > 0:
        plt.setp(ax, **args.axes_keywords)
    map = args.map
    nborders = len(ax.collections)
    for fi, ifile in enumerate(ifiles):
        if map.projection in ('lcc', 'merc'):
            lat = ifile.variables['latitude']
            lon = ifile.variables['longitude']
            latb, latunit = getybnds(ifile)[:]
            lonb, lonunit = getxbnds(ifile)[:]
        else:
            lat = ifile.variables['latitude']
            lon = ifile.variables['longitude']
            latb, latunit = getlatbnds(ifile)[:]
            lonb, lonunit = getlonbnds(ifile)[:]
        if latb.ndim == lonb.ndim and lonb.ndim == 2:
            LON, LAT = lonb, latb
        else:
            LON, LAT = np.meshgrid(lonb.view(np.ndarray),
                                   latb.view(np.ndarray))
        variables = args.variables
        if variables is None:
            def isgeo(var):
                geo2d = set(['latitude', 'longitude'])
                vard = getattr(var, 'coordinates', '').split()
                hasgeo2d = len(geo2d.intersection(vard)) == 2
                return hasgeo2d

            variables = [key for key, var in ifile.variables.items()
                         if isgeo(var)]
            if len(variables) == 0:
                raise ValueError('Unable to heuristically determine '
                                 'plottable variables; use -v to specify '
                                 'variables for plotting')
        for varkey in variables:
            ax = plt.gca()
            if not args.overlay:
                del ax.collections[nborders:]
            var = ifile.variables[varkey]
            if args.squeeze:
                vals = var[:].squeeze()
            else:
                vals = var[:]
            vmin, vmax = vals.min(), vals.max()
            if args.normalize is None:
                from scipy.stats import normaltest
                if normaltest(vals.ravel())[1] < 0.001:
                    cvals = np.ma.compressed(vals)
                    boundaries = np.percentile(cvals, np.arange(0, 110, 10))
                    warn('Autoselect deciles colormap of %s; override '
                         'with --norm' % varkey)
                else:
                    boundaries = np.linspace(vmin, vmax, num=11)
                    warn('Autoselect linear colormap of %s; override '
                         'with --norm' % varkey)
                ordermag = (boundaries.max() /
                            np.ma.masked_values(boundaries, 0).min())
                if ordermag > 10000:
                    formatter = LogFormatter(labelOnlyBase=False)
                else:
                    formatter = None
                norm = BoundaryNorm(boundaries, ncolors=256)
            else:
                norm = eval(args.normalize)
                formatter = None
            if args.colorbarformatter is not None:
                try:
                    formatter = eval(args.colorbarformatter)
                except Exception:
                    formatter = args.colorbarformatter
            if norm.vmin is not None:
                vmin = norm.vmin
            if norm.vmax is not None:
                vmax = norm.vmax
            varunit = getattr(var, 'units', 'unknown').strip()
            if args.verbose > 0:
                print(varkey, sep='')
            if vals.ndim == 1:
                notmasked = ~(np.ma.getmaskarray(lon[:]) |
                              np.ma.getmaskarray(lat[:]) |
                              np.ma.getmaskarray(vals[:]))
                scatlon = lon[:][notmasked]
                scatlat = lat[:][notmasked]
                scatvals = vals[:][notmasked]
                patches = map.scatter(scatlon[:], scatlat[:], c=scatvals,
                                      edgecolors='none', s=24, norm=norm,
                                      ax=ax, zorder=2)
            else:
                if vals.ndim != 2:
                    dimlendictstr = str(dict(zip(var.dimensions, var.shape)))
                    warn('Maps require 2-d data; values right now %s %s' %
                         (str(vals.shape), dimlendictstr))
                patches = map.pcolor(LON, LAT, vals, norm=norm, ax=ax)
            if lonunit == 'x (m)':
                ax.xaxis.get_major_formatter().set_scientific(True)
                ax.xaxis.get_major_formatter().set_powerlimits((-3, 3))
            if latunit == 'y (m)':
                ax.yaxis.get_major_formatter().set_scientific(True)
                ax.yaxis.get_major_formatter().set_powerlimits((-3, 3))
            ax.set_xlabel(lonunit)
            ax.set_ylabel(latunit)
            height = np.abs(np.diff(ax.get_ylim()))
            width = np.abs(np.diff(ax.get_xlim()))
            if width >= height:
                orientation = 'horizontal'
            else:
                orientation = 'vertical'
            if cbar is None:
                cax = None
            else:
                cax = cbar.ax
                cax.cla()
            if vals.max() > vmax and vals.min() < vmin:
                extend = 'both'
            elif vals.max() > vmax:
                extend = 'max'
            elif vals.min() < vmin:
                extend = 'min'
            else:
                extend = 'neither'
            cbar = plt.gcf().colorbar(patches, orientation=orientation,
                                      cax=cax, extend=extend,
                                      format=formatter,
                                      spacing='proportional')
            del cbar.ax.texts[:]
            varminmaxtxt = ('; min=%.3g; max=%.3g)' %
                            (var[:].min(), var[:].max()))
            cbar.set_label(varkey + ' (' + varunit + varminmaxtxt)
            # if orientation == 'vertical':
            #     cbar.ax.text(.5, 1.05, '%.3g' % var[:].max(),
            #                  horizontalalignment='center',
            #                  verticalalignment='bottom')
            #     cbar.ax.text(.5, -.06, '%.3g ' % var[:].min(),
            #                  horizontalalignment='center',
            #                  verticalalignment='top')
            # else:
            #     cbar.ax.text(1.05, .5, ' %.3g' % var[:].max(),
            #                  verticalalignment='center',
            #                  horizontalalignment='left')
            #     cbar.ax.text(-.06, .5, '%.3g ' % var[:].min(),
            #                  verticalalignment='center',
            #                  horizontalalignment='right')
            cbar.update_ticks()
            fmt = args.figformat
            outpath = args.outpath
            if len(ifiles) > 1:
                lstr = str(fi).rjust(len(str(len(ifiles))), '0')
                if args.verbose > 0:
                    print('adding numeric suffix for file', lstr)
            else:
                lstr = ''
            figpath = os.path.join(outpath + varkey + lstr + '.' + fmt)
            if args.interactive:
                csl = PNCConsole(locals=globals())
                csl.interact()
            for cmd in args.plotcommands:
                exec(cmd)
            plt.savefig(figpath)
            if args.verbose > 0:
                print('Saved fig', figpath)
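# Invocation sketch (hypothetical): makemaps is normally driven by the pnc
# command-line argument parser; this namespace enumerates the attributes the
# function reads, with illustrative defaults. The input path, the 'TEMPG'
# variable name, and the output prefix are assumptions for the example.
def _example_makemaps():
    """Hypothetical direct call to makemaps with a minimal options namespace."""
    from types import SimpleNamespace
    from PseudoNetCDF import pncopen
    ifile = pncopen('/path/to/GRIDCRO2D.nc', format='ioapi')
    args = SimpleNamespace(
        ifiles=[ifile], iter=[], variables=['TEMPG'], resolution='c',
        coastlines=True, countries=True, states=False, counties=False,
        shapefiles=[], figure_keywords={}, axes_keywords={},
        overlay=False, squeeze=True, normalize=None, colorbarformatter=None,
        verbose=0, figformat='png', outpath='/tmp/map_', interactive=False,
        plotcommands=[])
    # writes one figure per variable, e.g. /tmp/map_TEMPG.png
    makemaps(args)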