def _run(args):
    """Convert gridded netCDF forcings to per-point VIC column files.

    Reads a config file (``args.config``), finds active points from a mask
    netCDF, then for each input file writes one output column per point in
    Binary and/or ASCII format (controlled by ``options.output``).
    Relies on module-level helpers: read_config, read_netcdf, write_binary,
    write_ascii, pyzip.
    """
    config = read_config(args.config)
    files = config["options"]["files"]
    var_keys = config["options"]["var_keys"]
    output = config["options"]["output"]
    binary_mult = config["options"]["binary_mult"]
    # BUG FIX: was `(config["options"]["binary_type"],)` -- the trailing
    # comma wrapped the type string in a 1-tuple, which write_binary cannot
    # use as a binary type specifier.
    binary_type = config["options"]["binary_type"]
    paths = config["options"]["paths"]
    out_prefix = config["options"]["out_prefix"]
    verbose = config["options"]["verbose"]

    mask = read_netcdf(paths["mask_path"], nc_vars=["mask"])["mask"]
    yi, xi = np.nonzero(mask)
    print("found {0} points in mask file.".format(len(yi)))

    xlist = []
    ylist = []
    pointlist = []
    append = False  # first file creates output files; later files append

    for i, fname in enumerate(files):
        d = read_netcdf(os.path.join(paths["in_path"], fname),
                        verbose=verbose)

        if i == 0:
            # find point locations (only once, from the first file)
            xs = d["xc"]
            ys = d["yc"]
            # shift longitudes from [0, 360) to [-180, 180)
            posinds = np.nonzero(xs > 180)
            xs[posinds] -= 360
            print("adjusted xs lon minimum")

            for y, x in pyzip(yi, xi):
                # skip points that are fully masked in any variable
                active_flag = False
                for key in var_keys:
                    if (d[key][:, y, x].all() is np.ma.masked) \
                            or (mask[y, x] == 0):
                        active_flag = True
                if not active_flag:
                    # 2-d coordinate variables: index with both y and x
                    point = (ys[y, x], xs[y, x])
                    xlist.append(x)
                    ylist.append(y)
                    pointlist.append(point)
        else:
            append = True

        for y, x, point in pyzip(ylist, xlist, pointlist):
            data = np.empty((d[var_keys[0]].shape[0], len(var_keys)))
            for j, key in enumerate(var_keys):
                data[:, j] = d[key][:, y, x]
            if output["Binary"]:
                write_binary(data * binary_mult, point, binary_type,
                             out_prefix, paths["BinaryoutPath"], append)
            if output["ASCII"]:
                write_ascii(data, point, out_prefix,
                            paths["ASCIIoutPath"], append)
    return
def main():
    """Plot vegetation-type fractions from the VIC parameter netCDF.

    Bare soil is derived as the remainder after summing the per-class
    coverage fractions (``Cv``) over the class axis.
    """
    params, _ = read_netcdf(param_file)
    cv = params['Cv']
    # whatever fraction is not covered by a vegetation class is bare soil
    baresoil = 1 - np.sum(cv, axis=0)
    plot_veg_types(params['yc'], params['xc'], cv, baresoil)
def subset(param_file, upleft=False, lowright=False, outfiles=1,
           soil_file=False, snow_file=False, veg_file=False,
           project=None, nijssen2arno=False):
    """Subset a gridded VIC parameter file and write classic-format files.

    Parameters
    ----------
    param_file : str
        Path to the netCDF parameter file to read.
    upleft, lowright : sequence or False
        (lat, lon) corners of an optional bounding box used to subset the
        grid cells before writing soil files.
    outfiles : int
        Number of soil files to split the output into.
    soil_file, snow_file, veg_file : str or False
        Output file targets; falsy values skip that output.
    project : str or None
        Named project configuration; only 'RASM' is recognized.
    nijssen2arno : bool
        If True, convert baseflow parameters from NIJSSEN2001 to ARNO form.

    Raises
    ------
    ValueError
        If ``project`` is given but not a known configuration.
    """
    data, attributes = read_netcdf(param_file)

    if nijssen2arno:
        import NIJSSEN2001_to_ARNO
        data = NIJSSEN2001_to_ARNO.convert(data)

    if project:
        print('Project Configuration {0}'.format(project))
        if project == 'RASM':
            outfiles = 1
            cells, yinds, xinds = find_gridcells(data['mask'])
            rasm_soil(data, soil_file)
        else:
            raise ValueError('Unknown project configuration')
        return
    else:
        cells, yinds, xinds = find_gridcells(data['mask'])

    # write snow and veg files
    if veg_file:
        rootzones = data['root_depth'].shape[1]
        veg(data, xinds, yinds, veg_file, rootzones=rootzones,
            global_lai=True)
    if snow_file:
        snow(data, xinds, yinds, snow_file)

    if (upleft and lowright):
        # BUG FIX: was chained with the Python `and` operator, which raises
        # "truth value of an array ... is ambiguous" on numpy index arrays;
        # elementwise `&` is required here.
        inds = ((yinds < upleft[0]) & (yinds > lowright[0]) &
                (xinds < lowright[1]) & (xinds > upleft[1]))
        yinds = yinds[inds]
        xinds = xinds[inds]

    # BUG FIX: np.ceil returns a float, which cannot be used as a slice
    # index -- cast to int.
    filesize = int(np.ceil(cells / outfiles))
    for i in range(outfiles):
        start = i * filesize
        end = i * filesize + filesize
        if end > cells:
            end = cells
        if outfiles > 1:
            out_file = '{0}_{1}.txt'.format(soil_file,
                                            str(i).zfill(len(str(outfiles))))
        else:
            out_file = '{0}.txt'.format(soil_file)
        soil(data, xinds[start:end], yinds[start:end], out_file)
    return
def make_grid(grid_file, soil_file, snow_file, veg_file, vegl_file,
              nc_file='params.nc', version='4.1.2'):
    """Grid standard VIC parameter files onto a target grid.

    Reads classic-format soil/snow/veg/veglib parameter files via the
    routines in params.py, then maps them onto the target grid with
    nearest-neighbor mapping (a land mask in the target grid, when present,
    excludes ocean cells).  If ``nc_file`` is a path ending in '.nc', a
    netCDF file is written and its name returned; if ``nc_file`` is False,
    the dictionary of gridded parameters is returned instead.
    """
    print('making grided parameters now...')

    soil_dict = soil(soil_file)

    # optional inputs: falsy file arguments propagate as False
    snow_dict = snow(snow_file, soil_dict) if snow_file else False
    veg_dict = veg(veg_file, soil_dict, lai_index=True) if veg_file else False
    veglib_dict = veg_class(vegl_file) if vegl_file else False

    if grid_file:
        target_grid, target_attrs = read_netcdf(grid_file)
    else:
        # no grid supplied: derive one from the soil-file coordinates
        target_grid, target_attrs = calc_grid(soil_dict['lats'],
                                              soil_dict['lons'])

    grid_dict = grid_params(soil_dict, target_grid, snow_dict=snow_dict,
                            veg_dict=veg_dict, veglib_dict=veglib_dict,
                            version=version)

    if not nc_file:
        return grid_dict

    write_netcdf(nc_file, target_attrs, target_grid=target_grid,
                 soil_grid=grid_dict['soil_dict'],
                 snow_grid=grid_dict['snow_dict'],
                 veglib_dict=veglib_dict,
                 veg_grid=grid_dict['veg_dict'],
                 version=version)
    return nc_file
def nc_to_vic(config_file):
    '''
    This function converts netCDF files to VIC ascii format files.
    (This function is adapted from tonic)

    Parameters
    ----------
    config_file: <str>
        Path of config file for nc_to_vic

    Returns
    ----------
    None

    Requires
    ----------
    write_binary
    '''

    import numpy as np
    import os
    from tonic.io import read_netcdf, read_config
    from tonic.pycompat import pyzip

    config = read_config(config_file)
    # should contain "{}", which will be replaced by YYYY
    files = config['options']['files']
    var_keys = config['options']['var_keys']
    output_format = config['options']['output_format']  # Binary or ASCII
    out_prefix = config['options']['out_prefix']
    verbose = config['options']['verbose']
    # varname of lon and lat in netCDF files
    coord_keys = config['options']['coord_keys']
    lon_name = coord_keys[0]
    lat_name = coord_keys[1]
    start_year = config['options']['start_year']
    end_year = config['options']['end_year']
    latlon_precision = config['options']['latlon_precision']
    paths = config['paths']
    mask_varname = paths['mask_varname']

    # BUG FIX: binary_mult and binary_type were used in the Binary branch
    # below but never defined, causing a NameError for binary output.
    # Read them from the config when binary output is requested.
    if output_format == 'Binary':
        binary_mult = config['options']['binary_mult']
        binary_type = config['options']['binary_type']

    mask = read_netcdf(paths['mask_path'], variables=['mask'])[0][mask_varname]
    yi, xi = np.nonzero(mask)
    print('found {0} points in mask file.'.format(len(yi)))

    xlist = []
    ylist = []
    pointlist = []
    append = False  # first year creates the files; later years append

    for i, year in enumerate(range(start_year, end_year + 1)):
        print('Year {}'.format(year))
        fname = files.format(year)
        d = read_netcdf(os.path.join(paths['in_path'], fname),
                        verbose=verbose)[0]

        if i == 0:
            # find point locations (only once, from the first year)
            xs = d[lon_name]
            ys = d[lat_name]
            # shift longitudes from [0, 360) to [-180, 180)
            posinds = np.nonzero(xs > 180)
            xs[posinds] -= 360
            print('adjusted xs lon minimum')

            for y, x in pyzip(yi, xi):
                # skip points that are fully masked in any variable
                active_flag = False
                for key in var_keys:
                    if (d[key][:, y, x].all() is np.ma.masked) \
                            or (mask[y, x] == 0):
                        active_flag = True
                if not active_flag:
                    # 1-d coordinate variables: index lat by y, lon by x
                    point = (ys[y], xs[x])
                    xlist.append(x)
                    ylist.append(y)
                    pointlist.append(point)
        else:
            append = True

        for y, x, point in pyzip(ylist, xlist, pointlist):
            data = np.empty((d[var_keys[0]].shape[0], len(var_keys)))
            for j, key in enumerate(var_keys):
                data[:, j] = d[key][:, y, x]
            if output_format == 'Binary':
                write_binary(data * binary_mult, point, binary_type,
                             out_prefix, paths['BinaryoutPath'], append)
            if output_format == 'ASCII':
                write_ascii(data, point, out_prefix,
                            paths['ASCIIoutPath'], latlon_precision,
                            append)
    return
def _run(args):
    """Convert gridded netCDF forcings to per-point VIC column files.

    Reads a config file (``args.config``), finds active points from a mask
    netCDF, then for each input file writes one output column per point in
    Binary and/or ASCII format (controlled by ``options.output``).
    Relies on module-level helpers: read_config, read_netcdf, write_binary,
    write_ascii, pyzip.
    """
    config = read_config(args.config)
    files = config['options']['files']
    var_keys = config['options']['var_keys']
    output = config['options']['output']
    binary_mult = config['options']['binary_mult']
    # BUG FIX: the original line ended with a stray trailing comma
    # (`... = config['options']['binary_type'],`), silently wrapping the
    # type string in a 1-tuple before it reached write_binary.
    binary_type = config['options']['binary_type']
    paths = config['options']['paths']
    out_prefix = config['options']['out_prefix']
    verbose = config['options']['verbose']

    mask = read_netcdf(paths['mask_path'], nc_vars=['mask'])['mask']
    yi, xi = np.nonzero(mask)
    print('found {0} points in mask file.'.format(len(yi)))

    xlist = []
    ylist = []
    pointlist = []
    append = False  # first file creates output files; later files append

    for i, fname in enumerate(files):
        d = read_netcdf(os.path.join(paths['in_path'], fname),
                        verbose=verbose)

        if i == 0:
            # find point locations (only once, from the first file)
            xs = d['xc']
            ys = d['yc']
            # shift longitudes from [0, 360) to [-180, 180)
            posinds = np.nonzero(xs > 180)
            xs[posinds] -= 360
            print('adjusted xs lon minimum')

            for y, x in pyzip(yi, xi):
                # skip points that are fully masked in any variable
                active_flag = False
                for key in var_keys:
                    if (d[key][:, y, x].all() is np.ma.masked) \
                            or (mask[y, x] == 0):
                        active_flag = True
                if not active_flag:
                    # 2-d coordinate variables: index with both y and x
                    point = (ys[y, x], xs[y, x])
                    xlist.append(x)
                    ylist.append(y)
                    pointlist.append(point)
        else:
            append = True

        for y, x, point in pyzip(ylist, xlist, pointlist):
            data = np.empty((d[var_keys[0]].shape[0], len(var_keys)))
            for j, key in enumerate(var_keys):
                data[:, j] = d[key][:, y, x]
            if output['Binary']:
                write_binary(data * binary_mult, point, binary_type,
                             out_prefix, paths['BinaryoutPath'], append)
            if output['ASCII']:
                write_ascii(data, point, out_prefix,
                            paths['ASCIIoutPath'], append)
    return
def _run(args):
    """Compare two VIC soil-parameter netCDF files with side-by-side plots.

    For every variable with plot attributes, draws the two fields plus
    their anomaly (``my_plot3`` for surface variables, ``my_plot9`` for
    layered variables) and saves a PNG named
    ``{title1}-{title2}-{var}.png`` under ``args.out_path``.
    """
    print(args)
    print('in compare_soil_params')

    plot_atts_3 = None
    plot_atts_9 = None

    dom, dom_atts = read_netcdf(args.domain_file)
    d1, d1a = read_netcdf(args.soil_file1)
    d2, d2a = read_netcdf(args.soil_file2)
    out_path = args.out_path
    title1 = args.title1
    title2 = args.title2

    # default plot attributes for 2-d (surface) variables:
    # vmin/vmax bound the field plots, amin/amax bound the anomaly plot
    if not plot_atts_3:
        plot_atts_3 = {'infilt': {'vmin': 0, 'vmax': 1,
                                  'amin': -0.5, 'amax': 0.5,
                                  'amap': cmap_discretize('cm.RdBu_r')},
                       'Ws': {'vmin': 0, 'vmax': 100,
                              'amin': -50, 'amax': 50,
                              'amap': cmap_discretize('cm.RdBu_r')},
                       'Ds': {'vmin': 0, 'vmax': 1,
                              'amin': -0.5, 'amax': 0.5,
                              'amap': cmap_discretize('cm.RdBu_r')},
                       'Dsmax': {'vmin': 0, 'vmax': 1,
                                 'amin': -0.5, 'amax': 0.5,
                                 'amap': cmap_discretize('cm.RdBu_r')},
                       'avg_T': {'vmin': -25, 'vmax': 25,
                                 'amin': -2, 'amax': 2,
                                 'amap': cmap_discretize('cm.RdBu_r')},
                       'c': {'vmin': 0, 'vmax': 2.5,
                             'amin': -0.5, 'amax': 0.5,
                             'amap': cmap_discretize('cm.RdBu_r')},
                       'elev': {'vmin': 0, 'vmax': 2500,
                                'amin': -200, 'amax': 200,
                                'amap': cmap_discretize('cm.RdBu_r')},
                       'annual_prec': {'vmin': 0, 'vmax': 2000,
                                       'amin': -500, 'amax': 500,
                                       'amap': cmap_discretize('cm.RdBu')}}

    # default plot attributes for 3-d (per-soil-layer) variables
    if not plot_atts_9:
        plot_atts_9 = {'soil_density': {'vmin': 0, 'vmax': 4000,
                                        'amin': -500, 'amax': 500,
                                        'amap':
                                        cmap_discretize('cm.RdBu_r')},
                       'bulk_density': {'vmin': 0, 'vmax': 1800,
                                        'amin': -100, 'amax': 100,
                                        'amap':
                                        cmap_discretize('cm.RdBu_r')},
                       'Wpwp_FRACT': {'vmin': 0, 'vmax': 1,
                                      'amin': -0.4, 'amax': 0.4,
                                      'amap':
                                      cmap_discretize('cm.RdBu_r')},
                       'bubble': {'vmin': 0, 'vmax': 100,
                                  'amin': -20, 'amax': 20,
                                  'amap': cmap_discretize('cm.RdBu_r')},
                       'quartz': {'vmin': 0, 'vmax': 1,
                                  'amin': -0.25, 'amax': 0.25,
                                  'amap': cmap_discretize('cm.RdBu_r')},
                       'resid_moist': {'vmin': 0, 'vmax': 0.1,
                                       'amin': -0.05, 'amax': 0.05,
                                       'amap': cmap_discretize('cm.RdBu')},
                       'Wcr_FRACT': {'vmin': 0, 'vmax': 1,
                                     'amin': -0.5, 'amax': 0.5,
                                     'amap': cmap_discretize('cm.RdBu_r')},
                       'expt': {'vmin': 0, 'vmax': 75,
                                'amin': -50, 'amax': 50,
                                'amap': cmap_discretize('cm.RdBu_r')},
                       'depth': {'vmin': 0, 'vmax': 2.5,
                                 'amin': -2, 'amax': 2,
                                 'amap': cmap_discretize('cm.RdBu_r')},
                       'Ksat': {'vmin': 0, 'vmax': 4000,
                                'amin': -1000, 'amax': 1000,
                                'amap': cmap_discretize('cm.RdBu_r')},
                       'init_moist': {'vmin': 0, 'vmax': 200,
                                      'amin': -100, 'amax': 100,
                                      'amap': cmap_discretize('cm.RdBu')}}

    # surface plots
    for var in plot_atts_3.keys():
        print('making plot3 for {}'.format(var))
        try:
            units = d1a[var]['units']
        except (KeyError, TypeError):  # fix: was a bare except
            units = ''
        # BUG FIX: fname was assigned inside the try block *after* the
        # plotting call, so the except handler could hit an unbound name;
        # compute it up front so the error message always works.
        fname = os.path.join(out_path,
                             '{}-{}-{}.png'.format(title1, title2, var))
        try:
            f = my_plot3(dom['xc'], dom['yc'], d1[var], d2[var],
                         units=units, mask=(dom['mask'] == 0),
                         t1=title1, t2=title2, **plot_atts_3[var])
            plt.figtext(.5, 0.94, var, fontsize=18, ha='center')
            plt.figtext(.5, 0.90, d1a[var]['description'],
                        fontsize=12, ha='center')
            f.savefig(fname, format='png', dpi=150,
                      bbox_inches='tight', pad_inches=0)
            print('finished {}'.format(fname))
        except Exception:  # fix: was a bare except (also caught SystemExit)
            print('problem with {}'.format(fname))

    # level plots
    for var in plot_atts_9.keys():
        print('making plot9 for {}'.format(var))
        try:
            units = d1a[var]['units']
        except (KeyError, TypeError):  # fix: was a bare except
            units = ''
        f = my_plot9(dom['xc'], dom['yc'], d1[var], d2[var],
                     units=units, mask=(dom['mask'] == 0),
                     t1=title1, t2=title2, **plot_atts_9[var])
        plt.figtext(.5, 1.06, var, fontsize=18, ha='center')
        plt.figtext(.5, 1.02, d1a[var]['description'],
                    fontsize=12, ha='center')
        fname = os.path.join(out_path,
                             '{}-{}-{}.png'.format(title1, title2, var))
        f.savefig(fname, format='png', dpi=150,
                  bbox_inches='tight', pad_inches=0)
        print('finished {}'.format(fname))
    return
def _run(args):
    """Convert gridded netCDF forcings to per-point ASCII VIC files.

    Variant of the converter that reads file and variable lists from the
    ``Basics`` section of the config and writes ASCII output only (binary
    output was removed from this version).  Relies on module-level helpers:
    read_config, read_netcdf, write_ascii, pyzip.
    """
    config = read_config(args.config)
    files = [config['Basics']['files']]
    var_keys = [config['Basics']['var_keys']]
    output = config['Paths']['ASCIIoutPath']
    paths = config['Paths']
    out_prefix = config['Basics']['out_prefix']
    verbose = config['Basics']['verbose']

    mask = read_netcdf(paths['mask_path'], variables=['mask'])[0]['mask']
    yi, xi = np.nonzero(mask)
    print(mask)
    # fix: typo in message ("fqile" -> "file")
    print('found {0} points in mask file.'.format(len(xi)))

    xlist = []
    ylist = []
    pointlist = []
    append = False  # first file creates output files; later files append

    for i, fname in enumerate(files):
        d = read_netcdf(os.path.join(paths['in_path'], fname),
                        verbose=verbose)[0]
        print(i)

        if i == 0:
            # find point locations (only once, from the first file)
            xs = d['lon']
            ys = d['lat']
            # shift longitudes from [0, 360) to [-180, 180)
            posinds = np.nonzero(xs > 180)
            xs[posinds] -= 360
            print('adjusted xs lon minimum')

            for y, x in pyzip(yi, xi):
                # skip points that are fully masked in any variable
                active_flag = False
                for key in var_keys:
                    if (d[key][:, y, x].all() is np.ma.masked) \
                            or (mask[y, x] == 0):
                        active_flag = True
                if not active_flag:
                    # 1-d coordinate variables: index lat by y, lon by x
                    point = (ys[y], xs[x])
                    print(point)
                    xlist.append(x)
                    ylist.append(y)
                    pointlist.append(point)
        else:
            append = True

        for y, x, point in pyzip(ylist, xlist, pointlist):
            data = np.empty((d[var_keys[0]].shape[0], len(var_keys)))
            for j, key in enumerate(var_keys):
                data[:, j] = d[key][:, y, x]
            write_ascii(data, point, out_prefix,
                        paths['ASCIIoutPath'], append)
    return