else:
    grid_mn = []
    for Y in range(168, 171):
        for X in range(225, 228):
            if not mask[Y, X]:
                grid_mn.append(grd(X, Y))


def apply_init(grid):
    grid.init_caete_dyn(input_path, stime, co2_data, pls_table, tsoil, ssoil)
    return grid


# START GRIDCELLS
print("Starting gridcells")
print_progress(0, len(grid_mn), prefix='Progress:', suffix='Complete')
for i, g in enumerate(grid_mn):
    apply_init(g)
    print_progress(i + 1, len(grid_mn), prefix='Progress:', suffix='Complete')


# DEFINE HARVESTERS - functions that apply grd methods (run the CAETÊ model) over the gridcell instances
def apply_spin(grid):
    """Pre-spinup: use some outputs of the daily budget (water, litter C, N and P)
       to start the soil organic pools"""
    w, ll, cwd, rl, lnc = grid.bdg_spinup(start_date="19790101", end_date="19830101")
    grid.sdc_spinup(w, ll, cwd, rl, lnc)
    return grid


def apply_fun(grid):
def create_ncG3(table, interval):

    nc_out = Path("../nc_outputs")
    out_data = True if nc_out.exists() else os.mkdir(nc_out)
    if out_data is None:
        print(f"Creating output folder at {nc_out.resolve()}")
    elif out_data:
        print(f"Saving outputs in {nc_out.resolve()}")

    vars = ["rcm", "runom", "evapm", "wsoil", "cleaf", "cawood", "cfroot",
            "litter_l", "cwd", "litter_fr", "litter_n", "litter_p",
            "sto_c", "sto_n", "sto_p", "c_cost"]

    dates = time_queries(interval)
    dm1 = len(dates)

    # Output arrays (time, lat, lon) initialised with the no-data value
    rcm = np.zeros(shape=(dm1, 61, 71), dtype=np.float32) - 9999.0
    runom = np.zeros(shape=(dm1, 61, 71), dtype=np.float32) - 9999.0
    evapm = np.zeros(shape=(dm1, 61, 71), dtype=np.float32) - 9999.0
    wsoil = np.zeros(shape=(dm1, 61, 71), dtype=np.float32) - 9999.0
    swsoil = np.zeros(shape=(dm1, 61, 71), dtype=np.float32) - 9999.0
    cleaf = np.zeros(shape=(dm1, 61, 71), dtype=np.float32) - 9999.0
    cawood = np.zeros(shape=(dm1, 61, 71), dtype=np.float32) - 9999.0
    cfroot = np.zeros(shape=(dm1, 61, 71), dtype=np.float32) - 9999.0
    litter_l = np.zeros(shape=(dm1, 61, 71), dtype=np.float32) - 9999.0
    cwd = np.zeros(shape=(dm1, 61, 71), dtype=np.float32) - 9999.0
    litter_fr = np.zeros(shape=(dm1, 61, 71), dtype=np.float32) - 9999.0
    lnc1 = np.zeros(shape=(dm1, 61, 71), dtype=np.float32) - 9999.0
    lnc2 = np.zeros(shape=(dm1, 61, 71), dtype=np.float32) - 9999.0
    lnc3 = np.zeros(shape=(dm1, 61, 71), dtype=np.float32) - 9999.0
    lnc4 = np.zeros(shape=(dm1, 61, 71), dtype=np.float32) - 9999.0
    lnc5 = np.zeros(shape=(dm1, 61, 71), dtype=np.float32) - 9999.0
    lnc6 = np.zeros(shape=(dm1, 61, 71), dtype=np.float32) - 9999.0
    sto1 = np.zeros(shape=(dm1, 61, 71), dtype=np.float32) - 9999.0
    sto2 = np.zeros(shape=(dm1, 61, 71), dtype=np.float32) - 9999.0
    sto3 = np.zeros(shape=(dm1, 61, 71), dtype=np.float32) - 9999.0
    c_cost = np.zeros(shape=(dm1, 61, 71), dtype=np.float32) - 9999.0

    print("\nQuerying data from file FOR", end=': ')
    for v in vars:
        print(v, end=", ")
    print("\nInterval: ", interval)

    print_progress(0, len(dates), prefix='Progress:', suffix='Complete')
    for i, day in enumerate(dates):
        out = table.read_where(day)
        rcm[i, :, :] = assemble_layer(out['grid_y'], out['grid_x'], out['rcm'])
        runom[i, :, :] = assemble_layer(out['grid_y'], out['grid_x'], out['runom'])
        evapm[i, :, :] = assemble_layer(out['grid_y'], out['grid_x'], out['evapm'])
        wsoil[i, :, :] = assemble_layer(out['grid_y'], out['grid_x'], out['wsoil'])
        swsoil[i, :, :] = assemble_layer(out['grid_y'], out['grid_x'], out['swsoil'])
        cleaf[i, :, :] = assemble_layer(out['grid_y'], out['grid_x'], out['cleaf'])
        cawood[i, :, :] = assemble_layer(out['grid_y'], out['grid_x'], out['cawood'])
        cfroot[i, :, :] = assemble_layer(out['grid_y'], out['grid_x'], out['cfroot'])
        litter_l[i, :, :] = assemble_layer(out['grid_y'], out['grid_x'], out['litter_l'])
        cwd[i, :, :] = assemble_layer(out['grid_y'], out['grid_x'], out['cwd'])
        litter_fr[i, :, :] = assemble_layer(out['grid_y'], out['grid_x'], out['litter_fr'])
        lnc1[i, :, :] = assemble_layer(out['grid_y'], out['grid_x'], out['lnc1'])
        lnc2[i, :, :] = assemble_layer(out['grid_y'], out['grid_x'], out['lnc2'])
        lnc3[i, :, :] = assemble_layer(out['grid_y'], out['grid_x'], out['lnc3'])
        lnc4[i, :, :] = assemble_layer(out['grid_y'], out['grid_x'], out['lnc4'])
        lnc5[i, :, :] = assemble_layer(out['grid_y'], out['grid_x'], out['lnc5'])
        lnc6[i, :, :] = assemble_layer(out['grid_y'], out['grid_x'], out['lnc6'])
        sto1[i, :, :] = assemble_layer(out['grid_y'], out['grid_x'], out['sto1'])
        sto2[i, :, :] = assemble_layer(out['grid_y'], out['grid_x'], out['sto2'])
        sto3[i, :, :] = assemble_layer(out['grid_y'], out['grid_x'], out['sto3'])
        c_cost[i, :, :] = assemble_layer(out['grid_y'], out['grid_x'], out['c_cost'])
        print_progress(i + 1, len(dates), prefix='Progress:', suffix='Complete')

    # write netCDF
    litter_n = lnc1 + lnc2 + lnc3
    litter_p = lnc4 + lnc5 + lnc6
    wsoil = swsoil + wsoil

    vars = ["rcm", "runom", "evapm", "wsoil", "cleaf", "cawood", "cfroot",
            "litter_l", "cwd", "litter_fr", "litter_n", "litter_p",
            "sto_c", "sto_n", "sto_p", "c_cost"]

    arr = (rcm, runom, evapm, wsoil, cleaf, cawood, cfroot, litter_l, cwd,
           litter_fr, litter_n, litter_p, sto1, sto2, sto3, c_cost)

    var_attrs = get_var_metadata(vars)
    write_daily_output(arr, vars, var_attrs, interval)
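# Usage sketch (not part of the original script): driving one of the create_ncG*
# writers above from a PyTables HDF5 output file. The file path, node path and the
# interval format are assumptions; only tables.open_file()/get_node() plus the
# table.read_where() calls made above are relied on.
def _example_export_g3(h5_path="../outputs/CAETE.h5",
                       interval=("19790101", "19891231")):
    import tables as tb
    with tb.open_file(h5_path, mode="r") as h5f:
        g3_table = h5f.get_node("/RUN0/Outputs_G3")  # hypothetical node name
        create_ncG3(g3_table, interval)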
def create_ncG1(table, interval):

    nc_out = Path("../nc_outputs")
    out_data = True if nc_out.exists() else os.mkdir(nc_out)
    if out_data is None:
        print(f"Creating output folder at {nc_out.resolve()}")
    elif out_data:
        print(f"Saving outputs in {nc_out.resolve()}")

    vars = ['photo', 'aresp', 'npp', 'lai', 'wue', 'cue',
            'vcmax', 'sla', 'nupt', 'pupt']

    dates = time_queries(interval)
    dm1 = len(dates)

    # Output arrays (time, lat, lon) initialised with the no-data value
    photo = np.zeros(shape=(dm1, 61, 71), dtype=np.float32) - 9999.0
    aresp = np.zeros(shape=(dm1, 61, 71), dtype=np.float32) - 9999.0
    npp = np.zeros(shape=(dm1, 61, 71), dtype=np.float32) - 9999.0
    lai = np.zeros(shape=(dm1, 61, 71), dtype=np.float32) - 9999.0
    wue = np.zeros(shape=(dm1, 61, 71), dtype=np.float32) - 9999.0
    cue = np.zeros(shape=(dm1, 61, 71), dtype=np.float32) - 9999.0
    vcmax = np.zeros(shape=(dm1, 61, 71), dtype=np.float32) - 9999.0
    specific_la = np.zeros(shape=(dm1, 61, 71), dtype=np.float32) - 9999.0
    nupt1 = np.zeros(shape=(dm1, 61, 71), dtype=np.float32) - 9999.0
    nupt2 = np.zeros(shape=(dm1, 61, 71), dtype=np.float32) - 9999.0
    pupt1 = np.zeros(shape=(dm1, 61, 71), dtype=np.float32) - 9999.0
    pupt2 = np.zeros(shape=(dm1, 61, 71), dtype=np.float32) - 9999.0
    pupt3 = np.zeros(shape=(dm1, 61, 71), dtype=np.float32) - 9999.0

    # TODO sort G1 table
    # if table.col has index: reindex_dirty
    # else: create_index(row_id)
    # tbl = table.copy(newname="indexed_g1", sortby=table.cols.row_index)

    print("\nQuerying data from file FOR", end=': ')
    for v in vars:
        print(v, end=", ")
    print("\nInterval: ", interval)

    print_progress(0, len(dates), prefix='Progress:', suffix='Complete')
    for i, day in enumerate(dates):
        out = table.read_where(day)
        photo[i, :, :] = assemble_layer(out['grid_y'], out['grid_x'], out['photo'])
        aresp[i, :, :] = assemble_layer(out['grid_y'], out['grid_x'], out['aresp'])
        npp[i, :, :] = assemble_layer(out['grid_y'], out['grid_x'], out['npp'])
        lai[i, :, :] = assemble_layer(out['grid_y'], out['grid_x'], out['lai'])
        wue[i, :, :] = assemble_layer(out['grid_y'], out['grid_x'], out['wue'])
        cue[i, :, :] = assemble_layer(out['grid_y'], out['grid_x'], out['cue'])
        vcmax[i, :, :] = assemble_layer(out['grid_y'], out['grid_x'], out['vcmax'])
        specific_la[i, :, :] = assemble_layer(out['grid_y'], out['grid_x'], out['specific_la'])
        nupt1[i, :, :] = assemble_layer(out['grid_y'], out['grid_x'], out['nupt1'])
        nupt2[i, :, :] = assemble_layer(out['grid_y'], out['grid_x'], out['nupt2'])
        pupt1[i, :, :] = assemble_layer(out['grid_y'], out['grid_x'], out['pupt1'])
        pupt2[i, :, :] = assemble_layer(out['grid_y'], out['grid_x'], out['pupt2'])
        pupt3[i, :, :] = assemble_layer(out['grid_y'], out['grid_x'], out['pupt3'])
        print_progress(i + 1, len(dates), prefix='Progress:', suffix='Complete')

    # write netCDF
    nupt1 = nupt2 + nupt1
    pupt1 = pupt3 + pupt2 + pupt1

    vars = ['photo', 'aresp', 'npp', 'lai', 'wue', 'cue',
            'vcmax', 'sla', 'nupt', 'pupt']

    arr = (photo, aresp, npp, lai, wue, cue, vcmax, specific_la, nupt1, pupt1)

    var_attrs = get_var_metadata(vars)
    write_daily_output(arr, vars, var_attrs, interval)
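# A possible way to tackle the TODO above (indexing and sorting the G1 table), given
# only as a sketch. The 'row_index' column name comes from the TODO comment and may
# not match the real table schema; Column.create_csindex() and Table.copy(sortby=...)
# are standard PyTables calls.
def _example_index_g1(table):
    if not table.cols.row_index.is_indexed:
        table.cols.row_index.create_csindex()  # completely sorted index speeds up read_where()
    return table.copy(newname="indexed_g1", sortby=table.cols.row_index, overwrite=True)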
def create_ncG2(table, interval):

    nc_out = Path("../nc_outputs")
    out_data = True if nc_out.exists() else os.mkdir(nc_out)
    if out_data is None:
        print(f"Creating output folder at {nc_out.resolve()}")
    elif out_data:
        print(f"Saving outputs in {nc_out.resolve()}")

    vars = ['csoil', 'total_n', 'total_p', 'org_n', 'org_p', 'inorg_n',
            'inorg_p', 'sorbed_p', 'hresp', 'nmin', 'pmin']

    dates = time_queries(interval)
    dm1 = len(dates)

    # Output arrays (time, lat, lon) initialised with the no-data value
    csoil1 = np.zeros(shape=(dm1, 61, 71), dtype=np.float32) - 9999.0
    csoil2 = np.zeros(shape=(dm1, 61, 71), dtype=np.float32) - 9999.0
    csoil3 = np.zeros(shape=(dm1, 61, 71), dtype=np.float32) - 9999.0
    csoil4 = np.zeros(shape=(dm1, 61, 71), dtype=np.float32) - 9999.0
    sncN1 = np.zeros(shape=(dm1, 61, 71), dtype=np.float32) - 9999.0
    sncN2 = np.zeros(shape=(dm1, 61, 71), dtype=np.float32) - 9999.0
    sncN3 = np.zeros(shape=(dm1, 61, 71), dtype=np.float32) - 9999.0
    sncN4 = np.zeros(shape=(dm1, 61, 71), dtype=np.float32) - 9999.0
    sncP1 = np.zeros(shape=(dm1, 61, 71), dtype=np.float32) - 9999.0
    sncP2 = np.zeros(shape=(dm1, 61, 71), dtype=np.float32) - 9999.0
    sncP3 = np.zeros(shape=(dm1, 61, 71), dtype=np.float32) - 9999.0
    sncP4 = np.zeros(shape=(dm1, 61, 71), dtype=np.float32) - 9999.0
    inorg_n = np.zeros(shape=(dm1, 61, 71), dtype=np.float32) - 9999.0
    inorg_p = np.zeros(shape=(dm1, 61, 71), dtype=np.float32) - 9999.0
    sorbed_n = np.zeros(shape=(dm1, 61, 71), dtype=np.float32) - 9999.0
    sorbed_p = np.zeros(shape=(dm1, 61, 71), dtype=np.float32) - 9999.0
    hresp = np.zeros(shape=(dm1, 61, 71), dtype=np.float32) - 9999.0
    nmin = np.zeros(shape=(dm1, 61, 71), dtype=np.float32) - 9999.0
    pmin = np.zeros(shape=(dm1, 61, 71), dtype=np.float32) - 9999.0

    print("\nQuerying data from file FOR", end=': ')
    for v in vars:
        print(v, end=", ")
    print("\nInterval: ", interval)

    print_progress(0, len(dates), prefix='Progress:', suffix='Complete')
    for i, day in enumerate(dates):
        out = table.read_where(day)
        csoil1[i, :, :] = assemble_layer(out['grid_y'], out['grid_x'], out["csoil1"])
        csoil2[i, :, :] = assemble_layer(out['grid_y'], out['grid_x'], out["csoil2"])
        csoil3[i, :, :] = assemble_layer(out['grid_y'], out['grid_x'], out["csoil3"])
        csoil4[i, :, :] = assemble_layer(out['grid_y'], out['grid_x'], out["csoil4"])
        sncN1[i, :, :] = assemble_layer(out['grid_y'], out['grid_x'], out["sncN1"])
        sncN2[i, :, :] = assemble_layer(out['grid_y'], out['grid_x'], out["sncN2"])
        sncN3[i, :, :] = assemble_layer(out['grid_y'], out['grid_x'], out["sncN3"])
        sncN4[i, :, :] = assemble_layer(out['grid_y'], out['grid_x'], out["sncN4"])
        sncP1[i, :, :] = assemble_layer(out['grid_y'], out['grid_x'], out["sncP1"])
        sncP2[i, :, :] = assemble_layer(out['grid_y'], out['grid_x'], out["sncP2"])
        sncP3[i, :, :] = assemble_layer(out['grid_y'], out['grid_x'], out["sncP3"])
        sncP4[i, :, :] = assemble_layer(out['grid_y'], out['grid_x'], out["sncP4"])
        inorg_n[i, :, :] = assemble_layer(out['grid_y'], out['grid_x'], out["inorg_n"])
        inorg_p[i, :, :] = assemble_layer(out['grid_y'], out['grid_x'], out["inorg_p"])
        sorbed_n[i, :, :] = assemble_layer(out['grid_y'], out['grid_x'], out["sorbed_n"])
        sorbed_p[i, :, :] = assemble_layer(out['grid_y'], out['grid_x'], out["sorbed_p"])
        hresp[i, :, :] = assemble_layer(out['grid_y'], out['grid_x'], out["hresp"])
        nmin[i, :, :] = assemble_layer(out['grid_y'], out['grid_x'], out["nmin"])
        pmin[i, :, :] = assemble_layer(out['grid_y'], out['grid_x'], out["pmin"])
        print_progress(i + 1, len(dates), prefix='Progress:', suffix='Complete')

    # write netCDF
    csoil = csoil1 + csoil2 + csoil3 + csoil4
    org_n = sncN1 + sncN2 + sncN3 + sncN4
    org_p = sncP1 + sncP2 + sncP3 + sncP4
    inorg_n = sorbed_n + inorg_n

    vars = ['csoil', 'org_n', 'org_p', 'inorg_n', 'inorg_p',
            'sorbed_p', 'hresp', 'nmin', 'pmin']

    arr = (csoil, org_n, org_p, inorg_n, inorg_p, sorbed_p, hresp, nmin, pmin)

    var_attrs = get_var_metadata(vars)
    write_daily_output(arr, vars, var_attrs, interval)
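# assemble_layer() is defined elsewhere in this script. For readability, a plausible
# minimal version is sketched below under two assumptions: grid_y/grid_x hold global
# 0.5-degree row/column indices, and the regional window starts at row 160 / column 201,
# matching the latitude/longitude slices used in write_snap_output below. The real
# helper may differ in detail.
def _assemble_layer_sketch(grid_y, grid_x, values, no_data=-9999.0):
    layer = np.full((61, 71), no_data, dtype=np.float32)  # regional window, no-data filled
    layer[grid_y - 160, grid_x - 201] = values            # scatter the queried gridcell values
    return layer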
def write_snap_output(arr, var, flt_attrs, time_index,
                      experiment="TEST RUN HISTORICAL ISIMIP"):

    NO_DATA = [-9999.0, -9999.0]
    time_units = TIME_UNITS
    calendar = CALENDAR
    nc_out = Path("../nc_outputs")
    time_dim = time_index

    # Regional 0.5-degree window (71 longitudes x 61 latitudes)
    longitude_0 = np.arange(-179.75, 180, 0.5)[201:272]
    latitude_0 = np.arange(89.75, -90, -0.5)[160:221]

    print("\nSaving netCDF4 files")
    print_progress(0, len(var), prefix='Progress:', suffix='Complete')
    for i, v in enumerate(var):
        nc_filename = os.path.join(nc_out, Path(f'{v}.nc4'))
        with dt(nc_filename, mode='w', format='NETCDF4') as rootgrp:

            # dimensions & variables
            rootgrp.createDimension("latitude", latitude_0.size)
            rootgrp.createDimension("longitude", longitude_0.size)
            rootgrp.createDimension("time", None)

            time = rootgrp.createVariable(varname="time",
                                          datatype=np.int32,
                                          dimensions=("time",))
            latitude = rootgrp.createVariable(varname="latitude",
                                              datatype=np.float32,
                                              dimensions=("latitude",))
            longitude = rootgrp.createVariable(varname="longitude",
                                               datatype=np.float32,
                                               dimensions=("longitude",))
            var_ = rootgrp.createVariable(varname=flt_attrs[v][2],
                                          datatype=np.float32,
                                          dimensions=("time", "latitude", "longitude",),
                                          zlib=True,
                                          fill_value=NO_DATA[0],
                                          fletcher32=True)

            # attributes
            # rootgrp
            rootgrp.description = flt_attrs[v][0] + " from CAETÊ-CNP OUTPUT"
            rootgrp.source = "CAETE model outputs - [email protected]"
            rootgrp.experiment = experiment

            # time
            time.units = time_units
            time.calendar = calendar
            time.axis = 'T'

            # lat
            latitude.units = u"degrees_north"
            latitude.long_name = u"latitude"
            latitude.standard_name = u"latitude"
            latitude.axis = u'Y'

            # lon
            longitude.units = "degrees_east"
            longitude.long_name = "longitude"
            longitude.standard_name = "longitude"
            longitude.axis = u'X'

            # var
            var_.long_name = flt_attrs[v][0]
            var_.units = flt_attrs[v][1]
            var_.standard_name = flt_attrs[v][2]
            var_.missing_value = NO_DATA[0]

            # WRITING DATA
            longitude[:] = longitude_0
            latitude[:] = latitude_0
            time[:] = time_dim
            var_[:, :, :] = np.ma.masked_array(arr[i], mask=arr[i] == NO_DATA[0])
        print_progress(i + 1, len(var), prefix='Progress:', suffix='Complete')
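# Usage sketch (not part of the original script): writing a single snapshot with
# write_snap_output(). The variable name and the time value are placeholders; the
# attribute triplets (long_name, units, standard_name) are assumed to come from
# get_var_metadata(), as in the create_ncG* functions above, and the time offset is
# expressed in the TIME_UNITS/CALENDAR defined elsewhere in this script.
def _example_write_snapshot(cleaf_2d):
    arr = (cleaf_2d.reshape(1, 61, 71),)        # one variable with a single time step
    var = ["cleaf"]
    var_attrs = get_var_metadata(var)
    time_index = np.array([0], dtype=np.int32)  # placeholder offset in TIME_UNITS
    write_snap_output(arr, var, var_attrs, time_index)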