class CalcFramework(DynamicModel):
    """Convert a stack of daily pcraster maps to a single netcdf file.

    One output variable per run. For the variable "temperature" the value is
    derived as the mean of the minimum ("tn") and maximum ("tx") map stacks.
    """

    def __init__(self, cloneMapFileName,\
                       pcraster_files, \
                       output, \
                       modelTime):
        DynamicModel.__init__(self)
        #
        pcr.setclone(cloneMapFileName)

        # time object driving the pcraster dynamic framework
        self.modelTime = modelTime
        self.pcraster_files = pcraster_files

        # move to the input folder
        # NOTE(review): os.chdir is a process-wide side effect; the readmap()
        # calls in dynamic() rely on the cwd being the input directory.
        os.chdir(self.pcraster_files['directory'])

        # output attributes; make the output file name absolute w.r.t. the output folder
        self.output = output
        self.output['file_name'] = vos.getFullPath(self.output['file_name'], self.output['folder'])

        # object for reporting
        self.netcdf_report = OutputNetcdf(cloneMapFileName, self.output['description'])
        print(self.output['long_name'])

        # make a netcdf file
        self.netcdf_report.createNetCDF(self.output['file_name'],\
                                        self.output['variable_name'],\
                                        self.output['unit'],\
                                        self.output['long_name'])

    def initial(self):
        pass

    def dynamic(self):

        # re-calculate current model time using current pcraster timestep value
        self.modelTime.update(self.currentTimeStep())

        # open input data
        if self.output['variable_name'] != "temperature":
            pcr_map_values = self.readmap(self.pcraster_files['file_name'])
        else:
            # average temperature = mean of the daily minimum ("tn") and maximum ("tx") stacks
            pcr_map_values = 0.50*(self.readmap("tn") + self.readmap("tx"))

        # reporting
        timeStamp = datetime.datetime(self.modelTime.year,\
                                      self.modelTime.month,\
                                      self.modelTime.day,0)
        self.netcdf_report.data2NetCDF(self.output['file_name'],\
                                       self.output['variable_name'],\
                                       pcr.pcr2numpy(pcr_map_values,vos.MV),\
                                       timeStamp)
class UpscalingFramework(DynamicModel):
    """Upscale PCR-GLOBWB total water storage (tws) from the model resolution
    (5 arc minutes) to one arc degree (the GRACE resolution).

    At every end-of-month timestep the monthly tws field is aggregated as a
    cell-area-weighted average over one-degree id zones, resampled to the
    coarse grid, and written to netcdf.
    """

    def __init__(self, input_files,\
                       output_files,\
                       modelTime,\
                       tmpDir = "/dev/shm/"):
        DynamicModel.__init__(self)
        #
        self.input_files = input_files
        self.output_files = output_files
        self.tmpDir = tmpDir
        self.modelTime = modelTime

        pcr.setclone(self.input_files["model_cell_area"])
        # NOTE(review): clone_map is never used after this assignment
        clone_map = pcr.boolean(1)

        # cell area (m2)
        self.cell_area = vos.readPCRmapClone(\
            self.input_files["model_cell_area"],\
            self.input_files["model_cell_area"],\
            self.tmpDir)

        # resampling factor: ratio between target (coarse) and original (fine) resolution
        self.resample_factor = round(
            vos.getMapAttributes(self.input_files["one_degree_id"],'cellsize')/\
            vos.getMapAttributes(self.input_files["model_cell_area"],'cellsize'))

        # unique ids for upscaling to one degree resolution (grace resolution)
        self.one_degree_id = pcr.nominal(\
                             vos.readPCRmapClone(\
                                 self.input_files["one_degree_id"],\
                                 self.input_files["model_cell_area"],\
                                 self.tmpDir))

        # object for reporting at coarse resolution (i.e. one arc degree - grace resolution)
        self.output = OutputNetcdf(self.input_files["one_degree_id"], self.input_files["model_cell_area"])

        # preparing the netcdf file at coarse resolution:
        self.output.createNetCDF(self.output_files['one_degree_tws']['model'], "pcrglobwb_tws","m")
        #
        # edit some attributes:
        attributeDictionary = {}
        attributeDictionary['description'] = "One degree resolution total water storage (tws), upscaled from PCR-GLOBWB result. "
        self.output.changeAtrribute(self.output_files['one_degree_tws']['model'],\
                                    attributeDictionary)

    def initial(self):
        pass

    def dynamic(self):

        # re-calculate model time using current pcraster timestep value
        self.modelTime.update(self.currentTimeStep())

        # processing / calculating only at the last day of the month:
        if self.modelTime.endMonth == True:

            #~ # open totalWaterStorageThickness (unit: m, monthly average values)
            #~ value_at_5min = vos.netcdf2PCRobjClone(\
            #~                     self.input_files["model_total_water_storage"],\
            #~                     "total_thickness_of_water_storage",\
            #~                     str(self.modelTime.fulldate), useDoy = "end-month")

            # open totalWaterStorageThickness (unit: m, monthly average values)
            value_at_5min = vos.netcdf2PCRobjClone(\
                                self.input_files["model_total_water_storage"],\
                                self.input_files["model_total_water_storage_variable_name"],\
                                str(self.modelTime.fulldate), useDoy = "end-month")

            # upscale to one degree resolution
            # (area-weighted average over each one-degree id zone,
            #  still expressed on the 5 arc minute grid)
            value_at_1deg_but_5min_cell = \
                vos.getValDivZero(\
                    pcr.areatotal(self.cell_area*value_at_5min,\
                                  self.one_degree_id),\
                    pcr.areatotal(self.cell_area,self.one_degree_id), vos.smallNumber)

            # resample from 5 arc minute cells to one degree cells
            # ("max" is safe here because all fine cells in a zone share one value)
            value_at_1deg = vos.regridToCoarse(\
                            pcr.pcr2numpy(value_at_1deg_but_5min_cell,vos.MV),self.resample_factor,"max",vos.MV)
            #
            # reporting
            # NOTE(review): timestepPCR is unused after this assignment
            timestepPCR = self.modelTime.timeStepPCR
            timeStamp = datetime.datetime(self.modelTime.year,\
                                          self.modelTime.month,\
                                          self.modelTime.day,0)
            # write it to netcdf
            self.output.data2NetCDF(self.output_files['one_degree_tws']['model'],\
                                    "pcrglobwb_tws",\
                                    value_at_1deg,\
                                    timeStamp)
class CalcFramework(DynamicModel):
    """Calculate daily and monthly potential evaporation per land cover type.

    Daily values are referencePotET * cropKC (unit: m.day-1); monthly values
    are the daily average over the month. Results are written to one netcdf
    file per land cover type and per aggregation period.

    Fixes vs. the original:
      - the tmp-dir cleanup referenced an undefined local name ``tmpDir``
        (and prefixed the path with a stray '/'), raising NameError;
      - a python-2-only ``print file_name`` statement.
    """

    # land cover types processed by every loop below
    LAND_COVER_TYPES = ["forest", "grassland", "irrPaddy", "irrNonPaddy"]

    def __init__(self, cloneMapFileName,\
                       input_files, \
                       modelTime, \
                       output):
        DynamicModel.__init__(self)

        # set the clone map
        self.cloneMapFileName = cloneMapFileName
        pcr.setclone(self.cloneMapFileName)

        # time variable/object
        self.modelTime = modelTime

        # output folder
        self.output = output

        # prepare temporary directory
        self.tmpDir = self.output['folder'] + "/tmp/"
        try:
            os.makedirs(self.tmpDir)
        except:
            # directory already exists: empty it (best effort)
            os.system('rm -r ' + self.tmpDir + "/*")

        # input files
        self.input_files = input_files

        # object for reporting
        self.netcdf_report = OutputNetcdf(cloneMapFileName, self.output['netcdf_format'], self.output['netcdf_attributes'])

        # make netcdf files for daily/monthly evaporation values:
        self.variable_name = "potential_evaporation"
        self.variable_unit = "m.day-1"    # This must be in daily average value
        for period in ["daily", "monthly"]:
            for lc_type in self.LAND_COVER_TYPES:
                file_name = self._output_file_name(period, lc_type)
                self.netcdf_report.createNetCDF(file_name,\
                                                self.variable_name,\
                                                self.variable_unit,\
                                                self.variable_name)

        # initiate accumulator variables for calculating monthly averages:
        self.monthly_accumulator = {}
        for lc_type in self.LAND_COVER_TYPES:
            self.monthly_accumulator[lc_type] = pcr.scalar(0.0)

    def _output_file_name(self, period, lc_type):
        # one netcdf file per aggregation period ("daily"/"monthly") and land cover type
        return self.output['folder'] + "/" + period + "_potential_evaporation_" + \
               self.variable_unit + "_" + lc_type + ".nc"

    def initial(self):
        pass

    def dynamic(self):

        # re-calculate current model time using current pcraster timestep value
        self.modelTime.update(self.currentTimeStep())

        # open input data
        referencePotET = vos.netcdf2PCRobjClone(\
                             self.input_files['referencePotET']['file_name'], \
                             self.input_files['referencePotET']['variable_name'], \
                             str(self.modelTime.fulldate), \
                             useDoy = None, \
                             cloneMapFileName = self.cloneMapFileName)
        cropKC = {}
        for lc_type in self.LAND_COVER_TYPES:
            cropKC[lc_type] = vos.netcdf2PCRobjClone(\
                                  self.input_files['cropKC'][lc_type], \
                                  self.input_files['cropKC']['variable_name'], \
                                  str(self.modelTime.fulldate),
                                  useDoy = None,
                                  cloneMapFileName = self.cloneMapFileName)

        # calculate potential evaporation (m.day-1)
        potential_evaporation = {}
        for lc_type in self.LAND_COVER_TYPES:
            potential_evaporation[lc_type] = referencePotET * cropKC[lc_type]

        # reporting for daily values
        timeStamp = datetime.datetime(self.modelTime.year,\
                                      self.modelTime.month,\
                                      self.modelTime.day,0)
        for lc_type in self.LAND_COVER_TYPES:
            file_name = self._output_file_name("daily", lc_type)
            self.netcdf_report.data2NetCDF(file_name,\
                                           self.variable_name,\
                                           pcr.pcr2numpy(potential_evaporation[lc_type], vos.MV),\
                                           timeStamp)

        # reporting for monthly values
        # - reset at the beginning of the month:
        if self.modelTime.isFirstDayOfMonth:
            for lc_type in self.LAND_COVER_TYPES:
                self.monthly_accumulator[lc_type] = pcr.scalar(0.0)
        # - accumulate until the last day of the month:
        for lc_type in self.LAND_COVER_TYPES:
            self.monthly_accumulator[lc_type] = self.monthly_accumulator[lc_type] + potential_evaporation[lc_type]
        # - at the end of the month, write the daily average:
        if self.modelTime.endMonth:
            number_of_days = calendar.monthrange(self.modelTime.year, self.modelTime.month)[1]
            for lc_type in self.LAND_COVER_TYPES:
                file_name = self._output_file_name("monthly", lc_type)
                print(file_name)
                self.netcdf_report.data2NetCDF(file_name,\
                                               self.variable_name,\
                                               pcr.pcr2numpy(self.monthly_accumulator[lc_type]/number_of_days, vos.MV),\
                                               timeStamp)
class ResampleFramework(DynamicModel):
    """Resample a netcdf variable from the input resolution to the output
    resolution and write it to a new netcdf file.

    Upscaling (resample_factor > 1) is done as a cell-area-weighted average
    over unique coarse-cell id zones; downscaling simply reads the input on
    the output clone grid.

    Fix vs. the original: a python-2-only ``print "..."`` statement (a
    SyntaxError under python 3) and an unused local were removed.
    """

    def __init__(self, input_netcdf,\
                       output_netcdf,\
                       modelTime,\
                       tmpDir = "/dev/shm/"):
        DynamicModel.__init__(self)

        self.input_netcdf  = input_netcdf
        self.output_netcdf = output_netcdf
        self.tmpDir = tmpDir
        self.modelTime = modelTime

        # a dictionary contains input clone properties (based on the input netcdf file)
        #~ self.input_clone = vos.netcdfCloneAttributes(self.input_netcdf['file_name'],\
        #~                    np.round(self.input_netcdf['cell_resolution']*60.,1),\
        #~                    True)
        pcr.setclone(self.input_netcdf['clone_file'])
        self.input_clone = {}
        self.input_clone['cellsize'] = pcr.clone().cellSize()
        self.input_clone['rows']     = int(pcr.clone().nrRows())
        self.input_clone['cols']     = int(pcr.clone().nrCols())
        self.input_clone['xUL']      = round(pcr.clone().west(), 2)
        self.input_clone['yUL']      = round(pcr.clone().north(), 2)

        # resampling factor: ratio between output and input resolutions
        self.resample_factor = self.output_netcdf["cell_resolution"]/\
                               self.input_netcdf['cell_resolution']

        # clone map
        if self.resample_factor > 1.0:  # upscaling

            # the resample factor must be a rounded value without decimal
            self.resample_factor = round(self.resample_factor)

            # output clone properties
            self.output_netcdf['rows'    ] = int(round(float(self.input_clone['rows'])/float(self.resample_factor)))
            self.output_netcdf['cols'    ] = int(round(float(self.input_clone['cols'])/float(self.resample_factor)))
            self.output_netcdf['cellsize'] = self.output_netcdf["cell_resolution"]
            self.output_netcdf['xUL'     ] = self.input_clone['xUL']
            self.output_netcdf['yUL'     ] = self.input_clone['yUL']

            # get the unique ids for the output resolution
            # - use the clone for the output resolution (only for a temporary purpose)
            pcr.setclone(self.output_netcdf['rows'    ],
                         self.output_netcdf['cols'    ],
                         self.output_netcdf['cellsize'],
                         self.output_netcdf['xUL'     ],
                         self.output_netcdf['yUL'     ])
            # - unique_ids in a numpy object
            cell_unique_ids = pcr.pcr2numpy(pcr.scalar(pcr.uniqueid(pcr.boolean(1.))),vos.MV)

            # the remaining pcraster calculations are performed at the input resolution
            pcr.setclone(self.input_clone['rows'    ],
                         self.input_clone['cols'    ],
                         self.input_clone['cellsize'],
                         self.input_clone['xUL'     ],
                         self.input_clone['yUL'     ])

            # clone map file
            self.clone_map_file = self.input_netcdf['clone_file']

            # cell unique ids in a pcraster object (coarse ids spread over the fine grid)
            self.unique_ids = pcr.nominal(pcr.numpy2pcr(pcr.Scalar,
                              vos.regridData2FinerGrid(self.resample_factor, cell_unique_ids, vos.MV), vos.MV))

            # cell area (m2)
            self.cell_area = vos.readPCRmapClone(\
                 self.input_netcdf["cell_area"],\
                 self.clone_map_file,\
                 self.tmpDir)

        else:  # downscaling / resampling to smaller cell length

            # all pcraster calculations are performed at the output resolution
            pcr.setclone(self.output_netcdf['rows'    ],
                         self.output_netcdf['cols'    ],
                         self.output_netcdf['cellsize'],
                         self.output_netcdf['xUL'     ],
                         self.output_netcdf['yUL'     ])

            # clone map file
            self.clone_map_file = self.output_netcdf['clone_file']

        # an object for netcdf reporting
        self.output = OutputNetcdf(mapattr_dict = self.output_netcdf,\
                                   cloneMapFileName = None,\
                                   netcdf_format = self.output_netcdf['format'],\
                                   netcdf_zlib = self.output_netcdf['zlib'],\
                                   netcdf_attribute_dict = self.output_netcdf['netcdf_attribute'],\
                                   netcdf_attribute_description = None)

        # preparing the netcdf file at coarse resolution:
        self.output.createNetCDF(self.output_netcdf['file_name'],\
                                 self.output_netcdf['variable_name'],\
                                 self.output_netcdf['variable_unit'])

    def initial(self):
        pass

    def dynamic(self):

        # update model time using the current pcraster timestep value
        self.modelTime.update(self.currentTimeStep())

        # reading
        # NOTE(review): data_available is always True here, so the else branch
        # is unreachable; kept as a placeholder for a real availability check.
        data_available = True
        if data_available:
            input_value = vos.netcdf2PCRobjClone(ncFile    = self.input_netcdf['file_name'],
                                                 varName   = self.input_netcdf['variable_name'],
                                                 dateInput = str(self.modelTime.fulldate),
                                                 useDoy    = None,
                                                 cloneMapFileName = self.clone_map_file)
            data_available = True
        else:
            print("No values are available for this date: "+str(self.modelTime))
            data_available = False

        if data_available: output_value = input_value

        # upscaling
        if data_available and self.resample_factor > 1.0:

            # upscaling using cell area
            # NOTE(review): the masked cell_area below is computed but unused;
            # the aggregation uses the full self.cell_area — confirm intent.
            cell_area = pcr.ifthen(pcr.defined(output_value), self.cell_area)
            output_value_in_pcraster = \
                vos.getValDivZero(\
                    pcr.areatotal(output_value*self.cell_area, self.unique_ids),\
                    pcr.areatotal(self.cell_area, self.unique_ids), vos.smallNumber)

            # resample to the output clone resolution
            # ("max" is safe: all fine cells within a zone carry the same value)
            output_value = vos.regridToCoarse(pcr.pcr2numpy(output_value_in_pcraster, vos.MV),
                                              self.resample_factor, "max", vos.MV)

        # reporting
        if data_available:

            # time stamp
            timeStamp = datetime.datetime(self.modelTime.year,\
                                          self.modelTime.month,\
                                          self.modelTime.day,0)
            # write to netcdf
            self.output.data2NetCDF(self.output_netcdf['file_name'],\
                                    self.output_netcdf['variable_name'],\
                                    output_value,\
                                    timeStamp)

        # closing the file at the end of the run
        if self.modelTime.isLastTimeStep(): self.output.close(self.output_netcdf['file_name'])
class CalcFramework(DynamicModel):
    """Convert daily pcraster map stacks to a netcdf file, with optional
    reprojection (EPSG codes) and resampling.

    Special cases:
      - "temperature": mean of the minimum ("tn") and maximum ("tx") stacks,
        after forcing max >= min;
      - "maximum_temperature": max of the two stacks;
      - "precipitation": values converted from mm.day-1 to m.day-1.

    Fix vs. the original: the tmp-dir cleanup referenced an undefined local
    name ``tmpDir``; the resulting NameError was swallowed by the bare except,
    so an existing temporary directory was never emptied.
    """

    def __init__(self, cloneMapFileName,\
                       pcraster_files, \
                       modelTime, \
                       output,
                       inputEPSG  = None,
                       outputEPSG = None,
                       resample_method = None):
        DynamicModel.__init__(self)

        # set the clone map
        self.cloneMapFileName = cloneMapFileName
        pcr.setclone(self.cloneMapFileName)

        # time variable/object
        self.modelTime = modelTime

        # output file name, folder name, etc.
        self.output = output
        self.output['file_name'] = vos.getFullPath(self.output['file_name'], self.output['folder'])

        # input and output projection/coordinate systems
        self.inputEPSG  = inputEPSG
        self.outputEPSG = outputEPSG
        self.resample_method = resample_method

        # prepare temporary directory (create it, then empty any leftovers)
        self.tmpDir = output['folder']+"/tmp/"
        try:
            os.makedirs(self.tmpDir)
            os.system('rm -r ' + self.tmpDir + "/*")
        except:
            pass

        # pcraster input files
        self.pcraster_files = pcraster_files
        # - the begining part of pcraster file names (e.g. "pr" for "pr000000.001")
        self.pcraster_file_name = self.pcraster_files['directory']+"/"+\
                                  self.pcraster_files['file_name']

        # object for reporting
        self.netcdf_report = OutputNetcdf(mapattr_dict = None,\
                                          cloneMapFileName = cloneMapFileName,\
                                          netcdf_format = "NETCDF3_CLASSIC",\
                                          netcdf_zlib = False,\
                                          netcdf_attribute_dict = None,\
                                          netcdf_attribute_description = self.output['description'])

        # make a netcdf file
        self.netcdf_report.createNetCDF(self.output['file_name'],\
                                        self.output['variable_name'],\
                                        self.output['unit'],\
                                        self.output['long_name'])

    def initial(self):
        pass

    def _read_map(self, file_name):
        # read one pcraster map, clipped/reprojected to the clone map
        return vos.readPCRmapClone(v = file_name,\
                                   cloneMapFileName = self.cloneMapFileName,\
                                   tmpDir = self.tmpDir,\
                                   absolutePath = None,
                                   isLddMap = False,\
                                   cover = None,\
                                   isNomMap = False,\
                                   inputEPSG  = self.inputEPSG,\
                                   outputEPSG = self.outputEPSG,\
                                   method = self.resample_method)

    def dynamic(self):

        # re-calculate current model time using current pcraster timestep value
        self.modelTime.update(self.currentTimeStep())

        # for variables other than temperature and maximum temperature, just read them directly
        if self.output['variable_name'] != "temperature" and self.output['variable_name'] != "maximum_temperature":
            pcraster_map_file_name = pcr.framework.frameworkBase.generateNameT(self.pcraster_file_name,\
                                                                               self.modelTime.timeStepPCR)
            pcr_map_values = self._read_map(pcraster_map_file_name)

        # for temperature and maximum temperature, we have to make sure that
        # maximum temperature is higher than minimum temperature
        if self.output['variable_name'] == "temperature" or self.output['variable_name'] == "maximum_temperature":

            min_map_file_name = pcr.framework.frameworkBase.generateNameT(self.pcraster_files['directory']+"/tn", self.modelTime.timeStepPCR)
            max_map_file_name = pcr.framework.frameworkBase.generateNameT(self.pcraster_files['directory']+"/tx", self.modelTime.timeStepPCR)
            min_map_values = self._read_map(min_map_file_name)
            max_map_values = self._read_map(max_map_file_name)

            # make sure that maximum values are higher than minimum values
            max_map_values = pcr.max(min_map_values, max_map_values)

            if self.output['variable_name'] == "temperature":
                pcr_map_values = 0.50*(min_map_values + max_map_values)
            if self.output['variable_name'] == "maximum_temperature":
                pcr_map_values = pcr.max(min_map_values, max_map_values)

        # for precipitation, converting the unit from mm.day-1 to m.day-1
        if self.output['variable_name'] == "precipitation": pcr_map_values *= 0.001

        # reporting
        timeStamp = datetime.datetime(self.modelTime.year,\
                                      self.modelTime.month,\
                                      self.modelTime.day,0)
        self.netcdf_report.data2NetCDF(self.output['file_name'],\
                                       self.output['variable_name'],\
                                       pcr.pcr2numpy(pcr_map_values,vos.MV),\
                                       timeStamp)
class CalcFramework(DynamicModel):
    """Convert daily pcraster map stacks to a netcdf file, optionally
    reprojecting between EPSG coordinate systems.

    "temperature" is computed as the mean of the "tn" (min) and "tx" (max)
    stacks after clamping max >= min; "maximum_temperature" is the clamped
    maximum; "precipitation" is converted from mm.day-1 to m.day-1.

    Fix vs. the original: ``os.system('rm -r ' + tmpDir + "/*")`` used an
    undefined local ``tmpDir`` — the NameError was silently swallowed by the
    bare except, so a pre-existing temporary directory was never emptied.
    Now uses ``self.tmpDir``.
    """

    def __init__(self, cloneMapFileName,\
                       pcraster_files, \
                       modelTime, \
                       output,
                       inputEPSG  = None,
                       outputEPSG = None,
                       resample_method = None):
        DynamicModel.__init__(self)

        # set the clone map
        self.cloneMapFileName = cloneMapFileName
        pcr.setclone(self.cloneMapFileName)

        # time variable/object
        self.modelTime = modelTime

        # output file name, folder name, etc.
        self.output = output
        self.output['file_name'] = vos.getFullPath(self.output['file_name'], self.output['folder'])

        # input and output projection/coordinate systems
        self.inputEPSG  = inputEPSG
        self.outputEPSG = outputEPSG
        self.resample_method = resample_method

        # prepare temporary directory (create, then empty any leftovers)
        self.tmpDir = output['folder'] + "/tmp/"
        try:
            os.makedirs(self.tmpDir)
            os.system('rm -r ' + self.tmpDir + "/*")
        except:
            pass

        # pcraster input files
        self.pcraster_files = pcraster_files
        # - the begining part of pcraster file names (e.g. "pr" for "pr000000.001")
        self.pcraster_file_name = self.pcraster_files['directory']+"/"+\
                                  self.pcraster_files['file_name']

        # object for reporting
        self.netcdf_report = OutputNetcdf(mapattr_dict = None,\
                                          cloneMapFileName = cloneMapFileName,\
                                          netcdf_format = "NETCDF3_CLASSIC",\
                                          netcdf_zlib = False,\
                                          netcdf_attribute_dict = None,\
                                          netcdf_attribute_description = self.output['description'])

        # make a netcdf file
        self.netcdf_report.createNetCDF(self.output['file_name'],\
                                        self.output['variable_name'],\
                                        self.output['unit'],\
                                        self.output['long_name'])

    def initial(self):
        pass

    def dynamic(self):

        # re-calculate current model time using current pcraster timestep value
        self.modelTime.update(self.currentTimeStep())

        # for variables other than temperature and maximum temperature, just read them directly
        if self.output['variable_name'] != "temperature" and self.output['variable_name'] != "maximum_temperature":
            pcraster_map_file_name = pcr.framework.frameworkBase.generateNameT(self.pcraster_file_name,\
                                                                               self.modelTime.timeStepPCR)
            pcr_map_values = vos.readPCRmapClone(v = pcraster_map_file_name,\
                                                 cloneMapFileName = self.cloneMapFileName,\
                                                 tmpDir = self.tmpDir,\
                                                 absolutePath = None,
                                                 isLddMap = False,\
                                                 cover = None,\
                                                 isNomMap = False,\
                                                 inputEPSG  = self.inputEPSG,\
                                                 outputEPSG = self.outputEPSG,\
                                                 method = self.resample_method)

        # for temperature and maximum temperature, we have to make sure that
        # maximum temperature is higher than minimum temperature
        if self.output['variable_name'] == "temperature" or self.output['variable_name'] == "maximum_temperature":

            min_map_file_name = pcr.framework.frameworkBase.generateNameT(
                self.pcraster_files['directory'] + "/tn", self.modelTime.timeStepPCR)
            max_map_file_name = pcr.framework.frameworkBase.generateNameT(
                self.pcraster_files['directory'] + "/tx", self.modelTime.timeStepPCR)
            min_map_values = vos.readPCRmapClone(v = min_map_file_name,\
                                                 cloneMapFileName = self.cloneMapFileName,\
                                                 tmpDir = self.tmpDir,\
                                                 absolutePath = None,
                                                 isLddMap = False,\
                                                 cover = None,\
                                                 isNomMap = False,\
                                                 inputEPSG  = self.inputEPSG,\
                                                 outputEPSG = self.outputEPSG,\
                                                 method = self.resample_method)
            max_map_values = vos.readPCRmapClone(v = max_map_file_name,\
                                                 cloneMapFileName = self.cloneMapFileName,\
                                                 tmpDir = self.tmpDir,\
                                                 absolutePath = None,
                                                 isLddMap = False,\
                                                 cover = None,\
                                                 isNomMap = False,\
                                                 inputEPSG  = self.inputEPSG,\
                                                 outputEPSG = self.outputEPSG,\
                                                 method = self.resample_method)

            # make sure that maximum values are higher than minimum values
            max_map_values = pcr.max(min_map_values, max_map_values)

            if self.output['variable_name'] == "temperature":
                pcr_map_values = 0.50*(min_map_values + max_map_values)
            if self.output['variable_name'] == "maximum_temperature":
                pcr_map_values = pcr.max(min_map_values, max_map_values)

        # for precipitation, converting the unit from mm.day-1 to m.day-1
        if self.output['variable_name'] == "precipitation": pcr_map_values *= 0.001

        # reporting
        timeStamp = datetime.datetime(self.modelTime.year,\
                                      self.modelTime.month,\
                                      self.modelTime.day,0)
        self.netcdf_report.data2NetCDF(self.output['file_name'],\
                                       self.output['variable_name'],\
                                       pcr.pcr2numpy(pcr_map_values,vos.MV),\
                                       timeStamp)
class ConvertVolumeToHeightFramework(DynamicModel):
    """Convert monthly gross/netto water demand volumes to water heights.

    At the last day of each month both fields are read from netcdf, covered
    with zero, divided by the cell area (volume -> height), and written to a
    single output netcdf file holding both variables.

    Fix vs. the original: a python-2-only ``print`` statement (fused onto the
    cellsize assignment) replaced with a print() call.
    """

    def __init__(self, input_netcdf,\
                       output_netcdf,\
                       modelTime,\
                       tmpDir = "/dev/shm/"):
        DynamicModel.__init__(self)

        self.input_netcdf  = input_netcdf
        self.output_netcdf = output_netcdf
        self.tmpDir = tmpDir
        self.modelTime = modelTime

        # set clone
        self.clone_map_file = self.input_netcdf['cell_area']
        pcr.setclone(self.clone_map_file)
        self.clone = {}
        self.clone['cellsize'] = pcr.clone().cellSize()
        print(self.clone['cellsize'])
        self.clone['rows'] = int(pcr.clone().nrRows())
        self.clone['cols'] = int(pcr.clone().nrCols())
        self.clone['xUL']  = round(pcr.clone().west(), 2)
        self.clone['yUL']  = round(pcr.clone().north(), 2)

        # cell area (unit: m2)
        self.cell_area = pcr.readmap(self.input_netcdf['cell_area'])

        # an object for netcdf reporting
        self.output = OutputNetcdf(self.clone, self.output_netcdf)

        # preparing the netcdf file and make variable:
        self.output.createNetCDF(self.output_netcdf['file_name'],
                                 self.output_netcdf['gross_variable_name'],
                                 self.output_netcdf['variable_unit'])
        self.output.addNewVariable(self.output_netcdf['file_name'],
                                   self.output_netcdf['netto_variable_name'],
                                   self.output_netcdf['variable_unit'])

    def initial(self):
        pass

    def dynamic(self):

        # update model time using the current pcraster timestep value
        self.modelTime.update(self.currentTimeStep())

        # reading gross and netto values:
        if self.modelTime.isLastDayOfMonth():
            gross_value = vos.netcdf2PCRobjClone(ncFile    = self.input_netcdf['gross_file_name'],
                                                 varName   = self.input_netcdf['gross_variable_name'],
                                                 dateInput = str(self.modelTime.fulldate))
            netto_value = vos.netcdf2PCRobjClone(ncFile    = self.input_netcdf['netto_file_name'],
                                                 varName   = self.input_netcdf['netto_variable_name'],
                                                 dateInput = str(self.modelTime.fulldate))
            # covering with zero and convert the unit to water height
            # (volume divided by cell area)
            gross_value = pcr.cover(gross_value, 0.0)/self.cell_area
            netto_value = pcr.cover(netto_value, 0.0)/self.cell_area

        # reporting
        if self.modelTime.isLastDayOfMonth():

            # put the output in a dictionary
            output = {}
            output[self.output_netcdf['gross_variable_name']] = pcr.pcr2numpy(gross_value, vos.MV)
            output[self.output_netcdf['netto_variable_name']] = pcr.pcr2numpy(netto_value, vos.MV)

            # time stamp
            timeStamp = datetime.datetime(self.modelTime.year,\
                                          self.modelTime.month,\
                                          self.modelTime.day,0)
            # to netcdf
            self.output.dataList2NetCDF(self.output_netcdf['file_name'],\
                                        [self.output_netcdf['gross_variable_name'], self.output_netcdf['netto_variable_name']],\
                                        output,\
                                        timeStamp)

        # closing the file at the end of the run
        if self.modelTime.isLastTimeStep(): self.output.close(self.output_netcdf['file_name'])
class GraceEvaluation(DynamicModel):
    """Evaluate PCR-GLOBWB total water storage (tws) against GRACE data.

    Pre-processes the GRACE netcdf (scale-factor correction, monthly and
    yearly anomalies) and the model output via cdo shell commands, then at
    every end-of-month timestep aggregates both to basin scale and writes
    them to netcdf. At the last timestep annual anomalies are prepared and
    the model is scored against GRACE (bias, MAE, correlation, relative
    interquantile range error, relative MAE) at monthly/annual and
    basin/one-degree resolutions.
    """

    def __init__(self, input_files,\
                       output_files,\
                       modelTime,\
                       main_tmp_dir = "/dev/shm/"):
        DynamicModel.__init__(self)

        self.input_files  = input_files
        self.output_files = output_files
        self.modelTime = modelTime

        # main temporary directory (randomized to avoid collisions between runs)
        self.main_tmp_dir = main_tmp_dir+"/"+vos.get_random_word()
        # make the temporary directory if not exist yet
        try:
            os.makedirs(self.main_tmp_dir)
        except:
            os.system('rm -r '+str(self.main_tmp_dir)+'*')
            os.makedirs(self.main_tmp_dir)

        # clone map for pcraster process - depend on the resolution of the basin/catchment map
        pcr.setclone(self.input_files["basin30minmap"])
        self.clone_map = pcr.boolean(1.0)
        #
        # catchment ids map (only cells with an id > 0 are kept)
        self.catchment = pcr.nominal(\
                         pcr.readmap(self.input_files["basin30minmap"]))
        self.catchment = pcr.ifthen(pcr.scalar(self.catchment) > 0.0,\
                                    self.catchment)

        # cell area map
        self.cell_area = pcr.cover(pcr.readmap(self.input_files["area30min_map"]), 0.0)

        # prepare grace monthly and annual anomaly time series
        self.pre_process_grace_file()
        # prepare model monthly and annual anomaly time series
        self.pre_process_model_file()

        # prepare object for writing netcdf files:
        self.output = OutputNetcdf(self.input_files["area30min_map"])
        self.output.createNetCDF(self.output_files['basinscale_tws_month_anomaly']['grace'], "lwe_thickness","m")
        self.output.createNetCDF(self.output_files['basinscale_tws_month_anomaly']['model'], "pcrglobwb_tws","m")
        self.output.createNetCDF(self.output_files['basinscale_tws_annua_anomaly']['grace'], "lwe_thickness","m")
        self.output.createNetCDF(self.output_files['basinscale_tws_annua_anomaly']['model'], "pcrglobwb_tws","m")

    def pre_process_grace_file(self):
        """Correct the original GRACE file and derive anomaly time series.

        Applies the scale factor and converts cm to m (factor 0.01) via cdo,
        then derives monthly anomalies (value minus time mean) and yearly
        anomalies (year mean of the monthly anomalies).
        """

        # using the scale factor to correct the original monthly grace file
        # (as recommended by Landerer & Swenson, 2012; see also http://grace.jpl.nasa.gov/data/gracemonthlymassgridsland/)
        grace_file  = self.input_files["grace_total_water_storage_original"]   # unit: cm
        scale_file  = self.input_files["grace_scale_factor"]                   # unit: cm
        output_file = self.output_files['one_degree_tws']['grace']             # unit: m
        start_year  = str(self.modelTime.startTime.year)
        end_year    = str(self.modelTime.endTime.year)
        print("\n")
        # NOTE(review): leftover debug print — consider removing
        print("test")
        cdo_command = "cdo invertlat -selyear,"+str(start_year)+"/"+str(end_year)+\
                      " -sellonlatbox,-180,180,-90,90"+\
                      " -mulc,0.01"+\
                      " -mul -selname,lwe_thickness "+str(grace_file)+\
                      " -selname,SCALE_FACTOR "+str(scale_file)+\
                      " "+str(output_file)
        print(cdo_command); os.system(cdo_command); print("\n")

        # calculate monthly anomaly:
        input_file  = self.output_files['one_degree_tws']['grace']
        output_file = self.output_files['one_degree_tws_month_anomaly']['grace']
        print("\n")
        cdo_command = "cdo sub "+str(input_file)+" -timmean"+" "+str(input_file)+" "+str(output_file)
        print(cdo_command); os.system(cdo_command); print("\n")

        # calculate yearly anomaly:
        input_file  = self.output_files['one_degree_tws_month_anomaly']['grace']
        output_file = self.output_files['one_degree_tws_annua_anomaly']['grace']
        print("\n")
        cdo_command = "cdo yearmean "+str(input_file)+" "+str(output_file)
        print(cdo_command); os.system(cdo_command); print("\n")

    def pre_process_model_file(self):
        """Derive monthly and yearly anomaly time series from the model tws file."""

        # calculate monthly anomaly:
        input_file  = self.output_files['one_degree_tws']['model']
        output_file = self.output_files['one_degree_tws_month_anomaly']['model']
        print("\n")
        cdo_command = "cdo sub "+str(input_file)+" -timmean"+" "+str(input_file)+" "+str(output_file)
        print(cdo_command); os.system(cdo_command); print("\n")

        # calculate yearly anomaly:
        input_file  = self.output_files['one_degree_tws_month_anomaly']['model']
        output_file = self.output_files['one_degree_tws_annua_anomaly']['model']
        print("\n")
        cdo_command = "cdo yearmean "+str(input_file)+" "+str(output_file)
        print(cdo_command); os.system(cdo_command); print("\n")

    def initial(self):
        pass

    def dynamic(self):

        # re-calculate model time using current pcraster timestep value
        self.modelTime.update(self.currentTimeStep())

        # at the end of every month:
        # - aggregate/average the value at basin scale:
        # - then report it to the netcdf file:
        if self.modelTime.endMonth == True:

            # values from grace:
            grace_value = pcr.cover(vos.netcdf2PCRobjClone(\
                          self.output_files['one_degree_tws_month_anomaly']['grace'],\
                          "lwe_thickness",\
                          str(self.modelTime.fulldate), "mid-month",\
                          self.input_files["basin30minmap"]), 0.0)
            # basin-scale area-weighted average of the grace anomaly
            basin_grace = pcr.areatotal(self.cell_area * grace_value, self.catchment)/\
                          pcr.areatotal(self.cell_area, self.catchment)

            # values from pcr-globwb simulation:
            model_value = pcr.cover(vos.netcdf2PCRobjClone(\
                          self.output_files['one_degree_tws_month_anomaly']['model'],\
                          "pcrglobwb_tws",\
                          str(self.modelTime.fulldate), "end-month",\
                          self.input_files["basin30minmap"]), 0.0)
            # basin-scale area-weighted average of the model anomaly
            basin_model = pcr.areatotal(self.cell_area * model_value, self.catchment)/\
                          pcr.areatotal(self.cell_area, self.catchment)

            # reporting
            timeStamp = datetime.datetime(self.modelTime.year,\
                                          self.modelTime.month,\
                                          self.modelTime.day,0)
            # write grace
            self.output.data2NetCDF(self.output_files["basinscale_tws_month_anomaly"]['grace'],\
                                    "lwe_thickness",\
                                    pcr.pcr2numpy(basin_grace,vos.MV),\
                                    timeStamp)
            # write model
            self.output.data2NetCDF(self.output_files["basinscale_tws_month_anomaly"]['model'],\
                                    "pcrglobwb_tws",\
                                    pcr.pcr2numpy(basin_model,vos.MV),\
                                    timeStamp)

        # at the last dynamic time step
        # - prepare annual anomaly time series
        # - evaluate the pcr-globwb model results to grace time series (monthly and annual)
        if self.modelTime.currTime == self.modelTime.endTime:

            # prepare annual anomaly time series
            self.prepare_annual_anomaly()

            # evaluate the pcr-globwb model results to grace time series
            # (monthly & annual resolution - basin & one degree scale)
            self.evaluate_to_grace_data()

    def prepare_annual_anomaly(self):
        """Collapse each monthly anomaly file to yearly means via `cdo yearmean`."""

        # prepare one degree - grace - annual anomaly time series
        input_file  = self.output_files["one_degree_tws_month_anomaly"]['grace']
        output_file = self.output_files["one_degree_tws_annua_anomaly"]['grace']
        print("\n")
        cdo_command = "cdo yearmean "+str(input_file)+" "+str(output_file)
        print(cdo_command); os.system(cdo_command); print("\n")

        # prepare one degree - model - annual anomaly time series
        input_file  = self.output_files["one_degree_tws_month_anomaly"]['model']
        output_file = self.output_files["one_degree_tws_annua_anomaly"]['model']
        print("\n")
        cdo_command = "cdo yearmean "+str(input_file)+" "+str(output_file)
        print(cdo_command); os.system(cdo_command); print("\n")

        # prepare basin scale - grace - annual anomaly time series
        input_file  = self.output_files["basinscale_tws_month_anomaly"]['grace']
        output_file = self.output_files["basinscale_tws_annua_anomaly"]['grace']
        print("\n")
        cdo_command = "cdo yearmean "+str(input_file)+" "+str(output_file)
        print(cdo_command); os.system(cdo_command); print("\n")

        # prepare basin scale - model - annual anomaly time series
        input_file  = self.output_files["basinscale_tws_month_anomaly"]['model']
        output_file = self.output_files["basinscale_tws_annua_anomaly"]['model']
        print("\n")
        cdo_command = "cdo yearmean "+str(input_file)+" "+str(output_file)
        print(cdo_command); os.system(cdo_command); print("\n")

    def evaluate_to_grace_data(self):
        """Run the evaluation for all four scale/resolution combinations."""

        # one degree and monthly resolution
        self.evaluation(self.output_files['one_degree_tws_month_anomaly']['model'],\
                        self.output_files['one_degree_tws_month_anomaly']['grace'],\
                        self.output_files['one_degree_month_analyses'])

        # basin and monthly resolution
        self.evaluation(self.output_files['basinscale_tws_month_anomaly']['model'],\
                        self.output_files['basinscale_tws_month_anomaly']['grace'],\
                        self.output_files['basinscale_month_analyses'])

        # one degree and annual resolution
        self.evaluation(self.output_files['one_degree_tws_annua_anomaly']['model'],\
                        self.output_files['one_degree_tws_annua_anomaly']['grace'],\
                        self.output_files['one_degree_annua_analyses'])

        # basin and annual resolution
        self.evaluation(self.output_files['basinscale_tws_annua_anomaly']['model'],\
                        self.output_files['basinscale_tws_annua_anomaly']['grace'],\
                        self.output_files['basinscale_annua_analyses'])

    def evaluation(self,model_file,grace_file,output_files):
        """Score model against grace via cdo: bias, mae, correlation,
        relative interquantile range error, and relative mae.

        Parameters
        ----------
        model_file : path of the model anomaly netcdf file
        grace_file : path of the grace anomaly netcdf file
        output_files : dict with output paths keyed by metric name
        """

        # bias: difference of the time means
        output_file = output_files['bias']
        print("\n")
        cdo_command = "cdo sub -timmean "+str(model_file)+" -timmean "+str(grace_file)+" "+str(output_file)
        print(cdo_command); os.system(cdo_command); print("\n")

        # mae: time mean of the absolute differences
        output_file = output_files['mae']
        print("\n")
        cdo_command = "cdo timmean -abs -sub "+str(model_file)+" "+str(grace_file)+" "+str(output_file)
        print(cdo_command); os.system(cdo_command); print("\n")

        # correlation over time (timcor); unit forced to 1
        output_file = output_files['correlation']
        print("\n")
        cdo_command = "cdo setunit,1 -timcor "+str(grace_file)+" "+str(model_file)+" "+str(output_file)
        print(cdo_command); os.system(cdo_command); print("\n")

        # relative interquantile range error
        # - model 5th-95th percentile range (timpctl needs -timmin/-timmax as bound inputs)
        model_range_file = output_files['rel_iqtil_error']+".inter_qtile.model.nc"
        print("\n")
        cdo_command = "cdo sub "+\
                      "-timpctl,95 "+str(model_file)+" "+\
                      "-timmin " +str(model_file)+" "+\
                      "-timmax " +str(model_file)+" "+\
                      "-timpctl,5 "+str(model_file)+" "+\
                      "-timmin " +str(model_file)+" "+\
                      "-timmax " +str(model_file)+" "+model_range_file
        print(cdo_command); os.system(cdo_command); print("\n")
        # - grace 5th-95th percentile range
        grace_range_file = output_files['rel_iqtil_error']+".inter_qtile.grace.nc"
        cdo_command = "cdo sub "+\
                      "-timpctl,95 "+str(grace_file)+" "+\
                      "-timmin " +str(grace_file)+" "+\
                      "-timmax " +str(grace_file)+" "+\
                      "-timpctl,5 "+str(grace_file)+" "+\
                      "-timmin " +str(grace_file)+" "+\
                      "-timmax " +str(grace_file)+" "+grace_range_file
        print(cdo_command); os.system(cdo_command); print("\n")
        # - (model range - grace range) / grace range
        output_file = output_files['rel_iqtil_error']
        print("\n")
        cdo_command = "cdo setunit,1 -div "+\
                      "-sub "+model_range_file+" "+grace_range_file+" "+\
                      " "+grace_range_file+" "+output_file
        print(cdo_command); os.system(cdo_command); print("\n")

        # relative mae: mae normalized by the grace interquantile range
        mae_file         = output_files['mae']
        grace_range_file = output_files['rel_iqtil_error']+".inter_qtile.grace.nc"
        output_file      = output_files['relative_mae']
        print("\n")
        cdo_command = "cdo div "+\
                      str(mae_file)+" "+\
                      str(grace_range_file)+" "+\
                      str(output_file)
        print(cdo_command); os.system(cdo_command); print("\n")