def main():
    """Set up logging, then run the calculation model over all time steps."""
    import shutil  # local import: used for cleaning a pre-existing log directory

    # prepare logger and its directory
    # - recreate the log directory when it already exists; the previous code
    #   used a bare `except:` that shelled out to `rm -r`, which hid every
    #   real error (e.g. permission problems) and was shell-injection prone
    log_file_location = output['folder'] + "/log/"
    if os.path.isdir(log_file_location):
        shutil.rmtree(log_file_location)
    os.makedirs(log_file_location)
    vos.initialize_logging(log_file_location)

    # time object; timeStep info: year, month, day, doy, hour, etc
    modelTime = ModelTime()
    modelTime.getStartEndTimeSteps(startDate, endDate, nrOfTimeSteps)

    # build the calculation model and run it within the dynamic framework
    calculationModel = CalcFramework(cloneMapFileName,
                                     pcraster_files,
                                     modelTime,
                                     output,
                                     inputEPSG, outputEPSG, resample_method)
    dynamic_framework = DynamicFramework(calculationModel, modelTime.nrOfTimeSteps)
    dynamic_framework.setQuiet(True)
    dynamic_framework.run()
def main():
    """Parse command-line arguments, prepare the output folder, and run the
    deterministic_runner over the requested period.

    Expected command-line arguments:
      1: input file (netcdf)          5: output folder
      2: netcdf variable name         6: output file name (within the folder)
      3: start date (YYYY-MM-DD)      7: output variable name
      4: end date   (YYYY-MM-DD)      8: output variable unit
    """

    # input file and the netcdf variable name to read from it
    # (the hard-coded defaults that used to precede these lines were dead
    # code: they were always overwritten by the command-line arguments)
    input_file          = sys.argv[1]
    input_variable_name = sys.argv[2]

    # timeStep info: year, month, day, doy, hour, etc
    start_date = sys.argv[3]
    end_date   = sys.argv[4]
    currTimeStep = ModelTime()
    currTimeStep.getStartEndTimeSteps(start_date, end_date)

    # output folder from this calculation
    output_folder = sys.argv[5]
    # - if it exists, clean the previous output directory
    if os.path.isdir(output_folder):
        shutil.rmtree(output_folder)
    # - make the output folder
    os.makedirs(output_folder)

    # output file, variable name and unit
    output_file   = output_folder + "/" + sys.argv[6]
    variable_name = sys.argv[7]
    variable_unit = sys.argv[8]

    # logger
    # - make a log directory and initialize logging
    log_file_directory = output_folder + "/" + "log/"
    os.makedirs(log_file_directory)
    vos.initialize_logging(log_file_directory)

    # run the deterministic_runner within the dynamic framework
    logger.info('Starting the calculation.')
    deterministic_runner = DeterministicRunner(currTimeStep,
                                               input_file, input_variable_name,
                                               output_file, variable_name, variable_unit)
    dynamic_framework = DynamicFramework(deterministic_runner, currTimeStep.nrOfTimeSteps)
    dynamic_framework.setQuiet(True)
    dynamic_framework.run()
def main():
    """Prepare the output, log and tmp directories, then run the calculation
    model over all time steps."""
    import shutil  # local import: used for removing a pre-existing output folder

    # prepare the output directory
    # - if a previous output directory exists, remove it first (the previous
    #   code used a bare `except:` that shelled out to `rm -r`, silently
    #   hiding every real error)
    output_folder = pcraster_output['output_folder']
    if os.path.isdir(output_folder):
        shutil.rmtree(output_folder)
    os.makedirs(output_folder)
    # - directory for storing global extent output files
    os.makedirs(output_folder + "/global/")
    # - directory for storing regional extent output files
    os.makedirs(output_folder + "/regional/")

    # prepare logger and its directory
    # - only skip creation when the directory already exists; the previous
    #   bare `except: pass` would also have hidden real failures
    log_file_location = output_folder + "/log/"
    if not os.path.isdir(log_file_location):
        os.makedirs(log_file_location)
    vos.initialize_logging(log_file_location)

    # prepare a temporary folder
    tmp_file_location = output_folder + "/tmp/"
    if not os.path.isdir(tmp_file_location):
        os.makedirs(tmp_file_location)

    # time object; timeStep info: year, month, day, doy, hour, etc
    modelTime = ModelTime()
    modelTime.getStartEndTimeSteps(startDate, endDate)

    # build the calculation model and run it within the dynamic framework
    calculationModel = CalcFramework(globeCloneMapFileName, localCloneMapFileName,
                                     netcdf_input,
                                     pcraster_output,
                                     modelTime,
                                     inputEPSG, outputEPSG, resample_method)
    dynamic_framework = DynamicFramework(calculationModel, modelTime.nrOfTimeSteps)
    dynamic_framework.setQuiet(True)
    dynamic_framework.run()
def main():
    """Recreate the output folder, set up logging, and run the area-operation
    model through the dynamic framework."""

    # start from a clean output folder
    if os.path.exists(output_folder):
        shutil.rmtree(output_folder)
    os.makedirs(output_folder)

    # logging goes into a dedicated sub-directory of the output folder
    log_dir = output_folder + "/log/"
    os.makedirs(log_dir)
    vos.initialize_logging(log_dir)

    # time object carrying timeStep info: year, month, day, doy, hour, etc
    time_object = ModelTime()
    time_object.getStartEndTimeSteps(startDate, endDate)

    # the calculation model/framework
    area_model = AreaOperationNetcdfToPCRasterTSS(
        netcdf_input_file           = netcdf_input_file,
        areaMapFileName             = areaMapFileName,
        areaPointMapFileName        = areaPointMapFileName,
        netcdf_input_clone_map_file = netcdf_input_clone_map_file,
        output_folder               = output_folder,
        unit_conversion_factor      = unit_conversion_factor,
        unit_conversion_offset      = unit_conversion_offset,
        modelTime                   = time_object,
        inputProjection             = inputProjection,
        outputProjection            = outputProjection,
        resample_method             = resample_method,
        tss_daily_output_file       = tss_daily_output_file,
        tss_10day_output_file       = tss_10day_output_file,
        report_10day_pcr_files      = True)

    # run the calculation for every time step, quietly
    framework = DynamicFramework(area_model, time_object.nrOfTimeSteps)
    framework.setQuiet(True)
    framework.run()
def main():
    """Set up logging and run the calculation model over all time steps."""

    # prepare logger and its directory
    # - only skip creation when the directory is already there; the previous
    #   bare `except: pass` silently hid every other failure (e.g. permissions)
    log_file_location = output['folder'] + "/log/"
    if not os.path.isdir(log_file_location):
        os.makedirs(log_file_location)
    vos.initialize_logging(log_file_location)

    # time object; timeStep info: year, month, day, doy, hour, etc
    modelTime = ModelTime()
    modelTime.getStartEndTimeSteps(startDate, endDate)

    # build the calculation model and run it within the dynamic framework
    calculationModel = CalcFramework(cloneMapFileName,
                                     input_files,
                                     modelTime,
                                     output)
    dynamic_framework = DynamicFramework(calculationModel, modelTime.nrOfTimeSteps)
    dynamic_framework.setQuiet(True)
    dynamic_framework.run()
def main():
    """Parse command-line arguments, prepare the output folder, and run the
    deterministic_runner over the requested period.

    Expected command-line arguments:
      1: output folder
      2: total runoff input file (netcdf)
      3: start date (YYYY-MM-DD)
      4: end date   (YYYY-MM-DD)
    """

    # the output folder from this calculation
    # (the hard-coded defaults that used to precede these lines were dead
    # code: they were always overwritten by the command-line arguments)
    output_folder = sys.argv[1]

    # total runoff input file (renamed from the misspelled `totat_...`)
    total_runoff_input_file = sys.argv[2]

    # timeStep info: year, month, day, doy, hour, etc
    start_date = sys.argv[3]
    end_date   = sys.argv[4]
    currTimeStep = ModelTime()
    currTimeStep.getStartEndTimeSteps(start_date, end_date)

    # - if it exists, clean the previous output directory
    if os.path.isdir(output_folder):
        shutil.rmtree(output_folder)
    # - make the output folder
    os.makedirs(output_folder)

    # logger
    # - make a log directory and initialize logging
    log_file_directory = output_folder + "/" + "log/"
    os.makedirs(log_file_directory)
    vos.initialize_logging(log_file_directory)

    # run the deterministic_runner within the dynamic framework
    logger.info('Starting the calculation.')
    deterministic_runner = DeterministicRunner(currTimeStep, output_folder, total_runoff_input_file)
    dynamic_framework = DynamicFramework(deterministic_runner, currTimeStep.nrOfTimeSteps)
    dynamic_framework.setQuiet(True)
    dynamic_framework.run()
def main():
    """Prepare logging, then run the PCRaster calculation over all time steps."""
    import shutil  # local import: used for cleaning a pre-existing log directory

    # prepare logger and its directory
    # - if the log directory already exists, remove and recreate it; the old
    #   bare `except:` shelled out to `rm -r`, which silently swallowed every
    #   real error and was shell-injection prone
    log_file_location = output['folder'] + "/log/"
    if os.path.isdir(log_file_location):
        shutil.rmtree(log_file_location)
    os.makedirs(log_file_location)
    vos.initialize_logging(log_file_location)

    # time object; timeStep info: year, month, day, doy, hour, etc
    modelTime = ModelTime()
    modelTime.getStartEndTimeSteps(startDate, endDate, nrOfTimeSteps)

    # build the calculation model and run it within the dynamic framework
    calculationModel = CalcFramework(cloneMapFileName,
                                     pcraster_files,
                                     modelTime,
                                     output,
                                     inputEPSG, outputEPSG, resample_method)
    dynamic_framework = DynamicFramework(calculationModel, modelTime.nrOfTimeSteps)
    dynamic_framework.setQuiet(True)
    dynamic_framework.run()
os.system('rm -r ' + output_files['folder'] + "/*") pass # - temporary output folder (e.g. needed for resampling/gdalwarp) output_files['tmp_folder'] = output_files['folder'] + "/tmp/" try: os.makedirs(output_files['tmp_folder']) except: os.system('rm -r ' + output_files['tmp_folder'] + "/*") pass # - prepare logger and its directory log_file_location = output_files['folder'] + "/log/" try: os.makedirs(log_file_location) except: pass vos.initialize_logging(log_file_location) # start and end years for this analysis: #~ # - for historical runs #~ str_year = 1960 #~ end_year = 1999 #~ # - for the year 2030 #~ str_year = 2010 #~ end_year = 2049 #~ # - for the year 2050 #~ str_year = 2030 #~ end_year = 2069 #~ # - for the year 2080 #~ str_year = 2060 #~ end_year = 2099 # - based on the system arguments:
os.system('rm -r ' + output_files['folder'] + "/*") pass # - temporary output folder (e.g. needed for resampling/gdalwarp) output_files['tmp_folder'] = output_files['folder'] + "/tmp/" try: os.makedirs(output_files['tmp_folder']) except: os.system('rm -r ' + output_files['tmp_folder'] + "/*") pass # - prepare logger and its directory log_file_location = output_files['folder'] + "/log/" try: os.makedirs(log_file_location) except: pass vos.initialize_logging(log_file_location) # netcdf general setup: netcdf_setup = {} netcdf_setup['format'] = "NETCDF4" netcdf_setup['zlib'] = True netcdf_setup[ 'institution'] = "Utrecht University, Department of Physical Geography ; Deltares ; World Resources Institute" netcdf_setup[ 'title'] = "PCR-GLOBWB 2 output (post-processed for the Aqueduct Flood Analyzer): Gumbel Fit to Annual Flood Maxima" netcdf_setup['created by'] = "Edwin H. Sutanudjaja ([email protected])" netcdf_setup[ 'description'] = "The gumbel fit/analysis output for the annual flood maxima." netcdf_setup[ 'source'] = "Utrecht University, Department of Physical Geography - contact: Edwin H. Sutanudjaja ([email protected])" netcdf_setup['references'] = "Sutanudjaja et al., in prep."
# whether reservoir cells must be masked out; this is done only for
# channel storage maps
masking_out_reservoirs = False
if map_type_name == "channel_storage.map": masking_out_reservoirs = True

# output folder for this mask only
output_folder = global_output_folder + "/" + str(mask_code) + "/"

# clean any files existing in the output directory
clean_previous_output = True
if clean_previous_output and os.path.exists(output_folder): shutil.rmtree(output_folder)

# make output and log folders, and initialize logging
# (creating the log folder also creates the output folder, its parent)
log_file_folder = output_folder + "/log/"
if os.path.exists(log_file_folder): shutil.rmtree(log_file_folder)
os.makedirs(log_file_folder)
vos.initialize_logging(log_file_folder)

# make a fresh tmp folder, removing any leftover from a previous run
tmp_folder = output_folder + "/tmp/"
if os.path.exists(tmp_folder): shutil.rmtree(tmp_folder)
os.makedirs(tmp_folder)

# change the working directory to the output folder
# NOTE(review): os.chdir affects the whole process; presumably each mask is
# handled in its own process — confirm against the caller
os.chdir(output_folder)

# copy the ini file into the output folder (shell `cp` via vos.cmd_line)
cmd = "cp " + str(ini_file) + " downscaling.ini"
vos.cmd_line(cmd, using_subprocess = False)

# clone and landmask files at low resolution (using 5 arc-minutes)
# - set clone map
# - option with first upscaling model results to 30 arc-min model try: with_upscaling = str(sys.argv[5]) == "with_upscaling" except: with_upscaling = False # clean any files exists on the ouput directory (this can be done for global runs) clean_previous_output = True if clean_previous_output and os.path.exists(general_output_folder): shutil.rmtree(general_output_folder) # make log folder and initialize logging log_file_folder = general_output_folder + "/global/log/" if clean_previous_output and os.path.exists(log_file_folder): shutil.rmtree(log_file_folder) if os.path.exists(log_file_folder) == False: os.makedirs(log_file_folder) vos.initialize_logging(log_file_folder) # run the downscaling scripts parallelly msg = "Run the downscaling scripts." logger.info(msg) # number_of_clone_maps = 53 all_clone_codes = ['M%02d'%i for i in range(1,number_of_clone_maps+1,1)] #~ all_clone_codes = ['M09'] # # - due to limited memory, we have to split the runs into several groups (assumption: a process takes maximum about 4.5 GB RAM and we will use normal nodes) num_of_clones_in_a_grp = np.int(np.floor(64.0 / 4.5)) number_of_clone_groups = np.int(np.ceil(float(number_of_clone_maps)/ num_of_clones_in_a_grp)) start_clone = 0 for i_group in range(number_of_clone_groups):