def run_parameter_set(parameter_set_list, core_number, chg_typ_dict, header,
                      calibration_header, working_dir):
    """
    Run SWAT+ once per parameter set and score each run with NSE.

    For every parameter set: write a calibration.cal into this core's copy
    of the model, run the SWAT+ executable there, extract the simulated
    daily series for the calibration unit/variable from channel_sd_day.csv,
    compute NSE against the observations and append a row to this core's
    report.csv (rewritten after each run so partial progress is kept).

    Relies on module-level globals: calibration_variable, unit_number,
    calibration_time_step, observation_filename, executable_path, and the
    helpers set_calibration_cal, write_to, read_from, calculate_nse.

    Args:
        parameter_set_list: iterable of parameter-value sequences to test.
        core_number: index of this process's working copy of TxtInOut.
        chg_typ_dict: change-type lookup passed to set_calibration_cal.
        header: parameter names; becomes the report CSV header row.
        calibration_header: header/template used by set_calibration_cal.
        working_dir: parent directory holding the per-core model copies.

    Side effects: changes the process working directory, runs the SWAT+
    executable, writes simulated.csv and report.csv in the core directory.
    """
    report = "{0},NSE\n".format(",".join(header))
    for parameter_set in parameter_set_list:
        calibration_cal = set_calibration_cal(header, parameter_set,
                                              chg_typ_dict, calibration_header)
        # write calibration.cal into this core's model directory
        write_to(
            "{working_dir}/{core}/calibration.cal".format(
                working_dir=working_dir, core=core_number),
            calibration_cal,
            report_=False,
        )
        os.chdir("{working_dir}/{core}".format(
            working_dir=working_dir, core=core_number))
        if not os.path.isfile("rev59.3_64rel.exe"):
            shutil.copyfile(executable_path, "rev59.3_64rel.exe")
        print("\t> running SWAT+ in process {0}".format(core_number))
        if platform.system() == "Linux":
            # copyfile does not preserve the executable bit, so set it here
            os.system("chmod 777 ./rev59.3_64rel.exe")
            os.system("./rev59.3_64rel.exe")
        else:
            os.system("rev59.3_64rel.exe")
            # subprocess.Popen('rev59.3_64rel.exe', stdout=subprocess.PIPE)

        # extract flow for the specified unit at the daily timestep;
        # the first three lines of channel_sd_day.csv are header lines
        sim_results = read_from(
            "{working_dir}/{core}/channel_sd_day.csv".format(
                working_dir=working_dir, core=core_number))[3:]
        simulated_string = "Date,Simulated\n"
        # CSV column of the calibration variable ("1" -> 47, "2" -> 9);
        # None means the variable is unsupported and no rows are extracted
        results_index = None
        if calibration_variable == "1":
            results_index = 47
        elif calibration_variable == "2":
            results_index = 9
        if results_index is not None:
            for r_line in sim_results:
                cols = r_line.split(",")  # split once instead of per-field
                if cols[4] == str(unit_number):
                    simulated_string += "{dd}/{mm}/{yy},{val}\n".format(
                        dd=cols[2],
                        mm=cols[1],
                        yy=cols[3],
                        val=cols[results_index],
                    )
        simulated_fn = "{working_dir}/{core}/simulated.csv".format(
            working_dir=working_dir, core=core_number)
        report_fn = "{working_dir}/{core}/report.csv".format(
            working_dir=working_dir, core=core_number)
        observed_fn = "{home_dir}/data/observations/{cal_obs_fn}".format(
            cal_obs_fn=observation_filename, home_dir=sys.argv[1])
        write_to(simulated_fn, simulated_string)

        # calculate NSE and append it to the report table; NSE is
        # initialised to None so an unrecognised calibration_time_step no
        # longer raises NameError at the check below
        NSE = None
        if calibration_time_step == '1':
            print("\t > calculating NSE at daily timestep")
            NSE = calculate_nse(simulated_fn, observed_fn, t_step=1)
        elif calibration_time_step == '2':
            print("\t > calculating NSE at monthly timestep")
            NSE = calculate_nse(simulated_fn, observed_fn, t_step=2)
        elif calibration_time_step == '3':
            print("\t > calculating NSE at yearly timestep")
            NSE = calculate_nse(simulated_fn, observed_fn, t_step=3)
        if NSE is not None:
            # parameter values first (comma-terminated), then the NSE score
            front_string = "".join(str(item) + "," for item in parameter_set)
            report += front_string + str(NSE) + "\n"
        # rewrite the cumulative report after every parameter set
        write_to(report_fn, report)
# prepare environment
parameters = read_from(config_file_path)[8:]
copy_results = None

# prepare file.cio to read calibration.cal: every "chg" line is replaced
# with one pointing the model at cal_parms.cal / calibration.cal
chg_line = ("chg cal_parms.cal calibration.cal null null "
            "null null null null null \n")
cio_string = "".join(
    chg_line if line.startswith("chg") else line for line in file_cio)
write_to("{base}/TxtInOut/file.cio".format(base=base), cio_string)

# duplicate txtinout: one working copy of the model per core
fn_list = [
    "{working_dir}/{core}/cal_parameters.csv".format(
        working_dir=working_dir, core=i)
    for i in range(1, core_count + 1)
]
with pool_cores:
    copy_results = pool_cores.starmap(
        copy_directory,
        product(
            ["{base}/TxtInOut".format(base=base)],
            [working_dir],
            list(range(1, core_count + 1)),
        ))
def convert_weather(weather_source, weather_data_dir, file_count=None):
    """
    Convert SWAT2012-style weather files to the SWAT+ format.

    For each weather fork (pcp/wnd/slr/hmd/tmp) whose station-list file
    exists in `weather_source`, this reads every station's 2012-format data
    file, rewrites it as a SWAT+ station file in `weather_data_dir`, and
    writes a matching <fork>.cli file listing the generated filenames.

    Args:
        weather_source: directory containing the SWAT2012 weather files.
        weather_data_dir: destination directory (created if missing).
        file_count: optional total number of station entries, used only to
            drive the progress display.
    """
    print("")
    weather_dir = weather_source
    destination = weather_data_dir
    if not os.path.isdir(destination):
        os.makedirs(destination)
    # the five supported weather "forks"; each <fork>.txt is a station list
    forks = ["pcp.txt", "wnd.txt", "slr.txt", "hmd.txt", "tmp.txt"]
    counting = 0
    for fork_file in forks:
        fork_path = "{0}/{1}".format(weather_dir, fork_file)
        if os.path.isfile(fork_path):
            fork_content = read_from(fork_path)
            # header of the SWAT+ .cli file, stamped with the current time
            new_fork_string = "file names - file written by SWAT+ editor auto-workflow v1.0 [{0}]\nfilename\n".format(
                str(datetime.datetime.now()).split(".")[0])
            for line in fork_content:
                # skip the station list's own header line
                if line == fork_content[0]:
                    continue
                if not file_count is None:
                    counting += 1
                    show_progress(counting, file_count, string_before="\t formating weather: ")
                # SWAT+ station filename: <station name>.<fork>
                # (assumes column 1 of the station list is the station
                # name — TODO confirm against the 2012 file layout)
                filename = "{0}.{1}".format(
                    line.split(",")[1], fork_file.split(".")[0])
                new_fork_string += "{0}\n".format(filename)
                file_2012 = ""
                date_ = None
                start_date = None
                nyears = 1
                version2012_station_content = read_from("{0}/{1}.txt".format(
                    weather_dir, line.split(",")[1]))
                for line_2012 in version2012_station_content:
                    if line_2012 == version2012_station_content[0]:
                        # first line starts with the 4-digit start year;
                        # begin counting from 1 January of that year
                        date_ = datetime.datetime(int(line_2012[:4]), 1, 1)
                        start_date = datetime.datetime(int(line_2012[:4]), 1, 1)
                        continue
                    else:
                        # when date_ has rolled into a new year, reset the
                        # day-of-year origin and count one more year
                        if date_.year - start_date.year > 0:
                            start_date = datetime.datetime(date_.year, 1, 1)
                            nyears += 1
                        if fork_file == "tmp.txt":
                            # temperature rows carry "max,min" values
                            min_tmp = float(line_2012.split(",")[1])
                            max_tmp = float(line_2012.split(",")[0])
                            tmp_values = "{0}{1}".format(
                                "{0}".format(max_tmp).rjust(10),
                                "{0}".format(min_tmp).rjust(10))
                            # fixed-width record: year, day-of-year, values
                            file_2012 += "{0}{1}{2}\n".format(
                                date_.year,
                                str(int((date_ - start_date).days) + 1).rjust(5),
                                tmp_values)
                        else:
                            # all other forks carry a single value per day
                            file_2012 += "{0}{1}{2}\n".format(
                                date_.year,
                                str(int((date_ - start_date).days) + 1).rjust(5),
                                str(float(line_2012)).rjust(9))
                        # one record per day: advance to the next date
                        date_ += datetime.timedelta(days=1)
                # station header: nbyr, tstep, then columns 2-4 of the
                # station list (lat/lon/elev per the header line below)
                station_info = "{z}{o}{t}{th}{f}".format(
                    z=str(nyears).rjust(4),
                    o="0".rjust(10),
                    t=line.split(",")[2].rjust(10),
                    th=line.split(",")[3].rjust(10),
                    f=line.split(",")[4].rjust(11))
                file_header_ = \
                    "{1}: data - file written by SWAT+ editor auto-workflow v1.0 [{0}]\nnbyr tstep lat lon elev\n{2}".format(
                        str(datetime.datetime.now()).split(".")[0], filename,
                        station_info)
                file_header_ += file_2012
                # write the converted SWAT+ station data file
                write_to(
                    "{dest}/{fname}".format(fname=filename, dest=destination),
                    file_header_)
            # write the .cli index file for this fork
            write_to(
                "{0}/{1}.cli".format(destination, fork_file.split(".")[0]),
                new_fork_string)
        # else:
        #     print("\t! could not find {0} in {1}".format(fork_file, weather_dir))
    print("\n\t finished.\n")
# create data directory structure
directories = [
    "calibration", "observations", "rasters", "shapefiles", "tables",
    "weather"
]
log.info("creating data directories", keep_log)
for directory in directories:
    # build the path once, test-and-create
    dir_path = "{0}/{1}/{2}".format(sys.argv[1], "data", directory)
    if not os.path.isdir(dir_path):
        os.makedirs(dir_path)

# save calibration config file
log.info("writing calibration_config.csv template in /data/calibration/",
         keep_log)
write_to(
    "{base}/data/calibration/calibration_config.csv".format(base=sys.argv[1]),
    calibration_config_template)

# get project data from xml file (the QGIS .qgs project)
log.info("reading qgis project", keep_log)
xml_fn = "{base}/{fn}/{fn}.qgs".format(base=sys.argv[1], fn=selected_model)
title = xml_children_attributes(xml_fn, "./")["title"]
# the four project sections share the same xpath shape
_xpath = "./properties/{s}/{sec}"
delin_data = xml_children_attributes(
    xml_fn, _xpath.format(s=selected_model, sec="delin"))
hru_data = xml_children_attributes(
    xml_fn, _xpath.format(s=selected_model, sec="hru"))
landuse_data = xml_children_attributes(
    xml_fn, _xpath.format(s=selected_model, sec="landuse"))
soil_data = xml_children_attributes(
    xml_fn, _xpath.format(s=selected_model, sec="soil"))
log.info("> filter method is filter by landuse, soil, slope", keep_log)
log.info(" - thresholds = {0}".format(config.Land_Soil_Slope_Thres), keep_log)
# expect exactly three comma-separated thresholds: landuse, soil, slope
thres_values = config.Land_Soil_Slope_Thres.replace(" ", "").split(",")
if len(thres_values) != 3:
    print(
        '\t! Provide thresholds in the config with the correct format\n\t - e.g. Land_Soil_Slope_Thres = "12, 10, 7"'
    )
    sys.exit(1)
hru_land_thres, hru_soil_thres, hru_slope_thres = thres_values
is_multiple = 1

log.info("writing raster projection information", keep_log)
write_to(
    '{base}/{project_name}/Watershed/Rasters/DEM/{dem_name}.prj.txt'.format(
        base=sys.argv[1], project_name=project_name, dem_name=dem_name),
    formated_projcs)
write_to(
    '{base}/{project_name}/Watershed/Rasters/DEM/{dem_name}.prj'.format(
        base=sys.argv[1], project_name=project_name, dem_name=dem_name),
    prjcrs)
write_to(
    '{base}/{project_name}/Watershed/Rasters/DEM/{dem_name}hillshade.prj'.format(
        base=sys.argv[1], project_name=project_name, dem_name=dem_name),
    prjcrs)

log.info("getting gis data extents", keep_log)
extent_xmin, extent_ymin, extent_xmax, extent_ymax = get_extents(dem_fn)
raster_stats = raster_statistics(dem_fn)
# one third of the DEM elevation range, rounded to a whole unit
third_delta = round((raster_stats.maximum - raster_stats.minimum) / 3, 0)