"rev59.3_64rel.exe" if platform.system() == "Windows" else "swatplusrev59-static.exe" } executable_path = "{swatplus_wf_dir}editor_api/swat_exe/{swatplus_exe}".format( **variables) base = "{0}/{1}/Scenarios/Default".format(sys.argv[1], config.Project_Name) working_dir = "{base_dir}/working".format(base_dir=base) runs = config.Number_of_Runs output_dir = working_dir + "/output/" config_file_name = config.Calibration_Config_File home_dir = sys.argv[1] config_file_path = "{home_dir}/data/calibration/{config_file}".format( config_file=config_file_name, home_dir=home_dir) observation_filename = read_from(config_file_path)[2].split(",")[2] observation_file_path = "{home_dir}/data/observations/{cal_obs_fn}".format( cal_obs_fn=observation_filename, home_dir=sys.argv[1]) if not os.path.isfile(observation_file_path): print("\t! the observation file was not found!\n\t path: {0}".format( observation_file_path)) sys.exit(1) unit_number = read_from(config_file_path)[3].split(",")[2] print("\t> calibrating to channel number {0}".format(unit_number)) calibration_time_step = read_from(config_file_path)[4].split(",")[2] calibration_variable = read_from(config_file_path)[5].split(",")[2] core_count = config.Number_of_Processes pool_cores = multiprocessing.Pool(core_count) file_cio = read_from("{base}/TxtInOut/file.cio".format(base=base))
def run_parameter_set(parameter_set_list, core_number, chg_typ_dict, header,
                      calibration_header, working_dir):
    """
    Run SWAT+ once per parameter set in this core's working directory,
    extract the simulated series for the calibration unit, and write an
    NSE report for the whole set.
    """
    report = "{0},NSE\n".format(",".join(header))
    for parameter_set in parameter_set_list:
        calibration_cal = set_calibration_cal(
            header, parameter_set, chg_typ_dict, calibration_header)

        # write to the correct location relative to the 'working' directory
        write_to(
            "{working_dir}/{core}/calibration.cal".format(
                working_dir=working_dir, core=core_number),
            calibration_cal,
            report_=False,
        )
        os.chdir("{working_dir}/{core}".format(
            working_dir=working_dir, core=core_number))
        if not os.path.isfile("rev59.3_64rel.exe"):
            shutil.copyfile(executable_path, "rev59.3_64rel.exe")

        print("\t> running SWAT+ in process {0}".format(core_number))
        if platform.system() == "Linux":
            # make the copied binary executable before launching it
            os.system("chmod 777 ./rev59.3_64rel.exe")
            os.system("./rev59.3_64rel.exe")
        else:
            os.system("rev59.3_64rel.exe")
            # subprocess.Popen('rev59.3_64rel.exe', stdout=subprocess.PIPE)

        # extract flow for the specified unit at the specified timestep
        sim_results = read_from(
            "{working_dir}/{core}/channel_sd_day.csv".format(
                working_dir=working_dir, core=core_number))[3:]

        simulated_string = "Date,Simulated\n"
        results_index = None
        if calibration_variable == "1":
            results_index = 47
        if calibration_variable == "2":
            results_index = 9

        if results_index is not None:
            for r_line in sim_results:
                if r_line.split(",")[4] == str(unit_number):
                    day_val = r_line.split(",")[2]
                    simulated_string += "{dd}/{mm}/{yy},{val}\n".format(
                        dd=day_val,
                        mm=r_line.split(",")[1],
                        yy=r_line.split(",")[3],
                        val=r_line.split(",")[results_index],
                    )

        simulated_fn = "{working_dir}/{core}/simulated.csv".format(
            working_dir=working_dir, core=core_number)
        report_fn = "{working_dir}/{core}/report.csv".format(
            working_dir=working_dir, core=core_number)
        observed_fn = "{home_dir}/data/observations/{cal_obs_fn}".format(
            cal_obs_fn=observation_filename, home_dir=sys.argv[1])

        write_to(simulated_fn, simulated_string)

        # calculate NSE and append to the report table; initialise to None so
        # an unrecognised timestep cannot reuse a stale value from the
        # previous parameter set
        NSE = None
        if calibration_time_step == '1':
            print("\t > calculating NSE at daily timestep")
            NSE = calculate_nse(simulated_fn, observed_fn, t_step=1)
        if calibration_time_step == '2':
            print("\t > calculating NSE at monthly timestep")
            NSE = calculate_nse(simulated_fn, observed_fn, t_step=2)
        if calibration_time_step == '3':
            print("\t > calculating NSE at yearly timestep")
            NSE = calculate_nse(simulated_fn, observed_fn, t_step=3)

        if NSE is not None:
            front_string = ""
            for item in parameter_set:
                front_string += str(item) + ","
            report += front_string + str(NSE) + "\n"
            # report += "{0},{1}\n".format(",".join(parameter_set), NSE)

        write_to(report_fn, report)
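
# `calculate_nse` is defined elsewhere in the workflow; the sketch below shows
# the Nash-Sutcliffe efficiency it is expected to return,
#     NSE = 1 - sum((obs - sim)^2) / sum((obs - mean(obs))^2),
# assuming both CSVs carry a one-line header followed by "date,value" rows and
# that pairing is done on the date column. The t_step aggregation to monthly
# or yearly values is omitted, so treat this as an illustration only.
def calculate_nse_sketch(simulated_fn, observed_fn, t_step=1):
    def read_series(csv_fn):
        series = {}
        with open(csv_fn) as csv_file:
            for row in csv_file.readlines()[1:]:  # skip the header row
                date_str, value = row.strip().split(",")[:2]
                series[date_str] = float(value)
        return series

    simulated = read_series(simulated_fn)
    observed = read_series(observed_fn)
    # pair observations with simulations on matching dates
    paired = [(observed[key], simulated[key])
              for key in observed if key in simulated]
    if not paired:
        return None
    mean_obs = sum(obs for obs, _ in paired) / len(paired)
    residual = sum((obs - sim) ** 2 for obs, sim in paired)
    variance = sum((obs - mean_obs) ** 2 for obs, _ in paired)
    return None if variance == 0 else 1 - residual / variance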
lsu_shapefile_gpd = geopandas.read_file(lsus_shapefile)
# work on a copy so the source GeoDataFrame is not mutated while iterating
new_lsu_shapefile_gpd = lsu_shapefile_gpd.copy()
new_lsu_shapefile_gpd["precip"] = 0.0
new_lsu_shapefile_gpd["surq_gen"] = 0.0
new_lsu_shapefile_gpd["latq"] = 0.0
new_lsu_shapefile_gpd["wateryld"] = 0.0
new_lsu_shapefile_gpd["perc"] = 0.0
new_lsu_shapefile_gpd["et"] = 0.0
new_lsu_shapefile_gpd["cn"] = 0.0
new_lsu_shapefile_gpd["pet"] = 0.0
new_lsu_shapefile_gpd["irr"] = 0.0

# read output data into dictionary
log.info("reading annual average LSU results", keep_log)
wb_aa_data_list = read_from(aa_lsu_wb_file)
wb_aa_data_dict = {}
for wb_aa_line in wb_aa_data_list[3:]:
    wb_aa_line_parts = wb_aa_line.split()
    lsu_no = wb_aa_line_parts[4]
    if lsu_no not in wb_aa_data_dict:
        wb_aa_data_dict[lsu_no] = wb_result(wb_aa_line_parts)

# add output to shapefile
log.info("mapping annual average LSU results", keep_log)
for index, row in lsu_shapefile_gpd.iterrows():
    new_lsu_shapefile_gpd.loc[index, "precip"] = float(
        wb_aa_data_dict[str(row.Channel)].precip)
    new_lsu_shapefile_gpd.loc[index, "surq_gen"] = float(
        wb_aa_data_dict[str(row.Channel)].surq_gen)
    new_lsu_shapefile_gpd.loc[index, "latq"] = float(
        wb_aa_data_dict[str(row.Channel)].latq)
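
# `wb_result` is assumed to be a small record type that binds the
# whitespace-split columns of the annual-average water-balance file to named
# attributes; a minimal sketch (the column offset and field order below are
# illustrative assumptions, not the file's actual layout):
class WbResultSketch:
    fields = ["precip", "surq_gen", "latq", "wateryld",
              "perc", "et", "cn", "pet", "irr"]

    def __init__(self, line_parts):
        # hypothetical: id/date columns first, water-balance values after
        for offset, name in enumerate(self.fields):
            setattr(self, name, line_parts[5 + offset])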
shutil.copyfile(  # copy the burn shapefile into the data directory
    "{base}/{project_name}/Watershed/Shapes/{shp_base}.{ext}".format(
        base=sys.argv[1], project_name=selected_model,
        shp_base=file_name(delin_data["burn"], extension=False),
        ext=shapefile_extension),
    "{base}/data/shapefiles/{shp_base}.{ext}".format(
        base=sys.argv[1],
        shp_base=file_name(delin_data["burn"], extension=False),
        ext=shapefile_extension))

# retrieve weather
log.info("reading file.cio", keep_log)
file_cio = read_from(
    "{base}/{project_name}/Scenarios/Default/TxtInOut/file.cio".format(
        base=sys.argv[1], project_name=selected_model))

# the second-to-last line of file.cio names the weather directory; "null"
# means the weather files sit directly in TxtInOut
weather_dir = file_cio[-2].strip("\n").split()[1] \
    if file_cio[-2].strip("\n").split()[1] != "null" else ""

log.info(
    "weather files are in: {fn}".format(
        fn="{base}/{project_name}/Scenarios/Default/TxtInOut/{weather_dir}".format(
            base=sys.argv[1], project_name=selected_model,
            weather_dir=weather_dir)),
    keep_log)

pcp_cli_file = "{base}/{project_name}/Scenarios/Default/TxtInOut/{weather_dir}pcp.cli".format(
    base=sys.argv[1], project_name=selected_model, weather_dir=weather_dir)
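
# `file_name` is a workflow helper; judging from its call sites it returns a
# path's base name, optionally stripping the extension. A minimal sketch
# under that assumption:
import os

def file_name_sketch(path_string, extension=True):
    base = os.path.basename(path_string)
    return base if extension else os.path.splitext(base)[0]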
def convert_weather(weather_source, weather_data_dir, file_count=None):
    print("")
    weather_dir = weather_source
    destination = weather_data_dir

    if not os.path.isdir(destination):
        os.makedirs(destination)

    forks = ["pcp.txt", "wnd.txt", "slr.txt", "hmd.txt", "tmp.txt"]
    counting = 0
    for fork_file in forks:
        fork_path = "{0}/{1}".format(weather_dir, fork_file)
        if os.path.isfile(fork_path):
            fork_content = read_from(fork_path)
            new_fork_string = "file names - file written by SWAT+ editor auto-workflow v1.0 [{0}]\nfilename\n".format(
                str(datetime.datetime.now()).split(".")[0])
            for line in fork_content:
                # skip the header line of the 2012-format station list
                if line == fork_content[0]:
                    continue
                if file_count is not None:
                    counting += 1
                    show_progress(counting, file_count,
                                  string_before="\t formatting weather: ")
                filename = "{0}.{1}".format(
                    line.split(",")[1], fork_file.split(".")[0])
                new_fork_string += "{0}\n".format(filename)

                file_2012 = ""
                date_ = None
                start_date = None
                nyears = 1
                version2012_station_content = read_from("{0}/{1}.txt".format(
                    weather_dir, line.split(",")[1]))
                for line_2012 in version2012_station_content:
                    if line_2012 == version2012_station_content[0]:
                        # the first line of a 2012 station file holds the start year
                        date_ = datetime.datetime(int(line_2012[:4]), 1, 1)
                        start_date = datetime.datetime(int(line_2012[:4]), 1, 1)
                        continue
                    if date_.year - start_date.year > 0:
                        start_date = datetime.datetime(date_.year, 1, 1)
                        nyears += 1
                    if fork_file == "tmp.txt":
                        min_tmp = float(line_2012.split(",")[1])
                        max_tmp = float(line_2012.split(",")[0])
                        tmp_values = "{0}{1}".format(
                            "{0}".format(max_tmp).rjust(10),
                            "{0}".format(min_tmp).rjust(10))
                        file_2012 += "{0}{1}{2}\n".format(
                            date_.year,
                            str(int((date_ - start_date).days) + 1).rjust(5),
                            tmp_values)
                    else:
                        file_2012 += "{0}{1}{2}\n".format(
                            date_.year,
                            str(int((date_ - start_date).days) + 1).rjust(5),
                            str(float(line_2012)).rjust(9))
                    date_ += datetime.timedelta(days=1)

                station_info = "{z}{o}{t}{th}{f}".format(
                    z=str(nyears).rjust(4),
                    o="0".rjust(10),
                    t=line.split(",")[2].rjust(10),
                    th=line.split(",")[3].rjust(10),
                    f=line.split(",")[4].rjust(11))
                file_header_ = \
                    "{1}: data - file written by SWAT+ editor auto-workflow v1.0 [{0}]\nnbyr tstep lat lon elev\n{2}".format(
                        str(datetime.datetime.now()).split(".")[0],
                        filename, station_info)
                file_header_ += file_2012
                write_to(
                    "{dest}/{fname}".format(fname=filename, dest=destination),
                    file_header_)
            write_to(
                "{0}/{1}.cli".format(destination, fork_file.split(".")[0]),
                new_fork_string)
        # else:
        #     print("\t! could not find {0} in {1}".format(fork_file, weather_dir))
    print("\n\t finished.\n")
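
# `show_progress` is a shared helper from the workflow's utility module; a
# minimal sketch, assuming it draws an in-place percentage counter (the real
# implementation may differ):
import sys

def show_progress_sketch(count, end_val, string_before=""):
    percent = 100.0 * count / end_val
    sys.stdout.write("\r{0}{1:.1f}%".format(string_before, percent))
    sys.stdout.flush()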
str(datetime.datetime.now()).split(".")[0], filename, station_info) file_header_ += file_2012 write_to( "{dest}/{fname}".format(fname=filename, dest=destination), file_header_) write_to( "{0}/{1}.cli".format(destination, fork_file.split(".")[0]), new_fork_string) # else: # print("\t! could not find {0} in {1}".format(fork_file, weather_dir)) print("\n\t finished.\n") if __name__ == "__main__": forks = ["pcp.txt", "wnd.txt", "slr.txt", "hmd.txt", "tmp.txt"] weather_source = argv[1] weather_data_dir = argv[2] all_files = 0 for fork in forks: try: all_files += len(read_from("{0}/{1}".format(weather_source, fork))) - 1 except: pass convert_weather(weather_source, weather_data_dir, all_files)
    '{base}/{project_name}/{project_name}.sqlite'.format(
        base=sys.argv[1], project_name=project_name))
project_database = sqlite_connection(
    '{base}/{project_name}/{project_name}.sqlite'.format(
        base=sys.argv[1], project_name=project_name))

# - copy templates
log.info("importing usersoil into project database", keep_log)
project_database.connect()
if project_database.table_exists(usersoil):
    project_database.delete_table(usersoil)

# - get_data into database
# - - usersoil
usersoil_rows = read_from("{base_dir}/data/tables/{usersoil_file}".format(
    base_dir=sys.argv[1], usersoil_file=config.Usersoil))

column_types_usersoil = {
    "OBJECTID": 'INTEGER',
    "TEXTURE": 'TEXT',
    "HYDGRP": 'TEXT',
    "CMPPCT": 'TEXT',
    "S5ID": 'TEXT',
    "SNAM": 'TEXT',
    "SEQN": 'TEXT',
    "MUID": 'TEXT',
}

project_database.create_table(
    usersoil,
    usersoil_rows[0].replace('"', "").split(",")[0],
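
# The excerpt cuts off inside the create_table call above. `sqlite_connection`
# is the workflow's thin wrapper around sqlite3; the sketch below covers only
# the methods used here, with signatures inferred from the call sites (treat
# them as assumptions, not the actual API):
import sqlite3

class SqliteConnectionSketch:
    def __init__(self, database_path):
        self.database_path = database_path
        self.connection = None

    def connect(self):
        # open (or create) the project database file
        self.connection = sqlite3.connect(self.database_path)

    def table_exists(self, table_name):
        # query sqlite's catalogue rather than trusting cached state
        cursor = self.connection.execute(
            "SELECT name FROM sqlite_master WHERE type='table' AND name=?",
            (table_name,))
        return cursor.fetchone() is not None

    def delete_table(self, table_name):
        self.connection.execute('DROP TABLE IF EXISTS "{0}"'.format(table_name))
        self.connection.commit()

# create_table's full signature is not visible in this excerpt, so it is
# deliberately left out of the sketch.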