def scan_condition(self):
    items = self.lb_condition.curselection()
    self.condition = str([self.condition_list[int(item)] for item in items][0])
    self.dir2condition_act = config.dir2conditions + self.condition
    # INFO: dir2new_condition may not end with "\\"!
    if os.path.isfile(self.dir2condition_act + "\\flow_definitions.xlsx"):
        run = tk.messagebox.askyesno("Create new?",
                                     "%s already exists.\nDo you want to create another flow_definitions.xlsx?" % self.dir2condition_act,
                                     parent=self.top)
    else:
        run = True
    if run:
        condition4flows = cCC.ConditionCreator(self.dir2condition_act)
        self.flows_xlsx = condition4flows.create_discharge_table()
        self.l_c_dir.config(fg="forest green", text="Selected: " + self.dir2condition_act)
        self.b_sc.config(fg="forest green", text="Analyzed")
        self.b_sct["state"] = "normal"
    try:
        if run and not condition4flows.error:
            msg0 = "Analysis complete.\n"
            msg1 = "Complete the discharge (flood) return periods in the discharges workbook."
            showinfo("INFO", msg0 + msg1, parent=self.top)
            fGl.open_file(self.flows_xlsx)
        if run and condition4flows.error:
            self.b_sc.config(fg="firebrick3", text="Analysis failed")
    except:
        pass
def make_flow_duration(self):
    condition4input = cCC.ConditionCreator(self.dir2condition_act)
    if not self.flow_series_xlsx:
        self.select_flow_series_xlsx()
    flow_duration_xlsx = condition4input.create_flow_duration_table(self.flow_series_xlsx,
                                                                    self.eco_flow_type_applied)
    try:
        if not condition4input.error:
            fGl.open_file(flow_duration_xlsx)
            self.b_q_dur.config(fg="forest green")
        else:
            showinfo("ERROR", "Review error messages (console / logfile.log).", parent=self.top)
            self.b_q_dur.config(fg="firebrick3", text="Re-try flow duration creation")
    except:
        pass
def make_input_file(self):
    items = self.lb_condition.curselection()
    # INFO: dir2new_condition may not end with "\\"!
    condition = str([self.condition_list[int(item)] for item in items][0])
    condition4input = cCC.ConditionCreator(config.dir2conditions + condition)
    condition4input.generate_input_file(config.dir2conditions + condition + "\\flow_definitions.xlsx")
    try:
        if not condition4input.error:
            fGl.open_file(config.dir2conditions + condition + "\\input_definition.inp")
            self.b_sc.config(fg="forest green")
            self.l_c_dir.config(fg="forest green",
                                text=config.dir2conditions + condition + "\\input_definition.inp")
        else:
            showinfo("INFO", "Make sure that the flow return periods are defined.")
            self.b_sc.config(fg="red", text="failed - try again")
    except:
        pass
def open_files(self, f_list):
    for _f in f_list:
        self.user_message("Do not forget to save files after editing ...")
        fGl.open_file(_f)
def main(best_plant_dir=str(), lf_dir=str(), crit_lf=float(), prj_name=str(), unit=str(), version=str()):
    """ Derive and draw stabilizing features for vegetation plantings
    crit_lf = 2.5               # years of minimum plant survival without stabilization
    prj_name = "TBR"            # corresponding to folder name
    unit = "us" or "si"
    version = "v10"             # type() = 3-char str: vII
    """
    logger = logging.getLogger("logfile")
    logger.info("STABILIZING PLANTS ----- ----- ----- -----")

    if unit == "us":
        area_units = "SQUARE_FEET_US"
        ft2_to_acres = config.ft2ac
    else:
        area_units = "SQUARE_METERS"
        ft2_to_acres = 1.0

    arcpy.CheckOutExtension('Spatial')
    arcpy.gp.overwriteOutput = True

    dir2pp = config.dir2pm + prj_name + "_" + version + "\\"

    # folder settings
    ras_dir = dir2pp + "Geodata\\Rasters\\"
    shp_dir = dir2pp + "Geodata\\Shapefiles\\"
    quant_dir = dir2pp + "Quantities\\"

    # file and variable settings
    xlsx_target = dir2pp + prj_name + "_assessment_" + version + ".xlsx"
    feature_dict = {"Large wood": 211,
                    "ELJs (plantings)": 212,
                    "Bioengineering (veget.)": 213,
                    "Bioengineering (mineral)": 214,
                    "Angular boulders (instream)": 215}

    # LOOK UP INPUT RASTERS
    try:
        logger.info("Looking up maximum lifespan rasters ...")
        max_lf_plants = arcpy.Raster(ras_dir + "max_lf_pl_c.tif")
        logger.info(" >> Vegetation plantings OK.")
        logger.info(" -- OK (MaxLifespan raster read)\n")
    except:
        logger.info("ERROR: Could not find max. lifespan Rasters.")
        return -1

    logger.info("Looking up specific bioengineering lifespan rasters ...")
    logger.info(best_plant_dir + "lf_wood.tif")
    try:
        lf_wood = arcpy.Raster(lf_dir + "lf_wood.tif")
        logger.info(" >> Added Streamwood.")
    except:
        lf_wood = Float(0)
        logger.info("WARNING: Could not find Lifespan Raster (%slf_wood.tif)." % lf_dir)
        logger.info("         > Go to the Lifespan Tab and create lifespan rasters for the Bioengineering feature group.")
        logger.info("         > Applying 0-lifespans instead.")
    try:
        lf_bio = arcpy.Raster(lf_dir + "lf_bio_v_bio.tif")
        logger.info(" >> Added Other bioengineering.")
    except:
        lf_bio = Float(0)
        logger.info("WARNING: Could not find Lifespan Raster (%slf_bio_v_bio.tif)." % lf_dir)
        logger.info("         > Go to the Lifespan Tab and create lifespan rasters for the Bioengineering feature group.")
        logger.info("         > Applying 0-lifespans instead.")
    logger.info(" -- OK (Bioengineering raster read)")

    # EVALUATE BEST STABILIZATION FEATURES
    try:
        logger.info("Assessing best features for plant stabilization.")
        arcpy.env.extent = max_lf_plants.extent
        best_stab = Con(max_lf_plants <= crit_lf,
                        Con(~IsNull(lf_wood),
                            Con(lf_wood > crit_lf,
                                Int(feature_dict["Large wood"]),
                                Int(feature_dict["ELJs (plantings)"])),
                            Con(~IsNull(lf_bio),
                                Con(lf_bio > crit_lf,
                                    Int(feature_dict["Bioengineering (veget.)"]),
                                    Int(feature_dict["Bioengineering (mineral)"])),
                                Int(feature_dict["Angular boulders (instream)"]))))
        logger.info(" -- OK (Stabilization assessment.)\n")
    except:
        logger.info("ERROR: Best stabilization assessment failed.")
        return -1

    # SAVE RASTERS
    try:
        logger.info("Saving results raster as " + ras_dir + "plant_stab.tif")
        best_stab.save(ras_dir + "plant_stab.tif")
        logger.info(" -- OK (Raster saved.)\n")
    except:
        logger.info("ERROR: Result geofile saving failed.")
        return -1

    # SHAPEFILE CONVERSION AND STATS
    try:
        logger.info("Extracting quantities from geodata ...")
        logger.info(" >> Converting results raster to polygon shapefile ...")
        p_stab_shp = shp_dir + "Plant_stab.shp"
        try:
            arcpy.RasterToPolygon_conversion(Int(best_stab), p_stab_shp, "NO_SIMPLIFY")
            if not fGl.verify_shp_file(p_stab_shp):
                logger.info("NO STABILIZATION MEASURE IDENTIFIED (EMPTY: %s)." % p_stab_shp)
                logger.info(fGl.open_file(xlsx_target))
                return -1
        except:
            logger.info("NOTHING TO DO. Consider increasing the critical lifespan threshold.")

        logger.info(" >> Calculating area statistics ... ")
        try:
            arcpy.AddField_management(p_stab_shp, "F_AREA", "FLOAT", 9)
        except:
            logger.info("    * field F_AREA already exists or the dataset is opened by another program.")
        try:
            arcpy.CalculateGeometryAttributes_management(p_stab_shp, geometry_property=[["F_AREA", "AREA"]],
                                                         area_unit=area_units)
        except:
            logger.info("    * no plant stabilization applicable ")

        logger.info(" >> Adding field (stabilizing feature) ... ")
        try:
            arcpy.AddField_management(p_stab_shp, "Stab_feat", "TEXT")
        except:
            logger.info("    * field Stab_feat already exists ")

        logger.info(" >> Evaluating field (stabilizing feature) ... ")
        inv_feature_dict = {v: k for k, v in feature_dict.items()}
        code_block = "inv_feature_dict = " + str(inv_feature_dict)
        try:
            arcpy.CalculateField_management(p_stab_shp, "Stab_feat", "inv_feature_dict[!gridcode!]",
                                            "PYTHON", code_block)
        except:
            logger.info("    * no plant stabilization added ... ")

        logger.info(" >> Exporting tables ...")
        arcpy.TableToTable_conversion(p_stab_shp, quant_dir, "plant_stab.txt")
        logger.info(" -- OK (Quantity export)\n")
    except:
        logger.info("ERROR: Shapefile operations failed.")
        return -1

    # PREPARE AREA DATA (QUANTITIES)
    logger.info("Processing table statistics ...")
    write_dict = {}
    for k in feature_dict.keys():
        write_dict.update({k: 0.0})  # set to zero for surface count
    stat_data = fGl.read_txt(quant_dir + "plant_stab.txt")
    logger.info(" >> Extracting relevant area sizes ...")
    for row in stat_data:
        try:
            write_dict[inv_feature_dict[int(row[0])]] += row[1]
        except:
            logger.info("      --- Unknown key: " + str(int(row[0])))
    if unit == "us":
        logger.info(" >> Converting ft2 to acres ...")
        for k in write_dict.keys():
            write_dict[k] = write_dict[k] * float(ft2_to_acres)
    logger.info(" -- OK (Area extraction finished)\n")

    # WRITE AREA DATA TO EXCEL FILE
    logger.info("Writing results to costs workbook (sheet: from_geodata) ...")
    fGl.write_dict2xlsx(write_dict, xlsx_target, "B", "C", 12)

    # CLEAN UP useless shapefiles
    logger.info("Cleaning up redundant shapefiles ...")
    arcpy.env.workspace = shp_dir
    all_shps = arcpy.ListFeatureClasses()
    for shp in all_shps:
        if "_del" in str(shp):
            try:
                arcpy.Delete_management(shp)
            except:
                logger.info(str(shp) + " is locked. Remove manually to avoid confusion.")
    arcpy.env.workspace = dir2pp + "Geodata\\"
    logger.info(" -- OK (Clean up)\n")
def main(condition_initial=str(), condition_project=str(), cover_pre=bool(), cover_post=bool(), fish={},
         prj_name=str(), unit=str(), version=str(), apply_wua=bool()):
    """ Calculates pre- and post-implementation SHArea
    version = "v10"                             # type() = 3-char str: vII
    prj_name = "MyProject"                      # (corresponding to folder name)
    condition_initial = "2008"
    condition_project = "2008_tbr_lyr10"
    cover_app_pre = False
    fish = {"Chinook salmon": ["juvenile"]}
    """
    logger = logging.getLogger("logfile")
    error = False
    sys.path.append(config.dir2oxl)

    # set directories
    if cover_pre:
        pre_ext = "cover"
    else:
        pre_ext = "no_cover"
    if cover_post:
        post_ext = "cover"
    else:
        post_ext = "no_cover"

    dir2pp = os.path.dirname(os.path.realpath(__file__)) + "\\" + prj_name + "_" + version + "\\"
    dir2ras_chsi = [config.dir2sh + "SHArea\\Rasters_" + condition_initial + "\\" + pre_ext + "\\",
                    config.dir2sh + "SHArea\\Rasters_" + condition_project + "\\" + post_ext + "\\"]
    dir2ras_tar = [dir2pp + "Geodata\\Rasters\\" + condition_initial + "\\" + pre_ext + "\\",
                   dir2pp + "Geodata\\Rasters\\" + condition_project + "\\" + post_ext + "\\"]
    fGl.chk_dir(dir2ras_tar[0])
    fGl.chk_dir(dir2ras_tar[1])
    xlsx_out_name = config.empty_file
    shp_dir = dir2pp + "Geodata\\Shapefiles\\"

    # file and variable settings
    xlsx_tar_costs = dir2pp + prj_name + "_assessment_" + version + ".xlsx"
    if unit == "us":
        unit_q = "cfs"
        xlsx_sha_template = dir2pp + "Geodata\\SHArea_evaluation_template_us.xlsx"
    else:
        unit_q = "m3/s"
        xlsx_sha_template = dir2pp + "Geodata\\SHArea_evaluation_template_si.xlsx"

    # INSTANTIATE SHArea CLASS OBJECT:
    sha = cSHArC.SHArC(unit, prj_name, version)

    # RUN SHArea ANALYSIS
    try:
        logger.info("Starting SHArea analysis ...")
        project_area = shp_dir + "ProjectArea.shp"
        fields = ["SHAPE@", "gridcode"]
        sha.get_extents(project_area, fields[0])
        sha.set_project_area("ProjectArea")
        for species, lifestages in fish.items():
            for ls in lifestages:
                logger.info("SHArea ANALYSIS FOR " + str(species).upper() + " - " + str(ls).upper())
                fili = str(species).lower()[0:2] + str(ls)[0:2]
                xlsx_conditions = [condition_initial + "_sharea_" + fili + ".xlsx",
                                   condition_project + "_sharea_" + fili + ".xlsx"]
                xlsx_sha = cIO.Write(xlsx_sha_template, worksheet_no=0, data_only=False)
                xlsx_sha_name = dir2pp + "Geodata\\SHArea_" + fili + ".xlsx"
                conditions_sha = []
                xc_count = 0
                start_write_col = "B"
                for xc in xlsx_conditions:
                    # instantiate dict for results writing (entry types are {Q: [Probability, Usable Area]})
                    result_matrix = []
                    try:
                        logger.info(" >> Condition: " + str(xc).split("_sharea_")[0])
                        xlsx_info = cIO.Read(config.dir2sh + "SHArea\\" + xc)
                    except:
                        xlsx_info = ""
                        logger.info("ERROR: Could not access " + str(xc))
                        error = True
                    try:
                        logger.info("   -> Looking up discharge information (RiverArchitect/SHArC/SHArea/) ...")
                        discharges = xlsx_info.read_float_column_short("B", 4)
                        exceedance_pr = xlsx_info.read_float_column_short("E", 4)
                        discharge_dict = dict(zip(discharges, exceedance_pr))
                        raster_list = glob.glob(dir2ras_chsi[xc_count] + "*.tif")
                        logger.info("   -> Matching CHSI rasters with discharge information ...")
                        for q in discharges:
                            test_ras = dir2ras_chsi[xc_count] + "csi_" + fili + str(int(q)) + ".tif"
                            ras = [r for r in raster_list if (r == test_ras)][0]
                            logger.info("    ---> Calculating habitat area from {0} for Q = {1}".format(ras, str(q) + unit_q))
                            try:
                                sha.get_usable_area(ras.split(".tif")[0])
                                result_matrix.append([q, discharge_dict[q], sha.result])
                            except:
                                logger.info("     * empty sluice for " + str(q))
                        logger.info("   ok")
                    except:
                        logger.info("ERROR: Could not process information from " + str(xc))
                        error = True
                    try:
                        logger.info("   -> Writing discharges and usable area to " + xlsx_sha_name + " ...")
                        result_matrix.sort(key=itemgetter(0), reverse=True)
                        write_row = 9
                        for res in result_matrix:
                            xlsx_sha.write_row(start_write_col, write_row, [res[0], res[1], res[2]])  # q, pr, area
                            write_row += 1
                        logger.info("   -> ok")
                    except:
                        logger.info("ERROR: Could not write SHArea data for " + str(species) + " - " + str(ls))
                        error = True

                    # calculate SHArea for transfer, independently of the xlsx calculation
                    try:
                        ex_pr_pdf = [float(exceedance_pr[0])]
                        for i_pr in range(1, exceedance_pr.__len__()):
                            if not ((float(exceedance_pr[i_pr - 1]) >= 100.0) or (exceedance_pr[i_pr] == 0)):
                                ex_pr_pdf.append(float(exceedance_pr[i_pr] - exceedance_pr[i_pr - 1]))
                            else:
                                ex_pr_pdf.append(0.0)
                        conditions_sha.append(sha.calculate_sha([pr for pr in ex_pr_pdf],
                                                                [res[2] for res in result_matrix]))
                    except:
                        logger.info("ERROR: Could not transfer SHArea data for " + str(species) + " - " + str(ls))
                        error = True
                    xc_count += 1
                    start_write_col = cIO.Read.col_num_to_name(cIO.Read.col_name_to_num(start_write_col) + 5)
                    xlsx_info.close_wb()

                logger.info(" >> Saving and closing " + xlsx_sha_name + " ...")
                try:
                    xlsx_sha.save_close_wb(xlsx_sha_name)
                except:
                    logger.info("ERROR: Could not save " + xlsx_sha_name)
                del xlsx_sha
                sha.clear_cache(True)  # limit cache size

                try:
                    logger.info(" >> Transferring results (net SHArea gain) to cost table ...")
                    xlsx_costs = cIO.Write(xlsx_tar_costs, data_only=False)
                    xlsx_costs.write_cell("G", 3, float(conditions_sha[1] - conditions_sha[0]))
                    xlsx_out_name = prj_name + "_assessment_" + version + "_" + fili + ".xlsx"
                    xlsx_costs.save_close_wb(dir2pp + xlsx_out_name)
                    logger.info(" >> CHECK RESULTS IN: " + dir2pp + xlsx_out_name)
                except:
                    logger.info("ERROR: Could not transfer net SHArea gain.")
                    error = True
        sha.clear_cache()
    except:
        logger.info("ERROR: Could not run SHArea analysis.")
        return -1

    if not error:
        fGl.open_file(dir2pp + xlsx_out_name)
    return sha.cache
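# Usage sketch (illustrative only, not part of the original module): the GUI callback
# start_app("s40") provides all arguments from Tk variables and removes the returned cache
# folder afterwards. A standalone call with the docstring example values could look like
# the following; apply_wua=False is an assumption, not a documented default.
if __name__ == "__main__":
    cache_dir = main(condition_initial="2008", condition_project="2008_tbr_lyr10",
                     cover_pre=False, cover_post=False,
                     fish={"Chinook salmon": ["juvenile"]},
                     prj_name="MyProject", unit="us", version="v10", apply_wua=False)
    if cache_dir != -1:
        fGl.rm_dir(cache_dir)  # clean up the returned cache folder, as the GUI does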
def start_app(self, app_name):
    # app_name = STR
    c_msg1 = "Background calculation (check console window).\n\n"
    c_msg2 = "Python windows seem unresponsive in the meantime.\n\n"
    c_msg3 = "Logfile and cost master file automatically open once the process has terminated successfully.\n\n"
    c_msg4 = "\n >> PRESS OK TO START << "

    if app_name == "s2X":
        try:
            items = self.lb_condition_pl.curselection()
            condition_pl = [self.condition_pl_list[int(item)] for item in items][0]
            if (condition_pl.__len__() < 1) or (str(condition_pl) == "Validate Variables"):
                showinfo("ERROR", "Select condition.")
                return -1
            dir2ml_pl = config.dir2ml + "Output\\Rasters\\" + condition_pl + "\\"
            showinfo("INFO", c_msg1 + c_msg2 + c_msg3 + c_msg4)
            if float(self.vege_cr.get()) > float(self.vege_stab_cr.get()):
                showinfo("CORRECTION REQUIRED",
                         "The stabilization lifespan must be larger than or equal to the minimum lifespan of plants.")
                return -1
            best_plant_dir = s20.main(dir2ml_pl, self.vege_cr.get(), self.prj_name.get(), self.unit, self.version)
        except:
            showinfo("ERROR",
                     "Select (highlight) at least one Max Lifespan condition.\n\nClose all relevant geofiles and the cost master workbook (xlsx).")
            return -1
        try:
            lf_req = float(self.vege_stab_cr.get())
        except:
            showinfo("ERROR", "Wrong format of critical lifespan (must be numeric).")
            return -1
        try:
            s21.main(best_plant_dir, config.dir2lf + "Output\\Rasters\\" + condition_pl + "\\", lf_req,
                     self.prj_name.get(), self.unit, self.version)
            self.b_s20.config(text="Plantings OK", fg="forest green")
            showinfo("INFO",
                     "Calculation finished. VERIFY CELL LINKS IN WORKBOOK! Make sure that cell links in column F of the COSTS tab link to the correct cells in the FROM_GEODATA tab.")
        except:
            showinfo("WARNING",
                     "Could not load Max Lifespan maps for nature-based engineering.\n\nRun the Lifespan module for nature-based engineering for the selected condition first.")
        try:
            fGl.open_file("{0}{1}_{2}\\{1}_assessment_{2}.xlsx".format(config.dir2pm, self.prj_name.get(), self.version))
        except:
            pass

    if app_name == "s30":
        try:
            items = self.lb_condition_ter.curselection()
            condition_ter = [self.condition_ter_list[int(item)] for item in items][0]
            if (condition_ter.__len__() < 1) or (str(condition_ter) == "Validate Variables"):
                showinfo("ERROR", "Validate Variables first.")
                return -1
        except:
            showinfo("ERROR", "Select condition.")
            return -1
        try:
            dir2lf_ter = config.dir2lf + "Output\\Rasters\\" + condition_ter + "\\"
            showinfo("INFO", c_msg1 + c_msg2 + c_msg3 + c_msg4)
            try:
                lf_req = float(self.ter_cr.get())
            except:
                showinfo("ERROR", "Wrong format of critical lifespan (must be numeric).")
                return -1
            s30.main(dir2lf_ter, lf_req, self.prj_name.get(), self.unit, self.version, self.n, self.txcr)
            self.b_s30.config(text="Stabilize terrain OK", fg="forest green")
            showinfo("INFO",
                     "Calculation finished. VERIFY CELL LINKS IN WORKBOOK! Make sure that cell links in column F of the COSTS tab link to the correct cells in the FROM_GEODATA tab.")
        except:
            showinfo("ERROR", "Close all relevant geofiles and the cost master workbook (xlsx).")

    if app_name == "s40":
        try:
            if self.fish_applied.__len__() == 0:
                showinfo("ATTENTION",
                         "Select at least one Physical Habitat of a fish species - lifestage!")
                return -1
            if self.cover_app_pre.get() or self.cover_app_post.get():
                msg1 = "Make sure that cover cHSI rasters are available in SHArC/CHSI/"
                msg2 = str(self.condition_init) + " AND / OR " + str(self.condition_proj) + "/cover/.\n\n"
                msg3 = "Press OK to launch SHArea calculation with cover."
                showinfo("Info", msg1 + msg2 + msg3)
            if (self.condition_init.__len__() < 1) or (str(self.condition_init) == "Validate Variables"):
                showinfo("ERROR", "Select initial condition.")
                return -1
            if (self.condition_proj.__len__() < 1) or (str(self.condition_proj) == "Validate Variables"):
                showinfo("ERROR", "Select condition after terraforming.")
                return -1
            showinfo("INFO", c_msg1 + c_msg2 + c_msg3 + c_msg4)
            cache2del = s40.main(self.condition_init, self.condition_proj, self.cover_app_pre.get(),
                                 self.cover_app_post.get(), self.fish_applied, self.prj_name.get(),
                                 self.unit, self.version, self.apply_wua.get())
            self.b_s40.config(text="Net gain in SHArea calculation OK", fg="forest green")
            try:
                fGl.rm_dir(cache2del)
            except:
                showinfo("CHECK", "Cache folder (%s) needs to be deleted manually." % str(cache2del))
        except:
            showinfo("ERROR", "Close all relevant geofiles and the cost master workbook (xlsx).")
def main(lf_dir=str(), crit_lf=float(), prj_name=str(), unit=str(), version=str(), n_m=float(), txcr=float(),
         s_sed=2.68):
    """ Derive and draw terrain-stabilizing features
    crit_lf = 2.5               # years of minimum survival without stabilization
    prj_name = "TBR"            # corresponding to folder name
    unit = "us" or "si"
    version = "v10"             # type() = 3-char str: vII
    n_m = Manning's n
    txcr = critical dimensionless bed shear stress for grain motion
    s_sed = relative grain density (specific gravity of the sediment)
    """
    logger = logging.getLogger("logfile")
    logger.info("STABILIZING TERRAIN ----- ----- ----- -----")
    error = False

    if unit == "us":
        area_units = "SQUARE_FEET_US"
        ft2_to_acres = config.ft2ac
        n_m = n_m / 1.49  # (s/ft^(1/3)) Manning's n, where k = 1.49 converts to US customary units
    else:
        area_units = "SQUARE_METERS"
        ft2_to_acres = 1.0

    arcpy.CheckOutExtension('Spatial')
    arcpy.gp.overwriteOutput = True

    dir2pp = config.dir2pm + prj_name + "_" + version + "\\"

    # folder settings
    ras_dir = dir2pp + "Geodata\\Rasters\\"
    shp_dir = dir2pp + "Geodata\\Shapefiles\\"
    quant_dir = dir2pp + "Quantities\\"

    # file and variable settings
    xlsx_target = dir2pp + prj_name + "_assessment_" + version + ".xlsx"
    feature_dict = {"Large wood": 211,
                    "Bioengineering (veget.)": 213,
                    "Bioengineering (mineral)": 214,
                    "Angular boulders (instream)": 215}

    # LOOK UP INPUT RASTERS
    try:
        project_ras = arcpy.Raster(ras_dir + "ProjectArea.tif")
    except:
        try:
            project_ras = arcpy.Raster(ras_dir + "projectarea.tif")
        except:
            logger.info("ERROR: Could not create Raster of the project area.")
            return -1
    try:
        hy_condition = lf_dir.split("_lyr")[0].split("\\")[-1].split("/")[-1]
        logger.info("Looking up hydraulic Rasters for %s ..." % hy_condition)
    except:
        logger.info("ERROR: Could not find hydraulic Rasters (associated with %s)." % lf_dir)
        return -1
    try:
        h = cPa.FlowDepth(hy_condition)
        u = cPa.FlowVelocity(hy_condition)
        info = cRIL.Info(hy_condition)
        lifespans = info.lifespan_read()
    except:
        logger.info("ERROR: Could not find hydraulic Rasters (01_Conditions/%s)." % hy_condition)
        return -1
    try:
        logger.info("Looking up grain lifespan Raster ...")
        max_lf_grains = arcpy.Raster(lf_dir + "lf_grains.tif")
    except:
        logger.info("ERROR: Could not find Lifespan Raster (%slf_grains.tif)." % lf_dir)
        return -1
    logger.info("Retrieving wood lifespan Raster ...")
    try:
        lf_wood = arcpy.Raster(lf_dir + "lf_wood.tif")
    except:
        lf_wood = Float(0.0)
        logger.info("WARNING: Could not find Lifespan Raster (%slf_wood.tif) -- continue anyway using 0-wood-lifespans ..." % lf_dir)
    logger.info("Retrieving bioengineering lifespan Raster ...")
    try:
        lf_bio = arcpy.Raster(lf_dir + "lf_bio_v_bio.tif")
    except:
        lf_bio = Float(0.0)
        logger.info("WARNING: Could not find Lifespan Raster (%slf_bio_v_bio.tif) -- continue anyway using 0-bio-lifespans ..." % lf_dir)
    logger.info(" -- OK (Lifespan Rasters read)\n")

    # EVALUATE BEST STABILIZATION FEATURES
    tar_lf = fGl.get_closest_val_in_list(lifespans, crit_lf)
    if int(tar_lf) != int(crit_lf):
        logger.info("WARNING: Substituting user-defined crit. lifespan ({0}) with {1} (Condition: {2}).".format(
            str(crit_lf), str(tar_lf), hy_condition))
    try:
        logger.info("Calculating required stable grain sizes to yield a lifespan of %s years ..." % str(tar_lf))
        arcpy.env.extent = max_lf_grains.extent
        i = lifespans.index(int(tar_lf))
        # stable grain size (Manning-Strickler / Shields): D_cr = (u * n)^2 / ((s - 1) * tau*_cr * h^(1/3))
        stab_grain_ras = Con(~IsNull(project_ras),
                             Float(Square(u.rasters[i] * Float(n_m)) /
                                   ((Float(s_sed) - 1.0) * Float(txcr) * Power(h.rasters[i], (1 / 3)))))
    except arcpy.ExecuteError:
        logging.info("ExecuteERROR: (arcpy).")
        logging.info(arcpy.GetMessages(2))
        arcpy.AddError(arcpy.GetMessages(2))
        return -1
    except Exception as e:
        logging.info("ExceptionERROR: (arcpy).")
        logging.info(e.args[0])
        arcpy.AddError(e.args[0])
        return -1
    except:
        logging.info("ERROR: Could not calculate stable grain size Raster for %s." % str(tar_lf))
        logging.info(arcpy.GetMessages())
        return -1

    try:
        logger.info("Assigning stabilization features (hierarchy: Streamwood -> Bioengineering (other) -> Boulder paving) ...")
        arcpy.env.extent = max_lf_grains.extent
        best_stab_i = Con(max_lf_grains <= crit_lf,
                          Con(~IsNull(lf_wood),
                              Con(lf_wood > crit_lf, Int(feature_dict["Large wood"])),
                              Con(~IsNull(lf_bio),
                                  Con(lf_bio > crit_lf,
                                      Int(feature_dict["Bioengineering (veget.)"]),
                                      Int(feature_dict["Bioengineering (mineral)"])),
                                  Int(feature_dict["Angular boulders (instream)"]))))
        best_boulders = Con(max_lf_grains <= crit_lf, Con(IsNull(best_stab_i), Float(stab_grain_ras)))
        best_stab = Con(IsNull(best_stab_i),
                        Con(~IsNull(best_boulders), Int(feature_dict["Angular boulders (instream)"])),
                        Int(best_stab_i))
        logger.info(" -- OK (Stabilization assessment)\n")
    except:
        logger.info("ERROR: Best stabilization assessment failed.")
        return -1

    # SAVE RASTERS
    try:
        logger.info("Saving results Raster " + ras_dir + "terrain_stab.tif")
        best_stab.save(ras_dir + "terrain_stab.tif")
        logger.info(" -- OK (Raster saved.)")
    except:
        logger.info("ERROR: Result geofile saving failed.")
    try:
        logger.info("Saving results Raster " + ras_dir + "terrain_boulder_stab.tif")
        best_boulders.save(ras_dir + "terrain_boulder_stab.tif")
        logger.info(" -- OK (Stabilization Rasters saved)\n")
    except:
        logger.info("ERROR: Result geofile saving failed.")

    # SHAPEFILE CONVERSION AND STATS
    try:
        logger.info("Extracting quantities from geodata ...")
        logger.info(" >> Converting terrain_stab.tif to polygon shapefile ...")
        t_stab_shp = shp_dir + "Terrain_stab.shp"
        conversion_success = True
        try:
            arcpy.RasterToPolygon_conversion(best_stab, t_stab_shp, "NO_SIMPLIFY")
            if not fGl.verify_shp_file(t_stab_shp):
                logger.info("NO BIOENGINEERING STABILIZATION MEASURE IDENTIFIED (EMPTY: %s)." % t_stab_shp)
        except:
            conversion_success = False
        logger.info(" >> Converting terrain_boulder_stab.tif to layer ...")
        t_boulder_shp = shp_dir + "Terrain_boulder_stab.shp"
        try:
            arcpy.RasterToPolygon_conversion(Int(best_boulders + 1.0), t_boulder_shp, "NO_SIMPLIFY")
            if not fGl.verify_shp_file(t_stab_shp):
                logger.info("NO BOULDER STABILIZATION MEASURE IDENTIFIED (EMPTY: %s)." % t_boulder_shp)
        except:
            if not conversion_success:
                logger.info("No stabilization requirement identified. Returning without action.")
                return -1

        logger.info(" >> Calculating area statistics ... ")
        try:
            arcpy.AddField_management(t_stab_shp, "F_AREA", "FLOAT", 9)
        except:
            logger.info("    * field F_AREA already exists or the dataset is opened by another program.")
        try:
            arcpy.CalculateGeometryAttributes_management(t_stab_shp, geometry_property=[["F_AREA", "AREA"]],
                                                         area_unit=area_units)
        except:
            logger.info("    * no terrain stabilization applicable ")

        logger.info(" >> Adding field (stabilizing feature) ... ")
        try:
            arcpy.AddField_management(t_stab_shp, "Stab_feat", "TEXT")
        except:
            logger.info("    * field Stab_feat already exists ")

        logger.info(" >> Evaluating field (stabilizing feature) ... ")
        inv_feature_dict = {v: k for k, v in feature_dict.items()}
        code_block = "inv_feature_dict = " + str(inv_feature_dict)
        try:
            arcpy.CalculateField_management(t_stab_shp, "Stab_feat", "inv_feature_dict[!gridcode!]",
                                            "PYTHON", code_block)
        except:
            logger.info("    * no terrain stabilization added ... ")

        logger.info(" >> Exporting tables ...")
        arcpy.TableToTable_conversion(t_stab_shp, quant_dir, "terrain_stab.txt")
        logger.info(" -- OK (Quantity export)\n")
    except:
        logger.info("ERROR: Shapefile operations failed.")
        return -1

    # PREPARE AREA DATA (QUANTITIES)
    logger.info("Processing table statistics ...")
    write_dict = {}
    for k in feature_dict.keys():
        write_dict.update({k: 0.0})  # set to zero for surface count
    stat_data = fGl.read_txt(quant_dir + "terrain_stab.txt")
    logger.info(" >> Extracting relevant area sizes ...")
    for row in stat_data:
        try:
            write_dict[inv_feature_dict[int(row[0])]] += row[1]
        except:
            logger.info("      --- Unknown key: " + str(int(row[0])))
            error = True
    if unit == "us":
        logger.info(" >> Converting ft2 to acres ...")
        for k in write_dict.keys():
            write_dict[k] = write_dict[k] * float(ft2_to_acres)
    logger.info(" -- OK (Area extraction finished)\n")

    # WRITE AREA DATA TO EXCEL FILE
    logger.info("Writing results to costs workbook (sheet: from_geodata) ...")
    fGl.write_dict2xlsx(write_dict, xlsx_target, "E", "F", 12)

    # CLEAN UP useless shapefiles
    logger.info("Cleaning up redundant shapefiles ...")
    arcpy.env.workspace = shp_dir
    all_shps = arcpy.ListFeatureClasses()
    for shp in all_shps:
        if "_del" in str(shp):
            try:
                arcpy.Delete_management(shp)
            except:
                logger.info(str(shp) + " is locked. Remove manually to avoid confusion.")
    arcpy.env.workspace = dir2pp + "Geodata\\"
    logger.info(" -- OK (Clean up)\n")

    if not error:
        fGl.open_file(xlsx_target)