Example #1
    def clean_up(self):
        arcpy.env.overwriteOutput = True
        arcpy.env.workspace = self.cache
        ras_list = arcpy.ListRasters()
        shp_list = arcpy.ListFeatureClasses()
        try:

            self.logger.info(
                "   * clearing .cache (arcpy.Delete_management) ...")
            for ras in ras_list:
                try:
                    arcpy.Delete_management(str(ras))
                except:
                    pass
            for shp in shp_list:
                try:
                    arcpy.Delete_management(str(shp))
                except:
                    pass
            fGl.rm_dir(self.cache)
            if self.reset_cache:
                fGl.chk_dir(self.cache)
            self.logger.info("   * ok")
        except:
            self.logger.info(
                "WARNING: .cache folder will be removed by package controls.")
Example #2
    def __init__(self, dem, q_h_interp_dict, q_disc_ras, *args, **kwargs):

        self.logger = logging.getLogger("logfile")
        self.cache = config.dir2co + ".cache%s" % str(random.randint(1000000, 9999999))
        fGl.chk_dir(self.cache)
        arcpy.env.workspace = self.cache
        arcpy.env.overwriteOutput = True

        self.dem = Raster(dem)
        # dictionary with discharge as key and wse raster as value
        self.q_h_interp_dict = q_h_interp_dict
        self.q_wse_dict = {q: self.dem + h for q, h in self.q_h_interp_dict.items()}
        # patches defining areas for which to fit rating curve
        self.q_disc_ras = q_disc_ras
        self.discharges = self.q_h_interp_dict.keys()

        # populated by self.create_patches()
        self.patch_ras = ''

        # populated by self.get_patch_wses()
        # patch number as key, [[WSEs,...], [Qs,...]] as values
        self.patch_wses = {}
        self.patch_qs = {}

        arcpy.env.snapRaster = self.q_disc_ras
        self.create_patches()
        self.get_patch_wses()
        self.fit_rating_curves()
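Nearly every example calls fGl.chk_dir, and several call fGl.rm_dir or fGl.clean_dir, none of which are shown here. Judging by the call sites, chk_dir creates a directory if it is missing (Example #16 also tests its return value, suggesting it reports prior existence) and rm_dir deletes a directory tree. A minimal sketch under those assumptions:

import os
import shutil

def chk_dir(directory):
    # create the directory (and parents) if it does not exist yet;
    # the returned flag mirrors how Example #16 tests the call
    existed = os.path.isdir(directory)
    if not existed:
        os.makedirs(directory)
    return existed

def rm_dir(directory):
    # remove the directory tree, ignoring errors from locked files
    shutil.rmtree(directory, ignore_errors=True)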
Example #3
    def __init__(self, unit_system, *args, **kwargs):
        # args[0] optional out_dir -- otherwise: out_dir = script_dir
        # kwargs
        self.logger = logging.getLogger("logfile")
        self.cache = config.dir2gs + ".cache\\"
        self.mu_xlsx_dir = config.xlsx_mu
        self.logger.info("->> Reading Morphological Units (%s)" % self.mu_xlsx_dir)
        fGl.chk_dir(self.cache)

        try:
            self.out_dir = args[0]
        except:
            self.out_dir = config.dir2gs

        if unit_system == "us":
            self.logger.info(" * converting Rasters to U.S. customary units")
            self.uc = 3.2808399
        else:
            self.logger.info(" * using SI metric Raster units")
            self.uc = 1.0

        self.mu_dict = {}
        self.mu_h_lower = {}
        self.mu_h_upper = {}
        self.mu_u_lower = {}
        self.mu_u_upper = {}
        self.read_mus()
        self.raster_dict = {}
        self.ras_mu = 0
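The try/except around args[0] emulates an optional positional argument, a pattern repeated in Examples #9, #10, and #22; an explicit test avoids the bare except (a sketch, not the project's code):

def resolve_out_dir(args, default_dir):
    # return the optional first positional argument, else the default
    return args[0] if args else default_dir

# e.g.: self.out_dir = resolve_out_dir(args, config.dir2gs)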
Example #4
    def clear_cache(self, *args):
        # if args[0]==False: the cache folder itself is not deleted
        arcpy.env.overwriteOutput = True
        arcpy.env.workspace = self.cache
        ras_list = arcpy.ListRasters()
        shp_list = arcpy.ListFeatureClasses()
        try:
            for ras in ras_list:
                try:
                    arcpy.Delete_management(str(ras))
                except:
                    pass
            for shp in shp_list:
                try:
                    arcpy.Delete_management(str(shp))
                except:
                    pass
            try:
                arcpy.env.workspace = os.path.dirname(os.path.abspath(__file__))  # temporary workspace
                fGl.rm_dir(self.cache)
                if not args[0]:
                    self.logger.info("        * restoring cache ...")
                    fGl.chk_dir(self.cache)
                    arcpy.env.workspace = self.cache
            except:
                self.logger.info(" >> Cleared .cache folder (arcpy.Delete_management) ...")

        except:
            self.logger.info("WARNING: .cache folder will be removed by package controls.")
Example #5
    def clear_cache(self, *args):
        try:
            # check for optional BOOL argument to restore cache
            recreate_cache = args[0]
        except:
            recreate_cache = False
        arcpy.env.overwriteOutput = True
        arcpy.env.workspace = self.cache
        ras_list = arcpy.ListRasters()
        shp_list = arcpy.ListFeatureClasses()
        try:
            if not recreate_cache:
                self.logger.info(
                    "   * clearing .cache (arcpy.Delete_management) ...")
            else:
                self.logger.info(
                    "   * resetting .cache (arcpy.Delete_management) ...")
            for ras in ras_list:
                try:
                    arcpy.Delete_management(str(ras))
                except:
                    pass
            for shp in shp_list:
                try:
                    arcpy.Delete_management(str(shp))
                except:
                    pass
            fGl.rm_dir(self.cache)
            self.cache_count = 0
            if recreate_cache:
                fGl.chk_dir(self.cache)
            self.logger.info("   * ok")
        except:
            self.logger.info(
                "       .cache folder will be removed by package controls.")
Example #6
    def __init__(self, unit=str(), prj_name=str(), version=str()):
        self.dir2geo = config.dir2pm + prj_name + "_" + version + "\\Geodata\\"
        self.cache = self.dir2geo + ".cache%s\\" % str(
            random.randint(1000000, 9999999))
        self.cache_count = 0
        fGl.chk_dir(self.cache)
        self.extents = [0, 0, 0, 0]
        self.logger = logging.getLogger("logfile")
        self.ras_project = None
        self.result = 0.0
        self.cell_size = 0.0
        self.cell_area = 0.0
        self.mean_csi = 0.0
        self.wua = 0.0
        self.hhs = 0.0
        self.unit = unit
        if self.unit == "us":
            self.area_unit = "SQUARE_FEET_US"
            self.unit_str = "acres"
            self.ft2ac = config.ft2ac
        else:
            self.area_unit = "SQUARE_METERS"
            self.unit_str = "m2"
            self.ft2ac = 1.0

        self.xlsx_out = ""
Example #7
    def __init__(self, dir2condition):
        self.condition = os.path.basename(dir2condition.strip("\\").strip("/"))
        self.dir2condition = dir2condition  # string of the condition to be created
        self.cache = os.path.join(config.dir2conditions, ".cache")
        self.error = False
        self.warning = False
        self.logger = logging.getLogger("logfile")
        fGl.chk_dir(self.cache)
Example #8
def calculate_hydraulics(roughness_laws, roughness, sediment_container):
    # roughness_laws = LIST of applicable roughness types included in roughness_dict
    # roughness = cRoughness.RoughnessLaw() object
    # sediment_container = cMorphoDynamic.SedimentDynamics() object
    qgs = launch_qgis()
    h_file = open("input/txt/flow_depth_list.txt", "r")
    u_file = open("input/txt/flow_velocity_list.txt", "r")

    h_list = h_file.read().splitlines()
    u_list = u_file.read().splitlines()
    hy_dict = dict(zip(h_list, u_list))

    roughness_dict = {
        "Bathurst": lambda go: roughness.Bathurst(),
        "Drag1": lambda go: roughness.Drag1(),
        "Drag2": lambda go: roughness.Drag2(),
        "Ferguson": lambda go: roughness.Ferguson(),
        "Hey": lambda go: roughness.Hey(),
        "Keulegan": lambda go: roughness.Keulegan(),
        "MPM": lambda go: roughness.MPM(),
        "ParkerA": lambda go: roughness.ParkerA(),
        "ParkerB": lambda go: roughness.ParkerB(),
        "Smart": lambda go: roughness.Smart(),
        "Strickler": lambda go: roughness.Strickler()
    }

    for rl in roughness_laws:
        logging.info(' APPLYING ROUGHNESS LAW: ' + str(rl).upper())
        logging.info(' -- Creating U-Ux Rasters ...')
        for flow_h in h_list:
            roughness.set_hy_rasters(flow_h, hy_dict[flow_h])
            try:
                logging.info(' --- Discharge: ' +
                             str(int(roughness.label_q) * 100) + ' cfs')
            except:
                logging.warning('INVALID discharge code:  ' +
                                str(roughness.label_q))
            try:
                roughness_dict[rl](1)
            except:
                logging.warning('ERROR: Could not calculate ' +
                                str(roughness.label_q) + '(' + str(rl) + ')')

        logging.info(' -- Creating TAUx Rasters ...')

        # calculate dimensionless bed shear stress taux
        for i, j in zip(u_list, roughness.out_txts):
            sediment_container.make_taux_ras(i, j)
        roughness.reset_out_txt()

        logging.info(
            ' -- Clearing uux (roughness) Rasters (required for disk space preservation) ...'
        )
        fun.clean_dir(roughness.dir_out)
        fun.chk_dir(
            os.path.abspath(os.path.dirname(__file__)) + "/output/roughness/")
    qgs.exitQgis()
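roughness_dict is a dispatch table keyed by roughness-law name; the lambdas' unused go parameter only satisfies the one-argument call roughness_dict[rl](1). getattr performs the same lookup directly, and a with block would close the input files that calculate_hydraulics leaves open. A sketch of both points:

def apply_roughness_law(roughness, law_name):
    # dispatch by method name; raises AttributeError for unknown law names
    getattr(roughness, law_name)()

with open("input/txt/flow_depth_list.txt") as h_file:
    h_list = h_file.read().splitlines()  # the file closes when the block exits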
Example #9
    def __init__(self, *args, **kwargs):
        # args[0] optional out_dir -- otherwise: out_dir = script_dir
        # kwargs

        self.cache = config.dir2gs + ".cache2\\"  # use cache2 to enable parallel proc.
        self.reset_cache = False
        fGl.chk_dir(self.cache)

        try:
            self.out_dir = args[0]
        except:
            self.out_dir = config.dir2gs

        self.logger = logging.getLogger("logfile")
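The fixed .cache2 name permits exactly one parallel run, whereas several of the other examples append random.randint(...) to get quasi-unique folders per instance; tempfile.mkdtemp creates collision-safe directories for the same purpose (a sketch with a stand-in base directory):

import tempfile

base_dir = "."  # stand-in for config.dir2gs and similar base paths
cache = tempfile.mkdtemp(prefix=".cache_", dir=base_dir)  # e.g. ./.cache_k3j9x2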
Example #10
    def __init__(self, path2h_ras, path2dem_ras, *args, **kwargs):
        """
        path2h_ras (str): full path to the depth raster used for interpolating WLE
        path2dem_ras (str): full path to the DEM
        args[0] (str): optional out_dir -- otherwise: out_dir = script_dir
        kwargs["unique_id"] (Boolean): determines if output files have integer discharge value in output file name
        kwargs["method"] (str): 'IDW', 'Kriging', or 'Nearest Neighbor'. Determines the interpolation scheme. Default 'IDW'.
        """

        self.cache = os.path.join(
            config.dir2gs, ".cache%s" % str(random.randint(1000000, 9999999)))
        fGl.chk_dir(self.cache)

        self.path2h_ras = path2h_ras
        self.path2dem_ras = path2dem_ras

        try:
            self.out_dir = args[0]
        except:
            self.out_dir = config.dir2gs

        try:
            self.unique_id = kwargs["unique_id"]
        except:
            self.unique_id = False

        try:
            self.method = kwargs["method"]
        except:
            self.method = 'IDW'

        if self.unique_id:
            Q = int(
                os.path.splitext(os.path.basename(
                    self.path2h_ras))[0].split("h")[1])
            self.out_wle = "wle%06d.tif" % Q
            self.out_wle_var = "wle%06d_var.tif" % Q
            self.out_h_interp = "h%06d_interp.tif" % Q
            self.out_d2w = "d2w%06d.tif" % Q
        else:
            self.out_wle = "wle.tif"
            self.out_wle_var = "wle_var.tif"
            self.out_h_interp = "h_interp.tif"
            self.out_d2w = "d2w.tif"

        self.logger = logging.getLogger("logfile")
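The three try/except blocks are equivalent to dict.get and a truth test on args, and the discharge parsing implies depth rasters named like h001000.tif. A sketch under that naming assumption:

# equivalent option handling without bare excepts:
#   out_dir = args[0] if args else config.dir2gs
#   unique_id = kwargs.get("unique_id", False)
#   method = kwargs.get("method", "IDW")
import os

# a depth raster named h001000.tif yields Q = 1000 and outputs like wle001000.tif
Q = int(os.path.splitext(os.path.basename("h001000.tif"))[0].split("h")[1])  # -> 1000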
Example #11
    def __init__(self, condition, purpose, **kwargs):
        # condition = STR defining the CONDITION
        # purpose =  STR, either "sharc" or "q_return"  or "q_duration"
        # **kwargs: unit
        self.unit = ""
        # parse optional arguments
        try:
            for k in kwargs.items():
                if "unit" in k[0]:
                    self.unit = k[1]  # STR of either "us" or "si"
        except:
            pass
        self.out_ras = []
        self.condition = condition
        self.dict_Q_h_ras = {}
        self.dict_Q_u_ras = {}
        self.dict_Q_va_ras = {}

        self.dir_in_ras = config.dir2conditions + str(self.condition) + "\\"
        self.dir_xlsx_out = ""

        self.logger = logging.getLogger("logfile")
        self.discharges = []
        self.h_rasters = []
        self.u_rasters = []
        self.va_rasters = []

        # dummy workbook variable instantiations
        self.xlsx_template = config.xlsx_dummy
        self.wb = oxl.load_workbook(filename=self.xlsx_template)
        self.wb_out_name = ""
        self.ws = self.wb.worksheets[0]

        # workbook range specifications
        self.col_Q = ""
        self.col_ras_h = ""
        self.col_ras_u = ""
        self.row_start = int()
        self.set_directories(purpose)  # sets purpose-specific workbook ranges
        self.get_condition_discharges()
        try:
            fGl.chk_dir(self.dir_xlsx_out)
        except:
            pass
Example #12
    def __init__(self, input_csv=str()):
        """
        :param input_csv: STR of full dir to input csv
        """
        self.csv_file = input_csv
        self.cache = os.path.dirname(os.path.abspath(__file__)) + "/.cache/"
        fGl.chk_dir(self.cache)
        self.csv_name = input_csv.split("\\")[-1].split("/")[-1].split(
            ".csv")[0]
        self.out_dir = os.path.dirname(
            os.path.abspath(__file__)) + "/output/" + self.csv_name + "/"
        print(" * creating output dir (%s) ..." % self.out_dir)
        fGl.chk_dir(self.out_dir)
        print(" * cleaning output dir ...")
        fGl.clean_dir(self.out_dir)
        self.boundary_shp = self.out_dir + "boundary.shp"
        self.point_shp = self.out_dir + self.csv_name + "_pts.shp"
        self.raster_tif = self.out_dir + self.csv_name + ".tif"
        self.tin = self.out_dir + self.csv_name + "_tin"
        self.sr = arcpy.SpatialReference(26942)
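The chained split calls reduce input_csv to its bare file stem regardless of slash direction; pathlib expresses the same intent in one call (a sketch):

from pathlib import PureWindowsPath

print(PureWindowsPath("C:\\data\\survey.csv").stem)  # -> survey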
Example #13
    def __init__(self, hab_condition, cover_applies, unit):
        self.cache = config.dir2sh + ".cache%s\\" % str(random.randint(10000, 99999))
        self.condition = hab_condition
        self.combine_method = "geometric_mean"
        self.cover_applies = cover_applies  # BOOL
        self.logger = logging.getLogger("logfile")

        self.path_condition = config.dir2conditions + self.condition + "\\"
        self.path_hsi = config.dir2sh + "HSI\\" + str(self.condition) + "\\"
        if self.cover_applies:
            p_ext = "cover"
        else:
            p_ext = "no_cover"
        self.path_csi = config.dir2sh + "CHSI\\" + str(self.condition) + "\\" + p_ext + "\\"
        self.path_sha_ras = config.dir2sh + "SHArea\\Rasters_" + str(self.condition) + "\\" + p_ext + "\\"
        fGl.chk_dir(self.cache)
        fGl.chk_dir(self.path_csi)
        fGl.chk_dir(self.path_sha_ras)

        self.unit = unit
        if self.unit == "us":
            self.area_unit = "SQUARE_FEET_US"
            self.u_length = "ft"
            self.u_discharge = "cfs"
            self.ft2ac = 1 / 43560
        else:
            self.area_unit = "SQUARE_METERS"
            self.u_length = "m"
            self.u_discharge = "m3"
            self.ft2ac = 1
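The ft2ac factor in Example #13 converts square feet to acres (one acre is 43,560 square feet); the SI branch keeps square meters, so its factor stays 1. For instance:

print(87120 * (1 / 43560))  # 87,120 ft2 -> 2.0 acres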
Example #14
    def __init__(self, geo_input_path, condition, *unit_system):

        # general directories and parameters
        self.cache = config.dir2sh + ".cache\\"
        self.condition = condition
        self.dir_in_geo = geo_input_path
        self.path_hsi = config.dir2sh + "HSI\\" + str(condition) + "\\"
        self.error = False
        self.flow_dict_h = {}
        self.flow_dict_u = {}
        self.fish = cFi.Fish()
        self.logger = logging.getLogger("logfile")
        self.raster_dict = {}
        self.ras_h = []
        self.ras_u = []

        fGl.chk_dir(self.cache)
        fGl.clean_dir(self.cache)
        fGl.chk_dir(self.path_hsi)
        fGl.chk_dir(self.dir_in_geo)

        # set unit system variables
        try:
            self.units = unit_system[0]
        except:
            self.units = "us"
            print("WARNING: Invalid unit_system identifier. unit_system must be either \'us\' or \'si\'.")
            print("         Setting unit_system default to \'us\'.")
Example #15
    def write_flow_duration2xlsx(self, condition):
        fG.chk_dir(config.dir2ra + "00_Flows\\" + condition + "\\")
        for fish in self.export_dict.keys():
            export_xlsx_name = config.dir2ra + "00_Flows\\" + condition + "\\flow_duration_" + str(
                fish) + ".xlsx"
            self.logger.info("   * writing to " + export_xlsx_name)
            try:
                xlsx_write = cIO.Write(self.xlsx_template)
            except:
                self.logger.info("ERROR: Could not open workbook (%s)." %
                                 self.xlsx_template)
                continue
            # xlsx_write.open_wb(export_xlsx_name, 0)
            self.logger.info("   * writing data ...")
            try:
                xlsx_write.write_cell("E", 4, fish)
                xlsx_write.write_cell(
                    "E", 5, " Month:" +
                    str(self.fish_seasons[fish]["start"]["month"]) + " Day:" +
                    str(self.fish_seasons[fish]["start"]["day"]))
                xlsx_write.write_cell(
                    "E", 6,
                    " Month:" + str(self.fish_seasons[fish]["end"]["month"]) +
                    " Day:" + str(self.fish_seasons[fish]["end"]["day"]))
                xlsx_write.write_cell("E", 7, self.min_year)
                xlsx_write.write_cell("E", 8, self.max_year)
                xlsx_write.write_matrix("A", 3, self.export_dict[fish])
            except:
                self.logger.info("")

            try:
                self.logger.info("   * saving workbook ... ")
                xlsx_write.save_close_wb(export_xlsx_name)
            except:
                self.logger.info("ERROR: Failed to save %s" % export_xlsx_name)
        try:
            return export_xlsx_name
        except:
            return -1
Example #16
def main(software_name, license_type, model_name):
    """
    software_name = STR of 2D modelling software (e.g., "Tuflow")
    license_type = STR, either "full" or "limited"
    model_name = STR of model (e.g., "A_Unique_Reach_2008")
    """
    if "tuflow" in software_name.lower():
        model = c2D.Tuflow(license_type)
    else:
        return print("No valid model name found (provided %s)." %
                     str(software_name))

    if not fGl.chk_dir(model.model_dir + model_name):
        model.make_file_structure(model_name)
        model.file_dialogue()
    else:
        model.update_model_name(model_name)
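A hypothetical call, reusing the names from the docstring (a Tuflow model with a full license):

main("Tuflow", "full", "A_Unique_Reach_2008")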
Example #17
    def __init__(self, condition, map_type, *args):
        # condition = [str] state of planning situation, e.g., "2008"
        # map_type = [str] options: "lf", "ds", "mlf", "mt"
        # args[0] alternative raster input directory - if empty: uses standard output
        # args[1] alternative output directory - if empty: 02_Maps/CONDITION/
        self.logger = logging.getLogger("logfile")

        # get and make directories
        self.condition = condition
        self.dir_map_shp = ""
        self.error = False
        self.legend = None  # tar: aprx.listLayouts()[0].listElements("mapframe_element")[0].listElements("legend_element")[0]
        self.m = None  # will be an aprx.listMaps(STR)[0] object
        self.map_frame = None  # will be an aprx.listLayouts()[0].listElements("mapframe_element")[0] object
        self.map_layout = None  # will be an aprx.listLayouts()[0] object
        self.map_type = map_type
        self.map_string = ""  # will be assigned as a function of map_type in self.assign_directories
        self.raster_extent = None  # tar: arcpy.Raster().extent
        self.map_list = []
        self.ras4map_list = []
        self.resolution = 96  # dpi
        self.xy_center_points = []

        try:
            if not (args[0].__len__() < 2):
                self.dir_map_ras = args[0]
            else:
                self.dir_map_ras = str(self.get_input_ras_dir(map_type))
            fGl.chk_dir(self.dir_map_ras)
        except:
            try:
                self.dir_map_ras = str(self.get_input_ras_dir(map_type))
            except:
                self.logger.info(
                    "WARNING: The provided path to rasters for mapping is invalid. Using templates instead."
                )
                self.dir_map_ras = config.dir2map_templates + "rasters"

        try:
            self.output_dir = args[1]
        except:
            self.output_dir = config.dir2map + self.condition + "\\"
        fGl.chk_dir(self.output_dir)
        fGl.chk_dir(self.output_dir + "layers\\")

        try:
            self.aprx = self.copy_template_project(
            )  # returns an arcpy.mp.ArcGISProject() object
        except:
            self.logger.info("ERROR: Could read source project (ensure that " +
                             config.dir2map_templates +
                             "river_template.aprx exists).")
Example #18
def prepare_calculation(roughness_list):
    # verify output folder structure
    dir_corr_out = os.path.abspath(
        os.path.dirname(__file__)) + "/output/correlations/"
    fun.chk_dir(dir_corr_out)
    fun.chk_dir(os.path.abspath(os.path.dirname(__file__)) + "/output/phi2dz/")
    fun.chk_dir(
        os.path.abspath(os.path.dirname(__file__)) + "/output/roughness/")
    fun.chk_dir(os.path.abspath(os.path.dirname(__file__)) + "/output/taux/")
    clear_all(
        os.path.abspath(os.path.dirname(__file__)) + '/output/roughness/val_',
        '.txt', roughness_list)
    clear_all(
        os.path.abspath(os.path.dirname(__file__)) + '/output/phi2dz/val_',
        '.txt', ['phi_mpm', 'phi_rel'])
    clear_all(
        os.path.abspath(os.path.dirname(__file__)) + '/output/taux/val_',
        '.txt', ['taux'])
    clear_all(dir_corr_out, '.txt', [
        f for f in os.listdir(dir_corr_out)
        if os.path.isfile(os.path.join(dir_corr_out, f))
        and f.split('.')[-1] == 'txt'
    ])
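prepare_calculation recomputes os.path.abspath(os.path.dirname(__file__)) for every folder; binding it once and joining paths keeps the calls readable (a sketch):

import os

script_dir = os.path.abspath(os.path.dirname(__file__))
dir_corr_out = os.path.join(script_dir, "output", "correlations")
# ... the remaining chk_dir/clear_all calls can reuse script_dir the same way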
Example #19
    def construct_graph(self, graph_vis=False):
        """
        Convert matrices to weighted inverse digraph
        key = to_vertex
        values = list of tuples [(from_vertex, cost),...] with cost of getting from from_vertex to to_vertex
        """
        self.logger.info("Constructing graph...")
        for i, row in enumerate(self.h_mat):
            for j, val in enumerate(row):
                # check if val is nan
                if not np.isnan(val):
                    key = str(i) + ',' + str(j)
                    neighbors, octants = self.get_neighbors(i, j)

                    for neighbor_i, octant in zip(neighbors, octants):
                        neighbor_key = str(neighbor_i[0]) + ',' + str(
                            neighbor_i[1])
                        # check if neighbor index is within array
                        if (0 <= neighbor_i[0] < self.h_mat.shape[0]) and (
                                0 <= neighbor_i[1] < self.h_mat.shape[1]):
                            # check if neighbor is nan
                            if not np.isnan(self.h_mat[neighbor_i]):
                                # check if depth > threshold (at neighbor location)
                                if self.h_mat[neighbor_i] > self.h_thresh:
                                    # check velocity condition
                                    if self.analyze_v:
                                        mag_u_w = self.u_mat[
                                            i,
                                            j]  # magnitude of water velocity
                                        dir_u_w = self.va_mat[
                                            i, j]  # angle from north
                                    else:
                                        mag_u_w = 0
                                        dir_u_w = 0
                                    if self.check_velocity_condition(
                                            mag_u_w, dir_u_w, octant):
                                        cost = self.get_cost(key, neighbor_key)
                                        try:
                                            self.graph[key] = self.graph[
                                                key] + [neighbor_key]
                                        except KeyError:
                                            self.graph[key] = [neighbor_key]
                                        try:
                                            self.inv_graph[
                                                neighbor_key] = self.inv_graph[
                                                    neighbor_key] + [
                                                        (key, cost)
                                                    ]
                                        except KeyError:
                                            self.inv_graph[neighbor_key] = [
                                                (key, cost)
                                            ]

        if graph_vis:
            # outputs for graph visualization
            self.logger.info("Making rasters for graph visualization...")
            for key, neighbor_keys in self.graph.items():
                i1, j1 = list(map(int, key.split(",")))
                for n, neighbor_key in enumerate(neighbor_keys):
                    i2, j2 = list(map(int, neighbor_key.split(",")))
                    self.graph_mats[n, 1, i1,
                                    j1] = -(i2 - i1)  # increasing row =  down
                    self.graph_mats[n, 0, i1, j1] = j2 - j1
            q = os.path.basename(self.path2_h_ras).replace("h",
                                                           "").split("_")[0]
            graph_vis_dir = os.path.join(
                os.path.dirname(self.path2_target_ras), "graph_vis%s" % q)
            fGl.chk_dir(graph_vis_dir)
            for i, graph_mat in enumerate(self.graph_mats):
                ras_x = arcpy.NumPyArrayToRaster(graph_mat[0],
                                                 lower_left_corner=self.ref_pt,
                                                 x_cell_size=self.cell_size,
                                                 value_to_nodata=np.nan)
                ras_y = arcpy.NumPyArrayToRaster(graph_mat[1],
                                                 lower_left_corner=self.ref_pt,
                                                 x_cell_size=self.cell_size,
                                                 value_to_nodata=np.nan)
                ras = arcpy.CompositeBands_management(
                    [ras_x, ras_y],
                    os.path.join(graph_vis_dir, "graph_vis%i.tif" % (i + 1)))

            i_mat = np.zeros(self.h_mat.shape)
            j_mat = np.zeros(self.h_mat.shape)
            for i, row in enumerate(i_mat):
                for j in range(len(row)):
                    i_mat[i, j] = i
                    j_mat[i, j] = j
            ras_i = arcpy.NumPyArrayToRaster(i_mat,
                                             lower_left_corner=self.ref_pt,
                                             x_cell_size=self.cell_size,
                                             value_to_nodata=np.nan)
            ras_j = arcpy.NumPyArrayToRaster(j_mat,
                                             lower_left_corner=self.ref_pt,
                                             x_cell_size=self.cell_size,
                                             value_to_nodata=np.nan)
            ras_i.save(
                os.path.join(os.path.dirname(self.path2_target_ras),
                             "i_mat%s.tif" % q))
            ras_j.save(
                os.path.join(os.path.dirname(self.path2_target_ras),
                             "j_mat%s.tif" % q))

        self.logger.info("Merging target vertices...")
        # make copy so we can delete keys of original graph during iteration (not containing "end" key)
        graph_copy = {k: v for k, v in self.inv_graph.items()}
        self.inv_graph["end"] = []
        for v, neighbors in graph_copy.items():
            # update values
            self.inv_graph[v] = list(
                map(lambda x: ("end", x[1])
                    if x[0] in self.end else x, neighbors))
            # merge vertices
            if v in self.end:
                self.inv_graph["end"] += neighbors
                del self.inv_graph[v]
        del graph_copy
        # remove duplicate values
        for v, neighbors in self.inv_graph.items():
            num_vs = [neighbor[0] for neighbor in neighbors].count("end")
            if num_vs > 1:
                # keep one with least cost, remove other duplicates
                least_cost = min([x[1] for x in neighbors if x[0] == "end"])
                self.inv_graph[v] = [
                    x for x in self.inv_graph[v] if x[0] != "end"
                ] + [("end", least_cost)]
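The nested try/except KeyError blocks in construct_graph emulate collections.defaultdict when appending to self.graph and self.inv_graph. A sketch of the same bookkeeping with defaultdict:

from collections import defaultdict

graph = defaultdict(list)      # forward adjacency: vertex key -> [neighbor keys]
inv_graph = defaultdict(list)  # inverse digraph: to_vertex -> [(from_vertex, cost)]
# same effect as the try/except pairs above (illustrative keys and cost):
graph["0,0"].append("0,1")
inv_graph["0,1"].append(("0,0", 1.0))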
Example #20
    def __init__(self, unit_system, org_ras_dir, mod_ras_dir, reach_ids):
        # unit_system must be either "us" or "si"
        # feature_ids = list of feature shortnames
        # reach_ids = list of reach names to limit the analysis

        # general directories and parameters
        self.cache = config.dir2va + ".cache%s\\" % str(random.randint(1000000, 9999999))
        self.vol_name = mod_ras_dir.split(":\\")[-1].split(":/")[-1].split("01_Conditions\\")[-1].split("01_Conditions/")[-1].split(".tif")[0].replace("\\", "_").replace("/", "_").replace("_dem", "")
        fGl.chk_dir(self.cache)
        fGl.clean_dir(self.cache)
        self.logger = logging.getLogger("logfile")
        self.output_ras_dir = config.dir2va + "Output\\%s\\" % self.vol_name
        fGl.chk_dir(self.output_ras_dir)
        fGl.clean_dir(self.output_ras_dir)
        self.rasters = []
        self.raster_info = ""
        self.rasters_for_pos_vol = {}
        self.rasters_for_neg_vol = {}
        self.reader = cRM.Read()
        self.reaches = cDef.ReachDefinitions()
        self.volume_neg_dict = {}
        self.volume_pos_dict = {}

        try:
            self.orig_raster = arcpy.Raster(org_ras_dir)
        except:
            self.orig_raster = Float(-1)
            self.logger.info("ERROR: Cannot load original DEM")
        try:
            self.modified_raster = arcpy.Raster(mod_ras_dir)
        except:
            self.modified_raster = Float(-1)
            self.logger.info("ERROR: Cannot load modified DEM.")

        # set relevant reaches
        try:
            self.reach_ids_applied = reach_ids
            self.reach_names_applied = []
            for rn in self.reach_ids_applied:
                self.reach_names_applied.append(self.reaches.dict_id_names[rn])
        except:
            self.reach_ids_applied = self.reaches.id_xlsx
            self.reach_names_applied = self.reaches.name_dict
            self.logger.info("WARNING: Cannot identify reaches.")

        # set unit system variables
        if ("us" in str(unit_system)) or ("si" in str(unit_system)):
            self.units = unit_system
        else:
            self.units = "us"
            self.logger.info("WARNING: Invalid unit_system identifier. unit_system must be either \'us\' or \'si\'.")
            self.logger.info("         Setting unit_system default to \'us\'.")

        if self.units == "us":
            self.convert_volume_to_cy = 0.037037037037037037037037037037037  #ft3 -> cy: float((1/3)**3)
            self.unit_info = " cubic yard"
            self.volume_threshold = 0.99  # ft -- CHANGE lod US customary HERE --
        else:
            self.convert_volume_to_cy = 1.0  # m3
            self.unit_info = " cubic meter"
            self.volume_threshold = 0.30  # m -- CHANGE lod SI metric HERE --
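The US branch hard-codes (1/3)**3: one yard is 3 feet, so a cubic yard holds 27 cubic feet and volumes in ft3 convert to cubic yards by multiplying with 1/27 ≈ 0.037037, exactly the constant in the comment. A quick check:

print((1 / 3) ** 3)  # 0.037037037037037035, the hard-coded factor at double precision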
Example #21
def main(condition_initial=str(),
         condition_project=str(),
         cover_pre=bool(),
         cover_post=bool(),
         fish={},
         prj_name=str(),
         unit=str(),
         version=str(),
         apply_wua=bool()):
    """ calculates pre- and post implementation SHArea
    version = "v10"             # type() =  3-char str: vII
    prj_name = "MyProject"               # (corresponding to folder name)
    condition_initial = "2008"
    condition_project = "2008_tbr_lyr10"
    cover_app_pre = False
    fish = {"Chinook salmon": ["juvenile"]}
    """
    logger = logging.getLogger("logfile")
    error = False
    sys.path.append(config.dir2oxl)

    # set directories
    if cover_pre:
        pre_ext = "cover"
    else:
        pre_ext = "no_cover"
    if cover_post:
        post_ext = "cover"
    else:
        post_ext = "no_cover"

    dir2pp = os.path.dirname(
        os.path.realpath(__file__)) + "\\" + prj_name + "_" + version + "\\"
    dir2ras_chsi = [
        config.dir2sh + "SHArea\\Rasters_" + condition_initial + "\\" +
        pre_ext + "\\", config.dir2sh + "SHArea\\Rasters_" +
        condition_project + "\\" + post_ext + "\\"
    ]
    dir2ras_tar = [
        dir2pp + "Geodata\\Rasters\\" + condition_initial + "\\" + pre_ext +
        "\\", dir2pp + "Geodata\\Rasters\\" + condition_project + "\\" +
        post_ext + "\\"
    ]
    fGl.chk_dir(dir2ras_tar[0])
    fGl.chk_dir(dir2ras_tar[1])
    xlsx_out_name = config.empty_file

    shp_dir = dir2pp + "Geodata\\Shapefiles\\"

    # file and variable settings
    xlsx_tar_costs = dir2pp + prj_name + "_assessment_" + version + ".xlsx"
    if unit == "us":
        unit_q = "cfs"
        xlsx_sha_template = dir2pp + "Geodata\\SHArea_evaluation_template_us.xlsx"
    else:
        unit_q = "m3/s"
        xlsx_sha_template = dir2pp + "Geodata\\SHArea_evaluation_template_si.xlsx"

    # INSTANTIATE SHArea CLASS OBJECT:
    sha = cSHArC.SHArC(unit, prj_name, version)

    # RUN SHArea ANALYSIS
    try:
        logger.info("Starting SHArea analysis ...")
        project_area = shp_dir + "ProjectArea.shp"
        fields = ["SHAPE@", "gridcode"]
        sha.get_extents(project_area, fields[0])
        sha.set_project_area("ProjectArea")
        for species, lifestages in fish.items():
            for ls in lifestages:
                logger.info("SHArea ANALYSIS FOR " + str(species).upper() +
                            " - " + str(ls).upper())
                fili = str(species).lower()[0:2] + str(ls)[0:2]
                xlsx_conditions = [
                    condition_initial + "_sharea_" + fili + ".xlsx",
                    condition_project + "_sharea_" + fili + ".xlsx"
                ]
                xlsx_sha = cIO.Write(xlsx_sha_template,
                                     worksheet_no=0,
                                     data_only=False)
                xlsx_sha_name = dir2pp + "Geodata\\SHArea_" + fili + ".xlsx"
                conditions_sha = []

                xc_count = 0
                start_write_col = "B"
                for xc in xlsx_conditions:
                    # instantiate dict for results writing (entry types are {Q: [Probability, Usable Area]})
                    result_matrix = []
                    try:
                        logger.info(" >> Condition: " +
                                    str(xc).split("_sharea_")[0])
                        xlsx_info = cIO.Read(config.dir2sh + "SHArea\\" + xc)
                    except:
                        xlsx_info = ""
                        logger.info("ERROR: Could not access " + str(xc))
                        error = True
                    try:
                        logger.info(
                            "    -> Looking up discharge information (RiverArchitect/SHArC/SHArea/)..."
                        )
                        discharges = xlsx_info.read_float_column_short("B", 4)
                        exceedance_pr = xlsx_info.read_float_column_short(
                            "E", 4)

                        discharge_dict = dict(zip(discharges, exceedance_pr))
                        raster_list = glob.glob(dir2ras_chsi[xc_count] +
                                                "*.tif")
                        logger.info(
                            "    -> Matching CHSI rasters with discharge information ..."
                        )
                        for q in discharges:
                            test_ras = dir2ras_chsi[
                                xc_count] + "csi_" + fili + str(
                                    int(q)) + ".tif"
                            ras = [r for r in raster_list
                                   if (r == test_ras)][0]
                            logger.info(
                                "    ---> Calculating habitat area from {0} for Q = {1}"
                                .format(ras,
                                        str(q) + unit_q))
                            try:
                                sha.get_usable_area(ras.split(".tif")[0])
                                result_matrix.append(
                                    [q, discharge_dict[q], sha.result])
                            except:
                                logger.info("         * empty sluice for " +
                                            str(q))
                            logger.info("         ok")
                    except:
                        logger.info(
                            "ERROR: Could not process information from " +
                            str(xc))
                        error = True

                    try:
                        logger.info(
                            "    -> Writing discharges and usable area to " +
                            xlsx_sha_name + " ...")
                        result_matrix.sort(key=itemgetter(0), reverse=True)
                        write_row = 9
                        for res in result_matrix:
                            xlsx_sha.write_row(
                                start_write_col, write_row,
                                [res[0], res[1], res[2]])  # q, pr, area
                            write_row += 1
                        logger.info("    -> ok")
                    except:
                        logger.info("ERROR: Could not write SHArea data for " +
                                    str(species) + " - " + str(ls))
                        error = True

                    # calculate SHArea for transfer independent from xlsx calculation
                    try:
                        ex_pr_pdf = [float(exceedance_pr[0])]
                        for i_pr in range(1, exceedance_pr.__len__()):
                            if not ((float(exceedance_pr[i_pr - 1]) >= 100.0)
                                    or (exceedance_pr[i_pr] == 0)):
                                ex_pr_pdf.append(
                                    float(exceedance_pr[i_pr] -
                                          exceedance_pr[i_pr - 1]))
                            else:
                                ex_pr_pdf.append(0.0)
                        conditions_sha.append(
                            sha.calculate_sha(
                                [pr for pr in ex_pr_pdf],
                                [res[2] for res in result_matrix]))
                    except:
                        logger.info(
                            "ERROR: Could not transfer SHArea data for " +
                            str(species) + " - " + str(ls))
                        error = True

                    xc_count += 1
                    start_write_col = cIO.Read.col_num_to_name(
                        cIO.Read.col_name_to_num(start_write_col) + 5)
                    xlsx_info.close_wb()

                logger.info(" >> Saving and closing " + xlsx_sha_name + " ...")
                try:
                    xlsx_sha.save_close_wb(xlsx_sha_name)
                except:
                    logger.info("ERROR: Could not save " + xlsx_sha_name)
                del xlsx_sha

                sha.clear_cache(True)  # limit cache size

                try:
                    logger.info(
                        " >> Transferring results (net SHArea gain) to cost table ..."
                    )
                    xlsx_costs = cIO.Write(xlsx_tar_costs, data_only=False)
                    xlsx_costs.write_cell(
                        "G", 3, float(conditions_sha[1] - conditions_sha[0]))
                    xlsx_out_name = prj_name + "_assessment_" + version + "_" + fili + ".xlsx"
                    xlsx_costs.save_close_wb(dir2pp + xlsx_out_name)
                    logger.info(" >> CHECK RESULTS IN: " + dir2pp +
                                xlsx_out_name)
                except:
                    logger.info("ERROR: Could not transfer net SHArea gain.")
                    error = True

        sha.clear_cache()
    except:
        logger.info("ERROR: Could not run SHArea analysis.")
        return -1

    if not error:
        fGl.open_file(dir2pp + xlsx_out_name)

    return sha.cache
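Inside the condition loop of Example #21, the cumulative exceedance probabilities read from column E are converted into per-interval probabilities by differencing consecutive entries before being handed to sha.calculate_sha. The same computation as a standalone function (a sketch):

def exceedance_to_pdf(exceedance_pr):
    # first entry keeps its cumulative value; later entries become the
    # increment over their predecessor, zeroed once the curve saturates
    pdf = [float(exceedance_pr[0])]
    for prev, cur in zip(exceedance_pr, exceedance_pr[1:]):
        pdf.append(float(cur - prev) if (float(prev) < 100.0 and cur != 0) else 0.0)
    return pdf

print(exceedance_to_pdf([10.0, 35.0, 80.0, 100.0, 100.0]))
# -> [10.0, 25.0, 45.0, 20.0, 0.0]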
Example #22
    def __init__(self, condition, species, lifestage, units, *args, **kwargs):
        self.logger = logging.getLogger("logfile")
        self.cache = config.dir2co + ".cache%s\\" % str(
            random.randint(1000000, 9999999))
        fGl.chk_dir(self.cache)
        arcpy.env.workspace = self.cache
        arcpy.env.overwriteOutput = True
        self.condition = condition
        self.dir2condition = config.dir2conditions + self.condition + "\\"

        self.units = units
        self.q_units = 'cfs' if self.units == "us" else 'm^3/s'
        self.length_units = 'ft' if self.units == "us" else 'm'
        self.u_units = self.length_units + '/s'
        self.area_units = self.length_units + '^2'

        self.species = species
        self.lifestage = lifestage
        self.lifestage_code = self.species.lower()[:2] + self.lifestage.lower()[:2]
        # read in fish data (minimum depth needed, max swimming speed, ...)
        self.h_min = cFi.Fish().get_travel_threshold(self.species,
                                                     self.lifestage, "h_min")
        self.logger.info("minimum swimming depth = %s %s" %
                         (self.h_min, self.length_units))
        self.u_max = cFi.Fish().get_travel_threshold(self.species,
                                                     self.lifestage, "u_max")
        self.logger.info("maximum swimming speed  = %s %s" %
                         (self.u_max, self.u_units))
        self.analyze_v = True

        try:
            self.method = kwargs['method']
        except:
            self.method = "IDW"

        try:
            self.out_dir = args[0]
        except:
            self.out_dir = config.dir2co + "Output\\" + self.condition + "\\"

        fGl.chk_dir(self.out_dir)
        # these directories don't depend on applied flow reduction, share with other runs
        self.h_interp_dir = os.path.join(self.out_dir, "h_interp\\")
        fGl.chk_dir(self.h_interp_dir)
        self.u_interp_dir = os.path.join(self.out_dir, "u_interp\\")
        fGl.chk_dir(self.u_interp_dir)
        self.va_interp_dir = os.path.join(self.out_dir, "va_interp\\")
        fGl.chk_dir(self.va_interp_dir)

        try:
            self.q_high = kwargs['q_high']
            self.q_low = kwargs['q_low']
            self.out_dir = os.path.join(
                self.out_dir, "flow_red_%06d_%06d" % (self.q_high, self.q_low))
        except:
            self.q_high = self.q_low = None

        try:
            self.dt = kwargs['dt']
        except:
            self.dt = None

        fGl.chk_dir(self.out_dir)
        # these directories depend on applied flow reduction
        self.shortest_paths_dir = os.path.join(self.out_dir,
                                               "shortest_paths\\")
        fGl.chk_dir(self.shortest_paths_dir)
        self.areas_dir = os.path.join(self.out_dir, "areas\\")
        fGl.chk_dir(self.areas_dir)
        self.disc_areas_dir = os.path.join(self.out_dir, "disc_areas\\")
        fGl.chk_dir(self.disc_areas_dir)
        # populated by self.get_hydraulic_rasters()
        self.discharges = []
        self.Q_h_dict = {}
        self.Q_u_dict = {}
        self.Q_va_dict = {}
        # populated by self.get_interpolated_rasters()
        self.Q_h_interp_dict = {}
        self.Q_u_interp_dict = {}
        self.Q_va_interp_dict = {}
        # populated by self.get_hsi_rasters()
        self.Q_chsi_dict = {}
        # populated by self.make_shortest_paths_map(Q)
        self.Q_escape_dict = {}
        # populated by self.disconnected_areas(Q)
        self.Q_disc_areas_dict = {}
        # populated by self.make_disconnect_Q_map()
        self.target = ''

        self.xlsx = os.path.join(self.out_dir, "disconnected_area.xlsx")
        self.xlsx_writer = cIO.Write(config.xlsx_connectivity)
        self.xlsx_writer.write_cell("E", 4, self.species)
        self.xlsx_writer.write_cell("E", 5, self.lifestage)
        self.xlsx_writer.write_cell("E", 6, self.h_min)
        self.xlsx_writer.write_cell("E", 7, self.u_max)

        self.get_hydraulic_rasters()
        self.get_interpolated_rasters()
        self.get_hsi_rasters()
        self.get_target_raster()
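lifestage_code concatenates the first two letters of species and lifestage, matching the fili code that Example #21 embeds in workbook names. For example:

print("Chinook salmon".lower()[:2] + "juvenile".lower()[:2])  # -> chju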
Example #23
    def make_pdf_maps(self, map_name, *args, **kwargs):
        # map_name = STR of pdf name
        # args[0] =  STR of alternative output directory for PDFs
        # optional kwarg "extent": overwrite mapping extent
        # optional kwarg "map_layout": alternative aprx.listLayouts()[] object
        try:
            length = args[0].__len__()
            if length > 3:
                self.output_dir = args[0]
                fGl.chk_dir(self.output_dir)
                self.logger.info(
                    " >> Alternative output directory provided: " +
                    str(self.output_dir))
        except:
            pass

        try:
            for k in kwargs.items():
                if "extent" in str(k[0]).lower():
                    if k[1] == "raster":
                        self.logger.info(
                            " >> Using Raster coordinates for mapping.")
                        self.make_xy_centerpoints([
                            self.raster_extent.XMin, self.raster_extent.YMin,
                            self.raster_extent.XMax, self.raster_extent.YMax
                        ])
                    else:
                        if not (k[1] == "MAXOF"):
                            self.make_xy_centerpoints(k[1])
                            self.logger.info(
                                " >> Special reach extents provided.")
                            self.logger.info(
                                "    --> Overwriting mapping.inp center point definitions."
                            )
                if "map_layout" in k[0]:
                    self.logger.info(
                        " >> External map layout provided - using: " +
                        str(k[1]))
                    self.map_layout = k[1]
        except:
            pass

        self.logger.info(" >> Starting PDF creation for: " +
                         str(self.map_layout.name))
        self.logger.info(
            "    * Map format: ANSI E landscape (w = %0.1f in, h = %0.1f in)" %
            (self.map_layout.pageWidth, self.map_layout.pageHeight))
        map_name = map_name.split("\\")[-1].split("/")[-1]
        if self.map_type == "lf":
            for lyr in self.m.listLayers():
                if not ((str(map_name).split(".pdf")[0] in lyr.name) or
                        ("background" in lyr.name)):
                    lyr.visible = False
                else:
                    lyr.visible = True
        self.aprx.save()

        arcpy.env.workspace = self.output_dir
        arcpy.env.overwriteOutput = True
        pdf_name = self.output_dir + map_name.split(".pdf")[0] + ".pdf"
        self.logger.info("    * Creating PDF %s ..." % pdf_name)
        try:
            os.remove(pdf_name) if os.path.isfile(pdf_name) else print()
        except:
            pass
        try:
            __outputPDF__ = arcpy.mp.PDFDocumentCreate(pdf_name)
            __tempPDFs__ = []
            __count__ = 0
            for xy in self.xy_center_points:
                __count__ += 1
                self.logger.info("      - zooming to " + str(xy))
                try:
                    self.zoom2map(xy)
                except:
                    self.logger.info(
                        "ERROR: Invalid x-y coordinates in extent source [mapping.inp / reaches]."
                    )
                fig_name = "fig_" + "%02d" % (__count__, )
                __PDFpath__ = self.output_dir + fig_name + "_temp.pdf"
                self.logger.info("      - exporting PDF page ... ")
                try:
                    self.map_layout.exportToPDF(__PDFpath__,
                                                image_compression="ADAPTIVE",
                                                resolution=self.resolution,
                                                clip_to_elements=True)
                except:
                    self.error = True
                    self.logger.info("ERROR: Could not export PDF page no. " +
                                     str(__count__))
                self.logger.info("      - appending PDF page ... ")
                try:
                    __outputPDF__.appendPages(str(__PDFpath__))
                    __tempPDFs__.append(
                        __PDFpath__)  # remember temp names to remove later on
                    self.logger.info("      - page complete.")
                except:
                    self.error = True
                    self.logger.info("ERROR: Could not append PDF page no." +
                                     str(__count__) + " to map assembly.")
            try:
                __outputPDF__.saveAndClose()
            except:
                self.error = True
                self.logger.info("ERROR: Failed to save PDF map assembly.")

            for deletePDF in __tempPDFs__:
                try:
                    os.remove(deletePDF)
                except:
                    self.logger.info(
                        "WARNING: Could not clean up PDF map temp_pages.")
            self.logger.info("    * Finished map: " + self.output_dir +
                             map_name.split(".pdf")[0] + ".pdf")

        except arcpy.ExecuteError:
            self.logger.info(arcpy.GetMessages(2))
            arcpy.AddError(arcpy.GetMessages(2))
            self.logger.info("ERROR: Mapping failed.")
            self.error = True
        except:
            self.logger.info("ERROR: Mapping failed.")
            self.error = True
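In Example #23, os.remove(pdf_name) if os.path.isfile(pdf_name) else print() uses a conditional expression purely for its side effect; a plain if statement reads better (a sketch):

import os

pdf_name = "map_assembly.pdf"  # stand-in path
if os.path.isfile(pdf_name):
    os.remove(pdf_name)  # drop any stale PDF before exporting a fresh one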