Exemplo n.º 1
0
def main(in_raster=None, img_filter=None, percentile=None,
         min_nbhs=None, max_nbhs=None, out_file=True):
    """Preview an image filter over a range of neighborhood sizes.

    Pulls a 200x200 window from ``in_raster``, applies the requested
    scipy.ndimage filter at 25 evenly spaced neighborhood sizes between
    ``min_nbhs`` and ``max_nbhs``, and saves a 5x5 grid of preview
    plots to ``out_file``.
    """
    r = arcpy.RasterToNumPyArray(in_raster, "", 200, 200, 0)
    if r.ndim > 2:
        # multi-band input: keep only the first band
        r = np.squeeze(r[0, :, :])
    min_nbhs = int(min_nbhs)
    max_nbhs = int(max_nbhs)

    fig = plt.figure(figsize=(10, 10))
    # cast with astype instead of using dtype= for Numpy 1.7 compat
    sizes = np.linspace(min_nbhs, max_nbhs, num=25, endpoint=True,
                        retstep=False).astype('uint32')

    # Look the filter function up directly instead of building a source
    # string and eval()ing it -- same call, but no code injection surface
    # and real tracebacks on bad filter names.
    filter_func = getattr(nd, "{}_filter".format(img_filter.lower()))

    for (i, size) in enumerate(sizes):
        utils.msg("Processing neighborhood size {}".format(size))
        if percentile is not None:
            # percentile arrives as text from the tool dialog; the old
            # eval() parsed it as a number, so convert explicitly here.
            med = filter_func(r, float(percentile), size)
        else:
            med = filter_func(r, size)

        a = fig.add_subplot(5, 5, i + 1)
        plt.imshow(med, interpolation='nearest')
        a.set_title('{}x{}'.format(size, size), fontsize=8)
        plt.axis('off')
        plt.subplots_adjust(hspace=0.01, wspace=0.09)

    plt.savefig(out_file, bbox_inches='tight')
    return
Exemplo n.º 2
0
def main(bathy=None, out_sin_raster=None, out_cos_raster=None):
    """
    Calculate the statistical aspect of a raster, which
    computes the sin(aspect) and cos(aspect). By using these two
    variables, aspect can be accounted for as a continuous circular
    variable. Because aspect is circular (0 and 359.9 are immediately
    adjacent), this trigonometric transformation preserves distances
    between elements and is the simplest transformation mechanism.
    """

    try:
        arcpy.env.rasterStatistics = "STATISTICS"
        utils.msg("Calculating aspect...")
        # "Aspect is expressed in positive degrees from 0 to 359.9,
        # measured clockwise from north."
        aspect_deg = Aspect(bathy)

        # Sin() and Cos() expect radians, not degrees; rescale the
        # aspect raster before applying them.
        deg_to_rad = math.pi / 180
        aspect_rad = aspect_deg * deg_to_rad

        sin_result = Sin(aspect_rad)
        cos_result = Cos(aspect_rad)

        sin_path = utils.validate_path(out_sin_raster)
        cos_path = utils.validate_path(out_cos_raster)
        sin_result.save(sin_path)
        cos_result.save(cos_path)
    except Exception as e:
        utils.msg(e, mtype='error')
Exemplo n.º 3
0
def main(bathy=None,
         inner_radius=None,
         outer_radius=None,
         out_raster=None,
         bpi_type='broad'):
    """
    Create a bathymetric position index (BPI) raster, which
    measures the average value in a 'donut' of locations, excluding
    cells too close to the origin point, and outside a set distance.
    """

    arcpy.env.rasterStatistics = "STATISTICS"
    try:
        # Create the broad-scale Bathymetric Position Index (BPI) raster.
        # NOTE: trailing space before the implicit concatenation -- the
        # original ran "BathymetricPosition" together in the message.
        msg = ("Generating the {bpi_type}-scale Bathymetric "
               "Position Index (BPI) raster...".format(bpi_type=bpi_type))

        utils.msg(msg)
        utils.msg("Calculating neighborhood...")
        # annulus ("donut") neighborhood measured in cells
        neighborhood = NbrAnnulus(inner_radius, outer_radius, "CELL")
        utils.msg("Calculating FocalStatistics for {}...".format(bathy))
        out_focal_statistics = FocalStatistics(bathy, neighborhood, "MEAN")
        # BPI = bathy - focal mean; +0.5 before Int() rounds to nearest
        result_raster = Int(Plus(Minus(bathy, out_focal_statistics), 0.5))

        out_raster_path = utils.validate_path(out_raster)
        result_raster.save(out_raster_path)
        utils.msg("Saved output as {}".format(out_raster_path))
    except Exception as e:
        utils.msg(e, mtype='error')
Exemplo n.º 4
0
def main(bathy=None, inner_radius=None, outer_radius=None,
         out_raster=None, bpi_type='broad'):
    """
    Create a bathymetric position index (BPI) raster, which
    measures the average value in a 'donut' of locations, excluding
    cells too close to the origin point, and outside a set distance.
    """

    arcpy.env.rasterStatistics = "STATISTICS"
    try:
        # Create the broad-scale Bathymetric Position Index (BPI) raster.
        # NOTE: trailing space before the implicit concatenation -- the
        # original ran "BathymetricPosition" together in the message.
        msg = ("Generating the {bpi_type}-scale Bathymetric "
               "Position Index (BPI) raster...".format(bpi_type=bpi_type))

        utils.msg(msg)
        utils.msg("Calculating neighborhood...")
        # annulus ("donut") neighborhood measured in cells
        neighborhood = NbrAnnulus(inner_radius, outer_radius, "CELL")
        utils.msg("Calculating FocalStatistics for {}...".format(bathy))
        out_focal_statistics = FocalStatistics(bathy, neighborhood, "MEAN")
        # BPI = bathy - focal mean; +0.5 before Int() rounds to nearest
        result_raster = Int(Plus(Minus(bathy, out_focal_statistics), 0.5))

        out_raster_path = utils.validate_path(out_raster)
        result_raster.save(out_raster_path)
        utils.msg("Saved output as {}".format(out_raster_path))
    except Exception as e:
        utils.msg(e, mtype='error')
Exemplo n.º 5
0
def run_con(lower_bounds, upper_bounds, in_grid, true_val, true_alt=None):
    "Conditionally evaluate raster range within bounds."""

    if config.debug:
        utils.msg("run_con: lb: {} ub: {} grid: {}  val: {}, alt: {}".format(
            lower_bounds, upper_bounds, in_grid, true_val, true_alt))

    out_grid = None

    # if our initial desired output value isn't set, use the backup
    if true_val is None:
        true_val = true_alt
    # calculate our output grid
    if lower_bounds is not None:
        if upper_bounds is not None:
            out_grid_a = Con(
                in_grid, true_val, 0, "VALUE < {}".format(upper_bounds))
            out_grid = Con(
                in_grid, out_grid_a, 0, "VALUE > {}".format(lower_bounds))
        else:
            out_grid = Con(
                in_grid, true_val, 0, "VALUE >= {}".format(lower_bounds))
    elif upper_bounds is not None:
        out_grid = Con(
            in_grid, true_val, 0, "VALUE <= {}".format(upper_bounds))

    if type(out_grid).__name__ == 'NoneType' and \
       type(true_val) == arcpy.sa.Raster:
        out_grid = true_val

    return out_grid
Exemplo n.º 6
0
def run_con(lower_bounds, upper_bounds, in_grid, true_val, true_alt=None):
    """Conditionally evaluate raster range within bounds.

    Returns a Con() raster equal to ``true_val`` where ``in_grid`` falls
    inside the (lower_bounds, upper_bounds) range and 0 elsewhere. A
    ``None`` bound is treated as unbounded on that side; if both bounds
    are None and ``true_val`` is a raster, it is passed through as-is.

    NOTE: the original "docstring" was two implicitly concatenated
    strings (`"..." ""`), which still parsed but was clearly a typo.
    """

    if config.debug:
        utils.msg("run_con: lb: {} ub: {} grid: {}  val: {}, alt: {}".format(
            lower_bounds, upper_bounds, in_grid, true_val, true_alt))

    out_grid = None

    # if our initial desired output value isn't set, use the backup
    if true_val is None:
        true_val = true_alt
    # calculate our output grid
    if lower_bounds is not None:
        if upper_bounds is not None:
            out_grid_a = Con(in_grid, true_val, 0,
                             "VALUE < {}".format(upper_bounds))
            out_grid = Con(in_grid, out_grid_a, 0,
                           "VALUE > {}".format(lower_bounds))
        else:
            out_grid = Con(in_grid, true_val, 0,
                           "VALUE >= {}".format(lower_bounds))
    elif upper_bounds is not None:
        out_grid = Con(in_grid, true_val, 0,
                       "VALUE <= {}".format(upper_bounds))

    # No bounds matched: fall back to passing a raster true_val through.
    # Use `is None` / isinstance() instead of comparing type names.
    if out_grid is None and isinstance(true_val, arcpy.sa.Raster):
        out_grid = true_val

    return out_grid
Exemplo n.º 7
0
def main(out_workspace, input_bathymetry, broad_bpi_inner_radius,
         broad_bpi_outer_radius, fine_bpi_inner_radius,
         fine_bpi_outer_radius, classification_dict, output_zones):
    """
    Run the complete BTM model: compute fine- and broad-scale BPI
    rasters, standardize them, derive slope, and classify zones using
    the value ranges supplied in the classification dictionary (a CSV
    or Excel spreadsheet).
    """

    # intermediate rasters, all placed inside the output workspace
    broad_bpi_path = os.path.join(out_workspace, "broad_bpi")
    fine_bpi_path = os.path.join(out_workspace, "fine_bpi")
    slope_path = os.path.join(out_workspace, "slope")
    broad_std_path = os.path.join(out_workspace, "broad_std")
    fine_std_path = os.path.join(out_workspace, "fine_std")

    utils.workspace_exists(out_workspace)
    # set geoprocessing environments
    arcpy.env.scratchWorkspace = out_workspace
    arcpy.env.workspace = out_workspace

    # TODO: currently set to automatically overwrite, expose this as option
    arcpy.env.overwriteOutput = True

    try:
        utils.msg("Calculating broad-scale BPI...")
        bpi.main(input_bathymetry, broad_bpi_inner_radius,
                 broad_bpi_outer_radius, broad_bpi_path, bpi_type='broad')

        utils.msg("Calculating fine-scale BPI...")
        bpi.main(input_bathymetry, fine_bpi_inner_radius,
                 fine_bpi_outer_radius, fine_bpi_path, bpi_type='fine')

        utils.msg("Standardizing BPI rasters...")
        standardize_bpi.main(broad_bpi_path, broad_std_path)
        standardize_bpi.main(fine_bpi_path, fine_std_path)

        # slope derivative of the input bathymetry
        slope.main(input_bathymetry, slope_path)

        # temporarily force classification outputs onto the map,
        # restoring the caller's setting afterwards
        outputs_setting = arcpy.env.addOutputsToMap
        arcpy.env.addOutputsToMap = True
        utils.msg("Classifying Zones...")
        classify.main(classification_dict, broad_std_path, fine_std_path,
                      slope_path, input_bathymetry, output_zones)
        arcpy.env.addOutputsToMap = outputs_setting

    except Exception as e:
        # report rather than propagate: this is a tool entry point
        utils.msg(e, mtype='error')
Exemplo n.º 8
0
Arquivo: slope.py Projeto: lsxinh/btm
def main(bathy=None, out_raster=None):
    """Compute raster slope in degrees."""

    try:
        arcpy.env.rasterStatistics = "STATISTICS"
        utils.msg("Calculating the slope...")
        # slope of the bathymetric raster, in degrees, z-factor 1
        slope_result = Slope(bathy, "DEGREE", 1)
        output_path = utils.validate_path(out_raster)
        slope_result.save(output_path)
    except Exception as e:
        utils.msg(e, mtype='error')
Exemplo n.º 9
0
def main(bathy=None, out_raster=None):
    """Compute raster slope in degrees."""

    try:
        # persist statistics so downstream tools can symbolize the result
        arcpy.env.rasterStatistics = "STATISTICS"
        utils.msg("Calculating the slope...")
        degrees_slope = Slope(bathy, "DEGREE", 1)
        validated_out = utils.validate_path(out_raster)
        degrees_slope.save(validated_out)
    except Exception as e:
        utils.msg(e, mtype='error')
Exemplo n.º 10
0
    def execute(self, parameters, messages):
        from scripts import ExportToSPAGeDi
   
        results = parameters[3].valueAsText

        # temporary SPAGeDi output file
        if self.spagedi_file_path is not None:
            spagedi_file_path = self.spagedi_file_path
        else:
            spagedi_file_path = os.path.join(config.config_dir, "spagedi_data.txt")

        utils.msg("writing SPAGeDi-formatted results...")

        # compute our spagedi file first
        ExportToSPAGeDi.main(
            input_features=parameters[0].valueAsText,
            where_clause="",
            order_by=parameters[1].valueAsText,
            output_name=spagedi_file_path
        )

        utils.msg("writing out SPAGeDi commands...")

        # now, generate an input file for SPAGeDi
        spagedi_commands = os.path.join(config.config_dir, "spagedi_commands.txt")
        utils.msg(spagedi_commands)
        with open(spagedi_commands, 'w') as command_file:
            file_string = "{spagedi_file_path}\n{results}\n\n".format(
                spagedi_file_path=spagedi_file_path,
                results=results
            )
            for cmd in self.sequence:
                file_string += cmd + '\n'
            file_string += '\n\n\n'
            command_file.write(file_string)

        # now, fire up SPAGeDi
        spagedi_msg = """Now running SPAGeDi 1.4c (build 17-07-2013)
   - a program for Spatial Pattern Analysis of Genetic Diversity
               Written by Olivier Hardy & Xavier Vekemans
               Contributions by Reed Cartwright"""
        utils.msg(spagedi_msg)
        time.sleep(2)
        # pull in the full location of the SPAGeDi binary
        spagedi_exe = settings.spagedi_executable_path
        shell_cmd = "{spagedi_exe} < {spagedi_commands}".format(
                spagedi_exe=spagedi_exe,
                spagedi_commands=spagedi_commands)
        res = os.system(shell_cmd)
        return 0
Exemplo n.º 11
0
    def execute(self, parameters, messages):
        from scripts import ExportToSPAGeDi

        results = parameters[3].valueAsText

        # temporary SPAGeDi output file
        if self.spagedi_file_path is not None:
            spagedi_file_path = self.spagedi_file_path
        else:
            spagedi_file_path = os.path.join(config.config_dir,
                                             "spagedi_data.txt")

        utils.msg("writing SPAGeDi-formatted results...")

        # compute our spagedi file first
        ExportToSPAGeDi.main(input_features=parameters[0].valueAsText,
                             where_clause="",
                             order_by=parameters[1].valueAsText,
                             output_name=spagedi_file_path)

        utils.msg("writing out SPAGeDi commands...")

        # now, generate an input file for SPAGeDi
        spagedi_commands = os.path.join(config.config_dir,
                                        "spagedi_commands.txt")
        utils.msg(spagedi_commands)
        with open(spagedi_commands, 'w') as command_file:
            file_string = "{spagedi_file_path}\n{results}\n\n".format(
                spagedi_file_path=spagedi_file_path, results=results)
            for cmd in self.sequence:
                file_string += cmd + '\n'
            file_string += '\n\n\n'
            command_file.write(file_string)

        # now, fire up SPAGeDi
        spagedi_msg = """Now running SPAGeDi 1.4c (build 17-07-2013)
   - a program for Spatial Pattern Analysis of Genetic Diversity
               Written by Olivier Hardy & Xavier Vekemans
               Contributions by Reed Cartwright"""
        utils.msg(spagedi_msg)
        time.sleep(2)
        # pull in the full location of the SPAGeDi binary
        spagedi_exe = settings.spagedi_executable_path
        shell_cmd = "{spagedi_exe} < {spagedi_commands}".format(
            spagedi_exe=spagedi_exe, spagedi_commands=spagedi_commands)
        res = os.system(shell_cmd)
        return 0
Exemplo n.º 12
0
def main(in_raster=None, img_filter=None, percentile=None,
         min_nbhs=None, max_nbhs=None, position=None, out_file=True):
    """Preview an image filter over a range of neighborhood sizes.

    Like the positionless variant, but ``position`` ("x y") selects the
    lower-left corner of the 200x200 sample window; an unparsable
    position falls back to the raster's lower-left corner.
    """
    point = None
    if position:
        try:
            x, y = position.split(" ")
            point = arcpy.Point(x, y)
        except Exception as e:
            utils.msg("Invalid point, using lower left corner", "warning")
            utils.msg(e, 'error')
            point = None

    r = arcpy.RasterToNumPyArray(in_raster, point, 200, 200, 0)

    if r.ndim > 2:
        # multi-band input: keep only the first band
        r = np.squeeze(r[0, :, :])
    min_nbhs = int(min_nbhs)
    max_nbhs = int(max_nbhs)

    fig = plt.figure(figsize=(10, 10))
    # cast with astype instead of using dtype= for Numpy 1.7 compat
    sizes = np.linspace(min_nbhs, max_nbhs, num=25, endpoint=True,
                        retstep=False).astype('uint32')

    # Look the filter function up directly instead of building a source
    # string and eval()ing it -- same call, but no code injection surface
    # and real tracebacks on bad filter names.
    filter_func = getattr(nd, "{}_filter".format(img_filter.lower()))

    for (i, size) in enumerate(sizes):
        utils.msg("Processing neighborhood size {}".format(size))
        if percentile is not None:
            # percentile arrives as text from the tool dialog; the old
            # eval() parsed it as a number, so convert explicitly here.
            med = filter_func(r, float(percentile), size)
        else:
            med = filter_func(r, size)

        a = fig.add_subplot(5, 5, i + 1)
        plt.imshow(med, interpolation='nearest')
        a.set_title('{}x{}'.format(size, size), fontsize=8)
        plt.axis('off')
        plt.subplots_adjust(hspace=0.01, wspace=0.09)

    plt.savefig(out_file, bbox_inches='tight')
    return
Exemplo n.º 13
0
def main(in_raster=None, out_raster=None, area_raster=None):
    """
    A calculation of rugosity, based on the difference between surface
    area and planar area, as described in Jenness, J. 2002. Surface Areas
    and Ratios from Elevation Grid (surfgrids.avx) extension for ArcView 3.x,
    v. 1.2. Jenness Enterprises.

    NOTE: the VRM method implemented in ruggeddness is generally considered
          superior to this method.
    """
    out_workspace = os.path.dirname(out_raster)
    # make sure workspace exists
    utils.workspace_exists(out_workspace)

    utils.msg("Set scratch workspace to {}...".format(out_workspace))

    # force temporary stats to be computed in our output workspace
    arcpy.env.scratchWorkspace = out_workspace
    arcpy.env.workspace = out_workspace
    pyramid_orig = arcpy.env.pyramid
    arcpy.env.pyramid = "NONE"
    # TODO: currently set to automatically overwrite, expose this as option
    arcpy.env.overwriteOutput = True

    bathy = Raster(in_raster)
    desc = arcpy.Describe(bathy)
    # get the cell size of the input raster; use same calculation as was
    # performed in BTM v1: (mean_x + mean_y) / 2
    cell_size = (desc.meanCellWidth + desc.meanCellHeight) / 2.0
    corner_dist = math.sqrt(2 * cell_size**2)
    flat_area = cell_size**2
    utils.msg("Cell size: {}\nFlat area: {}".format(cell_size, flat_area))

    try:
        # Create a set of shifted grids, with offets n from the origin X:

        #        8 | 7 | 6
        #        --|---|---
        #        5 | X | 4
        #        --|---|---
        #        3 | 2 | 1

        positions = [(1, -1), (0, -1), (-1, -1), (1, 0), (-1, 0), (1, 1),
                     (0, 1), (-1, 1)]

        corners = (1, 3, 6, 8)  # dist * sqrt(2), as set in corner_dist
        orthogonals = (2, 4, 5, 7)  # von Neumann neighbors, straight dist
        shift_rasts = [None]  # offset to align numbers
        temp_rasts = []

        for (n, pos) in enumerate(positions, start=1):
            utils.msg("Creating Shift Grid {} of 8...".format(n))
            # scale shift offsets by cell size. FIX: the original used
            # `map(lambda (n): ...)`, a Python-2-only parenthesized lambda
            # parameter (removed by PEP 3113); plain tuple arithmetic is
            # behavior-identical and portable.
            (x_shift, y_shift) = (pos[0] * cell_size, pos[1] * cell_size)

            # set explicit path on shift rasters, otherwise suffer
            # inexplicable 999999 errors.
            shift_out = os.path.join(out_workspace, "shift_{}.tif".format(n))
            shift_out = utils.validate_path(shift_out)
            temp_rasts.append(shift_out)
            arcpy.Shift_management(bathy, shift_out, x_shift, y_shift)
            shift_rasts.append(arcpy.sa.Raster(shift_out))

        edge_rasts = [None]
        # calculate triangle length grids

        # edges 1-8: pairs of bathy:shift[n]
        for (n, shift) in enumerate(shift_rasts[1:], start=1):
            utils.msg("Calculating Triangle Edge {} of 16...".format(n))
            # adjust for corners being sqrt(2) from center
            if n in corners:
                dist = corner_dist
            else:
                dist = cell_size
            edge_out = os.path.join(out_workspace, "edge_{}.tif".format(n))
            edge_out = utils.validate_path(edge_out)
            temp_rasts.append(edge_out)
            edge = compute_edge(bathy, shift, dist)
            edge.save(edge_out)
            edge_rasts.append(arcpy.sa.Raster(edge_out))

        # edges 9-16: pairs of adjacent shift grids [see layout above]
        # in BTM_v1, these are labeled A-H
        adjacent_shift = [(1, 2), (2, 3), (1, 4), (3, 5), (6, 4), (5, 8),
                          (6, 7), (7, 8)]
        for (n, pair) in enumerate(adjacent_shift, start=9):
            utils.msg("Calculating Triangle Edge {} of 16...".format(n))
            # the two shift rasters for this iteration
            (i, j) = pair
            edge_out = os.path.join(out_workspace, "edge_{}.tif".format(n))
            edge_out = utils.validate_path(edge_out)
            temp_rasts.append(edge_out)
            edge = compute_edge(shift_rasts[i], shift_rasts[j], cell_size)
            edge.save(edge_out)
            edge_rasts.append(arcpy.sa.Raster(edge_out))

        # areas of each triangle
        areas = []
        for (n, pair) in enumerate(adjacent_shift, start=1):
            utils.msg("Calculating Triangle Area {} of 8...".format(n))
            # the two shift rasters; n has the third side
            (i, j) = pair
            area_out = os.path.join(out_workspace, "area_{}.tif".format(n))
            area_out = utils.validate_path(area_out)
            temp_rasts.append(area_out)

            area = triangle_area(edge_rasts[i], edge_rasts[j],
                                 edge_rasts[n + 8])
            area.save(area_out)
            areas.append(arcpy.sa.Raster(area_out))

        utils.msg("Summing Triangle Area...")
        arcpy.env.pyramid = pyramid_orig
        arcpy.env.rasterStatistics = "STATISTICS"
        total_area = (areas[0] + areas[1] + areas[2] + areas[3] + areas[4] +
                      areas[5] + areas[6] + areas[7])
        if area_raster:
            save_msg = "Saving Surface Area Raster to " + \
                "{}.".format(area_raster)
            utils.msg(save_msg)
            total_area.save(area_raster)

        # rugosity: surface area over planar (flat) area
        area_ratio = total_area / cell_size**2

        out_raster = utils.validate_path(out_raster)
        save_msg = "Saving Surface Area to Planar Area ratio to " + \
            "{}.".format(out_raster)
        utils.msg(save_msg)
        area_ratio.save(out_raster)

    except Exception as e:
        utils.msg(e, mtype='error')

    try:
        # Delete all intermediate raster data sets
        utils.msg("Deleting intermediate data...")
        for path in temp_rasts:
            arcpy.Delete_management(path)

    except Exception as e:
        utils.msg(e, mtype='error')
Exemplo n.º 14
0
def main(classification_file, bpi_broad_std, bpi_fine_std,
         slope, bathy, out_raster=None):
    """
    Perform raster classification, based on classification mappings
    and provided raster derivatives (fine- and broad- scale BPI,
    slope, and the original raster). Outputs a classified raster.
    """
    try:
        # set up scratch workspace
        # FIXME: see issue #18
        # CON is very very picky. it generates GRID outputs by default, and the
        # resulting names must not exist. for now, push our temp results
        # to the output folder.
        out_workspace = os.path.dirname(out_raster)
        # make sure workspace exists
        utils.workspace_exists(out_workspace)
        arcpy.env.scratchWorkspace = out_workspace
        arcpy.env.workspace = out_workspace

        arcpy.env.overwriteOutput = True
        # Create the broad-scale Bathymetric Position Index (BPI) raster
        msg_text = ("Generating the classified grid, based on the provided"
                    " classes in '{}'.".format(classification_file))
        utils.msg(msg_text)

        # Read in the BTM Document; the class handles parsing a variety
        # of inputs.
        btm_doc = utils.BtmDocument(classification_file)
        classes = btm_doc.classification()
        utils.msg("Parsing {} document... found {} classes.".format(
            btm_doc.doctype, len(classes)))

        grids = []
        key = {'0': 'None'}
        for item in classes:
            cur_class = str(item["Class"])
            cur_name = str(item["Zone"])
            utils.msg("Calculating grid for {}...".format(cur_name))
            key[cur_class] = cur_name
            # here come the CONs: chain the four range tests, each one
            # narrowing the previous result
            out_con = run_con(item["Depth_LowerBounds"],
                              item["Depth_UpperBounds"],
                              bathy, cur_class)
            out_con2 = run_con(item["Slope_LowerBounds"],
                               item["Slope_UpperBounds"],
                               slope, out_con, cur_class)
            out_con3 = run_con(item["LSB_LowerBounds"],
                               item["LSB_UpperBounds"],
                               bpi_fine_std, out_con2, cur_class)
            out_con4 = run_con(item["SSB_LowerBounds"],
                               item["SSB_UpperBounds"],
                               bpi_broad_std, out_con3, cur_class)

            if isinstance(out_con4, arcpy.sa.Raster):
                rast = utils.save_raster(out_con4,
                                         "con_{}.tif".format(cur_name))
                grids.append(rast)
            else:
                # fall-through: no valid values detected for this class.
                warn_msg = ("WARNING, no valid locations found for class"
                            " {}:\n".format(cur_name))
                classifications = {
                    'depth': (item["Depth_LowerBounds"],
                              item["Depth_UpperBounds"]),
                    'slope': (item["Slope_LowerBounds"],
                              item["Slope_UpperBounds"]),
                    'broad': (item["SSB_LowerBounds"],
                              item["SSB_UpperBounds"]),
                    'fine': (item["LSB_LowerBounds"],
                             item["LSB_UpperBounds"])
                }
                for (name, vrange) in classifications.items():
                    (vmin, vmax) = vrange
                    # FIX: the original `if vmin or vmax is not None:`
                    # parsed as `vmin or (vmax is not None)`, which is
                    # wrong whenever vmin is 0 or vmax is None.
                    if vmin is not None or vmax is not None:
                        if vmin is None:
                            vmin = ""
                        if vmax is None:
                            vmax = ""
                        warn_msg += "  {}: {{{}:{}}}\n".format(name,
                                                               vmin, vmax)

                utils.msg(textwrap.dedent(warn_msg))

        if len(grids) == 0:
            raise NoValidClasses

        utils.msg("Creating Benthic Terrain Classification Dataset...")
        # merge the per-class grids: cells still 0 take the next grid's value
        merge_grid = grids[0]
        for i in range(1, len(grids)):
            utils.msg("{} of {}".format(i, len(grids)-1))
            merge_grid = Con(merge_grid, grids[i], merge_grid, "VALUE = 0")
        # label each class value with its zone name
        arcpy.AddField_management(merge_grid, 'Zone', 'TEXT')
        rows = arcpy.UpdateCursor(merge_grid)
        for row in rows:
            val = str(row.getValue('VALUE'))
            if val in key:
                row.setValue('Zone', key[val])
            else:
                row.setValue('Zone', 'No Matching Zone')
            rows.updateRow(row)
        # release the cursor's lock explicitly. FIX: the original also
        # did `del(row)`, which raises NameError when the cursor is empty.
        del rows

        arcpy.env.rasterStatistics = "STATISTICS"
        # validate the output raster path
        out_raster = utils.validate_path(out_raster)
        utils.msg("Saving Output to {}".format(out_raster))
        merge_grid.save(out_raster)

        utils.msg("Complete.")

    except NoValidClasses as e:
        utils.msg(e, mtype='error')
    except Exception as e:
        if type(e) is ValueError:
            raise e
        utils.msg(e, mtype='error')

    try:
        utils.msg("Deleting intermediate data...")
        # Delete all intermediate raster data sets
        for grid in grids:
            arcpy.Delete_management(grid.catalogPath)
    except Exception as e:
        # hack -- swallowing this exception, because sometimes
        # refs are left around for these files.
        utils.msg("Failed to delete all intermediate data.", mtype='warning')
Exemplo n.º 15
0
def main(bpi_raster=None, out_raster=None):
    """Standardize a BPI raster: ((value - mean) / stddev) * 100, rounded."""
    try:
        # Get raster properties
        message = ("Calculating properties of the Bathymetric "
                   "Position Index (BPI) raster...")
        utils.msg(message)
        utils.msg("  input raster: {}\n   output: {}".format(
            bpi_raster, out_raster))
        # resolve the input to an on-disk catalog path
        bpi_path = arcpy.Describe(bpi_raster).catalogPath

        mean_value = utils.raster_properties(bpi_path, "MEAN")
        utils.msg("BPI raster mean: {}.".format(mean_value))
        std_dev = utils.raster_properties(bpi_path, "STD")
        utils.msg("BPI raster standard deviation: {}.".format(std_dev))

        # Create the standardized Bathymetric Position Index (BPI) raster
        utils.msg("Standardizing the Bathymetric Position Index (BPI) raster...")
        arcpy.env.rasterStatistics = "STATISTICS"
        # z-score scaled by 100; +0.5 before Int() rounds to nearest
        centered = Minus(bpi_path, mean_value)
        scaled = Times(Divide(centered, std_dev), 100)
        std_raster = Int(Plus(scaled, 0.5))
        validated_out = utils.validate_path(out_raster)
        std_raster.save(validated_out)

    except Exception as e:
        utils.msg(e, mtype='error')
Exemplo n.º 16
0
def main(in_raster=None, out_raster=None, acr_correction=True, area_raster=None):
    """
    A calculation of rugosity, based on the difference between surface
    area and planar area, as described in Jenness, J. 2002. Surface Areas
    and Ratios from Elevation Grid (surfgrids.avx) extension for ArcView 3.x,
    v. 1.2. Jenness Enterprises.

    NOTE: the VRM method implemented in ruggeddness is generally considered
          superior to this method.
    """

    # sanitize acr input: toolbox booleans may arrive as text. FIX:
    # compare the string form so 'false' is recognized regardless of
    # string type (the original only checked `unicode`, which missed
    # byte strings and is gone in Python 3).
    if '{}'.format(acr_correction).lower() == 'false':
        acr_correction = False

    out_workspace = os.path.dirname(out_raster)
    # make sure workspace exists
    utils.workspace_exists(out_workspace)

    utils.msg("Set scratch workspace to {}...".format(out_workspace))

    # force temporary stats to be computed in our output workspace
    arcpy.env.scratchWorkspace = out_workspace
    arcpy.env.workspace = out_workspace
    pyramid_orig = arcpy.env.pyramid
    arcpy.env.pyramid = "NONE"
    # TODO: currently set to automatically overwrite, expose this as option
    arcpy.env.overwriteOutput = True

    bathy = Raster(in_raster)
    # get the cell size of the input raster; use same calculation as was
    # performed in BTM v1: (mean_x + mean_y) / 2
    cell_size = (bathy.meanCellWidth + bathy.meanCellHeight) / 2.0
    corner_dist = math.sqrt(2 * cell_size ** 2)
    flat_area = cell_size ** 2
    utils.msg("Cell size: {}\nFlat area: {}".format(cell_size, flat_area))

    try:
        # Create a set of shifted grids, with offets n from the origin X:

        #        8 | 7 | 6
        #        --|---|---
        #        5 | X | 4
        #        --|---|---
        #        3 | 2 | 1

        positions = [(1, -1), (0, -1), (-1, -1),
                     (1,  0),          (-1,  0),
                     (1,  1), (0,  1), (-1,  1)]

        corners = (1, 3, 6, 8)      # dist * sqrt(2), as set in corner_dist
        orthogonals = (2, 4, 5, 7)  # von Neumann neighbors, straight dist
        shift_rasts = [None]        # offset to align numbers
        temp_rasts = []

        for (n, pos) in enumerate(positions, start=1):
            utils.msg("Creating Shift Grid {} of 8...".format(n))
            # scale shift offsets by cell size. FIX: the original used
            # `map(lambda(n): ...)`, a Python-2-only parenthesized lambda
            # parameter (removed by PEP 3113); plain tuple arithmetic is
            # behavior-identical and portable.
            (x_shift, y_shift) = (pos[0] * cell_size, pos[1] * cell_size)

            # set explicit path on shift rasters, otherwise suffer
            # inexplicable 999999 errors.
            shift_out = os.path.join(out_workspace, "shift_{}.tif".format(n))
            shift_out = utils.validate_path(shift_out)
            temp_rasts.append(shift_out)
            arcpy.Shift_management(bathy, shift_out, x_shift, y_shift)
            shift_rasts.append(arcpy.sa.Raster(shift_out))

        edge_rasts = [None]
        # calculate triangle length grids

        # edges 1-8: pairs of bathy:shift[n]
        for (n, shift) in enumerate(shift_rasts[1:], start=1):
            utils.msg("Calculating Triangle Edge {} of 16...".format(n))
            # adjust for corners being sqrt(2) from center
            if n in corners:
                dist = corner_dist
            else:
                dist = cell_size
            edge_out = os.path.join(out_workspace, "edge_{}.tif".format(n))
            edge_out = utils.validate_path(edge_out)
            temp_rasts.append(edge_out)
            edge = compute_edge(bathy, shift, dist)
            edge.save(edge_out)
            edge_rasts.append(arcpy.sa.Raster(edge_out))

        # edges 9-16: pairs of adjacent shift grids [see layout above]
        # in BTM_v1, these are labeled A-H
        adjacent_shift = [(1, 2), (2, 3), (1, 4), (3, 5),
                          (6, 4), (5, 8), (6, 7), (7, 8)]
        for (n, pair) in enumerate(adjacent_shift, start=9):
            utils.msg("Calculating Triangle Edge {} of 16...".format(n))
            # the two shift rasters for this iteration
            (i, j) = pair
            edge_out = os.path.join(out_workspace, "edge_{}.tif".format(n))
            edge_out = utils.validate_path(edge_out)
            temp_rasts.append(edge_out)
            edge = compute_edge(shift_rasts[i], shift_rasts[j], cell_size)
            edge.save(edge_out)
            edge_rasts.append(arcpy.sa.Raster(edge_out))

        # areas of each triangle
        areas = []
        for (n, pair) in enumerate(adjacent_shift, start=1):
            utils.msg("Calculating Triangle Area {} of 8...".format(n))
            # the two shift rasters; n has the third side
            (i, j) = pair
            area_out = os.path.join(out_workspace, "area_{}.tif".format(n))
            area_out = utils.validate_path(area_out)
            temp_rasts.append(area_out)

            area = triangle_area(edge_rasts[i], edge_rasts[j], edge_rasts[n+8])
            area.save(area_out)
            areas.append(arcpy.sa.Raster(area_out))

        utils.msg("Summing Triangle Area...")
        arcpy.env.pyramid = pyramid_orig
        arcpy.env.rasterStatistics = "STATISTICS"
        total_area = (areas[0] + areas[1] + areas[2] + areas[3] +
                      areas[4] + areas[5] + areas[6] + areas[7])
        if area_raster:
            save_msg = "Saving Surface Area Raster to " + \
                "{}.".format(area_raster)
            utils.msg(save_msg)
            total_area.save(area_raster)

        if not acr_correction:
            utils.msg("Calculating ratio with uncorrected planar area.")
            area_ratio = total_area / cell_size**2
        else:
            utils.msg("Calculating ratio with slope-corrected planar area.")
            # planar area shrinks by cos(slope); 0.01745 ~= pi / 180
            slope_raster = arcpy.sa.Slope(in_raster, "DEGREE", "1")
            planar_area = Divide(float(cell_size**2),
                                 Cos(Times(slope_raster, 0.01745)))
            area_ratio = Divide(total_area, planar_area)

        out_raster = utils.validate_path(out_raster)
        save_msg = "Saving Surface Area to Planar Area ratio to " + \
            "{}.".format(out_raster)
        utils.msg(save_msg)
        area_ratio.save(out_raster)

    except Exception as e:
        utils.msg(e, mtype='error')

    try:
        # Delete all intermediate raster data sets
        utils.msg("Deleting intermediate data...")
        for path in temp_rasts:
            arcpy.Delete_management(path)

    except Exception as e:
        utils.msg(e, mtype='error')
Exemplo n.º 17
0
def main(in_raster=None, neighborhood_size=None, out_raster=None):
    """
    Compute terrain ruggedness, using the vector ruggedness measure (VRM),
    as described in:

        Sappington et al., 2007. Quantifying Landscape Ruggedness for
        Animal Habitat Analysis: A Case Study Using Bighorn Sheep in the
        Mojave Desert. Journal of Wildlife Management. 71(5): 1419 -1426.

    Parameters:
        in_raster -- input bathymetry/elevation raster.
        neighborhood_size -- edge length (in cells) of the square
            neighborhood used to sum the unit vectors.
        out_raster -- path for the output ruggedness raster.
    """
    hood_size = int(neighborhood_size)

    # FIXME: expose this as an option per #18
    out_workspace = os.path.dirname(out_raster)
    utils.workspace_exists(out_workspace)
    # force temporary stats to be computed in our output workspace
    arcpy.env.scratchWorkspace = out_workspace
    arcpy.env.workspace = out_workspace

    # TODO expose as config
    pyramid_orig = arcpy.env.pyramid
    arcpy.env.pyramid = "NONE"
    # TODO: currently set to automatically overwrite, expose this as option
    arcpy.env.overwriteOutput = True

    try:
        # Create Slope and Aspect rasters
        utils.msg("Calculating aspect...")
        out_aspect = Aspect(in_raster)
        utils.msg("Calculating slope...")
        out_slope = Slope(in_raster, "DEGREE")

        # Convert Slope and Aspect rasters to radians
        utils.msg("Converting slope and aspect to radians...")
        slope_rad = out_slope * (math.pi / 180)
        aspect_rad = out_aspect * (math.pi / 180)

        # Calculate x, y, and z components of the per-cell unit vector.
        # Flat cells (aspect == -1) get no horizontal component.
        utils.msg("Calculating x, y, and z rasters...")
        xy_raster_calc = Sin(slope_rad)
        z_raster_calc = Cos(slope_rad)
        x_raster_calc = Con(out_aspect == -1, 0,
                            Sin(aspect_rad)) * xy_raster_calc
        y_raster_calc = Con(out_aspect == -1, 0,
                            Cos(aspect_rad)) * xy_raster_calc

        # Calculate sums of x, y, and z rasters for selected neighborhood size
        utils.msg("Calculating sums of x, y, and z rasters in neighborhood...")
        hood = NbrRectangle(hood_size, hood_size, "CELL")
        x_sum_calc = FocalStatistics(x_raster_calc, hood, "SUM", "NODATA")
        y_sum_calc = FocalStatistics(y_raster_calc, hood, "SUM", "NODATA")
        z_sum_calc = FocalStatistics(z_raster_calc, hood, "SUM", "NODATA")

        # Magnitude of the resultant vector
        utils.msg("Calculating the resultant vector...")
        result_vect = (x_sum_calc**2 + y_sum_calc**2 + z_sum_calc**2)**0.5

        arcpy.env.rasterStatistics = "STATISTICS"
        arcpy.env.pyramid = pyramid_orig
        # Ruggedness = 1 - (|resultant| / number of cells in neighborhood)
        utils.msg("Calculating the final ruggedness raster...")
        ruggedness = 1 - (result_vect / hood_size**2)

        out_raster = utils.validate_path(out_raster)
        # BUGFIX: message previously read "Saving ruggedness raster to to"
        utils.msg("Saving ruggedness raster to {}.".format(out_raster))
        ruggedness.save(out_raster)

    except Exception as e:
        utils.msg(e, mtype='error')
Exemplo n.º 18
0
def main(classification_file,
         bpi_broad_std,
         bpi_fine_std,
         slope,
         bathy,
         out_raster=None):
    """
    Perform raster classification, based on classification mappings
    and provided raster derivatives (fine- and broad- scale BPI,
    slope, and the original raster). Outputs a classified raster.

    Parameters:
        classification_file -- document (CSV/Excel/...) of class ranges.
        bpi_broad_std -- standardized broad-scale BPI raster.
        bpi_fine_std -- standardized fine-scale BPI raster.
        slope -- slope raster.
        bathy -- original bathymetry raster.
        out_raster -- path for the classified output raster.
    """
    try:
        # set up scratch workspace
        # FIXME: see issue #18
        # CON is very very picky. it generates GRID outputs by default, and the
        # resulting names must not exist. for now, push our temp results
        # to the output folder.
        out_workspace = os.path.dirname(out_raster)
        # make sure workspace exists
        utils.workspace_exists(out_workspace)
        arcpy.env.scratchWorkspace = out_workspace
        arcpy.env.workspace = out_workspace

        arcpy.env.overwriteOutput = True
        msg_text = ("Generating the classified grid, based on the provided"
                    " classes in '{}'.".format(classification_file))
        utils.msg(msg_text)

        # Read in the BTM Document; the class handles parsing a variety of inputs.
        btm_doc = utils.BtmDocument(classification_file)
        classes = btm_doc.classification()
        utils.msg("Parsing {} document... found {} classes.".format(
            btm_doc.doctype, len(classes)))

        grids = []
        for item in classes:
            cur_class = str(item["Class"])
            cur_name = str(item["Zone"])
            utils.msg("Calculating grid for {}...".format(cur_name))
            out_con = None
            # here come the CONs: each run_con narrows the previous result
            # to the class's value range for one derivative.
            out_con = run_con(item["Depth_LowerBounds"],
                              item["Depth_UpperBounds"], bathy, cur_class)
            out_con2 = run_con(item["Slope_LowerBounds"],
                               item["Slope_UpperBounds"], slope, out_con,
                               cur_class)
            out_con3 = run_con(item["LSB_LowerBounds"],
                               item["LSB_UpperBounds"], bpi_fine_std, out_con2,
                               cur_class)
            out_con4 = run_con(item["SSB_LowerBounds"],
                               item["SSB_UpperBounds"], bpi_broad_std,
                               out_con3, cur_class)

            if type(out_con4) == arcpy.sa.Raster:
                rast = utils.save_raster(out_con4,
                                         "con_{}.tif".format(cur_name))
                grids.append(rast)
            else:
                # fall-through: no valid values detected for this class.
                warn_msg = ("WARNING, no valid locations found for class"
                            " {}:\n".format(cur_name))
                classifications = {
                    'depth':
                    (item["Depth_LowerBounds"], item["Depth_UpperBounds"]),
                    'slope':
                    (item["Slope_LowerBounds"], item["Slope_UpperBounds"]),
                    'broad':
                    (item["SSB_LowerBounds"], item["SSB_UpperBounds"]),
                    'fine': (item["LSB_LowerBounds"], item["LSB_UpperBounds"])
                }
                for (name, vrange) in classifications.items():
                    (vmin, vmax) = vrange
                    # BUGFIX: was `if vmin or vmax is not None:`, which
                    # parses as `vmin or (vmax is not None)` and skipped
                    # ranges whose only set bound was falsy (e.g. 0).
                    if vmin is not None or vmax is not None:
                        if vmin is None:
                            vmin = ""
                        if vmax is None:
                            vmax = ""
                        warn_msg += "  {}: {{{}:{}}}\n".format(
                            name, vmin, vmax)

                utils.msg(textwrap.dedent(warn_msg))

        if len(grids) == 0:
            raise NoValidClasses

        utils.msg("Creating Benthic Terrain Classification Dataset...")
        # Merge per-class grids: cells still 0 in the running merge take
        # their value from the next class grid.
        merge_grid = grids[0]
        for i in range(1, len(grids)):
            utils.msg("{} of {}".format(i, len(grids) - 1))
            merge_grid = Con(merge_grid, grids[i], merge_grid, "VALUE = 0")

        arcpy.env.rasterStatistics = "STATISTICS"
        # validate the output raster path
        out_raster = utils.validate_path(out_raster)
        utils.msg("Saving Output to {}".format(out_raster))
        merge_grid.save(out_raster)
        utils.msg("Complete.")

    except NoValidClasses as e:
        utils.msg(e, mtype='error')
    except Exception as e:
        if type(e) is ValueError:
            raise e
        utils.msg(e, mtype='error')

    try:
        utils.msg("Deleting intermediate data...")
        # Delete all intermediate raster data sets
        for grid in grids:
            arcpy.Delete_management(grid.catalogPath)
    except Exception as e:
        # hack -- swallowing this exception, because sometimes
        # refs are left around for these files.
        utils.msg("Failed to delete all intermediate data.", mtype='warning')
Exemplo n.º 19
0
def main(in_raster=None,
         neighborhood_size=None,
         out_workspace=None,
         out_stats_raw=None):
    """
    Compute depth statistics over a square neighborhood of cells.
    Supported statistics: mean, standard deviation, and variance.
    """
    out_stats = out_stats_raw.replace("'", '').split(";")
    arcpy.env.rasterStatistics = "STATISTICS"

    # announce which statistics were requested
    utils.msg("The following stats will be computed: " + \
        "{}".format(";".join(out_stats)))

    requested = set(out_stats)
    # variance is derived from stdev, so either label triggers the STD pass
    need_mean = bool(requested & {'Mean Depth'})
    need_std = bool(requested & {'Standard Deviation', 'Variance'})

    try:
        utils.msg("Calculating neighborhood...")
        window = NbrRectangle(neighborhood_size, neighborhood_size,
                              "CELL")

        if need_mean:
            utils.msg("Calculating mean depth...")
            mean_depth = FocalStatistics(in_raster, window, "MEAN",
                                         "NODATA")
            mean_path = os.path.join(out_workspace, "meandepth")
            utils.msg("saving mean depth to {}".format(mean_path))
            mean_depth.save(mean_path)

        if need_std:
            utils.msg("Calculating depth standard deviation...")
            std_depth = FocalStatistics(in_raster, window, "STD",
                                        "NODATA")
            std_path = os.path.join(out_workspace, "stdevdepth")
            utils.msg(
                "saving standard deviation depth to {}".format(std_path))
            std_depth.save(std_path)

            # no direct variance focal statistic exists; square the stdev
            if 'Variance' in out_stats:
                utils.msg("Calculating depth variance...")
                var_depth = Power(std_depth, 2)
                var_path = os.path.join(out_workspace, "vardepth")
                utils.msg("saving depth variance to {}".format(var_path))
                var_depth.save(var_path)

    except Exception as e:
        utils.msg(e, mtype='error')
Exemplo n.º 20
0
# Geoprocessing, array handling, and plotting dependencies.
import arcpy
import numpy as np
import sys
# project-local configuration and helper utilities
import scripts.config as config
import scripts.utils as utils
from matplotlib import pyplot as plt

# The filters this tool applies come from scipy.ndimage; without SciPy
# there is nothing useful to do, so report an error and exit immediately.
if not utils.SCIPY_EXISTS:
    utils.msg("This tool requires the SciPy module to be " +
              "installed. SciPy is included in ArcGIS 10.4 " +
              "and later versions.", "error")
    sys.exit()
else:
    import scipy.ndimage as nd


def main(in_raster=None, img_filter=None, percentile=None,
         min_nbhs=None, max_nbhs=None, position=None, out_file=True):

    point = None
    if position:
        try:
            x, y = position.split(" ")
            point = arcpy.Point(x, y)
        except Exception as e:
            utils.msg("Invalid point, using lower left corner", "warning")
            utils.msg(e, 'error')
            point = None

    r = arcpy.RasterToNumPyArray(in_raster, point, 200, 200, 0)
Exemplo n.º 21
0
def main(in_raster=None, neighborhood_size=None,
         out_workspace=None, out_stats_raw=None, verbose=True):
    """
    Compute depth statistics, averaging values over a defined neighborhood
    of cells. Can compute mean, standard deviation, variance,
    interquartile range, and kurtosis.

    Parameters:
        in_raster -- input bathymetry raster.
        neighborhood_size -- edge length (in cells) of the square window.
        out_workspace -- directory receiving the output .tif rasters.
        out_stats_raw -- ";"-separated list of requested statistic labels.
        verbose -- when True, emit progress messages.
    """
    out_stats = out_stats_raw.replace("'", '').split(";")
    # VRM is computed by a separate tool; drop it if passed through
    out_stats = list(set(out_stats)-set(['Terrain Ruggedness (VRM)']))
    arcpy.env.rasterStatistics = "STATISTICS"
    arcpy.env.compression = 'LZW'  # compress output rasters

    # convert our data to sets for easy comparison
    mean_set = set(['Mean Depth'])
    std_dev_set = set(['Standard Deviation', 'Variance'])
    iqr_set = set(['Interquartile Range'])
    kurt_set = set(['Kurtosis'])

    # list stats to be computed
    if verbose:
        utils.msg("The following stats will be computed: " +
                  "{}".format(";".join(out_stats)))

    # these two tools both use block processing which requires NetCDF4
    if 'Interquartile Range' in out_stats or 'Kurtosis' in out_stats:
        if not utils.NETCDF4_EXISTS:
            utils.msg("The interquartile range and kurtosis tools require "
                      "the NetCDF4 Python library is installed. NetCDF4 "
                      "is included in ArcGIS 10.3 and later.", "error")
            return

        if 'Kurtosis' in out_stats and not utils.SCIPY_EXISTS:
            utils.msg("The kurtosis calculation requires the SciPy library "
                      "is installed. SciPy is included in ArcGIS 10.4 and "
                      "later versions.", "error")
            return
    try:
        # initialize our neighborhood
        if verbose:
            utils.msg("Calculating neighborhood...")
        neighborhood = NbrRectangle(
            neighborhood_size, neighborhood_size, "CELL")
        n_label = "{:03d}".format(int(neighborhood_size))
        # half-window size, used as block overlap for block processing
        overlap = int((int(neighborhood_size)/2.0)-0.5)

        if mean_set.intersection(out_stats):
            if verbose:
                utils.msg("Calculating mean depth...")
            mean_depth = FocalStatistics(in_raster, neighborhood,
                                         "MEAN", "NODATA")
            mean_raster = os.path.join(out_workspace,
                                       "meandepth_{}.tif".format(n_label))
            if verbose:
                utils.msg("saving mean depth to {}".format(mean_raster))
            arcpy.CopyRaster_management(mean_depth, mean_raster)

        # compute stdev in this case
        if std_dev_set.intersection(out_stats):
            std_dev = 'Standard Deviation' in out_stats
            if verbose and std_dev:
                utils.msg("Calculating depth standard deviation...")
            std_dev_depth = FocalStatistics(in_raster, neighborhood,
                                            "STD", "NODATA")
            std_dev_raster = os.path.join(out_workspace,
                                          "stddevdepth_{}.tif".format(n_label))
            if verbose and std_dev:
                # BUGFIX: this message used a backslash continuation inside
                # the string literal, embedding a newline and a run of
                # indentation spaces into the logged text.
                utils.msg("saving standard deviation depth "
                          "to {}".format(std_dev_raster))
            arcpy.CopyRaster_management(std_dev_depth, std_dev_raster)

            # no direct variance focal stat, have to stdev^2
            if 'Variance' in out_stats:
                if verbose:
                    utils.msg("Calculating depth variance...")
                var_depth = Power(std_dev_depth, 2)
                var_raster = os.path.join(out_workspace,
                                          "vardepth_{}.tif".format(n_label))
                if verbose:
                    utils.msg("saving depth variance to {}".format(var_raster))
                arcpy.CopyRaster_management(var_depth, var_raster)
                if not std_dev:
                    # stdev was only an intermediate; remove its raster
                    arcpy.Delete_management(std_dev_raster)

        if iqr_set.intersection(out_stats):
            if verbose:
                utils.msg("Calculating depth interquartile range...")
            iqr_raster = os.path.join(out_workspace,
                                      "iqrdepth_{}.tif".format(n_label))
            bp = utils.BlockProcessor(in_raster)
            # limit 3D blocks to 10^8 elements (.4GB)
            # NOTE(review): sqrt(...)**2 is nearly a no-op here, while the
            # kurtosis branch subtracts overlap*2 instead -- confirm which
            # blocksize formula is intended.
            blocksize = int(math.sqrt((10**8) /
                                      (int(neighborhood_size)**2))**2)
            bp.computeBlockStatistics(iqr, blocksize, iqr_raster, overlap)

        if kurt_set.intersection(out_stats):
            if verbose:
                utils.msg("Calculating depth kurtosis...")
            kurt_raster = os.path.join(out_workspace,
                                       "kurtosisdepth_{}.tif".format(n_label))
            bp = utils.BlockProcessor(in_raster)
            # limit 3D blocks to 10^8 elements (.4GB)
            blocksize = int(math.sqrt((10**8) /
                                      (int(neighborhood_size)**2)) - overlap*2)
            bp.computeBlockStatistics(kurtosis, blocksize,
                                      kurt_raster, overlap)

    except Exception as e:
        utils.msg(e, mtype='error')
Exemplo n.º 22
0
# Geoprocessing, array handling, and plotting dependencies.
import arcpy
import numpy as np
import sys
# project-local configuration and helper utilities
import scripts.config as config
import scripts.utils as utils
from matplotlib import pyplot as plt

# The filters this tool applies come from scipy.ndimage; without SciPy
# there is nothing useful to do, so report an error and exit immediately.
if not utils.SCIPY_EXISTS:
    utils.msg("This tool requires the SciPy module to be " +
              "installed. SciPy is included in ArcGIS 10.4 " +
              "and later versions.", "error")
    sys.exit()
else:
    import scipy.ndimage as nd


def main(in_raster=None, img_filter=None, percentile=None,
         min_nbhs=None, max_nbhs=None, out_file=True):

    r = arcpy.RasterToNumPyArray(in_raster, "", 200, 200, 0)
    if r.ndim > 2:
        r = np.squeeze(r[0, :, :])
    min_nbhs = int(min_nbhs)
    max_nbhs = int(max_nbhs)

    fig = plt.figure(figsize=(10, 10))
    i = 0
    # cast with astype instead of using dtype= for Numpy 1.7 compat
    sizes = np.linspace(min_nbhs, max_nbhs, num=25, endpoint=True,
                        retstep=False).astype('uint32')
    for size in sizes:
Exemplo n.º 23
0
def main(out_workspace, input_bathymetry, broad_bpi_inner_radius,
         broad_bpi_outer_radius, fine_bpi_inner_radius, fine_bpi_outer_radius,
         classification_dict, output_zones):
    """
    Run the complete BTM model: compute fine- and broad-scale BPI,
    standardize both, derive slope, then classify zones by mapping the
    value ranges in the classification dictionary (a CSV or Excel
    spreadsheet) onto those bathymetry derivatives.
    """

    # intermediate raster locations, all inside the output workspace
    broad_bpi = os.path.join(out_workspace, "broad_bpi")
    fine_bpi = os.path.join(out_workspace, "fine_bpi")
    slope_rast = os.path.join(out_workspace, "slope")
    broad_std = os.path.join(out_workspace, "broad_std")
    fine_std = os.path.join(out_workspace, "fine_std")

    utils.workspace_exists(out_workspace)
    # route scratch and working output into the same workspace
    arcpy.env.scratchWorkspace = out_workspace
    arcpy.env.workspace = out_workspace

    # TODO: currently set to automatically overwrite, expose this as option
    arcpy.env.overwriteOutput = True

    try:
        # broad-scale BPI
        utils.msg("Calculating broad-scale BPI...")
        bpi.main(input_bathymetry, broad_bpi_inner_radius,
                 broad_bpi_outer_radius, broad_bpi, bpi_type='broad')

        # fine-scale BPI
        utils.msg("Calculating fine-scale BPI...")
        bpi.main(input_bathymetry, fine_bpi_inner_radius,
                 fine_bpi_outer_radius, fine_bpi, bpi_type='fine')

        # standardize both BPI rasters
        utils.msg("Standardizing BPI rasters...")
        standardize_bpi.main(broad_bpi, broad_std)
        standardize_bpi.main(fine_bpi, fine_std)

        # slope derivative
        slope.main(input_bathymetry, slope_rast)

        # zone classification; temporarily force outputs onto the map,
        # restoring the prior setting afterwards
        prior_add_outputs = arcpy.env.addOutputsToMap
        arcpy.env.addOutputsToMap = True
        utils.msg("Classifying Zones...")
        classify.main(classification_dict, broad_std, fine_std, slope_rast,
                      input_bathymetry, output_zones)
        arcpy.env.addOutputsToMap = prior_add_outputs

    except Exception as e:
        # Print error message if an error occurs
        utils.msg(e, mtype='error')
Exemplo n.º 24
0
def main(in_raster=None, neighborhood_size=None,
         out_workspace=None, out_stats_raw=None, verbose=True):
    """
    Compute depth statistics over a square neighborhood of cells.
    Supports mean, standard deviation, and variance; outputs are
    written as LZW-compressed GeoTIFFs tagged with the window size.
    """
    out_stats = out_stats_raw.replace("'", '').split(";")
    arcpy.env.rasterStatistics = "STATISTICS"
    arcpy.env.compression = 'LZW'  # compress output rasters

    requested = set(out_stats)
    # variance is derived from stdev, so either label triggers the STD pass
    need_mean = bool(requested & {'Mean Depth'})
    need_std = bool(requested & {'Standard Deviation', 'Variance'})

    # announce which statistics were requested
    if verbose:
        utils.msg("The following stats will be computed: " + \
            "{}".format(";".join(out_stats)))

    try:
        if verbose:
            utils.msg("Calculating neighborhood...")
        window = NbrRectangle(
            neighborhood_size, neighborhood_size, "CELL")
        n_label = "{:03d}".format(int(neighborhood_size))

        if need_mean:
            if verbose:
                utils.msg("Calculating mean depth...")
            mean_depth = FocalStatistics(in_raster, window, "MEAN", "NODATA")
            mean_path = os.path.join(
                out_workspace, "meandepth_{}.tif".format(n_label))
            if verbose:
                utils.msg("saving mean depth to {}".format(mean_path))
            arcpy.CopyRaster_management(mean_depth, mean_path)

        if need_std:
            if verbose:
                utils.msg("Calculating depth standard deviation...")
            std_depth = FocalStatistics(in_raster, window, "STD", "NODATA")
            std_path = os.path.join(
                out_workspace, "stddevdepth_{}.tif".format(n_label))
            if verbose:
                utils.msg("saving standard deviation depth to {}".format(std_path))
            arcpy.CopyRaster_management(std_depth, std_path)

            # no direct variance focal statistic exists; square the stdev
            if 'Variance' in out_stats:
                if verbose:
                    utils.msg("Calculating depth variance...")
                var_depth = Power(std_depth, 2)
                var_path = os.path.join(
                    out_workspace, "vardepth_{}.tif".format(n_label))
                if verbose:
                    utils.msg("saving depth variance to {}".format(var_path))
                arcpy.CopyRaster_management(var_depth, var_path)

    except Exception as e:
        utils.msg(e, mtype='error')
Exemplo n.º 25
0
def main(bpi_raster=None, out_raster=None):
    """
    Standardize a BPI raster: subtract its mean, divide by its standard
    deviation, scale by 100, and round to the nearest integer.
    """
    try:
        # report what we're working with
        utils.msg("Calculating properties of the Bathymetric "
                  "Position Index (BPI) raster...")
        utils.msg("  input raster: {}\n   output: {}".format(
            bpi_raster, out_raster))
        # resolve the input to its full catalog path
        bpi_path = arcpy.Describe(bpi_raster).catalogPath

        # pull the global statistics needed for standardization
        mean_val = utils.raster_properties(bpi_path, "MEAN")
        utils.msg("BPI raster mean: {}.".format(mean_val))
        std_val = utils.raster_properties(bpi_path, "STD")
        utils.msg("BPI raster standard deviation: {}.".format(std_val))

        # standardized BPI = Int(((bpi - mean) / std) * 100 + 0.5)
        utils.msg("Standardizing the Bathymetric Position Index (BPI) raster...")
        arcpy.env.rasterStatistics = "STATISTICS"
        scaled = Times(Divide(Minus(bpi_path, mean_val), std_val), 100)
        std_raster = Int(Plus(scaled, 0.5))
        out_raster = utils.validate_path(out_raster)
        std_raster.save(out_raster)

    except Exception as e:
        utils.msg(e, mtype='error')