Example #1
def convert_and_filter_by_code(raster_dataset, filter_value=0):
	"""
		Given a raster and a grid value, converts the raster to polygons and discards all resulting polygons whose grid value is not equal to filter_value

		TODO: This could be faster or more resource efficient if we use raster calculator to set all non-interesting pixels to Null first, then they just don't get converted?
	:param raster_dataset: A raster dataset on disk
	:param filter_value: the value to keep - Polygons resulting from all other values will be discarded.
	:return: polygon feature class
	"""

	arcpy.CheckOutExtension("Spatial")
	null_raster = arcpy.sa.SetNull(raster_dataset, raster_dataset, where_clause="Value <> {0:s}".format(str(filter_value)))
	raster_dataset = generate_gdb_filename("raster")
	null_raster.save(raster_dataset)

	raster_poly = generate_gdb_filename("fil", scratch=True)
	arcpy.RasterToPolygon_conversion(null_raster, raster_poly, simplify=False, raster_field="Value")

	# remove polygons that we're not looking at (gridcode != filter_value)
	working_layer = "working_layer"
	arcpy.MakeFeatureLayer_management(raster_poly, working_layer, where_clause="gridcode = {0:s}".format(str(filter_value)))  # load a feature layer and remove the polygons we're not interested in in a single step

	final_poly = generate_gdb_filename("polygon")
	arcpy.CopyFeatures_management(working_layer, final_poly)

	arcpy.CheckInExtension("Spatial")
	return final_poly
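
A minimal usage sketch for the function above. It assumes arcpy with the Spatial Analyst extension and the module's generate_gdb_filename helper are available; the raster path and filter value are hypothetical.

suitable_raster = r"C:\data\analysis.gdb\suitability"  # hypothetical 0/1 raster
# keep only the polygons whose grid value is 1
suitable_polys = convert_and_filter_by_code(suitable_raster, filter_value=1)
print("Polygons written to {0}".format(suitable_polys))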
Example #2
def process_local_slope(dem=None,
                        slope=None,
                        max_slope=30,
                        mask=None,
                        return_type="polygon",
                        workspace=None):
    """

	:param dem: The DEM to process
	:param slope: If slope is already processed, use this instead.
	:param max_slope: The maximum slope in degrees that will be considered suitable for building
	:param mask: A polygon or raster mask to use as the processing area (arcpy.env.mask/Analysis Mask environment)
	:param return_type: whether to return a polygon feature class or a raster. Default is "polygon", in which case the thresholded raster is converted to polygons automatically. Options are "polygon" or "raster"
	:return:
	"""

    if not dem and not slope:
        raise ValueError(
            "Must provide either a slope raster or a DEM raster. Either parameter 'dem' or parameter 'slope' must be defined."
        )

    arcpy.CheckOutExtension("Spatial")

    if not slope:
        arcpy.env.mask = mask
        logger.info("Processing raster to slope")
        mask_raster = arcpy.sa.ExtractByMask(
            dem, mask
        )  # mask environment variable hasn't been working - force extraction
        slope_raster = arcpy.sa.Slope(mask_raster, output_measurement="DEGREE")
    else:
        slope_raster = arcpy.sa.ExtractByMask(
            slope, mask
        )  # mask environment variable hasn't been working - force extraction

    logger.info("Thresholding raster")
    threshold_raster = slope_raster < max_slope

    raster_name = generate_gdb_filename("slope_raster", gdb=workspace)

    logger.info("Saving raster to disk")
    threshold_raster.save(raster_name)

    arcpy.CheckInExtension("Spatial")

    if return_type.lower() == "polygon":

        logger.info("Converting to polygons")
        new_name = convert_and_filter_by_code(raster_name, filter_value=1)

        poly_name = generate_gdb_filename("slope_polygon", gdb=workspace)
        arcpy.CopyFeatures_management(new_name, poly_name)

        return poly_name
    elif return_type.lower() == "raster":
        return raster_name
    else:
        raise ValueError(
            "Invalid parameter for return_type. Must be either \"raster\" or \"polygon\""
        )
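
A hedged example of calling process_local_slope with a DEM and a polygon mask. The paths and workspace are hypothetical, and the module-level logger and generate_gdb_filename helpers are assumed to be in scope.

dem = r"C:\data\elevation.gdb\dem_10m"  # hypothetical DEM
study_area = r"C:\data\analysis.gdb\study_area"  # hypothetical mask polygon
buildable = process_local_slope(dem=dem,
                                max_slope=15,
                                mask=study_area,
                                return_type="polygon",
                                workspace=r"C:\data\analysis.gdb")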
Example #3
def land_use(nlcd_layer, search_area, excluded_types, tiger_lines,
             census_places, crs, workspace):
    arcpy.CheckOutExtension("spatial")

    geoprocessing_log.info("Extracting NLCD raster to search area")
    nlcd_in_area = arcpy.sa.ExtractByMask(nlcd_layer, search_area)
    avoid_types = [exclude.value for exclude in excluded_types]

    thresholded_raster = nlcd_in_area
    for avoid in avoid_types:  # this is inefficient, but I couldn't get it to work with a "not in" and I need something that works for n number of values
        thresholded_raster = arcpy.sa.Con(thresholded_raster != avoid,
                                          thresholded_raster, 0)

    scratch_raster = generate_gdb_filename("temp_raster", scratch=True)
    thresholded_raster.save(
        scratch_raster)  # save it so we can use it for environment variables

    stored_environments = store_environments(
        ('cellSize', 'mask', 'extent', 'snapRaster')
    )  # cache the env vars so we can reset them at the end of this function
    arcpy.env.cellSize = scratch_raster
    arcpy.env.mask = scratch_raster
    arcpy.env.extent = scratch_raster
    arcpy.env.snapRaster = scratch_raster

    roads_mask = make_road_mask(tiger_lines,
                                census_places=census_places,
                                search_area=search_area)
    roads_raster = generate_gdb_filename("roads_raster")
    geoprocessing_log.info("Converting roads mask to raster")
    try:
        arcpy.PolygonToRaster_conversion(roads_mask, "OBJECTID", roads_raster)
        #arcpy.CalculateStatistics_management(roads_raster)  # crashes for invalid statistics unless we run this after the conversion
    except:
        geoprocessing_log.error(
            "Error creating raster: {0:s} - from roads mask: {1:s}".format(
                roads_raster, roads_mask))
        raise

    # Raster Calculations
    final_nlcd = arcpy.sa.Con(arcpy.sa.IsNull(arcpy.sa.Raster(roads_raster)),
                              thresholded_raster, 1)
    intermediate_raster = generate_gdb_filename("intermediate_nlcd_mask")
    projected_raster = generate_gdb_filename("projected_nlcd_mask",
                                             gdb=workspace)
    final_nlcd.save(intermediate_raster)

    reset_environments(stored_environments)

    geoprocessing_log.info("Reprojecting final raster")
    arcpy.ProjectRaster_management(intermediate_raster,
                                   projected_raster,
                                   out_coor_system=crs)

    filtered_nlcd_poly = filter_patches.convert_and_filter_by_code(
        projected_raster, filter_value=0)

    return filtered_nlcd_poly
Example #4
def filter_small_patches(raster_dataset,
                         patch_area=9000,
                         area_length_ratio=4,
                         filter_value=0):
    """
		Given a boolean 0/1 raster, filters out patches that are either too small, or which are not compact
	:param raster_dataset: An ArcGIS compatible raster dataset
	:param patch_area: The minimum area to keep a patch, in square units of the raster's projection. Patches smaller than this will be removed.
	:param area_length_ratio: The ratio of area to perimeter (Area/Perimeter) under which patches will be removed.
	:param filter_value: The value in the grid which will be kept - other grid features will be removed.
	:return: Feature class converted from raster with patches removed when they don't match the filter_value, or when they are smaller than patch_area, or when their area to perimeter ratio is smaller than specified.
	"""
    # confirm we have a raster layer
    desc = arcpy.Describe(raster_dataset)
    if desc.dataType != "RasterDataset":
        raise TypeError("parameter raster_layer must be of type RasterDataset")

    del desc

    # convert raster to polygon
    raster_poly = generate_gdb_filename("fil", scratch=True)
    arcpy.RasterToPolygon_conversion(raster_dataset,
                                     raster_poly,
                                     simplify=False,
                                     raster_field="Value")

    # remove polygons that we're not looking at (gridcode != filter_value)
    working_layer = "working_layer"
    arcpy.MakeFeatureLayer_management(
        raster_poly,
        working_layer,
        where_clause="gridcode = {0:s}".format(str(filter_value))
    )  # load a feature layer and remove the polygons we're not interested in in a single step

    # remove polygons that are too small (smaller than patch_area)
    arcpy.SelectLayerByAttribute_management(
        working_layer,
        "NEW_SELECTION",
        where_clause="Shape_Area > {0:s}".format(str(patch_area)))

    # export to new layer for passing off to remove_non_compact_polys
    first_filtered_poly = generate_gdb_filename("filter_patches", scratch=True)
    arcpy.CopyFeatures_management(working_layer, first_filtered_poly)
    arcpy.Delete_management(
        working_layer
    )  # delete it, then create a new layer with the same name to pass to the next function

    arcpy.MakeFeatureLayer_management(first_filtered_poly, working_layer)

    # run remove_non_compact_polys to remove polygons that aren't compact
    filtered_features = remove_non_compact_polys(
        working_layer, area_length_ratio=area_length_ratio)

    # return the polygons
    return filtered_features
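
A sketch of calling filter_small_patches on a 0/1 suitability raster stored in a file geodatabase. The path is hypothetical and the thresholds are illustrative only.

# keep patches coded 1 that are at least 9000 square units and reasonably compact
patches = filter_small_patches(r"C:\data\analysis.gdb\suitable_mask",
                               patch_area=9000,
                               area_length_ratio=4,
                               filter_value=1)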
Example #5
def land_use(nlcd_layer, search_area, excluded_types, tiger_lines, census_places, crs, workspace):
    arcpy.CheckOutExtension("spatial")

    geoprocessing_log.info("Extracting NLCD raster to search area")
    nlcd_in_area = arcpy.sa.ExtractByMask(nlcd_layer, search_area)
    avoid_types = [exclude.value for exclude in excluded_types]

    thresholded_raster = nlcd_in_area
    for (
        avoid
    ) in (
        avoid_types
    ):  # this is inefficient, but I couldn't get it to work with a "not in" and I need something that works for n number of values
        thresholded_raster = arcpy.sa.Con(thresholded_raster != avoid, thresholded_raster, 0)

    scratch_raster = generate_gdb_filename("temp_raster", scratch=True)
    thresholded_raster.save(scratch_raster)  # save it so we can use it for environment variables

    stored_environments = store_environments(
        ("cellSize", "mask", "extent", "snapRaster")
    )  # cache the env vars so we can reset them at the end of this function
    arcpy.env.cellSize = scratch_raster
    arcpy.env.mask = scratch_raster
    arcpy.env.extent = scratch_raster
    arcpy.env.snapRaster = scratch_raster

    roads_mask = make_road_mask(tiger_lines, census_places=census_places, search_area=search_area)
    roads_raster = generate_gdb_filename("roads_raster")
    geoprocessing_log.info("Converting roads mask to raster")
    try:
        arcpy.PolygonToRaster_conversion(roads_mask, "OBJECTID", roads_raster)
        # arcpy.CalculateStatistics_management(roads_raster)  # crashes for invalid statistics unless we run this after the conversion
    except:
        geoprocessing_log.error(
            "Error creating raster: {0:s} - from roads mask: {1:s}".format(roads_raster, roads_mask)
        )
        raise

    # Raster Calculations
    final_nlcd = arcpy.sa.Con(arcpy.sa.IsNull(arcpy.sa.Raster(roads_raster)), thresholded_raster, 1)
    intermediate_raster = generate_gdb_filename("intermediate_nlcd_mask")
    projected_raster = generate_gdb_filename("projected_nlcd_mask", gdb=workspace)
    final_nlcd.save(intermediate_raster)

    reset_environments(stored_environments)

    geoprocessing_log.info("Reprojecting final raster")
    arcpy.ProjectRaster_management(intermediate_raster, projected_raster, out_coor_system=crs)

    filtered_nlcd_poly = filter_patches.convert_and_filter_by_code(projected_raster, filter_value=0)

    return filtered_nlcd_poly
Example #6
def mark_side_of_river(parcels, town_boundary, rivers):

	geoprocessing_log.info("Marking Side of River on Parcels")

	river_field = "is_river"
	correct_side_field = "stat_is_on_correct_side_of_river"
	dissolved_polygon_layer = "dissolved_polygon_layer"
	parcels_layer = "parcels_layer"

	# Process: Remove fields if they already exist - simpler
	geoprocessing_log.debug("--Cleaning fields")
	fields = [field.name for field in arcpy.ListFields(parcels)]
	if river_field in fields:
		arcpy.DeleteField_management(parcels, river_field)
	if correct_side_field in fields:
		arcpy.DeleteField_management(parcels, correct_side_field)

	geoprocessing_log.debug("--Adding and Calculating Defaults")
	arcpy.AddField_management(parcels, river_field, "SHORT", "", "", "", "", "NULLABLE", "NON_REQUIRED", "")
	arcpy.CalculateField_management(parcels, river_field, "0", "PYTHON_9.3", "")

	# Process: Add Correct Side Field
	arcpy.AddField_management(parcels, correct_side_field, "SHORT", "", "", "", "", "NULLABLE", "NON_REQUIRED", "")
	arcpy.CalculateField_management(parcels, correct_side_field, "0", "PYTHON_9.3", "")

	geoprocessing_log.debug("--Marking River")
	arcpy.MakeFeatureLayer_management(parcels, parcels_layer)
	try:
		arcpy.SelectLayerByLocation_management(parcels_layer, "INTERSECT", rivers, "", "NEW_SELECTION", "NOT_INVERT")
		arcpy.CalculateField_management(parcels_layer, "is_river", "1", "PYTHON_9.3", "")

		arcpy.SelectLayerByLocation_management(parcels_layer, "INTERSECT", parcels_layer, "", "SWITCH_SELECTION", "NOT_INVERT")

		geoprocessing_log.debug("--Dissolving")
		dissolved_parcels = generate_gdb_filename("dissolved_parcels")
		arcpy.Dissolve_management(parcels_layer, dissolved_parcels, "is_river", "", "SINGLE_PART", "DISSOLVE_LINES")

		arcpy.MakeFeatureLayer_management(dissolved_parcels, dissolved_polygon_layer)
		try:
			geoprocessing_log.debug("--Exporting and Marking River Side")
			correct_side_on_disk = generate_gdb_filename("correct_side_on_disk")
			arcpy.SelectLayerByLocation_management(dissolved_polygon_layer, "INTERSECT", town_boundary, "", "NEW_SELECTION", "NOT_INVERT")
			arcpy.CopyFeatures_management(dissolved_polygon_layer, correct_side_on_disk)

			arcpy.SelectLayerByLocation_management(parcels_layer, "INTERSECT", correct_side_on_disk, "", "NEW_SELECTION", "NOT_INVERT")
			arcpy.CalculateField_management(parcels_layer, correct_side_field, "1", "PYTHON_9.3", "")
		finally:
			arcpy.Delete_management(dissolved_polygon_layer)
	finally:

		arcpy.Delete_management(parcels_layer)
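
A hedged call sketch for mark_side_of_river. All three inputs are hypothetical feature classes; the function returns nothing and instead adds and populates the is_river and stat_is_on_correct_side_of_river fields on the parcels feature class in place.

mark_side_of_river(parcels=r"C:\data\town.gdb\parcels",
                   town_boundary=r"C:\data\town.gdb\boundary",
                   rivers=r"C:\data\town.gdb\rivers")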
Example #7
def file_gdb_layer_to_geojson(geodatabase, layer_name, outfile):

	geoprocessing_log.info("Converting layer to geojson")
	if os.path.exists(outfile):
		geoprocessing_log.warning("Output file {0:s} exists - Deleting".format(outfile))
		os.remove(outfile)

	geoprocessing_log.info("Reprojecting to web_mercator")
	reprojected = temp.generate_gdb_filename(layer_name)
	arcpy.Project_management(in_dataset=os.path.join(geodatabase, layer_name), out_dataset=reprojected, out_coor_system=arcpy.SpatialReference(REPROJECTION_ID))

	geoprocessing_log.info("Writing out geojson file at {0:s}".format(outfile))
	arcpy.FeaturesToJSON_conversion(reprojected, outfile, geoJSON="GEOJSON")  # export GeoJSON with ArcGIS Pro

	return  #  skip the code below for now, but retain it for legacy purposes for now. Can probably delete after August 2016. It was replaced by the line above

	ogr.UseExceptions()

	geoprocessing_log.debug("Opening FGDB")
	file_gdb_driver = ogr.GetDriverByName("OpenFileGDB")
	new_gdb, new_layer_name = os.path.split(reprojected)
	gdb = file_gdb_driver.Open(new_gdb, 0)

	geojson_driver = ogr.GetDriverByName("GeoJSON")
	geojson = geojson_driver.CreateDataSource(outfile)

	geoprocessing_log.info("Writing out geojson file at {0:s}".format(new_layer_name))
	layer = gdb.GetLayer(new_layer_name)
	geojson.CopyLayer(layer, layer_name, options=["COORDINATE_PRECISION=4",])
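
A usage sketch for file_gdb_layer_to_geojson, assuming REPROJECTION_ID and the temp helper module are defined as in the source project; the geodatabase and output path are hypothetical.

file_gdb_layer_to_geojson(geodatabase=r"C:\data\results.gdb",
                          layer_name="suitable_areas",
                          outfile=r"C:\data\exports\suitable_areas.geojson")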
Example #8
def make_extent_from_dem(dem, output_location):
	arcpy.CheckOutExtension("Spatial")
	environments = store_environments(["mask", "extent", "outputCoordinateSystem"])

	try:
		temp_raster_filename = generate_gdb_filename(scratch=True)

		dem_properties = arcpy.Describe(dem)
		arcpy.env.outputCoordinateSystem = dem_properties.spatialReference  # set the spatial reference environment variable so that the constant raster gets created properly

		geoprocessing_log.info("Creating Constant Raster")
		arcpy.env.mask = dem
		raster = arcpy.sa.CreateConstantRaster(constant_value=1, data_type="INTEGER", cell_size=10, extent=dem)

		geoprocessing_log.info("Saving to output filename")
		print(temp_raster_filename)
		raster.save(temp_raster_filename)

		geoprocessing_log.info("Converting Raster to Polygon")
		arcpy.RasterToPolygon_conversion(temp_raster_filename, output_location, simplify=False, raster_field="Value")

		#arcpy.Delete_management(temp_raster_filename)

	finally:
		arcpy.CheckInExtension("Spatial")
		reset_environments(environments)
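
A minimal sketch of generating an extent polygon from a DEM with make_extent_from_dem; the paths are hypothetical and a Spatial Analyst license is assumed.

make_extent_from_dem(dem=r"C:\data\elevation.gdb\dem_10m",
                     output_location=r"C:\data\analysis.gdb\dem_extent_poly")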
Example #9
def remove_non_compact_polys(feature_layer, area_length_ratio=1):

    desc = arcpy.Describe(feature_layer)
    if desc.dataType != "FeatureLayer":
        feature_class = feature_layer
        feature_layer = "working_layer"
        delete_layer = True
        arcpy.MakeFeatureLayer_management(feature_class, feature_layer)
    else:
        delete_layer = False

    ratio_field = "area_perimeter_ratio"
    arcpy.AddField_management(feature_layer, ratio_field, "DOUBLE")

    arcpy.CalculateField_management(feature_layer, ratio_field,
                                    "!Shape_Area!/!Shape_Length!",
                                    "PYTHON_9.3")

    # keep only polygons whose area/perimeter ratio exceeds the threshold (compact patches)
    arcpy.SelectLayerByAttribute_management(
        feature_layer,
        "NEW_SELECTION",
        where_clause="{0:s} > {1:s}".format(ratio_field, str(area_length_ratio)))
    filtered_poly = generate_gdb_filename("filter_patches", scratch=False)
    arcpy.CopyFeatures_management(feature_layer, filtered_poly)

    if delete_layer:
        arcpy.Delete_management(feature_layer)

    return filtered_poly
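
A hedged example: passing a feature class path (rather than a feature layer) so remove_non_compact_polys builds and later deletes its own working layer; the path and ratio threshold are illustrative.

compact_patches = remove_non_compact_polys(r"C:\data\analysis.gdb\patches",
                                           area_length_ratio=4)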
Example #10
def remove_non_compact_polys(feature_layer, area_length_ratio=1):

	desc = arcpy.Describe(feature_layer)
	if desc.dataType != "FeatureLayer":
		feature_class = feature_layer
		feature_layer = "working_layer"
		delete_layer = True
		arcpy.MakeFeatureLayer_management(feature_class, feature_layer)
	else:
		delete_layer = False

	ratio_field = "area_perimeter_ratio"
	arcpy.AddField_management(feature_layer, ratio_field, "DOUBLE")

	arcpy.CalculateField_management(feature_layer, ratio_field, "!Shape_Area!/!Shape_Length!", "PYTHON_9.3")

	# keep only polygons whose area/perimeter ratio exceeds the threshold (compact patches)
	arcpy.SelectLayerByAttribute_management(feature_layer, "NEW_SELECTION", where_clause="{0:s} > {1:s}".format(ratio_field, str(area_length_ratio)))
	filtered_poly = generate_gdb_filename("filter_patches", scratch=False)
	arcpy.CopyFeatures_management(feature_layer, filtered_poly)

	if delete_layer:
		arcpy.Delete_management(feature_layer)

	return filtered_poly
Example #11
def process_local_slope(dem=None, slope=None, max_slope=30, mask=None, return_type="polygon", workspace=None):
	"""

	:param dem: The DEM to process
	:param slope: If slope is already processed, use this instead.
	:param max_slope: The maximum slope in degrees that will be considered suitable for building
	:param mask: A polygon or raster mask to use as the processing area (arcpy.env.mask/Analysis Mask environment)
	:param return_type: whether to return a polygon feature class or a raster. Default is "polygon", in which case the thresholded raster is converted to polygons automatically. Options are "polygon" or "raster"
	:return:
	"""

	if not dem and not slope:
		raise ValueError("Must provide either a slope raster or a DEM raster. Either parameter 'dem' or parameter 'slope' must be defined.")

	arcpy.CheckOutExtension("Spatial")

	if not slope:
		arcpy.env.mask = mask
		logger.info("Processing raster to slope")
		mask_raster = arcpy.sa.ExtractByMask(dem, mask)  # mask environment variable hasn't been working - force extraction
		slope_raster = arcpy.sa.Slope(mask_raster, output_measurement="DEGREE")
	else:
		slope_raster = arcpy.sa.ExtractByMask(slope, mask)  # mask environment variable hasn't been working - force extraction

	logger.info("Thresholding raster")
	threshold_raster = slope_raster < max_slope

	raster_name = generate_gdb_filename("slope_raster", gdb=workspace)

	logger.info("Saving raster to disk")
	threshold_raster.save(raster_name)

	arcpy.CheckInExtension("Spatial")

	if return_type.lower() == "polygon":

		logger.info("Converting to polygons")
		new_name = convert_and_filter_by_code(raster_name, filter_value=1)
		
		poly_name = generate_gdb_filename("slope_polygon", gdb=workspace)
		arcpy.CopyFeatures_management(new_name, poly_name)

		return poly_name
	elif return_type.lower() == "raster":
		return raster_name
	else:
		raise ValueError("Invalid parameter for return_type. Must be either \"raster\" or \"polygon\"")
Example #12
def get_centroids(feature=None, method="FEATURE_TO_POINT", dissolve=False, as_file=False, id_field=False):
	"""
		Given an input polygon, this function returns a list of arcpy.Point objects that represent the centroids

	:param feature: str location of a shapefile or feature class
	:param method: str indicating the method to use to obtain the centroid. Possible values are "FEATURE_TO_POINT"
		(default - more accurate) and "ATTRIBUTE" (faster, but error-prone)
	:param dissolve: boolean flag indicating whether or not to dissolve the input features before obtaining centroids
	:param as_file: boolean flag indicating whether to return the data as a file instead of a point list
	:param id_field: when included, means to pull ids into a tuple with the centroid from the specified field
	:return: list of arcpy.Point objects
	:raise:
	"""
	methods = ("FEATURE_TO_POINT", "ATTRIBUTE",)  # "MEAN_CENTER","MEDIAN_CENTER")

	if method not in methods:
		geoprocessing_log.warning("Centroid determination method is not in the set {0!s}".format(methods))
		return []

	if not feature:
		raise NameError("get_centroids requires a feature as input")

	if not check_type(feature, "Polygon"):
		geoprocessing_log.warning("Type of feature in get_centroids is not Polygon")
		return []

	if dissolve:  # should we pre-dissolve it?
		t_name = generate_gdb_filename("dissolved", gdb="in_memory")
		try:
			arcpy.Dissolve_management(feature, t_name, multi_part=True)
			feature = t_name
		except:
			geoprocessing_log.warning("Couldn't dissolve features first. Continuing anyway, but the results WILL be different than expected")

	if method == "ATTRIBUTE":
		points = centroid_attribute(feature, id_field=id_field)
		if as_file:
			if len(points) > 0:
				return_points = write_features_from_list(points, data_type="POINT", filename=None, spatial_reference=feature, write_ids=id_field)  # write_ids = id_field works because it just needs to set it to a non-false value
			else:
				return_points = None

	elif method == "FEATURE_TO_POINT":
		try:
			if as_file:
				return_points = centroid_feature_to_point(feature, as_file=True, id_field=id_field)
			else:
				points = centroid_feature_to_point(feature, id_field=id_field)
		except:
			err_str = traceback.format_exc()
			geoprocessing_log.warning("failed to obtain centroids using feature_to_point method. traceback follows:\n {0:s}".format(err_str))

	if as_file:
		return return_points
	else:
		return points
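
A sketch of retrieving centroids as (point, id) tuples with get_centroids, assuming the input polygons carry a parcel_id field; both the path and the field name are hypothetical.

centroids = get_centroids(feature=r"C:\data\town.gdb\parcels",
                          method="FEATURE_TO_POINT",
                          id_field="parcel_id")
for point, parcel_id in centroids:
    print(parcel_id, point.X, point.Y)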
Example #13
def centroid_near_distance(feature_class, near_feature, id_field, search_radius=1000):
	"""
		Adaptation of centroid distance code from code library to do a more basic operation by simply getting the centroid of each polygon,
		and then doing the same for the near features
	"""

	if not feature_class or not near_feature:
		raise ValueError("missing the feature class or the near feature - both arguments must be defined!")

	centroids = geometry.get_centroids(feature_class, dissolve=False, id_field=id_field)  # merge, don't append

	if not centroids:
		processing_log.info("No centroids generated - something probably went wrong")
		return False

	processing_log.info("first centroids retrieved")

	temp_filename = arcpy.CreateScratchName("temp", workspace=r"C:\Users\dsx.AD3\Documents\ArcGIS\scratch.gdb")
	processing_log.info("{0:s}".format(temp_filename))
	point_file = geometry.write_features_from_list(centroids, "POINT", filename=temp.generate_gdb_filename(), spatial_reference=feature_class, write_ids=True)
	processing_log.info("first centroids written")

	near_centroid = geometry.get_centroids(near_feature, dissolve=False)  # merge, don't append

	processing_log.info("second centroids retrieved")
	if not near_centroid:
		processing_log.info("No centroids generated for near feature- something probably went wrong")
		return False

	near_point_file = geometry.write_features_from_list(near_centroid, "POINT", spatial_reference=near_feature)
	processing_log.info("second centroids written")

	processing_log.info("Point File located at {0!s}".format(point_file))  # change back to info
	out_table = temp.generate_gdb_filename("out_table", return_full=True)
	processing_log.info("Output Table will be located at {0!s}".format(out_table))  # change back to info

	try:
		arcpy.PointDistance_analysis(in_features=point_file, near_features=near_point_file, out_table=out_table, search_radius=search_radius)
	except:
		processing_log.error("Couldn't run PointDistance - {0!s}".format(traceback.format_exc()))
		return False

	return {"table": out_table, "point_file": point_file, }  # start just returning a dictionary instead of positional values
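
A hedged call showing the dictionary centroid_near_distance returns on success; the feature class paths and id field are hypothetical.

result = centroid_near_distance(feature_class=r"C:\data\town.gdb\parcels",
                                near_feature=r"C:\data\town.gdb\schools",
                                id_field="parcel_id",
                                search_radius=1000)
if result:
    print(result["table"], result["point_file"])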
Example #14
def fast_dissolve(features, raise_error=True, base_name="dissolved"):
	out_name = generate_gdb_filename(base_name)
	try:
		arcpy.Dissolve_management(features, out_name)
	except:
		if raise_error is False:
			geoprocessing_log.warning("Couldn't dissolve. Returning non-dissolved layer")
			return features
		else:
			raise
	return out_name
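
A short sketch of fast_dissolve: dissolve a feature class but fall back to the undissolved input if Dissolve fails, which is what raise_error=False allows; the path is hypothetical.

dissolved = fast_dissolve(r"C:\data\analysis.gdb\suitable_areas", raise_error=False)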
Example #15
def fast_dissolve(features, raise_error=True, base_name="dissolved"):
    out_name = generate_gdb_filename(base_name)
    try:
        arcpy.Dissolve_management(features, out_name)
    except:
        if raise_error is False:
            geoprocessing_log.warning(
                "Couldn't dissolve. Returning non-dissolved layer")
            return features
        else:
            raise
    return out_name
def temp_correction(correction):
	"""
		A one time function to correct an object - can be deleted
	:return:
	"""
	processing_log.warning("{0!s}".format(correction.parcels.layer))
	if correction.parcels.layer is None or correction.parcels.layer == "":
		correction.parcels.layer = generate_gdb_filename(correction.region.parcels_name, gdb=correction.layers)
		processing_log.warning("{0:s}".format(correction.parcels.layer))
		arcpy.CopyFeatures_management(correction.region.parcels, correction.parcels.layer)
		correction.parcels.save()

	processing_log.warning("Done setting up")
Example #17
def centroid_feature_to_point(feature, as_file=False, id_field=None):
    """
	for internal use only

	:param feature: str feature class
	:param as_file: boolean indicates whether to return the arcpy file instead of returning the point array
	:param id_field: when included, means to pull ids into a tuple with the centroid from the specified field - can't return ids
	:return: list containing arcpy.Point objects
	"""
    if as_file:
        t_name = generate_gdb_filename(
            "feature_to_point"
        )  # we don't want a memory file if we are returning the filename
    else:
        t_name = generate_gdb_filename("feature_to_point", gdb="in_memory")

    arcpy.FeatureToPoint_management(feature, t_name, "CENTROID")

    if as_file:  # if asfile, return the filename, otherwise, make and return the point_array
        return t_name

    curs = arcpy.SearchCursor(t_name)  # open up the output

    points = []
    for record in curs:
        shape = record.shape.getPart()

        if id_field:
            shape_id = record.getValue(id_field)  # get the shape's point
            item = (shape, shape_id)
        else:
            item = shape

        points.append(item)

    arcpy.Delete_management(t_name)  # clean up the in_memory workspace
    del curs

    return points
Example #18
def filter_small_patches(raster_dataset, patch_area=9000, area_length_ratio=4, filter_value=0):
	"""
		Given a boolean 0/1 raster, filters out patches that are either too small, or which are not compact
	:param raster_dataset: An ArcGIS compatible raster dataset
	:param patch_area: The minimum area to keep a patch, in square units of the raster's projection. Patches smaller than this will be removed.
	:param area_length_ratio: The ratio of area to perimeter (Area/Perimeter) under which patches will be removed.
	:param filter_value: The value in the grid which will be kept - other grid features will be removed.
	:return: Feature class converted from raster with patches removed when they don't match the filter_value, or when they are smaller than patch_area, or when their area to perimeter ratio is smaller than specified.
	"""
	# confirm we have a raster layer
	desc = arcpy.Describe(raster_dataset)
	if desc.dataType != "RasterDataset":
		raise TypeError("parameter raster_layer must be of type RasterDataset")

	del desc

	# convert raster to polygon
	raster_poly = generate_gdb_filename("fil", scratch=True)
	arcpy.RasterToPolygon_conversion(raster_dataset, raster_poly, simplify=False, raster_field="Value")

	# remove polygons that we're not looking at (gridcode != filter_value)
	working_layer = "working_layer"
	arcpy.MakeFeatureLayer_management(raster_poly, working_layer, where_clause="gridcode = {0:s}".format(str(filter_value)))  # load a feature layer and remove the polygons we're not interested in in a single step

	# remove polygons that are too small (smaller than patch_area)
	arcpy.SelectLayerByAttribute_management(working_layer, "NEW_SELECTION", where_clause="Shape_Area > {0:s}".format(str(patch_area)))

	# export to new layer for passing off to remove_non_compact_polys
	first_filtered_poly = generate_gdb_filename("filter_patches", scratch=True)
	arcpy.CopyFeatures_management(working_layer, first_filtered_poly)
	arcpy.Delete_management(working_layer)  # delete it, then create a new layer with the same name to pass to the next function

	arcpy.MakeFeatureLayer_management(first_filtered_poly, working_layer)

	# run remove_non_compact_polys to remove polygons that aren't compact
	filtered_features = remove_non_compact_polys(working_layer, area_length_ratio=area_length_ratio)

	# return the polygons
	return filtered_features
Example #19
def make_road_mask(tiger_lines, census_places, search_area):

    # clip tiger lines to search area
    tiger_clip = generate_gdb_filename("tiger_clip", scratch=True)

    geoprocessing_log.info("Clipping to analysis area")
    arcpy.Clip_analysis(tiger_lines, search_area, tiger_clip)

    geoprocessing_log.info("Adding and calculating roads buffer field")
    # add field to tiger lines
    field_name = "buffer_distance"
    arcpy.AddField_management(tiger_clip, field_name, "LONG")

    # calculate buffer by distances
    code_block = """def calc_distance(r_type):
		if r_type == "S1100":  # highways, or so
			return 128  # 3x cell size (30m) * sqrt(2) to capture diagonals
		elif r_type == "S1200":  # major roadways
			return 85  # 2x cell size (30m) * sqrt(2) to capture diagonals
		else:  #other
			return 43  # cell size (30m) * sqrt(2) to capture diagonals
	"""
    arcpy.CalculateField_management(tiger_clip, field_name,
                                    "calc_distance(!MTFCC!)", "PYTHON_9.3",
                                    code_block)

    # run buffer on lines
    geoprocessing_log.info("Buffering roads")
    buffered_roads = generate_gdb_filename("buffered_roads", scratch=True)
    arcpy.Buffer_analysis(tiger_clip, buffered_roads, field_name)

    # erase using census lines
    geoprocessing_log.info(
        "Removing census places from mask (adding back to banned areas")
    road_mask = generate_gdb_filename("road_mask")
    arcpy.Erase_analysis(buffered_roads, census_places, road_mask)

    # return mask
    return road_mask
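
A hedged example of building the road mask from TIGER/Line roads and Census places; all three inputs are hypothetical feature classes covering the same analysis area.

road_mask = make_road_mask(tiger_lines=r"C:\data\census.gdb\tiger_roads",
                           census_places=r"C:\data\census.gdb\places",
                           search_area=r"C:\data\analysis.gdb\study_area")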
Example #20
def centroid_feature_to_point(feature, as_file=False, id_field=None):
	"""
	for internal use only

	:param feature: str feature class
	:param as_file: boolean indicates whether to return the arcpy file instead of returning the point array
	:param id_field: when included, means to pull ids into a tuple with the centroid from the specified field - can't return ids
	:return: list containing arcpy.Point objects
	"""
	if as_file:
		t_name = generate_gdb_filename("feature_to_point")  # we don't want a memory file if we are returning the filename
	else:
		t_name = generate_gdb_filename("feature_to_point", gdb="in_memory")

	arcpy.FeatureToPoint_management(feature, t_name, "CENTROID")

	if as_file:  # if asfile, return the filename, otherwise, make and return the point_array
		return t_name

	curs = arcpy.SearchCursor(t_name)  # open up the output

	points = []
	for record in curs:
		shape = record.shape.getPart()

		if id_field:
			shape_id = record.getValue(id_field)  # get the shape's point
			item = (shape, shape_id)
		else:
			item = shape

		points.append(item)

	arcpy.Delete_management(t_name)  # clean up the in_memory workspace
	del curs

	return points
Example #21
def convert_and_filter_by_code(raster_dataset, filter_value=0):
    """
		Given a raster and a grid value, converts the raster to polygons and discards all resulting polygons whose grid value is not equal to filter_value

		TODO: This could be faster or more resource efficient if we use raster calculator to set all non-interesting pixels to Null first, then they just don't get converted?
	:param raster_dataset: A raster dataset on disk
	:param filter_value: the value to keep - Polygons resulting from all other values will be discarded.
	:return: polygon feature class
	"""

    arcpy.CheckOutExtension("Spatial")
    null_raster = arcpy.sa.SetNull(raster_dataset,
                                   raster_dataset,
                                   where_clause="Value <> {0:s}".format(
                                       str(filter_value)))
    raster_dataset = generate_gdb_filename("raster")
    null_raster.save(raster_dataset)

    raster_poly = generate_gdb_filename("fil", scratch=True)
    arcpy.RasterToPolygon_conversion(null_raster,
                                     raster_poly,
                                     simplify=False,
                                     raster_field="Value")

    # remove polygons that we're not looking at (gridcode != filter_value)
    working_layer = "working_layer"
    arcpy.MakeFeatureLayer_management(
        raster_poly,
        working_layer,
        where_clause="gridcode = {0:s}".format(str(filter_value))
    )  # load a feature layer and remove the polygons we're not interested in in a single step

    final_poly = generate_gdb_filename("polygon")
    arcpy.CopyFeatures_management(working_layer, final_poly)

    arcpy.CheckInExtension("Spatial")
    return final_poly
Example #22
def get_area(feature_class):
    '''returns the total area of a feature class'''

    temp_fc = generate_gdb_filename(return_full=True)
    arcpy.CalculateAreas_stats(feature_class, temp_fc)
    area_field = "F_AREA"  # this is hardcoded, but now guaranteed because it is added to a copy and the field is updated if it already exists

    area_curs = arcpy.SearchCursor(temp_fc)
    total_area = 0
    for row in area_curs:
        total_area += row.getValue(area_field)
    del row
    del area_curs

    return total_area
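
A quick sketch of summing the area of a feature class with get_area; the path is hypothetical and the result is in the square units of the feature class's projection.

total_area = get_area(r"C:\data\analysis.gdb\suitable_areas")
print("Total area: {0} square units".format(total_area))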
Example #23
def get_area(feature_class):
	'''returns the total area of a feature class'''

	temp_fc = generate_gdb_filename(return_full=True)
	arcpy.CalculateAreas_stats(feature_class, temp_fc)
	area_field = "F_AREA" # this is hardcoded, but now guaranteed because it is added to a copy and the field is updated if it already exists

	area_curs = arcpy.SearchCursor(temp_fc)
	total_area = 0
	for row in area_curs:
		total_area += row.getValue(area_field)
	del row
	del area_curs

	return total_area
Example #24
def make_road_mask(tiger_lines, census_places, search_area):

    # clip tiger lines to search area
    tiger_clip = generate_gdb_filename("tiger_clip", scratch=True)

    geoprocessing_log.info("Clipping to analysis area")
    arcpy.Clip_analysis(tiger_lines, search_area, tiger_clip)

    geoprocessing_log.info("Adding and calculating roads buffer field")
    # add field to tiger lines
    field_name = "buffer_distance"
    arcpy.AddField_management(tiger_clip, field_name, "LONG")

    # calculate buffer by distances
    code_block = """def calc_distance(r_type):
		if r_type == "S1100":  # highways, or so
			return 128  # 3x cell size (30m) * sqrt(2) to capture diagonals
		elif r_type == "S1200":  # major roadways
			return 85  # 2x cell size (30m) * sqrt(2) to capture diagonals
		else:  #other
			return 43  # cell size (30m) * sqrt(2) to capture diagonals
	"""
    arcpy.CalculateField_management(tiger_clip, field_name, "calc_distance(!MTFCC!)", "PYTHON_9.3", code_block)

    # run buffer on lines
    geoprocessing_log.info("Buffering roads")
    buffered_roads = generate_gdb_filename("buffered_roads", scratch=True)
    arcpy.Buffer_analysis(tiger_clip, buffered_roads, field_name)

    # erase using census lines
    geoprocessing_log.info("Removing census places from mask (adding back to banned areas)")
    road_mask = generate_gdb_filename("road_mask")
    arcpy.Erase_analysis(buffered_roads, census_places, road_mask)

    # return mask
    return road_mask
Example #25
def road_distance(roads_layer, max_distance, roads_layer_where_clause, workspace):
	"""
		Given a roads layer, it loads the roads specified in the where clause, then buffers the roads to max_distance, returning the buffered feature class
	:param roads_layer:
	:param max_distance:
	:param roads_layer_where_clause:
	:param workspace:
	:return:
	"""


	feature_layer = "roads_layer"
	arcpy.MakeFeatureLayer_management(roads_layer, feature_layer, roads_layer_where_clause)

	roads_buffered = generate_gdb_filename("roads_buffer", gdb=workspace)
	arcpy.Buffer_analysis(feature_layer, roads_buffered, max_distance)

	arcpy.Delete_management(feature_layer)

	return roads_buffered
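
A hedged call to road_distance that buffers only major roads, assuming an MTFCC field as in the TIGER data used elsewhere in these examples; the where clause and distance are illustrative.

near_major_roads = road_distance(roads_layer=r"C:\data\census.gdb\tiger_roads",
                                 max_distance="400 Meters",
                                 roads_layer_where_clause="MTFCC = 'S1200'",
                                 workspace=r"C:\data\analysis.gdb")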
Example #26
def road_distance(roads_layer, max_distance, roads_layer_where_clause,
                  workspace):
    """
		Given a roads layer, it loads the roads specified in the where clause, then buffers the roads to max_distance, returning the buffered feature class
	:param roads_layer:
	:param max_distance:
	:param roads_layer_where_clause:
	:param workspace:
	:return:
	"""

    feature_layer = "roads_layer"
    arcpy.MakeFeatureLayer_management(roads_layer, feature_layer,
                                      roads_layer_where_clause)

    roads_buffered = generate_gdb_filename("roads_buffer", gdb=workspace)
    arcpy.Buffer_analysis(feature_layer, roads_buffered, max_distance)

    arcpy.Delete_management(feature_layer)

    return roads_buffered
Example #27
def merge(existing_areas, change_areas, workspace, method="ERASE"):
	"""

	:param existing_areas: The existing valid areas (as a feature class)
	:param change_areas: The areas to modify - they can either be remaining suitable areas (INTERSECT) or areas to remove (ERASE)
	:param method: the method to use for interacting the two layers, ERASE or INTERSECT. ERASE is default and removes areas from existing_areas when they are present in change_areas
	:return:
	"""

	if method not in ("ERASE", "INTERSECT"):
		raise ValueError("argument 'method' to merge function is invalid. Must be either 'ERASE' or 'INTERSECT'")

	new_features = generate_gdb_filename("suitable_areas", return_full=True, gdb=workspace)
	if method == "ERASE":
		geoprocessing_log.info("Erasing features")
		arcpy.Erase_analysis(existing_areas, change_areas, new_features)
	elif method == "INTERSECT":
		geoprocessing_log.info("Intersecting features")
		arcpy.Intersect_analysis([existing_areas, change_areas], new_features)
	elif method == "UNION":  # note: unreachable unless "UNION" is added to the validation check above
		geoprocessing_log.info("Unioning features")
		arcpy.Union_analysis([existing_areas, change_areas], new_features)  # existing areas should be a list already

	return new_features
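
A sketch of removing newly unsuitable areas from an existing suitability layer with the default ERASE method; the paths are hypothetical.

updated_areas = merge(existing_areas=r"C:\data\analysis.gdb\suitable_areas",
                      change_areas=r"C:\data\analysis.gdb\flood_zones",
                      workspace=r"C:\data\analysis.gdb",
                      method="ERASE")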
Example #28
def centroid_near_distance(feature_class,
                           near_feature,
                           id_field,
                           search_radius=1000):
    """
		Adaptation of centroid distance code from code library to do a more basic operation by simply getting the centroid of each polygon,
		and then doing the same for the near features
	"""

    if not feature_class or not near_feature:
        raise ValueError(
            "missing the feature class or the near feature - both arguments must be defined!"
        )

    centroids = geometry.get_centroids(
        feature_class, dissolve=False,
        id_field=id_field)  # merge, don't append

    if not centroids:
        processing_log.info(
            "No centroids generated - something probably went wrong")
        return False

    processing_log.info("first centroids retrieved")

    temp_filename = arcpy.CreateScratchName(
        "temp", workspace=r"C:\Users\dsx.AD3\Documents\ArcGIS\scratch.gdb")
    processing_log.info("{0:s}".format(temp_filename))
    point_file = geometry.write_features_from_list(
        centroids,
        "POINT",
        filename=temp.generate_gdb_filename(),
        spatial_reference=feature_class,
        write_ids=True)
    processing_log.info("first centroids written")

    near_centroid = geometry.get_centroids(
        near_feature, dissolve=False)  # merge, don't append

    processing_log.info("second centroids retrieved")
    if not near_centroid:
        processing_log.info(
            "No centroids generated for near feature- something probably went wrong"
        )
        return False

    near_point_file = geometry.write_features_from_list(
        near_centroid, "POINT", spatial_reference=near_feature)
    processing_log.info("second centroids written")

    processing_log.info("Point File located at {0!s}".format(
        point_file))  # change back to info
    out_table = temp.generate_gdb_filename("out_table", return_full=True)
    processing_log.info("Output Table will be located at {0!s}".format(
        out_table))  # change back to info

    try:
        arcpy.PointDistance_analysis(in_features=point_file,
                                     near_features=near_point_file,
                                     out_table=out_table,
                                     search_radius=search_radius)
    except:
        processing_log.error("Couldn't run PointDistance - {0!s}".format(
            traceback.format_exc()))
        return False

    return {
        "table": out_table,
        "point_file": point_file,
    }  # start just returning a dictionary instead of positional values
Example #29
def get_centroids(feature=None,
                  method="FEATURE_TO_POINT",
                  dissolve=False,
                  as_file=False,
                  id_field=False):
    """
		Given an input polygon, this function returns a list of arcpy.Point objects that represent the centroids

	:param feature: str location of a shapefile or feature class
	:param method: str indicating the method to use to obtain the centroid. Possible values are "FEATURE_TO_POINT"
		(default - more accurate) and "ATTRIBUTE" (faster, but error-prone)
	:param dissolve: boolean flag indicating whether or not to dissolve the input features before obtaining centroids
	:param as_file: boolean flag indicating whether to return the data as a file instead of a point list
	:param id_field: when included, means to pull ids into a tuple with the centroid from the specified field
	:return: list of arcpy.Point objects
	:raise:
	"""
    methods = (
        "FEATURE_TO_POINT",
        "ATTRIBUTE",
    )  # "MEAN_CENTER","MEDIAN_CENTER")

    if method not in methods:
        geoprocessing_log.warning(
            "Centroid determination method is not in the set {0:s}".format(
                methods))
        return []

    if not feature:
        raise NameError("get_centroids requires a feature as input")

    if not check_type(feature, "Polygon"):
        geoprocessing_log.warning(
            "Type of feature in get_centroids is not Polygon")
        return []

    if dissolve:  # should we pre-dissolve it?
        t_name = generate_gdb_filename("dissolved", gdb="in_memory")
        try:
            arcpy.Dissolve_management(feature, t_name, multi_part=True)
            feature = t_name
        except:
            geoprocessing_log.warning(
                "Couldn't dissolve features first. Continuing anyway, but the results WILL be different than expected"
            )

    if method == "ATTRIBUTE":
        points = centroid_attribute(feature, id_field=id_field)
        if as_file:
            if len(points) > 0:
                return_points = write_features_from_list(
                    points,
                    data_type="POINT",
                    filename=None,
                    spatial_reference=feature,
                    write_ids=id_field
                )  # write_ids = id_field works because it just needs to set it to a non-false value
            else:
                return_points = None

    elif method == "FEATURE_TO_POINT":
        try:
            if as_file:
                return_points = centroid_feature_to_point(feature,
                                                          as_file=True,
                                                          id_field=id_field)
            else:
                points = centroid_feature_to_point(feature, id_field=id_field)
        except:
            err_str = traceback.format_exc()
            geoprocessing_log.warning(
                "failed to obtain centroids using feature_to_point method. traceback follows:\n {0:s}"
                .format(err_str))

    if as_file:
        return return_points
    else:
        return points