def convert_raster_2_polygon():
    output_urb = r"V:"
    output_hdc = r"V:"
    arcpy.RasterToPolygon_conversion(urb_Clst_Grid, output_urb, "NO_SIMPLIFY",
                                     "VALUE")
    arcpy.RasterToPolygon_conversion(hdens_Clst_Grid, output_hdc,
                                     "NO_SIMPLIFY", "VALUE")
Example 2
def generate_binned_shapefiles(inputRaster, num_bins_list):
    intRas = Int(Raster(inputRaster))
    intPolygon = r'C:\user\Documents\workspace\heatmap-dev\anonCrimeHeatmap\int_simplified.shp'
    intPolygon2 = r'C:\user\Documents\workspace\heatmap-dev\anonCrimeHeatmap\int_notsimplified.shp'
    arcpy.RasterToPolygon_conversion(intRas, intPolygon, 'SIMPLIFY',
                                     'VALUE')  # do the conversion
    arcpy.RasterToPolygon_conversion(intRas, intPolygon2, 'NO_SIMPLIFY',
                                     'VALUE')  # do the conversion
    rasterMax = arcpy.GetRasterProperties_management(intRas, 'MAXIMUM')
    rasterMax = float(rasterMax.getOutput(0))

    for num_bins in num_bins_list:
        remapRange = RemapRange(
            get_remap_table_for_remap_range(num_bins, rasterMax))
        binnedRas = Reclassify(intRas, 'VALUE', remapRange, 'NODATA')
        # location of temporary polygon
        tempPolygon = r'C:\user\Documents\workspace\heatmap-dev\anonCrimeHeatmap\\' + str(
            num_bins) + '_bins_simplified.shp'
        arcpy.RasterToPolygon_conversion(binnedRas, tempPolygon, 'SIMPLIFY',
                                         'VALUE')  # do the conversion
        tempPolygon2 = r'C:\user\Documents\workspace\heatmap-dev\anonCrimeHeatmap\\' + str(
            num_bins) + '_bins_notsimplified.shp'
        arcpy.RasterToPolygon_conversion(binnedRas, tempPolygon2,
                                         'NO_SIMPLIFY',
                                         'VALUE')  # do the conversion
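The helper get_remap_table_for_remap_range is not shown in this example. A minimal sketch of what it might return, assuming equal-width bins from 0 up to the raster maximum and that RemapRange expects [start, end, new_value] triples:

def get_remap_table_for_remap_range(num_bins, raster_max):
    # Hypothetical helper: build equal-width [start, end, new_value] entries
    # spanning 0..raster_max so the bins reclassify to codes 1..num_bins.
    width = float(raster_max) / num_bins
    return [[i * width, (i + 1) * width, i + 1] for i in range(num_bins)]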
Example 3
def hru1(path, intslope):
    import arcpy, os, re
    #from arcpy.sa import *
    from arcpy import env
    arcpy.env.workspace = path

    print(
        'converting landuse, soil, and slope rasters to polygons and delineating the HRU map'
    )
    # converting landuse, slope, and soil rasters to feature class and overlaying the created feature classes

    arcpy.RasterToPolygon_conversion("type_sol.tif", 'soil_poly',
                                     "NO_SIMPLIFY", "VALUE")
    arcpy.RasterToPolygon_conversion("occupation_sol.tif", 'LU_poly',
                                     "NO_SIMPLIFY", "VALUE")
    arcpy.RasterToPolygon_conversion(
        intslope, 'pente_poly', "NO_SIMPLIFY", "VALUE"
    )  # RasterToPolygon accepts only integer rasters

    # add new fields to the created polygons and copy the gridcode values into them.
    arcpy.AddField_management("soil_poly.shp", "soil_type", "SHORT", "", "",
                              "", "", "", "", "")
    arcpy.AddField_management("LU_poly.shp", "LU_type", "SHORT", "", "", "",
                              "", "", "", "")
    arcpy.AddField_management("pente_poly.shp", "slope", "FLOAT", "", "", 32,
                              "", "", "", "")

    arcpy.Delete_management("times.tif")
    arcpy.Delete_management("slope_in_deg.tif")

    # copy the created fields to new fields
    arcpy.CalculateField_management("soil_poly.shp", "soil_type", "!gridcode!",
                                    "PYTHON_9.3")
    arcpy.DeleteField_management("soil_poly.shp", "gridcode")
    arcpy.CalculateField_management("LU_poly.shp", "LU_type", "!gridcode!",
                                    "PYTHON_9.3")
    arcpy.DeleteField_management("LU_poly.shp", "gridcode")
    arcpy.CalculateField_management("pente_poly.shp", "slope",
                                    "!gridcode!/100.0", "PYTHON_9.3")
    arcpy.DeleteField_management("pente_poly.shp", "gridcode")
    # overlay the soil type, land use, and slope polygons to create the HRU map
    arcpy.Intersect_analysis(["soil_poly.shp", "LU_poly.shp"], "HRU1", "ALL")
    arcpy.Intersect_analysis(["HRU1.shp", "pente_poly.shp"], "HRU2", "ALL")

    # delete intermediate layers and unnecessary fields from the HRU2 attribute table
    arcpy.Delete_management("HRU1.shp")
    arcpy.Delete_management("soil_poly.shp")
    arcpy.Delete_management("pente_poly.shp")
    arcpy.Delete_management("LU_poly.shp")
    arcpy.DeleteField_management("HRU2.shp", [
        "FID_HRU1", "FID_soil_p", "FID_LU_pol", "FID_pente_", "Id", "Id_1",
        "Id_12"
    ])
    print('done!')
def create_path_footprints(image_directory, burn_raster_path):
    '''
    Given a directory containing .tif images and a path to a constant-zero raster, this function adds 1 to every
    pixel of the burn raster where an image has meaningful data. Each image is converted to a shapefile in a
    temporary directory, the burn raster is updated from the resulting polygon geometry, and the temporary
    directory is then removed.
    :param image_directory: The directory containing the raster images
    :param burn_raster_path: The path to the constant-zero raster. Should be of an extent that contains all the paths.
    :return:
    '''
    temp_dir = tempfile.mkdtemp()

    for image in get_files_of_ext(image_directory, '.tif'):

        image_name = os.path.splitext(os.path.basename(image))[0]
        # con_raster = custom_nodata(bands_to_raster_obj(image, get_band_list(image))) for bsq images
        raster = Raster(image)
        mask_raster = Con((raster == 0) | (raster == 1), 1, 0)
        output_path = os.path.join(temp_dir, image_name + '.shp')
        arcpy.RasterToPolygon_conversion(mask_raster, output_path, "NO_SIMPLIFY")

        add_to_raster(burn_raster_path, output_path)

    shutil.rmtree(temp_dir)

    return
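Neither get_files_of_ext nor add_to_raster is defined in this snippet. A plausible stand-in for the first, assuming it simply lists the files in a directory by extension:

import os

def get_files_of_ext(directory, ext):
    # Hypothetical helper: return full paths of files whose names end with `ext`.
    return [os.path.join(directory, name) for name in os.listdir(directory)
            if name.lower().endswith(ext.lower())]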
Example 5
def main():
	# Setup script path and workspace folder
	outWorkspace = flmc.SetupWorkspace("FLM_CSR_output")
	arcpy.env.workspace = outWorkspace
	arcpy.env.overwriteOutput = True
	
	# Load arguments from file
	args = flmc.GetArgs("FLM_CSR_params.txt")
	
	# Tool arguments
	InShapefile = args[0].rstrip()
	InRaster = args[1].rstrip()
	ShrinkSize = args[2].rstrip()
	OutShapefile = args[3].rstrip()

	# Local variables:
	FLM_CSR_IsNull = outWorkspace+"\\FLM_CSR_IsNull.tif"
	FLM_CSR_SetNull = outWorkspace+"\\FLM_CSR_SetNull.tif"
	FLM_CSR_Shrink = outWorkspace+"\\FLM_CSR_Shrink.tif"
	FLM_CSR_RasterPoly = outWorkspace+"\\FLM_CSR_RasterPoly.shp"
	
	arcpy.gp.IsNull_sa(InRaster, FLM_CSR_IsNull)

	arcpy.gp.SetNull_sa(FLM_CSR_IsNull, "1", FLM_CSR_SetNull, "Value = 1")

	arcpy.gp.Shrink_sa(FLM_CSR_SetNull, FLM_CSR_Shrink, ShrinkSize, "1")

	arcpy.RasterToPolygon_conversion(FLM_CSR_Shrink, FLM_CSR_RasterPoly, simplify="NO_SIMPLIFY", raster_field="Value", create_multipart_features="SINGLE_OUTER_PART", max_vertices_per_feature="")

	arcpy.Clip_analysis(InShapefile, FLM_CSR_RasterPoly, OutShapefile, cluster_tolerance="0 Meters")
Example 6
def split_strahler(stream_area_fc, streams, out_area_fc):
    """This function splits up the NHDArea feature class, which does not
    start and stop polygons at confluences, by creating break points near the
    confluences to split up the polygons. Then, it adds the Strahler value from
    the stream centerline."""
    # 1) Generate euclidean allocation raster from streams (use OBJECTID)
    # 2) Convert euclidean allocation raster to polygons
    # 3) Join allocation polygons "gridcode" to streams "OBJECTID" so that
    #    Strahler value is attached to allocation polygon
    # 4) Use identity function to split up the StreamRiver polygons at the
    #    allocation polygon boundaries, and add the Strahler values
    old_workspace = env.workspace
    env.workspace = 'in_memory'
    cu.multi_msg(
        "Splitting stream area polygons between confluences and joining Strahler order to them..."
    )
    cu.multi_msg('next messages for testing')
    arcpy.CheckOutExtension('Spatial')
    cu.multi_msg('euc')
    euc = EucAllocation(streams, cell_size='50', source_field='OBJECTID')
    arcpy.CheckInExtension('Spatial')
    cu.multi_msg('conversion')
    arcpy.RasterToPolygon_conversion(euc, 'allocation_polys')
    stream_id_field = arcpy.ListFields(streams, 'Permanent_')[0].name
    cu.multi_msg('join')
    arcpy.JoinField_management('allocation_polys', 'grid_code', streams,
                               'OBJECTID',
                               ['Strahler', 'LengthKm', stream_id_field])
    cu.multi_msg('identity')
    arcpy.Identity_analysis(stream_area_fc, 'allocation_polys', out_area_fc)
    env.workspace = old_workspace
    cu.multi_msg("Splitting strema area polygons finished.")
Example 7
def rasterizeSinglebeam(points,
                        mb,
                        out="__rast",
                        number=6,
                        radius=18,
                        overWrite=False):
    import arcpy
    from arcpy.sa import *
    if overWrite:
        arcpy.env.overwriteOutput = True
    outIDW = Idw(points, "bottom_elevation", 3, 2,
                 RadiusVariable(number, radius))
    if mb:
        conRast = Con(outIDW, 1, '', '')
        poly = arcpy.RasterToPolygon_conversion(conRast, "_poly" + points,
                                                "SIMPLIFY", "")
        buff = arcpy.Buffer_analysis(poly, "buff_" + points, "-15 feet",
                                     "FULL", "", "", "")
        footprint = arcpy.Erase_analysis(buff, mb, "footprint_" + points, "")
        finalRast = arcpy.Clip_management(outIDW, "", "rast_" + points,
                                          footprint, "", "ClippingGeometry")
        arcpy.Delete_management(poly)
        arcpy.Delete_management(buff)
        arcpy.Delete_management(footprint)
    else:
        outIDW.save(arcpy.Describe(points).catalogPath + out)
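A hypothetical call, assuming a sounding point feature class with a bottom_elevation field and an existing multibeam coverage polygon in the current workspace:

rasterizeSinglebeam("sb_points_2019", mb="multibeam_coverage", overWrite=True)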
def get_stream_order(scratch_gdb, stream_burn_dem, seg_network_a, DEM_orig,
                     FlowDir, net_raster):

    orderMethod = "STRAHLER"

    print("running Stream order")
    outStreamOrder = StreamOrder(net_raster, FlowDir, orderMethod)

    strord_path = scratch_gdb + "/streamord_out"
    outStreamOrder.save(strord_path)

    print("fixing dodgy first order streams")
    str_ras = Raster(strord_path)
    Cor_Str_Ord_b = Con(str_ras == 1, 1, str_ras - 1)

    Cor_Str_Ord = scratch_gdb + "/Cor_Str_Ord"
    Cor_Str_Ord_b.save(Cor_Str_Ord)

    max_val = arcpy.GetRasterProperties_management(Cor_Str_Ord, "MAXIMUM")
    int_max_val = int(max_val.getOutput(0)) + 1
    val_range = list(range(2, int_max_val))

    print("expand values to remove 1st order errors")
    str_ord_exp = Expand(Cor_Str_Ord, 1, val_range)

    str_ord_exp_path = (scratch_gdb + "/str_ord_exp")
    str_ord_exp.save(str_ord_exp_path)

    print("convert Raster to Polygon")
    str_ord_exp_poly = scratch_gdb + "/st_or_ex_poly"
    arcpy.RasterToPolygon_conversion(str_ord_exp_path, str_ord_exp_poly,
                                     "NO_SIMPLIFY", "Value")

    net_fields = [f.name for f in arcpy.ListFields(seg_network_a)]
    if "Str_order" in net_fields:
        arcpy.DeleteField_management(seg_network_a, "Str_order")
    if "gridcode" in net_fields:
        arcpy.DeleteField_management(seg_network_a, "gridcode")
    del net_fields

    print("join network and StrOrd Polygon fields")
    seg_network_b = scratch_gdb + "/seg_network_b"

    arcpy.SpatialJoin_analysis(seg_network_a, str_ord_exp_poly, seg_network_b,
                               "JOIN_ONE_TO_ONE", "KEEP_ALL", "",
                               "HAVE_THEIR_CENTER_IN")

    arcpy.AddField_management(seg_network_b, "Str_order", "SHORT")

    with arcpy.da.UpdateCursor(seg_network_b,
                               ["Str_order", "gridcode"]) as cursor:
        for row in cursor:
            row[0] = row[1]
            cursor.updateRow(row)
    del row
    del cursor

    arcpy.DeleteField_management(seg_network_b, "gridcode")

    return seg_network_b
Example 9
def estabFOVfootprint(DEM_raster_layer,Camera_point_layer,Smoothing_Tolerance):
    # Spatial Analyst Tools > Surface > Visibility
    arcpy.gp.Visibility_sa(DEM_raster_layer,Camera_point_layer,"in_memory\\rawVisRast",\
                           "","FREQUENCY","NODATA","0.00001201","FLAT_EARTH","0.13","","",\
                           "OFFSETA","","","AZIMUTH1","AZIMUTH2","VERT1","VERT2")

    # Spatial Analyst Tools > Generalization > Boundary Clean
    arcpy.gp.BoundaryClean_sa("in_memory\\rawVisRast","in_memory\\clnVisRast","ASCEND","TWO_WAY")
    arcpy.Delete_management("in_memory\\rawVisRast")

    # Conversion Tools > From Raster > Raster to Polygon
    arcpy.RasterToPolygon_conversion("in_memory\\clnVisRast","in_memory\\visPoly","NO_SIMPLIFY","")
    arcpy.Delete_management("in_memory\\clnVisRast")

    # Cartographic Tools > Generalization > Smooth Polygon
    arcpy.SmoothPolygon_cartography("in_memory\\visPoly","in_memory\\smthVisPoly","PAEK",\
                                     Smoothing_Tolerance,"NO_FIXED","NO_CHECK")
    arcpy.Delete_management("in_memory\\visPoly")

    # Analysis Tools > Overlay > Union
    arcpy.Union_analysis("in_memory\\smthVisPoly","in_memory\\uniVisPoly","ALL","","NO_GAPS")
    arcpy.Delete_management("in_memory\\smthVisPoly")

    # Data Management Tools > Generalization > Dissolve
    footprintFOVout = "in_memory\\whlVisPoly"
    arcpy.Dissolve_management("in_memory\\uniVisPoly",footprintFOVout,"","",\
                              "SINGLE_PART","DISSOLVE_LINES")
    arcpy.Delete_management("in_memory\\uniVisPoly")
    return footprintFOVout
Example 10
    def onClick(self):
        arcpy.env.workspace = r"C:\Users\s\Documents\Masters of Geospatial\GISP\Assignment2\GISdata"
        # reclassify NIDEM based on user input tide value
        # replace tide_height float with a parameter from Tide Height combo box

        reclass_values = "-2.601000 {0} 0;{0} 2.772000 1".format(tide_height)
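        # For example, with tide_height = 0.5 the remap string expands to
        # "-2.601000 0.5 0;0.5 2.772000 1": values from -2.601000 up to the tide
        # height reclassify to 0 (submerged) and values from the tide height up
        # to 2.772000 reclassify to 1 (exposed).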
        #   may need to replace output path with reference to workspace...
        if arcpy.Exists("NIDEM_reclass.tif"):
            arcpy.Delete_management("NIDEM_reclass.tif")

        arcpy.env.overwriteOutput = True  # temporarily allow overwriting of existing datasets, then disallow.
        arcpy.gp.Reclassify_sa(
            "NIDEM.tif", "VALUE", reclass_values,
            "C:/Users/s/Documents/Masters of Geospatial/GISP/Assignment2/GISdata/NIDEM_reclass.tif",
            "DATA")

        # RasterToPolygon(in_raster, out_polygon_features, {simplify}, {raster_field}, {create_multipart_features}, {max_vertices_per_feature})
        arcpy.RasterToPolygon_conversion("NIDEM_reclass.tif", "zones.shp",
                                         "NO_SIMPLIFY", "VALUE")

        # Select(in_features, out_feature_class, {where_clause})
        if arcpy.Exists("submerged_extent"):
            arcpy.Delete_management("submerged_extent")
        arcpy.Select_analysis("zones", "submerged_extent", '"gridcode" = 0')

        if arcpy.Exists("exposed_extent"):
            arcpy.Delete_management("exposed_extent")
        arcpy.Select_analysis("zones", "exposed_extent", '"gridcode" = 1')

        arcpy.env.overwriteOutput = False
Example 11
    def onClick(self):
        print "Vectorisation de la couche matricielle..."
        # Process: Raster to Polygon
        arcpy.RasterToPolygon_conversion("cleaned_raster", "vectorized_raster", "NO_SIMPLIFY", "Value")

        # Process: Simplify Polygon
        arcpy.SimplifyPolygon_cartography("vectorized_raster", "simplified_shoreline", "BEND_SIMPLIFY", "4 Meters",
                                          "10 SquareMeters", "NO_CHECK", "NO_KEEP")
        pythonaddins.MessageBox(
            "Vectorization and simplification of the raster layer complete!".decode('utf-8').encode('cp1252'),
            "Vectorization", "0")

        real_classes_gc = [[1, u'Vegetation saine'], [2, u'Vegetation fletrie'], [3, u'Sable sec'], [4, u'Sable humide'], [5, u'Laisse de mer']]

        with arcpy.da.UpdateCursor("simplified_shoreline", "gridcode") as gcc_cursor:
            for row in gcc_cursor:
                if row[0] > 0:
                    for i in Ref_TS_Classes:
                        for j in real_classes_gc:
                            if row[0] == i[0]:
                                temp = i[1]
                                if j[1] == temp:
                                    row[0] = j[0]
                                    gcc_cursor.updateRow(row)
                else:
                    pass
def Spot_Area_Selection(facilityName):
    '''
    Select hot or cold spot areas
    :param facilityName: feature class name of input
    :return:
    '''
    try:
        in_raster = ["Hotspot", "Coldspot"]
        Poly = []
        Dissolved = []
        for i in range(len(in_raster)):
            arcpy.RasterToPolygon_conversion(
                facilityName + in_raster[i],
                facilityName + in_raster[i] + "Poly", "NO_SIMPLIFY")
            arcpy.Dissolve_management(
                facilityName + in_raster[i] + "Poly",
                facilityName + in_raster[i] + "Dissolved")
            arcpy.Intersect_analysis(
                [facilityName + in_raster[i] + "Dissolved", "basemap_cleaned"],
                facilityName + in_raster[i] + "_District")
            # Delete feature classes in processing
            Poly.append(facilityName + in_raster[i] + "Poly")
            Dissolved.append(facilityName + in_raster[i] + "Dissolved")
            if arcpy.Exists(Poly[i]): arcpy.Delete_management(Poly[i])
            if arcpy.Exists(Dissolved[i]):
                arcpy.Delete_management(Dissolved[i])
            print("Complete Spot Area Selection!")
    except:
        print(arcpy.GetMessages())
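A hypothetical call, assuming rasters named <facilityName>Hotspot and <facilityName>Coldspot (e.g. HospitalHotspot) and a basemap_cleaned feature class already exist in the current workspace:

Spot_Area_Selection("Hospital")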
Example 13
 def run(self):
     self.e.load()
     print "Starting Creating Facets processing..."
     if arcpy.Exists(self.i.cmx):    # this forces overwriting the cmx table
         arcpy.Delete_management(self.i.cmx)
     arcpy.gp.ZonalStatisticsAsTable_sa(self.i.catchment, "HydroID", self.i.fac, self.i.cmx, "DATA", "MAXIMUM")
     # --- old code ---
     # arcpy.gp.MakeFeatureLayer(self.i.catchment, "lyr")
     # arcpy.AddJoin_management("lyr", "HydroID", self.i.cmx, "HydroID", "KEEP_ALL")
     # arcpy.SelectLayerByAttribute_management("lyr","NEW_SELECTION","cmx.MAX > cmx.COUNT")
     # arcpy.Clip_analysis(self.i.drl, "lyr", self.i.drl_c, "")
     # arcpy.SelectLayerByAttribute_management("lyr","NEW_SELECTION","cmx.MAX <= cmx.COUNT")     # ESSA PARTE NAO FOI FEITA
     # arcpy.Clip_analysis(self.i.lfp, "lyr", self.i.lfp_c, "")
     # --- old code ---
     self.process_c(self.i.drl, self.i.drl_c)
     # self.process_c(self.i.lfp, self.i.lfp_c)
     arcpy.Erase_analysis(self.i.drl, self.i.drl_c, self.i.lfp_ct)
     arcpy.SpatialJoin_analysis(target_features=self.i.lfp,join_features=self.i.lfp_ct,out_feature_class=self.i.lfp_c,join_operation="JOIN_ONE_TO_ONE",join_type="KEEP_COMMON",match_option="INTERSECT")
     arcpy.Merge_management(self.i.drl_c + ";" + self.i.lfp_c, self.i.fm_vec)
     arcpy.PolylineToRaster_conversion(self.i.fm_vec, "HydroID", self.i.fm_ras, "MAXIMUM_LENGTH", "NONE", self.e.cs)
     arcpy.gp.Divide_sa(self.i.fm_ras, self.i.fm_ras, self.i.fm_ras_d)
     arcpy.gp.Reclassify_sa(self.i.fm_ras_d, "VALUE", "1 NODATA;NODATA 0", self.i.fm_ras_r, "DATA")
     arcpy.gp.Combine_sa(self.i.fm_ras_r + ";" + self.i.cat, self.i.fm_ras_c)
     arcpy.RasterToPolygon_conversion(self.i.fm_ras_c, self.i.facets, "NO_SIMPLIFY", "VALUE")
     # --- old code ---
     # arcpy.RemoveJoin_management("lyr", "")
     # arcpy.SelectLayerByAttribute_management("lyr", "CLEAR_SELECTION", "")
     # arcpy.Delete_management("lyr")
     # --- old code ---
     print "Ending Creating Facets processing..."
Example 14
def filter_polygon(state, extract_comb):
    poly_1 = constants.out_dir + os.sep + state + os.sep + 'poly_1.shp'
    poly_2 = constants.out_dir + os.sep + state + os.sep + 'poly_2.shp'

    arcpy.RasterToPolygon_conversion(extract_comb, poly_1, "NO_SIMPLIFY",
                                     "VALUE")
    arcpy.CalculateAreas_stats(poly_1, poly_2)
Example 15
def create_boundary(NAME, meter):

    input_path = os.path.abspath("input_folder")
    output_path = os.path.abspath("output_folder/shp_files")

    if os.path.isdir("output_folder/"):
        rmtree('output_folder/')
        os.mkdir("output_folder/")
        os.mkdir("output_folder/shp_files")
    else:
        os.mkdir("output_folder/")
        os.mkdir("output_folder/shp_files")

    asc_file_path = os.path.join(input_path, NAME + ".tif")
    boundary = os.path.join(output_path, "{}_boundary.shp".format(NAME))
    bound_neg2m = os.path.join(output_path, "{}_bound_neg2m.shp".format(NAME))
    bound_rec = os.path.join(output_path, "{}_bound_rec.shp".format(NAME))

    reclassified = arcpy.sa.Reclassify(
        asc_file_path, "VALUE",
        "101.102745 105.156837 1;105.156837 108.940979 1")
    arcpy.RasterToPolygon_conversion(reclassified, boundary, "NO_SIMPLIFY")
    arcpy.Buffer_analysis(boundary, bound_neg2m, "{} Meters".format(meter),
                          "FULL", "ROUND", "NONE", "",
                          "PLANAR")  # require user input for meters
    arcpy.MinimumBoundingGeometry_management(bound_neg2m, bound_rec,
                                             "RECTANGLE_BY_AREA", "NONE", "",
                                             "NO_MBG_FIELDS")
Example 16
 def AreaAndAccuracy(inRaster, inPoly):
     rasterPoly = outRaw + "_poly.shp"
     rasterPolyarea = 0
     lyrPolyarea = 0
     testCount = int(arcpy.GetCount_management(testPoints).getOutput(0))
     arcpy.RasterToPolygon_conversion(inRaster, rasterPoly, "SIMPLIFY",
                                      "Value")
     with arcpy.da.SearchCursor(rasterPoly,
                                ("GRIDCODE", "SHAPE@AREA")) as cursor:
         for row in cursor:
             if row[0] == 1:
                 rasterPolyarea += row[1]
     with arcpy.da.SearchCursor(inPoly, "SHAPE@AREA") as cursor:
         for row in cursor:
             lyrPolyarea += row[0]
     targetAcres = rasterPolyarea / lyrPolyarea
     arcpy.MakeFeatureLayer_management(rasterPoly,
                                       "in_memory\\rasterPoly",
                                       """ "GRIDCODE" = 1 """)
     arcpy.MakeFeatureLayer_management(testPoints,
                                       "in_memory\\testPoints")
     arcpy.SelectLayerByLocation_management("in_memory\\testPoints",
                                            "WITHIN",
                                            "in_memory\\rasterPoly")
     selectCount = int(
         arcpy.GetCount_management("in_memory\\testPoints").getOutput(
             0))
     Accuracy = float(selectCount) / float(testCount)
     indexValue = float(Accuracy) / float(targetAcres)
     arcpy.AddMessage(
         os.path.basename(inRaster) + ": Accuracy = " +
         (str(Accuracy)[:5]) + ", Target Area Proportion = " +
         (str(targetAcres)[:5]) + ", Index = " + (str(indexValue)[:5]))
     arcpy.Delete_management(rasterPoly)
     return targetAcres, Accuracy, indexValue
Example 17
def make_extent_from_dem(dem, output_location):
	arcpy.CheckOutExtension("Spatial")
	environments = store_environments(["mask", "extent", "outputCoordinateSystem"])

	try:
		temp_raster_filename = generate_gdb_filename(scratch=True)

		dem_properties = arcpy.Describe(dem)
		arcpy.env.outputCoordinateSystem = dem_properties.spatialReference  # set the spatial reference environment variable so that the constant raster gets created properly

		geoprocessing_log.info("Creating Constant Raster")
		arcpy.env.mask = dem
		raster = arcpy.sa.CreateConstantRaster(constant_value=1, data_type="INTEGER", cell_size=10, extent=dem)

		geoprocessing_log.info("Saving to output filename")
		print(temp_raster_filename)
		raster.save(temp_raster_filename)

		geoprocessing_log.info("Converting Raster to Polygon")
		arcpy.RasterToPolygon_conversion(temp_raster_filename, output_location, simplify=False, raster_field="Value")

		#arcpy.Delete_management(temp_raster_filename)

	finally:
		arcpy.CheckInExtension("Spatial")
		reset_environments(environments)
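store_environments, generate_gdb_filename and reset_environments come from the author's own utility code and are not shown. Plausible stand-ins for the two environment helpers (assumptions, not the originals):

def store_environments(setting_names):
    # Capture the current values of the named arcpy.env settings.
    return {name: getattr(arcpy.env, name) for name in setting_names}

def reset_environments(saved_settings):
    # Restore arcpy.env settings captured by store_environments().
    for name, value in saved_settings.items():
        setattr(arcpy.env, name, value)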
def RastToShp(rastinfile, year_day, flag): 
    """Creates the output extent to be used in the rest of the script. Allows all images to be snapped to
    a constant grid, for direct matrix to matrix comparison"""
    if flag == 'N':
        arcrast = TMP + year_day + "_recasttif.tif"
        tmptif = arcpy.Raster(rastinfile)
        tmptif.save(arcrast) #This step can be necessary for Arc tools to work properly on .tif files
        arcpy.CalculateStatistics_management(arcrast)
    if flag == 'Y':
        arcrast = TMP + year_day + "_recasttif.tif"
        arcpy.CopyRaster_management(rastinfile, arcrast, '', '-9999', '-9999')
    arcpy.env.extent = arcrast
    clipshp_tmp = TMP + year_day + "_clipshp_tmp.shp"
    EXT_data = arcpy.sa.Con(arcrast, 1, 0, '"Value" > 0') #Choose only areas with data
    tmp_ext = TMP + year_day + "_clipshp_con.tif"
    EXT_data.save(tmp_ext)
    del EXT_data
    arcpy.RasterToPolygon_conversion(tmp_ext, clipshp_tmp, "NO_SIMPLIFY", "Value") #Convert to Shpfile
    clipshp = TMP + year_day + "_ext.shp"
    clipshp_tmp2 = TMP + year_day + "_ext_tmp2.shp"
    arcpy.Select_analysis(clipshp_tmp, clipshp_tmp2, '"GRIDCODE" = 1')
    arcpy.Buffer_analysis(clipshp_tmp2, clipshp, "-2 Kilometers") #Negative buffer to remove edge effects

    cliprast = TMP + year_day + '_ext_tif.tif'
    arcpy.FeatureToRaster_conversion(clipshp, "GRIDCODE", cliprast, 30) #Convert back to TIF file to use as output extent
    
    arcpy.Delete_management(clipshp)
    arcpy.Delete_management(clipshp_tmp)
    arcpy.Delete_management(clipshp_tmp2)
    arcpy.Delete_management(tmp_ext)
    
    return cliprast, arcrast
Example 19
def raster_to_polygon(feature,
                      raster,
                      scratch,
                      name=None,
                      raster_scaling=1000):
    """Convert raster to a features class, clip it to an input feature and
    calculate the area of each polygon. This new feature class is then 
    returned for calculating statistics. """
    # Build the export name. This option is largely included in case there are
    # unexpected naming conflicts with other functions.
    if name == '' or name is None:
        polygon = os.path.join(scratch, 'Raster_to_Polygon.shp')
    else:
        polygon = os.path.join(scratch, name)

    # Scale the subset DEM and temporarily save it to file. If it is not
    # saved, a VAT error is sometimes thrown when converting to polygon.
    subset = spatial.Int(raster * raster_scaling)

    converted = arcpy.RasterToPolygon_conversion(subset,
                                                 'in_memory\\rtp_result',
                                                 'NO_SIMPLIFY')
    arcpy.Clip_analysis(converted, feature, polygon)

    arcpy.Delete_management(subset)
    arcpy.Delete_management(converted)

    return polygon
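A hypothetical call (the clip feature class, raster, and scratch folder are placeholders; assumes arcpy and its Spatial Analyst extension are available):

dem = arcpy.sa.Raster(r"C:\data\dem_subset.tif")
poly = raster_to_polygon("glacier_outline.shp", dem, r"C:\scratch", name="dem_scaled.shp")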
Example 20
def getEncroachArea(encrType,encrVals,bufferDist,impVals,impType):
    arcpy.gp.Reclassify_sa(NLCD_MHB, "Value", encrVals, encrType + "_mhb.tif", "NODATA")
    arcpy.RasterToPolygon_conversion(encrType + "_mhb.tif", encrType + "_poly_mhb.shp", "SIMPLIFY", "VALUE")
    arcpy.PolygonToLine_management(encrType + "_poly_mhb.shp", encrType + "_edge_mhb.shp", "IGNORE_NEIGHBORS")
    arcpy.Buffer_analysis(encrType + "_edge_mhb.shp", encrType + "_" + bufferDist + "m_edge_mhb.shp", bufferDist + " Meters", "FULL", "ROUND", "ALL", "")
    arcpy.gp.ExtractByMask_sa(NLCD_MHB, encrType + "_" + bufferDist + "m_edge_mhb.shp", "nlcd_" + encrType + "_" + bufferDist + "m.tif")
    arcpy.gp.Reclassify_sa("nlcd_" + encrType + "_" + bufferDist + "m.tif", "VALUE", impVals, impType + "_encroach_" + bufferDist + "m.tif", "NODATA")
Example 21
def footprint(workspace="C:/workspace",
              static_value=1,
              mask="C:/data/maskpoly.jpg",
              outFootprint="footprint.shp",
              outIntRaster="IntRaster.jpg"):
    """ calculates the footprint of a raster input.
        Procedure: Creating a mask (of input raster), calculating an integer raster,
        converting integer raster to polygon, deleting integer raster
    """
    try:
        arcpy.CheckOutExtension("Spatial")
        arcpy.env.workspace = workspace
        #set mask
        arcpy.env.mask = mask
        #compute integer raster and save it
        outInt = Int(static_value)
        outInt.save(outIntRaster)
        #convert integer raster to polygon
        arcpy.RasterToPolygon_conversion(outInt, outFootprint, "NO_SIMPLIFY")
        #TODO delete processes and int raster

    except:
        print "Footprint (lokale Funktion -> point2map-library) konnte nicht berechnet werden."
        arcpy.AddWarning(
            "Footprint (lokale Funktion -> point2map-library) konnte nicht berechnet werden."
        )
Example 22
def CreateUnsuitableSlopes(in_dem, outUnsuitSlope):
    arcpy.ProjectRaster_management(in_dem, "project", spatial_ref)
    x = arcpy.Describe("project").meanCellWidth
    y = arcpy.Describe("project").meanCellHeight
    cellSizeXY = "{} {}".format(x, y)
    #print(cellSizeXY)
    arcpy.Resample_management("project", "resample", cellSizeXY, "CUBIC")
    arcpy.Delete_management("project")
    # Run slope generation
    slope_raster = arcpy.sa.Slope("resample", "PERCENT_RISE")
    outInt = arcpy.sa.Int(slope_raster)
    arcpy.Delete_management("resample")
    del slope_raster
    # Set parameters for raster Reclassification as a boolean
    max_slope = outInt.maximum
    myRemapRange = arcpy.sa.RemapRange([[0, 10, 0], [10, int(max_slope), 1]])
    ### Run reclassification
    outReclassRR = arcpy.sa.Reclassify(outInt, "Value", myRemapRange)  #
    del outInt
    ### Query Bad Slopes
    #arcpy.ProjectRaster_management(outInt, "project", spatial_ref)
    slope_unSuit = arcpy.sa.ExtractByAttributes(outReclassRR, 'Value = 1')
    del outReclassRR
    ## Convert to Vector
    arcpy.RasterToPolygon_conversion(slope_unSuit,
                                     outUnsuitSlope,
                                     raster_field="Value")
    del slope_unSuit
    # Clean up geometry
    arcpy.RepairGeometry_management(outUnsuitSlope)
Example 23
def createComplainRasterFeature(SelectSQL,InputComplainFeatures,POIFeatures,FinalResultFeature):
    logging.info("Process: 创建"+FinalResultFeature)
    if(arcpy.Exists(FinalResultFeature)):
        arcpy.Delete_management(FinalResultFeature, "FeatureClass")
    rmNo = random.randint(100000000,999999999)
    print rmNo
    # Process: Select
    print "Process: Select"
    logging.info("Process: Select")
    FeatureSelect=arcpy.Select_analysis(InputComplainFeatures, "in_memory/FeatureSelect"+repr(rmNo), SelectSQL)
    # Process: Point to Raster
    print FeatureSelect
    rowSear =  arcpy.SearchCursor(FeatureSelect)
    row = rowSear.next()
    if(row):
        print "Process: 点转栅格"
        logging.info("Process: 点转栅格")
        tempEnvironment0 = arcpy.env.extent
        arcpy.env.extent = "115 23 122 29"
        ResultRaster=arcpy.PointToRaster_conversion(FeatureSelect, "OBJECTID", "in_memory/ResultRaster"+repr(rmNo), "COUNT", "NONE", ".0018")
        arcpy.env.extent = tempEnvironment0
        # Process: Raster to Point
        print "Process: Raster to Point"
        logging.info("Process: Raster to Point")
        COMPLAIN_RASTER_POINTS=arcpy.RasterToPoint_conversion(ResultRaster, "in_memory/COMPLAIN_RASTER_POINTS"+repr(rmNo), "VALUE")
        print "Process: 空间连接"
        # Process: 空间连接
        COMPLAIN_POI_UNION=arcpy.SpatialJoin_analysis(COMPLAIN_RASTER_POINTS, POIFeatures, "in_memory/COMPLAIN_POI_UNION"+repr(rmNo), "JOIN_ONE_TO_ONE", "KEEP_ALL", "","CLOSEST", ".1 DecimalDegrees", "DISTANCE")
        print "Process: 点转栅格 (2)"
        logging.info("Process: 点转栅格 (2)")
        # Process: 点转栅格 (2)
        tempEnvironment0 = arcpy.env.extent
        arcpy.env.extent = "115 23 122 29"
        ResultRaster2=arcpy.PointToRaster_conversion(COMPLAIN_POI_UNION, "OBJECTID", "in_memory/ResultRaster2"+repr(rmNo), "MOST_FREQUENT", "NONE", ".0018")
        arcpy.env.extent = tempEnvironment0
        print "Process: 栅格转面"
        logging.info("Process: 栅格转面")
        # Process: 栅格转面
        ResultFeature=arcpy.RasterToPolygon_conversion(ResultRaster2, "in_memory/ResultFeature"+repr(rmNo), "NO_SIMPLIFY", "VALUE")
        print "Process: 空间连接 (2)"
        logging.info("Process: 空间连接 (2)")
        # Process: 空间连接 (2)
        ResultFeatureZj=arcpy.SpatialJoin_analysis(ResultFeature, COMPLAIN_POI_UNION, "in_memory/ResultFeatureZj"+repr(rmNo), "JOIN_ONE_TO_ONE", "KEEP_ALL", "", "INTERSECT", "", "")
        # Process: Spatial Join (3)
        arcpy.SpatialJoin_analysis(FeatureSelect, ResultFeatureZj, FinalResultFeature, "JOIN_ONE_TO_ONE", "KEEP_ALL", "", "INTERSECT", "", "")
        #arcpy.SpatialJoin_analysis(FeatureSelect, ResultFeatureZj, FinalResultFeature, "JOIN_ONE_TO_ONE", "KEEP_ALL", "TIME \"TIME\" true true false 8 Date 0 0 ,First,#,D:\\HasmbyGis\\Cache.gdb\\GIS_OBJECT_COMPLAIN_Select1,TIME,-1,-1;WORK_ORDER_ID \"WORK_ORDER_ID\" true true false 100 Text 0 0 ,First,#,D:\\HasmbyGis\\Cache.gdb\\GIS_OBJECT_COMPLAIN_Select1,WORK_ORDER_ID,-1,-1;DISTANCE \"DISTANCE\" true true false 8 Double 0 0 ,First,#,D:\\HasmbyGis\\Cache.gdb\\Complain20140509_SpatialJoin,DISTANCE,-1,-1;POINTID \"POINTID\" true true false 4 Long 0 0 ,First,#,D:\\HasmbyGis\\Cache.gdb\\Complain20140509_SpatialJoin,POINTID,-1,-1;GRID_CODE \"聚合数\" true true false 4 Long 0 0 ,First,#,D:\\HasmbyGis\\Cache.gdb\\Complain20140509_SpatialJoin,GRID_CODE,-1,-1;Name \"聚合地址\" true true false 160 Text 0 0 ,First,#,D:\\HasmbyGis\\Cache.gdb\\Complain20140509_SpatialJoin,Name,-1,-1;Ctype \"聚合地址类型(原始)\" true true false 64 Text 0 0 ,First,#,D:\\HasmbyGis\\Cache.gdb\\Complain20140509_SpatialJoin,Ctype,-1,-1;CnType \"聚合地址类型\" true true false 50 Text 0 0 ,First,#,D:\\HasmbyGis\\Cache.gdb\\Complain20140509_SpatialJoin,CnType,-1,-1;CITY \"地市\" true true false 32 Text 0 0 ,First,#,D:\\HasmbyGis\\Cache.gdb\\Complain20140509_SpatialJoin,CITY,-1,-1;COUNTY \"区县\" true true false 32 Text 0 0 ,First,#,D:\\HasmbyGis\\Cache.gdb\\Complain20140509_SpatialJoin,COUNTY,-1,-1;GRID \"GRID\" true true false 32 Text 0 0 ,First,#,D:\\HasmbyGis\\Cache.gdb\\Complain20140509_SpatialJoin,GRID,-1,-1;SGLON \"栅格POI经度\" true true false 8 Double 0 0 ,First,#,D:\\HasmbyGis\\Cache.gdb\\Complain20140509_SpatialJoin,SGLON,-1,-1;SGLAT \"栅格POI纬度\" true true false 8 Double 0 0 ,First,#,D:\\HasmbyGis\\Cache.gdb\\Complain20140509_SpatialJoin,SGLAT,-1,-1;CQ_REGION \"城区网格所属区域\" true true false 60 Text 0 0 ,First,#,D:\\HasmbyGis\\Cache.gdb\\Complain20140509_SpatialJoin,CQ_REGION,-1,-1;CQ_REGION_TYPE \"城区网格区域属性\" true true false 60 Text 0 0 ,First,#,D:\\HasmbyGis\\Cache.gdb\\Complain20140509_SpatialJoin,CQ_REGION_TYPE,-1,-1;TEST_ID \"测试网格ID\" true true false 10 Text 0 0 ,First,#,D:\\HasmbyGis\\Cache.gdb\\Complain20140509_SpatialJoin,TEST_ID,-1,-1;TEST_GRIDID \"测试网格编号\" true true false 20 Text 0 0 ,First,#,D:\\HasmbyGis\\Cache.gdb\\Complain20140509_SpatialJoin,TEST_GRIDID,-1,-1;TEST_CLASS \"测试网格类型\" true true false 10 Text 0 0 ,First,#,D:\\HasmbyGis\\Cache.gdb\\Complain20140509_SpatialJoin,TEST_CLASS,-1,-1", "INTERSECT", "", "")

        
        arcpy.Delete_management(COMPLAIN_POI_UNION)
        arcpy.Delete_management(COMPLAIN_RASTER_POINTS)
        arcpy.Delete_management(ResultRaster)
        arcpy.Delete_management(ResultRaster2)
        arcpy.Delete_management(ResultFeature)
        arcpy.Delete_management(ResultFeatureZj)
        del COMPLAIN_POI_UNION,COMPLAIN_RASTER_POINTS,ResultRaster,ResultRaster2,ResultFeature,ResultFeatureZj
    arcpy.Delete_management(FeatureSelect)
    del FeatureSelect,rowSear
    logging.info("清理内存~~")
    gc.collect()
Example 24
    def run(self):
        self.e.load()
        print "Starting Facet Classification processing..."
        arcpy.gp.Times_sa(self.i.sdg_f, 0, self.i.empty)
        arcpy.gp.ZonalGeometry_sa(self.i.facets, self.Zone_field,
                                  self.i.facet_a, "AREA", self.i.empty)
        arcpy.gp.Int_sa(self.i.empty, self.i.empty_i)
        arcpy.gp.ZonalStatistics_sa(self.i.empty_i, "VALUE", self.i.facet_a,
                                    self.i.facet_mn_a, "MINIMUM", "DATA")
        arcpy.gp.ZonalStatistics_sa(self.i.empty_i, "VALUE", self.i.facet_a,
                                    self.i.facet_mx_a, "MAXIMUM", "DATA")
        self.process_facet_a_r()

        arcpy.gp.ZonalStatistics_sa(self.i.facets, self.Zone_field,
                                    self.i.sdg_f, self.i.m_slp, "MEAN", "DATA")
        arcpy.gp.ZonalStatistics_sa(self.i.empty_i, "VALUE", self.i.m_slp,
                                    self.i.m_slp_mn, "MINIMUM", "DATA")
        arcpy.gp.ZonalStatistics_sa(self.i.empty_i, "VALUE", self.i.m_slp,
                                    self.i.m_slp_mx, "MAXIMUM", "DATA")
        self.process_m_slp_r()

        arcpy.gp.ZonalStatistics_sa(self.i.facets, self.Zone_field, self.e.d,
                                    self.i.m_d, "MEAN", "DATA")
        arcpy.gp.ZonalStatistics_sa(self.i.empty_i, "VALUE", self.i.m_d,
                                    self.i.m_d_mn, "MINIMUM", "DATA")
        arcpy.gp.ZonalStatistics_sa(self.i.empty_i, "VALUE", self.i.m_d,
                                    self.i.m_d_mx, "MAXIMUM", "DATA")
        self.process_m_d_r()

        print "Please wait...",
        if sleep(180) == None:
            self.e.load()
            print "Processing algorithm...",
            inputs = ";".join([
                self.i.m_slp_r, self.i.m_d_r
            ])  # only rasters zoned by facets can be entered here
            arcpy.gp.IsoClusterUnsupervisedClassification_sa(
                self.i.m_slp_r, self.Number_of_classes, self.i.fc,
                self.Minimum_class_size, self.Sample_interval, self.i.sig)
            arcpy.RasterToPolygon_conversion(self.i.fc, self.i.fc_shp,
                                             "NO_SIMPLIFY", "VALUE")
            print "Dissolving...",
            arcpy.Dissolve_management(self.i.fc_shp, self.i.fc_shp_d,
                                      "GRIDCODE", "", "MULTI_PART",
                                      "DISSOLVE_LINES")
            print "Joining features..."
            # Now it is a point feature
            # instead of a polygon one.
            # This was modified in 11/30/2019
            arcpy.SpatialJoin_analysis(target_features=self.e.s,
                                       join_features=self.i.fc_shp_d,
                                       out_feature_class=self.i.fc_shp_osj,
                                       join_operation="JOIN_ONE_TO_MANY",
                                       join_type="KEEP_ALL",
                                       match_option="INTERSECT",
                                       search_radius="#",
                                       distance_field_name="#")
            # arcpy.SpatialJoin_analysis(target_features=self.i.fc_shp_d, join_features=self.e.s, out_feature_class=self.i.fc_shp_osj, join_operation="JOIN_ONE_TO_ONE", join_type="KEEP_ALL", match_option="CLOSEST",search_radius="5000", distance_field_name="#")  # modified match_option="INTERSECT" in 06/15/2019 to "CLOSEST"
        print "Ending Facet Classification processing..."
Example 25
def floodplain(slop_dem, colorivers):
    
    
#    jeff_dem_slop = sa.Raster(r"D:\Deliverable\source_data\dem_utm_120mclip_aspectslope.tif")
    dem = r"D:\Deliverable\source_data\dem_utm_120mclip.tif"
    jeff_dem_ras = sa.Raster(dem)

    llpnt2 = jeff_dem_ras.extent.lowerLeft
    demSize2 = jeff_dem_ras.meanCellHeight
    
    slop_Arr = arcpy.RasterToNumPyArray(slop_dem)
    slop_Arr[slop_Arr <= 0] = 0
    
    flat_grid = np.where((slop_Arr <= 25.0),1,0)
   
    flatRaster = arcpy.NumPyArrayToRaster(flat_grid,llpnt2,demSize2,demSize2)
#    steepRaster = arcpy.NumPyArrayToRaster(steep_grid,llpnt,demSize,demSize)
#   
    arcpy.Delete_management("flat_FP")
    arcpy.DefineProjection_management(flatRaster,jeff_dem_ras.spatialReference)
    flatRaster.save('flat_FP')
    
#    floodplain(jeff_dem, colorivers)
    
    
    ZST = ZonalStatisticsAsTable(r"D:\Deliverable\source_data\intersect_dem", "PIN",  r"D:\Deliverable\flat_fp",
                                      r"D:\Deliverable\ZST_fp_dem", "NODATA", "Maximum")
    
    
    # 7. Perform final joins so that suitability output raster has correct parcel data
    #----------------------------------------------------------------------------------------------
    inFeatures = r"D:\Deliverable\source_data\intersect_dem"
    joinField = "PIN"
    joinTable = r"D:\Deliverable\ZST_fp_dem"
    fieldList = ["MAX"]
    # Join the zonal statistics table to the parcel features by PIN, carrying over only the MAX field
    arcpy.JoinField_management(inFeatures, joinField, joinTable, joinField, 
                               fieldList)
    
    arcpy.RasterToPolygon_conversion(r"D:\Deliverable\source_data\intersect_dem", r"D:\Deliverable\source_data\intersect_dem"+".shp", "NO_SIMPLIFY", "MAX")
    
    
    inter_steep = r"D:\Deliverable\source_data\inter_steep.shp"
    inter_shp = r"D:\Deliverable\source_data\intersect_dem.shp"
    agri = """"gridcode" = 0 """
    arcpy.Select_analysis(inter_shp, inter_steep,agri)
    
    inter_flat = r"D:\Deliverable\source_data\inter_flat.shp"
    inter_shp = r"D:\Deliverable\source_data\intersect_dem.shp"
    agri = """"gridcode" = 1 """
    arcpy.Select_analysis(inter_shp, inter_flat,agri )
    
    arcpy.Clip_analysis(colorivers, inter_flat, r"D:\Deliverable\flatClip2.shp")
    arcpy.Clip_analysis(colorivers, inter_steep, r"D:\Deliverable\steepClip2.shp")
    
    arcpy.Delete_management("flatBuff")
    arcpy.Buffer_analysis(r"D:\Deliverable\flatClip2.shp", "flatBuff", "400 meters")
    arcpy.Delete_management("steepBuff")
    arcpy.Buffer_analysis(r"D:\Deliverable\steepClip2.shp", "steepBuff", "150 meters")
    def createViewshed(self):
        try:
            tempEnvironment0 = arcpy.env.extent
            arcpy.env.extent = self.buffer
            tempEnvironment1 = arcpy.env.cellSize
            arcpy.env.cellSize = self.cellsize
            arcpy.AddMessage("cellsize: " + str(arcpy.env.cellSize))
            tempEnvironment2 = arcpy.env.mask
            arcpy.env.mask = self.buffer
            #outraster = sa.Viewshed(self.islyr, self.obsproc, 1, "FLAT_EARTH", 0.13)
            outraster = sa.Visibility(self.islyr,
                                      self.obsproc,
                                      analysis_type="FREQUENCY",
                                      nonvisible_cell_value="ZERO",
                                      z_factor=1,
                                      curvature_correction="CURVED_EARTH",
                                      refractivity_coefficient=0.13,
                                      observer_offset=self.height,
                                      outer_radius=self.radius,
                                      vertical_upper_angle=90,
                                      vertical_lower_angle=-90)
            #outrastertemp = os.path.join(r"C:\GEE\visibility", 'outvis')
            #outraster.save(outrastertemp)
            vshedtmp = os.path.join("in_memory", 'vshedtmp')
            vsheddis = os.path.join("in_memory", 'vsheddis')
            #vshed_proj = os.path.join(self.scratchgdb, 'vshedproj')
            arcpy.AddMessage("temp vshed fc:" + vshedtmp)
            arcpy.AddMessage("dissolved vshed fc: " + vsheddis)
            arcpy.env.extent = tempEnvironment0
            arcpy.env.cellSize = tempEnvironment1
            arcpy.env.mask = tempEnvironment2
            arcpy.RasterToPolygon_conversion(outraster, vshedtmp,
                                             "NO_SIMPLIFY", "VALUE")
            arcpy.Dissolve_management(vshedtmp, vsheddis, "gridcode", "",
                                      "MULTI_PART", "DISSOLVE_LINES")

            ##            if(self.wkidproc != self.wkidout):
            ##                arcpy.AddMessage("Projecting output vshed...")
            ##                arcpy.AddMessage("projected vshed fc: " + vshed_proj)
            ##                arcpy.Project_management(vsheddis, vshed_proj, self.srOut)
            ##                vshed=vshed_proj
            ##            else:
            ##                vshed=vsheddis
            #vistmp = os.path.join('in_memory', 'visibility')
            vis = os.path.join(self.scratchgdb, 'visibility')
            arcpy.AddMessage('creating output viewshed: ' + vis)
            arcpy.Clip_analysis(vsheddis, self.mask, vis, "")
            arcpy.AddMessage("Coppying to output...")
            #arcpy.CopyFeatures_management(vistmp, vis)
            fset = arcpy.FeatureSet()
            fset.load(vis)
            return fset
        except arcpy.ExecuteError:
            EH = ErrorHandling.ErrorHandling()
            line, filename, err = EH.trace()
            m = "Python error on " + line + " of " + __file__ + \
                " : with error - " + err
            arcpy.AddError(m)
Example 27
    def mu_maker(self, h_raster, u_raster, full_out_ras_name,
                 full_out_shp_name, *mu):
        # h_raster: STR - full path to depth raster
        # u_raster: STR - full path to velocity raster
        # full_out_ras_name: STR - full path of the results raster name
        # full_out_shp_name: STR - full path of the result shapefile name
        # mu = LIST(STR) - (optional) - restricts analysis to a list of morphological units according to mu.xlsx

        # start with raster calculations
        self.logger.info("Raster Processing    --- --- ")
        self.license_state = arcpy.CheckOutExtension(
            'Spatial')  # check out license
        arcpy.gp.overwriteOutput = True
        arcpy.env.workspace = self.path
        arcpy.env.extent = "MAXOF"

        try:
            self.mu_names = mu[
                0]  # limit mu analysis to optional list, if provided
        except:
            pass

        out_ras = self.calculate_mu(h_raster, u_raster)

        try:
            self.logger.info(" > Saving Raster ...")
            out_ras.save(full_out_ras_name)
            self.logger.info("   * OK")
        except:
            self.logger.info("ERROR: Could not save MU raster.")
        arcpy.CheckInExtension('Spatial')  # release license
        self.logger.info("Raster Processing OK     --- \n")

        self.logger.info("Shapefile Processing --- --- ")
        self.logger.info(" > Converting mu raster to shapefile ...")
        temporary_shp = full_out_shp_name.split(".shp")[0] + "1.shp"
        arcpy.RasterToPolygon_conversion(arcpy.Raster(full_out_ras_name),
                                         temporary_shp, 'NO_SIMPLIFY')

        self.logger.info(" > Calculating Polygon areas ...")
        arcpy.CalculateAreas_stats(temporary_shp, full_out_shp_name)

        self.logger.info("   * OK - Removing remainders ...")
        arcpy.Delete_management(temporary_shp)

        self.logger.info(" > Adding MU field ...")
        arcpy.AddField_management(full_out_shp_name,
                                  "MorphUnit",
                                  "TEXT",
                                  field_length=50)
        expression = "the_dict[!gridcode!]"
        codeblock = "the_dict = " + str(
            dict(
                zip(self.mu_names_number.values(),
                    self.mu_names_number.keys())))
        arcpy.CalculateField_management(full_out_shp_name, "MorphUnit",
                                        expression, "PYTHON", codeblock)
        self.logger.info("Shapefile Processing OK  --- ")
Example 28
def raster_overlap(file_A, file_B, outpath, NoData_A=None, NoData_B=None):
    """
    Finds the overlapping area between two raster images.

    This function examines two images and outputs a raster identifying pixels where both
    rasters have non-NoData values. The output raster has 1's where both images have data and
    0's where one or both images are missing data.

    inputs:
        file_A      the first file
        file_B      the second file
        outpath     the output filename for the desired output. must end in ".tif"
        NoData_A    the NoData value of file A
        NoData_B    the NoData value of file B

    This function automatically invokes
        clip_and_snap
        null_define
    """

    if not is_rast(file_A) or not is_rast(file_B):
        raise Exception(' both inputs must be rasters!')

    # load the rasters as numpy arrays.
    a, metaA = to_numpy(file_A)
    b, metaB = to_numpy(file_B)

    # set no_datas
    if NoData_A is None:
        NoData_A = metaA.NoData_Value
    if NoData_B is None:
        NoData_B = metaB.NoData_Value

    # spatially match the rasters
    print('preparing input rasters!')
    clip_and_snap(file_A, file_B, outpath.replace(".shp", ".tif"), NoData_B)

    # reload the rasters as numpy arrays now that spatial matching is done
    a, metaA = to_numpy(file_A)
    b, metaB = to_numpy(file_B)

    # create work matrix and find the overlap
    print('Finding overlapping pixels!')
    Workmatrix = a.mask + b.mask
    Workmatrix = Workmatrix.astype('uint8')
    Workmatrix[Workmatrix == 1] = 2

    print('Saving overlap file!')
    metaA.numpy_datatype = 'uint8'
    from_numpy(Workmatrix,
               metaA,
               outpath.replace(".shp", ".tif"),
               NoData_Value=2)
    arcpy.RasterToPolygon_conversion(outpath.replace(".shp", ".tif"),
                                     outpath.replace(".tif", ".shp"),
                                     'NO_SIMPLIFY')

    return metaA, metaB
Example 29
def krigingFromPointCSV(inTable, valueField, xField, yField, inClipFc, workspace = "assignment3.gdb"):
    #import arcpy and os and set overwriteOutput to true
    import arcpy
    import os
    from arcpy.sa import *  # Kriging, Int, Reclassify, RemapRange come from Spatial Analyst
    arcpy.env.overwriteOutput = True
    #set the workspace to be the geodatabase that is provided
    arcpy.env.workspace = workspace
    #map out the X and Y points from a table
    arcpy.management.XYTableToPoint(inTable, "year", xField, yField)
    #.Describe to find the features of the year dataset
    desc= arcpy.Describe("year")
    #set variable cellsize to 0
    CellSize= 0
    #set the width and the height to the .extent width and height
    width = desc.extent.width
    height= desc.extent.height
    #use the smaller of width/1000 and height/1000 as the cell size
    if width < height:
        CellSize = width / 1000
    else:
        CellSize = height / 1000
    #set a variable for the value field to interpolate
    field = valueField
    #perform the Kriging on the dataset and save it as F2018k
    outKriging = Kriging("year", field, "#", CellSize)
    outKriging.save("F2018k")
    #Describe the clip feature class to find its extent, then set the variable rectangle to its extent coordinates
    descClip = arcpy.Describe(inClipFc)
    rectangle = str(descClip.extent.XMin) + " " + str(descClip.extent.YMin) + " " + str(descClip.extent.XMax) + " " + str(descClip.extent.YMax)
    #clip the raster
    arcpy.Clip_management(outKriging, rectangle, "F2018KC", inClipFc, "#", "ClippingGeometry", "MAINTAIN_EXTENT" )
    #change to an integer from a floating point raster
    outInt = Int("F2018KC")
    outInt.save("F2018KCI")
    #find the min and max values of the outInt
    min_F2018 = arcpy.management.GetRasterProperties(outInt, "MINIMUM")
    max_F2018 = arcpy.management.GetRasterProperties(outInt, "MAXIMUM")
    #set a variable with the number of classes
    numofclasses = 5
    #find the class range for equal interval breaks
    eq_interval = (int(max_F2018.getOutput(0)) - int(min_F2018.getOutput(0))) / numofclasses
    print(eq_interval)
    #set an empty list
    remapRangeList = []
    mybreak = int(min_F2018.getOutput(0))
    #the following for loop populates the list with the lower bound, upper bound, and the class number for each of the 5 classes                    
    for t in range(0, numofclasses):
        newClassCode = t + 1
        lowerBound = mybreak
        upperbound = mybreak + eq_interval
        remap = [lowerBound, upperbound, newClassCode]
        remapRangeList.append(remap)
        mybreak += eq_interval
    #reclassify the raster into an isarithmic map based on the remapRangeList
    outReclassRR = Reclassify("F2018KCI", "Value", RemapRange(remapRangeList), "NODATA")
    outReclassRR.save("F2018_CL")
    #convert to polygon
    arcpy.RasterToPolygon_conversion("F2018_CL", "F2018_ismc")
Example 30
 def vectorize(georef_mask):
     basename = os.path.splitext(os.path.basename(georef_mask))[0]
     os.makedirs(resources.temp_vectorized, exist_ok=True)
     vectorized = os.path.join(resources.temp_vectorized,
                               "{}.shp".format(basename))
     field = "VALUE"
     arcpy.RasterToPolygon_conversion(georef_mask, vectorized,
                                      "NO_SIMPLIFY", field)
     return vectorized