def get_image_paths(in_mosaic):
    temp_image_table = path.join("in_memory", "temp_image_table")
    ExportMosaicDatasetPaths(in_mosaic, temp_image_table, '', "ALL",
                             "RASTER;ITEM_CACHE")
    images = {row[0] for row in da.SearchCursor(temp_image_table, "Path")}
    Delete(temp_image_table)
    return images
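
# Usage sketch for get_image_paths (the mosaic path below is hypothetical).
# Assumes the names above come from: from os import path; from arcpy import da;
# from arcpy.management import ExportMosaicDatasetPaths, Delete.
for image in sorted(get_image_paths(r"C:\data\imagery.gdb\ortho_mosaic")):
    print(image)
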
Example #2
def _tri_tool():
    """Triangulation for tool
    """
    in_fc = sys.argv[1]
    tri_type = sys.argv[2]
    out_fc = sys.argv[3]
    xtent = sys.argv[4]
    desc = Describe(in_fc)
    SR = desc.spatialReference  # Describe returns an object; only da.Describe returns a dict
    flds = ['SHAPE@X', 'SHAPE@Y']
    allpnts = False
    z = FeatureClassToNumPyArray(in_fc, flds, "", SR, allpnts)
    a = np.zeros((z.shape[0], 2), dtype='<f8')
    a[:, 0] = z['SHAPE@X']
    a[:, 1] = z['SHAPE@Y']
    #
    if tri_type == 'Delaunay':
        tweet("Delaunay... clip extent {}".format(xtent))
        t = tri_pnts(a, True)  # must be a list of list of points
        polys = poly(t, SR)
        if Exists(out_fc):
            Delete(out_fc)
        CopyFeatures(polys, "in_memory/temp")
        MakeFeatureLayer("in_memory/temp", "temp")
        if xtent not in ("", None):
            Clip("temp", xtent, out_fc, None)
        else:
            CopyFeatures("temp", out_fc)
    else:
        tweet("Voronoi... clip extent {}".format(xtent))
        c = infinity_circle(a, fac=10)
        aa = np.vstack((a, c))
        v = vor_pnts(aa, testing=False)
        polys = poly([v], SR)
        if Exists(out_fc):
            Delete(out_fc)
        CopyFeatures(polys, "in_memory/temp")
        MakeFeatureLayer("in_memory/temp", "temp")
        if xtent not in ("", None):
            Clip("temp", xtent, out_fc, None)
        else:
            CopyFeatures("temp", out_fc)
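
# Usage sketch for _tri_tool (hypothetical values): the tool reads its four
# parameters from sys.argv, as a toolbox script supplies them, and relies on
# the module's helpers (tweet, tri_pnts, poly, ...) being importable.
sys.argv = [r"C:\tools\triangulate.py", r"C:\data\work.gdb\sample_pnts",
            "Delaunay", r"C:\data\work.gdb\tri_polys", ""]
_tri_tool()
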
def las_tile_to_numpy_pandas(lidar_tile, sr, returns, class_codes,
                             format_for_library):
    temp_lasd = "{0}_temp.lasd".format(splitext(lidar_tile)[0])
    if Exists(temp_lasd):
        Delete(temp_lasd)
    arcpy.CreateLasDataset_management(lidar_tile,
                                      temp_lasd,
                                      spatial_reference=sr)
    point_spacing = arcpy.Describe(temp_lasd).pointSpacing
    Delete(temp_lasd)

    temp_pts_multi = join("in_memory", "temp_pts_multi")
    if Exists(temp_pts_multi):
        Delete(temp_pts_multi)
    LASToMultipoint(input=lidar_tile,
                    out_feature_class=temp_pts_multi,
                    average_point_spacing=point_spacing,
                    class_code=class_codes,
                    _return=returns,
                    input_coordinate_system=sr)

    if format_for_library == "numpy":
        lidar_points = da.FeatureClassToNumPyArray(
            in_table=temp_pts_multi,
            # field_names=["OID@", "SHAPE@X", "SHAPE@Y", "SHAPE@Z"],
            field_names=["SHAPE@X", "SHAPE@Y", "SHAPE@Z"],
            # field_names=["SHAPE@XYZ"],
            spatial_reference=sr,
            explode_to_points=True)
        Delete(temp_pts_multi)
        # Numpy Processing Operation Goes Here!
        numpy_operation_here(lidar_points)

    elif format_for_library == "pandas":
        lidar_points = pd.DataFrame.spatial.from_featureclass(
            location=temp_pts_multi)
        #fields=["SHAPE@X", "SHAPE@Y", "SHAPE@Z"])
        Delete(temp_pts_multi)
        # Pandas Processing Operation Goes Here!
        pandas_operation_here(lidar_points)

    del lidar_points
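
# Usage sketch (hypothetical path, spatial reference and filter values): run
# the numpy branch for one tile; `returns` and `class_codes` are passed
# straight through to LASToMultipoint. Assumes arcpy and pandas are imported.
las_tile_to_numpy_pandas(r"C:\lidar\tile_001.las",
                         arcpy.SpatialReference(26917),
                         "ANY_RETURNS", [2], "numpy")
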
def execute_ChannelCorrection(demras, boundary, riverbed, rivernet, breachedmnt, messages):

    arcpy.env.outputCoordinateSystem = demras.spatialReference
    env.snapRaster = demras

    ends = CreateScratchName("loob", data_type="FeatureClass", workspace="in_memory")
    CopyFeatures(boundary, ends)

    AddField(ends, "dummy", "LONG", field_alias="dummy", field_is_nullable="NULLABLE")
    CalculateField(ends, "dummy", "1", "PYTHON")

    endsras = CreateScratchName("loras", data_type="RasterDataset", workspace=env.scratchWorkspace)
    PolylineToRaster(ends, "dummy", endsras, "MAXIMUM_LENGTH", cellsize=demras)
    statpts = FocalStatistics(endsras, NbrRectangle(3, 3, "CELL"), "MAXIMUM", "DATA")

    env.extent = demras

    rasterbed = CreateScratchName("loras", data_type="RasterDataset", workspace=env.scratchWorkspace)
    PolygonToRaster(riverbed, arcpy.Describe(riverbed).OIDFieldName, rasterbed, "CELL_CENTER", cellsize=demras)
    rasterline = CreateScratchName("loras", data_type="RasterDataset", workspace=env.scratchWorkspace)
    PolylineToRaster(rivernet, arcpy.Describe(rivernet).OIDFieldName, rasterline, cellsize=demras)

    streambed = Con(IsNull(rasterline), Con(IsNull(rasterbed) == 0, 1), 1)

    bedwalls = FocalStatistics(streambed, NbrRectangle(3, 3, "CELL"), "MAXIMUM", "DATA")

    env.extent = bedwalls

    chanelev = Con(streambed, demras)
    chanmax = chanelev.maximum
    chanwalls = chanelev.minimum - 100
    switchtemp = CreateScratchName("loras", data_type="RasterDataset", workspace=env.scratchWorkspace)
    switchelev = -1 * (Con(IsNull(streambed), Con(bedwalls, Con(IsNull(statpts), chanwalls)), chanelev) - chanmax)
    switchelev.save(switchtemp)
    Delete(statpts)
    Delete(chanelev)

    switchfilled = Fill(switchtemp)
    Delete(switchtemp)

    env.extent = demras
    breachedtemp = Con(IsNull(streambed), demras, (-1*switchfilled) + chanmax)
    breachedtemp.save(breachedmnt)

    Delete(bedwalls)
    Delete(endsras)
    Delete(rasterline)
    Delete(rasterbed)
    Delete(switchfilled)
    return
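
# Usage sketch (hypothetical inputs; assumes `from arcpy.sa import Raster`):
# demras must be a Raster object, the next three inputs are feature classes,
# and breachedmnt is the output raster path. `messages` is unused here.
execute_ChannelCorrection(Raster(r"C:\hydro\dem.tif"),
                          r"C:\hydro\data.gdb\boundary_lines",
                          r"C:\hydro\data.gdb\riverbed_polys",
                          r"C:\hydro\data.gdb\river_network",
                          r"C:\hydro\dem_breached.tif", None)
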
Example #5
def split_at_vertices(in_fc, out_fc):
    """Split at vertices.  Unique segments retained."""
    info = "split at vertices"
    g, oids, shp_kind, k, m, SR = _in_(in_fc, info)
    od = g.segment_polys(as_basic=False, shift_back=True, as_3d=False)
    tmp = "memory/tmp"
    if Exists(tmp):
        Delete(tmp)
    ags.da.NumPyArrayToTable(od, tmp)
    xyxy = list(od.dtype.names[:4])
    args = [tmp, out_fc] + xyxy + ["GEODESIC", "Orig_id", SR]
    XYToLine(*args)
    return
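
# Usage sketch (hypothetical paths): split every feature into its unique
# segments and write them out as geodesic lines.
split_at_vertices(r"C:\data\work.gdb\parcels",
                  r"C:\data\work.gdb\parcel_segments")
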
Example #6
def temp_fc(geo, name, kind, SR):
    """Similar to _out_ but creates a `memory` featureclass."""
    polys = Geo_to_arc_shapes(geo, as_singlepart=True)
    wkspace = env.workspace = 'memory'  # legacy is in_memory
    tmp_name = "{}\\{}".format(wkspace, name)
    # tmp = MultipartToSinglepart(in_fc, r"memory\in_fc_temp")
    if Exists(tmp_name):
        Delete(tmp_name)
    CreateFeatureclass(wkspace, name, kind, spatial_reference=SR)
    AddField(tmp_name, 'ID_arr', 'LONG')
    with ags.da.InsertCursor(name, ['SHAPE@', 'ID_arr']) as cur:
        for row in polys:
            cur.insertRow(row)
    return tmp_name
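
# Usage sketch: stage a Geo array as a `memory` feature class, then copy it
# somewhere permanent. `geo` and `SR` are assumed to exist (as in Geo_to_fc
# below) and the output path is hypothetical.
tmp = temp_fc(geo, "tmp_polys", "POLYGON", SR)
CopyFeatures(tmp, r"C:\data\work.gdb\polys_out")
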
def process():
    # Detect Unit of Measurement (Feet -vs- Meter)
    cell_factor = getCellFactor(in_mosaic_dataset)

    # Obtain List of Raster Files in Mosaic Dataset
    temp_table = join("memory", "temp_table")
    ExportMosaicDatasetPaths(in_mosaic_dataset, temp_table, "#", "ALL", "RASTER")
    rasters = set(row[0] for row in da.SearchCursor(temp_table, "Path"))
    Delete(temp_table)

    if not exists(out_directory):
        makedirs(out_directory)

    # Process each raster
    for in_raster in rasters:
        root_dir, file = split(in_raster)
        AddMessage("filename is: {}".format(file))
        out_raster = join(out_directory, file)

        desc = Describe(in_raster)
        cell_size_height = desc.children[0].meanCellHeight  # Cell size in the Y axis
        cell_size_width = desc.children[0].meanCellWidth  # Cell size in the X axis
        # ProjectRaster expects cell size as "x y", i.e. width then height
        cell_size = "{0} {1}".format(cell_size_width*cell_factor, cell_size_height*cell_factor)

        if unitsCalc(in_mosaic_dataset) == "Foot":
            outTimes = Times(in_raster, 0.3048)
            ProjectRaster(in_raster=outTimes,
                          out_raster=out_raster,
                          out_coor_system=out_spatial_reference,
                          resampling_type=resampling_type,
                          cell_size=cell_size,
                          geographic_transform=geographic_transform,
                          in_coor_system=input_spatial_reference)
        else:
            ProjectRaster(in_raster=in_raster,
                          out_raster=out_raster,
                          out_coor_system=out_spatial_reference,
                          resampling_type=resampling_type,
                          cell_size=cell_size,
                          geographic_transform=geographic_transform,
                          in_coor_system=input_spatial_reference)

    # Delete Intermediate Data
    del rasters
    if out_mosaic_dataset:
        root_dir, file = split(out_mosaic_dataset)
        # TODO: Automatically detect Pixel Type from input Mosaic Dataset Rasters and pass below
        createMosaics(root_dir, file, out_directory, out_spatial_reference, "32_BIT_UNSIGNED")
def texture_image(in_image, height, width, position, max_height, max_width,
                  in_texture, in_polygon, out_raster, method, blur_distance):
    from create_mask import create_mask
    from fill_masked_image import mask_image
    from arcpy.management import BuildPyramids
    from pathlib import Path
    from PIL import Image

    # Convert the Modified polygon that now covers entire extent of Interest to Raster
    temp_mask_raster = path.join(path.dirname(out_raster),
                                 Path(out_raster).stem + "_mask.jpg")
    create_mask(in_image, in_polygon, temp_mask_raster)

    #################################
    # Apply Texture Map to Image
    ###############################
    # Prep Texture for process... Align
    def get_clip_ext(position):
        if position == "bl":
            return max_width - width, max_height - height, width, height
        if position == "tl":
            return max_width - width, max_height - height, width, height
        if position == "tr":
            return 0, max_height - height, width, max_height
        if position is "br":
            return 0, max_height - height, width, height
        if position == "l":
            return max_width - width, 0, width, height
        if position == "t":
            return 0, max_height - height, width, max_height
        if position == "r":
            return 0, 0, width, height
        if position == "b":
            return 0, max_height - height, width, height
        if position == "i":
            return 0, 0, width, height

    # Image.ANTIALIAS was removed in Pillow 10; on newer Pillow use Image.LANCZOS
    texture = Image.open(in_texture).resize((max_width, max_height),
                                            Image.ANTIALIAS)
    texture_cropped = texture.crop(get_clip_ext(position))

    mask_image(in_image, temp_mask_raster, texture_cropped, out_raster, method,
               blur_distance)
    BuildPyramids(out_raster, -1, "NONE", "NEAREST", "DEFAULT", 75,
                  "OVERWRITE")
    Delete(temp_mask_raster)  # Delete Intermediate Data
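
# Usage sketch for texture_image (all values hypothetical): texture a
# 512 x 512 bottom-left tile from a 1024 x 1024 texture; `method` and
# `blur_distance` are passed straight through to mask_image.
texture_image(r"C:\tiles\tile_bl.jpg", 512, 512, "bl", 1024, 1024,
              r"C:\textures\brick.jpg", r"C:\data\work.gdb\mask_poly",
              r"C:\out\tile_bl_textured.tif", "average", 5)
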
Example #9
def subset_image_for_texture(in_image, in_polygon, area, out_raster):
    from os import path
    from arcpy import Describe, AddWarning
    from arcpy.management import Delete, Clip as ClipRaster
    from math import sqrt
    temp_rast = path.join("in_memory", "temp_rast")
    ClipRaster(in_image, image_extent_2(in_polygon), temp_rast, "#", "#", "NONE")
    desc = Describe(temp_rast).children[0]
    height = desc.height
    width = desc.width
    cell_height = desc.meanCellHeight
    cell_width = desc.meanCellWidth
    r_length = height*cell_height
    r_width = width*cell_width
    if r_length > sqrt(area) and r_width > sqrt(area):
        subset_image(temp_rast, area, out_raster)
    else:
        AddWarning("Geometry Length and Width do not fit Area| Length = {0} | Width = {1}".format(r_length, r_width))
        AddWarning("Draw a larger area where length and width fit within the area as a square")
    Delete(temp_rast)
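
# Usage sketch (hypothetical inputs): clip a texture sample from an image,
# warning if the drawn polygon cannot hold a square of `area` map units.
subset_image_for_texture(r"C:\imagery\ortho.tif",
                         r"C:\data\work.gdb\sample_poly", 2500,
                         r"C:\out\texture_subset.tif")
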
Example #10
def Geo_to_fc(geo, gdb=None, name=None, kind=None, SR=None):
    """Return a FeatureClass from a Geo array."""
    if kind in (None, 0, 1, 2):
        print("\n ``kind`` must be one of Polygon, Polyline or Point.")
        return None
    #
    # dx, dy = geo.LL
    # geo = geo.shift(dx, dy)
    polys = Geo_to_arc_shapes(geo, as_singlepart=True)
    out_name = gdb.replace("\\", "/") + "/" + name
    wkspace = env.workspace = 'memory'  # legacy is in_memory
    tmp_name = "{}\\{}".format(wkspace, "tmp")
    if Exists(tmp_name):
        Delete(tmp_name)
    CreateFeatureclass(wkspace, "tmp", kind, spatial_reference=SR)
    AddField("tmp", 'ID_arr', 'LONG')
    with InsertCursor("tmp", ['SHAPE@', 'ID_arr']) as cur:
        for row in polys:
            cur.insertRow(row)
    CopyFeatures("tmp", out_name)
    return
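
# Usage sketch (hypothetical paths; `geo` and `SR` as above): `kind` must be
# a geometry keyword such as "POLYGON" to pass the guard at the top.
Geo_to_fc(geo, gdb=r"C:\data\work.gdb", name="geo_polys",
          kind="POLYGON", SR=SR)
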
def execute_ChannelCorrection2(demras, boundary, riverbed, rivernet, breachedmnt, messages):

    arcpy.env.outputCoordinateSystem = demras.spatialReference
    env.snapRaster = demras

    env.extent = demras

    rasterbed = CreateScratchName("loras", data_type="RasterDataset", workspace=env.scratchWorkspace)
    PolygonToRaster(riverbed, arcpy.Describe(riverbed).OIDFieldName, rasterbed, "CELL_CENTER", cellsize=demras)
    rasterline = CreateScratchName("loras", data_type="RasterDataset", workspace=env.scratchWorkspace)
    PolylineToRaster(rivernet, arcpy.Describe(rivernet).OIDFieldName, rasterline, cellsize=demras)

    streambed = Con(IsNull(rasterline), Con(IsNull(rasterbed) == 0, 1), 1)

    bedwalls = FocalStatistics(streambed, NbrRectangle(3, 3, "CELL"), "MAXIMUM", "DATA")

    env.extent = bedwalls

    chanelev = Con(streambed, demras)
    chanmax = chanelev.maximum
    chanwalls = chanelev.minimum - 100
    switchtemp = CreateScratchName("loras", data_type="RasterDataset", workspace=env.scratchWorkspace)
    # note: unlike execute_ChannelCorrection above, `boundary` is used directly
    # inside IsNull() here, so it must already be a raster at this point
    switchelev = -1 * (Con(IsNull(streambed), Con(bedwalls, Con(IsNull(boundary), chanwalls)), chanelev) - chanmax)
    switchelev.save(switchtemp)
    Delete(chanelev)

    switchfilled = Fill(switchtemp)
    Delete(switchtemp)

    env.extent = demras
    breachedtemp = Con(IsNull(streambed), demras, (-1*switchfilled) + chanmax)
    breachedtemp.save(breachedmnt)

    Delete(bedwalls)
    Delete(rasterline)
    Delete(rasterbed)
    Delete(switchfilled)
    return
Example #12
def pick_tool(tool, in_fc, out_fc, gdb, name):
    """Pick the tool and run the option."""
    # --
    #
    tweet(dedent(f0).format(script, tool, in_fc, out_fc))
    #
    # ---- Attribute tools
    if tool == 'Attribute sort':  # ---- (1) attribute sort
        sort_flds = str(sys.argv[4])
        out_fld = str(sys.argv[5])
        sort_flds = sort_flds.split(";")
        tweet(dedent(f1).format(in_fc, sort_flds, out_fld))
        oid_fld = ags.da.Describe(in_fc)['OIDFieldName']
        flds = [oid_fld] + sort_flds
        a = ags.da.TableToNumPyArray(in_fc, flds)
        out = attr_sort(a, oid_fld, sort_flds, out_fld)  # run... attr_sort
        ags.da.ExtendTable(in_fc, oid_fld, out, oid_fld, append_only=False)
    elif tool == 'Frequency and Stats':  # ---- (2) freq and stats
        cls_flds = sys.argv[4]
        stat_fld = sys.argv[5]
        cls_flds = cls_flds.split(";")  # multiple to list, singleton a list
        if stat_fld in (None, 'NoneType', ""):
            stat_fld = None
        # use the whole array and skip nulls
        if stat_fld is not None:
            all_flds = cls_flds + [stat_fld]
        else:
            all_flds = cls_flds  # avoid a NameError when no statistics field is given
        a = ags.da.TableToNumPyArray(in_fc,
                                     field_names=all_flds,
                                     skip_nulls=True)
        out = freq(a, cls_flds, stat_fld)  # do freq analysis
        if Exists(out_fc) and env.overwriteOutput:
            Delete(out_fc)
        ags.da.NumPyArrayToTable(out, out_fc)
    #
    # ---- Containers
    elif tool in [
            'Bounding Circles', 'Convex Hulls', 'Extent Polys',
            'Minimum area bounding rectangle'
    ]:
        out_kind = sys.argv[4].upper()
        if tool == 'Bounding Circles':  # ---- (1) bounding circles
            circles(in_fc, gdb, name, out_kind)
        elif tool == 'Convex Hulls':  # ---- (2) convex hulls
            convex_hull_polys(in_fc, gdb, name, out_kind)
        elif tool == 'Extent Polys':  # ---- (3) extent_poly
            extent_poly(in_fc, gdb, name, out_kind)
        elif tool == 'Minimum area bounding rectangle':
            mabr(in_fc, gdb, name, out_kind)
    #
    # ---- Conversion
    elif tool in ['Features to Points', 'Vertices to Points']:
        if tool == 'Features to Points':  # ---- (1) features to point
            out, SR = f2pnts(in_fc)
        elif tool == 'Vertices to Points':  # ---- (2) feature to vertices
            out, SR = p_uni_pnts(in_fc)
        ags.da.NumPyArrayToFeatureClass(out, out_fc, ['Xs', 'Ys'], SR)
    elif tool == 'Polygons to Polylines':  # ---- (3) polygon to polyline
        pgon_to_pline(in_fc, gdb, name)
    elif tool == 'Split at Vertices':  # ---- (4) split at vertices
        split_at_vertices(in_fc, out_fc)
    #
    # ---- Sort geometry
    elif tool in ['Area Sort', 'Length Sort', 'Geometry Sort']:
        srt_type = tool.split(" ")[0].lower()
        tweet("...\n{} as {}".format(tool, 'input'))
        sort_geom(in_fc, gdb, name, srt_type)
    elif tool == 'Extent Sort':
        srt_type = int(sys.argv[4][0])
        tweet("...\n{} as {}".format(tool, 'input'))
        sort_extent(in_fc, gdb, name, srt_type)
    #
    # ---- Alter geometry
    elif tool == 'Densify by Distance':  # ---- (1) densify distance
        dist = float(sys.argv[4])
        dens_dist(in_fc, gdb, name, dist)
    elif tool == 'Densify by Percent':  # ---- (2) densify percent
        dist = float(sys.argv[4])
        if dist < 1. or dist > 100.:  # clamp to the 1 to 100% range
            dist = min(max(abs(dist), 1.), 100.)
        dens_dist(in_fc, gdb, name, dist)
    elif tool == 'Densify by Factor':  # ---- (3) densify percent
        dist = float(sys.argv[4])
        dens_fact(in_fc, gdb, name, dist)
    elif tool == 'Fill Holes':  # ---- (4) fill holes
        fill_holes(in_fc, gdb, name)
    elif tool == 'Keep Holes':  # ---- (5) keep holes
        keep_holes(in_fc, gdb, name)
    elif tool == 'Rotate Features':  # ---- (6) rotate
        clockwise = False
        as_group = False
        rot_type = str(sys.argv[4])  # "extent center" or "shape center"
        angle = float(sys.argv[5])
        clockwise = str(sys.argv[6])
        if rot_type == "shape center":
            as_group = True
        if clockwise.lower() == "true":
            clockwise = True
        rotater(in_fc, gdb, name, as_group, angle, clockwise)
    elif tool == 'Shift Features':  # ---- (7) shift
        dX = float(sys.argv[4])
        dY = float(sys.argv[5])
        shifter(in_fc, gdb, name, dX=dX, dY=dY)
    elif tool == 'Dissolve Boundaries':
        dissolve_boundaries(in_fc, gdb, name)
    #
    # ---- Triangulation
    elif tool == 'Delaunay':  # ---- (1) Delaunay
        out_kind = sys.argv[4].upper()
        constrained = sys.argv[5] == "True"
        tri_poly(in_fc, gdb, name, out_kind, constrained)
    elif tool == 'Voronoi':  # ---- (2) Voronoi
        out_kind = sys.argv[4].upper()
        vor_poly(in_fc, gdb, name, out_kind)
    else:
        tweet("Tool {} not found".format(tool))
        return None
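
# Usage sketch for pick_tool (hypothetical values): extra tool parameters are
# read from sys.argv, so a caller outside a toolbox must fill it first (here
# sys.argv[4] carries the densify distance). Assumes the module globals used
# above (f0, script, tweet, ...) are defined.
sys.argv = [r"C:\tools\npg_tools.py", "in", "out", "gdb", "25.0"]
pick_tool("Densify by Distance", r"C:\data\work.gdb\roads",
          r"C:\data\work.gdb\roads_densified", r"C:\data\work.gdb",
          "roads_densified")
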
def create_mask(in_raster, in_polygon, out_raster):
    from os import path
    from arcpy import env, EnvManager, ResetEnvironments, AddError
    from arcpy.ia import Con, IsNull
    from arcpy.management import Delete, CopyRaster, GetCount, Clip as ClipRaster, GetRasterProperties
    from arcpy.conversion import PolygonToRaster
    from arcpy.analysis import Clip
    env.overwriteOutput = True

    # Clip raster and apply geometries at bottom-left and top-right corners to ensure Raster covers Ortho tile extent
    polygon_clipped = path.join("in_memory", "polygon_clipped")
    Clip(in_polygon, raster_extent_polygon(in_raster), polygon_clipped)
    generate_squares(polygon_clipped, in_raster)

    def is_masked(in_polygon):
        count = int(GetCount(in_polygon)[0])
        if count == 1:
            return True, count
        if count == 2:
            return False, count
        if count > 2:
            return True, count

    _is_masked = is_masked(polygon_clipped)
    # Set the Environment Extent to the extent of the Ortho-Image as well as other settings to align.
    # (EnvManager is normally used as a context manager; the bare call here
    # relies on ResetEnvironments() at the end of this function to clean up.)
    EnvManager(cellSize=in_raster,
               extent=image_extent(in_raster),
               snapRaster=in_raster)  # , mask=in_raster)
    file, extension = path.splitext(out_raster)
    # Convert the Modified polygon that now covers entire extent of Interest to Raster
    temp_raster = file + "Temp" + ".tif"
    PolygonToRaster(polygon_clipped, "OBJECTID", temp_raster, "CELL_CENTER",
                    "", in_raster)
    Delete(polygon_clipped)
    # Clip the Polygon Raster
    temp_clip_rast = file + "TempClipped" + ".tif"
    ClipRaster(temp_raster, image_extent_2(in_raster), temp_clip_rast,
               in_raster, "-1", "NONE", "MAINTAIN_EXTENT")
    if _is_masked[0]:
        if _is_masked[1] < 4:
            mask_raster = Con(temp_clip_rast, 255, 0, "VALUE = 0")
        else:
            # Deal with Masks covering the entire image
            mask_raster = Con(IsNull(temp_clip_rast), 0, 255, "Value = 0")
            # Deal with Masks covering a corner of image
            if int(
                    GetRasterProperties(mask_raster,
                                        "UNIQUEVALUECOUNT").getOutput(0)) < 2:
                Delete(mask_raster)
                mask_raster = Con(temp_clip_rast, 0, 255,
                                  "VALUE <= {0}".format(_is_masked[1] - 2))
    else:
        mask_raster = Con(temp_clip_rast, 255, 255, "VALUE = 0")
    temp_mask_raster = file + "TempMask" + ".tif"
    mask_raster.save(temp_mask_raster)

    ext = path.splitext(out_raster)[1]

    if "jpg" in ext.lower():
        # Convert the raster to .jpg format
        # Combine the band 3x for final output as RGB
        CopyRaster(temp_mask_raster, out_raster, '', None, '', "NONE",
                   "ColormapToRGB", "8_BIT_UNSIGNED", "NONE", "NONE", "JPEG",
                   "NONE", "CURRENT_SLICE", "NO_TRANSPOSE")
    if "tif" in ext.lower():
        # Convert the raster to .tif format
        # Combine the band 3x for final output as RGB
        CopyRaster(temp_mask_raster, out_raster, '', None, '', "NONE",
                   "ColormapToRGB", "8_BIT_UNSIGNED", "NONE", "NONE", "TIFF",
                   "NONE", "CURRENT_SLICE", "NO_TRANSPOSE")
    if ext.lower() not in [".tif", ".jpg"]:
        AddError(
            "Process Failed. Currently ony supports .jpg and .tif as output formats"
        )
    # Delete Intermediate Data
    Delete(temp_clip_rast)
    Delete(temp_mask_raster)
    Delete(temp_raster)
    # Reset geoprocessing environment settings
    ResetEnvironments()
        fieldtype = "DOUBLE"
    elif typename == "str":
        fieldtype = "TEXT"
    elif typename == "bool":
        fieldtype = "SHORT"
    else:
        raise ValueError("Unsupported field type: %s" % typename)

    nullable = "NULLABLE" if fielddesc.null_ok else "NON_NULLABLE"

    AddField(temp_table, fielddesc.name, fieldtype, fielddesc.precision,
             fielddesc.scale, fielddesc.internal_size, None, nullable)

# Get field names for temp_table, which may differ from in_table
fieldnames = [field.name for field in arcpy.Describe(temp_table).fields
              if field.type != "OID"]

# Copy rows into temporary table
with InsertCursor(temp_table, fieldnames) as out_cursor:
    for in_row in in_cursor:
        out_cursor.insertRow(in_row)

# Convert temporary table to table in geodatabase
TableToTable(in_rows=temp_table,
             out_path='E://QGIS//geocoding2018//geocoding.gdb',
             out_name='address')

# Delete temporary table
if arcpy.Exists(temp_table):
    Delete(temp_table)
def batch_create_tiled_ortho_mosaics(in_folder, image_format, num_bands,
                                     pixel_depth, product_definition,
                                     product_band_definitions, pixel_size,
                                     out_folder):
    from arcpy.management import CreateMosaicDataset, AddRastersToMosaicDataset, SplitRaster, CreateFileGDB, Delete
    from arcpy import Describe, env
    from arcpy import SetProgressor, SetProgressorLabel, SetProgressorPosition, ResetProgressor
    from os.path import join, exists
    from os import listdir, mkdir, makedirs

    env.overwriteOutput = True

    if not exists(out_folder):
        makedirs(out_folder)

    CreateFileGDB(out_folder, "scratch_mosaics.gdb")
    scratchGDB = join(out_folder, "scratch_mosaics.gdb")
    CreateFileGDB(out_folder, "ortho_mosaics.gdb")
    fileGDB = join(out_folder, "ortho_mosaics.gdb")
    count = 0
    images = [
        f for f in listdir(in_folder)
        if f.lower().endswith(image_format.lower())
    ]
    num_images = len(images)
    SetProgressor("step", "Begin Processing Files...", 0, num_images, 1)
    for fileName in images:
        print("processing Image {0} of {1}".format(count, num_images))
        file = join(in_folder, fileName)
        sr = Describe(file).spatialReference
        Name = "mosaic{}".format(count)
        SetProgressorLabel(
            "Creating Mosaic Dataset for {0}...".format(fileName))
        CreateMosaicDataset(scratchGDB, Name, sr, num_bands, pixel_depth,
                            product_definition, product_band_definitions)
        mosaic_dataset = join(scratchGDB, Name)
        SetProgressorLabel(
            "Adding Rasters to Mosaic Dataset for {0}...".format(fileName))
        AddRastersToMosaicDataset(
            mosaic_dataset, "Raster Dataset", file, "UPDATE_CELL_SIZES",
            "UPDATE_BOUNDARY", "NO_OVERVIEWS", None, 0, 1500, None, '',
            "SUBFOLDERS", "ALLOW_DUPLICATES", "NO_PYRAMIDS", "NO_STATISTICS",
            "NO_THUMBNAILS", '', "NO_FORCE_SPATIAL_REFERENCE", "NO_STATISTICS",
            None, "NO_PIXEL_CACHE")
        out_tile_folder = join(out_folder, "tiles{}".format(count))
        mkdir(out_tile_folder)
        SetProgressorLabel(
            "Splitting Rasters into Small Tiles for {0}...".format(fileName))
        SplitRaster(mosaic_dataset, out_tile_folder, "tile", "SIZE_OF_TILE",
                    "JPEG", "NEAREST", "1 1", "{0} {0}".format(pixel_size), 0,
                    "PIXELS", None, None, None, "NONE", "DEFAULT", '')
        Delete(mosaic_dataset)
        mosaic_name = "tiles{}_".format(count)
        mosaic_dataset = join(fileGDB, mosaic_name)
        SetProgressorLabel(
            "Creating Mosaic Dataset for Tiles of {0}...".format(fileName))
        CreateMosaicDataset(fileGDB, mosaic_name, sr, num_bands, pixel_depth,
                            product_definition, product_band_definitions)
        SetProgressorLabel(
            "Adding of {0} to Mosaic Dataset...".format(fileName))
        AddRastersToMosaicDataset(
            mosaic_dataset, "Raster Dataset", out_tile_folder,
            "UPDATE_CELL_SIZES", "UPDATE_BOUNDARY", "NO_OVERVIEWS", None, 0,
            1500, None, '', "SUBFOLDERS", "ALLOW_DUPLICATES", "NO_PYRAMIDS",
            "NO_STATISTICS", "NO_THUMBNAILS", '', "NO_FORCE_SPATIAL_REFERENCE",
            "NO_STATISTICS", None, "NO_PIXEL_CACHE")
        SetProgressorPosition()
        count += 1
    Delete(scratchGDB)
    ResetProgressor()
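
# Usage sketch (hypothetical values): tile every 3-band 8-bit TIFF in a folder
# into 1024-pixel JPEG tiles, each wrapped in its own mosaic dataset. The
# product definition/band strings are placeholders for your sensor.
batch_create_tiled_ortho_mosaics(r"C:\imagery\orthos", "tif", 3,
                                 "8_BIT_UNSIGNED", "NATURAL_COLOR_RGB",
                                 "", 1024, r"C:\imagery\tiled")
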
def execute_RiverPolygon(r_watsurf, maxwidth, minwidth, islands, surface, messages):
    sws = env.scratchWorkspace
    env.extent = r_watsurf

    cellsize = int(r_watsurf.meanCellHeight)

    bedpoly = CreateScratchName("gepo", data_type="FeatureClass", workspace=sws)
    RasterToPolygon(r_watsurf, bedpoly, "NO_SIMPLIFY", "VALUE", "MULTIPLE_OUTER_PART")

    islandup = CreateScratchName("gepo", data_type="FeatureClass", workspace="in_memory")
    GraphicBuffer(bedpoly, islandup, "{0} Meters".format(cellsize), "SQUARE", "MITER", 10, "0 Meters")
    Delete(bedpoly)

    smoothed = CreateScratchName("gepo", data_type="FeatureClass", workspace="in_memory")
    GraphicBuffer(islandup, smoothed, "{0} Meters".format(-cellsize), "SQUARE", "MITER", 10, "0 Meters")

    fillbed = CreateScratchName("gepo", data_type="FeatureClass", workspace="in_memory")
    EliminatePolygonPart(smoothed, fillbed, "PERCENT", "0 SquareMeters", 50, "CONTAINED_ONLY")

    multiland = CreateScratchName("gepo", data_type="FeatureClass", workspace="in_memory")
    Erase(fillbed, smoothed, multiland, None)
    MultipartToSinglepart(multiland, islands)

    mindist = max(cellsize, int(minwidth / (2 * cellsize)) * cellsize)
    maxdist = max(mindist, int(maxwidth / (2 * cellsize)) * cellsize)
    gblist = list(range(mindist, maxdist + cellsize, cellsize))
    scalelist = ["" for ii in range(0, len(gblist), 1)]
    partlist = []
    scaledn = CreateScratchName("gepo", data_type="FeatureClass", workspace="in_memory")
    GraphicBuffer(smoothed, scaledn, "{0} Meters".format(-cellsize / 2), "SQUARE", "MITER", 10, "0 Meters")

    trimmed = CreateScratchName("gepo", data_type="FeatureClass", workspace="in_memory")
    GraphicBuffer(scaledn, trimmed, "{0} Meters".format(cellsize / 2), "SQUARE", "MITER", 10, "0 Meters")

    thins = CreateScratchName("gepo", data_type="FeatureClass", workspace="in_memory")
    Erase(smoothed, trimmed, thins)

    inflated = CreateScratchName("gepo", data_type="FeatureClass", workspace="in_memory")
    GraphicBuffer(thins, inflated, "{0} Meters".format(cellsize / 2), "SQUARE", "MITER", 10, "0 Meters")
    partlist.append(inflated)

    scalelist[0] = CreateScratchName("gepo", data_type="FeatureClass", workspace="in_memory")
    if gblist[0] == cellsize:
        GraphicBuffer(trimmed, scalelist[0], "{0} Meters".format(-cellsize), "SQUARE", "MITER", 10, "0 Meters")
        partlist.append(trimmed)
        del gblist[0]
        fdist = 3*cellsize
    else:
        scalelist[0] = trimmed
        fdist = gblist[0]

    ii = 0
    for gbdist in gblist:
        scaleup = CreateScratchName("gepo", data_type="FeatureClass", workspace="in_memory")
        GraphicBuffer(scalelist[ii], scaleup, "{0} Meters".format(fdist), "SQUARE", "MITER", 10, "0 Meters")
        fdist = (2 * gbdist) + cellsize

        smoothed = CreateScratchName("gepo", data_type="FeatureClass", workspace="in_memory")
        GraphicBuffer(scaleup, smoothed, "{0} Meters".format(-gbdist), "SQUARE", "MITER", 10, "0 Meters")

        if ii < len(gblist)-1:
            scalelist[ii+1] = CreateScratchName("gepo", data_type="FeatureClass", workspace="in_memory")
            GraphicBuffer(smoothed, scalelist[ii+1], "{0} Meters".format(-gbdist), "SQUARE", "MITER", 10, "0 Meters")

        partlist.append(smoothed)
        ii += 1

    merged = CreateScratchName("gepo", data_type="FeatureClass", workspace="in_memory")
    Merge(partlist, merged)
    Dissolve(merged, surface, "ORIG_FID", "", "MULTI_PART", "DISSOLVE_LINES")

    return
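
# Usage sketch (hypothetical inputs; assumes `from arcpy.sa import Raster`):
# r_watsurf is a water-surface raster, max/min widths are in metres, and
# `islands`/`surface` are output feature class paths. `messages` is unused.
execute_RiverPolygon(Raster(r"C:\hydro\water_surface.tif"), 200, 20,
                     r"C:\hydro\data.gdb\islands",
                     r"C:\hydro\data.gdb\river_surface", None)
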
Example #17
def process():
    def ensure_dir(file_path):
        directory = path.dirname(file_path)
        if not path.exists(directory):
            makedirs(directory)

    def zipper(in_list, out_file_path):
        out_file = '{0}.zip'.format(out_file_path)
        ensure_dir(out_file)
        with zipfile.ZipFile(out_file, 'w') as zipMe:
            for f in in_list:
                arcname = f.replace(path.dirname(out_file_path), "")
                zipMe.write(f, arcname=arcname, compress_type=zipfile.ZIP_DEFLATED)

    def zipper_gdb(in_gdb, out_file_name):
        assert in_gdb.endswith('.gdb'), "Error: file extension {0} not detected in in_folder".format(".gdb")
        root_dir = path.dirname(in_gdb)
        gdb_name = path.basename(in_gdb)
        myzip = zipfile.ZipFile(path.join(root_dir, out_file_name), 'w', zipfile.ZIP_DEFLATED)
        for folder, subfolder, file in walk(path.join(root_dir, gdb_name)):
            for each in subfolder + file:
                source = path.join(folder, each)
                if not source.endswith(".lock"):
                    # remove the absolute path to compose arcname
                    # also handles the remaining leading path separator with lstrip
                    arcname = source[len(root_dir):].lstrip(sep)
                    # write the file under a different name in the archive
                    myzip.write(source, arcname=arcname)
        myzip.close()

    def zip_folder(in_folder, out_file_name):
        myzip = zipfile.ZipFile(path.join(in_folder, out_file_name), 'w', zipfile.ZIP_DEFLATED)
        for folder, subfolder, file in walk(in_folder):
            for each in subfolder + file:
                source = path.join(folder, each)
                # remove the absolute path to compose arcname
                # also handles the remaining leading path separator with lstrip
                arcname = source[len(in_folder):].lstrip(sep)
                # write the file under a different name in the archive
                myzip.write(source, arcname=arcname)
        myzip.close()


    class LicenseError(Exception):
        pass

    try:
        if CheckExtension("3D") == "Available":
            CheckOutExtension("3D")
        else:
            # raise a custom exception
            raise LicenseError

        # Ensure output folder exists
        if not path.exists(out_folder):
            makedirs(out_folder)

        unique_values = set(row[0] for row in da.SearchCursor(in_buildings, tile_fid))
        for v in unique_values:
            print(v)

        for val in unique_values:
            out_name = out_file_basename + "_{0}".format(val)
            if out_format == "Multipatch SHP":
                out_file = path.join(out_folder, out_name+'.shp')
                Select(in_buildings, out_file, "{0} = {1}".format(tile_fid, val))
                if zip_files:
                    stem = path.join(out_folder, out_name)
                    in_list = [out_file,
                               '{}.shp.xml'.format(stem),
                               '{}.shx'.format(stem),
                               '{}.sbx'.format(stem),
                               '{}.sbn'.format(stem),
                               '{}.prj'.format(stem),
                               '{}.dbf'.format(stem),
                               '{}.cpg'.format(stem)]
                    zipper(in_list, stem)
                    Delete(out_file)
            if out_format == "Multipatch GDB":
                gdb = path.join(out_folder, out_name + '.gdb')
                CreateFileGDB(out_folder, out_name + '.gdb')
                out_file = path.join(gdb, out_name)
                Select(in_buildings, out_file, "{0} = {1}".format(tile_fid, val))
                if zip_files:
                    out_zip = out_name + '.zip'
                    zipper_gdb(gdb, out_zip)
                    Delete(gdb)
            if out_format == "DAE":
                folder = path.join(out_folder, out_name)
                # Ensure output folder exists
                if not path.exists(folder):
                    makedirs(folder)
                MakeFeatureLayer(in_buildings, "bldg_layer", "{0} = {1}".format(tile_fid, val), None)
                MultipatchToCollada("bldg_layer", folder, "PREPEND_NONE", "OBJECTID")
                Delete("bldg_layer")
                if zip_files:
                    zip_folder(folder, folder + ".zip")
                    Delete(folder)

        # Check back in 3D Analyst license
        CheckInExtension("3D")
    except LicenseError:
        AddError("3D Analyst license is unavailable")
        print("3D Analyst license is unavailable")
    except ExecuteError:
        print(GetMessages(2))
Example #18
def process():
    class LicenseError(Exception):
        pass

    try:
        if CheckExtension("3D") == "Available":
            CheckOutExtension("3D")
        else:
            # raise a custom exception
            raise LicenseError

        # Constants - DO NOT MODIFY
        split_area = "split_area"
        orig_area = "orig_area"

        def calc_area(in_fc, field_name):
            AddField(in_fc, field_name, "DOUBLE")
            with da.UpdateCursor(in_fc, [field_name, "SHAPE@AREA"]) as cursor1:
                for r1 in cursor1:
                    r1[0] = r1[1]
                    cursor1.updateRow(r1)

        def field_exists(in_fc, in_field):
            from arcpy import ListFields
            if in_field in [f.name for f in ListFields(in_fc)]:
                return True
            else:
                return False

        def delete_field_if_exists(in_fc, in_field):
            if field_exists(in_fc, in_field):
                DeleteField(in_fc, in_field)

        assert field_exists(in_buildings, building_fid), \
            "no attribute named {} in feature class".format(building_fid)

        for field in [tile_fid, file_name]:
            delete_field_if_exists(in_buildings, field)

        temp_fp = join("in_memory", "mp_fp")
        ddd.MultiPatchFootprint(in_buildings, temp_fp, "bldg_fid")

        calc_area(in_fc=temp_fp, field_name=orig_area)

        temp_isect = join("in_memory", "temp_isect")
        Intersect(r"{0} #;{1} #".format(temp_fp, in_tiles), temp_isect, "ALL",
                  None, "INPUT")

        # Delete Temporary Multipatch Footprint
        Delete(temp_fp)

        calc_area(in_fc=temp_isect, field_name=split_area)

        temp_isect_asc = join("in_memory", "temp_isect_asc")
        Sort(temp_isect, temp_isect_asc, [[building_fid, "ASCENDING"]])
        # Delete Temporary Intersect Feature Class
        Delete(temp_isect)

        fields = [building_fid, tile_fid, file_name, orig_area, split_area]

        # Generate a list of duplicates
        bldg_list = []
        with da.SearchCursor(temp_isect_asc, building_fid) as cursor2:
            for row in cursor2:
                bldg_list.append(row[0])

        duplicates = [
            item for item, count in Counter(bldg_list).items() if count > 1
        ]

        duplicates_list = []
        for i in duplicates:
            duplicates_list.append([i, bldg_list.count(i)])

        # TODO: Resolve why tile_fid is not showing up below when BuildingFID and TileFID are OID fields. "In_memory" issue
        '''
        # \\ Begin Debug print code
        from arcpy import AddMessage
        fds = [f.name for f in arcpy.ListFields(temp_isect_asc) if f.name in fields]
        AddMessage(fds)
        nfds = [f.name for f in arcpy.ListFields(temp_isect_asc) if f.name not in fields]
        AddMessage(nfds)
        # End Debug print code //
        '''
        final_list = []
        with da.SearchCursor(temp_isect_asc, fields) as cursor3:
            prev_area = -1
            prev_item_list = []
            item_count = 0
            for row in cursor3:
                if row[0] not in duplicates:
                    final_list.append([row[0], row[1], row[2]])
                else:
                    area = row[3] - row[4]
                    index = duplicates.index(row[0])
                    total_items = duplicates_list[index][1]
                    if row[0] == duplicates[0] and item_count == 0:
                        # Deal with first item differently
                        item_count += 1
                        prev_area = area
                        prev_item_list = [row[0], row[1], row[2]]
                    elif item_count + 1 == total_items:  # Deal with last item in list
                        if prev_area <= area:
                            prev_area = area
                            prev_item_list = [row[0], row[1], row[2]]
                        final_list.append(prev_item_list)
                        item_count = 0
                        prev_area = -1
                        prev_item_list = []
                    elif item_count + 1 != total_items:
                        if prev_area <= area:
                            prev_area = area
                            prev_item_list = [row[0], row[1], row[2]]
                        item_count += 1
        # Append results back to Input Feature Class
        AddField(in_buildings, tile_fid, "LONG")
        AddField(in_buildings, file_name, "TEXT")
        with da.UpdateCursor(in_buildings,
                             [building_fid, tile_fid, file_name]) as cursor:
            for r in cursor:
                for i in final_list:
                    if r[0] == i[0]:
                        r[1] = int(i[1])
                        r[2] = str(i[2])
                cursor.updateRow(r)

        Delete(temp_isect_asc)  # temp_isect itself was already deleted above
        del bldg_list
        del duplicates_list
        del duplicates

        # Check back in 3D Analyst license
        CheckInExtension("3D")
    except LicenseError:
        AddError("3D Analyst license is unavailable")
        print("3D Analyst license is unavailable")
    except ExecuteError:
        AddError("3D Analyst license is unavailable")
        print(GetMessages(2))
Example #19
def process():
    class LicenseError(Exception):
        pass

    try:
        if CheckExtension("ImageAnalyst") == "Available":
            CheckOutExtension("ImageAnalyst")
        else:
            # raise a custom exception
            raise LicenseError

        # System Parameters
        tile_name = "FileName"

        # Begin Script
        temp_fc = join("in_memory", "temp_fc")
        CopyFeatures(in_fc, temp_fc)
        for f in file_names:
            AddField(temp_fc, f, "TEXT")

        df = pd.read_excel(in_xlsx, index_col=0)

        def attribute_tile(in_feature_class,
                           in_tile_name,
                           in_df,
                           in_name,
                           xlsx_row_name=xlsx_row_name):
            with da.UpdateCursor(in_feature_class,
                                 [in_tile_name, in_name]) as cursor:
                for fc_r in cursor:
                    for df_i, df_r in in_df.iterrows():
                        url = df_r[xlsx_row_name]
                        n = Path(url).stem
                        t_name = fc_r[0]
                        t_n = Path(t_name).stem
                        if n.startswith(in_name) and t_n in n:
                            fc_r[1] = url
                    cursor.updateRow(fc_r)

        # Attribute the LiDAR Derivatives
        for n in file_names:
            attribute_tile(temp_fc, tile_name, df, n)

        def attribute_tile_lidar(in_feature_class,
                                 in_tile_name,
                                 in_df,
                                 in_name,
                                 xlsx_row_name=xlsx_row_name):
            with da.UpdateCursor(in_feature_class,
                                 [in_tile_name, in_name]) as cursor:
                for fc_r in cursor:
                    for df_i, df_r in in_df.iterrows():
                        url = df_r[xlsx_row_name]
                        n = split(url)[1]
                        t_name = fc_r[0]
                        if n == t_name:
                            fc_r[1] = url
                    cursor.updateRow(fc_r)

        # Attribute the LiDAR tile now
        AddField(temp_fc, in_lidar_format, "TEXT")
        attribute_tile_lidar(temp_fc,
                             tile_name,
                             df,
                             in_lidar_format,
                             xlsx_row_name=xlsx_row_name)
        '''
        # Print Fields for debugging/assessing results of above operations
        file_names.append(in_lidar_format)
        print(file_names)
        with da.SearchCursor(temp_fc, file_names) as cursor:
            for fc_r in cursor:
                print(fc_r)
        '''

        # Delete Pandas Dataframe from Memory
        del df

        # Copy in_memory temporary feature class to output location
        CopyFeatures(temp_fc, out_fc)

        # Delete Temporary Feature Class
        Delete(temp_fc)

        # Check back in Image Analyst license
        CheckInExtension("ImageAnalyst")
    except LicenseError:
        AddError("ImageAnalyst license is unavailable")
        print("ImageAnalyst license is unavailable")
    except ExecuteError:
        AddError(GetMessages(2))
        print(GetMessages(2))
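
# Usage sketch: this snippet reads module-level names rather than arguments;
# set them before calling process() (all values hypothetical).
in_fc = r"C:\data\work.gdb\tile_index"
in_xlsx = r"C:\data\tile_urls.xlsx"
xlsx_row_name = "URL"
file_names = ["DEM", "DSM"]
in_lidar_format = "LAS"
out_fc = r"C:\data\work.gdb\tile_index_attributed"
process()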