def assign_max_zones():
    """Add an attribute to max stops that indicates which 'MAX Zone' it
    falls within, the max_zone feature class is used in conjunction with
    max stops to make this determination
    """

    # Create a mapping from zone object id's to their names
    max_zone_dict = dict()
    fields = ['OID@', UNIQUE_FIELD]
    with SearchCursor(MAX_ZONES, fields) as cursor:
        for oid, name in cursor:
            max_zone_dict[oid] = name

    # Find the nearest zone to each stop
    stop_zone_table = join(TEMP_DIR, 'stop_zone_near_table.dbf')
    GenerateNearTable(MAX_STOPS, MAX_ZONES, stop_zone_table)

    # Create a mapping from stop oid's to zone oid's
    stop2zone = dict()
    fields = ['IN_FID', 'NEAR_FID']
    with SearchCursor(stop_zone_table, fields) as cursor:
        for stop_oid, zone_oid in cursor:
            stop2zone[stop_oid] = zone_oid

    f_type = 'TEXT'
    AddField(MAX_STOPS, ZONE_FIELD, f_type)

    fields = ['OID@', ZONE_FIELD]
    with UpdateCursor(MAX_STOPS, fields) as cursor:
        for oid, zone in cursor:
            # overwrite with the name of the stop's nearest zone
            zone = max_zone_dict[stop2zone[oid]]

            cursor.updateRow((oid, zone))
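
# The MAX-stop helpers above and below rely on arcpy imports and module-level
# constants defined elsewhere in the source script.  A minimal sketch of that
# setup is given here; only the constant names appear in the original code,
# while the paths and literal field names are assumptions.
from os.path import join, dirname, basename

from arcpy import ListFields, SpatialReference
from arcpy.analysis import GenerateNearTable
from arcpy.da import SearchCursor, UpdateCursor
from arcpy.management import AddField, CreateFeatureclass, DeleteField

# Hypothetical paths and field names -- adjust to the actual project layout.
TEMP_DIR = r"C:\temp\max_analysis"
MAX_STOPS = r"C:\data\trimet.gdb\max_stops"
MAX_ZONES = r"C:\data\trimet.gdb\max_zones"
ISOCHRONES = r"C:\data\trimet.gdb\isochrones"

ID_FIELD = "stop_id"
UNIQUE_FIELD = "name"
STOP_FIELD = "stop_name"
ROUTES_FIELD = "routes"
ZONE_FIELD = "max_zone"
YEAR_FIELD = "incpt_year"
DIST_FIELD = "walk_dist"
DESC_FIELD = "rte_desc"
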
def add_iso_attributes():
    """Append attributes from the original max stops data to the
    isochrones feature class, matching features stop id's field
    (which are in the 'stop_id' and 'name' fields
    """

    rail_stop_dict = dict()
    s_fields = [ID_FIELD, STOP_FIELD, ROUTES_FIELD, ZONE_FIELD, YEAR_FIELD]
    with SearchCursor(MAX_STOPS, s_fields) as s_cursor:
        sid_ix = s_cursor.fields.index(ID_FIELD)
        
        for row in s_cursor:
            stop_id = row[sid_ix]
            rail_stop_dict[stop_id] = list(row)

    # area value will be used to check for errors in isochrone creation
    iso_fields = [f.name for f in ListFields(ISOCHRONES)]
    area_field = 'area'
    if area_field not in iso_fields:
        f_type = 'DOUBLE'
        AddField(ISOCHRONES, area_field, f_type)
    
    area_val = 'SHAPE@AREA'
    u_fields = s_fields + [area_field, area_val]
    with UpdateCursor(ISOCHRONES, u_fields) as u_cursor:
        sid_ix = u_cursor.fields.index(ID_FIELD)
        val_ix = u_cursor.fields.index(area_val)
        
        for row in u_cursor:
            stop_id = row[sid_ix]
            area = row[val_ix]
            
            i_row = rail_stop_dict[stop_id]
            i_row.extend([area, area])
            u_cursor.updateRow(i_row)
def create_isochrone_fc():
    """Create a new feature class to store all isochrones created later
    in the work flow
    """

    geom_type = 'POLYGON'
    ospn = SpatialReference(2913)
    CreateFeatureclass(dirname(ISOCHRONES), basename(ISOCHRONES),
                       geom_type, spatial_reference=ospn)

    field_names = [
        ID_FIELD,  STOP_FIELD,  ROUTES_FIELD,
        ZONE_FIELD, YEAR_FIELD, DIST_FIELD]

    for f_name in field_names:
        if f_name in (ID_FIELD, YEAR_FIELD):
            f_type = 'LONG'
        elif f_name in (STOP_FIELD, ROUTES_FIELD, ZONE_FIELD):
            f_type = 'TEXT'
        elif f_name == DIST_FIELD:
            f_type = 'DOUBLE'

        AddField(ISOCHRONES, f_name, f_type)

    # drop Id field that is created by default
    DeleteField(ISOCHRONES, 'Id')
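
# The if/elif chain above dispatches on field name to choose a type.  An
# equivalent, slightly more declarative sketch (not the source's approach)
# expresses the same table as a name-to-type mapping:
FIELD_TYPES = {
    ID_FIELD: 'LONG',    YEAR_FIELD: 'LONG',
    STOP_FIELD: 'TEXT',  ROUTES_FIELD: 'TEXT',  ZONE_FIELD: 'TEXT',
    DIST_FIELD: 'DOUBLE',
}

for f_name, f_type in FIELD_TYPES.items():
    AddField(ISOCHRONES, f_name, f_type)
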
Example #4
def temp_fc(geo, name, kind, SR):
    """Similar to _out_ but creates a `memory` featureclass."""
    polys = Geo_to_arc_shapes(geo, as_singlepart=True)
    wkspace = env.workspace = 'memory'  # legacy is in_memory
    tmp_name = "{}\\{}".format(wkspace, name)
    # tmp = MultipartToSinglepart(in_fc, r"memory\in_fc_temp")
    if Exists(tmp_name):
        Delete(tmp_name)
    CreateFeatureclass(wkspace, name, kind, spatial_reference=SR)
    AddField(tmp_name, 'ID_arr', 'LONG')
    with ags.da.InsertCursor(name, ['SHAPE@', 'ID_arr']) as cur:
        for row in polys:
            cur.insertRow(row)
    return tmp_name
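
# Hypothetical usage of temp_fc(); `geo` is assumed to be an npg Geo array,
# and 2913 is just an example projected coordinate system.
from arcpy import SpatialReference

sr = SpatialReference(2913)
scratch = temp_fc(geo, "scratch_polys", "POLYGON", sr)
print(scratch)   # e.g. 'memory\\scratch_polys'
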
def execute_ChannelCorrection(demras, boundary, riverbed, rivernet, breachedmnt, messages):
    """Breach the river channel into the DEM: rasterize the riverbed and
    river network, invert elevations around the channel maximum, run Fill,
    then reverse the transform and save the result to `breachedmnt`.
    """

    arcpy.env.outputCoordinateSystem = demras.spatialReference
    env.snapRaster = demras

    ends = CreateScratchName("loob", data_type="FeatureClass", workspace="in_memory")
    CopyFeatures(boundary, ends)

    AddField(ends, "dummy", "LONG", field_alias="dummy", field_is_nullable="NULLABLE")
    CalculateField(ends, "dummy", "1", "PYTHON")

    endsras = CreateScratchName("loras", data_type="RasterDataset", workspace=env.scratchWorkspace)
    PolylineToRaster(ends, "dummy", endsras, "MAXIMUM_LENGTH", cellsize=demras)
    statpts = FocalStatistics(endsras, NbrRectangle(3, 3, "CELL"), "MAXIMUM", "DATA")

    env.extent = demras

    rasterbed = CreateScratchName("loras", data_type="RasterDataset", workspace=env.scratchWorkspace)
    PolygonToRaster(riverbed, arcpy.Describe(riverbed).OIDFieldName, rasterbed, "CELL_CENTER", cellsize=demras)
    rasterline = CreateScratchName("loras", data_type="RasterDataset", workspace=env.scratchWorkspace)
    PolylineToRaster(rivernet, arcpy.Describe(rivernet).OIDFieldName, rasterline, cellsize=demras)

    streambed = Con(IsNull(rasterline), Con(IsNull(rasterbed) == 0, 1), 1)

    bedwalls = FocalStatistics(streambed, NbrRectangle(3, 3, "CELL"), "MAXIMUM", "DATA")

    env.extent = bedwalls

    chanelev = Con(streambed, demras)
    chanmax = chanelev.maximum
    chanwalls = chanelev.minimum - 100
    switchtemp = CreateScratchName("loras", data_type="RasterDataset", workspace=env.scratchWorkspace)
    switchelev = -1 * (Con(IsNull(streambed), Con(bedwalls, Con(IsNull(statpts), chanwalls)), chanelev) - chanmax)
    switchelev.save(switchtemp)
    Delete(statpts)
    Delete(chanelev)

    switchfilled = Fill(switchtemp)
    Delete(switchtemp)

    env.extent = demras
    breachedtemp = Con(IsNull(streambed), demras, (-1*switchfilled) + chanmax)
    breachedtemp.save(breachedmnt)

    Delete(bedwalls)
    Delete(endsras)
    Delete(rasterline)
    Delete(rasterbed)
    Delete(switchfilled)
    return
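
# The breaching above works by inverting elevations about the channel maximum
# so that Fill, which only removes depressions, effectively lowers barriers
# along the channel; the final Con reverses the transform.  A toy check of
# that round-trip with made-up numbers:
chanmax = 120.0
dem = [118.5, 117.25, 119.0]
switched = [-1 * (z - chanmax) for z in dem]        # [1.5, 2.75, 1.0]
restored = [(-1 * s) + chanmax for s in switched]   # back to the original
assert restored == dem
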
def add_name_field():
    """Only a field called 'name' will be retained when locations are
    loaded into a service area analysis, as the MAX stops will be.  
    This field is populated that field with unique identifiers so that
    the other attributes from this data can be linked to the network 
    analyst output
    """

    fields = [f.name for f in ListFields(MAX_STOPS)]

    if UNIQUE_FIELD not in fields:
        f_type = 'LONG'
        AddField(MAX_STOPS, UNIQUE_FIELD, f_type)
    
        u_fields = [ID_FIELD, UNIQUE_FIELD]
        with UpdateCursor(MAX_STOPS, u_fields) as cursor:
            for stop_id, name in cursor:
                name = stop_id
                cursor.updateRow((stop_id, name))
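
# Because Network Analyst keeps only the 'Name' value of loaded facilities,
# the identifier written above is what later lets stop attributes be
# re-attached to the service area output.  A hedged sketch of that lookup,
# using the same assumed field constants:
stop_attrs = dict()
with SearchCursor(MAX_STOPS, [UNIQUE_FIELD, ROUTES_FIELD, ZONE_FIELD]) as cursor:
    for name, routes, zone in cursor:
        stop_attrs[name] = (routes, zone)
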
Example #7
def geometry_fc(a, IFT, p_type=None, gdb=None, fname=None, sr=None):
    """Form poly features from the list of arrays created by `fc_geometry`.

    Parameters
    ----------
    a : array or list of arrays
        Some can be object arrays, normally created by ``pnts_arr``
    IFT : list/array
        Identifies which feature each input belongs to.  This enables one to
        account for multipart shapes
    p_type : string
        Uppercase geometry type, e.g. POLYGON.
    gdb : text
        Geodatabase path and name.
    fname : text
        Featureclass name.
    sr : spatial reference
        name or object

    Returns
    -------
    Singlepart and/or multipart featureclasses.

    Notes
    -----
    The work is done by ``array_poly``.
    """
    if p_type is None:
        p_type = "POLYGON"
    out = array_poly(a, p_type.upper(), sr=sr, IFT=IFT)  # call array_poly
    name = gdb + "/" + fname
    wkspace = env.workspace = 'memory'  # legacy is in_memory
    CreateFeatureclass(wkspace, fname, p_type, spatial_reference=sr)
    AddField(fname, 'ID_arr', 'LONG')
    with InsertCursor(fname, ['SHAPE@', 'ID_arr']) as cur:
        for row in out:
            cur.insertRow(row)
    CopyFeatures(fname, name)
    return
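
# Hypothetical call to geometry_fc(); `arrs` and `ift` are assumed to come
# from fc_geometry(), and the geodatabase path is a placeholder.
from arcpy import SpatialReference

sr = SpatialReference(2913)
geometry_fc(arrs, ift, p_type="POLYGON",
            gdb=r"C:\temp\test.gdb", fname="polys_out", sr=sr)
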
Example #8
def Geo_to_fc(geo, gdb=None, name=None, kind=None, SR=None):
    """Return a FeatureClass from a Geo array."""
    if kind in (None, 0, 1, 2):
        print("\n ``kind`` must be one of Polygon, Polyline or Point.")
        return None
    #
    # dx, dy = geo.LL
    # geo = geo.shift(dx, dy)
    polys = Geo_to_arc_shapes(geo, as_singlepart=True)
    out_name = gdb.replace("\\", "/") + "/" + name
    wkspace = env.workspace = 'memory'  # legacy is in_memory
    tmp_name = "{}\\{}".format(wkspace, "tmp")
    if Exists(tmp_name):
        Delete(tmp_name)
    CreateFeatureclass(wkspace, "tmp", kind, spatial_reference=SR)
    AddField("tmp", 'ID_arr', 'LONG')
    with InsertCursor("tmp", ['SHAPE@', 'ID_arr']) as cur:
        for row in polys:
            cur.insertRow(row)
    CopyFeatures("tmp", out_name)
    return
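
# Hypothetical call to Geo_to_fc(); `geo` is an npg Geo array and the
# geodatabase path is a placeholder.  `kind` must be a geometry-type string
# accepted by CreateFeatureclass.
Geo_to_fc(geo, gdb=r"C:\temp\test.gdb", name="geo_polys",
          kind="POLYGON", SR=SpatialReference(2913))
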
def add_inception_year():
    """Each MAX line has a decision to build year, add that information
    as an attribute to the max stops.  If a max stop serves multiple
    lines the year from the oldest line will be assigned.
    """

    f_type = 'LONG'
    AddField(MAX_STOPS, YEAR_FIELD, f_type)

    # Note that 'MAX Year' for stops within the CBD are variable as
    # stops within that region were not all built at the same time
    # (this is not the case for all other MAX zones)
    fields = [ID_FIELD, DESC_FIELD, ZONE_FIELD, YEAR_FIELD]
    with UpdateCursor(MAX_STOPS, fields) as cursor:
        for stop_id, rte_desc, zone, year in cursor:
            if 'MAX Blue Line' in rte_desc \
                    and zone not in ('West Suburbs', 'Southwest Portland'):
                year = 1980
            elif 'MAX Blue Line' in rte_desc:
                year = 1990
            elif 'MAX Red Line' in rte_desc:
                year = 1997
            elif 'MAX Yellow Line' in rte_desc \
                    and zone != 'Central Business District':
                year = 1999
            elif 'MAX Green Line' in rte_desc:
                year = 2003
            elif 'MAX Orange Line' in rte_desc:
                year = 2008
            else:
                print('Stop {} not assigned a MAX Year, cannot proceed '
                      'without this assignment, examine code/data for '
                      'errors'.format(stop_id))
                exit()

            cursor.updateRow((stop_id, rte_desc, zone, year))
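
# The branch order above encodes 'oldest line wins'.  A sketch of the same
# precedence as an ordered table (this simplifies away the Blue west-side
# and Yellow CBD exceptions handled above):
LINE_YEARS = [
    ('MAX Blue Line', 1990),
    ('MAX Red Line', 1997),
    ('MAX Yellow Line', 1999),
    ('MAX Green Line', 2003),
    ('MAX Orange Line', 2008),
]

def oldest_line_year(rte_desc):
    for line, year in LINE_YEARS:
        if line in rte_desc:
            return year
    return None
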
Example #10
def calc_area(in_fc, field_name):
    AddField(in_fc, field_name, "DOUBLE")
    with da.UpdateCursor(in_fc, [field_name, "SHAPE@AREA"]) as cursor1:
        for r1 in cursor1:
            r1[0] = r1[1]
            cursor1.updateRow(r1)
Example #11
def process():
    class LicenseError(Exception):
        pass

    try:
        if CheckExtension("3D") == "Available":
            CheckOutExtension("3D")
        else:
            # raise a custom exception
            raise LicenseError

        # Constants - DO NOT MODIFY
        split_area = "split_area"
        orig_area = "orig_area"

        def calc_area(in_fc, field_name):
            AddField(in_fc, field_name, "DOUBLE")
            with da.UpdateCursor(in_fc, [field_name, "SHAPE@AREA"]) as cursor1:
                for r1 in cursor1:
                    r1[0] = r1[1]
                    cursor1.updateRow(r1)

        def field_exists(in_fc, in_field):
            from arcpy import ListFields
            if in_field in [f.name for f in ListFields(in_fc)]:
                return True
            else:
                return False

        def delete_field_if_exists(in_fc, in_field):
            if field_exists(in_fc, in_field):
                DeleteField(in_fc, in_field)

        assert field_exists(in_buildings, building_fid), \
            "no attribute named {} in feature class".format(building_fid)

        for field in [tile_fid, file_name]:
            delete_field_if_exists(in_buildings, field)

        temp_fp = join("in_memory", "mp_fp")
        ddd.MultiPatchFootprint(in_buildings, temp_fp, "bldg_fid")

        calc_area(in_fc=temp_fp, field_name=orig_area)

        temp_isect = join("in_memory", "temp_isect")
        Intersect(r"{0} #;{1} #".format(temp_fp, in_tiles), temp_isect, "ALL",
                  None, "INPUT")

        # Delete Temporary Multipatch Footprint
        Delete(temp_fp)

        calc_area(in_fc=temp_isect, field_name=split_area)

        temp_isect_asc = join("in_memory", "temp_isect_asc")
        Sort(temp_isect, temp_isect_asc, [[building_fid, "ASCENDING"]])
        # Delete Temporary Intersect Feature Class
        Delete(temp_isect)

        fields = [building_fid, tile_fid, file_name, orig_area, split_area]

        # Generate a list of duplicates
        bldg_list = []
        with da.SearchCursor(temp_isect_asc, building_fid) as cursor2:
            for row in cursor2:
                bldg_list.append(row[0])

        duplicates = [
            item for item, count in Counter(bldg_list).items() if count > 1
        ]

        duplicates_list = []
        for i in duplicates:
            duplicates_list.append([i, bldg_list.count(i)])

        # TODO: Resolve why tile_fid is not showing up below when BuildingFID and TileFID are OID fields. "In_memory" issue
        '''
        # \\ Begin Debug print code
        from arcpy import AddMessage
        fds = [f.name for f in arcpy.ListFields(temp_isect_asc) if f.name in fields]
        AddMessage(fds)
        nfds = [f.name for f in arcpy.ListFields(temp_isect_asc) if f.name not in fields]
        AddMessage(nfds)
        # End Debug print code //
        '''
        final_list = []
        with da.SearchCursor(temp_isect_asc, fields) as cursor3:
            prev_area = -1
            prev_item_list = []
            item_count = 0
            for row in cursor3:
                if row[0] not in duplicates:
                    final_list.append([row[0], row[1], row[2]])
                else:
                    area = row[3] - row[4]
                    index = duplicates.index(row[0])
                    total_items = duplicates_list[index][1]
                    if row[0] == duplicates[
                            0] and item_count == 0:  # Deal with first item differently
                        item_count += 1
                        prev_area = area
                        prev_item_list = [row[0], row[1], row[2]]
                    elif item_count + 1 == total_items:  # Deal with last item in list
                        if prev_area <= area:
                            prev_area = area
                            prev_item_list = [row[0], row[1], row[2]]
                        final_list.append(prev_item_list)
                        item_count = 0
                        prev_area = -1
                        prev_item_list = []
                    elif item_count + 1 != total_items:
                        if prev_area <= area:
                            prev_area = area
                            prev_item_list = [row[0], row[1], row[2]]
                        item_count += 1
        # Append results back to Input Feature Class
        AddField(in_buildings, tile_fid, "LONG")
        AddField(in_buildings, file_name, "TEXT")
        with da.UpdateCursor(in_buildings,
                             [building_fid, tile_fid, file_name]) as cursor:
            for r in cursor:
                for i in final_list:
                    if r[0] == i[0]:
                        r[1] = int(i[1])
                        r[2] = str(i[2])
                cursor.updateRow(r)

        # temp_isect was already removed above; clean up the sorted copy
        Delete(temp_isect_asc)
        del bldg_list
        del duplicates_list
        del duplicates

        # Check back in 3D Analyst license
        CheckInExtension("3D")
    except LicenseError:
        AddError("3D Analyst license is unavailable")
        print("3D Analyst license is unavailable")
    except ExecuteError:
        AddError("3D Analyst license is unavailable")
        print(GetMessages(2))
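
# This example references several names defined outside the snippet
# (in_buildings, in_tiles, building_fid, tile_fid, file_name).  A minimal
# sketch of how they might be wired up as script-tool parameters; the
# ordering and literal values are assumptions.
from arcpy import GetParameterAsText

in_buildings = GetParameterAsText(0)   # multipatch building feature class
in_tiles = GetParameterAsText(1)       # LiDAR tile index polygons
building_fid = "bldg_fid"              # matches the MultiPatchFootprint call
tile_fid = "tile_fid"
file_name = "file_name"

if __name__ == "__main__":
    process()
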
    elif typename == "float":
        fieldtype = "FLOAT"
    elif typename == "Decimal" and fielddesc.precision < 7:
        fieldtype = "FLOAT"
    elif typename == "Decimal":
        fieldtype = "DOUBLE"
    elif typename == "str":
        fieldtype = "TEXT"
    elif typename == "bool":
        fieldtype = "SHORT"
    else:
        raise ValueError("Unsupported field type: %s" % typename)

    nullable = "NULLABLE" if fielddesc.null_ok else "NON_NULLABLE"

    AddField(temp_table, fielddesc.name, fieldtype, fielddesc.precision,
             fielddesc.scale, fielddesc.internal_size, None, nullable)

# Get field names for temp_table, which may differ from in_table
fieldnames = [field.name for field in arcpy.Describe(temp_table).fields \
              if field.type != "OID"]

# Copy rows into temporary table
with InsertCursor(temp_table, fieldnames) as out_cursor:
    for in_row in in_cursor:
        out_cursor.insertRow(in_row)

# Convert temporary table to table in geodatabase
TableToTable(in_rows=temp_table,
             out_path='E://QGIS//geocoding2018//geocoding.gdb',
             out_name='address')
Example #13
def process():
    class LicenseError(Exception):
        pass

    try:
        if CheckExtension("ImageAnalyst") == "Available":
            CheckOutExtension("ImageAnalyst")
        else:
            # raise a custom exception
            raise LicenseError

        # System Parameters
        tile_name = "FileName"

        # Begin Script
        temp_fc = join("in_memory", "temp_fc")
        CopyFeatures(in_fc, temp_fc)
        for f in file_names:
            AddField(temp_fc, f, "TEXT")

        df = pd.read_excel(in_xlsx, index_col=0)

        def attribute_tile(in_feature_class,
                           in_tile_name,
                           in_df,
                           in_name,
                           xlsx_row_name=xlsx_row_name):
            with da.UpdateCursor(in_feature_class,
                                 [in_tile_name, in_name]) as cursor:
                for fc_r in cursor:
                    for df_i, df_r in in_df.iterrows():
                        url = df_r[xlsx_row_name]
                        n = Path(url).stem
                        t_name = fc_r[0]
                        t_n = Path(t_name).stem
                        if n.startswith(in_name) and t_n in n:
                            fc_r[1] = url
                    cursor.updateRow(fc_r)

        # Attribute the LiDAR Derivatives
        for n in file_names:
            attribute_tile(temp_fc, tile_name, df, n)

        def attribute_tile_lidar(in_feature_class,
                                 in_tile_name,
                                 in_df,
                                 in_name,
                                 xlsx_row_name=xlsx_row_name):
            with da.UpdateCursor(in_feature_class,
                                 [in_tile_name, in_name]) as cursor:
                for fc_r in cursor:
                    for df_i, df_r in in_df.iterrows():
                        url = df_r[xlsx_row_name]
                        n = split(url)[1]
                        t_name = fc_r[0]
                        if n == t_name:
                            fc_r[1] = url
                    cursor.updateRow(fc_r)

        # Attribute the LiDAR tile now
        AddField(temp_fc, in_lidar_format, "TEXT")
        attribute_tile_lidar(temp_fc,
                             tile_name,
                             df,
                             in_lidar_format,
                             xlsx_row_name=xlsx_row_name)
        '''
        # Print Fields for debugging/assessing results of above operations
        file_names.append(in_lidar_format)
        print(file_names)
        with da.SearchCursor(temp_fc, file_names) as cursor:
            for fc_r in cursor:
                print(fc_r)
        '''

        # Delete Pandas Dataframe from Memory
        del df

        # Copy in_memory temporary feature class to output location
        CopyFeatures(temp_fc, out_fc)

        # Delete Temporary Feature Class
        Delete(temp_fc)

        # Check back in Image Analyst license
        CheckInExtension("ImageAnalyst")
    except LicenseError:
        AddError("ImageAnalyst license is unavailable")
        print("ImageAnalyst license is unavailable")
    except ExecuteError:
        AddError(GetMessages(2))
        print(GetMessages(2))
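
# The inputs referenced above (in_fc, in_xlsx, out_fc, in_lidar_format,
# xlsx_row_name, file_names) are defined outside the snippet.  A hedged
# sketch of how they might be supplied; parameter order and literal values
# are assumptions.
from arcpy import GetParameterAsText

in_fc = GetParameterAsText(0)            # tile index feature class
in_xlsx = GetParameterAsText(1)          # spreadsheet of download URLs
out_fc = GetParameterAsText(2)           # output feature class
in_lidar_format = GetParameterAsText(3)  # e.g. "LAZ"
xlsx_row_name = "url"                    # assumed column name in the xlsx
file_names = ["DSM", "DTM"]              # assumed derivative name prefixes

if __name__ == "__main__":
    process()
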