def spatialize_lakes(lake_points_csv, out_fc, in_x_field, in_y_field, in_crs='NAD83'):
    """Casts xy data as spatial points.

    :param lake_points_csv: The lake water quality dataset containing coordinates as text columns.
    :param out_fc: The output feature class
    :param in_x_field: Field containing the longitude or x coordinates
    :param in_y_field: Field containing the latitude or y coordinates
    :param in_crs: Abbreviation of the coordinate reference system used to specify
        the coordinates. Options supported are 'WGS84', 'NAD83', 'NAD27'.
    :return: The output feature class
    """
    # Guard clause: only the CRS names registered in CRS_DICT are accepted.
    if in_crs not in CRS_DICT.keys():
        message = 'Use one of the following CRS names: {}'.format(','.join(CRS_DICT.keys()))
        raise ValueError(message)

    input_sr = arcpy.SpatialReference(CRS_DICT[in_crs])
    DM.MakeXYEventLayer(lake_points_csv, in_x_field, in_y_field, 'xylayer', input_sr)

    # Setting the output CRS before the copy makes CopyFeatures write the
    # points out in SR 102039 (presumably USA Contiguous Albers — confirm).
    arcpy.env.outputCoordinateSystem = arcpy.SpatialReference(102039)
    DM.CopyFeatures('xylayer', out_fc)

    # The event layer is session-scoped; drop it so reruns don't collide.
    arcpy.Delete_management('xylayer')
    return out_fc
def import_cad(input_fc, param_file, output_fc):
    """Copy CAD features to a new feature class and, when a parameter file is
    supplied, apply the similarity transformation it describes.

    :param input_fc: Source CAD feature class
    :param param_file: Transform parameter file; falsy value skips transformation
    :param output_fc: Destination feature class (created by the copy)
    """
    # Copy the cad features
    mgmt.CopyFeatures(input_fc, output_fc)
    if not param_file:
        return

    # X/Y offset from the center of the fc extent for link source points.
    link_offset = 1000.0
    transform = Transform()
    transform.load(param_file)

    fc_desc = arcpy.Describe(output_fc)
    extent = fc_desc.extent
    center_x = (extent.XMin + extent.XMax) / 2.0
    center_y = (extent.YMin + extent.YMax) / 2.0

    # Two diagonal anchors (upper-left and lower-right of a box around the center).
    anchor_points = (
        arcpy.Point(center_x - link_offset, center_y + link_offset),
        arcpy.Point(center_x + link_offset, center_y - link_offset),
    )

    spatial_ref = fc_desc.spatialReference
    displacement_links = []
    for source_pt in anchor_points:
        # Forward-transform each anchor to get its destination position.
        target_pt = arcpy.Point(*transform.forward((source_pt.X, source_pt.Y)))
        link_line = arcpy.Polyline(arcpy.Array([source_pt, target_pt]), spatial_ref)
        displacement_links.append(link_line)

    arcpy.edit.TransformFeatures(output_fc, displacement_links, method='SIMILARITY')
    # Extent is stale after the transform; recompute it.
    mgmt.RecalculateFeatureClassExtent(output_fc)
    return
def find_states(fc, state_fc):
    """Populate a '{fc basename}_states' field listing all intersecting states.

    States fc must have field 'states' with length 255 and state abbreviations
    within. The input fc is replaced in place by the spatial-join result.

    :param fc: Feature class to update
    :param state_fc: State polygon feature class containing a 'states' text field

    Fixes: replaced the Python-2-only '<>' inequality operator with '!=' (same
    behavior on Python 2, required for Python 3), and renamed the local that
    shadowed the built-in 'map'.
    """
    states_field = '{}_states'.format(os.path.basename(fc))
    # Drop a stale field from a previous run so the join result is clean.
    if arcpy.ListFields(fc, states_field):
        DM.DeleteField(fc, states_field)

    # reverse buffer the states slightly to avoid "D", "I", "J" situations in "INTERSECT" illustration
    # from graphic examples of ArcGIS join types "Select polygon using polygon" section in Help

    # make a field mapping that gathers all the intersecting states into one new value
    field_list = [f.name for f in arcpy.ListFields(fc)
                  if f.type != 'OID' and f.type != 'Geometry']
    field_mapping = arcpy.FieldMappings()
    for f in field_list:
        fmap = arcpy.FieldMap()
        fmap.addInputField(fc, f)
        field_mapping.addFieldMap(fmap)
    # One merged value: all intersecting 'states' values joined with spaces.
    map_states = arcpy.FieldMap()
    map_states.addInputField(state_fc, 'states')
    map_states.mergeRule = 'Join'
    map_states.joinDelimiter = ' '
    field_mapping.addFieldMap(map_states)

    # perform join and use output to replace original fc
    spjoin = AN.SpatialJoin(fc, state_fc, 'in_memory/spjoin_intersect',
                            'JOIN_ONE_TO_ONE', field_mapping=field_mapping,
                            match_option='INTERSECT')
    DM.AlterField(spjoin, 'states', new_field_name=states_field,
                  clear_field_alias=True)
    DM.Delete(fc)
    DM.CopyFeatures(spjoin, fc)
    DM.Delete(spjoin)
def merge_matching_master(output_list, output_fc, master_file, join_field='lagoslakeid'):
    """Merge per-subregion outputs into one dataset, then drop any rows whose
    join_field value is absent from the master file.

    :param output_list: Feature classes/tables to merge; must contain exactly
        NUM_SUBREGIONS entries or nothing is merged
    :param output_fc: Path for the merged, trimmed output
    :param master_file: Dataset holding the authoritative set of ids
    :param join_field: Id field present in both the outputs and the master file
    :return: output_fc (returned even when the merge is skipped)

    Fixes: removed the unused ``field_list`` variable and the large blocks of
    commented-out join/field-trimming code that referenced it.
    """
    arcpy.env.scratchWorkspace = os.getenv("TEMP")
    arcpy.env.workspace = arcpy.env.scratchGDB
    # Remove leftovers from a prior run in the same scratch GDB.
    if arcpy.Exists('outputs_merged'):
        arcpy.Delete_management('outputs_merged')

    if len(output_list) == NUM_SUBREGIONS:
        arcpy.AddMessage("Merging outputs...")
        outputs_merged = lagosGIS.efficient_merge(output_list, 'outputs_merged')
        arcpy.AddMessage("Merge completed, trimming to master list...")
        data_type = arcpy.Describe(outputs_merged).dataType

        # Build the master id set once, then delete non-matching rows in place.
        master_set = {r[0] for r in arcpy.da.SearchCursor(master_file, join_field)}
        with arcpy.da.UpdateCursor(outputs_merged, join_field) as u_cursor:
            for row in u_cursor:
                if row[0] not in master_set:
                    u_cursor.deleteRow()

        # Copy with the tool matching the merged dataset's type.
        if data_type == "FeatureClass":
            DM.CopyFeatures(outputs_merged, output_fc)
        else:
            DM.CopyRows(outputs_merged, output_fc)
    else:
        print("Incomplete list of inputs. There should be {} inputs.".format(
            NUM_SUBREGIONS))

    return output_fc
def generateMatchCode():
    """Generate a code for the collapse dual carriageway tool that will indicate
    if two segments are eligible to be snapped to each other."""
    # Numeric code assigned to each service level.
    service_codes = {'frequent': 1, 'standard': 2, 'rush-hour': 3}

    # create a copy of distinct_routes.shp so that the original is not modified
    management.CopyFeatures(distinct_routes_src, distinct_routes)
    management.AddField(distinct_routes, 'merge_id', 'LONG')

    update_fields = ['route_id', 'serv_level', 'merge_id']
    with da.UpdateCursor(distinct_routes, update_fields) as cursor:
        for route_id, serv_level, _ in cursor:
            # Unique integer id: route number, a '000' separator, service code.
            match_code = int('{0}000{1}'.format(int(route_id), service_codes[serv_level]))
            cursor.updateRow((route_id, serv_level, match_code))
def generateMatchCode():
    """Generate a code for the collapse dual carriageway tool that will indicate
    if two segments are eligible to be snapped to each other."""
    # Numeric code assigned to each service level.
    service_codes = {'frequent': 1, 'standard': 2, 'rush-hour': 3}

    # create a copy of service_level_routes.shp so that the original is not modified
    management.CopyFeatures(serv_level_routes_src, serv_level_routes)
    management.AddField(serv_level_routes, 'merge_id', 'LONG')

    update_fields = ['serv_level', 'route_type', 'merge_id']
    with da.UpdateCursor(serv_level_routes, update_fields) as cursor:
        for serv_level, route_type, _ in cursor:
            # match field must be of type int: service code then route type.
            match_code = int('{0}{1}'.format(service_codes[serv_level], int(route_type)))
            cursor.updateRow((serv_level, route_type, match_code))
def createOutput(self, outputFC): """Creates an Output Feature Class with the Directional Mean Results. INPUTS: outputFC (str): path to the output feature class """ #### Validate Output Workspace #### ERROR.checkOutputPath(outputFC) #### Shorthand Attributes #### ssdo = self.ssdo caseField = self.caseField #### Create Output Feature Class #### ARCPY.SetProgressor("default", ARCPY.GetIDMessage(84003)) tempCFLayer = "tmpCFLayer" try: DM.MakeFeatureLayer(ssdo.inputFC, tempCFLayer) first = True for key, value in self.cf.iteritems(): oids = value[0] for oid in oids: sqlString = ssdo.oidName + '=' + str(oid) if first: DM.SelectLayerByAttribute(tempCFLayer, "NEW_SELECTION", sqlString) first = False else: DM.SelectLayerByAttribute(tempCFLayer, "ADD_TO_SELECTION", sqlString) UTILS.clearExtent(DM.CopyFeatures(tempCFLayer, outputFC)) except: ARCPY.AddIDMessage("ERROR", 210, outputFC) raise SystemExit() #### Set Attribute #### self.outputFC = outputFC
def CopyAndOverwrite():
    # Create new feature classes locally from the hosted source (needs work)
    '''for key, value in dict.items():
    print(key)
    for featureclass in value:
    print (" "+featureclass)
    '''
    from arcpy import management, env, Exists  # NOTE(review): Exists is imported but unused here
    env.overwriteOutput = 1
    # NOTE(review): 'm' is not defined in this function — presumably a module-level
    # ArcGIS Pro map object (arcpy.mp), given listLayers/isWebLayer; confirm.
    # Pair each hosted (web) feature layer with the local layer of the same name.
    for source in m.listLayers():
        if source.isWebLayer is True and source.isFeatureLayer is True:
            print("hosted ", source.name)
            for target in m.listLayers():
                if target.isWebLayer is False and target.isFeatureLayer is True:
                    # Local layer names look like db.owner.name — keep the last part.
                    db, owner, targetname = target.name.split(".")
                    if targetname == source.name:
                        # Overwrite the local copy in the project GDB with the hosted data.
                        output = r"F:/Cart/projects/KanPlan/MXD/HostedServices/Collect16/Collector_Prod/Collector_Prod.gdb/"+targetname
                        print("local ", target)
                        management.CopyFeatures(source, output)
# NOTE(review): stray triple-quote below appears to open a string that comments
# out following code — confirm it is closed later in the file.
"""
if row.getValue(flds[i]) == 1: sppCd = flds[i][11:] sppString = sppString + ", " + sppCd #find the full name Sci = sciNames[sppCd.lower()] sciString = sciString + ", " + Sci Comm = commNames[sppCd.lower()] comnString = comnString + ", " + Comm Hab = habitat_Use[sppCd.lower()] ##Consider if Hab == "": habString = habString else: habString = habString + ", " + Sci + ": " + Hab #habString = habString + ", " + Hab row.codeList = sppString[2:] #chop the first comma-space row.SciNames = sciString[2:] row.CommNames = comnString[2:] row.HabitatUse = habString[2:] rows.updateRow(row) del rows, row ## delete unwanted fields outFeat = "zon_FullLists_C_test" + classLevel man.CopyFeatures(curZo, outFeat) fldList = [f.name for f in arcpy.ListFields(outFeat, "VAT_zon_*")] man.DeleteField(outFeat, fldList) fldList = [f.name for f in arcpy.ListFields(outFeat, "hyp_backOut*")] man.DeleteField(outFeat, fldList) print(" final file is " + outFeat) # consider automating the labeling of all pro polys with Facility name, and also other attribute fields we need to add
def import_gpx(gpx_file, wpt_fc, trk_fc):
    """Import a GPX file's waypoints and/or tracks into feature classes.

    :param gpx_file: Path to the GPX (XML) file to parse
    :param wpt_fc: Output point feature class for waypoints (falsy to skip)
    :param trk_fc: Output polyline feature class for tracks (falsy to skip)

    Coordinates are read as WGS84 and projected to the session's
    outputCoordinateSystem. Tracks are split into new segments whenever the
    gap between consecutive timestamped points exceeds TRKSEG_IDLE_SECS.
    """
    GCS_WGS_84 = arcpy.SpatialReference(4326)
    GCS_TRANSFORMS = 'WGS_1984_(ITRF08)_To_NAD_1983_2011; NAD_1927_To_NAD_1983_NADCON'

    # Respect a transformation already set in the environment; otherwise use ours.
    arcpy.env.geographicTransformations = arcpy.env.geographicTransformations or GCS_TRANSFORMS
    arcpy.AddMessage('Geographic Transformations: %s' % arcpy.env.geographicTransformations)

    scratch = arcpy.env.scratchWorkspace
    arcpy.env.addOutputsToMap = False

    # Output field name -> GPX element path (relative to each <wpt>).
    WPT_FIELDS = [
        ('ELEVATION', 'gpx:ele'),
        ('TIME', 'gpx:time'),
        ('NAME', 'gpx:name'),
        ('DESCRIPTION', 'gpx:desc'),
        ('SYMBOL', 'gpx:sym'),
        ('TYPE', 'gpx:type'),
        ('SAMPLES', 'gpx:extensions/wptx1:WaypointExtension/wptx1:Samples')
    ]

    # Namespace map for find/findall on the parsed tree.
    ns = {
        'gpx': 'http://www.topografix.com/GPX/1/1',
        'gpxx': 'http://www.garmin.com/xmlschemas/GpxExtensions/v3',
        'wptx1': 'http://www.garmin.com/xmlschemas/WaypointExtension/v1',
        'ctx': 'http://www.garmin.com/xmlschemas/CreationTimeExtension/v1',
    }

    etree.register_namespace('', 'http://www.topografix.com/GPX/1/1')
    etree.register_namespace('gpxx', 'http://www.garmin.com/xmlschemas/GpxExtensions/v3')
    etree.register_namespace('wptx1', 'http://www.garmin.com/xmlschemas/WaypointExtension/v1')
    etree.register_namespace('ctx', 'http://www.garmin.com/xmlschemas/CreationTimeExtension/v1')

    gpx = etree.parse(gpx_file).getroot()
    sr = arcpy.env.outputCoordinateSystem

    if wpt_fc:
        create_points_feature_class(wpt_fc, sr)
        waypoints = []
        for wpt in gpx.findall('gpx:wpt', ns):
            # lon/lat attributes are strings; arcpy.Point presumably coerces — confirm.
            x, y = wpt.get('lon'), wpt.get('lat')
            row = [arcpy.PointGeometry(arcpy.Point(x, y), GCS_WGS_84).projectAs(sr)]
            for field, tag in WPT_FIELDS:
                elem = wpt.find(tag, ns)
                if elem is None:
                    row.append(None)
                elif field == 'ELEVATION':
                    # Convert meters to the output SR's linear unit.
                    row.append('%0.4f' % (float(elem.text) / sr.metersPerUnit))
                elif field == 'NAME' and elem.text.isdigit():
                    # Normalize purely numeric names (e.g. strip leading zeros).
                    row.append('%d' % int(elem.text))
                else:
                    row.append(elem.text)
            waypoints.append(row)

        if waypoints:
            fields = ['SHAPE@'] + [f[0] for f in WPT_FIELDS]
            cur = arcpy.da.InsertCursor(wpt_fc, fields)
            for row in waypoints:
                cur.insertRow(row)
            del cur

    if trk_fc:
        # idle time between trkpts to start a new track segment
        TRKSEG_IDLE_SECS = 600

        tracks = []
        track_num = 0
        for trk in gpx.findall('gpx:trk', ns):
            track_num += 1
            elem = trk.find('gpx:name', ns)
            if elem is None:
                track_name = 'track-%04d' % track_num
            else:
                track_name = elem.text
            track_pts = []
            dt_last = None
            segment_num = 0
            for trkpt in trk.findall('./gpx:trkseg/gpx:trkpt', ns):
                x, y = trkpt.get('lon'), trkpt.get('lat')
                pt = arcpy.PointGeometry(arcpy.Point(x, y), GCS_WGS_84).projectAs(sr).firstPoint

                # See if there's a track point time
                elem = trkpt.find('gpx:time', ns)
                if elem is None:
                    dt_last = None
                else:
                    dt = utils.default_tzinfo(parser.parse(elem.text), tz.UTC)
                    # NOTE(review): .seconds ignores the days component of the
                    # timedelta — gaps spanning days wrap around; confirm intended.
                    if dt_last and (dt - dt_last).seconds > TRKSEG_IDLE_SECS:
                        # start a new segment
                        if len(track_pts) > 1:
                            segment_num += 1
                            # Only suffix the name when there is more than one segment.
                            if segment_num > 1:
                                segment_name = '%s SEG-%04d' % (track_name, segment_num)
                            else:
                                segment_name = track_name
                            geom = arcpy.Polyline(arcpy.Array(track_pts), sr)
                            tracks.append([geom, segment_name, len(track_pts)])
                        else:
                            arcpy.AddMessage('Skipping track "%s": track_pts=%d' % (track_name, len(track_pts)))
                        track_pts = []
                    dt_last = dt
                track_pts.append(pt)

            # Flush the final (or only) segment of this track.
            if len(track_pts) > 1:
                segment_num += 1
                if segment_num > 1:
                    segment_name = '%s SEG-%04d' % (track_name, segment_num)
                else:
                    segment_name = track_name
                geom = arcpy.Polyline(arcpy.Array(track_pts), sr)
                tracks.append([geom, segment_name, len(track_pts)])
            else:
                arcpy.AddMessage('Skipping track "%s": track_pts=%d' % (track_name, len(track_pts)))

        if tracks:
            temp_fc = os.path.join(scratch, os.path.basename(trk_fc) + '_Temp')
            if sr is None:
                arcpy.AddError('Geoprocessing environment not set: outputCoordinateSystem')
                return None
            # Build in scratch first, then copy to the requested location.
            fc = mgmt.CreateFeatureclass(*os.path.split(temp_fc), geometry_type='POLYLINE', spatial_reference=sr)
            mgmt.AddField(fc, 'NAME', 'TEXT', field_length=64)
            mgmt.AddField(fc, 'POINTS', 'LONG')
            cur = arcpy.da.InsertCursor(fc, ('SHAPE@', 'NAME', 'POINTS'))
            for row in tracks:
                cur.insertRow(row)
            del cur
            mgmt.CopyFeatures(temp_fc, trk_fc)
            del fc
def process_zone(zone_fc, output, zone_name, zone_id_field, zone_name_field,
                 other_keep_fields, clip_hu8, lagosne_name):
    """Standardize a zone polygon layer: dissolve, clip, drop slivers, assign
    zone ids, flag border/coastal zones, assign states, and prefix field names.

    :param zone_fc: Input zone polygon feature class
    :param output: Output feature class path (also used to name the
        '{output}_not_selected' discard layer)
    :param zone_name: Short zone name (e.g. 'hu4'); used as id and field prefix
    :param zone_id_field: Field that uniquely identifies a zone
    :param zone_name_field: Field holding the zone's display name
    :param other_keep_fields: Comma-space separated extra fields to keep
    :param clip_hu8: 'Y' to additionally clip to the HU8 output extent
    :param lagosne_name: LAGOS-NE layer name to borrow zone ids from (falsy to skip)
    """
    # dissolve fields by the field that zone_id is based on (the field that identifies a unique zone)
    dissolve_fields = [
        f for f in "{}, {}, {}".format(zone_id_field, zone_name_field,
                                       other_keep_fields).split(', ')
        if f != ''
    ]
    print("Dissolving...")
    dissolve1 = DM.Dissolve(zone_fc, 'dissolve1', dissolve_fields)

    # update name field to match our standard
    DM.AlterField(dissolve1, zone_name_field, 'name')

    # original area
    DM.AddField(dissolve1, 'originalarea', 'DOUBLE')
    DM.CalculateField(dissolve1, 'originalarea', '!shape.area@hectares!', 'PYTHON')

    # clip
    print("Clipping...")
    clip = AN.Clip(dissolve1, MASTER_CLIPPING_POLY, 'clip')
    if clip_hu8 == 'Y':
        final_clip = AN.Clip(clip, HU8_OUTPUT, 'final_clip')
    else:
        final_clip = clip

    print("Selecting...")
    # calc new area, orig area pct, compactness
    DM.AddField(final_clip, 'area_ha', 'DOUBLE')
    DM.AddField(final_clip, 'originalarea_pct', 'DOUBLE')
    DM.AddField(final_clip, 'compactness', 'DOUBLE')
    DM.JoinField(final_clip, zone_id_field, dissolve1, zone_id_field, 'originalarea_pct')

    uCursor_fields = [
        'area_ha', 'originalarea_pct', 'originalarea', 'compactness',
        'SHAPE@AREA', 'SHAPE@LENGTH'
    ]
    with arcpy.da.UpdateCursor(final_clip, uCursor_fields) as uCursor:
        for row in uCursor:
            area, orig_area_pct, orig_area, comp, shape_area, shape_length = row
            area = shape_area / 10000  # convert from m2 to hectares
            orig_area_pct = round(100 * area / orig_area, 2)
            # Isoperimetric quotient: 1 for a circle, -> 0 for thin slivers.
            comp = 4 * 3.14159 * shape_area / (shape_length**2)
            row = (area, orig_area_pct, orig_area, comp, shape_area, shape_length)
            uCursor.updateRow(row)

    # if zones are present with <5% of original area and a compactness measure of <.2 (ranges from 0-1)
    # AND ALSO they are no bigger than 500 sq. km. (saves Chippewa County and a WWF), filter out
    # save eliminated polygons to temp database as a separate layer for inspection
    # Different processing for HU4 and HU8, so that they match the extent of HU8 more closely but still throw out tiny slivers
    # County also only eliminated if a tiny, tiny, tiny sliver (so: none should be eliminated)
    if zone_name not in ('hu4', 'hu12', 'county'):
        selected = AN.Select(
            final_clip, 'selected',
            "originalarea_pct >= 5 OR compactness >= .2 OR area_ha > 50000")
        not_selected = AN.Select(
            final_clip, '{}_not_selected'.format(output),
            "originalarea_pct < 5 AND compactness < .2 AND area_ha < 50000")
    else:
        selected = final_clip

    # eliminate small slivers, re-calc area fields, add perimeter and multipart flag
    # leaves the occasional errant sliver but some areas over 25 hectares are more valid so this is
    # CONSERVATIVE
    print("Trimming...")
    trimmed = DM.EliminatePolygonPart(selected, 'trimmed', 'AREA', '25 Hectares',
                                      part_option='ANY')

    # gather up a few calculations into one cursor because this is taking too long over the HU12 layer
    DM.AddField(trimmed, 'perimeter_m', 'DOUBLE')
    DM.AddField(trimmed, 'multipart', 'TEXT', field_length=1)
    uCursor_fields = [
        'area_ha', 'originalarea_pct', 'originalarea', 'perimeter_m',
        'multipart', 'SHAPE@'
    ]
    with arcpy.da.UpdateCursor(trimmed, uCursor_fields) as uCursor:
        for row in uCursor:
            area, orig_area_pct, orig_area, perim, multipart, shape = row
            area = shape.area / 10000  # convert to hectares from m2
            orig_area_pct = round(100 * area / orig_area, 2)
            perim = shape.length

            # multipart flag calc
            if shape.isMultipart:
                multipart = 'Y'
            else:
                multipart = 'N'
            row = (area, orig_area_pct, orig_area, perim, multipart, shape)
            uCursor.updateRow(row)

    # delete intermediate fields
    DM.DeleteField(trimmed, 'compactness')
    DM.DeleteField(trimmed, 'originalarea')

    print("Zone IDs....")
    # link to LAGOS-NE zone IDs
    DM.AddField(trimmed, 'zoneid', 'TEXT', field_length=40)
    trimmed_lyr = DM.MakeFeatureLayer(trimmed, 'trimmed_lyr')
    if lagosne_name:
        # join to the old master GDB path on the same master field and copy in the ids
        old_fc = os.path.join(LAGOSNE_GDB, lagosne_name)
        old_fc_lyr = DM.MakeFeatureLayer(old_fc, 'old_fc_lyr')
        if lagosne_name == 'STATE' or lagosne_name == 'COUNTY':
            DM.AddJoin(trimmed_lyr, zone_id_field, old_fc_lyr, 'FIPS')
        else:
            DM.AddJoin(trimmed_lyr, zone_id_field, old_fc_lyr,
                       zone_id_field)  # usually works because same source data

        # copy
        DM.CalculateField(trimmed_lyr, 'zoneid',
                          '!{}.ZoneID!.lower()'.format(lagosne_name), 'PYTHON')
        DM.RemoveJoin(trimmed_lyr)

    # generate new zone ids
    old_ids = [row[0] for row in arcpy.da.SearchCursor(trimmed, 'zoneid')]
    with arcpy.da.UpdateCursor(trimmed, 'zoneid') as cursor:
        counter = 1
        for row in cursor:
            if not row[0]:  # if no existing ID borrowed from LAGOS-NE, assign a new one
                new_id = '{name}_{num}'.format(name=zone_name, num=counter)

                # ensures new ids don't re-use old numbers but fills in all positive numbers eventually
                while new_id in old_ids:
                    counter += 1
                    new_id = '{name}_{num}'.format(name=zone_name, num=counter)
                row[0] = new_id
                cursor.updateRow(row)
                counter += 1

    print("Edge flags...")
    # add flag fields
    DM.AddField(trimmed, 'onlandborder', 'TEXT', field_length=2)
    DM.AddField(trimmed, 'oncoast', 'TEXT', field_length=2)

    # identify border zones
    border_lyr = DM.MakeFeatureLayer(LAND_BORDER, 'border_lyr')
    DM.SelectLayerByLocation(trimmed_lyr, 'INTERSECT', border_lyr)
    DM.CalculateField(trimmed_lyr, 'onlandborder', "'Y'", 'PYTHON')
    DM.SelectLayerByAttribute(trimmed_lyr, 'SWITCH_SELECTION')
    DM.CalculateField(trimmed_lyr, 'onlandborder', "'N'", 'PYTHON')

    # identify coastal zones
    coastal_lyr = DM.MakeFeatureLayer(COASTLINE, 'coastal_lyr')
    DM.SelectLayerByLocation(trimmed_lyr, 'INTERSECT', coastal_lyr)
    DM.CalculateField(trimmed_lyr, 'oncoast', "'Y'", 'PYTHON')
    DM.SelectLayerByAttribute(trimmed_lyr, 'SWITCH_SELECTION')
    DM.CalculateField(trimmed_lyr, 'oncoast', "'N'", 'PYTHON')

    print("State assignment...")
    # State?
    DM.AddField(trimmed, "state", 'text', field_length='2')
    # Prefer the state containing the zone's center; fall back to any intersecting state.
    state_center = arcpy.SpatialJoin_analysis(
        trimmed, STATE_FC, 'state_center', join_type='KEEP_COMMON',
        match_option='HAVE_THEIR_CENTER_IN')
    state_intersect = arcpy.SpatialJoin_analysis(trimmed, STATE_FC,
                                                 'state_intersect',
                                                 match_option='INTERSECT')
    state_center_dict = {
        row[0]: row[1]
        for row in arcpy.da.SearchCursor(state_center, ['ZoneID', 'STUSPS'])
    }
    state_intersect_dict = {
        row[0]: row[1]
        for row in arcpy.da.SearchCursor(state_intersect, ['ZoneID', 'STUSPS'])
    }
    # NOTE(review): a zone in neither dict raises KeyError here — confirm inputs
    # guarantee every zone intersects at least one state.
    with arcpy.da.UpdateCursor(trimmed, ['ZoneID', 'state']) as cursor:
        for updateRow in cursor:
            keyValue = updateRow[0]
            if keyValue in state_center_dict:
                updateRow[1] = state_center_dict[keyValue]
            else:
                updateRow[1] = state_intersect_dict[keyValue]
            cursor.updateRow(updateRow)

    # glaciation status?
    # TODO as version 0.6

    # preface the names with the zones
    DM.DeleteField(trimmed, 'ORIG_FID')
    fields = [
        f.name for f in arcpy.ListFields(trimmed, '*')
        if f.type not in ('OID', 'Geometry') and not f.name.startswith('Shape_')
    ]
    for f in fields:
        new_fname = '{zn}_{orig}'.format(zn=zone_name, orig=f).lower()
        try:
            DM.AlterField(trimmed, f, new_fname, clear_field_alias='TRUE')
        # sick of debugging the required field message-I don't want to change required fields anyway
        except:
            pass

    DM.CopyFeatures(trimmed, output)

    # cleanup: delete every *_lyr local plus all temp feature classes in the workspace
    lyr_objects = [
        lyr_object for var_name, lyr_object in locals().items()
        if var_name.endswith('lyr')
    ]
    temp_fcs = arcpy.ListFeatureClasses('*')
    for l in lyr_objects + temp_fcs:
        DM.Delete(l)
def flatten_overlaps(zone_fc, zone_field, output_fc, output_table,
                     cluster_tolerance=' 3 Meters'):
    """Split overlapping zone polygons into non-overlapping 'flat' polygons.

    Produces output_fc (one polygon per unique shape, with a generated
    'flat{zone_field}' id) and output_table (mapping each flat polygon back to
    the original zone ids it came from).

    :param zone_fc: Input polygon feature class, possibly with overlaps
    :param zone_field: Zone id field in zone_fc
    :param output_fc: Output feature class path
    :param output_table: Output crosswalk table path
    :param cluster_tolerance: Tolerance passed to Union
    :return: output_fc
    """
    orig_env = arcpy.env.workspace
    arcpy.env.workspace = 'in_memory'

    objectid = [f.name for f in arcpy.ListFields(zone_fc) if f.type == 'OID'][0]
    zone_type = [f.type for f in arcpy.ListFields(zone_fc, zone_field)][0]
    # Union names its FID field after the input fc's basename.
    fid1 = 'FID_{}'.format(os.path.basename(zone_fc))
    flat_zoneid = 'flat{}'.format(zone_field)
    flat_zoneid_prefix = 'flat{}_'.format(zone_field.replace('_zoneid', ''))

    # Union with FID_Only (A)
    arcpy.AddMessage("Splitting overlaps in polygons...")
    zoneid_dict = {
        r[0]: r[1]
        for r in arcpy.da.SearchCursor(zone_fc, [objectid, zone_field])
    }
    self_union = AN.Union([zone_fc], 'self_union', 'ONLY_FID',
                          cluster_tolerance=cluster_tolerance)

    # #If you don't run this section, Find Identical fails with error 999999. Seems to have to do with small slivers
    # #having 3 vertices and/or only circular arcs in the geometry.
    arcpy.AddMessage("Repairing self-union geometries...")
    # DM.AddGeometryAttributes(self_union, 'POINT_COUNT; AREA')
    # union_fix = DM.MakeFeatureLayer(self_union, 'union_fix', where_clause='PNT_COUNT <= 10 OR POLY_AREA < 5000')
    # arcpy.Densify_edit(union_fix, 'DISTANCE', distance = '1 Meters', max_deviation='1 Meters')  # selection ON, edits self_union disk
    DM.RepairGeometry(self_union, 'DELETE_NULL')  # eliminate empty geoms. selection ON, edits self_union disk
    # for field in ['PNT_COUNT', 'POLY_AREA']:
    #     DM.DeleteField(self_union, field)

    # Find Identical by Shape (B)
    if arcpy.Exists('identical_shapes'):
        DM.Delete('identical_shapes')  # causes failure in FindIdentical even when overwrite is allowed
    identical_shapes = DM.FindIdentical(self_union, 'identical_shapes', 'Shape')

    # Join A to B and calc flat[zone]_zoneid = FEAT_SEQ (C)
    DM.AddField(self_union, flat_zoneid, 'TEXT', field_length=20)
    union_oid = [
        f.name for f in arcpy.ListFields(self_union) if f.type == 'OID'
    ][0]
    # IN_FID -> FEAT_SEQ: identical shapes share a FEAT_SEQ group number.
    identical_shapes_dict = {
        r[0]: r[1]
        for r in arcpy.da.SearchCursor(identical_shapes, ['IN_FID', 'FEAT_SEQ'])
    }
    with arcpy.da.UpdateCursor(self_union, [union_oid, flat_zoneid]) as u_cursor:
        for row in u_cursor:
            row[1] = '{}{}'.format(flat_zoneid_prefix,
                                   identical_shapes_dict[row[0]])
            u_cursor.updateRow(row)

    # Add the original zone ids and save to table (E)
    arcpy.AddMessage("Assigning temporary IDs to split polygons...")
    unflat_table = DM.CopyRows(self_union, 'unflat_table')
    DM.AddField(unflat_table, zone_field, zone_type)  # default text length of 50 is fine if needed
    with arcpy.da.UpdateCursor(unflat_table, [fid1, zone_field]) as u_cursor:
        for row in u_cursor:
            row[1] = zoneid_dict[row[0]]  # assign zone id
            u_cursor.updateRow(row)

    # Delete Identical (C) (save as flat[zone])
    # Keep only the first polygon of each identical-shape group.
    # NOTE(review): 'visited' is a list, so membership tests are O(n) and
    # duplicates are appended — a set would behave identically here but faster.
    with arcpy.da.UpdateCursor(self_union, 'OID@') as cursor:
        visited = []
        for row in cursor:
            feat_seq = identical_shapes_dict[row[0]]
            if feat_seq in visited:
                cursor.deleteRow()
            visited.append(feat_seq)

    DM.DeleteField(self_union, fid1)
    DM.DeleteField(unflat_table, fid1)

    # save outputs
    output_fc = DM.CopyFeatures(self_union, output_fc)
    output_table = DM.CopyRows(unflat_table, output_table)

    # cleanup
    for item in [self_union, identical_shapes, unflat_table]:
        DM.Delete(item)
    arcpy.env.workspace = orig_env

    return output_fc
def snap_points_to_mask_raster (in_file, mask, out_file, distance, workspace):
    """Copy points to out_file and snap those outside the mask onto the mask edge.

    in_file: input point features (layer or dataset)
    mask: raster whose footprint defines the valid area
    out_file: output point features (``.shp`` appended if needed)
    distance: snap distance string (defaults to "100 METERS" when empty/None)
    workspace: workspace to use; falls back to current workspace, then cwd

    NOTE(review): Python 2 print statements — this module predates Python 3.
    """
    if distance is None or len (distance) == 0:
        distance = "100 METERS"

    # Borrow the mask's coordinate system if none is set in the environment.
    if arcpy.env.outputCoordinateSystem is None:
        arcpy.env.outputCoordinateSystem = mask
    print arcpy.env.outputCoordinateSystem.name

    if len(workspace):
        arcpy.env.workspace = workspace
    if arcpy.env.workspace is None or len(arcpy.env.workspace) == 0:
        arcpy.env.workspace = os.getcwd()

    arcpy.AddMessage ("workspace is %s" % arcpy.env.workspace)

    # Convert the raster mask to polygons so points can snap to its edges.
    # Scratch names in a file workspace need an explicit .shp suffix.
    try:
        suffix = None
        wk = arcpy.env.workspace
        if not '.gdb' in wk:
            suffix = '.shp'
        poly_file = arcpy.CreateScratchName(None, suffix, 'POLYGON')
        arcpy.RasterToPolygon_conversion (mask, poly_file, 'NO_SIMPLIFY')
    except:
        raise

    arcpy.AddMessage ("poly_file is %s" % poly_file)

    # handle layers and datasets
    desc = arcpy.Describe(in_file)
    in_file = desc.catalogPath

    # add .shp extension if needed - clunky, but otherwise system fails below
    re_gdb = re.compile ('\.gdb$')
    re_shp = re.compile ('\.shp$')
    path = os.path.dirname(out_file)
    if len (path) == 0:
        path = arcpy.env.workspace
    if not re_gdb.search (path) and not re_shp.search (out_file):
        out_file += '.shp'

    arcpy.AddMessage ("Input point file is %s" % in_file)
    arcpy.AddMessage ("Output point file is %s" % out_file)

    arcmgt.CopyFeatures (in_file, out_file)

    try:
        # Select points NOT intersecting the mask polygons (select-then-switch)
        # and snap only those onto the nearest polygon edge.
        snap_layer_name = 'get_layer_for_snapping'
        arcmgt.MakeFeatureLayer (out_file, snap_layer_name)
        arcmgt.SelectLayerByLocation (snap_layer_name, 'intersect', poly_file, '#', 'NEW_SELECTION')
        arcmgt.SelectLayerByAttribute(snap_layer_name, 'SWITCH_SELECTION')
        # NOTE(review): GetCount returns a Result object; comparing it to 0
        # is always truthy here — presumably int(...getOutput(0)) was intended.
        if arcmgt.GetCount(snap_layer_name) > 0:
            arcpy.Snap_edit (snap_layer_name, [[poly_file, "EDGE", distance]])
        else:
            arcpy.AddMessage ('No features selected, no snapping applied')
    except Exception as e:
        print arcpy.GetMessages()
        raise e

    # cleanup of the scratch layer and polygon file
    arcmgt.Delete (snap_layer_name)
    arcmgt.Delete (poly_file)

    print arcpy.GetMessages()
    print "Completed"

    return
def doIntegrate(self):
    """Snap nearby incident points together (Integrate) and collect them into
    weighted events for the optimized hot spot analysis workflow.

    Uses locational-outlier statistics to pick a snap distance, applies
    Integrate at increasing percentiles of that distance, then runs Collect
    Events and stores the result via createAnalysisSSDO.
    """
    #### Initial Data Assessment ####
    printOHSSection(84428, prependNewLine=True)
    printOHSSubject(84431, addNewLine=False)

    #### Find Unique Locations ####
    msg = ARCPY.GetIDMessage(84441)
    ARCPY.SetProgressor("default", msg)
    initCount = UTILS.getCount(self.ssdo.inputFC)
    self.checkIncidents(initCount)
    collectedPointFC = UTILS.returnScratchName("Collect_InitTempFC")
    # NOTE(review): collInfo is never used afterwards — confirm intent.
    collInfo = EVENTS.collectEvents(self.ssdo, collectedPointFC)
    self.cleanUpList.append(collectedPointFC)
    collSSDO = SSDO.SSDataObject(collectedPointFC,
                                 explicitSpatialRef=self.ssdo.spatialRef,
                                 useChordal=True)
    collSSDO.obtainDataGA(collSSDO.oidName)
    #################################

    #### Locational Outliers ####
    lo = UTILS.LocationInfo(collSSDO, concept="EUCLIDEAN",
                            silentThreshold=True, stdDeviations=3)
    printOHSLocationalOutliers(lo, aggType=self.aggType)

    #### Raster Boundary ####
    if self.outputRaster:
        self.validateRaster(collSSDO.xyCoords)

    #### Agg Header ####
    printOHSSection(84444)

    #### Copy Features for Integrate ####
    msg = ARCPY.GetIDMessage(84443)
    ARCPY.SetProgressor("default", msg)
    intFC = UTILS.returnScratchName("Integrated_TempFC")
    self.cleanUpList.append(intFC)
    DM.CopyFeatures(self.ssdo.inputFC, intFC)

    #### Make Feature Layer To Avoid Integrate Bug with Spaces ####
    mfc = "Integrate_MFC_2"
    DM.MakeFeatureLayer(intFC, mfc)
    self.cleanUpList.append(mfc)

    #### Snap Subject ####
    printOHSSubject(84442, addNewLine=False)
    # Scale the snap distance by observations-per-expected-count.
    nScale = (collSSDO.numObs * 1.0) / self.cnt
    if lo.nonZeroAvgDist < lo.nonZeroMedDist:
        useDist = lo.nonZeroAvgDist * nScale
        useType = "average"  # NOTE(review): useType is set but never read below
    else:
        useDist = lo.nonZeroMedDist * nScale
        useType = "median"
    # Neighbor distances below the chosen cutoff, ascending.
    distance2Integrate = lo.distances[lo.distances < useDist]
    distance2Integrate = NUM.sort(distance2Integrate)
    numDists = len(distance2Integrate)

    #### Max Snap Answer ####
    msg = ARCPY.GetIDMessage(84445)
    useDistStr = self.ssdo.distanceInfo.printDistance(useDist)
    msg = msg.format(useDistStr)
    printOHSAnswer(msg)

    # Integrate in three passes at the 10th, 25th and 100th percentile
    # distances (the last index clamped to the final element).
    percs = [10, 25, 100]
    indices = [int(numDists * (i * .01)) for i in percs]
    if indices[-1] >= numDists:
        indices[-1] = -1
    ARCPY.SetProgressor("default", msg)
    for pInd, dInd in enumerate(indices):
        dist = distance2Integrate[dInd]
        snap = self.ssdo.distanceInfo.linearUnitString(dist, convert=True)
        DM.Integrate(mfc, snap)
    del collSSDO

    #### Run Collect Events ####
    collectedFC = UTILS.returnScratchName("Collect_TempFC")
    self.cleanUpList.append(collectedFC)
    intSSDO = SSDO.SSDataObject(intFC,
                                explicitSpatialRef=self.ssdo.spatialRef,
                                silentWarnings=True, useChordal=True)
    intSSDO.obtainDataGA(intSSDO.oidName)
    EVENTS.collectEvents(intSSDO, collectedFC)
    descTemp = ARCPY.Describe(collectedFC)
    oidName = descTemp.oidFieldName

    #### Delete Integrated FC ####
    del intSSDO

    #### Set VarName, MasterField, AnalysisSSDO ####
    self.createAnalysisSSDO(collectedFC, "ICOUNT")
classLevel = hypZ[i][-1:] curZo = wrk + "/zon_C" + classLevel polyZo = wrk + "/hyp_backOut_dissolve_" + classLevel polyZoLyr = "polyZoLayer" # join the table from the raster to the poly zone layer man.MakeFeatureLayer(polyZo, polyZoLyr) man.AddJoin(polyZoLyr, "OBJECTID", curZo, "OBJECTID", "KEEP_ALL") # find any polys with Richness below zone level # each dict entry is [zone: min richness] dictMinRich = {1: 1, 2: 2, 3: 5} targMinRich = dictMinRich[int(classLevel)] expr = "Richness >= " + str(targMinRich) man.SelectLayerByAttribute(polyZoLyr, "NEW_SELECTION", expr) # write out the selected set outFeat = wrk + "/zon_Joined_C" + classLevel man.CopyFeatures(polyZoLyr, outFeat) # if rows were dropped AND we are above level 1, then need # to add dropped polys to one level down. numRowsSelSet = int(man.GetCount(polyZoLyr).getOutput(0)) numRowsLyr = int(man.GetCount(polyZo).getOutput(0)) if numRowsSelSet < numRowsLyr & int(classLevel) > 1: expr = "Richness < " + str(targMinRich) man.SelectLayerByAttribute(polyZoLyr, "NEW_SELECTION", expr) destinedLevel = int(classLevel) - 1 # write out the selected set outFeat = wrk + "/zon_AddThesePolysTo_C" + str(destinedLevel) man.CopyFeatures(polyZoLyr, outFeat) # if the prev if statement was acted on, then grab # those data in the next loop feats = arcpy.ListFeatureClasses() primFeat = wrk + "/zon_Joined_C" + classLevel
def classify_lakes(nhd, out_feature_class, exclude_intermit_flowlines=False, debug_mode=False):
    """Classify NHD lakes by their hydrologic connectivity to the stream network.

    Assigns each lake one of four classes ('Isolated', 'Headwater', 'Drainage',
    'DrainageLk') by tracing the HYDRO_NET geometric network downstream from
    lake junctions. Designed to be run twice against the same output:

    1. exclude_intermit_flowlines=False: creates out_feature_class from
       NHDWaterbody (FType 390/436) and writes the 'Lake_Connectivity_Class'
       field using ALL flowlines.
    2. exclude_intermit_flowlines=True: re-reads the pass-1 output, drops
       intermittent flowlines (FCode 46003/46007), writes
       'Lake_Connectivity_Permanent', prevents class "upgrades" relative to
       pass 1, adds a 'Lake_Connectivity_Fluctuates' Y/N flag, and projects
       the final result (USA Contiguous Albers, WKID 102039).

    :param nhd: Path to an NHD geodatabase containing the 'Hydrography'
        feature dataset (NHDFLowline, HYDRO_NET_Junctions, NHDWaterbody,
        HYDRO_NET).
    :param out_feature_class: Output lake feature class. When
        exclude_intermit_flowlines is True it must already exist (the pass-1
        result); it is consumed, re-attributed, and re-created.
    :param exclude_intermit_flowlines: If True, run the "permanent-only"
        second pass described above.
    :param debug_mode: If True, work in a temporary file geodatabase and keep
        intermediate datasets for inspection instead of using 'in_memory'.

    NOTE(review): relies on module-level names `arcpy`, `DM`
    (arcpy.management), `cu`, `os`, and `XY_TOLERANCE` (used as the
    search-distance argument to SelectLayerByLocation) — confirm against the
    file header, which is outside this view.
    """
    if debug_mode:
        arcpy.env.overwriteOutput = True
        temp_gdb = cu.create_temp_GDB('classify_lake_connectivity')
        arcpy.env.workspace = temp_gdb
        arcpy.AddMessage('Debugging workspace located at {}'.format(temp_gdb))
    else:
        arcpy.env.workspace = 'in_memory'

    # Guard: a leftover "temp_fc" in the workspace means a prior run in this
    # session exited without cleaning up; abort rather than mix stale data.
    if arcpy.Exists("temp_fc"):
        print("There is a problem here.")
        raise Exception

    # Tool temporary feature classes (dataset names inside the scratch workspace).
    temp_fc = "temp_fc"
    csiwaterbody_10ha = "csiwaterbody_10ha"
    nhdflowline_filtered = "nhdflowline_filtered"
    dangles = "dangles"
    start = "start"
    end = "end"
    startdangles = "startdangles"
    enddangles = "enddangles"
    non_artificial_end = "non_artificial_end"
    flags_10ha_lake_junctions = "flags_10ha_lake_junctions"
    midvertices = "midvertices"
    non10vertices = "non10vertices"
    non10junctions = "non10junctions"
    all_non_flag_points = "all_non_flag_points"
    barriers = "barriers"
    trace1_junctions = "trace1_junctions"
    trace1_flowline = "trace1_flowline"
    trace2_junctions = "trace2junctions"  # NOTE: dataset name intentionally(?) lacks an underscore
    trace2_flowline = "trace2_flowline"

    # Clean up workspace in case of bad exit from prior run in same session.
    this_tool_layers = [
        "dangles_lyr", "nhdflowline_lyr", "junction_lyr", "midvertices_lyr",
        "all_non_flag_points_lyr", "non10vertices_lyr", "out_fc_lyr", "trace1", "trace2"
    ]
    this_tool_temp = [
        temp_fc, csiwaterbody_10ha, nhdflowline_filtered, dangles, start, end,
        startdangles, enddangles, non_artificial_end, flags_10ha_lake_junctions,
        midvertices, non10vertices, non10junctions, all_non_flag_points, barriers,
        trace1_junctions, trace1_flowline, trace2_junctions, trace2_flowline
    ]
    for item in this_tool_layers + this_tool_temp:
        # Best-effort delete: most items won't exist on a clean start.
        try:
            DM.Delete(item)
        except:
            pass

    # Local variables: paths into the NHD 'Hydrography' feature dataset.
    # ("NHDFLowline" capitalization matches the source data's dataset name.)
    nhdflowline = os.path.join(nhd, "Hydrography", "NHDFLowline")
    nhdjunction = os.path.join(nhd, "Hydrography", "HYDRO_NET_Junctions")
    nhdwaterbody = os.path.join(nhd, "Hydrography", "NHDWaterbody")
    network = os.path.join(nhd, "Hydrography", "HYDRO_NET")

    # Get lakes, ponds and reservoirs over a hectare.
    #csi_population_filter = '''"AreaSqKm" >=0.01 AND\
    #"FCode" IN (39000,39004,39009,39010,39011,39012,43600,43613,43615,43617,43618,43619,43621)'''
    all_lakes_reservoirs_filter = '''"FType" IN (390, 436)'''

    # Can't see why we shouldn't just attribute all lakes and reservoirs
    # arcpy.Select_analysis(nhdwaterbody, "csiwaterbody", lake_population_filter)
    arcpy.AddMessage("Initializing output.")
    if exclude_intermit_flowlines:
        # Second pass: start from the first pass's output so its
        # Lake_Connectivity_Class field is carried along, then rebuild it.
        DM.CopyFeatures(out_feature_class, temp_fc)
        DM.Delete(out_feature_class)
    else:
        # First pass: population is all lakes/reservoirs from NHDWaterbody.
        arcpy.Select_analysis(nhdwaterbody, temp_fc, all_lakes_reservoirs_filter)

    # Get lakes, ponds and reservoirs over 10 hectares.
    lakes_10ha_filter = '''"AreaSqKm" >= 0.1 AND "FType" IN (390, 436)'''
    arcpy.Select_analysis(nhdwaterbody, csiwaterbody_10ha, lakes_10ha_filter)

    # Exclude intermittent flowlines, if requested.
    # (46003/46007 are the intermittent FCodes; rebinds nhdflowline to the
    # filtered Result so everything downstream uses the permanent network.)
    if exclude_intermit_flowlines:
        flowline_where_clause = '''"FCode" NOT IN (46003,46007)'''
        nhdflowline = arcpy.Select_analysis(nhdflowline, nhdflowline_filtered,
                                            flowline_where_clause)

    # Make dangle points at end of nhdflowline
    DM.FeatureVerticesToPoints(nhdflowline, dangles, "DANGLE")
    DM.MakeFeatureLayer(dangles, "dangles_lyr")

    # Isolate start dangles from end dangles: a dangle coincident with a line's
    # START vertex is a source (headwater) node; with an END vertex, a sink.
    DM.FeatureVerticesToPoints(nhdflowline, start, "START")
    DM.FeatureVerticesToPoints(nhdflowline, end, "END")
    DM.SelectLayerByLocation("dangles_lyr", "ARE_IDENTICAL_TO", start)
    DM.CopyFeatures("dangles_lyr", startdangles)
    DM.SelectLayerByLocation("dangles_lyr", "ARE_IDENTICAL_TO", end)
    DM.CopyFeatures("dangles_lyr", enddangles)

    # Special handling for lakes that have some intermittent flow in and some permanent:
    # ends of flowlines that are NOT artificial paths through a lake
    # (WBArea_Permanent_Identifier is null) mark real permanent inflows.
    if exclude_intermit_flowlines:
        DM.MakeFeatureLayer(nhdflowline, "nhdflowline_lyr")
        DM.SelectLayerByAttribute("nhdflowline_lyr", "NEW_SELECTION",
                                  '''"WBArea_Permanent_Identifier" is null''')
        DM.FeatureVerticesToPoints("nhdflowline_lyr", non_artificial_end, "END")
        DM.SelectLayerByAttribute("nhdflowline_lyr", "CLEAR_SELECTION")

    arcpy.AddMessage("Found source area nodes.")

    # Get junctions from lakes >= 10 hectares. These are the trace-1 flags.
    DM.MakeFeatureLayer(nhdjunction, "junction_lyr")
    DM.SelectLayerByLocation("junction_lyr", "INTERSECT", csiwaterbody_10ha,
                             XY_TOLERANCE, "NEW_SELECTION")
    DM.CopyFeatures("junction_lyr", flags_10ha_lake_junctions)
    arcpy.AddMessage("Found lakes >= 10 ha.")

    # Make points shapefile and layer at flowline vertices to act as potential flags and/or barriers.
    arcpy.AddMessage("Tracing...")
    DM.FeatureVerticesToPoints(nhdflowline, midvertices, "MID")
    DM.MakeFeatureLayer(midvertices, "midvertices_lyr")

    # Get vertices that are not coincident with 10 hectare lake junctions.
    # (NEW_SELECTION then SWITCH_SELECTION inverts the intersect set.)
    DM.SelectLayerByLocation("midvertices_lyr", "INTERSECT", flags_10ha_lake_junctions, "", "NEW_SELECTION")
    DM.SelectLayerByLocation("midvertices_lyr", "INTERSECT", flags_10ha_lake_junctions, "", "SWITCH_SELECTION")
    DM.CopyFeatures("midvertices_lyr", non10vertices)

    # Get junctions that are not coincident with 10 hectare lake junctions.
    DM.SelectLayerByLocation("junction_lyr", "INTERSECT", flags_10ha_lake_junctions, "", "NEW_SELECTION")
    DM.SelectLayerByLocation("junction_lyr", "INTERSECT", flags_10ha_lake_junctions, "", "SWITCH_SELECTION")
    DM.CopyFeatures("junction_lyr", non10junctions)

    # Merge non10vertices with non10junctions
    DM.Merge([non10junctions, non10vertices], all_non_flag_points)  # inputs both point fc in_memory
    DM.MakeFeatureLayer(all_non_flag_points, "all_non_flag_points_lyr")

    # Tests the counts...for some reason I'm not getting stable behavior from the merge.
    mid_n = int(DM.GetCount(non10vertices).getOutput(0))
    jxn_n = int(DM.GetCount(non10junctions).getOutput(0))
    merge_n = int(DM.GetCount(all_non_flag_points).getOutput(0))
    if merge_n < mid_n + jxn_n:
        arcpy.AddWarning(
            "The total number of flags ({0}) is less than the sum of the input junctions ({1}) "
            "and input midpoints ({2})".format(merge_n, jxn_n, mid_n))

    # For tracing barriers, select all_non_flag_points points that intersect a 10 ha lake.
    DM.SelectLayerByLocation("all_non_flag_points_lyr", "INTERSECT", csiwaterbody_10ha,
                             XY_TOLERANCE, "NEW_SELECTION")
    DM.CopyFeatures("all_non_flag_points_lyr", barriers)

    # Trace1-Trace downstream to first barrier (junctions+midvertices in 10 ha lake)
    # starting from flags_10ha_lake_junctions flag points.
    DM.TraceGeometricNetwork(network, "trace1", flags_10ha_lake_junctions,
                             "TRACE_DOWNSTREAM", barriers)

    # Save trace1 flowlines and junctions to layers on disk.
    # NOTE(review): "\H" / "\N" are literal backslash sequences under Python 2;
    # these group-layer sublayer paths would need raw strings under Python 3.
    DM.CopyFeatures("trace1\HYDRO_NET_Junctions", trace1_junctions)  # extra for debugging
    DM.CopyFeatures("trace1\NHDFlowline", trace1_flowline)

    # Select vertice midpoints that intersect trace1 flowlines selection for new flags for trace2.
    DM.MakeFeatureLayer(non10vertices, "non10vertices_lyr")
    DM.SelectLayerByLocation("non10vertices_lyr", "INTERSECT", trace1_flowline, "", "NEW_SELECTION")

    # Trace2-Trace downstream from midpoints of flowlines that intersect the
    # selected flowlines from trace1 (no barriers this time).
    DM.TraceGeometricNetwork(network, "trace2", "non10vertices_lyr", "TRACE_DOWNSTREAM")

    # Save trace1 flowlines and junctions to layers and then shapes on disk.
    DM.CopyFeatures("trace2\HYDRO_NET_Junctions", trace2_junctions)
    DM.CopyFeatures("trace2\NHDFlowline", trace2_flowline)  # extra for debugging
    arcpy.AddMessage("Done tracing.")

    # Make shapefile for seepage lakes. (Ones that don't intersect flowlines.)
    # Field name depends on which pass this is.
    if exclude_intermit_flowlines:
        class_field_name = "Lake_Connectivity_Permanent"
    else:
        class_field_name = "Lake_Connectivity_Class"
    DM.AddField(temp_fc, class_field_name, "TEXT", field_length=13)
    DM.MakeFeatureLayer(temp_fc, "out_fc_lyr")
    # Lakes NOT touching any flowline (selection inverted via SWITCH) -> Isolated.
    DM.SelectLayerByLocation("out_fc_lyr", "INTERSECT", nhdflowline, XY_TOLERANCE, "NEW_SELECTION")
    DM.SelectLayerByLocation("out_fc_lyr", "INTERSECT", nhdflowline, "", "SWITCH_SELECTION")
    DM.CalculateField("out_fc_lyr", class_field_name, """'Isolated'""", "PYTHON")

    # New type of "Isolated" classification, mostly for "permanent" but there
    # were some oddballs in "maximum" too: lake touches both a start dangle
    # AND an end dangle (SUBSET narrows the start-dangle selection).
    DM.SelectLayerByLocation("out_fc_lyr", "INTERSECT", startdangles, XY_TOLERANCE, "NEW_SELECTION")
    DM.SelectLayerByLocation("out_fc_lyr", "INTERSECT", enddangles, XY_TOLERANCE, "SUBSET_SELECTION")
    DM.CalculateField("out_fc_lyr", class_field_name, """'Isolated'""", "PYTHON")

    # Get headwater lakes: touch a start dangle but weren't already Isolated.
    DM.SelectLayerByLocation("out_fc_lyr", "INTERSECT", startdangles, XY_TOLERANCE, "NEW_SELECTION")
    DM.SelectLayerByAttribute(
        "out_fc_lyr", "REMOVE_FROM_SELECTION",
        '''"{}" = 'Isolated' '''.format(class_field_name))
    DM.CalculateField("out_fc_lyr", class_field_name, """'Headwater'""", "PYTHON")

    # Select csiwaterbody that intersect trace2junctions: downstream of a
    # 10 ha lake -> DrainageLk (overwrites any earlier class on selection).
    arcpy.AddMessage("Beginning connectivity attribution...")
    DM.SelectLayerByLocation("out_fc_lyr", "INTERSECT", trace2_junctions,
                             XY_TOLERANCE, "NEW_SELECTION")
    DM.CalculateField("out_fc_lyr", class_field_name, """'DrainageLk'""", "PYTHON")

    # Get stream drainage lakes. Either unassigned so far or convert "Headwater"
    # if a permanent stream flows into it, which is detected with "non_artificial_end".
    DM.SelectLayerByAttribute("out_fc_lyr", "NEW_SELECTION",
                              '''"{}" IS NULL'''.format(class_field_name))
    DM.CalculateField("out_fc_lyr", class_field_name, """'Drainage'""", "PYTHON")
    if exclude_intermit_flowlines:
        DM.SelectLayerByAttribute(
            "out_fc_lyr", "NEW_SELECTION",
            '''"{}" = 'Headwater' '''.format(class_field_name))
        DM.SelectLayerByLocation("out_fc_lyr", "INTERSECT", non_artificial_end,
                                 XY_TOLERANCE, "SUBSET_SELECTION")
        DM.CalculateField("out_fc_lyr", class_field_name, """'Drainage'""", "PYTHON")

        # Prevent 'upgrades' due to very odd flow situations and artifacts of
        # bad digitization. The effects of these are varied--to avoid
        # confusion, just keep the class assigned with all flowlines.
        # (Pass-2 only: compares the pass-1 field to the new permanent field.)

        # 1--Purely hypothetical, not seen in testing
        DM.SelectLayerByAttribute(
            "out_fc_lyr", "NEW_SELECTION",
            '''"Lake_Connectivity_Class" = 'Isolated' AND "Lake_Connectivity_Permanent" <> 'Isolated' ''')
        DM.CalculateField("out_fc_lyr", class_field_name, """'Isolated'""", "PYTHON")

        # 2--Headwater to Drainage upgrade seen in testing with odd multi-inlet flow situation
        DM.SelectLayerByAttribute(
            "out_fc_lyr", "NEW_SELECTION",
            '''"Lake_Connectivity_Class" = 'Headwater' AND "Lake_Connectivity_Permanent" IN ('Drainage', 'DrainageLk') ''')
        DM.CalculateField("out_fc_lyr", class_field_name, """'Headwater'""", "PYTHON")

        # 3--Drainage to DrainageLk upgrade seen in testing when intermittent stream segments were used
        # erroneously instead of artificial paths
        DM.SelectLayerByAttribute(
            "out_fc_lyr", "NEW_SELECTION",
            '''"Lake_Connectivity_Class" = 'Drainage' AND "Lake_Connectivity_Permanent" = 'DrainageLk' ''')
        DM.CalculateField("out_fc_lyr", class_field_name, """'Drainage'""", "PYTHON")
        DM.SelectLayerByAttribute("out_fc_lyr", "CLEAR_SELECTION")

        # Add change flag for users: 'Y' when the maximum and permanent
        # classifications disagree, else 'N'.
        DM.AddField(temp_fc, "Lake_Connectivity_Fluctuates", "Text", field_length="1")
        flag_codeblock = """def flag_calculate(arg1, arg2):
    if arg1 == arg2:
        return 'N'
    else:
        return 'Y'"""
        expression = 'flag_calculate(!Lake_Connectivity_Class!, !Lake_Connectivity_Permanent!)'
        DM.CalculateField(temp_fc, "Lake_Connectivity_Fluctuates", expression,
                          "PYTHON", flag_codeblock)

    # Project output once done with both. Switching CRS earlier causes trace problems.
    if not exclude_intermit_flowlines:
        DM.CopyFeatures(temp_fc, out_feature_class)
    else:
        DM.Project(temp_fc, out_feature_class, arcpy.SpatialReference(102039))

    # Clean up intermediates and trace group layers unless debugging.
    if not debug_mode:
        for item in this_tool_layers + this_tool_temp:
            if arcpy.Exists(item):
                DM.Delete(item)
    if not debug_mode:
        DM.Delete("trace1")
        DM.Delete("trace2")
    arcpy.AddMessage("{} classification is complete.".format(class_field_name))