def process_ws(ws_fc, zone_name):
    """Add zone ids, edge and multipart flags, state assignment, and glaciation
    status to a watershed feature class, then prefix every field with the zone
    name. Modifies ws_fc in place.

    :param ws_fc: watershed polygon feature class to modify
    :param zone_name: short zone label used to prefix the output field names
    """
    # generate new zone ids from lagoslakeid
    DM.AddField(ws_fc, 'zoneid', 'TEXT', field_length=10)
    DM.CalculateField(ws_fc, 'zoneid', '!lagoslakeid!', 'PYTHON')
    ws_fc_lyr = DM.MakeFeatureLayer(ws_fc)

    # flag multipart polygons
    DM.AddField(ws_fc, 'ismultipart', 'TEXT', field_length=2)
    with arcpy.da.UpdateCursor(ws_fc, ['ismultipart', 'SHAPE@']) as u_cursor:
        for row in u_cursor:
            row[0] = 'Y' if row[1].isMultipart else 'N'
            u_cursor.updateRow(row)

    print("Edge flags...")
    # add flag fields
    DM.AddField(ws_fc, 'onlandborder', 'TEXT', field_length=2)
    DM.AddField(ws_fc, 'oncoast', 'TEXT', field_length=2)

    # identify zones touching the land border: intersecting zones get 'Y',
    # then the selection is inverted and the rest get 'N'
    border_lyr = DM.MakeFeatureLayer(LAND_BORDER, 'border_lyr')
    DM.SelectLayerByLocation(ws_fc_lyr, 'INTERSECT', border_lyr)
    DM.CalculateField(ws_fc_lyr, 'onlandborder', "'Y'", 'PYTHON')
    DM.SelectLayerByAttribute(ws_fc_lyr, 'SWITCH_SELECTION')
    DM.CalculateField(ws_fc_lyr, 'onlandborder', "'N'", 'PYTHON')

    # identify coastal zones, same Y/N pattern as above
    coastal_lyr = DM.MakeFeatureLayer(COASTLINE, 'coastal_lyr')
    DM.SelectLayerByLocation(ws_fc_lyr, 'INTERSECT', coastal_lyr)
    DM.CalculateField(ws_fc_lyr, 'oncoast', "'Y'", 'PYTHON')
    DM.SelectLayerByAttribute(ws_fc_lyr, 'SWITCH_SELECTION')
    DM.CalculateField(ws_fc_lyr, 'oncoast', "'N'", 'PYTHON')

    print("State assignment...")
    # FIX: removed unused local 'state_geo' (a hard-coded gdb path that was
    # never referenced); find_states uses the module-level STATES_GEO constant
    find_states(ws_fc, STATES_GEO)
    # glaciation status
    calc_glaciation(ws_fc, 'zoneid')

    # preface the field names with the zone name
    DM.DeleteField(ws_fc, 'ORIG_FID')
    fields = [f.name for f in arcpy.ListFields(ws_fc, '*')
              if f.type not in ('OID', 'Geometry')
              and not f.name.startswith('Shape_')]
    for f in fields:
        new_fname = '{zn}_{orig}'.format(zn=zone_name, orig=f).lower()
        try:
            DM.AlterField(ws_fc, f, new_fname, clear_field_alias='TRUE')
        # deliberate best-effort: required fields raise here and we don't want
        # to rename them anyway (FIX: narrowed from a bare except)
        except Exception:
            pass

    # cleanup: delete every layer object created above (any local whose
    # variable name ends in 'lyr')
    lyr_objects = [lyr_object for var_name, lyr_object in locals().items()
                   if var_name.endswith('lyr')]
    for l in lyr_objects:
        DM.Delete(l)
def drought_analysis(date_string):
    """Download the US Drought Monitor shapefile for a given date, spatially
    join it to winery locations, publish the result to ArcGIS Online, and
    enrich the published feature service.

    :param date_string: USDM release date as used in the zip name (YYYYMMDD)
    """
    ARCPY.env.overwriteOutput = True
    working_dir = r"C:\Data\git\devsummit-14-python"
    zip_name = "USDM_" + date_string + "_M.zip"
    url = "http://droughtmonitor.unl.edu/data/shapefiles_m/" + zip_name
    mxd_path = OS.path.join(working_dir, "MapTemplate.mxd")
    lyr_template = OS.path.join(working_dir, "CurrentDroughtConditions.lyr")

    # download the zip and extract the shapefile
    drought_zip_file = URLLIB.URLopener()
    dzf = drought_zip_file.retrieve(url, OS.path.join(r"C:\Temp", zip_name))
    zf = ZIPFILE.ZipFile(dzf[0], "r")
    shp_name = [n for n in zf.namelist() if n.endswith('.shp')][0]
    zf.extractall(working_dir)
    drought = OS.path.splitext(shp_name)[0]
    DM.MakeFeatureLayer(OS.path.join(working_dir, shp_name), drought)

    #### Add Winery Data ####
    beerWinePath = OS.path.join(working_dir, "BeerWine", "BeerWine.gdb", "BeerWine")
    intermediate_output = OS.path.join(working_dir, "BeerWine", "BeerWine.gdb",
                                       "BeerWineDrought")
    wine = "BeerWine"
    DM.MakeFeatureLayer(beerWinePath, wine)
    DM.SelectLayerByAttribute(wine, "NEW_SELECTION", "Type = 'Winery'")
    ANALYSIS.SpatialJoin(drought, wine, intermediate_output, "JOIN_ONE_TO_ONE",
                         "KEEP_ALL")
    # best-effort: NAME may not exist on the joined output
    # (FIX: narrowed from a bare except)
    try:
        DM.DeleteField(intermediate_output, "NAME")
    except Exception:
        pass

    final_wine_drought = "Wine_Drought_Summary"
    DM.MakeFeatureLayer(intermediate_output, final_wine_drought)
    lf = DM.SaveToLayerFile(
        final_wine_drought,
        OS.path.join(working_dir, '{}.lyr'.format(final_wine_drought)))
    DM.ApplySymbologyFromLayer(lf, lyr_template)

    # SECURITY: hard-coded placeholder credentials; prefer prompting with
    # GETPASS.getpass("Enter AGOL password:") at runtime instead.
    pw = "PASSWORDHERE"
    service_name = "Drought_Wine_Service"
    agol = AGOLHandler("USERNAMEHERE", pw, service_name)
    publish_service(agol, service_name, mxd_path, lf[0])
    TIME.sleep(5)
    fs_url = agol.findItemURL('Feature Service')
    TIME.sleep(35)
    gp_url, jsondata = enrich(agol, fs_url + '/0',
                              '{}_Enriched'.format(service_name), agol.token)
    check_job_status(gp_url, jsondata, agol.token)

    # clean up downloaded/derived intermediates
    DM.Delete(OS.path.join(working_dir, shp_name))
    DM.Delete(OS.path.join(working_dir, lf[0]))
def createOutput(self, outputFC):
    """Creates an Output Feature Class with the Directional Mean Results.

    INPUTS:
    outputFC (str): path to the output feature class
    """
    #### Validate Output Workspace ####
    ERROR.checkOutputPath(outputFC)

    #### Shorthand Attributes ####
    ssdo = self.ssdo
    caseField = self.caseField

    #### Create Output Feature Class ####
    ARCPY.SetProgressor("default", ARCPY.GetIDMessage(84003))
    tempCFLayer = "tmpCFLayer"

    try:
        DM.MakeFeatureLayer(ssdo.inputFC, tempCFLayer)
        # The first OID starts a fresh selection; every OID after that is
        # accumulated onto the existing selection.
        selectionMode = "NEW_SELECTION"
        for value in self.cf.itervalues():
            for oid in value[0]:
                whereClause = "{0}={1}".format(ssdo.oidName, oid)
                DM.SelectLayerByAttribute(tempCFLayer, selectionMode,
                                          whereClause)
                selectionMode = "ADD_TO_SELECTION"
        UTILS.clearExtent(DM.CopyFeatures(tempCFLayer, outputFC))
    except:
        ARCPY.AddIDMessage("ERROR", 210, outputFC)
        raise SystemExit()

    #### Set Attribute ####
    self.outputFC = outputFC
def erase_and_clean(tract_id, block_id):
    """Erase zero-population blocks from one state's tracts, then drop slivers
    (< 5000 sq m) and dissolve the remainder back together by GISJOIN.

    tract_id: state FIPS code used to filter the tract layer
    block_id: state identifier used to build input/output shapefile paths
    """
    # Pull out the tracts belonging to this state.
    tract_clause = '"STATEFP" = \'{}\''.format(tract_id)
    state_tracts = mg.SelectLayerByAttribute(tract_layer, "NEW_SELECTION",
                                             tract_clause)

    # Read in the matching no-population block filter.
    block_layer = mg.MakeFeatureLayer(
        "data/blocks/nopop/{}_nopop.shp".format(block_id))

    # Erase the blocks from the state tracts.
    out_erase = "data/erase/{}_erase.shp".format(block_id)
    arcpy.analysis.Erase(state_tracts, block_layer,
                         out_feature_class=out_erase)

    # Explode to singlepart and compute areas so slivers can be identified.
    out_singlepart = "data/singlepart/{}_single.shp".format(block_id)
    mg.MultipartToSinglepart(out_erase, out_singlepart)
    single_layer = mg.MakeFeatureLayer(out_singlepart)
    mg.AddGeometryAttributes(single_layer, Geometry_Properties="AREA",
                             Area_Unit="SQUARE_METERS")

    # Keep only the non-sliver remainder (threshold: 5000 sq m) and dissolve.
    mg.SelectLayerByAttribute(single_layer, "NEW_SELECTION",
                              '"POLY_AREA" >= 5000')
    out_dissolve = "data/dissolve/{}_dissolve.shp".format(block_id)
    mg.Dissolve(single_layer, out_dissolve, dissolve_field="GISJOIN")
def lake_from_to(nhd_subregion_gdb, output_table):
    """Trace the NHD geometric network to build a lake-to-lake connectivity
    table (FROM_PERMANENT_ID -> TO_PERMANENT_ID) for lakes over 1 hectare.

    :param nhd_subregion_gdb: NHD subregion geodatabase containing
        NHDWaterbody, HYDRO_NET and its junctions
    :param output_table: path of the table to create and populate
    """
    arcpy.env.workspace = 'in_memory'
    waterbody0 = os.path.join(nhd_subregion_gdb, 'NHDWaterbody')
    network = os.path.join(nhd_subregion_gdb, 'Hydrography', 'HYDRO_NET')
    junctions0 = os.path.join(nhd_subregion_gdb, 'HYDRO_NET_Junctions')

    # use layers for selections. We will only work with lakes over 1 hectare
    # for this tool.
    waterbody = DM.MakeFeatureLayer(waterbody0, 'waterbody',
                                    where_clause=LAGOS_LAKE_FILTER)
    num_wbs = int(arcpy.GetCount_management(waterbody).getOutput(0))
    junctions = DM.MakeFeatureLayer(junctions0, 'junctions')
    DM.SelectLayerByLocation(junctions, 'INTERSECT', waterbody, '1 Meters',
                             'NEW_SELECTION')
    junctions_1ha = DM.MakeFeatureLayer(junctions, 'junctions_1ha')

    # create the output table
    DM.CreateTable(os.path.dirname(output_table),
                   os.path.basename(output_table))
    DM.AddField(output_table, 'FROM_PERMANENT_ID', 'TEXT', field_length=40)
    DM.AddField(output_table, 'TO_PERMANENT_ID', 'TEXT', field_length=40)

    # hold results in memory as a list of dicts until the final insert
    results = []
    counter = 0
    progress = .01
    arcpy.AddMessage("Starting network tracing...")
    with arcpy.da.SearchCursor(waterbody, 'Permanent_Identifier') as cursor:
        for row in cursor:
            # progress printer
            counter += 1
            if counter >= float(num_wbs) * progress:
                progress += .01
                # FIX: precision arg 1 was being passed to format(), not round()
                arcpy.AddMessage("{}% complete...".format(
                    round(progress * 100, 1)))

            # select this lake (FIX: renamed 'id', which shadowed the builtin)
            lake_id = row[0]
            where_clause = """"{0}" = '{1}'""".format('Permanent_Identifier',
                                                      lake_id)
            this_waterbody = DM.MakeFeatureLayer(waterbody, 'this_waterbody',
                                                 where_clause)

            # select junctions overlapping this lake. Only the downstream one
            # matters, the rest have no effect
            DM.SelectLayerByLocation(junctions_1ha, 'INTERSECT',
                                     this_waterbody, '1 Meters')
            count_junctions = int(
                arcpy.GetCount_management(junctions_1ha).getOutput(0))
            # FIX: track whether the trace layer was made so cleanup below
            # doesn't hit a NameError on the no-junction path
            this_junctions = None
            if count_junctions == 0:
                # add a row with no "TO" lake to the results
                results.append({'FROM': lake_id, 'TO': None})
            else:
                # copy with selection on, then trace downstream
                this_junctions = DM.MakeFeatureLayer(junctions_1ha,
                                                     'this_junctions')
                DM.TraceGeometricNetwork(network, 'downstream', this_junctions,
                                         'TRACE_DOWNSTREAM')
                # select lakes that intersect the downstream network with a
                # tolerance of 1 meter
                DM.SelectLayerByLocation(waterbody, 'INTERSECT',
                                         'downstream/NHDFlowline', '1 Meters',
                                         'NEW_SELECTION')
                # remove this lake itself from the selection
                DM.SelectLayerByAttribute(waterbody, 'REMOVE_FROM_SELECTION',
                                          where_clause)
                count_waterbody = int(
                    arcpy.GetCount_management(waterbody).getOutput(0))
                if count_waterbody == 0:
                    # nothing downstream: record a row with no "TO" lake
                    results.append({'FROM': lake_id, 'TO': None})
                else:
                    # one row per downstream lake id
                    # (FIX: inner cursor variable renamed so the Py2 list comp
                    # no longer clobbers the outer cursor's 'row')
                    to_ids = [to_row[0] for to_row in arcpy.da.SearchCursor(
                        waterbody, 'Permanent_Identifier')]
                    for to_id in to_ids:
                        results.append({'FROM': lake_id, 'TO': to_id})

            # delete the per-lake intermediates
            DM.SelectLayerByAttribute(waterbody, 'CLEAR_SELECTION')
            for item in [this_waterbody, this_junctions, 'downstream']:
                if item is not None:
                    DM.Delete(item)

    # insert the results into the output table
    insert_cursor = arcpy.da.InsertCursor(
        output_table, ['FROM_PERMANENT_ID', 'TO_PERMANENT_ID'])
    for result in results:
        insert_cursor.insertRow([result['FROM'], result['TO']])

    # delete everything else
    for item in [waterbody, junctions, junctions_1ha, 'in_memory']:
        DM.Delete(item)
    arcpy.AddMessage("Completed.")
def get_path_residence_times(in_file, cost_rast, out_raster, t_diff_fld_name,
                             workspace):
    """For each consecutive pair of point features, compute the least-cost
    path between them over cost_rast and distribute that feature's transit
    time (t_diff_fld_name) across the path cells, accumulating the per-cell
    residence times into out_raster.

    :param in_file: point feature class ordered so consecutive rows are
        consecutive fixes; must contain t_diff_fld_name
    :param cost_rast: cost raster used for PathDistance/CostPath
    :param out_raster: output raster of accumulated residence times
    :param t_diff_fld_name: transit-time field; defaults to T_DIFF_HRS if ''
    :param workspace: workspace to use; falls back to cwd (gdb not allowed)
    """
    if len(out_raster) == 0:
        arcpy.AddError("Missing argument: out_rast")
        raise Exception
    if len(t_diff_fld_name) == 0:
        t_diff_fld_name = "T_DIFF_HRS"

    arcpy.env.overwriteOutput = True

    # This is underhanded. It should be an argument.
    if arcpy.env.outputCoordinateSystem is None:
        arcpy.env.outputCoordinateSystem = cost_rast
    arcpy.AddMessage("coordinate system is %s"
                     % arcpy.env.outputCoordinateSystem.name)

    if len(workspace):
        arcpy.env.workspace = workspace
    if arcpy.env.workspace is None or len(arcpy.env.workspace) == 0:
        arcpy.env.workspace = os.getcwd()
    if '.gdb' in arcpy.env.workspace:
        arcpy.AddError(
            "Worskpace is a geodatabase. " +
            "This brings too much pain for this script to work.\n" +
            "%s" % arcpy.env.workspace
        )
        raise WorkspaceIsGeodatabase

    r = Raster(cost_rast)
    if r.maximum == 0 and r.minimum == 0:
        arcpy.AddMessage('Cost raster has only zero value. '
                         'Cannot calculate cost distances.')
        raise CostRasterIsZero

    # guard against >2 GiB rasters on a 32-bit NumPy build
    size = r.height * r.width * 4
    if size > 2 * 1028 ** 3:
        import struct
        struct_size = struct.calcsize("P") * 8
        if struct_size == 32:
            size_in_gb = float(size) / (1028 ** 3)
            arcpy.AddMessage(
                'Cost raster exceeds 2 GiB in size (%s GiB). '
                'This is too large for a 32 bit NumPy.' % size_in_gb)
            raise NumPyArrayExceedsSizeLimits

    if not check_points_are_in_cost_raster(in_file, cost_rast):
        arcpy.AddError('One or more input points do not intersect '
                       'the cost raster')
        raise PointNotOnRaster

    arcpy.env.snapRaster = cost_rast
    # FIX: removed dead locals 'suffix'/'wk' (computed but never used)

    ext = arcpy.env.extent
    if ext is None:
        arcpy.env.extent = r.extent
    arcpy.AddMessage("Extent is %s" % arcpy.env.extent)

    arcpy.env.cellSize = r.meanCellWidth
    arcpy.AddMessage("Cell size is %s" % arcpy.env.cellSize)
    cellsize_used = float(arcpy.env.cellSize)
    extent = arcpy.env.extent
    lower_left_coord = extent.lowerLeft

    arcpy.AddMessage('Currently in directory: %s\n' % os.getcwd())
    arcpy.AddMessage('Workspace is: %s' % arcpy.env.workspace)
    arcpy.AddMessage("lower left is %s" % lower_left_coord)

    if arcpy.env.mask is None:
        arcpy.AddMessage("Setting mask to %s" % cost_rast)
        arcpy.env.mask = cost_rast

    # accumulated transits (zero raster of the same shape as the cost raster)
    transit_array_accum = arcpy.RasterToNumPyArray(Raster(cost_rast) * 0)

    feat_layer = "feat_layer"
    arcmgt.MakeFeatureLayer(in_file, feat_layer)
    desc = arcpy.Describe(feat_layer)
    oid_fd_name = desc.OIDFieldName
    arcpy.AddMessage("oid_fd_name = %s" % oid_fd_name)

    # variable name is redundant now??? - should all calls be to oid_fd_name?
    target_fld = oid_fd_name

    proc_layer = "process_layer"
    arcmgt.MakeFeatureLayer(in_file, proc_layer)
    rows = arcpy.SearchCursor(proc_layer)
    last_target = None

    for row_cur in rows:
        transit_time = row_cur.getValue(t_diff_fld_name)

        # first row, or a zero transit time: nothing to distribute
        if last_target is None or transit_time == 0:
            message = 'Skipping %s = %s' % (oid_fd_name,
                                            row_cur.getValue(oid_fd_name))
            if transit_time == 0:
                message = message + " Transit time is zero"
            arcpy.AddMessage(message)
            last_target = row_cur.getValue(target_fld)
            last_oid = row_cur.getValue(oid_fd_name)
            continue

        arcpy.AddMessage("Processing %s %i"
                         % (oid_fd_name, row_cur.getValue(oid_fd_name)))

        # distance surface from the previous feature
        arcmgt.SelectLayerByAttribute(
            feat_layer, "NEW_SELECTION",
            '%s = %s' % (target_fld, last_target)
        )
        backlink_rast = arcpy.CreateScratchName("backlink")
        path_dist_rast = PathDistance(feat_layer, cost_rast,
                                      out_backlink_raster=backlink_rast)

        # extract the distance from the last point
        shp = row_cur.shape
        centroid = shp.centroid
        (x, y) = (centroid.X, centroid.Y)
        result = arcmgt.GetCellValue(path_dist_rast, "%s %s" % (x, y), "1")
        res_val = result.getOutput(0)

        if res_val == "NoData":
            this_oid = row_cur.getValue(oid_fd_name)
            arcpy.AddMessage("Got nodata for coordinate (%s, %s)" % (x, y))
            arcpy.AddMessage("Is the path between features %s and %s wholly "
                             "contained by the cost raster?"
                             % (last_oid, this_oid))
            pras_name = "pth_%s_%s.tif" % (last_oid, this_oid)
            arcpy.AddMessage("Attempting to save path raster as %s"
                             % pras_name)
            try:
                path_dist_rast.save(pras_name)
            except Exception as e:
                arcpy.AddMessage(e)
            raise PathDistanceIsNoData

        try:
            path_distance = float(res_val)
        except:
            # kludge around locale/radix issues
            # FIX: was 'if res_val.find(","):', which is truthy when the comma
            # is ABSENT (find returns -1) and only falsy for a leading comma
            if "," in res_val:
                res_val = res_val.replace(",", ".")
                path_distance = float(res_val)
            else:
                raise
        arcpy.AddMessage("Path distance is %s\nTransit time is %s"
                         % (path_distance, transit_time))

        # get a raster of the path from origin to destination
        condition = '%s in (%i, %i)' % (oid_fd_name, last_oid,
                                        row_cur.getValue(oid_fd_name))
        dest_layer = "dest_layer" + str(last_oid)
        arcmgt.MakeFeatureLayer(in_file, dest_layer, where_clause=condition)

        count = arcmgt.GetCount(dest_layer)
        count = int(count.getOutput(0))
        if count == 0:
            raise NoFeatures("No features selected. "
                             "Possible coordinate system issues.\n"
                             + condition)

        try:
            path_cost_rast = CostPath(dest_layer, path_dist_rast,
                                      backlink_rast)
        except Exception as e:
            raise

        try:
            # mask the distance surface down to the path cells only
            pcr_mask = 1 - IsNull(path_cost_rast)
            dist_masked = path_dist_rast * pcr_mask
            path_array = arcpy.RasterToNumPyArray(dist_masked,
                                                  nodata_to_value=-9999)
            path_array_idx = numpy.where(path_array > 0)
            # past experience suggests we might need to use a different
            # approach to guarantee we get zeroes
            transit_array = numpy.zeros_like(path_array)
        except:
            raise

        path_sum = None
        arcpy.AddMessage("processing %i cells of path raster"
                         % (len(path_array_idx[0])))

        if path_distance == 0 or not len(path_array_idx[0]):
            # stayed in the same cell: the whole transit goes to the one
            # masked cell
            path_sum = 1
            mask_array = arcpy.RasterToNumPyArray(pcr_mask,
                                                  nodata_to_value=-9999)
            mask_array_idx = numpy.where(mask_array == 1)
            i = mask_array_idx[0][0]
            j = mask_array_idx[1][0]
            transit_array[i][j] = path_sum
        else:
            # each path cell gets the smallest positive drop to a neighbour,
            # i.e. the incremental distance spent in that cell
            row_count = len(path_array)
            col_count = len(path_array[0])
            for idx in range(len(path_array_idx[0])):
                i = path_array_idx[0][idx]
                j = path_array_idx[1][idx]
                val = path_array[i][j]
                nbrs = []
                for k in (i - 1, i, i + 1):
                    if k < 0 or k >= row_count:
                        continue
                    checkrow = path_array[k]
                    for l in (j - 1, j, j + 1):
                        if l < 0 or l >= col_count:
                            continue
                        if k == i and j == l:
                            continue  # don't check self
                        checkval = checkrow[l]
                        # negs are nodata, and this way we
                        # don't need to care what that value is
                        if checkval >= 0:
                            diff = val - checkval
                            if diff > 0:
                                nbrs.append(diff)
                diff = min(nbrs)
                transit_array[i][j] = diff
            path_sum = path_array.max()  # could use path_distance?
            # sometimes we get a zero path_sum even when the path_distance
            # is non-zero
            if path_sum == 0:
                path_sum = 1

        # Increment the cumulative transit array by the fraction of the
        # transit time spent in each cell.
        # Use path_sum because it corrects for cases where we stayed in the
        # same cell.
        transit_array_accum = transit_array_accum + (
            (transit_array / path_sum) * transit_time)

        try:
            arcmgt.Delete(backlink_rast)
            arcmgt.Delete(dest_layer)
        except Exception as e:
            arcpy.AddMessage(e)

        # getting off-by-one errors when using the environment, so use this
        # directly
        ext = path_cost_rast.extent
        lower_left_coord = ext.lowerLeft

        last_target = row_cur.getValue(target_fld)
        last_oid = row_cur.getValue(oid_fd_name)

    # need to use env settings to get it to be the correct size
    try:
        arcpy.AddMessage("lower left is %s" % lower_left_coord)
        xx = arcpy.NumPyArrayToRaster(transit_array_accum, lower_left_coord,
                                      cellsize_used, cellsize_used, 0)
        print("Saving to %s" % out_raster)
        xx.save(out_raster)
    except:
        raise

    print("Completed")
    return ()
def georeference_lakes(
        lake_points_fc, out_fc, lake_id_field, lake_name_field,
        lake_county_field='', state='',
        master_gdb=r'C:\Users\smithn78\Dropbox\CL_HUB_GEO\Lake_Georeferencing\Masters_for_georef.gdb'):
    """
    Evaluate water quality sampling point locations and either assign the point
    to a lake polygon or flag the point for manual review.
    :param lake_points_fc: feature class of sampling points to link
    :param lake_id_field: source agency's lake id field on lake_points_fc
    :param lake_name_field: source agency's lake name field on lake_points_fc
    :param lake_county_field: optional county field (county logic currently
        unused; see note below)
    :param state: 2-letter state code abbreviation
    :param master_gdb: Location of master geodatabase used for linking
    :param out_fc: output feature class of linked/flagged points
    :return: None
    """
    master_lakes_fc = os.path.join(master_gdb, MASTER_LAKES_FC)
    master_lakes_lines = os.path.join(master_gdb, MASTER_LAKES_LINES)
    master_streams_fc = os.path.join(master_gdb, MASTER_STREAMS_FC)
    master_xwalk = os.path.join(master_gdb, MASTER_XWALK)

    # setup
    arcpy.AddMessage("Joining...")
    state = state.upper()
    if state not in STATES:
        raise ValueError('Use the 2-letter state code abbreviation')
    arcpy.env.workspace = 'in_memory'
    out_short = os.path.splitext(os.path.basename(out_fc))[0]
    # intermediate join outputs (FIX: removed unused 'join3_select'/'joinx')
    join1 = '{}_1'.format(out_short)
    join2 = '{}_2'.format(out_short)
    join3 = '{}_3'.format(out_short)
    join4 = '{}_4'.format(out_short)
    join5 = '{}_5'.format(out_short)

    # FIX: original indexed ListFields(...)[0] unconditionally, which raises
    # IndexError when no field matches; validate only when a county field was
    # actually supplied
    if lake_county_field:
        county_matches = arcpy.ListFields(
            lake_points_fc, '{}*'.format(lake_county_field))
        if not county_matches or lake_county_field not in county_matches[0].name:
            print('{} field does not exist in dataset.'.format(
                lake_county_field))
            raise Exception

    point_fields = [f.name for f in arcpy.ListFields(lake_points_fc)]

    # update the lake id to a text field if not already
    lake_id_field_type = arcpy.ListFields(lake_points_fc,
                                          lake_id_field)[0].type
    if lake_id_field_type != 'String':
        temp_id_field = '{}_t'.format(lake_id_field)
        # FIX: '255' was being passed positionally as field_precision;
        # it is the text length
        arcpy.AddField_management(lake_points_fc, temp_id_field, 'TEXT',
                                  field_length=255)
        expr = '!{}!'.format(lake_id_field)
        arcpy.CalculateField_management(lake_points_fc, temp_id_field, expr,
                                        'PYTHON')
        arcpy.DeleteField_management(lake_points_fc, lake_id_field)
        arcpy.AlterField_management(lake_points_fc, temp_id_field,
                                    new_field_name=lake_id_field)

    # Try to make some spatial connections and fulfill some logic to assign
    # a link: direct intersect, stream intersect, then 10m and 100m buffers
    join1 = AN.SpatialJoin(lake_points_fc, master_lakes_fc, join1,
                           'JOIN_ONE_TO_MANY', 'KEEP_ALL',
                           match_option='INTERSECT')
    join2 = AN.SpatialJoin(join1, master_streams_fc, join2,
                           'JOIN_ONE_TO_MANY', 'KEEP_ALL',
                           match_option='INTERSECT')
    join3 = AN.SpatialJoin(join2, master_lakes_fc, join3,
                           'JOIN_ONE_TO_MANY', 'KEEP_ALL',
                           match_option='INTERSECT',
                           search_radius='10 meters')
    join4 = AN.SpatialJoin(join3, master_lakes_fc, join4,
                           'JOIN_ONE_TO_MANY', 'KEEP_ALL',
                           match_option='INTERSECT',
                           search_radius='100 meters')

    # setup for editing lake assignment values
    DM.AddField(join4, 'Auto_Comment', 'TEXT', field_length=100)
    DM.AddField(join4, 'Manual_Review', 'SHORT')
    DM.AddField(join4, 'Shared_Words', 'TEXT', field_length=100)
    DM.AddField(join4, 'Linked_lagoslakeid', 'LONG')
    DM.AddField(join4, 'GEO_Discovered_Name', 'TEXT', field_length=255)
    DM.AddField(join4, 'Duplicate_Candidate', 'TEXT', field_length=1)
    DM.AddField(join4, 'Is_Legacy_Link', 'TEXT', field_length=1)

    update_fields = [
        lake_id_field, lake_name_field,
        MASTER_LAKE_ID, MASTER_GNIS_NAME,  # 0m match
        'PERMANENT_IDENTIFIER_1', 'GNIS_NAME_1',  # stream match
        MASTER_LAKE_ID + '_1', MASTER_GNIS_NAME + '_12',  # 10m match
        MASTER_LAKE_ID + '_12', MASTER_GNIS_NAME + '_12_13',  # 100m match
        'Auto_Comment', 'Manual_Review', 'Shared_Words', 'Linked_lagoslakeid'
    ]

    # use a cursor to go through each point and evaluate its assignment
    cursor = arcpy.da.UpdateCursor(join4, update_fields)
    arcpy.AddMessage("Calculating link status...")
    for row in cursor:
        # FIX: renamed 'id' (shadowed builtin) to 'point_id'
        (point_id, name, mid_0, mname_0, stream_id, streamname_0,
         mid_10, mname_10, mid_100, mname_100,
         comment, review, words, lagosid) = row
        if mid_0 is not None:  # the point is directly in a polygon
            if name and mname_0:
                words = lagosGIS.list_shared_words(
                    name, mname_0, exclude_lake_words=False)
            comment = 'Exact location link'
            lagosid = mid_0
            review = -1
        elif mid_0 is None and mid_10 is not None:
            # the point is only within 10m of a lake
            if name and mname_10:
                words = lagosGIS.list_shared_words(
                    name, mname_10, exclude_lake_words=False)
            if words:
                comment = 'Linked by common name and location'
                lagosid = mid_10
                review = -1
            else:
                comment = 'Linked by common location'
                lagosid = mid_10
                review = 1
        elif mid_0 is None and mid_10 is None:
            if stream_id is not None:  # there is a stream match
                comment = 'Not linked because represented as river in NHD'
                review = 2
            else:
                if mid_100 is not None:
                    # the point is only within 100m of lake(s)
                    if name and mname_100:
                        words = lagosGIS.list_shared_words(
                            name, mname_100, exclude_lake_words=True)
                    # TODO: Frequency check
                    if words:
                        comment = 'Linked by common name and location'
                        lagosid = mid_100
                        review = 1
                    else:
                        comment = 'Linked by common location'
                        lagosid = mid_100
                        review = 2
        cursor.updateRow((point_id, name, mid_0, mname_0, stream_id,
                          streamname_0, mid_10, mname_10, mid_100, mname_100,
                          comment, review, words, lagosid))

    # NOTE: a large commented-out chunk of unfinished county-based linking
    # logic was removed here; recover it from version control if ever needed.
    join5 = join4

    if state in LAGOSNE_STATES:
        # link through the LAGOS-NE legacy id crosswalk
        DM.JoinField(join4, lake_id_field, master_xwalk, 'lagosne_legacyid',
                     ['lagoslakeid', 'lagos_lakename', 'lagos_state'])
        update_fields = [
            lake_id_field, lake_name_field,
            MASTER_LAKE_ID + '_12_13', 'lagos_lakename',
            'lagos_state',  # crosswalk match
            'Auto_Comment', 'Manual_Review', 'Shared_Words',
            'Linked_lagoslakeid', 'Is_Legacy_Link'
        ]
        with arcpy.da.UpdateCursor(join4, update_fields) as uCursor:
            for uRow in uCursor:
                (point_id, name, mid_x, mname_x, state_x, comment, review,
                 words, lagosid, legacy_flag) = uRow
                # fields are populated already from links above.
                # Revise only if legacy links
                if mid_x is not None:
                    if state == state_x:
                        # set to Y regardless of whether using legacy comment
                        # if state matches
                        legacy_flag = 'Y'
                        if comment != 'Exact location link':
                            review = 1
                    if state != state_x:
                        # downgrade if states mismatch--border lakes OK,
                        # random common IDs NOT. Check.
                        review = 3
                        legacy_flag = 'Y'
                    # only comment non-exact location matches
                    comment = 'LAGOS-NE legacy link'
                    lagosid = mid_x
                    if name and mname_x:
                        # update words only if legacy comment
                        words = lagosGIS.list_shared_words(name, mname_x)
                new_row = (point_id, name, mid_x, mname_x, state_x, comment,
                           review, words, lagosid, legacy_flag)
                uCursor.updateRow(new_row)

    # then re-code the no matches as a 3 and copy comments to the editable
    # field; having two fields lets us keep track of how many of the auto
    # matches are bad
    if arcpy.ListFields(join5, 'Comment'):
        comment_field_name = 'Comment_LAGOS'
    else:
        comment_field_name = 'Comment'
    DM.AddField(join5, comment_field_name, 'TEXT', field_length=100)
    # NOTE(review): this cursor writes to 'Comment' even when the new field
    # was named 'Comment_LAGOS' — looks suspect, confirm intended behavior
    with arcpy.da.UpdateCursor(
            join5, ['Manual_Review', 'Auto_Comment', 'Comment']) as cursor:
        for flag, ac, comment in cursor:
            if flag is None:
                flag = 3
                ac = 'Not linked'
            comment = ac
            cursor.updateRow((flag, ac, comment))

    # Re-code points more than 100m into the polygon of the lake as no need
    # to check
    DM.MakeFeatureLayer(join5, 'join5_lyr')
    DM.MakeFeatureLayer(master_lakes_lines, 'lake_lines_lyr')
    DM.SelectLayerByAttribute('join5_lyr', 'NEW_SELECTION',
                              "Auto_Comment = 'Exact location link'")
    DM.SelectLayerByLocation('join5_lyr', 'INTERSECT', 'lake_lines_lyr',
                             '100 meters', 'SUBSET_SELECTION', 'INVERT')
    DM.CalculateField('join5_lyr', 'Manual_Review', '-2', 'PYTHON')
    # FIX: Delete() takes a single dataset (plus an optional data-type hint);
    # the original passed the second layer as the data_type argument
    DM.Delete('join5_lyr')
    DM.Delete('lake_lines_lyr')

    # Then make sure to only keep the fields necessary when you write to an
    # output
    copy_fields = point_fields + [
        'Linked_lagoslakeid', 'Auto_Comment', 'Manual_Review',
        'Is_Legacy_Link', 'Shared_Words', 'Comment', 'Duplicate_Candidate',
        'GEO_Discovered_Name'
    ]
    copy_fields.remove('Shape')
    copy_fields.remove('OBJECTID')
    lagosGIS.select_fields(join5, out_fc, copy_fields)
    DM.AssignDomainToField(out_fc, 'Comment', 'Comment')
    DM.AddField(out_fc, 'Total_points_in_lake_poly', 'Short')

    # Remove any duplicates. (These originate from the join3/join4 transition
    # because a point can be both within 10m and 100m of lakes; this code
    # takes the closest lake as true for my current sanity.)
    out_fc_fields = [f.name for f in arcpy.ListFields(out_fc)
                     if f.name != 'OBJECTID']
    DM.DeleteIdentical(out_fc, out_fc_fields)

    # Get the join_count for each limno lake ID
    # De-dupe anything resulting from limno ID duplicates first before
    # counting
    id_pairs = list(set(arcpy.da.SearchCursor(
        out_fc, [lake_id_field, 'Linked_lagoslakeid'])))
    # THEN pull out LAGOS id. Any duplicates now are only due to multiple
    # distinct points within lake
    lagos_ids = [ids[1] for ids in id_pairs]
    sample_ids = [ids[0] for ids in id_pairs]
    lagos_lake_counts = Counter(lagos_ids)
    linked_multiple_lake_counts = Counter(sample_ids)

    # Get the count of points in the polygon
    with arcpy.da.UpdateCursor(
            out_fc,
            ['Linked_lagoslakeid', 'Total_points_in_lake_poly']) as cursor:
        for lagos_id, join_count in cursor:
            join_count = lagos_lake_counts[lagos_id]
            cursor.updateRow((lagos_id, join_count))

    # Mark any samples linked to more than one lake so that the analyst can
    # select the correct lake in the manual process
    with arcpy.da.UpdateCursor(
            out_fc, [lake_id_field, 'Duplicate_Candidate']) as cursor:
        for sample_id, duplicate_flag in cursor:
            duplicate_count = linked_multiple_lake_counts[sample_id]
            duplicate_flag = "Y" if duplicate_count > 1 else "N"
            cursor.updateRow((sample_id, duplicate_flag))

    # clean up
    DM.AddField(out_fc, 'Note', 'TEXT', field_length=140)
    DM.Delete('in_memory')
    arcpy.AddMessage('Completed.')
except Exception as e: print e.message arcpy.AddError(e.message) raise rows = arcpy.UpdateCursor(table_view) last_target = None for row in rows: if last_target is None: last_target = row.getValue(target_fld) continue arcmgt.SelectLayerByAttribute(layer, "NEW_SELECTION", '%s = %s' % (target_fld, last_target)) raster = PathDistance(layer, cost_rast) shp = row.shape centroid = shp.centroid (x, y) = (centroid.X, centroid.Y) result = arcmgt.GetCellValue(raster, "%s %s" % (x, y), "1") value = result.getOutput(0) row.setValue("PATH_TO", float(row.getValue(target_fld))) row.setValue("PATH_FROM", float(last_target)) row.setValue("PATH_DIST", float(value)) print "%s,%s,%s" % (row.getValue(target_fld), last_target, result.getOutput(0)) rows.updateRow(row) last_target = row.getValue(target_fld)
def process_zone(zone_fc, output, zone_name, zone_id_field, zone_name_field,
                 other_keep_fields, clip_hu8, lagosne_name):
    """Standardize one LAGOS spatial-division layer into the project schema.

    Dissolves ``zone_fc`` on its identifying fields, clips it to the master
    study boundary (optionally also to the HU8 footprint), filters out sliver
    polygons, assigns ``zoneid`` values (reusing LAGOS-NE ids when
    ``lagosne_name`` is given), flags border/coastal zones, assigns a state,
    prefixes all field names with ``zone_name``, and writes the result to
    ``output``. Intermediates are created in the current arcpy workspace and
    deleted at the end. Returns nothing; side effects only.
    """
    # dissolve fields by the field that zone_id is based on (the field that identifies a unique zone)
    # other_keep_fields is a comma-space separated string; empty entries dropped.
    dissolve_fields = [
        f for f in "{}, {}, {}".format(zone_id_field, zone_name_field,
                                       other_keep_fields).split(', ')
        if f != ''
    ]
    print("Dissolving...")
    dissolve1 = DM.Dissolve(zone_fc, 'dissolve1', dissolve_fields)

    # update name field to match our standard
    DM.AlterField(dissolve1, zone_name_field, 'name')

    # original area (hectares), captured before any clipping
    DM.AddField(dissolve1, 'originalarea', 'DOUBLE')
    DM.CalculateField(dissolve1, 'originalarea', '!shape.area@hectares!',
                      'PYTHON')

    #clip
    print("Clipping...")
    clip = AN.Clip(dissolve1, MASTER_CLIPPING_POLY, 'clip')
    if clip_hu8 == 'Y':
        final_clip = AN.Clip(clip, HU8_OUTPUT, 'final_clip')
    else:
        final_clip = clip

    print("Selecting...")
    # calc new area, orig area pct, compactness
    DM.AddField(final_clip, 'area_ha', 'DOUBLE')
    DM.AddField(final_clip, 'originalarea_pct', 'DOUBLE')
    DM.AddField(final_clip, 'compactness', 'DOUBLE')
    # NOTE(review): transfers 'originalarea_pct' from dissolve1, but dissolve1
    # only visibly carries 'originalarea' — confirm the intended field here.
    DM.JoinField(final_clip, zone_id_field, dissolve1, zone_id_field,
                 'originalarea_pct')

    uCursor_fields = [
        'area_ha', 'originalarea_pct', 'originalarea', 'compactness',
        'SHAPE@AREA', 'SHAPE@LENGTH'
    ]
    with arcpy.da.UpdateCursor(final_clip, uCursor_fields) as uCursor:
        for row in uCursor:
            area, orig_area_pct, orig_area, comp, shape_area, shape_length = row
            area = shape_area / 10000  # convert from m2 to hectares
            orig_area_pct = round(100 * area / orig_area, 2)
            # Polsby-Popper style compactness: 4*pi*A / P^2, in [0, 1].
            comp = 4 * 3.14159 * shape_area / (shape_length**2)
            row = (area, orig_area_pct, orig_area, comp, shape_area,
                   shape_length)
            uCursor.updateRow(row)

    # if zones are present with <5% of original area and a compactness measure of <.2 (ranges from 0-1)
    # AND ALSO they are no bigger than 500 sq. km. (saves Chippewa County and a WWF), filter out
    # save eliminated polygons to temp database as a separate layer for inspection
    # Different processing for HU4 and HU8, so that they match the extent of HU8 more closely but still throw out tiny slivers
    # County also only eliminated if a tiny, tiny, tiny sliver (so: none should be eliminated)
    if zone_name not in ('hu4', 'hu12', 'county'):
        selected = AN.Select(
            final_clip, 'selected',
            "originalarea_pct >= 5 OR compactness >= .2 OR area_ha > 50000")
        # Kept only for manual inspection of what was eliminated.
        not_selected = AN.Select(
            final_clip, '{}_not_selected'.format(output),
            "originalarea_pct < 5 AND compactness < .2 AND area_ha < 50000")
    else:
        selected = final_clip

    # eliminate small slivers, re-calc area fields, add perimeter and multipart flag
    # leaves the occasional errant sliver but some areas over 25 hectares are more valid so this is
    # CONSERVATIVE
    print("Trimming...")
    trimmed = DM.EliminatePolygonPart(selected, 'trimmed', 'AREA',
                                      '25 Hectares', part_option='ANY')

    # gather up a few calculations into one cursor because this is taking too long over the HU12 layer
    DM.AddField(trimmed, 'perimeter_m', 'DOUBLE')
    DM.AddField(trimmed, 'multipart', 'TEXT', field_length=1)
    uCursor_fields = [
        'area_ha', 'originalarea_pct', 'originalarea', 'perimeter_m',
        'multipart', 'SHAPE@'
    ]
    with arcpy.da.UpdateCursor(trimmed, uCursor_fields) as uCursor:
        for row in uCursor:
            area, orig_area_pct, orig_area, perim, multipart, shape = row
            area = shape.area / 10000  # convert to hectares from m2
            orig_area_pct = round(100 * area / orig_area, 2)
            perim = shape.length
            # multipart flag calc
            if shape.isMultipart:
                multipart = 'Y'
            else:
                multipart = 'N'
            row = (area, orig_area_pct, orig_area, perim, multipart, shape)
            uCursor.updateRow(row)

    # delete intermediate fields
    DM.DeleteField(trimmed, 'compactness')
    DM.DeleteField(trimmed, 'originalarea')

    print("Zone IDs....")
    # link to LAGOS-NE zone IDs
    DM.AddField(trimmed, 'zoneid', 'TEXT', field_length=40)
    trimmed_lyr = DM.MakeFeatureLayer(trimmed, 'trimmed_lyr')
    if lagosne_name:
        # join to the old master GDB path on the same master field and copy in the ids
        old_fc = os.path.join(LAGOSNE_GDB, lagosne_name)
        old_fc_lyr = DM.MakeFeatureLayer(old_fc, 'old_fc_lyr')
        # STATE/COUNTY join on FIPS; everything else on the shared zone id field.
        if lagosne_name == 'STATE' or lagosne_name == 'COUNTY':
            DM.AddJoin(trimmed_lyr, zone_id_field, old_fc_lyr, 'FIPS')
        else:
            DM.AddJoin(trimmed_lyr, zone_id_field, old_fc_lyr,
                       zone_id_field)  # usually works because same source data

        # copy
        DM.CalculateField(trimmed_lyr, 'zoneid',
                          '!{}.ZoneID!.lower()'.format(lagosne_name), 'PYTHON')
        DM.RemoveJoin(trimmed_lyr)

    # generate new zone ids
    old_ids = [row[0] for row in arcpy.da.SearchCursor(trimmed, 'zoneid')]
    with arcpy.da.UpdateCursor(trimmed, 'zoneid') as cursor:
        counter = 1
        for row in cursor:
            if not row[0]:  # if no existing ID borrowed from LAGOS-NE, assign a new one
                new_id = '{name}_{num}'.format(name=zone_name, num=counter)

                # ensures new ids don't re-use old numbers but fills in all positive numbers eventually
                while new_id in old_ids:
                    counter += 1
                    new_id = '{name}_{num}'.format(name=zone_name, num=counter)
                row[0] = new_id
                cursor.updateRow(row)
                counter += 1

    print("Edge flags...")
    # add flag fields
    DM.AddField(trimmed, 'onlandborder', 'TEXT', field_length=2)
    DM.AddField(trimmed, 'oncoast', 'TEXT', field_length=2)

    # identify border zones: Y if the zone intersects the land border, else N
    border_lyr = DM.MakeFeatureLayer(LAND_BORDER, 'border_lyr')
    DM.SelectLayerByLocation(trimmed_lyr, 'INTERSECT', border_lyr)
    DM.CalculateField(trimmed_lyr, 'onlandborder', "'Y'", 'PYTHON')
    DM.SelectLayerByAttribute(trimmed_lyr, 'SWITCH_SELECTION')
    DM.CalculateField(trimmed_lyr, 'onlandborder', "'N'", 'PYTHON')

    # identify coastal zones: same Y/N pattern against the coastline layer
    coastal_lyr = DM.MakeFeatureLayer(COASTLINE, 'coastal_lyr')
    DM.SelectLayerByLocation(trimmed_lyr, 'INTERSECT', coastal_lyr)
    DM.CalculateField(trimmed_lyr, 'oncoast', "'Y'", 'PYTHON')
    DM.SelectLayerByAttribute(trimmed_lyr, 'SWITCH_SELECTION')
    DM.CalculateField(trimmed_lyr, 'oncoast', "'N'", 'PYTHON')

    print("State assignment...")
    # State assignment: prefer the state containing the zone's center; fall
    # back to any intersecting state.
    DM.AddField(trimmed, "state", 'text', field_length='2')
    state_center = arcpy.SpatialJoin_analysis(
        trimmed, STATE_FC, 'state_center',
        join_type='KEEP_COMMON',
        match_option='HAVE_THEIR_CENTER_IN')
    state_intersect = arcpy.SpatialJoin_analysis(
        trimmed, STATE_FC, 'state_intersect', match_option='INTERSECT')
    state_center_dict = {
        row[0]: row[1]
        for row in arcpy.da.SearchCursor(state_center, ['ZoneID', 'STUSPS'])
    }
    state_intersect_dict = {
        row[0]: row[1]
        for row in arcpy.da.SearchCursor(state_intersect, ['ZoneID', 'STUSPS'])
    }
    with arcpy.da.UpdateCursor(trimmed, ['ZoneID', 'state']) as cursor:
        for updateRow in cursor:
            keyValue = updateRow[0]
            if keyValue in state_center_dict:
                updateRow[1] = state_center_dict[keyValue]
            else:
                # NOTE(review): raises KeyError if a zone intersects no state
                # at all — presumably impossible after clipping; confirm.
                updateRow[1] = state_intersect_dict[keyValue]
            cursor.updateRow(updateRow)

    # glaciation status?
    # TODO as version 0.6

    # preface the names with the zones
    DM.DeleteField(trimmed, 'ORIG_FID')
    fields = [
        f.name for f in arcpy.ListFields(trimmed, '*')
        if f.type not in ('OID', 'Geometry')
        and not f.name.startswith('Shape_')
    ]
    for f in fields:
        new_fname = '{zn}_{orig}'.format(zn=zone_name, orig=f).lower()
        try:
            DM.AlterField(trimmed, f, new_fname, clear_field_alias='TRUE')
        # sick of debugging the required field message-I don't want to change required fields anyway
        except:
            pass

    DM.CopyFeatures(trimmed, output)

    # cleanup: delete every *lyr local plus all temp feature classes in the workspace
    lyr_objects = [
        lyr_object for var_name, lyr_object in locals().items()
        if var_name.endswith('lyr')
    ]
    temp_fcs = arcpy.ListFeatureClasses('*')
    for l in lyr_objects + temp_fcs:
        DM.Delete(l)
def classify_lakes(nhd, out_feature_class, exclude_intermit_flowlines=False,
                   debug_mode=False):
    """Classify NHD lakes by hydrologic connectivity via geometric-network traces.

    Each lake >= 1 ha gets a class of 'Isolated', 'Headwater', 'Drainage', or
    'DrainageLk' written to ``out_feature_class``. Two-pass design: run once
    with ``exclude_intermit_flowlines=False`` (writes Lake_Connectivity_Class),
    then again with ``True`` over the same output (writes
    Lake_Connectivity_Permanent and a Lake_Connectivity_Fluctuates Y/N flag).

    :param nhd: path to an NHD geodatabase with a Hydrography dataset and
        HYDRO_NET geometric network
    :param out_feature_class: output lake polygons (consumed and rewritten on
        the second pass)
    :param exclude_intermit_flowlines: drop intermittent flowlines
        (FCode 46003/46007) before tracing
    :param debug_mode: work in a temp file GDB (kept for inspection) instead
        of in_memory
    """
    if debug_mode:
        arcpy.env.overwriteOutput = True
        temp_gdb = cu.create_temp_GDB('classify_lake_connectivity')
        arcpy.env.workspace = temp_gdb
        arcpy.AddMessage('Debugging workspace located at {}'.format(temp_gdb))
    else:
        arcpy.env.workspace = 'in_memory'

    # Guard against leftovers from a prior run colliding with our temp names.
    if arcpy.Exists("temp_fc"):
        print("There is a problem here.")
        raise Exception

    # Tool temporary feature classes (names in the current workspace)
    temp_fc = "temp_fc"
    csiwaterbody_10ha = "csiwaterbody_10ha"
    nhdflowline_filtered = "nhdflowline_filtered"
    dangles = "dangles"
    start = "start"
    end = "end"
    startdangles = "startdangles"
    enddangles = "enddangles"
    non_artificial_end = "non_artificial_end"
    flags_10ha_lake_junctions = "flags_10ha_lake_junctions"
    midvertices = "midvertices"
    non10vertices = "non10vertices"
    non10junctions = "non10junctions"
    all_non_flag_points = "all_non_flag_points"
    barriers = "barriers"
    trace1_junctions = "trace1_junctions"
    trace1_flowline = "trace1_flowline"
    trace2_junctions = "trace2junctions"
    trace2_flowline = "trace2_flowline"

    # Clean up workspace in case of bad exit from prior run in same session.
    this_tool_layers = [
        "dangles_lyr", "nhdflowline_lyr", "junction_lyr", "midvertices_lyr",
        "all_non_flag_points_lyr", "non10vertices_lyr", "out_fc_lyr",
        "trace1", "trace2"
    ]
    this_tool_temp = [
        temp_fc, csiwaterbody_10ha, nhdflowline_filtered, dangles, start, end,
        startdangles, enddangles, non_artificial_end,
        flags_10ha_lake_junctions, midvertices, non10vertices, non10junctions,
        all_non_flag_points, barriers, trace1_junctions, trace1_flowline,
        trace2_junctions, trace2_flowline
    ]
    for item in this_tool_layers + this_tool_temp:
        try:
            DM.Delete(item)
        except:
            pass

    # Local variables:
    nhdflowline = os.path.join(nhd, "Hydrography", "NHDFLowline")
    nhdjunction = os.path.join(nhd, "Hydrography", "HYDRO_NET_Junctions")
    nhdwaterbody = os.path.join(nhd, "Hydrography", "NHDWaterbody")
    network = os.path.join(nhd, "Hydrography", "HYDRO_NET")

    # Get lakes, ponds and reservoirs over a hectare.
    #csi_population_filter = '''"AreaSqKm" >=0.01 AND\
    #"FCode" IN (39000,39004,39009,39010,39011,39012,43600,43613,43615,43617,43618,43619,43621)'''
    all_lakes_reservoirs_filter = '''"FType" IN (390, 436)'''

    # Can't see why we shouldn't just attribute all lakes and reservoirs
    # arcpy.Select_analysis(nhdwaterbody, "csiwaterbody", lake_population_filter)
    arcpy.AddMessage("Initializing output.")
    if exclude_intermit_flowlines:
        # Second pass: start from the first pass's output, which we rewrite.
        DM.CopyFeatures(out_feature_class, temp_fc)
        DM.Delete(out_feature_class)
    else:
        arcpy.Select_analysis(nhdwaterbody, temp_fc,
                              all_lakes_reservoirs_filter)

    # Get lakes, ponds and reservoirs over 10 hectares.
    lakes_10ha_filter = '''"AreaSqKm" >= 0.1 AND "FType" IN (390, 436)'''
    arcpy.Select_analysis(nhdwaterbody, csiwaterbody_10ha, lakes_10ha_filter)

    # Exclude intermittent flowlines, if requested
    if exclude_intermit_flowlines:
        flowline_where_clause = '''"FCode" NOT IN (46003,46007)'''
        nhdflowline = arcpy.Select_analysis(nhdflowline, nhdflowline_filtered,
                                            flowline_where_clause)

    # Make dangle points at end of nhdflowline
    DM.FeatureVerticesToPoints(nhdflowline, dangles, "DANGLE")
    DM.MakeFeatureLayer(dangles, "dangles_lyr")

    # Isolate start dangles from end dangles.
    DM.FeatureVerticesToPoints(nhdflowline, start, "START")
    DM.FeatureVerticesToPoints(nhdflowline, end, "END")
    DM.SelectLayerByLocation("dangles_lyr", "ARE_IDENTICAL_TO", start)
    DM.CopyFeatures("dangles_lyr", startdangles)
    DM.SelectLayerByLocation("dangles_lyr", "ARE_IDENTICAL_TO", end)
    DM.CopyFeatures("dangles_lyr", enddangles)

    # Special handling for lakes that have some intermittent flow in and some permanent
    if exclude_intermit_flowlines:
        DM.MakeFeatureLayer(nhdflowline, "nhdflowline_lyr")
        # Non-artificial lines (not inside a waterbody) ending somewhere = real
        # stream terminus; used later to upgrade Headwater -> Drainage.
        DM.SelectLayerByAttribute("nhdflowline_lyr", "NEW_SELECTION",
                                  '''"WBArea_Permanent_Identifier" is null''')
        DM.FeatureVerticesToPoints("nhdflowline_lyr", non_artificial_end,
                                   "END")
        DM.SelectLayerByAttribute("nhdflowline_lyr", "CLEAR_SELECTION")
        arcpy.AddMessage("Found source area nodes.")

    # Get junctions from lakes >= 10 hectares.
    DM.MakeFeatureLayer(nhdjunction, "junction_lyr")
    DM.SelectLayerByLocation("junction_lyr", "INTERSECT", csiwaterbody_10ha,
                             XY_TOLERANCE, "NEW_SELECTION")
    DM.CopyFeatures("junction_lyr", flags_10ha_lake_junctions)
    arcpy.AddMessage("Found lakes >= 10 ha.")

    # Make points shapefile and layer at flowline vertices to act as potential flags and/or barriers.
    arcpy.AddMessage("Tracing...")
    DM.FeatureVerticesToPoints(nhdflowline, midvertices, "MID")
    DM.MakeFeatureLayer(midvertices, "midvertices_lyr")

    # Get vertices that are not coincident with 10 hectare lake junctions.
    # (select-then-switch = everything NOT intersecting the 10 ha junctions)
    DM.SelectLayerByLocation("midvertices_lyr", "INTERSECT",
                             flags_10ha_lake_junctions, "", "NEW_SELECTION")
    DM.SelectLayerByLocation("midvertices_lyr", "INTERSECT",
                             flags_10ha_lake_junctions, "", "SWITCH_SELECTION")
    DM.CopyFeatures("midvertices_lyr", non10vertices)

    # Get junctions that are not coincident with 10 hectare lake junctions.
    DM.SelectLayerByLocation("junction_lyr", "INTERSECT",
                             flags_10ha_lake_junctions, "", "NEW_SELECTION")
    DM.SelectLayerByLocation("junction_lyr", "INTERSECT",
                             flags_10ha_lake_junctions, "", "SWITCH_SELECTION")
    DM.CopyFeatures("junction_lyr", non10junctions)

    # Merge non10vertices with non10junctions
    DM.Merge([non10junctions, non10vertices],
             all_non_flag_points)  # inputs both point fc in_memory
    DM.MakeFeatureLayer(all_non_flag_points, "all_non_flag_points_lyr")

    # Tests the counts...for some reason I'm not getting stable behavior from the merge.
    mid_n = int(DM.GetCount(non10vertices).getOutput(0))
    jxn_n = int(DM.GetCount(non10junctions).getOutput(0))
    merge_n = int(DM.GetCount(all_non_flag_points).getOutput(0))
    if merge_n < mid_n + jxn_n:
        arcpy.AddWarning(
            "The total number of flags ({0}) is less than the sum of the input junctions ({1}) "
            "and input midpoints ({2})".format(merge_n, jxn_n, mid_n))

    # For tracing barriers, select all_non_flag_points points that intersect a 10 ha lake.
    DM.SelectLayerByLocation("all_non_flag_points_lyr", "INTERSECT",
                             csiwaterbody_10ha, XY_TOLERANCE, "NEW_SELECTION")
    DM.CopyFeatures("all_non_flag_points_lyr", barriers)

    # Trace1-Trace downstream to first barrier (junctions+midvertices in 10 ha lake) starting from flags_10ha_lake_junctions flag points.
    DM.TraceGeometricNetwork(network, "trace1", flags_10ha_lake_junctions,
                             "TRACE_DOWNSTREAM", barriers)

    # Save trace1 flowlines and junctions to layers on disk.
    # NOTE(review): non-raw strings — "\H" / "\N" pass through under Python 2,
    # but "\N" is a syntax error under Python 3; would need r'' there.
    DM.CopyFeatures("trace1\HYDRO_NET_Junctions",
                    trace1_junctions)  # extra for debugging
    DM.CopyFeatures("trace1\NHDFlowline", trace1_flowline)

    # Select vertice midpoints that intersect trace1 flowlines selection for new flags for trace2.
    DM.MakeFeatureLayer(non10vertices, "non10vertices_lyr")
    DM.SelectLayerByLocation("non10vertices_lyr", "INTERSECT",
                             trace1_flowline, "", "NEW_SELECTION")

    # Trace2-Trace downstream from midpoints of flowlines that intersect the selected flowlines from trace1.
    DM.TraceGeometricNetwork(network, "trace2", "non10vertices_lyr",
                             "TRACE_DOWNSTREAM")

    # Save trace1 flowlines and junctions to layers and then shapes on disk.
    DM.CopyFeatures("trace2\HYDRO_NET_Junctions", trace2_junctions)
    DM.CopyFeatures("trace2\NHDFlowline",
                    trace2_flowline)  # extra for debugging
    arcpy.AddMessage("Done tracing.")

    # Make shapefile for seepage lakes. (Ones that don't intersect flowlines)
    if exclude_intermit_flowlines:
        class_field_name = "Lake_Connectivity_Permanent"
    else:
        class_field_name = "Lake_Connectivity_Class"
    DM.AddField(temp_fc, class_field_name, "TEXT", field_length=13)
    DM.MakeFeatureLayer(temp_fc, "out_fc_lyr")
    DM.SelectLayerByLocation("out_fc_lyr", "INTERSECT", nhdflowline,
                             XY_TOLERANCE, "NEW_SELECTION")
    DM.SelectLayerByLocation("out_fc_lyr", "INTERSECT", nhdflowline, "",
                             "SWITCH_SELECTION")
    DM.CalculateField("out_fc_lyr", class_field_name, """'Isolated'""",
                      "PYTHON")

    # New type of "Isolated" classification, mostly for "permanent" but there were some oddballs in "maximum" too
    DM.SelectLayerByLocation("out_fc_lyr", "INTERSECT", startdangles,
                             XY_TOLERANCE, "NEW_SELECTION")
    DM.SelectLayerByLocation("out_fc_lyr", "INTERSECT", enddangles,
                             XY_TOLERANCE, "SUBSET_SELECTION")
    DM.CalculateField("out_fc_lyr", class_field_name, """'Isolated'""",
                      "PYTHON")

    # Get headwater lakes.
    DM.SelectLayerByLocation("out_fc_lyr", "INTERSECT", startdangles,
                             XY_TOLERANCE, "NEW_SELECTION")
    DM.SelectLayerByAttribute(
        "out_fc_lyr", "REMOVE_FROM_SELECTION",
        '''"{}" = 'Isolated' '''.format(class_field_name))
    DM.CalculateField("out_fc_lyr", class_field_name, """'Headwater'""",
                      "PYTHON")

    # Select csiwaterbody that intersect trace2junctions
    arcpy.AddMessage("Beginning connectivity attribution...")
    DM.SelectLayerByLocation("out_fc_lyr", "INTERSECT", trace2_junctions,
                             XY_TOLERANCE, "NEW_SELECTION")
    DM.CalculateField("out_fc_lyr", class_field_name, """'DrainageLk'""",
                      "PYTHON")

    # Get stream drainage lakes. Either unassigned so far or convert "Headwater" if a permanent stream flows into it,
    # which is detected with "non_artificial_end"
    DM.SelectLayerByAttribute("out_fc_lyr", "NEW_SELECTION",
                              '''"{}" IS NULL'''.format(class_field_name))
    DM.CalculateField("out_fc_lyr", class_field_name, """'Drainage'""",
                      "PYTHON")
    if exclude_intermit_flowlines:
        DM.SelectLayerByAttribute(
            "out_fc_lyr", "NEW_SELECTION",
            '''"{}" = 'Headwater' '''.format(class_field_name))
        DM.SelectLayerByLocation("out_fc_lyr", "INTERSECT",
                                 non_artificial_end, XY_TOLERANCE,
                                 "SUBSET_SELECTION")
        DM.CalculateField("out_fc_lyr", class_field_name, """'Drainage'""",
                          "PYTHON")

        # Prevent 'upgrades' due to very odd flow situations and artifacts of bad digitization. The effects of these
        # are varied--to avoid confusion, just keep the class assigned with all flowlines
        # 1--Purely hypothetical, not seen in testing
        DM.SelectLayerByAttribute(
            "out_fc_lyr", "NEW_SELECTION",
            '''"Lake_Connectivity_Class" = 'Isolated' AND "Lake_Connectivity_Permanent" <> 'Isolated' '''
        )
        DM.CalculateField("out_fc_lyr", class_field_name, """'Isolated'""",
                          "PYTHON")

        # 2--Headwater to Drainage upgrade seen in testing with odd multi-inlet flow situation
        DM.SelectLayerByAttribute(
            "out_fc_lyr", "NEW_SELECTION",
            '''"Lake_Connectivity_Class" = 'Headwater' AND "Lake_Connectivity_Permanent" IN ('Drainage', 'DrainageLk') '''
        )
        DM.CalculateField("out_fc_lyr", class_field_name, """'Headwater'""",
                          "PYTHON")

        # 3--Drainage to DrainageLk upgrade seen in testing when intermittent stream segments were used
        # erroneously instead of artificial paths
        DM.SelectLayerByAttribute(
            "out_fc_lyr", "NEW_SELECTION",
            '''"Lake_Connectivity_Class" = 'Drainage' AND "Lake_Connectivity_Permanent" = 'DrainageLk' '''
        )
        DM.CalculateField("out_fc_lyr", class_field_name, """'Drainage'""",
                          "PYTHON")
        DM.SelectLayerByAttribute("out_fc_lyr", "CLEAR_SELECTION")

        # Add change flag for users: Y where the two classifications differ.
        DM.AddField(temp_fc, "Lake_Connectivity_Fluctuates", "Text",
                    field_length="1")
        flag_codeblock = """def flag_calculate(arg1, arg2):
    if arg1 == arg2:
        return 'N'
    else:
        return 'Y'"""
        expression = 'flag_calculate(!Lake_Connectivity_Class!, !Lake_Connectivity_Permanent!)'
        DM.CalculateField(temp_fc, "Lake_Connectivity_Fluctuates", expression,
                          "PYTHON", flag_codeblock)

    # Project output once done with both. Switching CRS earlier causes trace problems.
    if not exclude_intermit_flowlines:
        DM.CopyFeatures(temp_fc, out_feature_class)
    else:
        # 102039 = USA Contiguous Albers Equal Area Conic (USGS).
        DM.Project(temp_fc, out_feature_class, arcpy.SpatialReference(102039))

    # Clean up
    if not debug_mode:
        for item in this_tool_layers + this_tool_temp:
            if arcpy.Exists(item):
                DM.Delete(item)
    if not debug_mode:
        DM.Delete("trace1")
        DM.Delete("trace2")
    arcpy.AddMessage("{} classification is complete.".format(class_field_name))
def selected_paths(paths, secs_lyr, subsecs_lyr, clear_selection):
    """Resolve a TRS path specification against section/subsection layers.

    Combines any currently-selected features in ``secs_lyr``/``subsecs_lyr``
    with the ``paths`` string parameter, adds the matching features to each
    layer's selection, normalizes redundancy between full-section and
    subsection paths, and returns the combined abbreviated path string
    (or None if ``paths`` fails to parse).

    Depends on module helpers ``expand_paths`` / ``abbrev_paths``.
    """
    if clear_selection:
        mgmt.SelectLayerByAttribute(secs_lyr, 'CLEAR_SELECTION')
        mgmt.SelectLayerByAttribute(subsecs_lyr, 'CLEAR_SELECTION')

    # Get selected sections (FIDSet is non-empty only when a selection exists)
    sec_paths = []
    if arcpy.Describe(secs_lyr).FIDSet:
        with arcpy.da.SearchCursor(secs_lyr, ['TRS_PATH']) as cur:
            sec_paths += [row[0] for row in cur]

    # Get selected subsections
    subsec_paths = []
    if arcpy.Describe(subsecs_lyr).FIDSet:
        with arcpy.da.SearchCursor(subsecs_lyr, ['TRS_PATH']) as cur:
            subsec_paths += [row[0] for row in cur]

    # Add in any TRS path specs from input parameter
    if paths:
        try:
            # Split on whitespace (optionally preceded by ';'); a path ending
            # in a digit is a full section, otherwise it is a subsection.
            for path in expand_paths(re.split(';?\s+', paths.upper())):
                if path[-1].isdigit():
                    sec_paths.append(path)
                else:
                    subsec_paths.append(path)
        except ValueError as err:
            arcpy.AddError(err)
            return None

    if sec_paths:
        sql = '"TRS_PATH" IN (\'%s\')' % '\',\''.join(sec_paths)
        mgmt.SelectLayerByAttribute(secs_lyr, 'ADD_TO_SELECTION',
                                    where_clause=sql)
        # Expand full section paths into subsections (suffix .A-P = all 16)
        expanded_sec_paths = expand_paths(sec + '.A-P' for sec in sec_paths)
        sql = '"TRS_PATH" IN (\'%s\')' % '\',\''.join(expanded_sec_paths)
        mgmt.SelectLayerByAttribute(subsecs_lyr, 'ADD_TO_SELECTION',
                                    where_clause=sql)

    if subsec_paths:
        sql = '"TRS_PATH" IN (\'%s\')' % '\',\''.join(subsec_paths)
        mgmt.SelectLayerByAttribute(subsecs_lyr, 'ADD_TO_SELECTION',
                                    where_clause=sql)
        # Add full section path for *.A-P and remove redundant full section paths
        # (iterate backwards because items are popped in place)
        subsec_paths = abbrev_paths(subsec_paths)
        for i in range(len(subsec_paths) - 1, -1, -1):
            sec = '.'.join(subsec_paths[i].split('.')[0:3])
            if subsec_paths[i].endswith('A-P'):
                # Convert to a full section path
                subsec_paths.pop(i)
                if sec not in sec_paths:
                    sec_paths.append(sec)
            elif sec in sec_paths:
                # Remove redundant full section path
                sec_paths.remove(sec)
        subsec_paths = expand_paths(subsec_paths)

    paths = '; '.join(abbrev_paths(sec_paths + subsec_paths))
    return paths
# NOTE(review): top-level script fragment — `dzf`, `working_dir`, and
# `lyr_template` are defined earlier, outside this view. Module aliases
# (ZIPFILE, OS, DM, ANALYSIS, GETPASS) follow this script's upper-case
# import-alias convention.
# Extract the first shapefile from the downloaded drought zip and expose it
# as a feature layer named after the shapefile's base name.
zf = ZIPFILE.ZipFile(dzf[0], "r")
shp_name = [n for n in zf.namelist() if n.endswith('.shp')][0]
zf.extractall(working_dir)
drought = OS.path.splitext(shp_name)[0]
DM.MakeFeatureLayer(OS.path.join(working_dir, shp_name), drought)

#### Add Winery Data ####
beerWinePath = OS.path.join(working_dir, "BeerWine", "BeerWine.gdb",
                            "BeerWine")
intermediate_output = OS.path.join(working_dir, "BeerWine", "BeerWine.gdb",
                                   "BeerWineDrought")
wine = "BeerWine"
wine_drought = "Wine_Drought"
# Restrict the BeerWine layer to wineries, then join them onto the drought
# polygons (one output row per drought polygon).
DM.MakeFeatureLayer(beerWinePath, wine)
DM.SelectLayerByAttribute(wine, "NEW_SELECTION", "Type = 'Winery'")
ANALYSIS.SpatialJoin(drought, wine, intermediate_output, "JOIN_ONE_TO_ONE",
                     "KEEP_ALL")
# Best-effort removal of the joined NAME field; absent field is not an error.
try:
    DM.DeleteField(intermediate_output, "NAME")
except:
    pass
final_wine_drought = "Wine_Drought_Summary"
DM.MakeFeatureLayer(intermediate_output, final_wine_drought)
# Persist as a .lyr file and copy symbology from the provided template.
lf = DM.SaveToLayerFile(
    final_wine_drought,
    OS.path.join(working_dir, '{}.lyr'.format(final_wine_drought)))
DM.ApplySymbologyFromLayer(lf, lyr_template)
# Hard-coded placeholder credential; interactive prompt left commented out.
pw = "test"  #GETPASS.getpass("Enter AGOL password:")
def snap_points_to_mask_raster (in_file, mask, out_file, distance, workspace): if distance is None or len (distance) == 0: distance = "100 METERS" if arcpy.env.outputCoordinateSystem is None: arcpy.env.outputCoordinateSystem = mask print arcpy.env.outputCoordinateSystem.name if len(workspace): arcpy.env.workspace = workspace if arcpy.env.workspace is None or len(arcpy.env.workspace) == 0: arcpy.env.workspace = os.getcwd() arcpy.AddMessage ("workspace is %s" % arcpy.env.workspace) try: suffix = None wk = arcpy.env.workspace if not '.gdb' in wk: suffix = '.shp' poly_file = arcpy.CreateScratchName(None, suffix, 'POLYGON') arcpy.RasterToPolygon_conversion (mask, poly_file, 'NO_SIMPLIFY') except: raise arcpy.AddMessage ("poly_file is %s" % poly_file) # handle layers and datasets desc = arcpy.Describe(in_file) in_file = desc.catalogPath # add .shp extension if needed - clunky, but otherwise system fails below re_gdb = re.compile ('\.gdb$') re_shp = re.compile ('\.shp$') path = os.path.dirname(out_file) if len (path) == 0: path = arcpy.env.workspace if not re_gdb.search (path) and not re_shp.search (out_file): out_file += '.shp' arcpy.AddMessage ("Input point file is %s" % in_file) arcpy.AddMessage ("Output point file is %s" % out_file) arcmgt.CopyFeatures (in_file, out_file) try: snap_layer_name = 'get_layer_for_snapping' arcmgt.MakeFeatureLayer (out_file, snap_layer_name) arcmgt.SelectLayerByLocation (snap_layer_name, 'intersect', poly_file, '#', 'NEW_SELECTION') arcmgt.SelectLayerByAttribute(snap_layer_name, 'SWITCH_SELECTION') if arcmgt.GetCount(snap_layer_name) > 0: arcpy.Snap_edit (snap_layer_name, [[poly_file, "EDGE", distance]]) else: arcpy.AddMessage ('No features selected, no snapping applied') except Exception as e: print arcpy.GetMessages() raise e arcmgt.Delete (snap_layer_name) arcmgt.Delete (poly_file) print arcpy.GetMessages() print "Completed" return
def doFishnet(self):
    """Build the weighted fishnet grid for optimized hot spot analysis.

    Collects coincident input events, reports locational outliers, derives a
    cell size from non-zero nearest-neighbor distances, creates a fishnet
    grid (via a Mercator round-trip when chordal distances are in use),
    spatially joins the input points onto the grid, trims cells outside the
    boundary FC (or far from any input point), and finally builds the
    analysis SSDO from the joined "JOIN_COUNT" field.

    Side effects: appends scratch FC names to self.cleanUpList and sets
    self.fish. Only change from the original: fixed the user-facing message
    typo "input pointsd...." -> "input points...".
    """
    #### Initial Data Assessment ####
    printOHSSection(84428, prependNewLine=True)
    printOHSSubject(84431, addNewLine=False)

    #### Find Unique Locations ####
    msg = ARCPY.GetIDMessage(84441)
    ARCPY.SetProgressor("default", msg)
    initCount = UTILS.getCount(self.ssdo.inputFC)
    self.checkIncidents(initCount)
    collectedPointFC = UTILS.returnScratchName("Collect_InitTempFC")
    collInfo = EVENTS.collectEvents(self.ssdo, collectedPointFC)
    self.cleanUpList.append(collectedPointFC)
    collSSDO = SSDO.SSDataObject(collectedPointFC,
                                 explicitSpatialRef=self.ssdo.spatialRef,
                                 useChordal=True)
    collSSDO.obtainDataGA(collSSDO.oidName)
    #################################

    if self.boundaryFC:
        #### Assure Boundary FC Has Area and Obtain Chars ####
        self.checkBoundary()

    #### Location Outliers ####
    lo = UTILS.LocationInfo(collSSDO, concept="EUCLIDEAN",
                            silentThreshold=True, stdDeviations=3)
    printOHSLocationalOutliers(lo, aggType=self.aggType)

    #### Agg Header ####
    printOHSSection(84444)
    if self.boundaryFC:
        extent = self.boundExtent
        forMercExtent = self.boundExtent
        countMSGNumber = 84453
    else:
        countMSGNumber = 84452
        extent = None
        forMercExtent = collSSDO.extent

    if collSSDO.useChordal:
        # Project a two-corner extent FC to Mercator to get a projected
        # extent for fishnet construction.
        extentFC_GCS = UTILS.returnScratchName("TempGCS_Extent")
        extentFC_Merc = UTILS.returnScratchName("TempMercator_Extent")
        points = NUM.array([[forMercExtent.XMin, forMercExtent.YMax],
                            [forMercExtent.XMax, forMercExtent.YMin]])
        UTILS.createPointFC(extentFC_GCS, points,
                            spatialRef=collSSDO.spatialRef)
        DM.Project(extentFC_GCS, extentFC_Merc, mercatorProjection)
        d = ARCPY.Describe(extentFC_Merc)
        extent = d.extent
        fishOutputCoords = mercatorProjection
    else:
        fishOutputCoords = self.ssdo.spatialRef

    #### Fish Subject ####
    printOHSSubject(84449, addNewLine=False)
    dist = scaleDecision(lo.nonZeroAvgDist, lo.nonZeroMedDist)
    area = 0.0

    #### Construct Fishnet ####
    fish = UTILS.FishnetInfo(collSSDO, area, extent, explicitCellSize=dist)
    dist = fish.quadLength
    snap = self.ssdo.distanceInfo.linearUnitString(dist)

    #### Cell Size Answer ####
    snapStr = self.ssdo.distanceInfo.printDistance(dist)
    msg = ARCPY.GetIDMessage(84450).format(snapStr)
    printOHSAnswer(msg)
    self.fish = fish

    #### Fishnet Count Subject ####
    printOHSSubject(84451, addNewLine=False)

    #### Create Temp Fishnet Grid ####
    gridFC = UTILS.returnScratchName("Fishnet_TempFC")
    self.cleanUpList.append(gridFC)

    #### Apply Output Coords to Create Fishnet ####
    oldSpatRef = ARCPY.env.outputCoordinateSystem
    ARCPY.env.outputCoordinateSystem = fishOutputCoords

    #### Fish No Extent ####
    oldExtent = ARCPY.env.extent
    ARCPY.env.extent = ""

    #### Apply Max XY Tolerance ####
    fishWithXY = UTILS.funWithXYTolerance(DM.CreateFishnet,
                                          self.ssdo.distanceInfo)

    #### Execute Fishnet ####
    fishWithXY(gridFC, self.fish.origin, self.fish.rotate,
               self.fish.quadLength, self.fish.quadLength, self.fish.numRows,
               self.fish.numCols, self.fish.corner, "NO_LABELS",
               self.fish.extent, "POLYGON")

    #### Project Back to GCS if Use Chordal ####
    if collSSDO.useChordal:
        gridFC_ProjBack = UTILS.returnScratchName("TempFC_Proj")
        DM.Project(gridFC, gridFC_ProjBack, collSSDO.spatialRef)
        UTILS.passiveDelete(gridFC)
        gridFC = gridFC_ProjBack

    #### Set Env Output Coords Back ####
    ARCPY.env.outputCoordinateSystem = oldSpatRef

    #### Create Empty Field Mappings to Ignore Atts ####
    fieldMap = ARCPY.FieldMappings()
    fieldMap.addTable(self.ssdo.inputFC)
    fieldMap.removeAll()

    #### Fishnet Count Answer ####
    printOHSAnswer(ARCPY.GetIDMessage(countMSGNumber))

    #### Create Weighted Fishnet Grid ####
    tempFC = UTILS.returnScratchName("Optimized_TempFC")
    self.cleanUpList.append(tempFC)
    joinWithXY = UTILS.funWithXYTolerance(ANA.SpatialJoin,
                                          self.ssdo.distanceInfo)
    joinWithXY(gridFC, self.ssdo.inputFC, tempFC, "JOIN_ONE_TO_ONE",
               "KEEP_ALL", "EMPTY")

    #### Clean Up Temp FCs ####
    UTILS.passiveDelete(gridFC)

    #### Remove Locations Outside Boundary FC ####
    featureLayer = "ClippedPointFC"
    DM.MakeFeatureLayer(tempFC, featureLayer)
    if self.boundaryFC:
        msg = ARCPY.GetIDMessage(84454)
        ARCPY.SetProgressor("default", msg)
        # Keep only cells intersecting the boundary: select, invert, delete.
        DM.SelectLayerByLocation(featureLayer, "INTERSECT", self.boundaryFC,
                                 "#", "NEW_SELECTION")
        DM.SelectLayerByLocation(featureLayer, "INTERSECT", "#", "#",
                                 "SWITCH_SELECTION")
        DM.DeleteFeatures(featureLayer)
    else:
        # NOTE(review): `additionalZeroDistScale` is not defined in this
        # method — presumably a module global or attribute; confirm.
        if additionalZeroDistScale == "ALL":
            msg = ARCPY.GetIDMessage(84455)
            ARCPY.SetProgressor("default", msg)
            DM.SelectLayerByAttribute(featureLayer, "NEW_SELECTION",
                                      '"Join_Count" = 0')
            DM.DeleteFeatures(featureLayer)
        else:
            distance = additionalZeroDistScale * fish.quadLength
            distanceStr = self.ssdo.distanceInfo.linearUnitString(
                distance, convert=True)
            nativeStr = self.ssdo.distanceInfo.printDistance(distance)
            # FIX: message typo "pointsd" -> "points"
            msg = "Removing cells further than %s from input points...."
            ARCPY.AddMessage(msg % nativeStr)
            DM.SelectLayerByLocation(featureLayer, "INTERSECT",
                                     self.ssdo.inputFC, distanceStr,
                                     "NEW_SELECTION")
            DM.SelectLayerByLocation(featureLayer, "INTERSECT", "#", "#",
                                     "SWITCH_SELECTION")
            DM.DeleteFeatures(featureLayer)

    DM.Delete(featureLayer)
    del collSSDO
    ARCPY.env.extent = oldExtent
    self.createAnalysisSSDO(tempFC, "JOIN_COUNT")
# NOTE(review): script fragment — `hypZ`, `i`, `wrk`, and the `man` (arcpy
# management) alias are defined in an enclosing scope outside this view.
# Joins a zone raster table to its dissolved polygons, keeps polygons meeting
# the class level's minimum richness, and routes the rest down one level.
print("linking zones up to polys")
print("working on " + hypZ[i])
# don't assume i is the class level -- extract class here
classLevel = hypZ[i][-1:]
curZo = wrk + "/zon_C" + classLevel
polyZo = wrk + "/hyp_backOut_dissolve_" + classLevel
polyZoLyr = "polyZoLayer"

# join the table from the raster to the poly zone layer
man.MakeFeatureLayer(polyZo, polyZoLyr)
man.AddJoin(polyZoLyr, "OBJECTID", curZo, "OBJECTID", "KEEP_ALL")

# find any polys with Richness below zone level
# each dict entry is [zone: min richness]
dictMinRich = {1: 1, 2: 2, 3: 5}
targMinRich = dictMinRich[int(classLevel)]
expr = "Richness >= " + str(targMinRich)
man.SelectLayerByAttribute(polyZoLyr, "NEW_SELECTION", expr)

# write out the selected set
outFeat = wrk + "/zon_Joined_C" + classLevel
man.CopyFeatures(polyZoLyr, outFeat)

# if rows were dropped AND we are above level 1, then need
# to add dropped polys to one level down.
numRowsSelSet = int(man.GetCount(polyZoLyr).getOutput(0))
numRowsLyr = int(man.GetCount(polyZo).getOutput(0))
# BUG FIX: the original condition used bitwise '&':
#   numRowsSelSet < numRowsLyr & int(classLevel) > 1
# which Python parses as the chained comparison
#   numRowsSelSet < (numRowsLyr & int(classLevel)) > 1
# because '&' binds tighter than comparisons. Use logical 'and'.
if numRowsSelSet < numRowsLyr and int(classLevel) > 1:
    expr = "Richness < " + str(targMinRich)
    man.SelectLayerByAttribute(polyZoLyr, "NEW_SELECTION", expr)
    destinedLevel = int(classLevel) - 1
    # write out the selected set
    outFeat = wrk + "/zon_AddThesePolysTo_C" + str(destinedLevel)
    man.CopyFeatures(polyZoLyr, outFeat)
# if the prev if statement was acted on, then grab
def mergeDualCarriageways():
    """Collapse dual carriageways and turning circles into single, straight-line
    roadways.

    The cartography tools that achieve these effects are run on each route
    separately, then the routes are written back into a single feature class,
    as this yields better results than processing all routes at once.
    """
    generateMatchCode()

    # create a feature class to store all of the outputs
    geom_type = 'POLYLINE'
    template = distinct_routes_src
    oregon_spn = arcpy.SpatialReference(2913)  # NAD83(HARN) / Oregon North
    management.CreateFeatureclass(os.path.dirname(collapsed_routes),
                                  os.path.basename(collapsed_routes),
                                  geom_type, template,
                                  spatial_reference=oregon_spn)

    # make a feature layer of the source routes so that selections can be made on it
    distinct_rte_lyr = 'distinct_transit_routes'
    management.MakeFeatureLayer(distinct_routes, distinct_rte_lyr)

    route_service_list = getRouteServicePairs()
    temp_merge = os.path.join(temp_shp_dir, 'temp_merge.shp')
    temp_collapse = os.path.join(temp_shp_dir, 'temp_collapse.shp')
    route_fields = ['Shape@', 'route_id', 'serv_level', 'route_type']

    i_cursor = da.InsertCursor(collapsed_routes, route_fields)
    try:
        for route, service in route_service_list:
            select_type = 'NEW_SELECTION'
            where_clause = """"route_id" = {0} AND "serv_level" = '{1}'""".format(
                route, service)
            management.SelectLayerByAttribute(distinct_rte_lyr, select_type,
                                              where_clause)

            # merge dual carriageways
            merge_field = 'merge_id'  # '0' in this field means won't be merged
            merge_distance = 100  # feet
            cartography.MergeDividedRoads(distinct_rte_lyr, merge_field,
                                          merge_distance, temp_merge)

            # collapse turning circles
            collapse_distance = 550
            cartography.CollapseRoadDetail(temp_merge, collapse_distance,
                                           temp_collapse)

            with da.SearchCursor(temp_collapse, route_fields) as s_cursor:
                for row in s_cursor:
                    i_cursor.insertRow(row)
    finally:
        # release the insert cursor's schema lock on collapsed_routes even if
        # one of the geoprocessing calls above raises (original only released
        # it on the happy path)
        del i_cursor

    # now merge contiguous line segments with common attributes, now that dual
    # carriageways have been collapsed the data can be reduced to fewer segments
    dissolve_fields = ['route_id', 'serv_level', 'route_type']
    geom_class = 'SINGLE_PART'
    line_handling = 'UNSPLIT_LINES'
    management.Dissolve(collapsed_routes, dissolved_routes, dissolve_fields,
                        multi_part=geom_class, unsplit_lines=line_handling)
#desc = arcpy.Describe(polygon_file) #sr = desc.spatialReference arcpy.env.outputCoordinateSystem = default_coord_sys arcpy.env.geographicTransformations = "GDA_1994_To_WGS_1984" arcpy.env.XYResolution = "0.0000000001 Meters" arcpy.env.XYTolerance = "0.0000000001 Meters" temp_xy = arcpy.CreateScratchName("xx", ".shp") add_msg_and_print("temp_xy is %s" % temp_xy) try: arcmgt.XYToLine(in_table, temp_xy, "Longitude_dd", "Lattitude_dd", "Longitude_dd_2", "Lattitude_dd_2", "GEODESIC", "New_WP") except: add_msg_and_print("Unable to create XY to line feature class") raise layer = "feat_layer" arcmgt.MakeFeatureLayer(temp_xy, layer) arcmgt.SelectLayerByLocation(layer, "COMPLETELY_WITHIN", polygon_file) arcmgt.SelectLayerByAttribute(layer, "SWITCH_SELECTION") temp_overlap = arcpy.CreateScratchName("xx_overlap_", ".shp") arcpy.CopyFeatures_management(layer, temp_overlap) # now we need to iterate over those overlapping vertices and integrate them with the boundary polygon print "Completed"
veg = gis.content.get("6341228ec82a4bfbaf52d977a14e99ce") b = time.perf_counter() / 60 print(f"Layers loaded: {round(b - a, 2)} minutes") #Load vegetation layer veg2 = veg.layers[0] veg3 = veg2.url veg4 = r"memory\veg" DM.MakeFeatureLayer(veg3, veg4) c = time.perf_counter() / 60 print(f"Veg feature generated: {round(c - b, 2)} minutes") #Select vegetation type selection = 'Forest & Woodland' #input habitat type here where = "LIFEFORM = '" + selection + "'" DM.SelectLayerByAttribute(veg4, "NEW_SELECTION", where, None) d = time.perf_counter() / 60 print(f"Veg type selected: {round(d - c, 2)} minutes") #Make conditional vegetation layer raster vegCon = r"memory\veg2" path = script + r"\veg.shp" CO.FeatureClassToShapefile(veg4, script) CO.FeatureToRaster(path, "LIFEFORM", vegCon, 30) e = time.perf_counter() / 60 print(f"Veg conditional raster created: {round(e - d, 2)} minutes") #Load non-normalized digital surface model dsm2 = dsm.layers[0] dsm3 = dsm2.url dsm4 = r"memory\dsm"