def PointGEOM(fc, tbl, workspace, layer_name, fields): #Updates the Geometry point location based on the XY attributes in the GIS table, run this after the XY attributes have been updated try: MakeFeatureLayer_management(fc, layer_name) #the tolerance is how close a lat/long field value must match the coordinate position Tolerance = 0.000001 #start the edit operation using the DA cursor edit = da.Editor(workspace) # @UndefinedVariable edit.startEditing() edit.startOperation() with da.UpdateCursor(fc, fields) as ucursor: # @UndefinedVariable for row in ucursor: #rows 0 and 1 are the lat long fields in the table point = Point(row[0], row[1]) #row 2 is the geometry lat long tuple, and needs to be split in to lat/long parts rowx, rowy = (row[2]) rowvalues = (row[0], row[1], point, datetime.datetime.now()) #compare the lat long table values to the point location if (type(rowx) == float): intolX = abs(row[0] - rowx) intolY = abs(row[1] - rowy) if intolX < Tolerance and intolY < Tolerance: pass else: #if the shape needs to be adjusted, this will update the coordinate position from the feild info point = Point(row[0], row[1]) rowvalues = (row[0], row[1], point, datetime.datetime.now()) print "these rows are outside the position tolerance:" print(rowvalues) ucursor.updateRow(rowvalues) #print (rowvalues) else: point = Point(row[0], row[1]) rowvalues = (row[0], row[1], point, datetime.datetime.now()) print "these rows need to be calculated:" print(rowvalues) ucursor.updateRow(rowvalues) edit.stopOperation() edit.stopEditing(True) del layer_name, fc, fields, workspace print "point geometry updated" except ExecuteError: print(GetMessages(2)) endingTime = datetime.datetime.now() ScriptStatusLogging('POINT_UPDATE_PROD.py', 'CIIMS.Static_Crossings', scriptFailure, startingTime, endingTime, GetMessages(2))
def raster_extent_polygon(in_raster):
    """Return the rectangular extent of ``in_raster`` as a Polygon."""
    from arcpy import Array, Point, Polygon, Describe
    ext = Describe(in_raster).extent
    # Four extent corners, starting at the lower-left.
    corners = Array([
        Point(ext.XMin, ext.YMin),
        Point(ext.XMin, ext.YMax),
        Point(ext.XMax, ext.YMax),
        Point(ext.XMax, ext.YMin),
    ])
    return Polygon(corners)
def _array_to_poly_(arr, SR=None, as_type="Polygon"):
    """Convert array-like objects to arcpy geometry.

    Parameters
    ----------
    arr : list-like
        An `ndarray`, `object array` or `list of lists` whose items are
        the poly parts (sequences of x-y pairs).
    SR : spatial reference
        Leave as None if not known.  Enclose in quotes, e.g. "4326".
    as_type : text
        Polygon or Polyline.

    Notes
    -----
    Polygon geometries require a duplicate first and last point.
    Outer rings are ordered clockwise, inner rings (holes)
    counterclockwise.  No integrity checks are made here.
    """
    parts = np.asarray(arr, dtype='O')
    # One list of arcpy Points per poly part.
    pnt_lists = [[Point(*xy) for xy in part] for part in parts]
    kind = as_type.upper()
    if kind == 'POLYGON':
        poly = Polygon(Array(pnt_lists), SR)
    elif kind == 'POLYLINE':
        poly = Polyline(Array(pnt_lists), SR)
    return poly
def _p2p_(poly): """Convert a single ``poly`` shape to numpy arrays or object.""" sub = [] pt = Point() # arcpy.Point() for arr in poly: pnts = [[p.X, p.Y] for p in arr if pt] sub.append(np.asarray(pnts, dtype='O')) return sub
def calc_linhas_largura(dict_circ_desc, ponto):
    """Create the width lines for a "meio" (middle) circle description.

    Returns (linha_largura, linha_circulo) clipped to ``poligono_ma``;
    only the "meio" circle type is handled.
    """
    if dict_circ_desc["tipo_circulo"] == "meio":

        def _polilinha(px, py):
            # Polyline from the circle point (px, py) to the reference
            # point; also returns the backing Array for cleanup.
            vertice = Point()
            vertice.X = px
            vertice.Y = py
            arr = Array([vertice, ponto.getPart(0)])
            return arr, Polyline(arr, projecao_geo)

        ptm = dict_circ_desc["pt_medios_circ"]
        array, linha_circulo = _polilinha(ptm["x_ptm"], ptm["y_ptm"])

        # Find the circle part that does not cross the reference point.
        linha_nao_intersecta_ponto = None
        for nome_parte in dict_circ_desc["partes"]:
            parte = dict_circ_desc["partes"][nome_parte]
            if not parte["cruza_ponto"]:
                linha_nao_intersecta_ponto = parte["linha_geometria"]

        if linha_circulo.disjoint(linha_nao_intersecta_ponto):
            # Wrong side of the circle: rebuild from the inverse midpoint.
            array.removeAll()
            array, linha_circulo = _polilinha(ptm["x_ptm_inv"],
                                              ptm["y_ptm_inv"])
        linha_largura = linha_circulo.intersect(poligono_ma, 2)
        array.removeAll()
        return linha_largura, linha_circulo
def criar_linha_largura_app(linha, largura_app):
    """Extend ``linha`` by ``largura_app`` beyond each endpoint.

    For each endpoint a buffer of radius ``largura_app`` (built in the
    planar projection, converted back to geographic) supplies the
    circle radius; the point on that circle opposite the other
    endpoint becomes the new endpoint, producing a line lengthened by
    the APP width on both sides.

    The two copy-pasted endpoint computations of the original are
    factored into one helper.
    """
    def _ponto_estendido(ponto_geom, cx, cy, ox, oy):
        # Point on the buffer circle around (cx, cy) in the direction
        # opposite to (ox, oy).
        circ = (ponto_geom.projectAs(projecao_plana)
                .buffer(largura_app)
                .projectAs(projecao_geo)
                .boundary())
        obj_circ = CircVetores(cx, cy)
        raio = obj_circ.eq_circ_achar_raio(circ.firstPoint.X,
                                           circ.firstPoint.Y)
        angulo_rad = obj_circ.retorna_angulo_atraves_ponto(ox, oy)
        return obj_circ.retorna_ponto_de_angulo_inverso(angulo_rad, raio)

    x_1_c = linha.firstPoint.X
    y_1_c = linha.firstPoint.Y
    x_2_c = linha.lastPoint.X
    y_2_c = linha.lastPoint.Y

    x_1_final, y_1_final = _ponto_estendido(
        PointGeometry(linha.firstPoint, projecao_geo),
        x_1_c, y_1_c, x_2_c, y_2_c)
    x_2_final, y_2_final = _ponto_estendido(
        PointGeometry(linha.lastPoint, projecao_geo),
        x_2_c, y_2_c, x_1_c, y_1_c)

    point1_final = Point()
    point1_final.X = x_1_final
    point1_final.Y = y_1_final
    point2_final = Point()
    point2_final.X = x_2_final
    point2_final.Y = y_2_final
    return Polyline(Array([point1_final, point2_final]), projecao_geo)
def PointGEOM(fc, tbl, workspace, layer_name, fields): #Updates the Geometry point location based on the XY attributes in the GIS table, run this after the XY attributes have been updated try: MakeFeatureLayer_management(fc, layer_name) Tolerance = 0.0000001 #start the edit operation edit = da.Editor(workspace) edit.startEditing() edit.startOperation() with da.UpdateCursor(fc, fields) as ucursor: for row in ucursor: point = Point(row[0], row[1]) rowx, rowy = (row[2]) rowvalues = (row[0], row[1], point, datetime.datetime.now()) if (type(rowx) == float): intolX = abs(row[0] - rowx) intolY = abs(row[1] - rowy) if intolX < Tolerance and intolY < Tolerance: pass else: point = Point(row[0], row[1]) rowvalues = (row[0], row[1], point, datetime.datetime.now()) print(rowvalues) ucursor.updateRow(rowvalues) #print (rowvalues) else: point = Point(row[0], row[1]) rowvalues = (row[0], row[1], point, datetime.datetime.now()) print "these rows are outside the position tolerance:" print(rowvalues) ucursor.updateRow(rowvalues) edit.stopOperation() edit.stopEditing(True) del layer_name, fc, fields, workspace print "point geometry updated" except ExecuteError: print(GetMessages(2))
def arr2poly(a, SR):
    """Construct a poly feature from lists or arrays.

    Each item of ``a`` is a (part-coordinates, oid) pair; the
    module-level ``p_type`` selects Polygon vs Polyline output.
    Returns (poly, oid) with the oid of the last item.
    """
    aa = []
    for item in a:
        part = item[0]
        oid = item[1]
        # Renamed the inner loop variable so it no longer shadows the
        # outer item.
        aa.append([Point(*xy) for xy in part])
    if p_type.upper() == 'POLYGON':
        poly = Polygon(Array(aa), SR)
    elif p_type.upper() == 'POLYLINE':
        poly = Polyline(Array(aa), SR)
    return (poly, oid)
def _arr_poly_(arr, SR, as_type):
    """Slice ``arr`` where nan values appear and build one poly.

    Rows whose first column is nan act as part separators; the nan row
    itself is dropped from every piece after the first.
    """
    nan_mask = np.isnan(arr[:, 0])
    if np.any(nan_mask):
        breaks = np.where(nan_mask)[0]
        pieces = np.split(arr, breaks)
        # drop the leading nan row from each piece after the first
        subs = [pieces[0]] + [piece[1:] for piece in pieces[1:]]
    else:
        subs = [arr]
    aa = [[Point(*xy) for xy in part] for part in subs]
    if as_type.upper() == 'POLYGON':
        poly = Polygon(Array(aa), SR)
    elif as_type.upper() == 'POLYLINE':
        poly = Polyline(Array(aa), SR)
    return poly
def _load_polygons(
        polygon_file: os.PathLike) -> Generator[Polygon, None, None]:
    """Extract polygons from the given filename.

    Parameters
    ----------
    polygon_file
        Path to a CSV file with format::

            12345, 3.141592, 1.337

        Where the columns are, in order: unique ID of the curve being
        described, X value of the point, Y value of the point.

    Yields
    ------
    Polygon
        One polygon per run of consecutive rows sharing an ID.

    Notes
    -----
    I assume the points that make up an `Array` or `Polygon` are all
    adjacent to each other.  If this were not the case, I would
    probably use a `dict[int, Array]` to accumulate `Point`s and then
    do a post-processing pass to turn the accumulated results into
    `Polygon`s.
    """
    points = []
    # BUG FIX: the original used fileinput.input() without ever
    # closing it; a context-managed open() releases the file even if
    # the consumer abandons the generator.
    with open(Path(polygon_file)) as fh:
        for line in fh:
            pid, x, y = line.split(",")
            pt = Point(ID=int(pid), X=float(x), Y=float(y))
            if points and points[-1].ID != pt.ID:
                # ID changed: we're starting a new Polygon, so yield
                # the finished one first.
                arr = PointArray(points)
                polygon = Polygon(arr)
                points = [pt]
                yield polygon
            else:
                # still building the current Polygon
                points.append(pt)
    if points:
        yield Polygon(PointArray(points))
def generate_squares(in_polygon, in_raster):
    """Insert two unit squares just outside opposite corners of the
    raster extent into ``in_polygon``."""
    from arcpy import Describe, Array, Point, Polygon, da
    extent = Describe(in_raster).extent
    offset = 1
    sqLen = 1
    # Square diagonally below/left of the lower-left extent corner.
    blX = extent.XMin - offset
    blY = extent.YMin - offset
    bottom_left_square = Array([
        Point(blX - sqLen, blY - sqLen),
        Point(blX - sqLen, blY),
        Point(blX, blY),
        Point(blX, blY - sqLen),
    ])
    # Square diagonally above/right of the top-right extent corner.
    trX = extent.XMax + offset
    trY = extent.YMax + offset
    top_right_square = Array([
        Point(trX, trY),
        Point(trX, trY + sqLen),
        Point(trX + sqLen, trY + sqLen),
        Point(trX + sqLen, trY),
    ])
    # Insert both squares as polygon geometries.
    cursor = da.InsertCursor(in_polygon, ['SHAPE@'])
    for square in (bottom_left_square, top_right_square):
        cursor.insertRow([Polygon(square)])
    del cursor
def view_poly(geo, id_num=1, view_as=2):
    """View a single poly feature as an SVG in the console.

    Parameters
    ----------
    geo : Geo array
        The Geo array part to view.
    id_num : integer
        The shape in the Geo array to view.
    view_as : integer
        Polygon = 2, Polyline = 1, Multipoint = 0
        (doc fix: the original listed "Polygon" twice.)

    Notes
    -----
    These provide information on the content of the svg representation.

    >>> p0.__getSVG__()
    >>> p0._repr_svg_()

    f = [" M {},{} " + "L {},{} "*(len(b) - 1) for b in g0.bits]
    ln = [f[i].format(*b.ravel()) for i, b in enumerate(g0.bits)]
    st = "".join(ln) + "z"
    """
    if id_num not in (geo.IDs):
        msg = "Id ... {} ... not found.\n Use geo.IDs to see their values"
        print(msg.format(id_num))
        return
    shp = geo.get_shapes(id_num)
    z = [Array([Point(*i) for i in b]) for b in shp.bits]
    if view_as == 2:
        return Polygon(Array(z))
    elif view_as == 1:
        return Polyline(Array(z))
    else:
        zz = []
        for i in z:
            zz.extend(i)
        return Multipoint(Array(zz))
def curse(self):
    """Scan HQIIS records and push a NewNeed for each applicable
    asset that is not yet present in the database.

    On any failure the processed RPA UIDs are cached so a rerun can
    skip them, and Exit is raised.
    """
    try:
        status_field = AddFieldDelimiters(self.hqiis,
                                          "OPERATIONAL_STATUS_NAME")
        type_field = AddFieldDelimiters(self.hqiis, "RPA_TYPE_CODE")
        where = "{0}<>'{1}' AND {2}<>'{3}'".format(
            status_field, "Closed ", type_field, "L")
        columns = ["INSTALLATION_CODE", "SITE_UID", "RPA_UID",
                   "RPA_PREDOMINANT_CURRENT_USE_CAT"]
        with SearchCursor(self.hqiis, columns,
                          where_clause=where) as s_cursor:
            for install_code, site_uid, rpa_uid, use_cat in s_cursor:
                if rpa_uid in self.previous_rpuids:
                    continue
                fcs = self.__is_applicable(use_cat)
                if not fcs:
                    self.previous_rpuids.append(rpa_uid)
                    continue
                if self.__check_exist_in_db(rpa_uid, fcs):
                    self.layer_status.add_status(install_code, site_uid,
                                                 fcs[0], 1)
                else:
                    self.layer_status.add_status(install_code, site_uid,
                                                 fcs[0], 2)
                    ft = fcs[0]
                    shape = self.__lookup_geometry(site_uid)
                    if not shape:
                        # Fall back to a placeholder point at the origin.
                        gen_point = Point(0, 0, 0)
                        shape = PointGeometry(gen_point)
                        self.log.info(str(site_uid) + " not in 'Site' layer.")
                    need = NewNeed(shape, site_uid, rpa_uid, ft, install_code)
                    need.push()
                    del need
                self.previous_rpuids.append(rpa_uid)
    except Exception as e:
        # Persist progress so that a rerun can skip processed records.
        cache = File("CheckForNeeds", self.previous_rpuids)
        if not cache.save():
            self.log.error("Cache did not work.")
        self.layer_status.write_cache()
        self.log.exception(e.message)
        raise Exit()
    else:
        self.layer_status.post_to_table()
def linha_de_largura(self, dict_descricao, ponto):
    """Build the width line for a "meio" (middle) circle description.

    Returns (linha_largura, linha_circulo) clipped to the MA polygon;
    only the "meio" type is handled.
    """
    if dict_descricao["tipo"] == "meio":

        def _polilinha(px, py):
            # Polyline from the circle point (px, py) to the reference
            # point; the backing Array is returned for cleanup.
            vertice = Point()
            vertice.X = px
            vertice.Y = py
            arr = Array([vertice, ponto.getPart(0)])
            return arr, Polyline(arr, self.spatial_geo_sirgas_2000)

        array, linha_circulo = _polilinha(dict_descricao["ptc_x"],
                                          dict_descricao["ptc_y"])

        # Find the part that does not cross the reference point.
        linha_nao_intersecta_ponto = None
        for nome_parte in self.dict_partes:
            if not self.dict_partes[nome_parte]["cruza_ponto"]:
                linha_nao_intersecta_ponto = \
                    self.dict_partes[nome_parte]["linha_geometria"]

        if linha_circulo.disjoint(linha_nao_intersecta_ponto):
            # Wrong side of the circle: rebuild from the inverse point.
            array.removeAll()
            array, linha_circulo = _polilinha(self.ptc_x_inv, self.ptc_y_inv)
        linha_largura = linha_circulo.intersect(self.poligono_ma_geo, 2)
        array.removeAll()
        return linha_largura, linha_circulo
pth0 = "/".join(pth) + "/Data/r00.tif" r = Raster(pth0) out_arr = "/".join(pth) + "/Data/r01.npy" frmt = "Result...\n{}" # print(frmt.format(a)) else: testing = False pth = sys.argv[1] out_arr = sys.argv[2] r = Raster(pth) # parameters here LL = r.extent.lowerLeft cols = int(r.extent.width/r.meanCellWidth) rows = int(r.extent.height/r.meanCellWidth) a = RasterToNumPyArray(r, lower_left_corner=Point(LL.X, LL.Y), ncols=cols, nrows=rows, nodata_to_value=r.noDataValue ) # # ---- overwrite existing outputs if os.path.isfile(out_arr): tweet("\nRemoving ... {}\nbefore saving".format(out_arr)) os.remove(out_arr) np.save(out_arr, a) if testing: tweet('\nScript source... {}'.format(script)) print('\nCleaning up') del r, tweet, rasterfile_info, Point, Raster, RasterToNumPyArray # ----------------------------------------------------------------------
# if save_output:
#     r.save(out_file)
# del r
# =============================================================================
from arcpy import NumPyArrayToRaster, Point

# ASCII-grid style DEM parameters (values from the .txt header).
path = r"C:\Temp\dem.txt"
ncols = 317
nrows = 204
xllcorner = 2697732
yllcorner = 1210264
cellsize = 2
NODATA_value = -9999

# BUG FIX: ``np.float`` was removed in NumPy 1.20 -- use the builtin
# ``float`` (they were always the same type).
a = np.genfromtxt(path, float, delimiter=' ', skip_header=6)
# Replace the NODATA sentinel with nan before rasterizing.
a0 = np.where(a == -9999., np.nan, a)
LL = Point(xllcorner, yllcorner)
# Use the declared cell size rather than repeating the literal 2.0.
out = NumPyArrayToRaster(a0, LL, float(cellsize), float(cellsize), np.nan)
# out.save(r"C:\Temp\dem_np.tif")

# ----------------------------------------------------------------------
# __main__ .... code section
if __name__ == "__main__":
    """Optionally...
    : - print the script source name.
    : - run the _demo
    """
    # print("Script... {}".format(script))
    # _demo()
def extendAndIntersectRoadFeatures( quarterOrHalf ): # Place the operations that extend each road line segment by a certain distance here. # Should extend all the features that exist in the post-erase dataset. Might be more difficult # to calculate the angle of these lines accurately, but it should be easier to figure out # than trying to get the lines to split correctly with the buggy SplitLineAtPoint tool. if quarterOrHalf.lower() == "quarter": extensionLinesTextName = "createdExtensionLines_Q" createdExtensionLines = createdExtensionLines_Q # 9000 ft increase for _Q version. # Must be larger than the county bufferDistance (20000) extensionDistance = 31176 extensionLinesTextName = "createdExtensionLines_Q" countyRoadNameRosette = countyRoadNameRosette_Q rosetteTextName = "countyRoadNameRosette_Q" tempRoadNameRosette = tempRoadNameRosette_Q tempRosetteTextName = "tempRoadNameRosette_Q" tempRoadNameRosetteSP = tempRoadNameRosetteSinglePoint_Q tempRosetteSPTextName = "tempRoadNameRosetteSinglePoint_Q" countyBorderFeature = countyBorderFeature_Q elif quarterOrHalf.lower() == "half": extensionLinesTextName = "createdExtensionLines_H" createdExtensionLines = createdExtensionLines_H # Must be larger than the county bufferDistance (11000) extensionDistance = 22176 extensionLinesTextName = "createdExtensionLines_H" countyRoadNameRosette = countyRoadNameRosette_H rosetteTextName = "countyRoadNameRosette_H" tempRoadNameRosette = tempRoadNameRosette_H tempRosetteTextName = "tempRoadNameRosette_H" tempRoadNameRosetteSP = tempRoadNameRosetteSinglePoint_H tempRosetteSPTextName = "tempRoadNameRosetteSinglePoint_H" countyBorderFeature = countyBorderFeature_H else: print "quarterOrHalf variable not correctly defined." raise (Exception("quarterOrHalf value error.")) print "Starting to extend and intersect road features." 
if Exists(createdExtensionLines): Delete_management(createdExtensionLines) else: pass CreateFeatureclass_management(inMemGDB, extensionLinesTextName, "POLYLINE", "", "", "", spatialReferenceProjection) # Add a column for roadname called roadNameForSplit. AddField_management(createdExtensionLines, "roadNameForSplit", "TEXT", "", "", "55") # Add a column which stores the angle to display a label called called LabelAngle. AddField_management(createdExtensionLines, "LabelAngle", "DOUBLE", "", "", "") # Change to double. # Add a column which stores the County Number. AddField_management(createdExtensionLines, "County_Number", "DOUBLE", "", "", "") roadLinesToInsertList = list() roadLinesList = getRoadLinesList() for roadLinesItem in roadLinesList: roadNameToUse = roadLinesItem[2] countyNumber = roadLinesItem[3] linePointsArray = ArcgisArray() firstPointTuple = (roadLinesItem[1].firstPoint.X, roadLinesItem[1].firstPoint.Y) lastPointTuple = (roadLinesItem[1].lastPoint.X, roadLinesItem[1].lastPoint.Y) # Make this a two-step process. # Might be as simple as # adding _1 to the end of the first set of variables, # adding _2 to the end of the second set of variables, # then making the extensions in both directions # and creating a new line that has the endpoints # from both sides as it's first and last point. # if necessary, could add the other points in between # but probably not necessary just for generating # an intersection point. 
yValue_1 = -(lastPointTuple[1] - firstPointTuple[1] ) # made y value negative xValue_1 = lastPointTuple[0] - firstPointTuple[0] lineDirectionAngle_1 = math.degrees(math.atan2( xValue_1, yValue_1)) # reversed x and y lineDirectionAngle_1 = -(((lineDirectionAngle_1 + 180) % 360) - 180 ) # correction for certain quadrants #print "lineDirectionAngle: " + str(lineDirectionAngle_1) origin_x_1 = firstPointTuple[0] origin_y_1 = firstPointTuple[1] yValue_2 = -(firstPointTuple[1] - lastPointTuple[1] ) # made y value negative xValue_2 = firstPointTuple[0] - lastPointTuple[0] lineDirectionAngle_2 = math.degrees(math.atan2( xValue_2, yValue_2)) # reversed x and y lineDirectionAngle_2 = -(((lineDirectionAngle_2 + 180) % 360) - 180 ) # correction for certain quadrants #print "lineDirectionAngle: " + str(lineDirectionAngle_2) origin_x_2 = lastPointTuple[0] origin_y_2 = lastPointTuple[1] (disp_x_1, disp_y_1) = (extensionDistance * math.sin(math.radians(lineDirectionAngle_1)), extensionDistance * math.cos(math.radians(lineDirectionAngle_1))) (end_x_1, end_y_1) = (origin_x_1 + disp_x_1, origin_y_1 + disp_y_1) (disp_x_2, disp_y_2) = (extensionDistance * math.sin(math.radians(lineDirectionAngle_2)), extensionDistance * math.cos(math.radians(lineDirectionAngle_2))) (end_x_2, end_y_2) = (origin_x_2 + disp_x_2, origin_y_2 + disp_y_2) startPoint = ArcgisPoint() endPoint = ArcgisPoint() startPoint.ID = 0 startPoint.X = end_x_1 startPoint.Y = end_y_1 endPoint.ID = 1 endPoint.X = end_x_2 endPoint.Y = end_y_2 linePointsArray.add(startPoint) linePointsArray.add(endPoint) newLineFeature = ArcgisPolyLine(linePointsArray) # Need to create an extension for both ends of the line and add them # to the array. 
#newLineFeature = createdExtensionLinesCursor.newRow() #newLineFeature.SHAPE = linePointsArray lineDirectionOutput = "0" if lineDirectionAngle_1 > 0: lineDirectionOutput = lineDirectionAngle_1 elif lineDirectionAngle_2 > 0: lineDirectionOutput = lineDirectionAngle_2 else: pass roadLinesToInsertList.append( [newLineFeature, roadNameToUse, lineDirectionOutput, countyNumber]) #createdExtensionLinesCursor.insertRow([newLineFeature, roadNameToUse, lineDirectionOutput]) if "newLineFeature" in locals(): del newLineFeature else: pass # Consider building this as a separate list and then just looping # through the list to put it into the cursor instead # of doing logic and inserting into the cursor at the same place. #start editing session #newEditingSession = daEditor(sqlGdbLocation) #newEditingSession.startEditing() #newEditingSession.startOperation() createdExtensionLinesCursor = daInsertCursor( createdExtensionLines, ["SHAPE@", "roadNameForSplit", "LabelAngle", "County_Number"]) for roadLinesToInsertItem in roadLinesToInsertList: createdExtensionLinesCursor.insertRow(roadLinesToInsertItem) # End editing session #newEditingSession.stopOperation() #newEditingSession.stopEditing(True) if "createdExtensionLinesCursor" in locals(): del createdExtensionLinesCursor else: pass # Remove the previous countyRoadNameRosette so that it can be recreated. if Exists(rosetteTextName): Delete_management(rosetteTextName) else: pass CreateFeatureclass_management(sqlGdbLocation, rosetteTextName, "POINT", "", "", "", spatialReferenceProjection) AddField_management(countyRoadNameRosette, "roadNameForSplit", "TEXT", "", "", "55") AddField_management(countyRoadNameRosette, "LabelAngle", "DOUBLE", "", "", "") # Change to double. AddField_management(countyRoadNameRosette, "County_Number", "DOUBLE", "", "", "") AddField_management(countyRoadNameRosette, "COUNTY_NAME", "TEXT", "", "", "55") # Now then, need to check for the existence # of and delete the point intersection layer # if it exists. 
# Then, recreate it and the proper fields. inMemoryCountyBorderExtension = "aCountyBorderExtensionBuffer" inMemoryExtensionLines = "aLoadedExtensionLines" try: Delete_management(inMemoryCountyBorderExtension) except: pass try: Delete_management(inMemoryExtensionLines) except: pass # Temporary layer, use CopyFeatures_management to persist to disk. MakeFeatureLayer_management( countyBorderFeature, inMemoryCountyBorderExtension) # County Border extension feature # Temporary layer, use CopyFeatures_management to persist to disk. MakeFeatureLayer_management( createdExtensionLines, inMemoryExtensionLines) # Line extension feature borderFeatureList = getBorderFeatureList(quarterOrHalf) borderFeatureList = sorted(borderFeatureList, key=lambda feature: feature[3]) for borderFeature in borderFeatureList: borderFeatureName = borderFeature[2] borderFeatureNumber = borderFeature[3] print "borderFeatureName: " + str( borderFeatureName) + " & borderFeatureNumber: " + str( int(borderFeatureNumber)) countyBorderWhereClause = ' "COUNTY_NUMBER" = ' + str( int(borderFeatureNumber)) + ' ' SelectLayerByAttribute_management(inMemoryCountyBorderExtension, "NEW_SELECTION", countyBorderWhereClause) countyBorderSelectionCount = GetCount_management( inMemoryCountyBorderExtension) print "County Borders Selected: " + str(countyBorderSelectionCount) # Had to single-quote the borderFeatureNumber because it is stored as a string in the table. # Unsingle quoted because it was changed to a float. 
extensionLinesWhereClause = ' "COUNTY_NUMBER" = ' + str( int(borderFeatureNumber)) + ' ' SelectLayerByAttribute_management(inMemoryExtensionLines, "NEW_SELECTION", extensionLinesWhereClause) extensionLineSelectionCount = GetCount_management( inMemoryExtensionLines) print "Extension Lines Selected: " + str(extensionLineSelectionCount) if Exists(tempRosetteTextName): Delete_management(tempRosetteTextName) else: pass if Exists(tempRosetteSPTextName): Delete_management(tempRosetteSPTextName) else: pass Intersect_analysis( [inMemoryCountyBorderExtension, inMemoryExtensionLines], tempRoadNameRosette, "ALL", "", "POINT") # Intersect to an output temp layer. # Next, need to loop through all of the counties. # Get the county number and use it to select # a county extension buffer in the county # extension buffers layer. # Then, use the county number to select # all of the lines for that county # in the extension lines layer. # Then, export those to a temp layer in the fgdb. # Change multipoint to singlepoint. # Was working until I moved from gisprod to sdedev for the data source. # not sure why. Check to make sure projections match. # ^ Fixed. try: # Run the tool to create a new fc with only singlepart features MultipartToSinglepart_management(tempRoadNameRosette, tempRoadNameRosetteSP) # Check if there is a different number of features in the output # than there was in the input inCount = int( GetCount_management(tempRoadNameRosette).getOutput(0)) outCount = int( GetCount_management(tempRoadNameRosetteSP).getOutput(0)) if inCount != outCount: print "Found " + str(outCount - inCount) + " multipart features." #print "inCount, including multipart = " + str(inCount) #print "outCount, singlepart only = " + str(outCount) else: print "No multipart features were found" except ExecuteError: print GetMessages() except Exception as e: print e print "Appending the temp point layer to the county point intersection layer." 
Append_management([tempRoadNameRosetteSP], countyRoadNameRosette, "NO_TEST") # K, worked correctly. Just need to change LabelAngle to a float and it might be what # I want. print "Done adding points to the countyRoadNameRosette feature class."
def main():
    """Entry point for the redundant-paths tool.

    Reads the tool parameters from argv, validates them, computes the
    redundant paths from the single flagged origin to each flagged
    destination, writes a CSV of results, and optionally visualizes
    the paths as polylines or as unique network segments.
    """
    # tool inputs
    INPUT_NETWORK = argv[1]
    INPUT_POINTS = argv[2]
    INPUT_ORIGINS_FIELD = argv[3]
    INPUT_DESTINATIONS_FIELD = argv[4]
    INPUT_COEFF = float(argv[5])
    INPUT_SEARCH_RADIUS = (float(argv[6]) if is_number(argv[6])
                           else float('inf'))
    INPUT_OUTPUT_DIRECTORY = argv[7]
    INPUT_OUTPUT_FEATURE_CLASS_NAME = argv[8]
    INPUT_COMPUTE_WAYFINDING = argv[9] == "true"
    INPUT_VISUALIZATION = argv[10]

    # the network must carry a "Length" cost attribute
    if "Length" not in network_cost_attributes(INPUT_NETWORK):
        AddError("Network <%s> does not have Length attribute" %
                 INPUT_NETWORK)
        return
    # the redundancy coefficient must be at least 1
    if INPUT_COEFF < 1:
        AddError("Redundancy coefficient <%s> must be at least 1" %
                 INPUT_COEFF)
        return

    # extract origin and destination ids: exactly one origin and at
    # least one distinct destination are required
    origin_ids = flagged_points(INPUT_POINTS, INPUT_ORIGINS_FIELD)
    if len(origin_ids) != 1:
        AddError("Number of origins <%s> must be 1" % len(origin_ids))
        return
    origin_id = origin_ids[0]
    destination_ids = flagged_points(INPUT_POINTS, INPUT_DESTINATIONS_FIELD)
    if len(destination_ids) == 0 or origin_ids == destination_ids:
        AddWarning("No OD pair found, no computation will be done")
        return

    # refuse to clobber an existing output feature class
    output_feature_class = "%s.shp" % join(INPUT_OUTPUT_DIRECTORY,
                                           INPUT_OUTPUT_FEATURE_CLASS_NAME)
    if Exists(output_feature_class):
        AddError("Output feature class <%s> already exists" %
                 output_feature_class)
        return

    # obtain visualization method
    visualize_segments = visualize_polylines = False
    if INPUT_VISUALIZATION == "Unique Segments":
        visualize_segments = True
    elif INPUT_VISUALIZATION == "Path Polylines":
        visualize_polylines = True
    elif INPUT_VISUALIZATION != "None":
        AddError("Visualization method <%s> must be one of 'Unique Segments', "
                 "'Path Polylines', or 'None'" % INPUT_VISUALIZATION)
        return

    # setup
    env.overwriteOutput = True

    # construct network and points
    network, points, edge_to_points = construct_network_and_load_buildings(
        INPUT_POINTS, INPUT_NETWORK)

    # find redundant paths for each origin-destination pair
    AddMessage("Computing redundant paths ...")
    progress_bar = Progress_Bar(len(destination_ids), 1, "Finding paths ...")
    # build output table one row at a time, starting from the header row
    answers = [["OrigID", "DestID", "NumPaths", "Redundancy"]]
    if INPUT_COMPUTE_WAYFINDING:
        answers[0].append("Wayfinding")
    # visualization state
    if visualize_polylines:
        polylines = []
        polyline_data = []
    elif visualize_segments:
        all_unique_segment_counts = defaultdict(int)
    for destination_id in destination_ids:
        if origin_id != destination_id:
            all_paths = find_all_paths(network, points, INPUT_COEFF,
                                       origin_id, destination_id,
                                       INPUT_SEARCH_RADIUS,
                                       INPUT_COMPUTE_WAYFINDING)
            if all_paths is not None:
                if INPUT_COMPUTE_WAYFINDING:
                    (all_path_points, unique_segment_counts, num_paths,
                     redundancy, waypoint) = all_paths
                    answers.append([origin_id, destination_id, num_paths,
                                    redundancy, waypoint])
                else:
                    (all_path_points, unique_segment_counts, num_paths,
                     redundancy) = all_paths
                    answers.append([origin_id, destination_id, num_paths,
                                    redundancy])
                if visualize_polylines:
                    for i, path_points in enumerate(all_path_points):
                        polylines.append(Polyline(Array(
                            [Point(*coords) for coords in path_points])))
                        polyline_data.append((origin_id, destination_id, i))
                elif visualize_segments:
                    for edge_id in unique_segment_counts:
                        all_unique_segment_counts[edge_id] += \
                            unique_segment_counts[edge_id]
        progress_bar.step()
    AddMessage("\tDone.")

    # write out results
    if len(answers) > 1:
        AddMessage("Writing out results ...")
        # write out to a table
        write_rows_to_csv(answers, INPUT_OUTPUT_DIRECTORY,
                          INPUT_OUTPUT_FEATURE_CLASS_NAME)
        # visualize
        if visualize_polylines:
            CopyFeatures_management(polylines, output_feature_class)
            data_fields = ["OrigID", "DestID", "PathID"]
            for field in data_fields:
                AddField_management(in_table=output_feature_class,
                                    field_name=field, field_type="INTEGER")
            rows = UpdateCursor(output_feature_class, data_fields)
            for j, row in enumerate(rows):
                row[0], row[1], row[2] = polyline_data[j]
                rows.updateRow(row)
            # create a layer of the polylines shapefile and symbolize
            polylines_layer_name = ("%s_layer" %
                                    INPUT_OUTPUT_FEATURE_CLASS_NAME)
            polylines_layer = "%s.lyr" % join(
                INPUT_OUTPUT_DIRECTORY, INPUT_OUTPUT_FEATURE_CLASS_NAME)
            MakeFeatureLayer_management(output_feature_class,
                                        polylines_layer_name)
            SaveToLayerFile_management(polylines_layer_name, polylines_layer,
                                       "ABSOLUTE")
            ApplySymbologyFromLayer_management(
                polylines_layer,
                join(path[0],
                     "Symbology_Layers\sample_polylines_symbology.lyr"))
            add_layer_to_display(polylines_layer)
        elif visualize_segments:
            id_mapping, edges_file = select_edges_from_network(
                INPUT_NETWORK, all_unique_segment_counts.keys(),
                INPUT_OUTPUT_DIRECTORY,
                "%s_edges" % INPUT_OUTPUT_FEATURE_CLASS_NAME)
            AddField_management(in_table=edges_file, field_name="PathCount",
                                field_type="INTEGER")
            rows = UpdateCursor(edges_file, ["OID@", "PathCount"])
            for row in rows:
                row[1] = all_unique_segment_counts[id_mapping[row[0]]]
                rows.updateRow(row)
        AddMessage("\tDone.")
    else:
        AddMessage("No results to write out.")
field_type = field[1] # Field type # Create all the necessary field for the line feature class AddField_management(env.scratchGDB + '/' + OutputLine, field_name, field_type) # The insert cursor for line feature class line_ins = da.InsertCursor(env.scratchGDB + '/' + OutputLine, field_names_shapes) with da.SearchCursor(POK_Table, [X_Start, Y_Start, X_End, Y_End, POK_RID]) as search: for t_row in search: # Iterate over all available POK segment route = t_row[4] # The POK route ID # Start Point and End Point in WGS 1984 projection system start_point = PointGeometry(Point(t_row[0], t_row[1])).projectAs('4326') end_point = PointGeometry(Point(t_row[2], t_row[3])).projectAs('4326') # Start Point and End Point in same projection as the Network Feature Class start_point = start_point.projectAs(Network_SpatRef) end_point = end_point.projectAs(Network_SpatRef) route_found = False # Variable for determining if the requested routes exist in the Network FC # Iterate over all available row in Network Feature Class with da.SearchCursor(Network, 'SHAPE@', where_clause="{0}='{1}'".format( NetworkRID, route)) as search_cur: for s_row in search_cur: route_found = True # If the route exist
### Variables utiles chp_x = "xCentroid" chp_y = "yCentroid" expr_centrX = "!SHAPE.CENTROID!.split()[0]" expr_centrY = "!SHAPE.CENTROID!.split()[1]" new_shp(path.dirname(shp_out), path.basename(shp_out), "Point", spatial_reference = shp_in) add = curs_ins(shp_out) ptArray = Array() pt = Point() ### Préalables new_chp(shp_in, chp_x, "DOUBLE", 18, 11) new_chp(shp_in, chp_y, "DOUBLE", 18, 11) ### Calcul des centroids calc_chp(shp_in, chp_x, expr_centrX, "PYTHON") calc_chp(shp_in, chp_y, expr_centrY, "PYTHON") rows = curs_rec(shp_in) i = 0 for objet in rows: pt.ID = i
def OffsetDirectionMatrix2(offsetOptions):
    """Update the accidentDataWithOffsetOutput geometry with data from geocodedFeatures.

    Keyword arguments to be included in the options class:
    gdbLocation -- The gdb where the outputWithOffsetLocations feature class resides.
    accidentDataAtIntersections -- A point feature class containing geocoded accident information.
    accidentDataWithOffsetOutput -- A point feature class with the same structure as the
        geocodedFeatures AND an "isOffset" row of type "TEXT" with length of at least 5.
    whereClauseInUse -- Whether or not the script will use a where clause. Boolean value.
    roadsFeaturesLocation -- The path to the local roads centerline feature class.
    aliasTable -- The path to the roads alias table for the roads centerline feature class.
    maxDegreesDifference -- The number of degrees that a potentially matching accident offset
        location can be from the direction specified. If this is set to -1, the check will be
        skipped and no matching accident offset locations will be rejected, even if they are in
        the opposite direction from where the accident record says they should be. I.e. the
        accident could be offset to the North when the accident record says that it should be
        South of the intersection when this check is skipped.
    XYFieldList -- The list of fields to use from the copy of the geocoded accidents feature
        class after that copy has had POINT_X and POINT_Y fields added and calculated.
    """
    ###########################################################################
    ## Function overview:
    ## For each row in the feature class of accidents that have been geolocated
    ## to an intersection (status not 'U' -- Unlocated):
    ##   1. Buffer the point by its recorded offset distance.
    ##   2. Select the named road(s) in the roads layer (optionally via where
    ##      clause + alias table).
    ##   3. Intersect the buffer with the selected roads to get candidate
    ##      offset points; split multipart results to singlepart.
    ##   4. Compare each candidate's SHAPE@XY against the POINT_X/POINT_Y of
    ##      the geocoded intersection, and keep the best candidate for the
    ##      recorded direction (within maximumDegreesDifference).
    ##   5. Write the chosen offset XY back into the output layer, flagging
    ##      the row via its 'isOffset' field.
    ###########################################################################
    # Maximum angle difference code confirmed to be working. -- 2015-03-18
    # 771/771 manually checked look good (for the information given) using
    # UpdateKdotNameInCenterline(), Where Clause for selection, and
    # Maximum Angle Difference.
    # Locates 771/862 non-'U' points without the modified versions of
    # ON_ROAD_NAME/AT_ROAD/AT_ROAD_DIRECTION/AT_ROAD_DIST_FEET labeled fields
    # and 803/862 with them.
    ###########################################################################
    AddMessage("The value of the useKDOTFields option is: " + str(offsetOptions.useKDOTFields))
    # Unpack the options object into locals.
    roadsToIntersect = offsetOptions.roadsFeaturesLocation
    roadsAliasTable = offsetOptions.aliasTable
    geocodedFeatures = offsetOptions.accidentDataAtIntersections
    outputWithOffsetLocations = offsetOptions.accidentDataWithOffsetOutput
    whereClauseFlag = offsetOptions.whereClauseInUse
    maximumDegreesDifference = offsetOptions.maxDegreesDifference
    KDOTFieldUse = offsetOptions.useKDOTFields
    AddMessage("The value for KDOTFieldUse is: " + str(KDOTFieldUse))
    # Choose which field list to read from the XY-augmented copy.
    if str(KDOTFieldUse).lower() == 'false':
        featuresWithXYFieldList = offsetOptions.NonKDOTXYFieldList
        AddMessage("Using nonKDOTXYFieldList.")
    else:
        featuresWithXYFieldList = offsetOptions.KDOTXYFieldList
    geodatabaseLocation = getGDBLocationFromFC(outputWithOffsetLocations)
    env.workspace = geodatabaseLocation
    env.overwriteOutput = True
    geocodedWhereClause = "STATUS <> 'U'"  # exclude Unlocated accident points
    featuresWithXY = 'geocodedWithXY'
    geocodedLocXY = r'in_memory\geocodedFeatures_Loc_XY'  # Changed this to an in_memory location also.
    # Scratch data locations
    intermediateAccidentBuffer = r'in_memory\intermediateAccidentBuffer'
    intermediateAccidentIntersect = r'in_memory\intermediateAccidentIntersect'
    intermediateAccidentIntersectSinglePart = r'in_memory\intermediateAccidentIntersectSinglePart'
    # Added 2016-09-06 after the Wichita Area points started processing. Slowly.
    intermediateRoadsToIntersect = r'in_memory\roadsToIntersect'
    intermediateRoadsAliasTable = r'in_memory\roadsAliasTable'
    descSpatialReference = Describe(geocodedFeatures).spatialReference
    # Make a feature layer of geocodedFeatures using a where clause to restrict to those points
    # which have been located to an intersection, then add XY to it.
    MakeFeatureLayer_management(geocodedFeatures, featuresWithXY, geocodedWhereClause)
    CopyFeatures_management(featuresWithXY, geocodedLocXY)
    AddXY_management(geocodedLocXY)
    roadsAsFeatureLayer = 'ConflatedRoadsFeatureLayer'
    # Roads copied to memory.
    CopyFeatures_management(roadsToIntersect, intermediateRoadsToIntersect)
    MakeFeatureLayer_management(intermediateRoadsToIntersect, roadsAsFeatureLayer)
    # Use Point_X & Point_Y for the geolocated intersection location.
    # Use shape tokens for the x & y of the points which
    # result from intersecting the buffer & road geometries.
    geocodedAccidentsList = list()
    singlePartOffsetAccidentsList = list()
    print "The path of the geocodedFeatures used is: " + geocodedFeatures
    # Snapshot all geocoded accident rows into a plain list so the cursor can be released.
    accidentsCursor = SearchCursor(geocodedLocXY, featuresWithXYFieldList)
    for accidentRow in accidentsCursor:
        geocodedAccidentsList.append(accidentRow)
    try:
        del accidentsCursor
    except:
        pass
    print 'whereClauseFlag is: ' + str(whereClauseFlag)
    print 'Starting the offset process...'
    accCounter = -1
    env.outputCoordinateSystem = descSpatialReference
    if whereClauseFlag == True:
        # Don't need to create a relate or a join.
        # Just need to do a select on the would-be joined/related table
        # to get the SEGIDs, then use those to do a select
        # for the GCIDs the conflation roads.
        # Try using table to table here instead of copy features.
        # For some reason, arcpy doesn't like this table when it's in the
        # ar63 FGDBs.
        TableToTable_conversion(roadsAliasTable, 'in_memory', 'roadsAliasTable')  # == intermediateRoadsAliasTable
        roadsAliasTableView = MakeTableView_management(intermediateRoadsAliasTable, 'roadsAliasTableView')
        for geocodedAccident in geocodedAccidentsList:
            accCounter += 1
            print 'Working on geocodedAccident #' + str(accCounter)
            # Create a point here with the x & y from the geocodedAccident,
            # add the coordinate system, OBJECTID, and AccidentID
            # from the geocodedAccident layer.
            # Then, create a buffer with it.
            tempPoint = Point(geocodedAccident[2], geocodedAccident[3])
            tempPointGeometry = PointGeometry(tempPoint, descSpatialReference)
            accidentDistanceOffset = geocodedAccident[7]
            accidentClusterTolerance = 1
            try:
                #####################
                # Offsetting while using a WhereClause follows:
                #####################
                if accidentDistanceOffset is not None:  # In Python it's None, whereas in an ArcGIS table it's <null>
                    accidentDistanceOffset = int(accidentDistanceOffset)
                    if accidentDistanceOffset != 0:
                        Buffer_analysis(tempPointGeometry, intermediateAccidentBuffer, accidentDistanceOffset)
                        # Gather up to three candidate road names for the selection clause.
                        firstRoadName = str(geocodedAccident[5])
                        if firstRoadName is not None:
                            firstRoadName = firstRoadName.upper()
                        else:
                            firstRoadName = 'NotAValidRoad'
                        secondRoadName = str(geocodedAccident[8])
                        if secondRoadName is not None:
                            secondRoadName = secondRoadName.upper()
                        else:
                            secondRoadName = 'NotAValidRoad'
                        thirdRoadName = ParseMatchAddr(geocodedAccident[9])
                        if thirdRoadName is not None:
                            thirdRoadName = thirdRoadName.upper()
                        else:
                            thirdRoadName = 'NotAValidRoad'
                        roadNamesList = [firstRoadName, secondRoadName, thirdRoadName]
                        aliasIDsList = getAliasIDs(roadNamesList, roadsAliasTableView)
                        aliasIDsLength = len(aliasIDsList)
                        if aliasIDsLength != 0:
                            # Build a quoted, comma-separated "(...)" list for the SQL IN clause.
                            aliasIDsString = """("""
                            for x in xrange(aliasIDsLength):
                                if (x != (aliasIDsLength - 1)):
                                    aliasIDsString += """'""" + aliasIDsList[x] + """',"""
                                else:
                                    aliasIDsString += """'""" + aliasIDsList[x] + """')"""
                            streetWhereClause = (""" "RD" = '""" + firstRoadName + """'""" + """ OR """ +
                                                 """ "LABEL" = '""" + firstRoadName + """'""" + """ OR """ +
                                                 """ "RD" = '""" + secondRoadName + """'""" + """ OR """ +
                                                 """ "LABEL" = '""" + secondRoadName + """'""" + """ OR """ +
                                                 """ "RD" = '""" + thirdRoadName + """'""" + """ OR """ +
                                                 """ "LABEL" = '""" + thirdRoadName + """'""" +
                                                 """ OR GCID IN """ + aliasIDsString)
                        else:
                            # Without the aliasIDs.
                            streetWhereClause = (""" "RD" = '""" + firstRoadName + """'""" + """ OR """ +
                                                 """ "LABEL" = '""" + firstRoadName + """'""" + """ OR """ +
                                                 """ "RD" = '""" + secondRoadName + """'""" + """ OR """ +
                                                 """ "LABEL" = '""" + secondRoadName + """'""" + """ OR """ +
                                                 """ "RD" = '""" + thirdRoadName + """'""" + """ OR """ +
                                                 """ "LABEL" = '""" + thirdRoadName + """'""")
                        SelectLayerByAttribute_management(roadsAsFeatureLayer, "NEW_SELECTION", streetWhereClause)
                        selectionCount = str(int(GetCount_management(roadsAsFeatureLayer).getOutput(0)))
                        if int(selectionCount) != 0:
                            featuresToIntersect = [roadsAsFeatureLayer, intermediateAccidentBuffer]
                            Intersect_analysis(featuresToIntersect, intermediateAccidentIntersect, "ALL", "", "POINT")
                            if int(str(GetCount_management(intermediateAccidentIntersect))) > 0:
                                MultipartToSinglepart_management(intermediateAccidentIntersect,
                                                                 intermediateAccidentIntersectSinglePart)
                                # Collect each candidate offset point along with the accident's
                                # intersection XY, key, direction, and id fields for later scoring.
                                singlePartsCursor = SearchCursor(intermediateAccidentIntersectSinglePart, ['SHAPE@XY'])
                                for singlePart in singlePartsCursor:
                                    singlePartListItem = [singlePart[0], geocodedAccident[2], geocodedAccident[3],
                                                          geocodedAccident[4], geocodedAccident[6],
                                                          geocodedAccident[0]]
                                    singlePartOffsetAccidentsList.append(singlePartListItem)
                                try:
                                    del singlePartsCursor
                                except:
                                    pass
                            else:
                                pass
                            try:
                                del intermediateAccidentIntersect
                            except:
                                pass
                        else:
                            pass
                            # print 'Zero road segments selected. Will not attempt to offset.'
                    else:
                        pass
                        # print 'AT_ROAD_DIST_FEET is 0. Will not attempt to offset.'
                else:
                    pass
                    # print 'AT_ROAD_DIST_FEET is null. Will not attempt to offset.'
            except:
                # Need to log the warnings with details so that I know what's wrong with them.
                print "WARNING:"
                print "An error occurred which prevented the accident point with Acc_Key: " + str(geocodedAccident[4])
                print "from being buffered and/or offset properly."
                errorItem = sys.exc_info()[1]
                errorStatement = str(errorItem.args[0])
                print errorStatement
                try:
                    del errorItem
                except:
                    pass
    elif whereClauseFlag == False:
        for geocodedAccident in geocodedAccidentsList:
            # Create a point here with the x & y from the geocodedAccident,
            # add the coordinate system, OBJECTID, and AccidentID
            # from the geocodedAccident layer.
            # Then, create a buffer with it.
            tempPoint = Point(geocodedAccident[2], geocodedAccident[3])
            tempPointGeometry = PointGeometry(tempPoint, descSpatialReference)
            accidentDistanceOffset = geocodedAccident[7]
            try:
                #####################
                # Offsetting while not using a WhereClause follows:
                #####################
                if accidentDistanceOffset is not None:
                    if int(accidentDistanceOffset) != 0:
                        accidentDistanceOffset = int(accidentDistanceOffset)
                        Buffer_analysis(tempPointGeometry, intermediateAccidentBuffer, accidentDistanceOffset)
                        # No road-name selection here: intersect against the full roads layer.
                        featuresToIntersect = [roadsAsFeatureLayer, intermediateAccidentBuffer]
                        Intersect_analysis(featuresToIntersect, intermediateAccidentIntersect, "ALL", "", "POINT")
                        if int(str(GetCount_management(intermediateAccidentIntersect))) > 0:
                            MultipartToSinglepart_management(intermediateAccidentIntersect,
                                                             intermediateAccidentIntersectSinglePart)
                            singlePartsCursor = SearchCursor(intermediateAccidentIntersectSinglePart, ['SHAPE@XY'])
                            for singlePart in singlePartsCursor:
                                singlePartListItem = [singlePart[0], geocodedAccident[2], geocodedAccident[3],
                                                      geocodedAccident[4], geocodedAccident[6],
                                                      geocodedAccident[0]]
                                singlePartOffsetAccidentsList.append(singlePartListItem)
                            try:
                                del singlePartsCursor
                            except:
                                pass
                            try:
                                del intermediateAccidentIntersect
                            except:
                                pass
                        else:
                            pass
                    else:
                        pass
                        # Need to change this to being offset to the intersection, i.e. no movement, but
                        # considered to be correctly offset all the same.
                        # print 'AT_ROAD_DIST_FEET is 0. Will not attempt to offset.'
                else:
                    pass
                    # print 'AT_ROAD_DIST_FEET is null. Will not attempt to offset.'
            except:
                print "WARNING:"
                print "An error occurred which prevented the accident point with Acc_Key: " + str(geocodedAccident[4])
                print "from being buffered and/or offset properly."
                errorItem = sys.exc_info()[1]
                errorStatement = str(errorItem.args[0])
                print errorStatement
                try:
                    del errorItem
                except:
                    pass
    else:
        print 'Please set the whereClauseFlag to either (boolean) True or False.'
    offsetDictionaryByAccidentKey = dict()
    listContainer = list()
    # Group the rows by accident_key for further analysis,
    # and add them to the dictionary/list/list data structure.
    for singlePartOffsetItem in singlePartOffsetAccidentsList:
        if singlePartOffsetItem[3] in offsetDictionaryByAccidentKey.keys():
            listContainer = offsetDictionaryByAccidentKey[singlePartOffsetItem[3]]
            listContainer.append(singlePartOffsetItem)
            offsetDictionaryByAccidentKey[singlePartOffsetItem[3]] = listContainer
        else:
            listContainer = list()
            listContainer.append(singlePartOffsetItem)
            offsetDictionaryByAccidentKey[singlePartOffsetItem[3]] = listContainer
    updateListValues = list()
    for accidentKey in offsetDictionaryByAccidentKey.keys():
        # accidentKey will be a unique accident key from the table
        listContainer = offsetDictionaryByAccidentKey[accidentKey]
        updateList = [-1, -1, -1, "False"]
        try:
            # Get the AT_ROAD_KDOT_DIRECTION/AT_ROAD_DIRECTION from the first (0th) entry.
            directionToTest = listContainer[0][4]
            if directionToTest is not None:
                directionToTest = str(directionToTest).upper()
                updateList = findTheMostInterestingRow(listContainer, directionToTest, maximumDegreesDifference)
                if updateList[0] != -1:
                    updateListValues.append(updateList)
                else:
                    # -1 is not a valid Acc_Key. Slight optimization for the next for loop that uses this list so that
                    # it doesn't have to be re-checked each time through the list for each accident in the table.
                    pass
            else:
                print 'Direction to test is null.'
        except:
            pass
    # Finally, push each chosen offset XY back into the output feature class.
    accidentUpdateCursorFields = ['ACCIDENT_KEY', 'Shape@XY', 'isOffset']
    accidentUpdateCursor = UpdateCursor(outputWithOffsetLocations, accidentUpdateCursorFields)
    for cursorItem in accidentUpdateCursor:
        for updateListItem in updateListValues:
            if cursorItem[0] == updateListItem[0]:
                if str(cursorItem[2]).upper() == 'TRUE':
                    # Don't make any changes if true.
                    AddMessage('The accident point with Acc_Key: ' + str(cursorItem[0]) + ' is already offset.')
                else:
                    # Otherwise, offset the point.
                    editableCursorItem = list(cursorItem)
                    editableCursorItem[1] = (updateListItem[1], updateListItem[2])
                    editableCursorItem[2] = updateListItem[3]
                    accidentUpdateCursor.updateRow(editableCursorItem)
            else:
                pass
strLatitude = GetParameterAsText(0) strLongitude = GetParameterAsText(1) flZCTA = GetParameterAsText(2) flLEPC = GetParameterAsText(3) flCounty = GetParameterAsText(4) strFilePath = GetParameterAsText(5) # Convert values from text to float floLatitude = float(strLatitude) floLongitude = float(strLongitude) # Make a Point Geometry object. try: ptPointOfInterest = Point(X=floLongitude, Y=floLatitude, Z=None, M=None, ID=0) spatial_ref = SpatialReference(4269) ptGeometry = PointGeometry(ptPointOfInterest, spatial_ref) except: strErrorMsg = "Error creating Point or PointGeometry objects." AddWarning(strErrorMsg) SetParameterAsText(6, strErrorMsg) sys.exit() # Open Output File for use try: fhand = open(strFilePath, 'w') except: strErrorMsg = "File did not open"
if pth1[-4:] != ".tif": pth1 += ".tif" in_arr = np.load(pth0) # parameters here # to_pro = True # ---- change to True to produce tif for ArcGIS PRO dt_kind = in_arr.dtype.kind if dt_kind in ('u', 'i'): no_data = np.iinfo(in_arr.dtype.str).max elif dt_kind in ('f'): no_data = np.iinfo(in_arr.dtype.str).max else: no_data = None if to_pro: ras = NumPyArrayToRaster(in_arr, lower_left_corner=Point(LL_x, LL_y), x_cell_size=cell_sze, value_to_nodata=no_data) ras.save(pth1) if testing: print('\nScript source... {}'.format(script)) # ---------------------------------------------------------------------- # __main__ .... code section if __name__ == "__main__": """Optionally... : - print the script source name. : - run the _demo """
def updateFC(self):
    '''
    Updates the feature class based on the distances found in the other two functions
    input: lstNames - 'b' from the LevDist function

    Iterates the name-pair generator from self.LevDist and, for each match
    against the feature class rows, either marks the row as a DUPLICATE
    (copying the GNDB UFI/UNI onto it) or handles it as a VARIANT (demoting
    the corresponding GNDB record).  Demoted GNDB records collected in
    self.insGNDB are then inserted back into the feature class as 'V'
    (variant) name records.

    NOTE(review): loop termination relies on z.next() raising StopIteration,
    which is caught by the outer handler below -- the success-path reporting
    and the insert phase both live inside that handler.
    '''
    try:
        d0 = time.clock()
        z = self.LevDist(self.namePairs)  # generator of name-pair match tuples
        dupeCount = 0
        modCount = 0
        ctr = 0
        self.insGNDB = []
        while z:
            # n = varFlag,gUFI,gName,xUFI,xName,gUNI
            n = z.next()
            # cursor must apply to external names feature class; else will throw exception
            # as fields such as "MF" do not exist
            uc = UpdateCursor(self.path+'/'+self.name)
            gUFI = n[1]
            gN = n[2]
            xUFI = n[3]
            xN = n[4]
            gUNI = n[5]
            # initialize an empty list to put GNDB row objects for records to be demoted
            # from N to V NT
            for row in uc:
                try:
                    uni = 1
                    # print row.getValue('FULL_NAME'), xN
                    uc_name = row.getValue('FULL_NAME')
                    uc_ufi = row.getValue('UFI')
                    if uc_name == xN and uc_ufi == xUFI:
                        if n[0] == 'DUPLICATE':
                            dupeCount += 1
                            row.setValue('GEONAME_NOTE',n[0])
                            # set MF column to M to capture coordinates
                            # or other feature mods
                            row.setValue('MF','M')
                            ##################
                            #
                            # need to change this with new sources
                            row.setValue('USID1','TU-GAZETTEER-09')
                            #
                            ##################
                            row.setValue('SOURCE_MOD_TYPE1','F')
                            row.setValue('UFI', gUFI)
                            row.setValue('UNI', gUNI)
                            uc.updateRow(row)
                            ctr += 1
                            # if exName is duplicate of GNDB Variant, find and
                            # demote GNDB N name
                            if n[6] == 'V':
                                rr = self.demote_gndb_rec(n[1],gUNI)
                        elif n[0] == 'VARIANT':
                            # Turkey Gazetteer is considered authoritative
                            # so discrepancies favor the gazetteer
                            # handles the external record
                            vRow = self.handle_variants(n,row)
                            uc.updateRow(vRow)
                            # demote the GNDB NT to 'V'
                            rr = self.demote_gndb_rec(n[1], gUNI)  # = gndb row
                            self.insGNDB.append(rr)
                            modCount +=1
                            ctr +=1
                        print n[1]
                except StopIteration:
                    print 'uc done'
    except StopIteration:
        # Normal termination: the z generator is exhausted.  Report counts,
        # then insert the demoted GNDB records collected above.
        print "ctr = %d" % ctr
        print "Feature class modified:\n\t%d duplicate features found \n\t%d variant names added" % (dupeCount,modCount)
        d1 = time.clock()
        delta = d1-d0
        print "%s took %f to complete." % (sys._getframe().f_code.co_name, delta)
        ins_cur = InsertCursor(self.path+'/'+self.name)
        ins_num = 0
        for rec in self.insGNDB:
            if rec == None:
                continue
            pt = Point()
            row = ins_cur.newRow()
            for k,d in rec.iteritems():
                row.setValue(k,d)
                # NOTE(review): MF/NT are re-set on every key iteration;
                # redundant but harmless -- the final values are constant.
                row.setValue('MF','M')
                row.setValue('NT','V')
                if k == 'LON':
                    pt.X = d
                elif k == 'LAT':
                    pt.Y = d
            row.setNull('NAME_RANK')
            row.setValue('SOURCE_MOD_TYPE1','N')
            pt_geo = PointGeometry(pt)
            row.setValue('Shape',pt_geo)
            ins_cur.insertRow(row)
            ins_num += 1
        print "%d GNDB name records demoted and added to feature class." % \
            ins_num
        del ins_cur
    except Exception, e:
        # Unexpected failure: append full traceback details to the error log.
        e = sys.exc_info()
        log_file = open("H:/Programming/PyErrorLog.txt", 'a')
        log_file.write(str(datetime.datetime.now().time()) + "\n")
        log_file.write(str(e[0]) + "\n")
        log_file.write(str(e[1]) + "\n")
        traceback.print_tb(e[2],None,log_file)
        log_file.write('\n')
        log_file.close()
def testPolylineStartEndPointsMatchFunction(spatialReferenceToUse):
    """Exercise polylineStartEndPointsMatch against six fixed two-point polylines.

    Builds the test lines in the given spatial reference, then prints the
    match result for every unordered pair of distinct lines.
    """
    # Endpoint coordinate pairs for the six test segments, in construction order.
    segmentEndpoints = [
        ((10000.38476, 22347.18506), (235021.997, 14251.778)),
        ((235021.997, 14251.778), (779221.8686, 925361.04623)),
        ((227386.14822, 816234.4438), (226001.4771, 22347.18506)),
        ((18245.9122, 44579.8436), (10000.38476, 22347.18506)),
        ((18245.9122, 44579.8436), (226001.4771, 22347.18506)),
        ((847224.7665, 241233.9876), (779221.8686, 925361.04623)),
    ]
    testLineList = list()
    for lineNumber, (startXY, endXY) in enumerate(segmentEndpoints, 1):
        print('Trying testLine' + str(lineNumber))
        lineGeometry = Polyline(
            arcpyArray([Point(startXY[0], startXY[1]),
                        Point(endXY[0], endXY[1])]),
            spatialReferenceToUse)
        testLineList.append(lineGeometry)
    mockXYTolerance = 0.00328083333333
    # Compare each unordered pair of distinct test lines exactly once.
    for x1 in xrange(len(testLineList)):
        for y1 in xrange(x1 + 1, len(testLineList)):
            print("Testing: " + str(x1) + " and " + str(y1) + ".")
            print(
                polylineStartEndPointsMatch(testLineList[x1], testLineList[y1],
                                            mockXYTolerance))
def calc_coordinates(fc, updateOnlyBlank):
    """Populate lat/long and USNG grid fields on a point feature class.

    fc -- path to the point feature class to update (inside a .gdb or .sde
          workspace, or a plain folder).
    updateOnlyBlank -- the string "true" to restrict updates to rows whose
          USNG field is null/blank; any other value updates every row.

    For each (selected) row, derives WGS 1984 lat/long from the shape when
    the X field is empty, then writes the USNG coordinate computed by
    CoordConvertor.  All edits happen inside an arcpy edit session.
    """
    ct = CoordConvertor.CoordTranslator()
    AddMessage("Calculating coordinates. For large datasets, this process can a while.")
    #get default address point object
    a = getFCObject(fc)
    #set field names based on object
    xField = a.X
    yField = a.Y
    NG = a.USNGRID
    #establish workspace: trim the path back to the .gdb/.sde container,
    #or fall back to the parent folder for shapefile-style data
    path = dirname(fc)
    if '.gdb' in path:
        place = path.find('.gdb') + 4
    else:
        if '.sde' in path:
            place = path.find('.sde') + 4
        else:
            place = len(path) - 1
    workspace = path[:place]
    AddMessage(workspace)
    #Start an edit session
    edit = Editor(workspace)
    # Edit session is started without an undo/redo stack for versioned data
    # (for second argument, use False for unversioned data)
    edit.startEditing(False, True)
    # Start an edit operation
    edit.startOperation()
    fl = "fl"
    # If necessary, only update blank records
    if updateOnlyBlank == "true":
        wc = NG + " IS NULL OR " + NG + " = '' OR " + NG + " = ' '"
        MakeFeatureLayer_management(fc, fl, wc)
    else:
        MakeFeatureLayer_management(fc, fl)
    #define the field list
    fields = (xField, yField, NG, "SHAPE@X", "SHAPE@Y") #modify this to access the shape field
    #get desired spatial reference
    sr = SpatialReference("WGS 1984")
    #get current spatial reference
    sr_org = Describe(fc).SpatialReference
    #calculate the NG coordinate for each row
    try:
        with UpdateCursor(fl, fields) as cursor:
            for row in cursor:
                #see if the x/y fields are blank or are populated
                if row[0] is None or row[0] == 0:
                    #create new point object from the shape tokens
                    point = Point()
                    point.X = row[3]
                    point.Y = row[4]
                    #convert to a point geometry
                    pointGeom = PointGeometry(point, sr_org)
                    #reproject the point geometry into WGS 1984
                    point2 = pointGeom.projectAs(sr, "WGS_1984_(ITRF00)_To_NAD_1983")
                    #turn the point geometry back into a normal point with the "first point" functionality
                    firstPoint = point2.firstPoint
                    #get the x/y position
                    x = firstPoint.X
                    y = firstPoint.Y
                    #update the x & y fields along the way
                    row[0] = x
                    row[1] = y
                else:
                    x = row[0]
                    y = row[1]
                #some error trapping, just in case...
                if x is not None:
                    if y is not None:
                        #convert the x & y coordinates to USNG and update the field
                        row[2] = ct.AsMGRS([y,x], 5, False)
                        cursor.updateRow(row)
        #release the locks on the data
        del row
        del cursor
        AddMessage("Lat/Long and USNG coordinates successfully updated.")
    except:
        AddMessage("Lat/Long and USNG coordinates could not be updated.")
    finally:
        # Stop the edit operation.
        edit.stopOperation()
        # Stop the edit session and save the changes
        edit.stopEditing(True)
        AddMessage("Processing complete.")