def SetDefaultSpatialIndex(self, p_InputWS):
    """Add a default spatial index to every feature class in a workspace.

    Walks the root of ``p_InputWS`` plus each feature dataset, and for every
    feature class that has no spatial index yet, computes ArcGIS' suggested
    grid sizes, derives the final grid via
    ``self.RulesForCalculateDefaultGridIndex`` and applies it.

    :param p_InputWS: path to the input workspace (e.g. a geodatabase)
    """
    env.workspace = p_InputWS
    # '' represents the workspace root; dataset names are appended as "/name"
    ds = ['']
    for fds in arcpy.ListDatasets():
        ds.append("/" + str(fds))
    for currentDS in ds:
        env.workspace = p_InputWS + currentDS
        messageText = str(p_InputWS + currentDS) + ":-----------------------------"
        self.Message(messageText, 0)
        for fc in arcpy.ListFeatureClasses():
            desc = arcpy.Describe(fc)
            if not desc.hasSpatialIndex:
                indexGrid = []
                result = arcpy.GetCount_management(fc)
                count = int(result.getOutput(0))
                # FIX: narrowed the original bare "except:" so SystemExit /
                # KeyboardInterrupt are no longer swallowed.
                try:
                    resultIdx = arcpy.CalculateDefaultGridIndex_management(fc)
                    for i in range(0, resultIdx.outputCount):
                        indexGrid.append(float(resultIdx.getOutput(i)))
                    # project-specific rules refine the ArcGIS suggestion
                    spIdx = self.RulesForCalculateDefaultGridIndex(
                        int(indexGrid[0]), count)
                    arcpy.AddSpatialIndex_management(fc, spIdx)
                    messageText = (
                        "Neuer Index def_array.append(['%s/%s' , '%s' , %s])"
                    ) % (currentDS, fc, desc.shapeType, spIdx)
                    self.Message(messageText, 0)
                except Exception:
                    # index could not be computed/applied: report the raw
                    # ArcGIS suggestion so it can be set manually
                    messageText = (
                        "Index '%s/%s' , '%s' von Hand setzen! Vorschlag ArcGIS:%s"
                    ) % (currentDS, fc, desc.shapeType, indexGrid)
                    self.Message(messageText, 2)
def spatialIndex(inFeats, sgOne, sgTwo, sgThree):
    """Add a three-level spatial grid index to a feature class.

    On failure the error is printed and forwarded via ``sendEmail``
    (best-effort: the caller continues).

    :param inFeats: path to the feature class to index
    :param sgOne: size of spatial grid 1
    :param sgTwo: size of spatial grid 2
    :param sgThree: size of spatial grid 3
    """
    try:
        arcpy.AddSpatialIndex_management(in_features=inFeats,
                                         spatial_grid_1=sgOne,
                                         spatial_grid_2=sgTwo,
                                         spatial_grid_3=sgThree)
        # FIX: parenthesized print works under both Python 2 and 3;
        # the original "print ..." statement is a SyntaxError on Python 3.
        print("Success: spatial index on " + inFeats)
    except Exception as err:
        print(err)
        sendEmail(err)
def landsat_temp(Band10, Band11, Area, OutputGDB):
    """Derive a surface-temperature raster from Landsat 8 TIRS bands 10/11.

    Converts both bands to radiance, radiance to temperature (degrees C),
    averages the two temperature rasters, clips the result to ``Area`` and
    adds a spatial index to the clipped output.

    :param Band10: path to the Landsat band 10 raster
    :param Band11: path to the Landsat band 11 raster
    :param Area: clip geometry / feature class
    :param OutputGDB: output folder for the intermediate and final rasters
    """
    # FIX: use doubled backslashes — the original '\B10_Rad.tif' only worked
    # because '\B' happens not to be an escape sequence.
    B10_Rad = OutputGDB + '\\B10_Rad.tif'
    B11_Rad = OutputGDB + '\\B11_Rad.tif'
    B10_Temp = OutputGDB + '\\B10_Temp.tif'
    B11_Temp = OutputGDB + '\\B11_Temp.tif'
    Temperature = OutputGDB + '\\Temperature.tif'
    Clip_Temp = OutputGDB + '\\Final_Temperature.tif'
    # Band 10 to Radiance (TIRS radiance rescaling coefficients)
    arcpy.gp.RasterCalculator_sa("0.0003342 * " + Band10 + " + 0.1", B10_Rad)
    # Band 11 to Radiance
    arcpy.gp.RasterCalculator_sa("0.0003342 * " + Band11 + " + 0.1", B11_Rad)
    # Band 10 Radiance to Temperature (K1/K2 thermal constants, minus 272.15 -> deg C)
    arcpy.gp.RasterCalculator_sa(
        "1321.08 / ln(774.89 / " + B10_Rad + " + 1) - 272.15", B10_Temp)
    # Band 11 Radiance to Temperature
    arcpy.gp.RasterCalculator_sa(
        "1201.14 / ln(480.89 / " + B11_Rad + " + 1) - 272.15", B11_Temp)
    # Execute CellStatistics.
    # FIX: pass the actual raster paths instead of the bare layer-name string
    # "B10_Temp;B11_Temp", which only resolved if a matching workspace was set.
    arcpy.gp.CellStatistics_sa(B10_Temp + ";" + B11_Temp, Temperature,
                               "MEAN", "DATA")
    # FIX: Temperature is a string path; strings have no .extent attribute.
    # Describe the raster to obtain its extent.
    extents = arcpy.Describe(Temperature).extent
    # Clip Raster to the area-of-interest borders
    arcpy.Clip_management(Temperature, extents, Clip_Temp, Area, "", "NONE",
                          "NO_MAINTAIN_EXTENT")
    arcpy.AddSpatialIndex_management(Clip_Temp)
def SetSpatialIndexDefArray(self, p_InputWS, p_Array, p_qualif=""): for param in p_Array: if p_qualif != "": currentVal = str(param[0]).split('/') fc = os.path.join(p_InputWS, p_qualif[1:] + currentVal[1], p_qualif[1:] + currentVal[2]) else: fc = p_InputWS + param[0] spIdx = param[2] try: desc = arcpy.Describe(fc) except: pass try: desc = arcpy.Describe(fc) if desc.hasSpatialIndex: arcpy.RemoveSpatialIndex_management(fc) except arcpy.ExecuteError: messageText = arcpy.GetMessages( 2) + "ERROR --> REMOVE = " + " fuer " + fc self.Message(messageText, 2) try: arcpy.AddSpatialIndex_management(fc, spIdx[0]) messageText = "--> IDX = " + str(spIdx[0]) + " fuer " + fc self.Message(messageText, 0) except arcpy.ExecuteError: messageText = arcpy.GetMessages(2) + "ERROR --> IDX = " + str( spIdx[0]) + " fuer " + fc self.Message(messageText, 2)
def Reproj_Clip_Dissolve_Simplify_Polygon_arcgis(layer_path, Class_Col,
                                                 tempfolder, mask_layer):
    """Preprocess a user-provided polygon layer with arcpy.

    Reprojects the input polygon to the spatial reference of ``mask_layer``,
    clips it with ``mask_layer``, dissolves it on ``Class_Col``, repairs the
    geometry and adds a spatial index.

    Parameters
    ----------
    layer_path : string
        Path to the input polygon layer, e.g. a landuse or soil layer.
    Class_Col : string
        Column in the input polygon that contains the class ID (e.g. land
        use ID or soil ID); also used as prefix for the temp shapefiles.
    tempfolder : string
        Folder where intermediate shapefiles are written.
    mask_layer : string
        Polygon layer (e.g. watershed extent) used both as the target
        projection source and as the clip geometry.

    Notes
    -----
    # TODO: May be add some function to simplify the input polygons, for
    # example remove landuse types with small areas or merge small landuse
    # polygons into the surrounding polygon.

    Returns
    -------
    string
        Path to the dissolved output shapefile.
    """
    # FIX: docstring previously documented QGIS parameters (processing,
    # context, Project_crs, ...) that do not exist in this signature.
    # Compute each intermediate path once instead of re-joining it repeatedly.
    proj_shp = os.path.join(tempfolder, Class_Col + "_proj.shp")
    clip_shp = os.path.join(tempfolder, Class_Col + "_clip.shp")
    dislve_shp = os.path.join(tempfolder, Class_Col + "_dislve.shp")

    arcpy.Project_management(
        layer_path,
        proj_shp,
        arcpy.Describe(mask_layer).spatialReference,
    )
    arcpy.Clip_analysis(proj_shp, mask_layer, clip_shp)
    arcpy.Dissolve_management(clip_shp, dislve_shp, [Class_Col])
    arcpy.RepairGeometry_management(dislve_shp)
    arcpy.AddSpatialIndex_management(dislve_shp)
    return dislve_shp
def createIndex(targetpath, indexFields=None):
    """Create a spatial index and attribute indexes on a dataset.

    (Translated from the original Chinese docstring:) Before creating an
    attribute index, check whether an index of that name already exists.

    :param targetpath: path to the feature class / table to index
    :param indexFields: iterable of field names; for each one an attribute
        index named like the field is created if missing
    """
    # FIX: mutable default argument ([]) replaced with None.
    if indexFields is None:
        indexFields = []
    arcpy.AddSpatialIndex_management(targetpath)
    indexlist = [str(index.name.lower())
                 for index in arcpy.ListIndexes(targetpath)]
    for field in indexFields:
        # FIX: compare lowercased names — indexlist is lowercased, so the
        # original un-lowercased comparison re-created existing indexes
        # whenever the field name contained upper-case letters.
        if field.lower() not in indexlist:
            try:
                arcpy.AddIndex_management(targetpath, field, field)
            except arcpy.ExecuteError:
                arcpy.GetMessages()
def testCreateReferenceSystemGRGFromArea_GZD(self):
    '''Testing with Grid Zone Designator'''
    # Runs the CreateReferenceSystemGRGFromArea tool at GZD grid size and
    # compares the output feature class against a stored expected result.
    Configuration.Logger.debug(
        ".....GRGCreateReferenceSystemGRGFromAreaTestCase.testCreateReferenceSystemGRGFromArea_GZD"
    )
    #inputs
    grid_size = "GRID_ZONE_DESIGNATOR"
    output = os.path.join(Configuration.militaryScratchGDB, "outgrg_GZD")
    #Testing
    runToolMsg = "Running tool (CreateReferenceSystemGRGFromArea)"
    arcpy.AddMessage(runToolMsg)
    Configuration.Logger.info(runToolMsg)
    # expected (pre-built) result to compare against
    compareDataset = os.path.join(Configuration.militaryResultsGDB, \
        "CompareGZD")
    toolOutput = None
    try:
        # NOTE(review): assumes self.inputArea / self.ref_grid /
        # self.large_grid_handling are set up by the test fixture.
        toolOutput = arcpy.CreateReferenceSystemGRGFromArea_mt(
            self.inputArea, self.ref_grid, grid_size, output,
            self.large_grid_handling)
        arcpy.AddSpatialIndex_management(output)
    except arcpy.ExecuteError:
        UnitTestUtilities.handleArcPyError()
    except:
        UnitTestUtilities.handleGeneralError()
    # 1: Check the expected return value
    self.assertIsNotNone(toolOutput, "No output returned from tool")
    outputOut = toolOutput.getOutput(0)
    self.assertEqual(output, outputOut, "Unexpected return value from tool")
    self.assertTrue(arcpy.Exists(outputOut), "Output does not exist")
    # 2: Check the features created
    self.assertFeatureClassEqual(compareDataset, output,
                                 arcpy.Describe(output).oidFieldName, None,
                                 "ALL", self.ignore_options,
                                 self.xy_tolerance)
def main(Input_Features, Output_Feature_Class, Point_Placement="DISTANCE",
         Distance=None, Percentage=None, Include_End_Points=False):
    """Create points along input line features, then index the output.

    :param Input_Features: path to the input line feature class
    :param Output_Feature_Class: path of the point feature class to create
    :param Point_Placement: key into the module-level ``point_placement``
        mapping; selects percentage-based vs distance-based placement
    :param Distance: linear distance string (e.g. "100 Meters") when
        placing by distance
    :param Percentage: percentage value (0-100) when placing by percentage
    :param Include_End_Points: also create points at line endpoints
    :return: path of the created output feature class
    """
    in_features = Input_Features  # String
    out_fc = Output_Feature_Class  # String
    use_percent = point_placement[Point_Placement]  # Str -> Bool
    end_points = Include_End_Points  # Boolean
    describe = arcpy.Describe(in_features)
    spatial_info = namedtuple('spatial_info', 'spatialReference extent')
    sp_info = spatial_info(spatialReference=describe.spatialReference,
                           extent=describe.extent)
    if use_percent:
        # FIX: divide by 100.0 so an integer Percentage (e.g. 50) is not
        # truncated to 0 by Python 2 integer division.
        percentage = Percentage / 100.0  # Float
        create_points_from_lines(in_features, out_fc,
                                 sp_info.spatialReference,
                                 percent=percentage,
                                 add_end_points=end_points)
    else:
        distance = Distance  # String
        distance, param_linear_units = get_distance_and_units(distance)
        distance = convert_units(distance, param_linear_units, sp_info)
        create_points_from_lines(in_features, out_fc,
                                 sp_info.spatialReference,
                                 dist=distance,
                                 add_end_points=end_points)
    try:
        arcpy.AddSpatialIndex_management(out_fc)
    except arcpy.ExecuteError:
        # some output formats do not support spatial indexes; best-effort
        pass
    return out_fc
def main(argv):
    """Add a default spatial index to every shapefile in a directory.

    Usage: addSpatialIndex.py -i <directory>

    :param argv: command-line arguments (excluding the program name)
    """
    inDir = ''
    try:
        opts, args = getopt.getopt(argv, "hi:o:", ["ifile="])
    except getopt.GetoptError:
        # FIX: print statements converted to the function form so the script
        # also parses under Python 3 (single-argument form is py2-compatible).
        print('addSpatialIndex.py -i <directory>')
        sys.exit(2)
    for opt, arg in opts:
        if opt == '-h':
            print('addSpatialIndex.py -i <directory>')
            sys.exit()
        elif opt in ("-i", "--ifile"):
            inDir = arg
    arcpy.env.workspace = inDir
    fcs = arcpy.ListFeatureClasses()
    for fc in fcs:
        print(fc)
        # grid sizes "0" let ArcGIS pick defaults
        arcpy.AddSpatialIndex_management(fc, "0", "0", "0")
    # FIX: corrected "Spaital" typo in the status message.
    print("Spatial Index added for shapefiles at " + inDir)
def testCreateReferenceSystemGRGFromArea_GZD(self):
    '''Testing with Grid Zone Designator'''
    # Variant of the GZD test that imports the toolbox explicitly and
    # compares against a dataset in the GRG input geodatabase.
    if Configuration.DEBUG is True:
        print(".....CreateReferenceSystemGRGFromAreaTestCase.testCreateReferenceSystemGRGFromArea_GZD")
        print("Importing toolbox...")
    arcpy.ImportToolbox(self.toolboxUnderTest)
    arcpy.env.overwriteOutput = True
    #inputs
    grid_size = "GRID_ZONE_DESIGNATOR"
    output = os.path.join(self.scratchGDB, "outgrg_GZD")
    #Testing
    runToolMsg = "Running tool (CreateReferenceSystemGRGFromArea)"
    arcpy.AddMessage(runToolMsg)
    Configuration.Logger.info(runToolMsg)
    # expected (pre-built) result to compare against
    compareDataset = os.path.normpath(os.path.join(Configuration.grgInputGDB,
                                                   "CompareGZD"))
    try:
        # NOTE(review): assumes self.inputArea / self.ref_grid /
        # self.large_grid_handling are set up by the test fixture.
        arcpy.CreateReferenceSystemGRGFromArea_grg(self.inputArea,
                                                   self.ref_grid, grid_size,
                                                   output,
                                                   self.large_grid_handling)
        arcpy.AddSpatialIndex_management(output)
    except arcpy.ExecuteError:
        UnitTestUtilities.handleArcPyError()
    except:
        UnitTestUtilities.handleGeneralError()
    self.assertFeatureClassEqual(compareDataset, output,
                                 arcpy.Describe(output).oidFieldName, None,
                                 "ALL", self.ignore_options)
def append(object, source_object, target_object, config):
    """Truncate ``target_object`` and append all records from ``source_object``.

    For feature classes the spatial index is removed before the load and
    rebuilt afterwards (with a calculated grid size when available). Any
    fatal condition — missing source/target, record-count mismatch — is
    logged, the SDE connection files are deleted and the process exits.

    :param object: layer name used in error messages (parameter name kept
        for interface compatibility although it shadows the builtin)
    :param source_object: path of the source table / feature class
    :param target_object: path of the target table / feature class
    :param config: configuration dict providing config['LOGGING']['logger']
    """
    logger = config['LOGGING']['logger']

    def _abort():
        # common fatal-error exit: clean up connections, stop the import
        logger.error("Import wird abgebrochen.")
        avLader.helpers.helper.delete_connection_files(config, logger)
        sys.exit()

    # FIX: replaced the deeply nested if/else pyramid with guard clauses.
    if not arcpy.Exists(source_object):
        logger.error("Quell-Objekt " + source_object + " existiert nicht.")
        _abort()
    if not arcpy.Exists(target_object):
        logger.error("Ziel-Objekt " + target_object + " existiert nicht.")
        _abort()

    needs_spatial_index = \
        arcpy.Describe(target_object).datasetType == "FeatureClass"

    # Remove the spatial index before the bulk load
    if needs_spatial_index and arcpy.Describe(target_object).hasSpatialIndex:
        logger.info("Spatial Index wird entfernt.")
        if arcpy.TestSchemaLock(target_object):
            arcpy.RemoveSpatialIndex_management(target_object)
            logger.info("Spatial Index erfolgreich entfernt.")
        else:
            logger.warn("Spatial Index konnte wegen eines Locks nicht entfernt werden.")

    logger.info("Truncating " + target_object)
    arcpy.TruncateTable_management(target_object)
    logger.info("Appending " + source_object)
    arcpy.Append_management(source_object, target_object, "TEST")

    # Rebuild the spatial index after the load
    if needs_spatial_index:
        logger.info("Spatial Index wird erstellt.")
        if arcpy.TestSchemaLock(target_object):
            logger.info("Grid Size wird berechnet.")
            grid_size = calculate_grid_size(source_object)
            logger.info("Grid Size ist: " + unicode(grid_size))
            if grid_size > 0:
                arcpy.AddSpatialIndex_management(target_object, grid_size)
            else:
                arcpy.AddSpatialIndex_management(target_object)
            logger.info("Spatial Index erfolgreich erstellt.")
        else:
            logger.warn("Spatial Index konnte wegen eines Locks nicht erstellt werden.")

    # Verify the load by comparing record counts
    logger.info("Zähle Records in der Quelle und im Ziel.")
    source_count = int(arcpy.GetCount_management(source_object)[0])
    logger.info("Anzahl Records in der Quelle: " + unicode(source_count))
    target_count = int(arcpy.GetCount_management(target_object)[0])
    logger.info("Anzahl Records im Ziel: " + unicode(target_count))
    if source_count == target_count:
        logger.info("Anzahl Records identisch")
    else:
        # FIX: the original string literal was broken by a raw line break;
        # rejoined into a single message.
        logger.error("Anzahl Records nicht identisch. Ebene " + object)
        _abort()
def csv2line():
    """Build polylines from ordered point records (script-tool entry point).

    Reads tool parameters 0-6 (input point table, output feature class,
    X/Y/Z field names, grouping ID field, optional carry-over field), groups
    consecutive rows by the ID field, connects them into polylines, records
    the maximum segment speed in field "MAX_V", and finally rebuilds the
    output's spatial index.
    """
    arcpy.env.overwriteOutput = True
    inPt = arcpy.GetParameterAsText(0)
    outFeature = arcpy.GetParameterAsText(1)
    X = arcpy.GetParameterAsText(2)
    Y = arcpy.GetParameterAsText(3)
    Z = arcpy.GetParameterAsText(4)       # NOTE(review): Z appears to hold a timestamp — speed below divides distance by (Z2 - Z1)
    idField = arcpy.GetParameterAsText(5)
    reserveField = arcpy.GetParameterAsText(6)
    maxvField = "MAX_V"
    try:
        outPath, outFC = os.path.split(outFeature)
        # change C:\Users\leizengxiang\Desktop\drawCsvInArcgis to your
        # directory, and change the wgs84.prj to your projection file.
        # FIX: raw string — the original non-raw "C:\Users\..." is a
        # SyntaxError on Python 3 (\U starts a unicode escape).
        arcpy.CreateFeatureclass_management(
            outPath, outFC, "POLYLINE", "", "DISABLED", "ENABLED",
            r"C:\Users\leizengxiang\Desktop\drawCsvInArcgis\wgs84.prj")
        field1 = arcpy.ListFields(inPt, idField)[0]
        arcpy.AddField_management(outFeature, field1.name, field1.type)
        if reserveField:
            field2 = arcpy.ListFields(inPt, reserveField)[0]
            arcpy.AddField_management(outFeature, field2.name, field2.type)
        # Add v
        arcpy.AddField_management(outFeature, maxvField, "double")
        oCur, iCur, sRow, feat = None, None, None, None
        shapeName = "Shape"
        idName = "id"
        oCur = arcpy.SearchCursor(inPt)
        iCur = arcpy.InsertCursor(outFeature)
        array = arcpy.Array()
        ID = -1          # current group id (-1 = not started yet)
        PID = 0          # running point id
        LID = 0          # running line id
        if reserveField:
            RESERVE = 0
        MAXV = 0         # max segment speed within the current group
        TEMPV = 0
        X1 = 0
        X2 = 0
        Y1 = 0
        Y2 = 0
        Z1 = 0
        Z2 = 0
        for sRow in oCur:
            X2 = sRow.getValue(X)
            Y2 = sRow.getValue(Y)
            Z2 = sRow.getValue(Z)
            pt = arcpy.Point(X2, Y2, Z2, None, PID)
            PID += 1
            currentValue = sRow.getValue(idField)
            if ID == -1:
                # first row: initialize the "previous point" state
                ID = currentValue
                if reserveField:
                    RESERVE = sRow.getValue(reserveField)
                X1 = X2
                Y1 = Y2
                Z1 = Z2
            # FIX: "<>" replaced with "!=" (works on Python 2 and 3)
            if ID != currentValue:
                # group changed: flush the accumulated line
                if array.count >= 2:
                    feat = iCur.newRow()
                    feat.setValue(idField, ID)
                    feat.setValue(shapeName, array)
                    feat.setValue(idName, LID)
                    LID += 1
                    if reserveField:
                        feat.setValue(reserveField, RESERVE)
                    feat.setValue(maxvField, MAXV)
                    iCur.insertRow(feat)
                else:
                    arcpy.AddIDMessage("WARNING", 1059, str(ID))
                X1 = X2
                Y1 = Y2
                Z1 = Z2
                MAXV = 0
                array.removeAll()
                if reserveField:
                    RESERVE = sRow.getValue(reserveField)
            if (Z1 < Z2) and (X1 != X2 or Y1 != Y2):
                # 0.36 converts m per centi-hour(?) to KM/H — original factor kept
                TEMPV = 0.36 * getdis(X1, Y1, X2, Y2) / (Z2 - Z1)  # KM/H
            else:
                TEMPV = 0
            MAXV = getmax(MAXV, TEMPV)
            array.add(pt)
            X1 = X2
            Y1 = Y2
            Z1 = Z2
            ID = currentValue
        # flush the last group
        if array.count > 1:
            feat = iCur.newRow()
            feat.setValue(idField, currentValue)
            feat.setValue(shapeName, array)
            feat.setValue(idName, LID)
            if reserveField:
                feat.setValue(reserveField, RESERVE)
            feat.setValue(maxvField, MAXV)
            iCur.insertRow(feat)
        else:
            arcpy.AddIDMessage("WARNING", 1059, str(ID))
        array.removeAll()
    except Exception as err:
        # FIX: err[0] fails on Python 3 (exceptions are not indexable)
        arcpy.AddError(str(err))
    finally:
        if oCur:
            del oCur
        if iCur:
            del iCur
        if sRow:
            del sRow
        if feat:
            del feat
    try:
        # Update the spatial index(es)
        # FIX: the line computing r was commented out while r was still
        # used below, so a NameError was silently swallowed and no index
        # was ever built. Restored the call.
        r = arcpy.CalculateDefaultGridIndex_management(outFeature)
        arcpy.AddSpatialIndex_management(outFeature, r.getOutput(0),
                                         r.getOutput(1), r.getOutput(2))
    except Exception:
        # best-effort: leave the output unindexed if the tool fails
        pass
#arcpy.AddMessage("Failed to load as area wayid="+str(areawayid)) errorCount += 1 builtareas.close() del areawaycursor arcpy.AddMessage("Loaded Areas=" + str(completedways)) arcpy.AddMessage("Step 5 --- %s seconds ---" % (time.time() - stepstarttime)) if 0 < errorCount: errorLog.write("Failed to load %d areas." % errorCount) #Step 6 Create Indexes stepstarttime = time.time() arcpy.AddMessage('Step 6/7') arcpy.AddMessage('Building Indexes') try: arcpy.AddSpatialIndex_management(nodefc, 0.5) arcpy.AddIndex_management(waytagtab, "Way_ID", "Way_Idx", "UNIQUE", "#") arcpy.AddIndex_management(wayfc, "Way_ID", "Way_Idx", "UNIQUE", "#") arcpy.AddIndex_management(areawayfc, "Way_ID", "Way_Idx", "UNIQUE", "#") arcpy.AddIndex_management(nodeothertag, "Node_ID", "Node_Idx", "NON_UNIQUE", "#") arcpy.AddIndex_management(wayothertag, "Way_ID", "Way_Idx", "NON_UNIQUE", "#") arcpy.AddMessage("Step 6 --- %s seconds ---" % (time.time() - stepstarttime)) except Exception as ErrorDesc: arcpy.AddMessage("Failed to build index") #Step Relations..... #create multipolygon relations by searching through the area ways for the referenced parts. #only load those made from areas, if made from lines they are not found.
#CLEANUP arcpy.ResetEnvironments() # Step 4: Select lakes intersecting United States boundaries all_lakes_lyr = arcpy.MakeFeatureLayer_management(ALL_LAKES_FC) states_lyr = arcpy.MakeFeatureLayer_management( US_SPATIAL_EXTENT) # Albers USGS, slower but okay arcpy.SelectLayerByLocation_management(all_lakes_lyr, "INTERSECT", states_lyr) arcpy.CopyFeatures_management(all_lakes_lyr, CONUS_LAKES_FC) arcpy.Delete_management(all_lakes_lyr) # Step 5: # Spatial Join to WQP sites # Get WQP sites ready for spatial join r_file = 'D:/Continental_Limnology/Data_Working/WQP_Sites_into_ArcGIS.shp' arcpy.AddSpatialIndex_management(r_file) arcpy.MakeFeatureLayer_management(r_file, "wqp_sites") # NHD file: from deduping, above. arcpy.MakeFeatureLayer_management(ALL_LAKES_FC, "nhd_lake_polygons") # Spatial Intersect Only arcpy.AddField_management() arcpy.SelectLayerByLocation out_file = 'D:/Continental_Limnology/Data_Working/WQP_NHD_joined.shp' arcpy.SpatialJoin_analysis("wqp_sites", "nhd_lake_polygons", out_file, "JOIN_ONE_TO_MANY", "KEEP_ALL", match_option="INTERSECT")
splitFilename = filename.split(".") if splitFilename[-1] in extList: #merge the split filename again fullpath = os.path.join(root, filename) #counter for files in dirs. count = count + 1 #appends the merged shp filename and path to the mappath list. mappaths.append(fullpath) #print count of shapefiles found. print "found " + str(count) + " files." + "\n" # Create a spatial index for each shapefile. #sort the list of datasources for map in sorted(mappaths): try: arcpy.AddSpatialIndex_management(map, "") print map + " index created!" filecount = filecount + 1 except Exception: pass print "Something went wrong with " + map + " skipping file" + "\n" fileskipcount = fileskipcount + 1 #print summary of files indexed and a timestamp print "\n" + str(filecount) + " file(s) where indexed!" print "\n" + str(fileskipcount) + " file(s) where skipped!" print "\n" + "|" + datetime.datetime.fromtimestamp(time.time()).strftime( '%H:%M:%S') + " all spatial indexes was succesfully created! |" + "\n"
# adds/rebuilds spatial index on feature class import arcpy import sys sde = r"\\path\to\sde\connection.sde" arcpy.env.workspace = sde print("starting spatial indexes...") arcpy.AddSpatialIndex_management("schema.feature_class_name") print("\t\t...finished")
def calc_road_xstreet(estamap_version, temp_lmdb='c:\\temp\\road_xstreet',
                      temp_traversal_lmdb='c:\\temp\\road_xstreet_traversal'):
    """Compute the nearest cross street at each end of every road segment.

    Pipeline: copy ROAD/ROAD_BEARING/ROAD_TURN/ROAD_ALIAS/
    ROAD_INFRASTRUCTURE into a temporary LMDB key-value store, then for
    each road traverse outward from its FROM and TO nodes until a suitably
    named cross street is found, bulk-copy the results into
    dbo.ROAD_XSTREET / dbo.ROAD_XSTREET_TRAVERSAL, and write validation
    geometries into two temp file geodatabases.

    NOTE(review): estamap_version is accepted but ignored — 'DEV' is
    hard-coded below; TODO confirm. temp_traversal_lmdb is never used.
    Python 2 only (uses the '<>' operator).
    """
    logging.info('environment')
    em = gis.ESTAMAP('DEV')
    logging.info('create temp fgdb for ROAD_XSTREET_VALIDATION')
    if arcpy.Exists(os.path.join(r'c:\temp\road_xstreet_validation.gdb')):
        arcpy.Delete_management(r'c:\temp\road_xstreet_validation.gdb')
    arcpy.CreateFileGDB_management(out_folder_path=r'c:\temp',
                                   out_name='road_xstreet_validation.gdb')
    # VicGrid94 / EPSG:3111 projection for all validation output
    arcpy.CreateFeatureclass_management(
        out_path=r'c:\temp\road_xstreet_validation.gdb',
        out_name='ROAD_XSTREET_VALIDATION',
        geometry_type='POLYLINE',
        spatial_reference=arcpy.SpatialReference(3111))
    arcpy.AddField_management(in_table=os.path.join(
        r'c:\temp\road_xstreet_validation.gdb', 'ROAD_XSTREET_VALIDATION'),
                              field_name='PFI',
                              field_type='LONG')
    arcpy.AddField_management(in_table=os.path.join(
        r'c:\temp\road_xstreet_validation.gdb', 'ROAD_XSTREET_VALIDATION'),
                              field_name='NODE_TYPE',
                              field_type='TEXT',
                              field_length=4)
    arcpy.AddField_management(in_table=os.path.join(
        r'c:\temp\road_xstreet_validation.gdb', 'ROAD_XSTREET_VALIDATION'),
                              field_name='TRAVERSAL_DIST',
                              field_type='FLOAT',
                              field_precision=12,
                              field_scale=3)
    logging.info('create temp fgdb for ROAD_XSTREET_ROAD')
    if arcpy.Exists(os.path.join(r'c:\temp\road_xstreet_road.gdb')):
        arcpy.Delete_management(r'c:\temp\road_xstreet_road.gdb')
    arcpy.CreateFileGDB_management(out_folder_path=r'c:\temp',
                                   out_name='road_xstreet_road.gdb')
    arcpy.CreateFeatureclass_management(
        out_path=r'c:\temp\road_xstreet_road.gdb',
        out_name='ROAD_XSTREET_ROAD',
        geometry_type='POLYLINE',
        spatial_reference=arcpy.SpatialReference(3111))
    arcpy.AddField_management(in_table=os.path.join(
        r'c:\temp\road_xstreet_road.gdb', 'ROAD_XSTREET_ROAD'),
                              field_name='PFI',
                              field_type='LONG')
    arcpy.AddField_management(in_table=os.path.join(
        r'c:\temp\road_xstreet_road.gdb', 'ROAD_XSTREET_ROAD'),
                              field_name='NODE_TYPE',
                              field_type='TEXT',
                              field_length=4)
    arcpy.AddField_management(in_table=os.path.join(
        r'c:\temp\road_xstreet_road.gdb', 'ROAD_XSTREET_ROAD'),
                              field_name='XSTREET_PFI',
                              field_type='LONG')
    logging.info('creating temp lmdb: {}'.format(temp_lmdb))
    if os.path.exists(temp_lmdb):
        shutil.rmtree(temp_lmdb)
    # ~1.5 GB map; dupsort=True allows multiple values per key
    env = lmdb.Environment(path=temp_lmdb,
                           map_size=1500000000,
                           readonly=False,
                           max_dbs=10)
    road_db = env.open_db('road', dupsort=True)
    road_geom_db = env.open_db('road_geom')
    road_bearing_db = env.open_db('road_bearing', dupsort=True)
    road_turn_db = env.open_db('road_turn', dupsort=True)
    road_alias_db = env.open_db('road_alias', dupsort=True)
    road_infrastructure_db = env.open_db('road_infrastructure', dupsort=True)
    # ---- load the SDE tables into LMDB (progress logged every 100k rows) ----
    logging.info('read ROAD')
    with env.begin(write=True, db=road_db) as txn, \
         arcpy.da.SearchCursor(in_table=os.path.join(em.sde, 'ROAD'),
                               field_names=['PFI', 'FROM_UFI', 'TO_UFI', 'FEATURE_TYPE_CODE']) as sc:
        # value format: "FROM_UFI,TO_UFI,FEATURE_TYPE_CODE"
        for enum, row in enumerate(sc):
            txn.put(str(row[0]), ','.join([str(_) for _ in row[1:]]))
            if enum % 100000 == 0:
                logging.info(enum)
        logging.info(enum)
    logging.info('read ROAD geom')
    with env.begin(write=True, db=road_geom_db) as txn, \
         arcpy.da.SearchCursor(in_table=os.path.join(em.sde, 'ROAD'),
                               field_names=['PFI', 'SHAPE@WKB']) as sc:
        for enum, (pfi, wkb) in enumerate(sc):
            txn.put(str(pfi), str(wkb))
            if enum % 100000 == 0:
                logging.info(enum)
        logging.info(enum)
    logging.info('read ROAD_BEARING')
    with env.begin(write=True, db=road_bearing_db) as txn, \
         arcpy.da.SearchCursor(in_table=os.path.join(em.sde, 'ROAD_BEARING'),
                               field_names=['PFI', 'ENTRY_BEARING', 'EXIT_BEARING', 'ENTRY_BEARING_FLIP', 'EXIT_BEARING_FLIP']) as sc:
        # four keys per PFI: <pfi>ENTRY / EXIT / ENTRY_FLIP / EXIT_FLIP
        for enum, (pfi, entry_bear, exit_bear, entry_bear_flip,
                   exit_bear_flip) in enumerate(sc):
            pfi = str(pfi)
            txn.put(pfi + 'ENTRY', '{:.5f}'.format(entry_bear))
            txn.put(pfi + 'EXIT', '{:.5f}'.format(exit_bear))
            txn.put(pfi + 'ENTRY_FLIP', '{:.5f}'.format(entry_bear_flip))
            txn.put(pfi + 'EXIT_FLIP', '{:.5f}'.format(exit_bear_flip))
            if enum % 100000 == 0:
                logging.info(enum)
        logging.info(enum)
    logging.info('read ROAD_TURN')
    with env.begin(write=True, db=road_turn_db) as txn, \
         arcpy.da.SearchCursor(in_table=os.path.join(em.sde, 'ROAD_TURN'),
                               field_names=['UFI', 'FROM_PFI', 'TO_PFI', 'ANGLE', 'FROM_BEARING', 'TO_BEARING']) as sc:
        for enum, (ufi, from_pfi, to_pfi, angle, from_bearing,
                   to_bearing) in enumerate(sc):
            txn.put(
                str(ufi), ','.join([
                    str(o) for o in (from_pfi, to_pfi, angle, from_bearing,
                                     to_bearing)
                ]))
            if enum % 100000 == 0:
                logging.info(enum)
        logging.info(enum)
    logging.info('read ROAD_ALIAS')
    with env.begin(write=True, db=road_alias_db) as txn, \
         arcpy.da.SearchCursor(in_table=os.path.join(em.sde, 'ROAD_ALIAS'),
                               field_names=['PFI', 'ROAD_NAME_ID', 'ALIAS_NUMBER']) as sc:
        for enum, (pfi, rnid, alias_num) in enumerate(sc):
            txn.put(str(pfi), str(rnid) + ',' + str(alias_num))
            if enum % 100000 == 0:
                logging.info(enum)
        logging.info(enum)
    logging.info('read ROAD_INFRASTRUCTURE')
    with env.begin(write=True, db=road_infrastructure_db) as txn, \
         arcpy.da.SearchCursor(in_table=os.path.join(em.sde, 'ROAD_INFRASTRUCTURE'),
                               field_names=['UFI', 'FEATURE_TYPE_CODE']) as sc:
        for enum, (ufi, ftc) in enumerate(sc):
            txn.put(str(ufi), str(ftc))
            if enum % 100000 == 0:
                logging.info(enum)
        logging.info(enum)
    ##############
    logging.info('preparation')
    with env.begin(db=road_db) as road_txn, \
         env.begin(db=road_turn_db) as road_turn_txn, \
         env.begin(db=road_alias_db) as road_alias_txn, \
         env.begin(db=road_infrastructure_db) as road_infrastructure_txn, \
         env.begin(db=road_geom_db) as road_geom_txn, \
         dbpy.SQL_BULK_COPY(em.server, em.database_name, 'dbo.ROAD_XSTREET') as sbc_xstreet, \
         dbpy.SQL_BULK_COPY(em.server, em.database_name, 'dbo.ROAD_XSTREET_TRAVERSAL') as sbc_xstreet_traversal:
        road_cursor = road_txn.cursor()
        road_geom_cursor = road_geom_txn.cursor()
        road_cursor_iter = road_txn.cursor()
        road_turn_cursor = road_turn_txn.cursor()
        road_alias_cursor = road_alias_txn.cursor()
        road_infrastructure_cursor = road_infrastructure_txn.cursor()
        road_infrastructure_cursor_iter = road_infrastructure_txn.cursor()

        # convienience functions (each closure keeps its own LMDB cursor)
        get_road_nodes_cursor = road_txn.cursor()

        def get_road_nodes(pfi):
            # value is "FROM_UFI,TO_UFI,FTC" -> return [FROM_UFI, TO_UFI]
            return get_road_nodes_cursor.get(pfi).split(',')[:-1]

        get_road_rnids_cursor = road_alias_txn.cursor()

        def get_road_rnids(pfi):
            # all (road_name_id, alias_number) pairs, sorted by alias number
            rnids = []
            get_road_rnids_cursor.set_key(pfi)
            for values in get_road_rnids_cursor.iternext_dup():
                rnid, alias_num = values.split(',')
                rnids.append((rnid, alias_num))
            return sorted(rnids, key=lambda x: x[-1])

        get_road_ftc_cursor = road_txn.cursor()

        def get_road_ftc(pfi):
            # feature type code is the last field of the ROAD value
            return get_road_ftc_cursor.get(pfi).split(',')[-1]

        get_connecting_pfis_rt_cursor = road_turn_txn.cursor()
        get_connecting_pfis_ri_cursor = road_infrastructure_txn.cursor()

        def get_connecting_pfis(ufi, pfi):
            # roads reachable from `pfi` at node `ufi`, sorted by |angle|
            connecting_pfis = []
            get_connecting_pfis_rt_cursor.set_key(ufi)
            for values in get_connecting_pfis_rt_cursor.iternext_dup():
                from_pfi, to_pfi, angle, from_bearing, to_bearing = values.split(
                    ',')
                if from_pfi == pfi:
                    connecting_pfis.append([to_pfi, angle])
            return sorted(connecting_pfis, key=lambda x: abs(float(x[-1])))

        get_road_altnode_cursor = road_txn.cursor()

        def get_road_altnode(pfi, current_node):
            # the node of `pfi` opposite to `current_node`
            from_ufi, to_ufi, pfi_ftc = get_road_altnode_cursor.get(pfi).split(
                ',')
            if current_node == from_ufi:
                return to_ufi
            else:
                return from_ufi

        def get_traversal(pfi, ufi):
            # choose the next road to walk along when no xstreet was found
            traversal_pfis = get_connecting_pfis(ufi, pfi)
            traversal_pfis_sort_180 = sorted(
                traversal_pfis, key=lambda x: abs(180 - abs(float(x[-1]))))
            if len(traversal_pfis) == 0:
                # no roads connecting
                return 'ROAD_END', None, None
            else:
                # determine best traversal
                pfi_rnid = get_road_rnids(pfi)[0][0]
                # 1. road has SAME_RNID and PFI is not UNNAMED ('1312')
                if pfi_rnid <> '1312':
                    for con_pfi, con_angle in traversal_pfis_sort_180:
                        con_pfi_rnids = get_road_rnids(con_pfi)
                        if pfi_rnid in [rnid for rnid, an in con_pfi_rnids]:
                            traversal_desc = 'SAME_RNID'
                            traversal_pfi = con_pfi
                            traversal_ufi = get_road_altnode(con_pfi, ufi)
                            return 'SAME_RNID', con_pfi, get_road_altnode(
                                con_pfi, ufi)
                # 2. road angle closest to 180 degrees
                ## traversal_pfis_sort_180 = sorted(traversal_pfis, key=lambda x: abs(180 - abs(float(x[-1]))))
                traversal_pfi = traversal_pfis_sort_180[0][0]
                return 'CLOSE_TO_180', traversal_pfi, get_road_altnode(
                    traversal_pfi, ufi)

        def process_node(pfi, ufi, node_type):
            # walk outward from one end of `pfi` until a cross street is
            # found; returns (xstreet_pfi, xstreet_rnid, traversal rows)
            pfi_rnid = get_road_rnids(pfi)[0][0]  # get PFI RNID (primary rnid)
            pfi_from_ufi, pfi_to_ufi = get_road_nodes(pfi)
            traversal = []
            xstreet = []
            traversal_order = 0
            traversal_desc = 'BEGIN'
            traversal_pfi = pfi
            if node_type == 'FROM':
                traversal_ufi = pfi_from_ufi
            else:
                traversal_ufi = pfi_to_ufi
            xstreet_pfi = None
            xstreet_rnid = None
            while True:
                # get connecting PFI at FROM_UFI
                from_ufi_pfis = get_connecting_pfis(traversal_ufi,
                                                    traversal_pfi)
                traversal.append([
                    pfi, node_type, traversal_order, traversal_pfi,
                    traversal_ufi, len(from_ufi_pfis), traversal_desc
                ])
                if len(from_ufi_pfis) == 0:
                    traversal_desc = 'ROAD_END'
                    break
                # determine if suitable XSTREET
                for from_ufi_pfi, from_ufi_angle in from_ufi_pfis:
                    from_ufi_pfi_rnids = get_road_rnids(from_ufi_pfi)
                    from_ufi_pfi_ftc = get_road_ftc(from_ufi_pfi)
                    from_ufi_pfi_rnids_only = [
                        rnid for rnid, an in from_ufi_pfi_rnids
                    ]
                    if '1312' in from_ufi_pfi_rnids_only:
                        # road is UNNAMED
                        continue
                    if pfi_rnid in from_ufi_pfi_rnids_only:
                        # road has same RNID
                        continue
                    if from_ufi_pfi_ftc == 'TUNNEL':
                        # road type is a TUNNEL
                        continue
                    xstreet_pfi = from_ufi_pfi
                    xstreet_rnid = from_ufi_pfi_rnids[0][0]
                    traversal_desc = 'XSTREET'
                    break
                if traversal_desc == 'XSTREET':
                    traversal.append([
                        pfi, node_type, traversal_order, traversal_pfi,
                        traversal_ufi, len(from_ufi_pfis), traversal_desc
                    ])
                    break
                # determine next suitable traversal if XSTREET not found
                traversal_desc, traversal_pfi, traversal_ufi = get_traversal(
                    traversal_pfi, traversal_ufi)
                # add loop check here
                if traversal_order > 50:
                    # exit if traversal too long
                    traversal_desc = 'MORE_THAN_50'
                    break
                traversal_order = traversal_order + 1
            ## traversal.append([pfi, node_type, traversal_order, traversal_pfi, traversal_ufi, len(from_ufi_pfis), traversal_desc])
            return xstreet_pfi, xstreet_rnid, traversal

        with arcpy.da.InsertCursor(in_table=os.path.join(r'c:\temp\road_xstreet_validation.gdb', 'ROAD_XSTREET_VALIDATION'),
                                   field_names=['PFI', 'NODE_TYPE', 'TRAVERSAL_DIST', 'SHAPE@WKB']) as ic_valid, \
             arcpy.da.InsertCursor(in_table=os.path.join(r'c:\temp\road_xstreet_road.gdb', 'ROAD_XSTREET_ROAD'),
                                   field_names=['PFI', 'NODE_TYPE', 'XSTREET_PFI', 'SHAPE@WKB']) as ic_road:
            logging.info('looping roads')
            for enum_road, pfi in enumerate(
                    road_cursor.iternext(keys=True, values=False)):
                # get PFI RNID (primary rnid)
                pfi_rnid = get_road_rnids(pfi)[0][0]
                pfi_from_ufi, pfi_to_ufi = get_road_nodes(pfi)
                from_xstreet_pfi, from_xstreet_rnid, from_traversal = process_node(
                    pfi, pfi_from_ufi, 'FROM')
                to_xstreet_pfi, to_xstreet_rnid, to_traversal = process_node(
                    pfi, pfi_to_ufi, 'TO')
                #
                # insert FROM traversal
                #
                for f_traversal in from_traversal:
                    sbc_xstreet_traversal.add_row(f_traversal)
                from_geoms = []
                for f_traversal in from_traversal:
                    from_geoms.append(
                        shapely.wkb.loads(road_geom_cursor.get(
                            f_traversal[3])))
                from_merged_line = shapely.ops.linemerge(from_geoms)
                # measure actual traversal distance (subtract base road length)
                from_traversal_dist = from_merged_line.length - shapely.wkb.loads(
                    road_geom_cursor.get(pfi)).length
                if from_xstreet_pfi:
                    # (subtract xstreet road length)
                    ## from_traversal_dist = from_traversal_dist - shapely.wkb.loads(road_geom_cursor.get(from_xstreet_pfi)).length
                    # add the xstreet geom
                    from_xstreet_geom = shapely.wkb.loads(
                        road_geom_cursor.get(from_xstreet_pfi))
                    from_geoms.append(from_xstreet_geom)
                    # insert into ROAD_XSTREET_ROAD
                    ic_road.insertRow([
                        pfi, 'FROM', from_xstreet_pfi,
                        shapely.wkb.loads(
                            road_geom_cursor.get(from_xstreet_pfi)).wkb
                    ])
                from_merged_line_final = shapely.ops.linemerge(from_geoms)
                ic_valid.insertRow([
                    pfi, 'FROM', from_traversal_dist,
                    from_merged_line_final.wkb
                ])
                ##
                #
                # insert TO traversal
                #
                for t_traversal in to_traversal:
                    sbc_xstreet_traversal.add_row(t_traversal)
                to_geoms = []
                for t_traversal in to_traversal:
                    to_geoms.append(
                        shapely.wkb.loads(road_geom_cursor.get(
                            t_traversal[3])))
                to_merged_line = shapely.ops.linemerge(to_geoms)
                # measure actual traversal distance (subtract base road)
                to_traversal_dist = to_merged_line.length - shapely.wkb.loads(
                    road_geom_cursor.get(pfi)).length
                if to_xstreet_pfi:
                    # (subtract xstreet road length)
                    ## to_traversal_dist = to_traversal_dist - shapely.wkb.loads(road_geom_cursor.get(to_xstreet_pfi)).length
                    # add the xstreet geom
                    to_xstreet_geom = shapely.wkb.loads(
                        road_geom_cursor.get(to_xstreet_pfi))
                    to_geoms.append(to_xstreet_geom)
                    # insert into ROAD_XSTREET_ROAD
                    ic_road.insertRow([
                        pfi, 'TO', to_xstreet_pfi,
                        shapely.wkb.loads(
                            road_geom_cursor.get(to_xstreet_pfi)).wkb
                    ])
                to_merged_line_final = shapely.ops.linemerge(to_geoms)
                ic_valid.insertRow(
                    [pfi, 'TO', to_traversal_dist, to_merged_line_final.wkb])
                ##
                sbc_xstreet.add_row([
                    pfi, pfi_rnid, pfi_from_ufi, from_xstreet_rnid,
                    from_xstreet_pfi, pfi_to_ufi, to_xstreet_rnid,
                    to_xstreet_pfi
                ])
                if enum_road % 10000 == 0:
                    logging.info(enum_road)
                    sbc_xstreet.flush()
                    sbc_xstreet_traversal.flush()
            logging.info(enum_road)
    # ---- attribute and spatial indexes on the validation outputs ----
    logging.info('indexes')
    arcpy.AddIndex_management(in_table=os.path.join(
        r'c:\temp\road_xstreet_validation.gdb', 'ROAD_XSTREET_VALIDATION'),
                              fields='PFI',
                              index_name='PFI',
                              ascending=True)
    arcpy.AddIndex_management(in_table=os.path.join(
        r'c:\temp\road_xstreet_validation.gdb', 'ROAD_XSTREET_VALIDATION'),
                              fields='NODE_TYPE',
                              index_name='NODE')
    arcpy.AddIndex_management(in_table=os.path.join(
        r'c:\temp\road_xstreet_validation.gdb', 'ROAD_XSTREET_VALIDATION'),
                              fields='TRAVERSAL_DIST',
                              index_name='DIST')
    arcpy.AddIndex_management(in_table=os.path.join(
        r'c:\temp\road_xstreet_road.gdb', 'ROAD_XSTREET_ROAD'),
                              fields='PFI',
                              index_name='PFI',
                              ascending=True)
    arcpy.AddIndex_management(in_table=os.path.join(
        r'c:\temp\road_xstreet_road.gdb', 'ROAD_XSTREET_ROAD'),
                              fields='XSTREET_PFI',
                              index_name='XPFI',
                              ascending=True)
    logging.info('spatial indexes')
    arcpy.RemoveSpatialIndex_management(in_features=os.path.join(
        r'c:\temp\road_xstreet_validation.gdb', 'ROAD_XSTREET_VALIDATION'))
    arcpy.AddSpatialIndex_management(in_features=os.path.join(
        r'c:\temp\road_xstreet_validation.gdb', 'ROAD_XSTREET_VALIDATION'))
    arcpy.RemoveSpatialIndex_management(in_features=os.path.join(
        r'c:\temp\road_xstreet_road.gdb', 'ROAD_XSTREET_ROAD'))
    arcpy.AddSpatialIndex_management(in_features=os.path.join(
        r'c:\temp\road_xstreet_road.gdb', 'ROAD_XSTREET_ROAD'))
# --- Script-tool driver: read parameters and dispatch point creation -----
# Parameter 2 is a string choice mapped onto a boolean via point_placement.
use_percent = point_placement[arcpy.GetParameter(2)]  # Str -> Bool
end_points = arcpy.GetParameter(5)  # Boolean: also place points at line ends

described = arcpy.Describe(in_features)
spatial_info = namedtuple('spatial_info', 'spatialReference extent')
sp_info = spatial_info(spatialReference=described.spatialReference,
                       extent=described.extent)

if use_percent:
    # Percentage mode: parameter 4 is a percent value (0-100).
    fraction = arcpy.GetParameter(4) / 100  # Float
    create_points_from_lines(in_features, out_fc, sp_info.spatialReference,
                             percent=fraction, add_end_points=end_points)
else:
    # Distance mode: parameter 3 is a linear-unit string (value + units).
    raw_distance = arcpy.GetParameterAsText(3)  # String
    value, param_linear_units = get_distance_and_units(raw_distance)
    spacing = convert_units(value, param_linear_units, sp_info)
    create_points_from_lines(in_features, out_fc, sp_info.spatialReference,
                             dist=spacing, add_end_points=end_points)

# Best effort: a spatial index speeds later queries but is not essential,
# so a geoprocessing failure here is deliberately ignored.
try:
    arcpy.AddSpatialIndex_management(out_fc)
except arcpy.ExecuteError:
    pass
print 'keeping ' + field.name elif field.name == 'SERVICESTATUS': print 'keeping ' + field.name elif field.name == 'CUSTOMERTYPE': print 'keeping ' + field.name elif field.name == 'SERVICEMXLOCATION': print 'keeping ' + field.name else: print 'keeping ' + field.name fieldNameList.append(field.name) arcpy.DeleteField_management("SPss", fieldNameList) if arcpy.Exists(USIC + "\\" + "ServicePointUSICMoEast.shp"): arcpy.Delete_management(USIC + "\\" + "ServicePointUSICMoEast.shp") arcpy.CopyFeatures_management('SPss', USIC + "\\" + "ServicePointUSICMoEast.shp") arcpy.AddSpatialIndex_management(USIC + "\\" + "ServicePointUSICMoEast.shp") if arcpy.Exists(SPs): arcpy.Delete_management(SPs) ################--Test Points TP = r"\\parcser02\GisServerManager\Data\PGISE.sde\LGC_GAS.GasFacilities\LGC_GAS.CPTestPoint" if arcpy.Exists(USIC + "\\" + "AnodeUSICMoEast"): arcpy.Delete_management(USIC + "\\" + "AnodeUSICMoEast.shp") anode = r"C:\temp\NO_GIS_SP_2.gdb\AnodeInt" arcpy.CopyFeatures_management(TP, anode) arcpy.MakeFeatureLayer_management(anode, 'anode2') fieldNameList = [] fields = arcpy.ListFields("anode2") for field in fields: if field.name == 'OBJECTID': print 'keeping ' + field.name elif field.name == 'SHAPE_Length':
# -*- coding:utf-8 -*- import arcpy # need to remove lock manually connection = 'Database Connections/[email protected]' arcpy.env.workspace = connection names = ['RES_' + str(x) for x in range(11, 24)] for ds in names: print ds fc = arcpy.ListFeatureClasses(None, None, ds) print fc for f in fc: des = arcpy.Describe(f) if str(des.hasSpatialIndex) == 'True': print 'delete spatial index:', str(f) arcpy.RemoveSpatialIndex_management(f) print 'add spatial index:', str(f) arcpy.AddSpatialIndex_management(f) print str(f), 'ok'
y['IDs: ']: eval(y['IDs of Links']) for x, y in node_id_to_link_ids_dict.iteritems() } # clipping the shapefile and getting node-coordinate dict nodes_inside = get_all_nodes_in_link(other) #each link is associated with 2 nodes, make its dictionary other_pt = other other_pt = other_pt.replace("intermediate/", "intermediate/pt_") arcpy.MakeFeatureLayer_management(base, base_f) arcpy.MakeFeatureLayer_management(other_pt, other_pt_f) # find the nearest coordinates to those links near_ids = {} no_nearby_linkids = [] id_buffer_dict = {} arcpy.AddSpatialIndex_management( base_f) # adjusted comment from gis.stackoverflow, why? for key in nodes_inside: print key where_clause = """ "_ID_" = %d""" % key arcpy.SelectLayerByAttribute_management(other_pt_f, "NEW_SELECTION", where_clause) ids_list = node_id_to_link_ids_dict[key] print ids_list if len(ids_list) == 0: print("X: {0}".format(key)) #highest range did not work no_nearby_linkids.append(key) continue # print "Proximite link IDs: {1}".format(len(ids_list), ids_list) arcpy.CopyFeatures_management(other_pt_f, m1) #only one row
import arcpy talleyHo = [] pointCount = 0 pointCountK = 0 pointfeat = r".shp" polyfeat = r".shp" arcpy.AddSpatialIndex_management(pointfeat) arcpy.AddSpatialIndex_management(polyfeat) for point in arcpy.da.SearchCursor(pointfeat): pointCount += 1 if pointCount == 1000: pointCountK += 1 pointCount = 0 print pointCountK pointFeature = point.shape #set the comparison geometry to each pnt polycount = 0 for polygon in arcpy.da.SearchCursor(polyfeat): polyFeature = polygon.shape #set base geom to each polygon if polyFeature.contains(pointFeature) == True: polycount += 1 tempStr = str(polygon.FID) + "@" + str(polycount) talleyHo.append(tempStr) for item in talleyHo:
int(max(li)[17:19]))).total_seconds() daysold = ((t / 60) / 60) / 24 if daysold > 29.97: update = 1 arcpy.Delete_management(newdir + "\\DYFITemp.gdb\\" + n) print "Event: " + n + " Data Deleted!" arcpy.env.workspace = "path//to//TempProdGDB//" ls1 = arcpy.ListFeatureClasses("*1kpoly") ls10 = arcpy.ListFeatureClasses("*10kpoly") arcpy.Merge_management(ls1, "path//to//TempProdGDB//onekpoly") arcpy.DefineProjection_management("path//to//TempProdGDB//onekpoly", 4326) arcpy.AddSpatialIndex_management("path//to//TempProdGDB//onekpoly") arcpy.Merge_management(ls10, "path//to//TempProdGDB//tenkpoly") arcpy.DefineProjection_management("path//to//TempProdGDB//tenkpoly", 4326) arcpy.AddSpatialIndex_management("path//to//TempProdGDB//tenkpoly") #Migrates data to prod GDB for files in os.listdir("path\\to\\TempProdGDB"): if files[-5:].lower() != '.lock': shutil.copy2(os.path.join("path\\to\\TempProdGDB", files), os.path.join("path\\to\\ProdGDB", files)) print "Data Migrated to Live Feed" #Removes all .json files jfiles = glob.glob("path\\to\\tempdir\\*.json") for f in jfiles:
def add_spatial_index(self):
    """Build a spatial index on this dataset's catalog path.

    Only shapefiles are indexed; every other data type is left untouched.
    """
    is_shapefile = self.dataType == 'ShapeFile'
    if not is_shapefile:
        return
    arcpy.AddSpatialIndex_management(self.catalogPath)
output = open(LogFile, "w") output.write("RebuildIndex\n") output.write(strmsg1) for fileLine in workspacelist: ENV.workspace = fileLine.replace("\n", "") strmsg1 = "Process Workspace: " + fileLine + "\n" print strmsg1 output.write(strmsg1) try: FCList = arcpy.ListFeatureClasses("*", "all") for FC in FCList: strmsg1 = "Process FeatureClass: " + FC.encode("gb2312") + "\n" print strmsg1 output.write(strmsg1) arcpy.AddSpatialIndex_management(FC, "0", "0", "0") str1 = str(arcpy.GetMessages().encode("gb2312")) output.write(str1 + "\n") print "before" datasets = arcpy.ListDatasets("*", "all") print datasets for ds in datasets: strmsg1 = "Process Dataset: " + ds.encode("gb2312") + "\n" print strmsg1 output.write(strmsg1) for FC in arcpy.ListFeatureClasses("*", "all", ds): strmsg1 = "Process FeatureClass: " + FC.encode("gb2312") + "\n" print strmsg1 output.write(strmsg1) arcpy.AddSpatialIndex_management(FC, "0", "0", "0") str1 = str(arcpy.GetMessages().encode("gb2312"))
def convertPointsToLine(inPts, outFeatures, IDField, cursorSort, close):
    """Convert a point feature class into polylines.

    Points are grouped by IDField (one output line per distinct ID value)
    and ordered by cursorSort; when close is truthy each line is closed back
    to its first vertex.  Groups with fewer than two points are skipped with
    a warning (ID message 1059).
    """
    try:
        # Assign empty values to the cursor/row objects so the finally
        # block can safely release whichever ones were actually created.
        iCur, sRow, feat = None, None, None

        desc = arcpy.Describe(inPts)
        shapeName = desc.shapeFieldName

        # Create the output feature class, carrying over the input's M/Z
        # awareness (via getZM) and schema/spatial reference (template inPts).
        outPath, outFC = os.path.split(outFeatures)
        arcpy.CreateFeatureclass_management(outPath, outFC, "POLYLINE", "",
                                            getZM("outputMFlag", desc.hasM),
                                            getZM("outputZFlag", desc.hasZ),
                                            inPts)
        outShapeName = arcpy.Describe(outFeatures).shapeFieldName

        # If there is an IDField, add the equivalent field to the output.
        if IDField:
            f = arcpy.ListFields(inPts, IDField)[0]
            fName = arcpy.ValidateFieldName(f.name, outPath)
            arcpy.AddField_management(outFeatures, fName, f.type,
                                      f.precision, f.scale, f.length,
                                      f.aliasName, f.isNullable, f.required,
                                      f.domain)

        # Open an insert cursor for the new feature class.
        iCur = arcpy.InsertCursor(outFeatures)

        # Array that accumulates the vertices of the line being built.
        array = arcpy.Array()

        # Tracks the current group's ID; -1 means "no group started yet".
        ID = -1
        fields = shapeName
        if cursorSort:
            fields += ";" + cursorSort
        # Walk the points in sorted order and flush a polyline each time
        # the ID value changes.
        for sRow in arcpy.gp.SearchCursor(inPts, "", None, fields,
                                          cursorSort, arcpy.env.extent):
            pt = sRow.getValue(shapeName).getPart(0)
            if IDField:
                currentValue = sRow.getValue(IDField)
            else:
                # No grouping field: all points belong to a single line.
                currentValue = None
            if ID == -1:
                ID = currentValue
            if ID <> currentValue:
                if array.count >= 2:
                    # To close the line, add the first point to the end.
                    if close:
                        array.add(array.getObject(0))
                    feat = iCur.newRow()
                    if IDField:
                        if ID:  # in case the value is None/Null
                            feat.setValue(IDField, ID)
                    feat.setValue(outShapeName, array)
                    iCur.insertRow(feat)
                else:
                    # Fewer than two vertices cannot form a line.
                    arcpy.AddIDMessage("WARNING", 1059, unicode(ID))
                array.removeAll()
            array.add(pt)
            ID = currentValue

        # Add the last feature (the loop only flushes on an ID change).
        if array.count > 1:
            # To close the line, add the first point to the end.
            if close:
                array.add(array.getObject(0))
            feat = iCur.newRow()
            if IDField:
                if ID:  # in case the value is None/Null
                    feat.setValue(IDField, currentValue)
            feat.setValue(outShapeName, array)
            iCur.insertRow(feat)
        else:
            arcpy.AddIDMessage("WARNING", 1059, unicode(ID))
        array.removeAll()

    except Exception as err:
        import traceback
        arcpy.AddError(
            traceback.format_exception_only(type(err), err)[0].rstrip())

    finally:
        # Release cursor/row objects so file/schema locks are dropped.
        if iCur:
            del iCur
        if sRow:
            del sRow
        if feat:
            del feat

        try:
            # Update the spatial index(es) using the default grid sizes;
            # best effort, failures are ignored.
            r = arcpy.CalculateDefaultGridIndex_management(outFeatures)
            arcpy.AddSpatialIndex_management(outFeatures, r.getOutput(0),
                                             r.getOutput(1), r.getOutput(2))
        except:
            pass
print("starting database view...") databaseVW = r"\\path\to\database_vw.sql" openDatabaseVW = open(databaseVW).read() arcpy.CreateDatabaseView_management( sde, 'database_vw', openDatabaseVW) print("\t\t...finished") # then create a materialized view using oracle tools databaseMVW = r"\\path\to\materialized_vw.sql" openDatabaseMVW = open(databaseMVW, 'r') sqlDatabaseMVW = openDatabaseMVW.read() openDatabaseMVW.close() databaseMVWCommands = sqlDatabaseMVW.split(';') # sql file deletes and re-creates materialized view instead of using rebuild # sql file includes tabular index creation on several fields print("creating materialized view...") for commands in databaseMVWCommands: try: sql.execute(commands) except Exception as err: print(err) print("...done") # finally build spatial indexes on materialized view print("starting spatial indexes...") arcpy.AddSpatialIndex_management("schema.materialized_view_name") print("\t\t...finished")
datetime(int(max(li)[:4]), int(max(li)[5:7]), int(max(li)[8:10]), int(max(li)[11:13]), int(max(li)[14:16]), int(max(li)[17:19]))).total_seconds() daysold = ((t / 60) / 60) / 24 if daysold > 29.97: update = 1 arcpy.Delete_management(newdir + "\\EventTemp.gdb\\" + n) print "Event: " + n + " Deleted!" del cursor GDBdirnew = newdir + "\\Event.gdb" if update == 1: events = arcpy.ListFeatureClasses() arcpy.Delete_management(GDBdirnew + "\\events") arcpy.Merge_management(events, GDBdirnew + "\\events") arcpy.AddSpatialIndex_management("path\\to\\TempEventGDB") #Moves updated GDB to "Live" GDB for GIS Service for files in os.listdir("path\\to\\TempEventGDB"): if files[-5:].lower() != '.lock': shutil.copy2(os.path.join("path\\to\\TempEventGDB", files), os.path.join("path\\to\\ProdEventGDB", files)) print "Data Migrated to Live Feed" jfiles = glob.glob("path\\to\\TempDir\\*.json") for f in jfiles: os.remove(f) print f + " Removed!" servename = "hostname:6080" shakeserviceURL = "http://" + servename + "/arcgis/rest/services/event/MapServer"
def csv2section():
    """Script-tool entry point: draw one two-point polyline ("section") per
    input row, connecting (X1, Y1) to (X2, Y2).

    Tool parameters:
        0 inPt             - input point table / feature class
        1 outFeature       - output polyline feature class
        2-5 X1, Y1, X2, Y2 - field names holding the segment endpoints
        6 reserveField     - optional field copied onto each output line
    """
    arcpy.env.overwriteOutput = True
    inPt = arcpy.GetParameterAsText(0)
    outFeature = arcpy.GetParameterAsText(1)
    X1 = arcpy.GetParameterAsText(2)
    Y1 = arcpy.GetParameterAsText(3)
    X2 = arcpy.GetParameterAsText(4)
    Y2 = arcpy.GetParameterAsText(5)
    reserveField = arcpy.GetParameterAsText(6)
    try:
        outPath, outFC = os.path.split(outFeature)
        # Output is Z-enabled, M-disabled.  NOTE(review): the projection
        # file path is hard-coded — change
        # C:\Users\leizengxiang\Desktop\drawCsvInArcgis to your directory,
        # and change wgs84.prj to your projection file.
        arcpy.CreateFeatureclass_management(outPath, outFC, "POLYLINE", "",
                                            "DISABLED", "ENABLED",
                                            "C:\Users\leizengxiang\Desktop\drawCsvInArcgis\\wgs84.prj")
        if reserveField:
            # Mirror the reserved field's name and type onto the output.
            field = arcpy.ListFields(inPt, reserveField)[0]
            arcpy.AddField_management(outFeature, field.name, field.type)
        # Pre-set cursor/row objects so the finally block can clean up
        # whichever ones were actually created.
        oCur, iCur, sRow, feat = None, None, None, None
        shapeName = "Shape"
        idName = "id"  # currently unused
        oCur = arcpy.SearchCursor(inPt)
        iCur = arcpy.InsertCursor(outFeature)
        array = arcpy.Array()
        ID = -1   # currently unused
        PID = 0   # running point ID across all vertices
        LID = 0   # running line counter
        if reserveField:
            RESERVE = 0
        TEMPX1 = 0
        TEMPX2 = 0
        TEMPY1 = 0
        TEMPY2 = 0
        for sRow in oCur:
            TEMPX1 = sRow.getValue(X1)
            TEMPX2 = sRow.getValue(X2)
            TEMPY1 = sRow.getValue(Y1)
            TEMPY2 = sRow.getValue(Y2)
            # getXY presumably transforms the endpoint pair — defined
            # elsewhere in this file; TODO confirm its semantics.
            (TEMPX1,TEMPY1,TEMPX2,TEMPY2)=getXY(TEMPX1,TEMPY1,TEMPX2,TEMPY2)
            pt1=arcpy.Point(TEMPX1,TEMPY1,None, None, PID)
            PID += 1
            pt2=arcpy.Point(TEMPX2,TEMPY2,None, None, PID)
            PID += 1
            array.add(pt1)
            array.add(pt2)
            if reserveField:
                RESERVE = sRow.getValue(reserveField)
            # Build one two-vertex polyline from the accumulated array.
            feat = iCur.newRow()
            feat.setValue(shapeName, array)
            LID += 1
            if reserveField:
                feat.setValue(reserveField, RESERVE)
            iCur.insertRow(feat)
            array.removeAll()
    except Exception as err:
        # Python 2 style: report the first exception argument.
        arcpy.AddError(err[0])
    finally:
        # Release cursors/rows so schema locks are dropped.
        if oCur:
            del oCur
        if iCur:
            del iCur
        if sRow:
            del sRow
        if feat:
            del feat
        try:
            # Update the spatial index(es) with default grid sizes;
            # best effort, failures are ignored.
            r = arcpy.CalculateDefaultGridIndex_management(outFeature)
            arcpy.AddSpatialIndex_management(outFeature, r.getOutput(0),
                                             r.getOutput(1), r.getOutput(2))
        except:
            pass
def hexagon_polygon(inputAOI, outputTheissen, width='500', *args):
    """Tessellate the extent of inputAOI with flat-topped hexagons.

    Two offset fishnet label grids are appended into one point set, Thiessen
    polygons are built from those points (which yields hexagons), the result
    is clipped to the AOI envelope, given X/Y centroid fields and a
    sequential hexagonID, and copied to outputTheissen.

    Parameters:
        inputAOI       - area-of-interest feature layer or feature class
        outputTheissen - output hexagon feature class path
        width          - hexagon width (string or number; default '500')
    """
    descInput = arcpy.Describe(inputAOI)
    # Resolve a feature layer to its underlying dataset path.
    if descInput.dataType == 'FeatureLayer':
        inputAreaOfInterest = descInput.CatalogPath
    else:
        inputAreaOfInterest = inputAOI
    # Describe the input and get its extent properties.
    desc = arcpy.Describe(inputAreaOfInterest)
    ext = desc.extent
    xcoord = ext.XMin
    ycoord = ext.YMin
    urxcoord = ext.XMax
    urycoord = ext.YMax
    height = float(width) * math.sqrt(3)
    # Invert the height and width so that the flat side of the hexagon is
    # on the bottom and top.
    tempWidth = width
    width = height
    height = tempWidth
    # Calculate new offset origin, opposite corner and Y-axis point
    # coordinates.  The grid is padded by two cells beyond the extent so
    # clipped border hexagons are complete.
    factor1 = -2.0
    origin = str(xcoord + float(width) * factor1) + " " + str(ycoord + float(height) * factor1)
    originX = str(xcoord + float(width) * factor1)
    originY = str(ycoord + float(height) * factor1)
    factor2 = 2.0
    oppositeCorner = str(urxcoord + float(width) * factor2) + " " + str(urycoord + float(height) * factor2)
    oppositeCornerX = str(urxcoord + float(width) * factor2)
    oppositeCornerY = str(urycoord + float(height) * factor2)
    # The second fishnet is shifted by half a cell in both directions.
    factor3 = 0.5
    newOrigin = str(float(originX) + float(width) * factor3) + " " + str(float(originY) + float(height) * factor3)
    newOriginX = str(float(originX) + float(width) * factor3)
    newOriginY = str(float(originY) + float(height) * factor3)
    newOppositeCorner = str(float(oppositeCornerX) + float(width) * factor3) + " " + str(float(oppositeCornerY) + float(height) * factor3)
    newOppositeCornerX = str(float(oppositeCornerX) + float(width) * factor3)
    newOppositeCornerY = str(float(oppositeCornerY) + float(height) * factor3)
    yAxisCoordinates1 = str(float(originX)) + " " + str(float(oppositeCornerY))
    yAxisCoordinates2 = str(float(newOriginX)) + " " + str(float(newOppositeCornerY))
    # Calculate side length, hexagonal area and number of columns.
    # 2.598076211 = 3*sqrt(3)/2, the regular-hexagon area coefficient.
    sideLength = float(height) / math.sqrt(3)
    hexagonArea = 2.598076211 * pow(sideLength, 2)
    numberOfColumns = int((urxcoord - xcoord) / int(width))
    # Report the (swapped) dimensions and grid statistics.
    arcpy.AddMessage("------------------------")
    arcpy.AddMessage("Width: " + str(height))
    arcpy.AddMessage("Height: " + str(width))
    arcpy.AddMessage("Hexagon Area: " + str(hexagonArea))
    arcpy.AddMessage("Number of Columns: " + str(numberOfColumns))
    arcpy.AddMessage("------------------------")
    try:
        outputWorkspace = os.path.dirname(outputTheissen)
        arcpy.env.scratchWorkspace = os.path.dirname(outputTheissen)
        # Process: Create Fishnet (base grid, labels only are used)...
        fishnetPath1 = (os.path.join(outputWorkspace, "Fishnet_1"))
        fishnet1 = arcpy.CreateFishnet_management(fishnetPath1, origin,
                                                  yAxisCoordinates1, width,
                                                  height, "0", "0",
                                                  oppositeCorner, "LABELS",
                                                  "")
        # Process: Create Fishnet (2) (half-cell-offset grid)...
        fishnetPath2 = (os.path.join(outputWorkspace, "Fishnet_2"))
        fishnet2 = arcpy.CreateFishnet_management(fishnetPath2, newOrigin,
                                                  yAxisCoordinates2, width,
                                                  height, "0", "0",
                                                  newOppositeCorner,
                                                  "LABELS")
        # Process: Create Feature Class to hold the combined label points...
        spatialRef = arcpy.Describe(inputAreaOfInterest).spatialReference
        hexPoints = arcpy.CreateFeatureclass_management(outputWorkspace,
                                                        "hex_points",
                                                        "POINT", "", "", "",
                                                        spatialRef)
        # Get fishnet labels from the results of the fishnet tool
        # (output index 1 is the label point layer).
        fishnetLabel1 = fishnet1.getOutput(1)
        fishnetLabel2 = fishnet2.getOutput(1)
        # Define projection for the fishnet labels.
        arcpy.DefineProjection_management(fishnetLabel1, spatialRef)
        arcpy.DefineProjection_management(fishnetLabel2, spatialRef)
        # Process: Append both label sets into one point feature class...
        inputForAppend = "{0};{1}".format(fishnetLabel1, fishnetLabel2)
        arcpy.Append_management(inputForAppend, hexPoints, "NO_TEST", "", "")
        # Process: Create Thiessen Polygons (these come out hexagonal
        # because of the two offset point grids)...
        fullTheissen = arcpy.CreateThiessenPolygons_analysis(
            hexPoints, (os.path.join(outputWorkspace, "FullTheissen")),
            "ONLY_FID")
        arcpy.AddMessage("Creating hexagonal polygons.")
        # Process: Minimum Bounding Geometry (AOI envelope for clipping)...
        AOIEnvelope = arcpy.MinimumBoundingGeometry_management(
            inputAreaOfInterest,
            (os.path.join(outputWorkspace, "AOIEnvelope")), "ENVELOPE",
            "ALL")
        # Process: Make Feature Layer...
        hexLayer = arcpy.MakeFeatureLayer_management(fullTheissen,
                                                     "Hex_Layer", "", "", "")
        # Process: Select Layer By Location (keep hexagons touching the
        # AOI envelope)...
        arcpy.SelectLayerByLocation_management(hexLayer, "INTERSECT",
                                               AOIEnvelope)
        # Process: Add Field (1)...
        arcpy.AddField_management(hexLayer, "X_Coord", "DOUBLE", "", "", "",
                                  "", "NULLABLE", "NON_REQUIRED", "")
        # Process: Add Field (2)...
        arcpy.AddField_management(hexLayer, "Y_Coord", "DOUBLE", "", "", "",
                                  "", "NULLABLE", "NON_REQUIRED", "")
        # Process: Calculate X Value (centroid X, rounded to 2 decimals)...
        arcpy.CalculateField_management(hexLayer, "X_Coord", "GetXValue(!shape.centroid!)", "PYTHON", "def GetXValue(centroid):\\n coords = centroid.split(\" \")\\n return round(float(coords[0]),2)")
        # Process: Calculate Y Value (centroid Y, rounded to 2 decimals)...
        arcpy.CalculateField_management(hexLayer, "Y_Coord", "GetYValue(!shape.centroid!)", "PYTHON", "def GetYValue(centroid):\\n coords = centroid.split(\" \")\\n return round(float(coords[1]),2)")
        # Process: Add Field (3)...
        arcpy.AddField_management(hexLayer, "hexagonID", "LONG", "", "", "",
                                  "", "NULLABLE", "NON_REQUIRED", "")
        # Calculate hexagon polygon IDs, numbering row-by-row (sorted by
        # Y then X ascending).
        cur = arcpy.UpdateCursor(hexLayer, "", "", "", "y_coord A; x_coord A")
        for ID, row in enumerate(cur, 1):
            row.hexagonID = ID
            cur.updateRow(row)
        # Process: Add Spatial Index...
        arcpy.AddSpatialIndex_management(hexLayer)
        arcpy.AddMessage("Adding Hexagon Id to polygons.")
        arcpy.CopyFeatures_management(hexLayer, outputTheissen)
        # Delete all intermediate data.
        arcpy.Delete_management(fishnet1)
        arcpy.Delete_management(fishnet2)
        arcpy.Delete_management(fishnetLabel1)
        arcpy.Delete_management(fishnetLabel2)
        arcpy.Delete_management(hexPoints)
        arcpy.Delete_management(fullTheissen)
        arcpy.Delete_management(AOIEnvelope)
        arcpy.AddMessage("Congratulations! You have created the most beautiful polygons ever :)")
    except:
        # get the traceback object
        tb = sys.exc_info()[2]
        # tbinfo contains the line number that the code failed on and the
        # code from that line
        tbinfo = traceback.format_tb(tb)[0]
        # concatenate information together concerning the error into a
        # message string
        pymsg = "PYTHON ERRORS:\nTraceback Info:\n" + tbinfo + "\nError Info:\n " + \
            str(sys.exc_type)+ ": " + str(sys.exc_value) + "\n"
        # generate a message string for any geoprocessing tool errors
        msgs = "GP ERRORS:\n" + arcpy.GetMessages(2) + "\n"
        # return gp messages for use with a script tool
        arcpy.AddError(msgs)
        arcpy.AddError(pymsg)
        # print messages for use in Python/PythonWin
        print msgs
        print pymsg