# (fragment) Tail of a thousands-separator helper whose `def`/`try` lie before
# this chunk. NOTE(review): int() on a comma-formatted string raises ValueError,
# so for numbers >= 1000 this always falls into the except branch and returns
# the input unchanged — presumably the int() wrapper is a bug; confirm upstream.
        return int( re.sub(r'(\d{3})(?=\d)', r'\1,', str(someNumber)[::-1])[::-1])
    except:
        print_exception()
        return someNumber
## ================================================================================================================
# Import system modules
import sys, os, arcpy, string, traceback, re
arcpy.env.overwriteOutput = True
# ----------------------------------------------- Determine ArcGIS Install Version
# Build a human-readable version banner from the Desktop install info.
d = arcpy.GetInstallInfo('desktop')
keys = d.keys()
for k in keys:
    if k == "Version":
        version = " \nArcGIS %s : %s" % (k, d[k])
# Flag 10.0/10.1 installs. NOTE(review): the name ArcGIS101 is True for BOTH
# 10.0 and 10.1 — looks intentional ("10.0/10.1 era") but confirm.
if version.find("10.0") > 0 or version.find("10.1") > 0:
    ArcGIS101 = True
else:
    ArcGIS101 = False
def isArcGISPatched(self):
    """Return True if the configured target patch is satisfied, else False.

    Flow (as implemented below):
      1. Python 3+ -> assumed ArcGIS Pro, no patch check needed -> True.
      2. Read the <Patch> node from self.m_doc; absent node -> True,
         malformed/valueless node -> False.
      3. Compare the patch's target version string against the installed
         Desktop version; installed >= newer -> True.
      4. On Linux -> True (no Windows registry to consult).
      5. Otherwise scan the Windows registry Updates keys for the patch name.

    Relies on winreg names (OpenKey, HKEY_LOCAL_MACHINE, EnumKey,
    QueryValueEx) imported elsewhere in this module.
    """
    # return values [true | false]
    # if we're running on python 3+, it's assumed we're on (ArcGIS Pro) and there's no need to check for patches.
    if (sys.version_info[0] >= 3):
        return True
    # if the patch XML node is not properly formatted in structure/with values, MDCS returns an error and will abort the operation.
    patch_node = self.getXMLNode(self.m_doc, "Patch")
    if (patch_node == ''):
        return True
    if (patch_node.attributes.length == 0):
        return False
    if ((self.CVERSION_ATTRIB in patch_node.attributes.keys()) == False):
        return False
    target_ver = patch_node.attributes.getNamedItem(
        self.CVERSION_ATTRIB).nodeValue.strip()
    if (len(target_ver) == 0):
        return False
    # Walk the <Patch> children looking for a <Name> element; its text is the
    # registry search key used in step 5 below.
    search_key = ''
    patch_desc_node = patch_node.firstChild.nextSibling
    while (patch_desc_node is not None):
        node_name = patch_desc_node.nodeName
        if (node_name == 'Name'):
            if (patch_desc_node.hasChildNodes() == True):
                search_key = patch_desc_node.firstChild.nodeValue
            break
        # skip the interleaved text nodes produced by minidom
        patch_desc_node = patch_desc_node.nextSibling.nextSibling
    if (len(search_key) == 0):
        # if no patch description could be found, return False
        return False
    # Normalize the target version to 4 numeric components (major.minor.rev.build),
    # padding missing parts with zeros.
    ver = (target_ver + '.0.0.0.0').split('.')
    for n in range(self.CMAJOR, self.CBUILD + 1):
        if (ver[n] == ''):
            ver[n] = 0
        ver[n] = int(ver[n])
    ver = ver[:4]  # accept only the first 4 digits.
    # Zero-pad each component to 4 digits so the concatenated strings compare
    # correctly as integers (e.g. 10.2.1.0 -> 0010000200010000).
    target_v_str = installed_v_str = ''
    for i in range(self.CMAJOR, self.CBUILD + 1):
        target_v_str += "%04d" % ver[i]
    installed_ver = self.getDesktopVersion()
    for i in range(self.CMAJOR, self.CBUILD + 1):
        installed_v_str += "%04d" % installed_ver[i]
    tVersion = int(target_v_str)
    iVersion = int(installed_v_str)
    if (iVersion > tVersion):  # the first priority is to check for the patch version against the installed version
        return True  # if the installed ArcGIS version is greater than the patch's, it's OK to proceed.
    if (self.isLinux()):
        return True
    # if the installed ArcGIS version is lower than the intended target patch version, continue with the registry key check for the
    # possible patches installed.
    # HKEY_LOCAL_MACHINE\SOFTWARE\Wow6432Node\ESRI\Desktop10.2\Updates
    CPRODUCT_NAME = 'ProductName'
    CVERSION = 'Version'
    setupInfo = arcpy.GetInstallInfo()
    if ((CVERSION in setupInfo.keys()) == False or
            (CPRODUCT_NAME in setupInfo.keys()) == False):
        return False
    # e.g. "Desktop" + "10.2" -> registry subkey "Desktop10.2"
    key = setupInfo[CPRODUCT_NAME] + setupInfo[CVERSION]
    try:
        reg_path = "Software\\Wow6432Node\\ESRI\\%s\\Updates" % (key)
        arcgis = OpenKey(HKEY_LOCAL_MACHINE, reg_path)
        i = 0
        # Enumerate every installed update until EnumKey raises (end of list),
        # matching the patch name case-insensitively.
        while True:
            name = EnumKey(arcgis, i)
            arcgis_sub = OpenKey(HKEY_LOCAL_MACHINE, reg_path + '\\' + name)
            try:
                value, type = QueryValueEx(arcgis_sub, "Name")
                if (type == 1):  # reg_sz
                    if (value.lower().find(search_key.lower()) >= 0):
                        return True  # return true if the value is found!
            except:
                pass
            i += 1
    except:
        pass
    return False
# Module setup for the GeMS -> FGDC metadata script: constants, citation text,
# the path to the ArcGIS metadata translator, and small DOM helpers.
from GeMS_utilityFunctions import *
from xml.dom.minidom import *
debug = False
versionString = 'GeMS_FGDC1_Arc10.py, version of 20 July 2020'
addMsgAndPrint(' ' + versionString)
if debug:
    addMsgAndPrint(os.sys.path)
    addMsgAndPrint('Python version = ' + str(sys.version))
gems = 'GeMS'
gemsFullRef = '"GeMS (Geologic Map Schema)--a standard format for the digital publication of geologic maps", available at http://ngmdb.usgs.gov/Info/standards/GeMS/'
eaoverviewCitation = 'Detailed descriptions of entities, attributes, and attribute values are given in metadata for constituent elements of the database. See also ' + gemsFullRef + '.'
# Translator shipped with ArcGIS Desktop, used to export FGDC-format metadata.
translator = arcpy.GetInstallInfo(
    "desktop")["InstallDir"] + 'Metadata/Translator/ARCGIS2FGDC.xml'
###########################################
def __newElement(dom, tag, text):
    # Create <tag>text</tag> in the given DOM and return the new element.
    nd = dom.createElement(tag)
    ndText = dom.createTextNode(text)
    nd.appendChild(ndText)
    return nd
def __appendOrReplace(rootNode, newNode, nodeTag):
    # Append newNode under rootNode, or replace the existing <nodeTag> child.
    # NOTE(review): this definition is truncated in this chunk — the
    # replaceChild call is cut off mid-argument-list.
    if len(rootNode.getElementsByTagName(nodeTag)) == 0:
        rootNode.appendChild(newNode)
    else:
        rootNode.replaceChild(newNode,
def SLEM(Line, Distance, Output, TempFolder, TF):
    """Split a polyline feature class into fixed-length segments (linear
    referencing), writing the sorted result to Output.

    Parameters
    ----------
    Line : input polyline feature class/layer.
    Distance : segment length used to generate route events.
    Output : path of the sorted output feature class.
    TempFolder : folder used for the AGO branch's table export.
    TF : string flag; "true" deletes intermediate datasets.

    The branch taken depends on which rank fields the input carries:
    k=0 raw polylines, k=1 "Rank_UGO", k=2 "Order_ID" (sequenced UGO),
    k=3 "Rank_AGO" (AGO). Returns the Sort result object.
    """
    CopyLine = arcpy.CopyFeatures_management(Line, "%ScratchWorkspace%\CopyLine")
    fieldnames = [f.name for f in arcpy.ListFields(CopyLine)]
    #/identification of the polyline type : raw, UGOs, sequenced UGOs, or AGOs
    k = 0
    if "Rank_AGO" in fieldnames:
        k = 3
    elif "Order_ID" in fieldnames:
        k = 2
    elif "Rank_UGO" in fieldnames:
        k = 1
    arcpy.AddMessage(k)
    # Event-layer property string; the line-event keyword is localized in
    # French installs ("Ligne" vs "LINE").
    if re.search('french', arcpy.GetInstallInfo()['SourceDir'], re.IGNORECASE):
        props = "Rank_UGO Ligne Distance To_M"
    else:
        props = "Rank_UGO LINE Distance To_M"
    ################################
    ########## Raw polyline ########
    ################################
    #
    if k == 0:
        #/shaping of the segmented result
        arcpy.AddField_management(CopyLine, "Rank_UGO", "LONG", "", "", "", "", "NULLABLE", "NON_REQUIRED", "")
        arcpy.CalculateField_management(CopyLine, "Rank_UGO", "!" + fieldnames[0] + "!", "PYTHON_9.3", "")
        arcpy.AddField_management(CopyLine, "From_Measure", "DOUBLE", "", "", "", "", "NULLABLE", "NON_REQUIRED", "")
        arcpy.CalculateField_management(CopyLine, "From_Measure", "0", "PYTHON_9.3", "")
        arcpy.AddField_management(CopyLine, "To_Measure", "DOUBLE", "", "", "", "", "NULLABLE", "NON_REQUIRED", "")
        arcpy.CalculateField_management(CopyLine, "To_Measure", "!shape.length!", "PYTHON_9.3", "")
        #/conversion in routes
        LineRoutes = arcpy.CreateRoutes_lr(CopyLine, "Rank_UGO", "%ScratchWorkspace%\\LineRoutes", "TWO_FIELDS", "From_Measure", "To_Measure")
        #/creation of the event table
        PointEventTEMP = arcpy.CreateTable_management("%ScratchWorkspace%", "PointEventTEMP", "", "")
        arcpy.AddField_management(PointEventTEMP, "Rank_UGO", "LONG", "", "", "", "", "NULLABLE", "NON_REQUIRED", "")
        arcpy.AddField_management(PointEventTEMP, "Distance", "DOUBLE", "", "", "", "", "NULLABLE", "NON_REQUIRED", "")
        arcpy.AddField_management(PointEventTEMP, "To_M", "DOUBLE", "", "", "", "", "NULLABLE", "NON_REQUIRED", "")
        UPD_SL.UpToDateShapeLengthField(LineRoutes)
        # One event row every `Distance` units along each route.
        rowslines = arcpy.SearchCursor(LineRoutes)
        rowsevents = arcpy.InsertCursor(PointEventTEMP)
        for line in rowslines:
            tempdistance = float(0)
            while (tempdistance < float(line.Shape_Length)):
                row = rowsevents.newRow()
                row.Rank_UGO = line.Rank_UGO
                row.To_M = tempdistance + float(Distance)
                row.Distance = tempdistance
                rowsevents.insertRow(row)
                tempdistance = tempdistance + float(Distance)
        del rowslines
        del rowsevents
        #/creation of the route event layer
        MakeRouteEventTEMP = arcpy.MakeRouteEventLayer_lr(
            LineRoutes, "Rank_UGO", PointEventTEMP, props,
            "%ScratchWorkspace%\\MakeRouteEventTEMP")
        Split = arcpy.CopyFeatures_management(MakeRouteEventTEMP, "%ScratchWorkspace%\\Split", "", "0", "0", "0")
        Sort = arcpy.Sort_management(
            Split, Output, [["Rank_UGO", "ASCENDING"], ["Distance", "ASCENDING"]])
        arcpy.DeleteField_management(Sort, "To_M")
        #/calculation of the "Distance" field
        # Two parallel cursors, rows2 one row ahead of rows1: each row's
        # Distance is the previous row's Distance plus its length, resetting
        # to 0 at each new Rank_UGO.
        UPD_SL.UpToDateShapeLengthField(Sort)
        rows1 = arcpy.UpdateCursor(Sort)
        rows2 = arcpy.UpdateCursor(Sort)
        line2 = rows2.next()
        line2.Distance = 0
        rows2.updateRow(line2)
        nrows = int(str(arcpy.GetCount_management(Sort)))
        n = 0
        for line1 in rows1:
            line2 = rows2.next()
            if n == nrows - 1:
                break
            if n == 0:
                line1.Distance = 0
            if line2.Rank_UGO == line1.Rank_UGO:
                line2.Distance = line1.Distance + line1.Shape_Length
                rows2.updateRow(line2)
            if line2.Rank_UGO != line1.Rank_UGO:
                line2.Distance = 0
                rows2.updateRow(line2)
            n += 1
        #/deleting of the temporary files
        if str(TF) == "true":
            arcpy.Delete_management(Split)
            arcpy.Delete_management(CopyLine)
            arcpy.Delete_management(LineRoutes)
            arcpy.Delete_management(PointEventTEMP)
    ##################
    ###### UGO #######
    ##################
    if k == 1:
        #/shaping of the segmented result
        arcpy.AddField_management(CopyLine, "From_Measure", "DOUBLE", "", "", "", "", "NULLABLE", "NON_REQUIRED", "")
        arcpy.CalculateField_management(CopyLine, "From_Measure", "0", "PYTHON_9.3", "")
        arcpy.AddField_management(CopyLine, "To_Measure", "DOUBLE", "", "", "", "", "NULLABLE", "NON_REQUIRED", "")
        arcpy.CalculateField_management(CopyLine,
                                        "To_Measure", "!shape.length!", "PYTHON_9.3", "")
        #/conversion in routes
        LineRoutes = arcpy.CreateRoutes_lr(CopyLine, "Rank_UGO", "%ScratchWorkspace%\\LineRoutes", "TWO_FIELDS", "From_Measure", "To_Measure")
        #/creation of the event table
        PointEventTEMP = arcpy.CreateTable_management("%ScratchWorkspace%", "PointEventTEMP", "", "")
        arcpy.AddField_management(PointEventTEMP, "Rank_UGO", "LONG", "", "", "", "", "NULLABLE", "NON_REQUIRED", "")
        arcpy.AddField_management(PointEventTEMP, "Distance", "DOUBLE", "", "", "", "", "NULLABLE", "NON_REQUIRED", "")
        arcpy.AddField_management(PointEventTEMP, "To_M", "DOUBLE", "", "", "", "", "NULLABLE", "NON_REQUIRED", "")
        UPD_SL.UpToDateShapeLengthField(LineRoutes)
        # Same event generation as the raw branch, keyed by the existing Rank_UGO.
        rowslines = arcpy.SearchCursor(LineRoutes)
        rowsevents = arcpy.InsertCursor(PointEventTEMP)
        for line in rowslines:
            tempdistance = float(0)
            while (tempdistance < float(line.Shape_Length)):
                row = rowsevents.newRow()
                row.Rank_UGO = line.Rank_UGO
                row.To_M = tempdistance + float(Distance)
                row.Distance = tempdistance
                rowsevents.insertRow(row)
                tempdistance = tempdistance + float(Distance)
        del rowslines
        del rowsevents
        #/creation of the route event layer
        MakeRouteEventTEMP = arcpy.MakeRouteEventLayer_lr(
            LineRoutes, "Rank_UGO", PointEventTEMP, props,
            "%ScratchWorkspace%\\MakeRouteEventTEMP")
        Split = arcpy.CopyFeatures_management(MakeRouteEventTEMP, "%ScratchWorkspace%\\Split", "", "0", "0", "0")
        Sort = arcpy.Sort_management(
            Split, Output, [["Rank_UGO", "ASCENDING"], ["Distance", "ASCENDING"]])
        arcpy.DeleteField_management(Sort, "To_M")
        #/calculation of the "Distance" field
        UPD_SL.UpToDateShapeLengthField(Sort)
        rows1 = arcpy.UpdateCursor(Sort)
        rows2 = arcpy.UpdateCursor(Sort)
        line2 = rows2.next()
        line2.Distance = 0
        rows2.updateRow(line2)
        nrows = int(str(arcpy.GetCount_management(Sort)))
        n = 0
        for line1 in rows1:
            line2 = rows2.next()
            if n == nrows - 1:
                break
            if n == 0:
                line1.Distance = 0
            if line2.Rank_UGO == line1.Rank_UGO:
                line2.Distance = line1.Distance + line1.Shape_Length
                rows2.updateRow(line2)
            if line2.Rank_UGO != line1.Rank_UGO:
                line2.Distance = 0
                rows2.updateRow(line2)
            n += 1
        #/deleting of the temporary files
        if str(TF) == "true":
            arcpy.Delete_management(Split)
            arcpy.Delete_management(CopyLine)
            arcpy.Delete_management(LineRoutes)
            arcpy.Delete_management(PointEventTEMP)
    ################################
    ######### Sequenced UGO ########
    ################################
    if k == 2:
        #/shaping of the segmented result
        arcpy.AddField_management(CopyLine, "From_Measure", "DOUBLE", "", "", "", "", "NULLABLE", "NON_REQUIRED", "")
        arcpy.CalculateField_management(CopyLine, "From_Measure", "0", "PYTHON_9.3", "")
        arcpy.AddField_management(CopyLine, "To_Measure", "DOUBLE", "", "", "", "", "NULLABLE", "NON_REQUIRED", "")
        arcpy.CalculateField_management(CopyLine, "To_Measure", "!Shape_Length!", "PYTHON_9.3", "")
        #/conversion in routes
        LineRoutes = arcpy.CreateRoutes_lr(CopyLine, "Rank_UGO", "%ScratchWorkspace%\\LineRoutes", "TWO_FIELDS", "From_Measure", "To_Measure")
        # Carry Order_ID across from the (sorted) input onto the routes.
        arcpy.AddField_management(LineRoutes, "Order_ID", "LONG", "", "", "", "", "NULLABLE", "NON_REQUIRED", "")
        Sort = arcpy.Sort_management(Line, "%ScratchWorkspace%\\Sort", [["Rank_UGO", "ASCENDING"]])
        rows1 = arcpy.UpdateCursor(LineRoutes)
        rows2 = arcpy.SearchCursor(Sort)
        for line1 in rows1:
            line2 = rows2.next()
            line1.Order_ID = line2.Order_ID
            rows1.updateRow(line1)
        #/creation of the event table
        PointEventTEMP = arcpy.CreateTable_management("%ScratchWorkspace%", "PointEventTEMP", "", "")
        arcpy.AddField_management(PointEventTEMP, "To_M", "DOUBLE", "", "", "", "", "NULLABLE", "NON_REQUIRED", "")
        arcpy.AddField_management(PointEventTEMP, "Order_ID", "LONG", "", "", "", "", "NULLABLE", "NON_REQUIRED", "")
        arcpy.AddField_management(PointEventTEMP, "Rank_UGO", "LONG", "", "", "", "", "NULLABLE", "NON_REQUIRED", "")
        arcpy.AddField_management(PointEventTEMP, "Distance", "DOUBLE", "", "", "", "", "NULLABLE", "NON_REQUIRED", "")
        UPD_SL.UpToDateShapeLengthField(LineRoutes)
        rowslines = arcpy.SearchCursor(LineRoutes)
        rowsevents = arcpy.InsertCursor(PointEventTEMP)
        for line in rowslines:
            tempdistance = float(0)
            while (tempdistance < float(line.Shape_Length)):
                row = rowsevents.newRow()
                row.To_M = tempdistance + float(Distance)
                row.Order_ID = line.Order_ID
                row.Rank_UGO = line.Rank_UGO
                row.Distance = tempdistance
                rowsevents.insertRow(row)
                tempdistance = tempdistance + float(Distance)
        del rowslines
        del rowsevents
        MakeRouteEventTEMP = arcpy.MakeRouteEventLayer_lr(
            LineRoutes, "Rank_UGO", PointEventTEMP, props,
            "%ScratchWorkspace%\\MakeRouteEventTEMP")
        Split = arcpy.CopyFeatures_management(MakeRouteEventTEMP, "%ScratchWorkspace%\\Split", "", "0", "0", "0")
        Sort = arcpy.Sort_management(
            Split, Output, [["Rank_UGO", "ASCENDING"], ["Distance", "ASCENDING"]])
        arcpy.DeleteField_management(Sort, "To_M")
        #/calculation of the "Distance" field
        UPD_SL.UpToDateShapeLengthField(Sort)
        rows1 = arcpy.UpdateCursor(Sort)
        rows2 = arcpy.UpdateCursor(Sort)
        line2 = rows2.next()
        line2.Distance = 0
        rows2.updateRow(line2)
        # NOTE(review): this branch counts Split while the others count Sort,
        # and guards with >= instead of == — confirm whether intentional.
        nrows = int(str(arcpy.GetCount_management(Split)))
        n = 0
        for line1 in rows1:
            line2 = rows2.next()
            if n >= nrows - 1:
                break
            if n == 0:
                line1.Distance = 0
            if line2.Rank_UGO == line1.Rank_UGO:
                line2.Distance = line1.Distance + line1.Shape_Length
                rows2.updateRow(line2)
            if line2.Rank_UGO != line1.Rank_UGO:
                line2.Distance = 0
                rows2.updateRow(line2)
            n += 1
        #/deleting of the temporary files
        if str(TF) == "true":
            arcpy.Delete_management(Split)
            arcpy.Delete_management(CopyLine)
            arcpy.Delete_management(LineRoutes)
            arcpy.Delete_management(PointEventTEMP)
    #############
    #### AGO ####
    #############
    if k == 3:
        #/shaping of the segmented result
        arcpy.AddField_management(CopyLine, "From_Measure", "DOUBLE", "", "", "", "", "NULLABLE", "NON_REQUIRED", "")
        arcpy.CalculateField_management(CopyLine, "From_Measure", "0", "PYTHON_9.3", "")
        arcpy.AddField_management(CopyLine, "To_Measure", "DOUBLE", "", "", "", "", "NULLABLE", "NON_REQUIRED", "")
        # French installs name the geometry token "forme" instead of "shape".
        try:
            arcpy.CalculateField_management(CopyLine, "To_Measure", "!shape.length!", "PYTHON_9.3", "")
        except:
            arcpy.CalculateField_management(CopyLine, "To_Measure", "!forme.length!", "PYTHON_9.3", "")
        #/conversion in routes
        LineRoutes = arcpy.CreateRoutes_lr(CopyLine, "Rank_AGO", "%ScratchWorkspace%\\LineRoutes", "TWO_FIELDS", "From_Measure", "To_Measure")
        arcpy.AddField_management(LineRoutes, "Order_ID", "LONG", "", "", "", "", "NULLABLE", "NON_REQUIRED", "")
        arcpy.AddField_management(LineRoutes, "Rank_UGO", "LONG", "", "", "", "", "NULLABLE", "NON_REQUIRED", "")
        arcpy.AddField_management(LineRoutes, "AGO_Val", "DOUBLE", "", "", "", "", "NULLABLE", "NON_REQUIRED", "")
        UPD_SL.UpToDateShapeLengthField(LineRoutes)
        # Export the attribute table to a semicolon-delimited text file and
        # read the rank columns back (decimal comma normalized to a point).
        Ext.Export(CopyLine, TempFolder, "ExportTable")
        fichier = open(TempFolder + "\\ExportTable.txt", 'r')
        Order_ID = []
        Rank_UGO = []
        Dist = []
        Rank_AGO = []
        AGO_Val = []
        head = fichier.readline().split('\n')[0].split(';')
        iOrder_ID = head.index("Order_ID")
        iRank_UGO = head.index("Rank_UGO")
        iRank_AGO = head.index("Rank_AGO")
        iAGO_Val = head.index("AGO_Val")
        for l in fichier:
            Order_ID.append(int(l.split('\n')[0].split(';')[iOrder_ID]))
            Rank_UGO.append(int(l.split('\n')[0].split(';')[iRank_UGO]))
            Rank_AGO.append(float(l.split('\n')[0].split(';')[iRank_AGO]))
            AGO_Val.append(
                float(l.split('\n')[0].split(';')[iAGO_Val].replace(',', '.')))
        p = 0
        rows1 = arcpy.UpdateCursor(LineRoutes)
        for line1 in rows1:
            line1.Order_ID = Order_ID[p]
            line1.Rank_UGO = Rank_UGO[p]
            line1.Rank_AGO = Rank_AGO[p]
            line1.AGO_Val = AGO_Val[p]
            rows1.updateRow(line1)
            p += 1
        #/creation of the event table
        PointEventTEMP = arcpy.CreateTable_management("%ScratchWorkspace%", "PointEventTEMP", "", "")
        arcpy.AddField_management(PointEventTEMP, "Distance_From_Start", "DOUBLE", "", "", "", "", "NULLABLE", "NON_REQUIRED", "")
        arcpy.AddField_management(PointEventTEMP, "To_M", "DOUBLE", "", "", "", "", "NULLABLE", "NON_REQUIRED", "")
        arcpy.AddField_management(PointEventTEMP, "Order_ID", "LONG", "", "", "", "", "NULLABLE", "NON_REQUIRED", "")
        arcpy.AddField_management(PointEventTEMP, "Rank_UGO", "LONG", "", "", "", "", "NULLABLE", "NON_REQUIRED", "")
        arcpy.AddField_management(PointEventTEMP, "Rank_AGO", "LONG", "", "", "", "", "NULLABLE", "NON_REQUIRED", "")
        arcpy.AddField_management(PointEventTEMP, "AGO_Val", "DOUBLE", "", "", "", "", "NULLABLE", "NON_REQUIRED", "")
        rowslines = arcpy.SearchCursor(LineRoutes)
        rowsevents = arcpy.InsertCursor(PointEventTEMP)
        for line in rowslines:
            tempdistance = float(0)
            while (tempdistance < float(line.Shape_Length)):
                row = rowsevents.newRow()
                row.Distance_From_Start = tempdistance
                row.To_M = tempdistance + float(Distance)
                row.Order_ID = line.Order_ID
                row.Rank_UGO = line.Rank_UGO
                row.Rank_AGO = line.Rank_AGO
                row.AGO_Val = line.AGO_Val
                rowsevents.insertRow(row)
                tempdistance = tempdistance + float(Distance)
        del rowslines
        del rowsevents
        if re.search('french', arcpy.GetInstallInfo()['SourceDir'], re.IGNORECASE):
            props2 = "Rank_AGO Ligne Distance_From_Start To_M"
        else:
            props2 = "Rank_AGO LINE Distance_From_Start To_M"
        MakeRouteEventTEMP = arcpy.MakeRouteEventLayer_lr(
            LineRoutes, "Rank_AGO", PointEventTEMP, props2,
            "%ScratchWorkspace%\\MakeRouteEventTEMP")
        Split = arcpy.CopyFeatures_management(MakeRouteEventTEMP, "%ScratchWorkspace%\\Split", "", "0", "0", "0")
        arcpy.AddField_management(Split, "Distance", "LONG", "", "", "", "", "NULLABLE", "NON_REQUIRED", "")
        arcpy.CalculateField_management(Split, "Distance", "!Distance_From_Start!", "PYTHON_9.3", "")
        arcpy.DeleteField_management(Split, ["To_M", "Distance_From_Start"])
        Sort = arcpy.Sort_management(
            Split, Output, [["Order_ID", "ASCENDING"], ["Rank_UGO", "ASCENDING"],
                            ["Rank_AGO", "ASCENDING"], ["Distance", "ASCENDING"]])
        UPD_SL.UpToDateShapeLengthField(Sort)
        #/deleting of the temporary files
        if str(TF) == "true":
            arcpy.Delete_management(Split)
            arcpy.Delete_management(CopyLine)
            arcpy.Delete_management(LineRoutes)
            arcpy.Delete_management(PointEventTEMP)
    return Sort
import arcpy

# Dump every key/value reported by arcpy's install-info dictionary.
# BUG FIX: the original used a Python-2-only print statement; the
# parenthesized single-argument form below is valid in Python 2 and 3.
install = arcpy.GetInstallInfo()
for key in install:
    print("{0}:{1}".format(key, install[key]))
import datetime #startTime = datetime.datetime.now() import arcpy import os import shutil ### definicja katalogu, plikow polaczenia do bazy, projektu mxd myPath = "D:\\_exportDM\\" oracleConnector = myPath + "oracle_dzaw.sde" baza4Connector = myPath + "baza4_dzaw.sde" oracleGISPIG2Connector = myPath + "oracle_gis_pig2.sde" prjFile = os.path.join( arcpy.GetInstallInfo()["InstallDir"], "Coordinate Systems/Projected Coordinate Systems/National Grids/Europe/ETRS 1989 Poland CS92.prj" ) spatialRef = arcpy.SpatialReference(prjFile) def createGDB(tempdb_name): tmpDatabase = myPath + tempdb_name if os.path.exists(tmpDatabase): arcpy.Delete_management(tmpDatabase) #os.remove(tmpDatabase) arcpy.CreateFileGDB_management(myPath, tempdb_name) arcpy.AddMessage(' --> Utworzono baze gdb ' + tempdb_name) createGDB("geofizykaGDB.gdb")
sys.dont_write_bytecode = True # import functions from local modules from layer_functions import getworkspace, checkallhouse, readnearestroads, readnearestbuildings from find_functions import findnearestbuilding, findnearestline from curving import createcurves from orthogonalization import orthogonalizepolygons from aligning import alignpolygons from offsetting import createoffset from cls_editor import Editor from cls_timer import Timer __version__ = '1.0.1' # 10.2 wouldn't work with JSON and curves so use WKT instead json_or_wkt = u'WKT' if arcpy.GetInstallInfo()['Version'] < '10.3' else u'JSON' # default settings dictionary settings_dict = { u'Ortho Threshold': 10, u'Search Radius': 50, u'Rotation Threshold': 20, u'Offset': 10, u'Curves Radius': 5, u'Curves Angle': 0, u'Align to Roads': 0, u'Read Offset from Field': 0 } # align to roads # set default XY tolerance arcpy.env.XYTolerance = u'0.1 Meters'
# (fragment) The call that opens this URL (and the assignment to sourceCode)
# starts before this chunk; only the URL argument and .read() are visible.
    'https://prd-tnm.s3.amazonaws.com/StagedProducts/Hydrography/NHD/State/HighResolution/GDB/NHD_H_Nevada_State_GDB.xml'
).read()
# Pull the publication date(s) out of the fetched FGDC XML.
splitSource = re.findall(r'<pubdate>(.*?)</pubdate>', sourceCode)
for item in splitSource:
    pubdate = re.sub(r'<.*?>', '', item)
    print pubdate
#Make sde Connection
sde_conn = "####"
arcpy.env.workspace = sde_conn
#fcList = arcpy.ListFeatureClasses()
#for fc in fcList:
#    print fc
#Create xml file of metadata
# Export the SDE connection's metadata to ISO 19139 XML, stamped with the
# current time so repeated runs don't collide.
dir = arcpy.GetInstallInfo("desktop")["InstallDir"]
translator = dir + "Metadata/Translator/ArcGIS2ISO19139.xml"
date = time.strftime('%Y%m%d%I%M%S')
xmlFile = "#####" + date + ".xml"
arcpy.ExportMetadata_conversion(sde_conn, translator, xmlFile)
#Parse the xml file
# Read the <purpose> element and extract the 8-digit date(s) embedded in it.
myxml = minidom.parse(xmlFile)
purpose = myxml.getElementsByTagName("purpose")[0]
document = purpose.getElementsByTagName(
    "gco:CharacterString")[0].firstChild.data
date = re.findall(r'\d\d\d\d\d\d\d\d', document)
for item in date:
    metaDate = ''
    metaDate += item
    print metaDate
def execute(self, parameters, messages):
    """Geoprocessing-tool entry point: fill polygon holes ("doughnuts") by
    rebuilding each feature from its exterior rings via spatialite SQL run
    through a bundled sqlite3.exe, then copy the result to the output
    feature class.

    parameters[0] = input features, parameters[1] = output feature class.
    Relies on the module-level _tempSqlite context manager.
    """
    inFeatures = parameters[0].valueAsText
    outFeatures = parameters[1].valueAsText
    outName = r'doughnut'
    outDirName, outFcName = os.path.split(outFeatures)
    pydir = os.path.dirname(os.path.abspath(__file__))
    sqliteExe = os.path.join(
        pydir, 'mod_spatialite-4.3.0a-win-amd64/sqlite3.exe')
    # The bundled sqlite3.exe must have been downloaded beforehand.
    if (not os.path.exists(sqliteExe)):
        messages.addErrorMessage(
            'need mod_spatalite. see _download_mod_spatilite.ps1 and download it.'
        )
        return
    with _tempSqlite(None) as tmpLite:
        print(tmpLite.temp_dir)
        # Copy the input into the temp sqlite workspace as table 'doughnut'.
        arcpy.env.workspace = tmpLite.sqliteFile
        arcpy.CopyFeatures_management(inFeatures, outName)
        # ArcGIS does not have a unlock function. I cannot be unlock sqlite.
        # Work around the lock by copying the db file and using the copy.
        shutil.copyfile(tmpLite.sqliteFile,
                        os.path.join(tmpLite.temp_dir, 'calc2.sqlite'))
        arcpy.Delete_management(tmpLite.sqliteFile)
        tmpLite.sqliteFile = os.path.join(tmpLite.temp_dir, 'calc2.sqlite')
        # Find the OID and geometry column names of the input.
        fields = arcpy.ListFields(inFeatures)
        oidFieldName = None
        shpFieldName = None
        for field in fields:
            if (field.type == "OID"):
                oidFieldName = field.name
            elif (field.type == "Geometry"):
                shpFieldName = field.name
        srid = 0
        with closing(sqlite3.connect(tmpLite.sqliteFile)) as conn:
            # Build the SQL script that sqlite3.exe will execute later.
            with open(tmpLite.sqlFile, 'w') as f:
                f.write("""
SELECT load_extension('mod_spatialite');
CREATE VIRTUAL TABLE ElementaryGeometries USING VirtualElementary();
CREATE TABLE test (ORG_OID INTEGER PRIMARY KEY);
""")
                # verson check 400x ? Desktop
                installDir = os.path.join(
                    arcpy.GetInstallInfo()["InstallDir"], "bin")
                sys.path.append(installDir)
                # Use ArcGIS's own spatialite build to read the SRID in-process.
                conn.enable_load_extension(True)
                conn.execute("SELECT load_extension('spatialite400x');")
                for row in conn.execute(
                        "SELECT ST_SRID(shape) FROM doughnut limit 1;"):
                    srid = row[0]
                f.write("SELECT AddGeometryColumn('test', 'shape', " +
                        str(srid) + ",'MULTIPOLYGON', 'XY');")
                f.write("\r\n")
                # Rebuild each source feature from the exterior rings of its
                # elementary polygons, dropping all interior rings (holes).
                f.write(
                    ("INSERT INTO test "
                     "SELECT"
                     " src.oid AS ORG_OID,"
                     " ExtractMultiPolygon( GUnion(ST_MakePolygon( ST_ExteriorRing( e.geometry) ))) AS shape "
                     "FROM"
                     " doughnut src "
                     "INNER JOIN"
                     " ElementaryGeometries e "
                     "ON"
                     " e.origin_rowid = src." + oidFieldName + " " +
                     "WHERE"
                     " e.db_prefix = 'main' AND"
                     " e.f_table_name = 'doughnut' AND"
                     " e.f_geometry_column = '" + shpFieldName + "' "
                     "GROUP BY"
                     " src." + oidFieldName +
                     ";")
                )
        conn.close()
        # Run the script through the external sqlite3.exe.
        res = tmpLite.excuteSql()
        p = res['process']
        stdout_data = res['stdout']
        stderr_data = res['stderr']
        if (p.returncode != 0):
            messages.addErrorMessage(stderr_data)
        #else:
        #    print(stdout_data)
        # Copy the rebuilt table out to the requested feature class.
        arcpy.FeatureClassToFeatureClass_conversion(
            in_features=os.path.join(tmpLite.sqliteFile, "main.test"),
            out_path=outDirName,
            out_name=outFcName)
def header_and_iterator(dataset_name):
    """Returns a list of column names and an iterator over the same columns.

    On ArcGIS > 10.0, when arcpy.env.transferDomains is set, coded-value
    domain codes are translated to their descriptive values; rows come from
    arcpy.da.SearchCursor (or the legacy SearchCursor on 10.0).
    """
    fDict = {}
    if arcpy.GetInstallInfo()['Version'] != '10.0':
        # NOTE(review): if this lookup raises, transDom is never bound and
        # the `if transDom:` below raises NameError — confirm intent.
        try:
            transDom = arcpy.env.transferDomains
        except:
            pass
        if transDom:
            # check that the workspace can support domains
            allowableWorkspaces = ["LocalDatabase", "RemoteDatabase"]
            # get the workspace type
            try:
                # if stored in a gdb or folder
                dirname = os.path.dirname(dataset_name)
                wspacetype = arcpy.Describe(dirname).workspaceType
            except:
                try:
                    # if stored in a feature dataset
                    dirname = os.path.dirname(dirname)
                    wspacetype = arcpy.Describe(dirname).workspaceType
                except:
                    # if its a feature layer or tableview
                    dirname = os.path.dirname(
                        arcpy.Describe(dataset_name).catalogPath)
                    wspacetype = arcpy.Describe(dirname).workspaceType
            # If workspace type is one that may have domains ('localDatabase' or
            # 'remoteDatabase'), start looking for domains
            if wspacetype in allowableWorkspaces:
                # get a list of domains from the selected table
                fields = arcpy.ListFields(dataset_name)
                domainlist = [f.domain for f in fields if len(f.domain) > 0]
                # if the table has domains associated with it, go on
                if domainlist:
                    # create lists to populate with domains and attributes
                    dDict = {}
                    domainlist2 = []
                    # narrow domain list to only coded value domains (domainlist2),
                    # from that list, get a dictionary of all cv domains and their
                    # associated coded values (as dDict)
                    for d in arcpy.da.ListDomains(dirname):
                        if d.domainType == "CodedValue" and d.name in domainlist:
                            dDict[d.name] = d.codedValues
                            domainlist2.append(d.name)
                    # use 'dDict' to create a dictionary of the fields with their
                    # associated domain values.
                    for f in fields:
                        if f.domain in domainlist2:
                            fDict[f.name] = dDict[f.domain]
    # get a list of the fields in the selected table that arent specialized
    # (not "Geometry", "Raster", "Blob")
    data_description = arcpy.Describe(dataset_name)
    fieldnames = [
        f.name for f in data_description.fields
        if f.type not in ["Geometry", "Raster", "Blob"]
    ]
    count = len(fieldnames)

    def iterator_for_feature():
        # Generator over the dataset's rows; closes over fieldnames/fDict.
        # if using 10.0, use the old searchcursor
        if arcpy.GetInstallInfo()['Version'] == '10.0':
            cursor = arcpy.SearchCursor(dataset_name)
            for row in cursor:
                yield [getattr(row, col) for col in fieldnames]
        # otherwise use the new cursor
        else:
            cursor = arcpy.da.SearchCursor(dataset_name, fieldnames)
            # if the table has any cv domains, go about transfering them.
            if fDict:
                for row in cursor:
                    values = []
                    for i in range(0, count):
                        # if the column has a CV domain
                        if fieldnames[i] in fDict:
                            # get the codes and values of the CV domain
                            # associated with the current column
                            CVDict = fDict[fieldnames[i]]
                            # use that dictionary to write the actual value,
                            # instead of just the code
                            values.append(CVDict[row[i]])  # var3
                        else:
                            values.append(row[i])
                    yield values
            # otherwise, append the base values
            else:
                for row in cursor:
                    yield row
        del row, cursor
    return fieldnames, iterator_for_feature()
# write scenario configuration to the log for incorporation in the report
# -----------------------------------------------------------------------
dump_scenario_info_to_report(the_scenario, logger)

# 's'/'sc' tasks (re)create the scenario config DB; all others validate it.
if args.task in ['s', 'sc']:
    create_scenario_config_db(the_scenario, logger)
else:
    check_scenario_config_db(the_scenario, logger)

# check that arcpy is available if the -skip_arcpy_check flag is not set
# ----------------------------------------------------------------------
if not args.skip_arcpy_check:
    try:
        import arcpy
        arcmap_version = arcpy.GetInstallInfo()['Version']
        if arcmap_version not in [
                '10.1', '10.2', '10.2.1', '10.2.2', '10.3', '10.3.1',
                '10.4', '10.4.1', '10.5', '10.5.1', '10.6', '10.6.1',
                '10.7', '10.7.1'
        ]:
            logger.error(
                "Version {} of ArcGIS is not currently supported. Exiting."
                .format(arcmap_version))
            sys.exit()
    # BUG FIX: a missing arcpy raises ImportError, which the original
    # `except RuntimeError:` never caught — the friendly "You will need
    # ArcGIS" message was unreachable for the most common failure mode.
    except (RuntimeError, ImportError):
        logger.error(
            "You will need ArcGIS 10.1 or later to run this script. Exiting."
        )
        sys.exit()
def execute(request):
    """Package inputs to an Esri map or layer package.

    :param request: json as a dict.

    Queries the search index in CHUNK_SIZE groups, collects the matching
    layers/files, then writes either an .mpk (MPK format) or .lpk package
    next to the request's temp workspace and reports the outcome.
    """
    errors = 0
    skipped = 0
    layers = []
    files = []
    app_folder = os.path.dirname(os.path.dirname(os.path.abspath(__file__)))
    parameters = request['params']
    out_format = task_utils.get_parameter_value(parameters, 'output_format', 'value')
    summary = task_utils.get_parameter_value(parameters, 'summary')
    tags = task_utils.get_parameter_value(parameters, 'tags')
    output_file_name = task_utils.get_parameter_value(parameters, 'output_file_name')
    if not output_file_name:
        output_file_name = 'package_results'

    # Get the clip region as an extent object.
    clip_area = None
    try:
        clip_area_wkt = task_utils.get_parameter_value(parameters, 'processing_extent', 'wkt')
        clip_area = task_utils.get_clip_region(clip_area_wkt)
    except (KeyError, ValueError):
        pass

    out_workspace = os.path.join(request['folder'], 'temp')
    if not os.path.exists(out_workspace):
        os.makedirs(out_workspace)

    num_results, response_index = task_utils.get_result_count(parameters)
    # if num_results > task_utils.CHUNK_SIZE:
    # Query the index for results in groups of 25.
    query_index = task_utils.QueryIndex(parameters[response_index])
    fl = query_index.fl
    # Index endpoint comes from the CLI (sys.argv[2] is key=value).
    query = '{0}{1}{2}'.format(sys.argv[2].split('=')[1], '/select?&wt=json', fl)
    fq = query_index.get_fq()
    if fq:
        groups = task_utils.grouper(range(0, num_results), task_utils.CHUNK_SIZE, '')
        query += fq
    elif 'ids' in parameters[response_index]:
        groups = task_utils.grouper(list(parameters[response_index]['ids']), task_utils.CHUNK_SIZE, '')
    else:
        groups = task_utils.grouper(range(0, num_results), task_utils.CHUNK_SIZE, '')

    headers = {'x-access-token': task_utils.get_security_token(request['owner'])}
    status_writer.send_status(_('Starting to process...'))
    # Fetch each group from the index and accumulate packageable items.
    for group in groups:
        if fq:
            results = requests.get(query + "&rows={0}&start={1}".format(task_utils.CHUNK_SIZE, group[0]), headers=headers)
        elif 'ids' in parameters[response_index]:
            results = requests.get(query + '{0}&ids={1}'.format(fl, ','.join(group)), headers=headers)
        else:
            results = requests.get(query + "&rows={0}&start={1}".format(task_utils.CHUNK_SIZE, group[0]), headers=headers)

        input_items = task_utils.get_input_items(results.json()['response']['docs'])
        if not input_items:
            input_items = task_utils.get_input_items(parameters[response_index]['response']['docs'])
        layers, files, errors, skipped = get_items(input_items, out_workspace)
    # else:
    #     input_items = task_utils.get_input_items(parameters[response_index]['response']['docs'])
    #     layers, files, errors, skipped = get_items(input_items, out_workspace)

    if errors == num_results:
        status_writer.send_state(status.STAT_FAILED, _('No results to package'))
        return

    try:
        if out_format == 'MPK':
            # Build an .mxd from the bundled template, add all layers, then
            # package it; the PackageMap signature differs per ArcGIS version.
            shutil.copyfile(os.path.join(app_folder, 'supportfiles', 'MapTemplate.mxd'),
                            os.path.join(out_workspace, 'output.mxd'))
            mxd = arcpy.mapping.MapDocument(os.path.join(out_workspace, 'output.mxd'))
            if mxd.description == '':
                mxd.description = os.path.basename(mxd.filePath)
            df = arcpy.mapping.ListDataFrames(mxd)[0]
            for layer in layers:
                arcpy.mapping.AddLayer(df, layer)
            mxd.save()
            status_writer.send_status(_('Generating {0}. Large input {1} will take longer to process.'.format('MPK', 'results')))
            if arcpy.GetInstallInfo()['Version'] == '10.0':
                arcpy.PackageMap_management(mxd.filePath,
                                            os.path.join(os.path.dirname(out_workspace), '{0}.mpk'.format(output_file_name)),
                                            'PRESERVE', extent=clip_area)
            elif arcpy.GetInstallInfo()['Version'] == '10.1':
                # 10.1 uses the capitalized ArcGISRuntime keyword.
                arcpy.PackageMap_management(mxd.filePath,
                                            os.path.join(os.path.dirname(out_workspace), '{0}.mpk'.format(output_file_name)),
                                            'PRESERVE', extent=clip_area, ArcGISRuntime='RUNTIME', version='10',
                                            additional_files=files, summary=summary, tags=tags)
            else:
                arcpy.PackageMap_management(mxd.filePath,
                                            os.path.join(os.path.dirname(out_workspace), '{0}.mpk'.format(output_file_name)),
                                            'PRESERVE', extent=clip_area, arcgisruntime='RUNTIME', version='10',
                                            additional_files=files, summary=summary, tags=tags)
            # Create a thumbnail size PNG of the mxd.
            task_utils.make_thumbnail(mxd, os.path.join(request['folder'], '_thumb.png'))
        else:
            # Layer package (.lpk) path; layers need non-empty descriptions.
            status_writer.send_status(_('Generating {0}. Large input {1} will take longer to process.'.format('LPK', 'results')))
            for layer in layers:
                if layer.description == '':
                    layer.description = layer.name
            if arcpy.GetInstallInfo()['Version'] == '10.0':
                arcpy.PackageLayer_management(layers,
                                              os.path.join(os.path.dirname(out_workspace), '{0}.lpk'.format(output_file_name)),
                                              'PRESERVE', extent=clip_area, version='10')
            else:
                arcpy.PackageLayer_management(layers,
                                              os.path.join(os.path.dirname(out_workspace), '{0}.lpk'.format(output_file_name)),
                                              'PRESERVE', extent=clip_area, version='10',
                                              additional_files=files, summary=summary, tags=tags)
            # Create a thumbnail size PNG of the mxd.
            task_utils.make_thumbnail(layers[0], os.path.join(request['folder'], '_thumb.png'))
    except (RuntimeError, ValueError, arcpy.ExecuteError) as ex:
        status_writer.send_state(status.STAT_FAILED, repr(ex))
        return

    # Update state if necessary.
    if errors > 0 or skipped:
        status_writer.send_state(status.STAT_WARNING, _('{0} results could not be processed').format(errors + skipped))
    task_utils.report(os.path.join(request['folder'], '__report.json'), num_results - (skipped + errors), skipped, errors, errors_reasons, skipped_reasons)
def execute(self, parameters, messages):
    """Build a near table (shortest connecting lines) using SpatiaLite.

    parameters[0]: source feature class
    parameters[1]: optional destination feature class; when empty, the source
                   is matched against itself (excluding each feature's self).
    parameters[2]: output feature class path

    The inputs are copied into a temporary SQLite database, a SpatiaLite SQL
    script is generated that finds, for each source feature, the nearest
    (destination) feature together with ST_Distance/ST_ShortestLine, the
    script is run through the bundled sqlite3.exe, and the resulting
    NearTable is exported to the requested output feature class.
    """
    srcFeatures = parameters[0].valueAsText
    dstFeatures = parameters[1].valueAsText
    outFeatures = parameters[2].valueAsText
    outName = r'srcfc'
    dstOutName = r'dstfc'
    outDirName, outFcName = os.path.split(outFeatures)
    # sqlite3.exe with mod_spatialite must have been downloaded next to this
    # script (see the helper PowerShell script referenced below).
    pydir = os.path.dirname(os.path.abspath(__file__))
    sqliteExe = os.path.join(pydir,
                             'mod_spatialite-4.3.0a-win-amd64/sqlite3.exe')
    if not os.path.exists(sqliteExe):
        messages.addErrorMessage(
            'need mod_spatalite. see _download_mod_spatilite.ps1 and download it.'
        )
        return
    with _tempSqlite(None) as tmpLite:
        print(tmpLite.temp_dir)
        arcpy.env.workspace = tmpLite.sqliteFile
        arcpy.CopyFeatures_management(srcFeatures, outName)
        if dstFeatures:
            arcpy.CopyFeatures_management(dstFeatures, dstOutName)
        # ArcGIS has no unlock function for the sqlite file, so work around
        # the lock by copying the database and deleting the locked original.
        shutil.copyfile(tmpLite.sqliteFile,
                        os.path.join(tmpLite.temp_dir, 'calc2.sqlite'))
        arcpy.Delete_management(tmpLite.sqliteFile)
        tmpLite.sqliteFile = os.path.join(tmpLite.temp_dir, 'calc2.sqlite')
        # NOTE: these field names are collected but not used below; kept for
        # compatibility with the original flow.
        fields = arcpy.ListFields(srcFeatures)
        oidFieldName = None
        shpFieldName = None
        for field in fields:
            if field.type == "OID":
                oidFieldName = field.name
            elif field.type == "Geometry":
                shpFieldName = field.name
        srid = 0
        # closing() guarantees the connection is released before sqlite3.exe
        # runs against the same file.
        with closing(sqlite3.connect(tmpLite.sqliteFile)) as conn:
            with open(tmpLite.sqlFile, 'w') as f:
                # version check 400x ? Desktop
                installDir = os.path.join(
                    arcpy.GetInstallInfo()["InstallDir"], "bin")
                sys.path.append(installDir)
                conn.enable_load_extension(True)
                conn.execute("SELECT load_extension('spatialite400x');")
                for row in conn.execute(
                        "SELECT ST_SRID(shape) FROM srcfc limit 1;"):
                    srid = row[0]
                addWhere = ""
                if dstFeatures:
                    for row in conn.execute(
                            "SELECT ST_SRID(shape) FROM dstfc limit 1;"):
                        if srid != row[0]:
                            # BUG FIX: was `message.addWarningMessage(...)`,
                            # which raised NameError -- the parameter passed
                            # to this method is `messages`.
                            messages.addWarningMessage(
                                _("FeatureClass has a different SRID."))
                else:
                    # Self-join: match srcfc against itself, excluding the
                    # identity pair.
                    addWhere = " AND s.OBJECTID <> d.OBJECTID "
                    dstOutName = outName
                f.write("""
SELECT load_extension('mod_spatialite');
CREATE TABLE NearTable (
    OID INTEGER PRIMARY KEY,
    NEARID INTEGER,
    DISTANCE REAL
);
SELECT AddGeometryColumn('NearTable', 'shape', """ + str(srid) + """,'LINESTRING', 'XY');
INSERT INTO NearTable
SELECT src.OBJECTID OID,
       dst.OBJECTID NEARID,
       ST_Distance(src.SHAPE, dst.SHAPE) DISTANCE,
       ST_ShortestLine(src.SHAPE, dst.SHAPE) SHAPE
FROM srcfc src CROSS JOIN """ + dstOutName + """ dst
WHERE dst.OBJECTID = (SELECT s.OBJECTID
                      FROM """ + dstOutName + """ AS s CROSS JOIN srcfc d
                      WHERE src.OBJECTID = d.OBJECTID """ + addWhere + """
                      ORDER BY ST_Distance(d.Shape, s.Shape)
                      LIMIT 1)
;""")
        res = tmpLite.excuteSql()
        p = res['process']
        stdout_data = res['stdout']
        stderr_data = res['stderr']
        if p.returncode != 0:
            print(stderr_data)
        arcpy.FeatureClassToFeatureClass_conversion(
            in_features=os.path.join(tmpLite.sqliteFile, "main.NearTable"),
            out_path=outDirName,
            out_name=outFcName)
# cleans up metadata in an enterprise gdb import os import sys import shutil import arcpy from datetime import datetime start = datetime.now() if arcpy.GetLogHistory(): arcpy.SetLogHistory(False) install_dir = arcpy.GetInstallInfo()['InstallDir'] ss1 = 'Metadata\\Stylesheets\\gpTools\\remove geoprocessing history.xslt' ss2 = 'Metadata\\Stylesheets\\gpTools\\remove local storage info.xslt' ss3 = 'Metadata\\Stylesheets\\gpTools\\remove empty elements.xslt' sspath1 = install_dir + ss1 sspath2 = install_dir + ss2 sspath3 = install_dir + ss3 outXML = 'c:\\XML_out' if os.path.exists(outXML): shutil.rmtree(outXML) os.mkdir(outXML) sde = 'Database Connections\GIS_gis_sde.sde' conn = arcpy.ArcSDESQLExecute(sde) sql = '''select owner, table_name from layers order by owner, table_name''' query = conn.execute(sql)
def main(*argv):
    """Append a local dataset's features to a hosted feature service layer.

    Positional args (argv):
        0 username, 1 password, 2 siteURL, 3 fsId (feature service item id),
        4 layerName, 5 dataToAppend (dataset path), 6 projection,
        7 lowerCaseFieldNames ('TRUE'/'FALSE'), 8 showFullResponse.
    On any failure, writes "false" to script-tool output parameter 9.
    NOTE: Python 2 only (`except X, e` handler syntax below).
    """
    # Pre-declare state up front; some names (results, fl, existingDef) are
    # unused in this function body -- presumably kept for cleanup elsewhere.
    fsId = None
    layerName = None
    dataToAppend = None
    fst = None
    fs = None
    results = None
    fl = None
    existingDef = None
    scratchGDB = None
    scratchLayName = None
    scratchLayer = None
    try:
        arcpy.env.overwriteOutput = True
        proxy_port = None
        proxy_url = None
        # Connection/security settings consumed by featureservicetools.
        securityinfo = {}
        securityinfo['proxy_url'] = proxy_url
        securityinfo['proxy_port'] = proxy_port
        securityinfo['referer_url'] = None
        securityinfo['token_url'] = None
        securityinfo['certificatefile'] = None
        securityinfo['keyfile'] = None
        securityinfo['client_id'] = None
        securityinfo['secret_id'] = None
        username = argv[0]
        password = argv[1]
        siteURL = argv[2]
        # ArcGIS 10.0 - 10.2 require explicit credentials (no built-in auth).
        version = arcpy.GetInstallInfo()['Version']
        if re.search("^10\.[0-2]", version) is not None:
            bReqUserName = True
        else:
            bReqUserName = False
        # Reject placeholder/empty credentials when they are mandatory.
        if bReqUserName and \
           (username == None or username == "#" or str(username).strip() == "" or \
            password == None or password== "#" or password== "*" or str(password).strip() == ""):
            outputPrinter(
                "{0} Requires a username and password".format(version),
                typeOfMessage='error')
            return
        if bReqUserName:
            securityinfo['security_type'] = 'Portal'  #LDAP, NTLM, OAuth, Portal, PKI
            securityinfo['username'] = username
            securityinfo['password'] = password
            securityinfo['org_url'] = siteURL
        else:
            securityinfo['security_type'] = 'ArcGIS'  #LDAP, NTLM, OAuth, Portal, PKI
        fsId = argv[3]
        layerName = argv[4]
        dataToAppend = argv[5]
        projection = argv[6]
        lowerCaseFieldNames = argv[7]
        showFullResponse = argv[8]
        # Stage the data in the scratch workspace under a random name.
        scratchGDB = arcpy.env.scratchWorkspace
        scratchLayName = random_string_generator()
        scratchLayer = os.path.join(scratchGDB, scratchLayName)
        # Script-tool booleans arrive as strings; normalize to bool.
        if str(lowerCaseFieldNames).upper() == 'TRUE':
            lowerCaseFieldNames = True
        else:
            lowerCaseFieldNames = False
        fst = featureservicetools.featureservicetools(securityinfo)
        if fst.valid:
            fs = fst.GetFeatureService(itemId=fsId, returnURLOnly=False)
            if not fs is None:
                if arcpy.Exists(dataset=dataToAppend) == True:
                    lyr = arcpy.Describe(dataToAppend)
                    result = arcpy.GetCount_management(dataToAppend)
                    count = int(result.getOutput(0))
                    outputPrinter(message="\t\t%s features" % (count))
                    if count > 0:
                        # Either project into the scratch layer or plain-copy.
                        if projection is not None and projection != "#" and \
                           projection.strip() !='':
                            outputPrinter(message="\t\tProjecting %s" % (lyr.name))
                            result = arcpy.Project_management(
                                dataToAppend, scratchLayer, projection)
                        else:
                            outputPrinter(
                                message="\t\tCopying %s feature from %s" %
                                (count, lyr.name))
                            arcpy.FeatureClassToFeatureClass_conversion(
                                dataToAppend, scratchGDB, scratchLayName)
                        # Densify curved geometry so the service upload keeps
                        # its shape (services store straight segments).
                        desc = arcpy.Describe(scratchLayer)
                        if desc.shapeType == 'Polygon':
                            outputPrinter(message="\t\tDensifying %s" % lyr.name)
                            arcpy.Densify_edit(scratchLayer, "ANGLE",
                                               "33 Unknown", "0.33 Unknown",
                                               "4")
                        if desc.shapeType == 'Polyline':
                            outputPrinter(message="\t\tDensifying %s" % lyr.name)
                            arcpy.Densify_edit(scratchLayer, "ANGLE",
                                               "33 Unknown", "0.33 Unknown",
                                               "4")
                        syncLayer(fst, fs, scratchLayer, layerName, lyr.name,
                                  lowerCaseFieldNames, showFullResponse)
                        outputPrinter(message="\tComplete")
                        outputPrinter(message="\t")
                    else:
                        outputPrinter(
                            message=
                            "\t\t%s does not contain any features, skipping" %
                            lyr.name)
                        outputPrinter(message="\tComplete")
                        outputPrinter(message="\t")
                else:
                    outputPrinter(message="\t%s does not exist" % dataToAppend)
                    outputPrinter(message="\tComplete")
                    outputPrinter(message="\t")
            else:
                outputPrinter(
                    message="\tFeature Service with id %s was not found" % fsId,
                    typeOfMessage='error')
                arcpy.SetParameterAsText(9, "false")
        else:
            outputPrinter(message=fst.message, typeOfMessage="error")
            arcpy.SetParameterAsText(9, "false")
    except arcpy.ExecuteError:
        # Geoprocessing failure: report location and arcpy's own messages.
        line, filename, synerror = trace()
        outputPrinter(message="error on line: %s" % line,
                      typeOfMessage='error')
        outputPrinter(message="error in file name: %s" % filename,
                      typeOfMessage='error')
        outputPrinter(message="with error message: %s" % synerror,
                      typeOfMessage='error')
        outputPrinter(message="ArcPy Error Message: %s" % arcpy.GetMessages(2),
                      typeOfMessage='error')
        arcpy.SetParameterAsText(9, "false")
    except (common.ArcRestHelperError), e:
        # ArcREST helper failure (Python 2 handler syntax).
        outputPrinter(message=e, typeOfMessage='error')
        arcpy.SetParameterAsText(9, "false")
try: print "Starting at " + time.strftime("%c") ## This section returns the system information for the user. This is used to help ## debug any potential errors returned by the tool. This information does not compromise ## the annonymity of the user or their system. ##--------------------------------------------------------------------------------------- # Get information about ArcGIS version installed... arcpy.SetProgressorLabel("Reading installation information...") arcpy.AddMessage("--------------------------------") installList = arcpy.ListInstallations() for install in installList: arcpy.AddMessage("Product: ArcGIS for " + install + " " + arcpy.GetInstallInfo(install)["Version"] + " SP " + arcpy.GetInstallInfo()["SPNumber"] + ", Build " + arcpy.GetInstallInfo()["BuildNumber"]) print("Product: ArcGIS for " + install + " " + arcpy.GetInstallInfo(install)["Version"] + " SP " + arcpy.GetInstallInfo()["SPNumber"] + ", Build " + arcpy.GetInstallInfo()["BuildNumber"]) arcpy.AddMessage("Installed on: " + arcpy.GetInstallInfo()["InstallDate"] + " " + arcpy.GetInstallInfo()["InstallTime"]) print("Installed on: " + arcpy.GetInstallInfo()["InstallDate"] + " " + arcpy.GetInstallInfo()["InstallTime"]) arcpy.AddMessage("Using " + arcpy.ProductInfo() + " license level") print("Using " + arcpy.ProductInfo() + " license level") arcpy.AddMessage("Script Version 1.0") print("Script Version 1.0")
import _winreg as wreg if __name__ == '__main__': try: userDirectory = arcpy.GetParameterAsText(0) if not os.path.exists(userDirectory): AddMsgAndPrint(userDirectory + " does NOT exist!",2) exit() AddMsgAndPrint("\nUser Name: " + os.environ['USERNAME'].replace('.',' ')) AddMsgAndPrint("Computer Name: " + os.environ['COMPUTERNAME']) """ ------------------------------------- Get ESRI Product Name and Version -------------------------------------------""" if not arcpy.ProductInfo == 'NotInitialized': productName = str(arcpy.GetInstallInfo()['ProductName']) productVersion = str(arcpy.GetInstallInfo()['Version']) if productVersion.count('.') > 1: versionSplit = productVersion.split('.') versionKey = productName + str(versionSplit[0]) + "." + str(versionSplit[1]) del versionSplit else: versionKey = productName + productVersion AddMsgAndPrint("ESRI Product: " + productName) AddMsgAndPrint("ESRI Version: " + productVersion) else: AddMsgAndPrint("\nNo ESRI License found on " + str(os.environ['COMPUTERNAME']) + " Exiting!",2)
def Execute(self, parameters, messages):
    """Sweep Fuzzy Membership parameter combinations and evaluate them.

    Parses per-raster membership parameter ranges (midpoint and spread
    min/max/count), then either draws membership plots only, or runs the
    full Fuzzy Membership -> Fuzzy Overlay -> ROC pipeline.
    """
    try:
        # Save Workspace and set constants and variables
        enviWorkspace = env.workspace
        plotname = ""
        # Sentinel: "?" means no CSV file is currently open, so the except
        # handlers know whether to close it.
        csvfile = "?"
        # Get and print version information
        with open(
                os.path.join(os.path.dirname(__file__),
                             "arcsdm_version.txt"), "r") as myfile:
            data = myfile.readlines()
        arcpy.AddMessage("%-20s %s" % ("", data[0]))
        installinfo = arcpy.GetInstallInfo()
        arcpy.AddMessage("%-20s %s (%s)" %
                         ("Arcgis environment: ", installinfo['ProductName'],
                          installinfo['Version']))
        # Load parameters...
        # Param 0: Input rasters, Fuzzy Membership functions and parameters, DETable, multiValue=1, Required, Input
        # columns = Input raster, Membership type, Midpoint Min, Midpoint Max, Midpoint Count, Spread Min, Spread Max, Spread Count
        # Membership types: Small, Large
        # Example: rc_till_co Gaussian 1 4 4 2 5 4;rc_till_ca Large 2 5 4 3 4 2;rc_till_au Small 3 4 2 4 5 2;...
        memberParams = parameters[0].valueAsText.split(';')
        arcpy.AddMessage("memberParams = " + str(memberParams))
        # Param 1: Draw only Fuzzy Membership plots
        plots = parameters[1].value
        if (not plots and len(memberParams) < 2):
            arcpy.AddError("ERROR: Minimum number of Input Rasters is 2.")
            raise arcpy.ExecuteError
        # Param 2: ROC True Positives Feature Class, DEFeatureClass, Required, Input
        truepositives = parameters[2].valueAsText
        trueDescr = arcpy.Describe(truepositives)
        true_coord_system = trueDescr.spatialReference.name
        # Param 3: Output Folder, DEFolder, Required, Input, "File System"
        output_folder = parameters[3].valueAsText
        # Param 4: Fuzzy Overlay Parameters, DETable, Required, Input
        # when plots = False
        # columns = Overlay type, Parameter
        # Overlay types: And, Or, Product, Sum, Gamma
        # parameters[1] = And
        # (or: Gamma 5)
        overlayParams = parameters[4].valueAsText.split(' ')
        # Param 5: Plot display method (only used when plots = True)
        page_type = ""
        display_method = parameters[5].valueAsText
        if display_method == "To Window(s)":
            page_type = "Win"
        elif display_method == "To PDF file(s)":
            page_type = "pdf"
        elif display_method == "To PNG file(s)":
            page_type = "png"
        # Remove old files and datasets
        cleanup(output_folder, enviWorkspace)
        # Check and collect input parameters of all rasters; each raster
        # contributes one entry per list (parallel lists by raster index).
        inputRasters = []
        memberTypes = []
        midmins = []
        midmaxes = []
        midcounts = []
        midpointcount = 0
        midsteps = []
        spreadmins = []
        spreadmaxes = []
        spreadcounts = []
        spreadsteps = []
        spreadcountcount = 0
        for memberParam in memberParams:
            fmparams = memberParam.split(' ')
            if (len(fmparams) != 8):
                arcpy.AddError(
                    "ERROR: Wrong number of parameters in '" + memberParam +
                    "'. Required: raster function Midpoint-Min Midpoint-Max Midpoint-Count Spread-Min Spread-Max Spread-Count"
                )
                raise arcpy.ExecuteError
            # Input Raster Name and Membership type
            inputRasters.append(fmparams[0])
            memberTypes.append(fmparams[1])
            # Accumulate a plot file name: raster1-type1_raster2-type2_...
            if len(plotname) > 0:
                plotname = plotname + "_"
            plotname = plotname + os.path.basename(
                fmparams[0]) + "-" + fmparams[1]
            # Midpoint Min value ("" / "#" are the tool's empty markers)
            if fmparams[2] == "" or fmparams[2] == "#":
                arcpy.AddError("MidPoint Min value of " + str(fmparams[0]) +
                               " is missing.")
                raise arcpy.ExecuteError
            midmin = float(fmparams[2])
            midmins.append(midmin)
            # MidPoint Max value cannot be smaller than Midpoint Min
            if fmparams[3] == "" or fmparams[3] == "#":
                arcpy.AddError("MidPoint Max value of " + str(fmparams[0]) +
                               " is missing.")
                raise arcpy.ExecuteError
            midmax = float(fmparams[3])
            if (midmax < midmin):
                arcpy.AddError(
                    "ERROR: Midpoint Max cannot be smaller than Midpoint Min.")
                raise arcpy.ExecuteError
            midmaxes.append(midmax)
            # MidPoint Count must be at least 1
            if fmparams[4] == "" or fmparams[4] == "#":
                arcpy.AddError("MidPoint Count value of " + str(fmparams[0]) +
                               " is missing.")
                raise arcpy.ExecuteError
            midcount = int(fmparams[4])
            if (midcount < 1):
                arcpy.AddError("ERROR: Midpoint Count must be at least 1.")
                raise arcpy.ExecuteError
            midcounts.append(midcount)
            midpointcount = midpointcount + midcount
            # Calculate midpoint step (sweep increment between min and max;
            # falls back to 1 when the range is degenerate)
            midstep = 0
            if (midmax > midmin):
                midstep = (midmax - midmin) / (midcount - 1)
            if midstep == 0:
                midstep = 1
            midsteps.append(midstep)
            # Spread Min
            if fmparams[5] == "" or fmparams[5] == "#":
                arcpy.AddError("Spread Min value of " + str(fmparams[0]) +
                               " is missing.")
                raise arcpy.ExecuteError
            spreadmin = float(fmparams[5])
            spreadmins.append(spreadmin)
            # Spread Max cannot be smaller than Spread Min
            if fmparams[6] == "" or fmparams[6] == "#":
                arcpy.AddError("Spread Max value of " + str(fmparams[0]) +
                               " is missing.")
                raise arcpy.ExecuteError
            spreadmax = float(fmparams[6])
            if (spreadmax < spreadmin):
                arcpy.AddError(
                    "ERROR: Spread Max cannot be smalled than Spread Min.")
                raise arcpy.ExecuteError
            spreadmaxes.append(spreadmax)
            # Spread Count must be at least 1
            if fmparams[7] == "" or fmparams[7] == "#":
                arcpy.AddError("Spread Count value of " + str(fmparams[0]) +
                               " is missing.")
                raise arcpy.ExecuteError
            spreadcount = int(fmparams[7])
            if (spreadcount < 1):
                arcpy.AddError("ERROR: Spread Count must be at least 1.")
                raise arcpy.ExecuteError
            spreadcounts.append(spreadcount)
            spreadcountcount = spreadcountcount + spreadcount
            # Calculate spread step (same degenerate-range fallback as above)
            spreadstep = 0
            if (spreadmax > spreadmin):
                spreadstep = (spreadmax - spreadmin) / (spreadcount - 1)
            if spreadstep == 0:
                spreadstep = 1
            spreadsteps.append(spreadstep)
        # Draw only Fuzzy Membership plots
        if plots:
            draw_plots(memberParams, memberTypes, truepositives, inputRasters,
                       output_folder, page_type, spreadcountcount, spreadmins,
                       spreadmaxes, spreadsteps, midpointcount, midmins,
                       midmaxes, midsteps, plotname)
        else:
            # Run Fuzzy Memberships, Fuzzy Overlays and ROC Tool
            calculation(inputRasters, output_folder, true_coord_system,
                        memberParams, memberTypes, midcounts, midmins,
                        midmaxes, midsteps, spreadcounts, spreadmins,
                        spreadmaxes, spreadsteps, overlayParams, truepositives,
                        enviWorkspace)
    except arcpy.ExecuteError:
        # Geoprocessing failure path: close any open CSV, relay messages.
        arcpy.AddMessage("*" * 30)
        if csvfile != "?":
            csvfile.close()
        arcpy.AddError(arcpy.GetMessages(2))
        arcpy.AddError('Aborting FuzzyROC2 (1)')
    except:
        # Unexpected failure path: dump Python traceback + arcpy messages.
        arcpy.AddMessage("*" * 30)
        tb = sys.exc_info()[2]
        tbinfo = traceback.format_tb(tb)[0]
        pymsg = "PYTHON ERRORS:\nTraceback info:\n" + tbinfo + "\nError Info:\n" + str(
            sys.exc_info()[1])
        msgs = "ArcPy ERRORS:\n" + arcpy.GetMessages(2) + "\n"
        arcpy.AddError(pymsg)
        arcpy.AddError(msgs)
        if csvfile != "?":
            csvfile.close()
        arcpy.AddError('Aborting FuzzyROC2 (2)')
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions and limitations under the License.''' ################################################################################ import sqlite3, os, csv import arcpy class CustomError(Exception): pass try: # Check the user's version ArcVersionInfo = arcpy.GetInstallInfo("desktop") ProductName = ArcVersionInfo['ProductName'] # Collect user inputs stop_times_file = arcpy.GetParameterAsText(0) SQLDbase = arcpy.GetParameterAsText(1) # ----- Turn stop_times.txt into a SQL table ----- # Grab the data from the stop_times.txt file and insert it into the SQL table arcpy.AddMessage("Inserting stop_times.txt data into SQL table...") col_idxs = [] with open(stop_times_file) as f: # Get the csv data reader = csv.reader(f)
def switchPointsToNodes(routes, county_indexing, nodes_data):
    """ change the x-y coordinates for non-bus stops to actual nodes
        return a list of node entries to append to the node file """
    #return: stop_nodes = ['N','X','Y','COUNTY','MODE','TYPE','ID']
    # Pass 1: give every distinct stop coordinate a temporary id and record
    # which transit modes (routes[route][3]) use it.
    # NOTE: `id` shadows the builtin; kept as-is (doc-only change).
    point_list = {}
    point_mode = {}
    id = 100  # to ensure we aren't accidentally matching ids
    for route in routes:
        for point in routes[route][2]:
            if not point in point_list:
                point_list[point] = id
                point_mode[id] = {}
                id += 1
            point_mode[point_list[point]][routes[route][
                3]] = None  # will be used to check if nodes are shared by different modes
    arcpy.env.workspace = WORKING_GDB
    arcpy.env.overwriteOutput = True
    pt = arcpy.Point()
    # NAD83 projection file resolved under the ArcGIS install directory --
    # assumes the Desktop coordinate-system folder layout (TODO confirm).
    out_coordinate_system = os.path.join(arcpy.GetInstallInfo()['InstallDir'],
                                         NAD_83_DIRECTORY)
    spatial_ref = arcpy.SpatialReference(out_coordinate_system)
    # Build point geometries; point_order remembers which temporary id each
    # geometry (by insertion order) corresponds to.
    pt_geoms = []
    point_order = []
    for point in point_list:
        pt.X = point[0]
        pt.Y = point[1]
        point_order.append(point_list[point])
        pt_geoms.append(arcpy.PointGeometry(pt, spatial_ref))
    nodes_layer = 'non_bus_nodes'
    if arcpy.Exists(nodes_layer):
        arcpy.Delete_management(nodes_layer)
    arcpy.CopyFeatures_management(pt_geoms, nodes_layer)
    node_map = {}
    stop_nodes = []
    node_layer = 'node_layer'
    arcpy.MakeFeatureLayer_management(nodes_layer, node_layer)
    county_layer = 'county_layer'
    nc_layer = 'node_county_layer'
    neg_counter = -100  #placeholder for old ids
    # Pass 2: per county, select the stops that fall inside it and assign
    # each one the county's next node number (county_indexing is mutated).
    for i in range(len(COUNTY_MAP)):
        arcpy.MakeFeatureLayer_management(
            SOURCE_COUNTY, county_layer,
            COUNTY_ID_FIELD + " = '" + COUNTY_MAP[i] + "'")
        arcpy.SelectLayerByLocation_management(node_layer, 'WITHIN',
                                               county_layer)
        arcpy.MakeFeatureLayer_management(node_layer, nc_layer)
        arcpy.AddXY_management(nc_layer)
        rows = arcpy.SearchCursor(nc_layer, '', '',
                                  'OBJECTID;POINT_X;POINT_Y')
        for row in rows:
            # OBJECTIDs are 1-based and follow pt_geoms insertion order --
            # presumably preserved by CopyFeatures; verify against data.
            id = point_order[row.OBJECTID - 1]
            x = row.POINT_X
            y = row.POINT_Y
            n = county_indexing[i]
            county_indexing[i] += 1
            node_map[id] = n
            modes = point_mode[id]
            if len(modes) > 1:
                print 'more than one mode for ' + str(
                    (x, y)) + ' : ' + str(modes)
            # Only the first mode is kept in the output record.
            stop_nodes.append([n, x, y, i + 1, modes.keys()[0], 0, 0])
            nodes_data[neg_counter] = [x, y, n, None]
            neg_counter -= 1
    # Pass 3: rewrite every route's stop coordinates to final node numbers.
    for route in routes:
        for point in range(len(routes[route][2])):
            routes[route][2][point] = node_map[point_list[routes[route][2]
                                                          [point]]]
    return stop_nodes
webMercator = arcpy.GetParameterAsText(5) deleteme = [] debug = False leftAngle = 0.0 # degrees rightAngle = 90.0 # degrees if (webMercator == "") or (webMercator is None): webMercator = arcpy.SpatialReference( r"WGS 1984 Web Mercator (Auxiliary Sphere)") try: currentOverwriteOutput = env.overwriteOutput env.overwriteOutput = True installInfo = arcpy.GetInstallInfo("desktop") installDirectory = installInfo["InstallDir"] GCS_WGS_1984 = os.path.join(installDirectory, r"Coordinate Systems", r"Geographic Coordinate Systems", r"World", r"WGS 1984.prj") env.overwriteOutput = True scratch = env.scratchWorkspace prjInFeature = os.path.join(scratch, "prjInFeature") arcpy.AddMessage(str(webMercator) + "\n" + prjInFeature) arcpy.AddMessage("Projecting input points to Web Mercator ...") arcpy.Project_management(inFeature, prjInFeature, webMercator) deleteme.append(prjInFeature) if traversal < 360:
"ED020": ["Swamp", 1, 0], "EE000": ["Miscellaneous Vegetation", 1, 1], "EE010": ["Logging Area", 0, 0], "EE020": ["Land devoid of vegetation", 0, 0] } try: if debug == True: arcpy.AddMessage( "Begin: " + str(time.strftime("%m/%d/%Y %H:%M:%S", time.localtime()))) scratch = env.scratchWorkspace env.overwriteOutput = True installDir = arcpy.GetInstallInfo('desktop')["InstallDir"] prjWGS1984 = arcpy.SpatialReference("WGS 1984") prjWebMercator = arcpy.SpatialReference( "WGS 1984 Web Mercator (Auxiliary Sphere)") # if AOI is Geographic, project to Web Mercator if (arcpy.Describe(input_AOI_Feature).SpatialReference.type == "Geographic" ): arcpy.AddMessage( "AOI features are Geographic, projecting to Web Mercator...") newAOI = os.path.join(scratch, "newAOI") arcpy.Project_management(input_AOI_Feature, newAOI, prjWebMercator) input_AOI_Feature = newAOI deleteme.append(newAOI) prjAOI = arcpy.Describe(input_AOI_Feature).SpatialReference
#Converts an index into a letter, labeled like excel columns, A to Z, AA to ZZ, etc. def ColIdxToXlName(index): if index < 1: raise ValueError("Index is too small") result = "" while True: if index > 26: index, r = divmod(index - 1, 26) result = chr(r + ord('A')) + result else: return chr(index + ord('A') - 1) + result #UPDATE gisVersion = arcpy.GetInstallInfo()["Version"] mxd, df, aprx, mapList = None, None, None, None isPro = False if gisVersion == "1.0": #Pro: from arcpy import mp aprx = arcpy.mp.ArcGISProject("CURRENT") maplist = aprx.listMaps()[0] isPro = True else: from arcpy import mapping mxd = arcpy.mapping.MapDocument('CURRENT') df = arcpy.mapping.ListDataFrames(mxd)[0] isPro = False # If grid size is drawn on the map, use this instead of cell width and cell height
def main():
    ''' Main tool method

    Builds a Gridded Reference Graphic (fishnet), labels its cells
    (Alpha-Numeric / Alpha-Alpha / Numeric), optionally rotates it to match
    a grid cell drawn on the map, and adds the result to the current map.
    Relies on module-level globals: templateExtent, gridSize, cellUnits,
    labelStartPos, labelStyle, tempOutput, sysPath, outputFeatureClass.
    '''
    try:
        #UPDATE
        gisVersion = arcpy.GetInstallInfo()["Version"]
        global appEnvironment
        appEnvironment = Utilities.GetApplication()
        if DEBUG == True:
            arcpy.AddMessage("App environment: " + appEnvironment)
        global mxd
        global df
        global aprx
        global mapList
        #mxd, df, aprx, mapList = None, None, None, None
        isPro = False
        #if gisVersion == "1.0": #Pro:
        #Update for automated test
        # Bind the Pro (mp) or ArcMap (mapping) document objects.
        if appEnvironment == "ARCGIS_PRO":
            from arcpy import mp
            aprx = arcpy.mp.ArcGISProject("CURRENT")
            mapList = aprx.listMaps()[0]
            isPro = True
        #else:
        #Update for automated test
        if appEnvironment == "ARCMAP":
            from arcpy import mapping
            mxd = arcpy.mapping.MapDocument('CURRENT')
            df = arcpy.mapping.ListDataFrames(mxd)[0]
            isPro = False
        # From the template extent, get the origin, y axis, and opposite corner corrdinates
        if DEBUG == True:
            arcpy.AddMessage("Getting extent info...")
        extents = str.split(str(templateExtent))
        originCoordinate = extents[0] + " " + extents[1]
        yAxisCoordinate = extents[0] + " " + extents[1]
        oppCornerCoordinate = extents[2] + " " + extents[3]
        # Midpoint of the extent, used as the rotation anchor later.
        centerPoint = str(
            (float(extents[0]) + float(extents[2])) / 2.0) + " " + str(
                (float(extents[1]) + float(extents[3])) / 2.0)
        # If grid size is drawn on the map, use this instead of cell width and cell height
        inputExtentDrawnFromMap = False
        angleDrawn = 0
        workspace = arcpy.env.workspace
        global cellWidth
        global cellHeight
        if float(cellWidth) == 0 and float(cellHeight) == 0:
            # Derive cell size (and rotation angle) from the drawn grid cell.
            if DEBUG == True:
                arcpy.AddMessage("Cell extent from features...")
            inputExtentDrawnFromMap = True
            tempGridFC = os.path.join(arcpy.env.scratchWorkspace, "GridSize")
            arcpy.CopyFeatures_management(gridSize, tempGridFC)
            pts = None
            # First four vertices of the drawn polygon define the cell.
            with arcpy.da.SearchCursor(tempGridFC,
                                       'SHAPE@XY',
                                       explode_to_points=True) as cursor:
                pts = [r[0] for r in cursor][0:4]
            arcpy.Delete_management(tempGridFC)
            cellWidth = math.sqrt((pts[0][0] - pts[1][0])**2 +
                                  (pts[0][1] - pts[1][1])**2)
            cellHeight = math.sqrt((pts[1][0] - pts[2][0])**2 +
                                   (pts[1][1] - pts[2][1])**2)
            # Find the two highest vertices; they form the cell's top edge.
            highestPoint = None
            nextHighestPoint = None
            for pt in pts:
                if highestPoint is None or pt[1] > highestPoint[1]:
                    nextHighestPoint = highestPoint
                    highestPoint = pt
                elif nextHighestPoint is None or pt[1] > nextHighestPoint[1]:
                    nextHighestPoint = pt
            topLeft = highestPoint if highestPoint[0] < nextHighestPoint[
                0] else nextHighestPoint
            topRight = highestPoint if highestPoint[0] > nextHighestPoint[
                0] else nextHighestPoint
            yDiff = topRight[1] - topLeft[1]
            xDiff = topRight[0] - topLeft[0]
            # Calculate angle of the top edge against horizontal.
            hypotenuse = math.sqrt(
                math.pow(topLeft[0] - topRight[0], 2) +
                math.pow(topLeft[1] - topRight[1], 2))
            adjacent = topRight[0] - topLeft[0]
            numberToCos = float(adjacent) / float(hypotenuse)
            angleInRadians = math.acos(numberToCos)
            angleDrawn = math.degrees(angleInRadians)
            # acos is sign-blind; flip when the edge slopes upward.
            if (topRight[1] > topLeft[1]):
                angleDrawn = 360 - angleDrawn
        else:
            if DEBUG == True:
                arcpy.AddMessage("Cell extent from (" + str(cellWidth) + "," +
                                 str(cellHeight) + ")")
            # Tool works in meters internally.
            if (cellUnits == "Feet"):
                cellWidth = float(cellWidth) * 0.3048
                cellHeight = float(cellHeight) * 0.3048
        # Set the start position for labeling
        startPos = None
        if (labelStartPos == "Upper-Right"):
            startPos = "UR"
        elif (labelStartPos == "Upper-Left"):
            startPos = "UL"
        elif (labelStartPos == "Lower-Left"):
            startPos = "LL"
        elif (labelStartPos == "Lower-Right"):
            startPos = "LR"
        # Import the custom toolbox with the fishnet tool in it, and run this. This had to be added to a model
        # because of a bug, which will now allow you to pass variables to the Create Fishnet tool.
        #UPDATE
        toolboxPath = None
        if appEnvironment == "ARCGIS_PRO":
            toolboxPath = os.path.join(os.path.dirname(sysPath),
                                       "Clearing Operations Tools.tbx")
        else:
            toolboxPath = os.path.join(os.path.dirname(sysPath),
                                       "Clearing Operations Tools_10.3.tbx")
        arcpy.ImportToolbox(toolboxPath)
        arcpy.AddMessage("Creating Fishnet Grid...")
        arcpy.Fishnet_ClearingOperations(tempOutput, originCoordinate,
                                         yAxisCoordinate, str(cellWidth),
                                         str(cellHeight), 0, 0,
                                         oppCornerCoordinate, "NO_LABELS",
                                         templateExtent, "POLYGON")
        # Sort the grid upper left to lower right, and delete the in memory one
        arcpy.AddMessage("Sorting the grid for labeling")
        tempSort = os.path.join("in_memory", "tempSort")
        arcpy.Sort_management(tempOutput, tempSort, [["Shape", "ASCENDING"]],
                              startPos)
        #arcpy.Delete_management("in_memory") # Not sure why we are deleteing in_memory
        # Add a field which will be used to add the grid labels
        arcpy.AddMessage("Adding field for labeling the grid")
        gridField = "Grid"
        arcpy.AddField_management(tempSort, gridField, "TEXT")
        # Number the fields
        arcpy.AddMessage("Numbering the grids")
        letterIndex = 1
        secondLetterIndex = 1
        letter = 'A'
        secondLetter = 'A'
        number = 1
        lastY = -9999  # sentinel: no row seen yet
        #TODO: update to use DA cursor
        cursor = arcpy.UpdateCursor(tempSort)
        for row in cursor:
            yPoint = row.getValue("SHAPE").firstPoint.Y
            # A change in Y marks a new grid row: advance the row letter and
            # reset the per-row counters (unless labels are purely numeric).
            if (lastY != yPoint) and (lastY != -9999):
                letterIndex += 1
                letter = ColIdxToXlName(letterIndex)
                if (labelStyle != "Numeric"):
                    number = 1
                    secondLetter = 'A'
                    secondLetterIndex = 1
            lastY = yPoint
            if (labelStyle == "Alpha-Numeric"):
                row.setValue(gridField, str(letter) + str(number))
            elif (labelStyle == "Alpha-Alpha"):
                row.setValue(gridField, str(letter) + str(secondLetter))
            elif (labelStyle == "Numeric"):
                row.setValue(gridField, str(number))
            cursor.updateRow(row)
            number += 1
            secondLetterIndex += 1
            secondLetter = ColIdxToXlName(secondLetterIndex)
        del row
        del cursor
        # Rotate the shape, if needed.
        if (inputExtentDrawnFromMap):
            arcpy.AddMessage("Rotating the feature")
            RotateFeatureClass(tempSort, outputFeatureClass, angleDrawn,
                               centerPoint)
        else:
            arcpy.CopyFeatures_management(tempSort, outputFeatureClass)
        arcpy.Delete_management(tempSort)
        # Get and label the output feature
        #TODO: Update once applying symbology in Pro is fixed.
        #UPDATE
        targetLayerName = os.path.basename(outputFeatureClass)
        if appEnvironment == "ARCGIS_PRO":
            results = arcpy.MakeFeatureLayer_management(
                outputFeatureClass, targetLayerName).getOutput(0)
            mapList.addLayer(results, "AUTO_ARRANGE")
            layer = findLayerByName(targetLayerName)
            if (layer):
                labelFeatures(layer, gridField)
        elif appEnvironment == "ARCMAP":
            layerToAdd = arcpy.mapping.Layer(outputFeatureClass)
            arcpy.mapping.AddLayer(df, layerToAdd, "AUTO_ARRANGE")
            targetLayerName = os.path.basename(outputFeatureClass)
            layer = findLayerByName(targetLayerName)
            if (layer):
                arcpy.AddMessage("Labeling grids")
                labelFeatures(layer, gridField)
        else:
            arcpy.AddMessage("Non-map environment, skipping labeling...")
        # Set tool output
        arcpy.SetParameter(6, outputFeatureClass)
        ### Apply symbology to the GRG layer
        ###UPDATE
        ###symbologyPath = os.path.dirname(workspace) + "\\Layers\GRG.lyr"
        ###arcpy.ApplySymbologyFromLayer_management(layer, symbologyPath)
    except arcpy.ExecuteError:
        # Get the tool error messages
        msgs = arcpy.GetMessages()
        arcpy.AddError(msgs)
        print(msgs)
    except:
        # Get the traceback object
        tb = sys.exc_info()[2]
        tbinfo = traceback.format_tb(tb)[0]
        # Concatenate information together concerning the error into a message string
        pymsg = "PYTHON ERRORS:\nTraceback info:\n" + tbinfo + "\nError Info:\n" + str(
            sys.exc_info()[1])
        msgs = "ArcPy ERRORS:\n" + arcpy.GetMessages() + "\n"
        # Return python error messages for use in script tool or Python Window
        arcpy.AddError(pymsg)
        arcpy.AddError(msgs)
        # Print Python error messages for use in Python / Python Window
        print(pymsg + "\n")
        print(msgs)
""" ArcGIS toolbox interface for twcheck.py Mike Bannister [email protected] 2017 """ import twcheck import arcpy import sys # update text updates twcheck.message = arcpy.AddMessage twcheck.warn = arcpy.AddWarning twcheck.error = arcpy.AddError fp_file = arcpy.GetParameterAsText(0) xsec_file = arcpy.GetParameterAsText(1) xs_id_field = arcpy.GetParameterAsText(2) out_file = arcpy.GetParameterAsText(3) # make sure we have the right version, MeasureOnLine() requires >= 10.2.1 info = arcpy.GetInstallInfo() if info['Version'] < '10.2.1': arcpy.AddError('Top Width Check requires ArcMap >= 10.2.1, you are using ' + info['Version'] + \ ', aborting.') sys.exit() twcheck.measure(fp_file, xsec_file, xs_id_field, out_file)
def check_arcpy(self):
    """ Checks if arcpy and which version is installed

    Returns (True, esri_info_dict) on success, or (False, message) when
    arcpy is unavailable/unlicensed and automatic configuration fails.
    May mutate sys.path and write an arcpy.pth file as a side effect.
    """
    # 3rd party libraries
    try:
        import arcpy
        esri_info = arcpy.GetInstallInfo()
        logging.info("ArcPy imported from ArcGIS {} v{} in ({})".format(
            esri_info.get("ProductName"), esri_info.get("Version"),
            esri_info.get("InstallDir")))
        # end of method
        return True, esri_info
    except RuntimeError:
        # arcpy is importable but its license check failed.
        logging.error("ArcPy is installed, but not licensed.")
        return False, "ArcGIS is installed, but not licensed."
    except ImportError:
        logging.info(
            "ArcGIS isn't in the SYSPATH. Trying to find it automatically."
        )
        # checks if ArcGIS is installed
        # NOTE(review): env.get("PROGRAMFILES(x86)") is None on 32-bit
        # Windows, which would make path.join raise TypeError -- confirm.
        if not path.isdir(path.join(env.get("PROGRAMFILES(x86)"), "ArcGIS"))\
           and not path.isdir(path.join(env.get("PROGRAMFILES"), "ArcGIS")):
            logging.info("ArcGIS isn't installed on this computer.")
            return False, "ArcGIS isn't installed on this computer."
        else:
            # NOTE(review): the second argument to env.get is a *default
            # value*, so this falls back to the literal string
            # "PROGRAMFILES", not to the %PROGRAMFILES% variable --
            # looks like a bug; verify intended behavior.
            arcgis_path = path.join(
                env.get("PROGRAMFILES(x86)", "PROGRAMFILES"), "ArcGIS")
            pass
        logging.info("ArcGIS is installed but not well configured.")
        # path to the last version of 10 branch
        # NOTE(review): i[-1] keeps only the final character of the folder
        # name, so "Desktop10.10" would be read as version digit "0";
        # only single-digit 10.x releases sort correctly here.
        v = max([
            i[-1] for i in listdir(path.realpath(arcgis_path))
            if "Desktop10" in i
        ])
        arcgis_path = path.join(arcgis_path, "Desktop10.{}".format(v))
        # adding paths to the environment
        sys.path.append(path.join(arcgis_path, "arcpy"))
        sys.path.append(path.join(arcgis_path, "bin"))
        sys.path.append(path.join(arcgis_path, "ArcToolbox\Scripts"))
        try:
            import arcpy
            import site
            esri_info = arcpy.GetInstallInfo()
            logging.info("ArcGIS configuration has been fixed.")
            # Pick the site-packages dir to drop the .pth file into.
            if hasattr(sys, 'real_prefix'):
                # inside a venv
                logging.info("Executing inside a virtualenv. Nice!")
                pypacks = [
                    p for p in sys.path if p.endswith('site-packages')
                ][-1]
            else:
                # using system install
                logging.info("Executing from the main Python install.")
                pypacks = site.getsitepackages()[1]
            # creating pth file for future runs
            with open(path.join(pypacks, 'arcpy.pth'), 'w') as pth_arcpy:
                pth_arcpy.write(
                    path.realpath(path.join(arcgis_path, "arcpy")) + "\n")
                pth_arcpy.write(
                    path.realpath(path.join(arcgis_path, "bin")) + "\n")
                pth_arcpy.write(
                    path.realpath(
                        path.join(arcgis_path, "ArcToolbox\Scripts")) + "\n")
            # end of method
            return True, esri_info
        except:
            logging.info("ArcGIS automatic configuration failed.")
            return False, "ArcGIS automatic configuration failed."
        # NOTE(review): this try-else is unreachable -- the try body always
        # returns before the else clause can run; confirm intent.
        else:
            logging.info("ArcGIS isn't installed on this computer.")
            return False, "ArcGIS isn't installed on this computer."
def Execute(self, parameters, messages):
    """Run the FuzzyROC sweep: FuzzyMembership over a parameter grid, pairwise
    FuzzyOverlay of the results, then the ArcSDM ROC tool on each overlay.

    Writes FuzzyMembership.csv, FuzzyOverlay.csv and FuzzyROC.csv into the
    output folder (parameter 4) and FM_*/FO_* rasters into the current
    workspace. `csvfile` uses the string "?" as a "no file open" sentinel so
    the except handlers know whether a CSV must be closed.
    """
    try:
        # Save Workspace so it can be restored at the end.
        enviWorkspace = env.workspace
        csvfile = "?"  # sentinel: no CSV file currently open
        #Get and print version information
        with open(
                os.path.join(os.path.dirname(__file__),
                             "arcsdm_version.txt"), "r") as myfile:
            data = myfile.readlines()
        arcpy.AddMessage("%-20s %s" % ("", data[0]))
        installinfo = arcpy.GetInstallInfo()
        arcpy.AddMessage("%-20s %s (%s)" %
                         ("Arcgis environment: ", installinfo['ProductName'],
                          installinfo['Version']))
        # Load parameters...
        # Param 0: Input raster names, GPRasterLayer, multiValue=1, Required, Input
        # parameters[0] = raster name 1;raster name 2;...
        inputRasters = parameters[0].valueAsText.split(';')
        if (len(inputRasters) < 2):
            arcpy.AddError("ERROR: Input raster count must be at least 2.")
            # NOTE(review): bare `raise` with no active exception raises a
            # RuntimeError here, which the generic except below turns into an
            # abort message — presumably intentional as an abort mechanism.
            raise
        # Param 1: Fuzzy Membership Parameters, DETable, Required, Input
        # columns = Membership type, Midpoint Min, Midpoint Max, Midpoint Count, Spread Min, Spread Max, Spread Count
        # Membership types: Gaussian, Small, Large, Near, MSLarge, MSSmall, Linear
        # parameters[1] = Gaussian 1 4 4 2 5 4;Large 2 5 4 3 4 2;Small 3 4 2 4 5 2;...
        memberParams = parameters[1].valueAsText.split(';')
        # Param 2: Fuzzy Overlay Parameters, DETable, Required, Input
        # columns = Overlay type, Parameter
        # Overlay types: And, Or, Product, Sum, Gamma
        # parameters[2] = And
        # (or: Gamma 5)
        overlayParams = parameters[2].valueAsText.split(' ')
        # Param 3: ROC True Positives Feature Class, DEFeatureClass, Required, Input
        # parameters[3] = feature class name
        truepositives = parameters[3].valueAsText
        trueDescr = arcpy.Describe(truepositives)
        true_coord_system = trueDescr.spatialReference.name
        arcpy.AddMessage("ROC True Positives: " +
                         os.path.basename(truepositives) + " " +
                         trueDescr.dataType + " " + true_coord_system)
        # Param 4: ROC Destination Folder, DEFolder, Required, Input, "File System"
        output_folder = parameters[4].valueAsText
        arcpy.AddMessage("output_folder = " + str(output_folder))
        # Remove old files from ROC output folder if any
        arcpy.AddMessage("=" * 30)
        arcpy.AddMessage("Clean up workspace...")
        arcpy.env.workspace = output_folder
        count = len(os.listdir(output_folder))
        #arcpy.AddMessage("len(os.listdir(output_folder) = " + str(count))
        if (count > 0):
            count = 0
            # Delete previous ROC result files (results*.dbf etc.).
            for oldfile in os.listdir(output_folder):
                #arcpy.AddMessage(oldfile)
                if ("results" in oldfile):
                    os.remove(output_folder + "\\" + oldfile)
                    count = count + 1
            if (count > 0):
                arcpy.AddMessage(
                    str(count) + " results* files removed from " +
                    output_folder)
        # Remove old Raster Datasets from workspace
        arcpy.env.workspace = enviWorkspace
        wsdesc = arcpy.Describe(enviWorkspace)
        arcpy.AddMessage("Workspace is " + str(enviWorkspace) +
                         " and its type is " + wsdesc.workspaceType)
        import shutil
        if (wsdesc.workspaceType == "FileSystem"):
            arcpy.AddMessage(
                "Remove all FM_ files and all FO_ files from workspace File System"
            )
            # Remove all FM_ files and all FO_ files from workspace File System
            # NOTE(review): this lists output_folder but removes from
            # enviWorkspace — looks inconsistent; confirm both point to the
            # same directory in practice.
            count = len(os.listdir(output_folder))
            if (count > 0):
                fmcount = 0
                focount = 0
                for oldfile in os.listdir(output_folder):
                    if (oldfile[0:3] == "FM_"):
                        os.remove(enviWorkspace + "\\" + oldfile)
                        fmcount = fmcount + 1
                    elif (oldfile[0:3] == "FO_"):
                        os.remove(enviWorkspace + "\\" + oldfile)
                        focount = focount + 1
                if (fmcount > 0):
                    arcpy.AddMessage(str(fmcount) + " FM_n_m files removed")
                if (focount > 0):
                    arcpy.AddMessage(str(focount) + " FO_n files removed")
            arcpy.AddMessage(
                "remove all fm_ and fo_ subdirectories and subfiles")
            # remove all fm_ and fo_ subdirectories and subfiles
            # (GRID rasters are stored as lower-case directories on disk)
            count = 0
            for olddir in os.listdir(enviWorkspace):
                if os.path.isdir(os.path.join(enviWorkspace, olddir)):
                    if (olddir[0:3] == "fm_" or olddir[0:3] == "fo_"):
                        shutil.rmtree(enviWorkspace + "\\" + olddir)
                        count = count + 1
            if (count > 0):
                arcpy.AddMessage(
                    str(count) + " fm_n_m or fo_n directories removed")
        else:
            arcpy.AddMessage(
                "remove all raster datasets from File Geodatabase")
            # remove all raster datasets from File Geodatabase
            count = len(arcpy.ListRasters())
            #arcpy.AddMessage("len(arcpy.ListRasters()) = " + str(count))
            if (count > 0):
                count = 0
                for raster in arcpy.ListRasters():
                    #arcpy.AddMessage(raster)
                    if (raster[0:3] == "FM_" or raster[0:3] == "FO_"):
                        arcpy.Delete_management(raster)
                        count = count + 1
                if (count > 0):
                    arcpy.AddMessage(
                        str(count) +
                        " FM_ or FO_ raster datasets removed from " +
                        enviWorkspace)
        # Open CSV file to log the membership sweep (one row per FM output).
        csvfile = open(output_folder + "\\FuzzyMembership.csv", "w")
        csvfile.write(
            "Raster;Function;Midpoint Min;MidPoint Max;Midpoint Step;Spread Min;Spread Max;Spread Step;Result\n"
        )
        # Run Fuzzy Memberships for each raster file and parameter combination
        arcpy.AddMessage("=" * 30)
        arcpy.AddMessage("Run Fuzzy Memberships...")
        ir = -1
        outputs = ""  # NOTE(review): appears unused below
        fmcount = 0
        for inputRaster in inputRasters:
            # Check Coordinate System of all Input Rasters must be same as True Positives Feature Class
            inputDescr = arcpy.Describe(inputRaster)
            coord_system = inputDescr.spatialReference.name
            arcpy.AddMessage("Input Raster: " +
                             os.path.basename(inputRaster) + " " +
                             inputDescr.dataType + " " + coord_system)
            #arcpy.AddMessage("Coordinate System of " + inputRaster + " is " + str(coord_system))
            if (true_coord_system != coord_system):
                arcpy.AddError("ERROR: Coordinate System must be " +
                               true_coord_system)
                raise
            # Run Fuzzy Membership
            ir = ir + 1  # first one is 0
            fmnum = -1  # first one is 0
            for memberParam in memberParams:
                # memberparam =
                # Gaussian 1 4 4 2 5 4 (function Midpoint Min Max Count Spread Min Max Count)
                fmparams = memberParam.split(' ')
                if (len(fmparams) != 7):
                    arcpy.AddError(
                        "ERROR: Wrong number of parameters in '" +
                        memberParam +
                        "'. Required: function Midpoint-Min Midpoint-Max Midpoint-Count Spread-Min Spread-Max Spread-Count"
                    )
                    raise
                # Convert member params to numeric ("" or "#" mean missing).
                if fmparams[1] == "" or fmparams[1] == "#":
                    arcpy.AddError("MidPoint Min value of " +
                                   str(inputRaster) + " is missing.")
                    raise
                midmin = float(fmparams[1])
                if fmparams[2] == "" or fmparams[2] == "#":
                    arcpy.AddError("MidPoint Max value of " +
                                   str(inputRaster) + " is missing.")
                    raise
                midmax = float(fmparams[2])
                if (midmax < midmin):
                    # NOTE(review): the condition rejects Max < Min, but the
                    # message says the opposite ("must be less than") — the
                    # wording looks inverted; confirm intended phrasing.
                    arcpy.AddError(
                        "ERROR: Midpoint Max must be less than Midpoint Min.")
                    raise
                if fmparams[3] == "" or fmparams[3] == "#":
                    arcpy.AddError("MidPoint Count value of " +
                                   str(inputRaster) + " is missing.")
                    raise
                midcount = int(fmparams[3])
                if (midcount < 1):
                    arcpy.AddError("ERROR: Midpoint Count must be at least 1.")
                    raise
                # Step size of the midpoint sweep; 0 when Min == Max (single value).
                midstep = 0
                if (midmax > midmin):
                    midstep = (midmax - midmin) / (midcount - 1)
                if fmparams[4] == "" or fmparams[4] == "#":
                    arcpy.AddError("Spread Min value of " + str(inputRaster) +
                                   " is missing.")
                    raise
                spreadmin = float(fmparams[4])
                if fmparams[5] == "" or fmparams[5] == "#":
                    arcpy.AddError("Spread Max value of " + str(inputRaster) +
                                   " is missing.")
                    raise
                spreadmax = float(fmparams[5])
                if (spreadmax < spreadmin):
                    # NOTE(review): same inverted wording as the Midpoint check.
                    arcpy.AddError(
                        "ERROR: Spread Max must be less than Spread Min.")
                    raise
                if fmparams[6] == "" or fmparams[6] == "#":
                    arcpy.AddError("Spread Count value of " +
                                   str(inputRaster) + " is missing.")
                    raise
                spreadcount = int(fmparams[6])
                if (spreadcount < 1):
                    arcpy.AddError("ERROR: Spread Count must be at least 1.")
                    raise
                # Step size of the spread sweep; 0 when Min == Max (single value).
                spreadstep = 0
                if (spreadmax > spreadmin):
                    spreadstep = (spreadmax - spreadmin) / (spreadcount - 1)
                # Loop asked count times from mimimum parameter values to maximum values
                # (midmin/spreadmin double as the loop variables and are
                # advanced by the step; spreadmin is reset for each midpoint).
                while (midmin <= midmax):
                    spreadmin = float(fmparams[4])
                    while (spreadmin <= spreadmax):
                        fmnum = fmnum + 1
                        # Output raster name: FM_<raster index>_<combination index>
                        fmout = "FM_" + str(ir) + "_" + str(fmnum)
                        arcpy.AddMessage(fmout)
                        csvfile.write(inputRaster + ";" + fmparams[0] + ";" +
                                      str(midmin) + ";" + str(midmax) + ";" +
                                      str(midstep) + ";" + str(spreadmin) +
                                      ";" + str(spreadmax) + ";" +
                                      str(spreadstep) + ";" + fmout + "\n")
                        fmcount = fmcount + 1
                        # Run Fuzzy Memberships by function
                        if (fmparams[0] == "Gaussian"):
                            outFzyMember = FuzzyMembership(
                                inputRaster, FuzzyGaussian(midmin, spreadmin))
                        elif (fmparams[0] == "Large"):
                            outFzyMember = FuzzyMembership(
                                inputRaster, FuzzyLarge(midmin, spreadmin))
                        elif (fmparams[0] == "Linear"):
                            outFzyMember = FuzzyMembership(
                                inputRaster, FuzzyLinear(midmin, spreadmin))
                        elif (fmparams[0] == "MSLarge"):
                            outFzyMember = FuzzyMembership(
                                inputRaster, FuzzyMSLarge(midmin, spreadmin))
                        elif (fmparams[0] == "MSSmall"):
                            outFzyMember = FuzzyMembership(
                                inputRaster, FuzzyMSSmall(midmin, spreadmin))
                        elif (fmparams[0] == "Near"):
                            outFzyMember = FuzzyMembership(
                                inputRaster, FuzzyNear(midmin, spreadmin))
                        elif (fmparams[0] == "Small"):
                            outFzyMember = FuzzyMembership(
                                inputRaster, FuzzySmall(midmin, spreadmin))
                        outFzyMember.save(fmout)
                        if (spreadstep == 0):
                            break
                        spreadmin = spreadmin + spreadstep
                    if (midstep == 0):
                        break
                    midmin = midmin + midstep
        csvfile.close()
        arcpy.AddMessage(
            str(fmcount) + " FM outputs saved to " + env.workspace)
        csvfile = "?"  # reset sentinel: CSV closed
        # Define ROC Tool (Receiver Operator Characteristics)
        import arcgisscripting
        gp = arcgisscripting.create()
        parentfolder = os.path.dirname(sys.path[0])
        tbxpath = os.path.join(parentfolder, "toolbox\\arcsdm.pyt")
        arcpy.ImportToolbox(tbxpath)
        # Open CSV file to log the overlay combinations.
        csvfile = open(output_folder + "\\FuzzyOverlay.csv", "w")
        csvfile.write("Output;Input 1;Input 2\n")
        # Run Fuzzy Overlays and ROC
        arcpy.AddMessage("=" * 30)
        arcpy.AddMessage("Run Fuzzy Overlays and ROC...")
        num = -1
        #if (fmnum == 0):
        #    fmnum=1
        # Pair every FM output of raster i with every FM output of raster i+1
        # and run the overlay + ROC on each pair.
        # NOTE(review): this assumes every raster produced the same number of
        # FM outputs (fmnum from the last loop iteration) — confirm.
        for i in range(0, ir):  # raster files
            for j in range(
                    0, fmnum + 1
            ):  # output tables or output files (FM_x_y) per raster file, first half
                for k in range(
                        0, fmnum + 1
                ):  # output tables or output files (FM_x_y) per raster file, last half
                    num = num + 1
                    arcpy.AddMessage("FO_" + str(num) + " = FM_" + str(i) +
                                     "_" + str(j) + " + FM_" + str(i + 1) +
                                     "_" + str(k))
                    csvfile.write("FO_" + str(num) + ";FM_" + str(i) + "_" +
                                  str(j) + ";FM_" + str(i + 1) + "_" +
                                  str(k) + "\n")
                    overlays = [
                        "FM_" + str(i) + "_" + str(j),
                        "FM_" + str(i + 1) + "_" + str(k)
                    ]
                    # Gamma is the only overlay type taking an extra parameter.
                    if (overlayParams[0] == "Gamma"):
                        outFzyOverlay = FuzzyOverlay(overlays, "Gamma",
                                                     overlayParams[1])
                    else:
                        outFzyOverlay = FuzzyOverlay(overlays,
                                                     overlayParams[0])
                    outFzyOverlay.save("FO_" + str(num))
                    #arcpy.AddMessage("FO_" + str(num) + " saved to " + env.workspace)
                    result = arcpy.ROCTool_ArcSDM(truepositives, "",
                                                  "FO_" + str(num),
                                                  output_folder)
        arcpy.AddMessage(
            str(num + 1) + " FO outputs saved to " + env.workspace)
        csvfile.close()
        csvfile = "?"  # reset sentinel: CSV closed
        arcpy.AddMessage(" ")
        arcpy.AddMessage("Get AUC values from ROC output databases...")
        # Open CSV file to collect the final ROC results.
        csvfile = open(output_folder + "\\FuzzyROC.csv", "w")
        csvfile.write("Model;Auc\n")
        # Get AUC values from ROC output databases
        fields = ['MODEL', 'AUC']
        arcpy.env.workspace = output_folder
        for inputfile in arcpy.ListFiles("results*.dbf"):
            # 'OID=0': read only the first record of each results table.
            with arcpy.da.SearchCursor(inputfile, fields, 'OID=0') as cursor:
                for row in cursor:
                    csvfile.write(row[0] + ";" + str(row[1]) + "\n")
        csvfile.close()
        csvfile = "?"  # reset sentinel: CSV closed
        # Restore Workspace
        arcpy.env.workspace = enviWorkspace
    except arcpy.ExecuteError:
        # A geoprocessing tool failed: report arcpy's own messages.
        arcpy.AddMessage("*" * 30)
        if (csvfile != "?"):
            csvfile.close()
        arcpy.AddError(arcpy.GetMessages(2))
        arcpy.AddError('Aborting FuzzyROC (1)')
    except:
        # Any other error (including the bare `raise` aborts above):
        # report the Python traceback plus arcpy messages.
        arcpy.AddMessage("*" * 30)
        tb = sys.exc_info()[2]
        tbinfo = traceback.format_tb(tb)[0]
        pymsg = "PYTHON ERRORS:\nTraceback info:\n" + tbinfo + "\nError Info:\n" + str(
            sys.exc_info()[1])
        msgs = "ArcPy ERRORS:\n" + arcpy.GetMessages(2) + "\n"
        arcpy.AddError(pymsg)
        arcpy.AddError(msgs)
        if (csvfile != "?"):
            csvfile.close()
        arcpy.AddError('Aborting FuzzyROC (2)')
def execute(self, parameters, messages):
    """Build and solve an OD (origin-destination) cost matrix over an OSM network.

    Reads origins/destinations (parameters 0-5), a drive-time cutoff in meters
    (parameter 6), and either creates a new network dataset from OSM data for
    the current map extent (parameter 7) or uses an existing one (parameter 8).
    The solved OD Cost Matrix layer is added to the map and saved as
    "ODCM_Network_Crash_Density.lyr".
    """
    # Load the OpenStreetMap toolbox from the ArcGIS install directory.
    instInfo = arcpy.GetInstallInfo()
    osmToolPath = instInfo[
        "InstallDir"] + r"ArcToolbox\Toolboxes\OpenStreetMap Toolbox.tbx"
    arcpy.ImportToolbox(osmToolPath)

    originTableName = parameters[0].valueAsText
    # Snap distance is "<value> <units>", e.g. "100 Meters".
    originSnapDistance = parameters[2].valueAsText + " " + parameters[
        1].valueAsText
    destinationTableName = parameters[3].valueAsText
    destinationSnapDistance = parameters[5].valueAsText + " " + parameters[
        4].valueAsText

    # Drivetime cutoff meters.
    drivetime_cutoff_meters = parameters[6].valueAsText

    # This is the current map, which should be an OSM base map.
    curMapDoc = arcpy.mapping.MapDocument("CURRENT")

    # Get the data frame from the map (see the DataFrame object of arcpy).
    dataFrame = arcpy.mapping.ListDataFrames(curMapDoc, "Layers")[0]

    # BUGFIX: identity comparison to None must use `is not None`, not `!= None`.
    if parameters[7].valueAsText is not None:
        ##
        # User chose to make a new network dataset from OSM.
        ##
        # Note that this has "\\". 10.1 has a hard time finding the directory
        # of the _ND file otherwise.
        dataset_name = parameters[7].valueAsText
        dataset_name_nd = parameters[7].valueAsText + "\\" + parameters[
            7].valueAsText + "_ND"

        # The DataFrame object has an "extent" object that has the XMin, XMax,
        # YMin, and YMax.
        extent = dataFrame.extent
        messages.addMessage(
            "Using window extents. XMin: {0}, XMax: {1}, YMin: {2}, YMax: {3}"
            .format(extent.XMin, extent.XMax, extent.YMin, extent.YMax))

        # Download the data from OSM.
        arcpy.DownloadExtractSymbolizeOSMData2_osmtools(
            extent, True, dataset_name, "OSMLayer")

        # Convert the OSM data to a network dataset.
        arcpy.OSMGPCreateNetworkDataset_osmtools(dataset_name,
                                                 "DriveGeneric.xml", "ND")
    else:
        # Use selected dataset.
        dataset_name_nd = parameters[8].valueAsText
        messages.addMessage(
            "Using existing network dataset: {0}".format(dataset_name_nd))

    # Create the OD Cost Matrix layer and get a reference to the layer.
    result = arcpy.na.MakeODCostMatrixLayer(dataset_name_nd, "OD Cost Matrix",
                                            "Length", drivetime_cutoff_meters)
    odcmLayer = result.getOutput(0)

    # The OD Cost Matrix layer will have Origins and Destinations layers. Get
    # a reference to each of these.
    odcmSublayers = arcpy.na.GetNAClassNames(odcmLayer)
    odcmOriginLayer = odcmSublayers["Origins"]
    odcmDestLayer = odcmSublayers["Destinations"]

    # Add the origins and destinations to the ODCM.
    arcpy.na.AddLocations(odcmLayer, odcmOriginLayer, originTableName, "",
                          originSnapDistance)
    arcpy.na.AddLocations(odcmLayer, odcmDestLayer, destinationTableName, "",
                          destinationSnapDistance)

    # Solve the matrix.
    arcpy.na.Solve(odcmLayer)

    # Show ODCM layer to the user.
    arcpy.mapping.AddLayer(dataFrame, odcmLayer, "TOP")

    # Save a cost matrix layer. In 10.1 there is a bug that prevents layers
    # from being added programmatically.
    odcmLayer.saveACopy("ODCM_Network_Crash_Density.lyr")
    arcpy.RefreshTOC()
    return
# Module prologue: stdlib, third-party and project imports, SSL/warning setup,
# ArcGIS version gate, and module-level counters shared by the task.
import sys  # BUGFIX: index_item() below reads sys.argv but sys was never imported
import tempfile
import shutil
import warnings
# BUGFIX: xml.etree.cElementTree is deprecated (removed in Python 3.9);
# ElementTree is the same accelerated implementation since Python 3.3.
import xml.etree.ElementTree as eTree

import requests
from requests.packages.urllib3.exceptions import InsecureRequestWarning

from utils import status
from utils import task_utils

# urllib3 warns on every unverified HTTPS request; silence when SSL trust is off.
warnings.simplefilter('ignore', InsecureRequestWarning)

# Get SSL trust setting.
verify_ssl = task_utils.get_ssl_mode()

status_writer = status.Writer()

import arcpy
# This task relies on metadata APIs that are absent in ArcGIS 10.0.
if arcpy.GetInstallInfo()['Version'] == '10.0':
    raise ImportError('write_metadata not available with ArcGIS 10.0.')

# Module-level progress/error bookkeeping shared by the task's functions.
result_count = 0
processed_count = 0.
errors_reasons = {}
skipped_reasons = {}


def index_item(id, header):
    """Re-indexes an item.

    :param id: Item's index ID
    :param header: HTTP headers dict passed through to the Solr POST request
    """
    # sys.argv[2] is expected to be "<key>=<solr base url>"; presumably set by
    # the task launcher — verify against the caller.
    solr_url = "{0}/flags?op=add&flag=__to_extract&fq=id:({1})&fl=*,[true]".format(
        sys.argv[2].split('=')[1], id)
    requests.post(solr_url, verify=verify_ssl, headers=header)
""" Name: create_enterprise_gdb.py Description: Provide connection information to a DBMS instance and create an enterprise geodatabase. Type create_enterprise_gdb.py -h or create_enterprise_gdb.py --help for usage """ # Import system modules import arcpy import optparse import os import sys # Define usage and version parser = optparse.OptionParser(usage="usage: %prog [Options]", version="%prog 1.0 for " + arcpy.GetInstallInfo()['Version']) #Define help and options parser.add_option( "--DBMS", dest="Database_type", type="choice", choices=['SQLSERVER', 'ORACLE', 'POSTGRESQL', ''], default="SQLSERVER", help="Type of enterprise DBMS: SQLSERVER, ORACLE, or POSTGRESQL.") parser.add_option("-i", dest="Instance", type="string", default="srvr-sql-esri", help="DBMS instance name") parser.add_option("-D",