Example #1
def GetRefposts():
    from arcpy import WFSToFeatureClass_conversion, CreateFileGDB_management, Delete_management
    input_WFS_server = r"http://wfs.ksdot.org/arcgis_web_adaptor/services/Structures/Reference_Post_Signs/MapServer/WFSServer?request=GetCapabilities&service=WFS"
    gdbin = GDB_In + ".gdb"
    try:
        CreateFileGDB_management(r"C:/temp", gdbname, "CURRENT")
        print "new created " + gdbin
    except:
        Delete_management(gdbin, "Workspace")
        CreateFileGDB_management(r"C:/temp", gdbname, "CURRENT")
        print "refreshed " + gdbin
    WFSToFeatureClass_conversion(input_WFS_server, fcname, gdbin, fcname)
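The snippet above relies on the module-level names GDB_In, gdbname, and fcname, which are defined elsewhere in the source file. A minimal self-contained sketch of the same refresh-then-load pattern, using hypothetical values in place of those globals:

def get_refposts_standalone():
    # All names below are stand-ins for the module-level configuration
    # used in Example #1; adjust them for your own environment.
    from arcpy import WFSToFeatureClass_conversion, CreateFileGDB_management, Delete_management, Exists
    wfs_url = r"http://wfs.ksdot.org/arcgis_web_adaptor/services/Structures/Reference_Post_Signs/MapServer/WFSServer?request=GetCapabilities&service=WFS"
    gdb_name = "RefpostScratch"                  # assumed scratch GDB name
    gdb_path = r"C:/temp/" + gdb_name + ".gdb"
    fc_name = "Reference_Post_Signs"             # assumed WFS feature type / output name
    # Recreate the scratch GDB, then pull the WFS layer into it.
    if Exists(gdb_path):
        Delete_management(gdb_path)
    CreateFileGDB_management(r"C:/temp", gdb_name)
    WFSToFeatureClass_conversion(wfs_url, fc_name, gdb_path, fc_name)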
Example #2
def create_temp_workspace(directory, name, is_gdb=True):
    """
    Creates a temp workspace for processing. If is_gdb, will create a GDB.
    Else a folder will be created.

    Required: directory -- the directory in which to create the temp GDB
              name -- the name of the temp GDB

    Optional: is_gdb -- whether or not to create a GDB. Default is True.

    Returns:  path -- the full path to the temp workspace
    """
    import os
    import shutil
    from arcpy import CreateFileGDB_management

    LOGGER.info("Creating temp workspace {0} in {1}...".format(name,
                                                               directory))

    path = os.path.join(directory, name)

    if is_gdb:
        LOGGER.log(15, "Workspace will be format GDB.")
        path = path + ".gdb"

    if os.path.isdir(path):
        LOGGER.log(15, "Temp workspace already exists; removing...")
        shutil.rmtree(path)

    if is_gdb:
        CreateFileGDB_management(directory, name)
    else:
        os.mkdir(path)

    return path
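A usage sketch for create_temp_workspace, assuming the module-level LOGGER that the excerpt expects is configured elsewhere in the file:

# Hypothetical calls: a scratch file GDB and a plain scratch folder under C:/temp.
temp_gdb = create_temp_workspace(r"C:/temp", "scratch")                         # -> C:/temp/scratch.gdb
temp_folder = create_temp_workspace(r"C:/temp", "scratch_files", is_gdb=False)  # -> C:/temp/scratch_files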
Example #3
def setupQCGDB():
    print("Setting up the QC GDB.")
    if (Exists(errorFeaturesQCGDB)):
        Delete_management(errorFeaturesQCGDB)
    else:
        pass
    CreateFileGDB_management(mainFolder, errorFeaturesQCGDBName)
Example #4
def UpdateLocalFileGDB():
    import datetime, time
    fDateTime = datetime.datetime.now()
    from arcpy import FeatureClassToFeatureClass_conversion, CreateFileGDB_management, Exists, Delete_management
    from KhubCode25.KhubCode25Config import (
        localProProjectPath, localProFileGDBWorkspace, prodDataSourceSDE,
        devDataSourceSDE, dbname, dbownername, countylines, devorprod)
    if devorprod == 'prod':
        database = prodDataSourceSDE
        print("running on " + devorprod)
    else:
        database = devDataSourceSDE
        print("running on " + devorprod)
    fileformatDateStr = fDateTime.strftime("%Y%m%d")
    localfilegdb = localProFileGDBWorkspace + '\\' + 'KhubRoadCenterlines' + fileformatDateStr + '.gdb'
    #print(fileformatDateStr)
    if Exists(localfilegdb):
        print(localfilegdb + " exists and will be deleted")
        Delete_management(localfilegdb)
        time.sleep(1)
    CreateFileGDB_management(localProFileGDBWorkspace,
                             "KhubRoadCenterlines" + fileformatDateStr,
                             "CURRENT")
    FeatureClassesUsed = [
        'All_Road_Centerlines', 'All_Road_Centerlines_D1', 'MARKUP_POINT',
        'All_Roads_Stitch_Points', 'Videolog_CURRENT_LANETRACE',
        'Videolog_CURRENT_RAMPTRACE', 'HPMS_RAMPS'
    ]
    for FeatureClass in FeatureClassesUsed:
        loopFC = localProProjectPath + '/' + database + "/" + dbname + "." + dbownername + "." + FeatureClass
        FeatureClassToFeatureClass_conversion(loopFC, localfilegdb,
                                              FeatureClass)
    FeatureClassToFeatureClass_conversion(
        localProProjectPath + '/' + countylines, localfilegdb,
        "SHARED_COUNTY_LINES")
Example #5
def create_dir(name="workspace.gdb"):
    try:
        dir = os.getcwd()
        CreateFileGDB_management(dir, name)
    except WindowsError:
        pass
    finally:
        # Note: returning inside finally suppresses any pending exception,
        # so the absolute path is returned even if CreateFileGDB failed
        # (for example, because the workspace already exists).
        return os.path.abspath(name)
Example #6
def recreateLoganProcessAdditionsGDB():
    print("Starting the recreateLoganProcessAdditionsGDB function!")
    if Exists(loganProcessAdditionsGDB):
        try:
            Delete_management(loganProcessAdditionsGDB)
        except:
            print("Could not delete the loganProcessAdditionsGDB.")
            print("Please remove any locks and try again.")
    else:
        pass

    CreateFileGDB_management(mainFolder, loganProcessAdditionsGDBName)
Example #7
def roadsNonMonoCheck():
    try:
        # Check out license
        print('The result of CheckExtension("Highways") is ' + str(CheckExtension("Highways")) + '.')
        if CheckExtension("Highways") == 'Available':
            CheckOutExtension("Highways")
            
            # Do the license check before the deletion, so that you don't
            # remove data and then not put it back in the case that the
            # license is not available.
            from arcpy import DetectNonMonotonicRoutes_locref
            
            if Exists(nonMonotonicOutputGDB):
                try:
                    Delete_management(nonMonotonicOutputGDB)
                except:
                    pass
            else:
                pass
            
            nonMonotonicOutputGDBName = returnGDBOrSDEName(nonMonotonicOutputGDB)
            
            CreateFileGDB_management(mainFolder, nonMonotonicOutputGDBName)
            time.sleep(1)
            
            DetectNonMonotonicRoutes_locref(networkToReview, nonMonotonicOutputFC, "Any", "F_Date", "T_Date", "SourceRouteId")
            
            print("The Roads & Highways Non-Monotonic routes check for " + str(networkToReview) + " has completed.\n")
            
        else:
            print('The Roads & Highways extension is not currently available.')
            print('Skipping R&H Non-Monotonicity check.')
        
    except Exception as Exception1:
        # If an error occurred, print line number and error message
        import traceback, sys
        tb = sys.exc_info()[2]
        print "Line %i" % tb.tb_lineno
        print Exception1.message
        try:
            del Exception1
        except:
            pass
    finally:
        try:
            # Check the license back in
            CheckInExtension("Highways")
        except:
            pass
Example #8
def CreateFileGDBReplica():
    outpath = "//gisdata/planning/Cart/projects/Conflation/GIS_DATA/replicas"
    outfile = "R2015102001.gdb"
    CreateFileGDB_management(outpath, outfile)
    CreateReplica_management(input_database_as_owner + "/Conflation.GEO.NG911",
                             "CHECK_OUT",
                             outpath + '/' + outfile,
                             out_name="FGDB_CHECKOUT_" + outfile[:10],
                             access_type="FULL",
                             initial_data_sender="CHILD_DATA_SENDER",
                             expand_feature_classes_and_tables="USE_DEFAULTS",
                             reuse_schema="DO_NOT_REUSE",
                             get_related_data="GET_RELATED",
                             geometry_features="",
                             archiving="ARCHIVING")
Example #9
def SpamData():
    print "refreshing processing file geodatabase in C:/temp"
    from arcpy import CreateFileGDB_management, Exists, Delete_management, ImportXMLWorkspaceDocument_management
    if Exists(spampath):
        try:
            Delete_management(spampath)
            print "deleted existing temp geodatabase"
        except:
            print "there may be a lock on your temporary processing geodatabase"

    else:
        print "no existing temp geodatabase to delete"
    try:
        CreateFileGDB_management(spam, spamdata)
        ImportXMLWorkspaceDocument_management(
            spampath,
            xmlpath,
            import_type="SCHEMA_ONLY",
            config_keyword="MAX_FILE_SIZE_4GB")
    except:
        print "start a new arcmap instance"
Example #10
def reviewData():
    try:
        print("Starting the Data Reviewer batch job at:\n" +
              str(reviewerBatchJob) + ".")
        print("For the data located in:\n" + str(workspaceToReview) + ".")
        print(
            "If one of the feature classes, Routes or CalPts, does not exist in the place that the"
        )
        print(
            "data reviewer batch job looks for it, then you will get an 'Unknown Error'."
        )
        print(
            "This can be remedied by updating the data reviewer batch job's workspace settings."
        )
        # Test the data reviewer part:
        if CheckExtension("datareviewer") == 'Available':
            print("Extension availability check complete.")
            CheckOutExtension("datareviewer")

            # Checking to see if the output already exists.
            # If so, remove it so that it can be recreated. -- For the errors, might need a better process, so that
            # it's possible to track where the errors were at the start and how things progressed.
            if Exists(reviewerSessionGDB):
                Delete_management(reviewerSessionGDB)
            else:
                pass

            # Create new geodatabase
            # Replace with returnGDBOrSDEPath(reviewerSessionGDB), returnGDBOrSDEName(reviewerSessionGDB)
            # or similar functions
            CreateFileGDB_management(reviewerSessionGDBFolder,
                                     returnGDBOrSDEName(reviewerSessionGDB))

            # Execute EnableDataReviewer
            EnableDataReviewer_Reviewer(reviewerSessionGDB, "#", "#",
                                        "DEFAULTS")

            # Create a new Reviewer session
            ##CreateReviewerSession_Reviewer (reviewer_workspace, session_name, {session_template}, {duplicate_checking}, {store_geometry}, {username}, {version})
            CreateReviewerSession_Reviewer(reviewerSessionGDB, reviewerSession,
                                           "", "NONE", "STORE_GEOMETRY")

            # execute the batch job
            batchJobResult = ExecuteReviewerBatchJob_Reviewer(
                reviewerSessionGDB, sessionReviewerSession, reviewerBatchJob,
                workspaceToReview)

            print("Data Reviewer batch job complete.")

            # get the output table view from the result object
            outputTable = batchJobResult.getOutput(0)

            print("The output table is called " + str(outputTable.name) + "."
                  )  # prints REVBATCHRUNTABLE

            CheckInExtension("datareviewer")

        else:
            print(
                "The 'datareviewer' extension is not available. Skipping checks."
            )

    except Exception as Exception1:
        # If an error occurred, print line number and error message
        import traceback, sys
        tb = sys.exc_info()[2]
        print "Line %i" % tb.tb_lineno
        print Exception1.message
        try:
            del Exception1
        except:
            pass
    finally:
        CheckInExtension("datareviewer")
Example #11
    def New(cls, output_dir, name, version="CURRENT"):
        result = CreateFileGDB_management(output_dir, name, version=version)
        return Geodatabase(result.getOutput(0))
Example #12
def main():
    if usePrefixSetTestingAndReporting == True:
        for prefixKey in prefixSetErrorReportingDict.keys():
            prefixAttributeQuery = prefixSetErrorReportingDict[prefixKey]
            prefixKeyItemDict = outerTestDict[prefixKey]
            prefixSetGdbBaseName = prefixKeyItemDict["prefixSetGdbBaseName"]
            prefixSetSourceGDBName = prefixSetGdbBaseName + '_Source.gdb'
            prefixSetSourceGDBLocation = os.path.join(mainFolder,
                                                      prefixSetSourceGDBName)
            routesSourceOutputLocation = os.path.join(
                prefixSetSourceGDBLocation, 'RoutesSource')

            try:
                Delete_management(
                    routesSourceFCAsALayer
                )  #pre-emptive layer delete prior to rebuilding it
                time.sleep(3)
            except:
                pass

            MakeFeatureLayer_management(routesSourceFC, routesSourceFCAsALayer)
            routesSourceSelectionClause = """ """ + str(
                prefixAttributeQuery) + """ """
            SelectLayerByAttribute_management(routesSourceFCAsALayer,
                                              "NEW_SELECTION",
                                              routesSourceSelectionClause)

            if Exists(returnGDBOrSDEPath(routesSourceOutputLocation)):
                pass
            else:
                CreateFileGDB_management(
                    mainFolder,
                    returnGDBOrSDEName(
                        returnGDBOrSDEPath(routesSourceOutputLocation)))
            # Checking to see if the output already exists.
            # If so, remove it.
            if Exists(routesSourceOutputLocation):
                print(
                    "Deleting the previous routesSourceOutputLocation at: \n" +
                    str(routesSourceOutputLocation) + ".")
                Delete_management(routesSourceOutputLocation)
                time.sleep(7)
            else:
                pass
            # Create a new file for the output.
            print(
                "Making a copy of the selection in the routesSourceFCAsALayer at: \n"
                + routesSourceOutputLocation + ".")
            CopyFeatures_management(routesSourceFCAsALayer,
                                    routesSourceOutputLocation)

            #Repeat for each of the other layers to be copied into the new *_Source.gdb.
            for itemToCopy in otherFCsToCopyList:
                itemToCopyInputLocation = os.path.join(
                    returnGDBOrSDEPath(routesSourceFC), itemToCopy)
                itemToCopyOutputLocation = os.path.join(
                    prefixSetSourceGDBLocation, itemToCopy)
                if Exists(itemToCopyOutputLocation):
                    print(
                        "Deleting the previous itemToCopyOutputLocation at: \n"
                        + str(itemToCopyOutputLocation) + ".")
                    Delete_management(itemToCopyOutputLocation)
                    time.sleep(7)
                else:
                    pass
                print("Making a copy of the itemToCopy at: \n" +
                      str(itemToCopyOutputLocation) + ".")
                CopyFeatures_management(itemToCopyInputLocation,
                                        itemToCopyOutputLocation)

    else:
        print(
            "The usePrefixSetTestingAndReporting value is not True. Will not create separate prefix set gdbs."
        )
Example #13
from arcpy import (
    env, Exists, Delete_management, CreateFileGDB_management,
    MakeFeatureLayer_management,
    DeleteFeatures_management, AddField_management, CalculateField_management,
    CreateRoutes_lr, Append_management, MakeTableView_management,
    AddJoin_management, Buffer_analysis, FlipLine_edit, RemoveJoin_management,
    GetParameterAsText, AddIndex_management, TableToTable_conversion,
    SelectLayerByAttribute_management)
NUSYS = r"Database Connections/RO@sqlgisprod_GIS_cansys.sde/GIS_CANSYS.SHARED.Nusys"
NonState = r"Database Connections/SDEPROD_SHARED.sde/SHARED.NON_STATE_SYSTEM"

destdb = r"C:/temp/Nusys_Check.gdb"
env.overwriteOutput = True

try:
    print "re-setting the working geodatabase in C:temp folder"
    if Exists(destdb):
        Delete_management(destdb)
        CreateFileGDB_management(r"C:/TEMP", "Nusys_Check.gdb")
        print "recreated the geodatabase"
    else:
        CreateFileGDB_management(r"C:/TEMP", "Nusys_Check.gdb")
        print "created the new geodatabase"
except:
    CreateFileGDB_management(r"C:/TEMP", "Nusys_Check.gdb")
    print "DB could not be deleted"

env.workspace = r"C:/TEMP/Nusys_Check.gdb"


def CreateNUSYSLocal():
    MakeFeatureLayer_management(
        NonState, "NonState",
        "(LRS_KEY LIKE '%C%' OR LRS_ROUTE_PREFIX = 'C') AND (MILEAGE_COUNTED = -1 OR SURFACE = 'Propose')"
Example #14
'''
Created on Oct 7, 2013

@author: kyleg
'''

from arcpy import Exists, CreatePersonalGDB_management, CreateFileGDB_management, Delete_management, env
from CONFIG import ws, tempmdb, tempgdb

mdb91 = ws + "//" + tempmdb
print mdb91
if Exists(mdb91):
    Delete_management(mdb91)
CreatePersonalGDB_management(ws, tempmdb, "9.1")

gdb102 = ws + "//" + tempgdb
if Exists(gdb102):
    Delete_management(gdb102)
CreateFileGDB_management(ws, tempgdb)
env.workspace = ws + "/" + tempgdb
Example #15
def GdbCreate():
    CreateFileGDB_management(workspace, gdbname)
    CreateFeatureclass_management(gdb, 'Crossings', 'POINT', '#', 'DISABLED', 'ENABLED', spatialCRS)
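GdbCreate also depends on module-level names (workspace, gdbname, gdb, spatialCRS). A self-contained sketch with hypothetical values; the real spatial reference is not shown in the excerpt, so WGS 84 is assumed here:

def gdb_create_standalone():
    from arcpy import CreateFileGDB_management, CreateFeatureclass_management, SpatialReference
    workspace = r"C:/temp"                 # assumed parent folder
    gdbname = "Crossings.gdb"              # assumed GDB name
    gdb = workspace + "/" + gdbname
    spatial_ref = SpatialReference(4326)   # assumed CRS (WGS 84)
    CreateFileGDB_management(workspace, gdbname)
    # Point feature class with M values disabled and Z values enabled, as in the original call.
    CreateFeatureclass_management(gdb, 'Crossings', 'POINT', '#', 'DISABLED', 'ENABLED', spatial_ref)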
Example #16
def StateHighwayCalibrate():
    import datetime
    startDateTime = datetime.datetime.now()
    print("starting SHS calibration at " + str(startDateTime) +
          ", it should take about 15 minutes to calibrate state system routes")
    #Calibration process completed in 0:03:36.252839 hours, minutes, seconds
    from KhubCode25.KhubCode25Config import devorprod, dbname, dbownername, localProFileGDBWorkspace, KDOTConnections, Cmlrs, prodDataSourceSDE, devDataSourceSDE
    fileformatDateStr = startDateTime.strftime("%Y%m%d")
    #theStateHighwaySegments is defined as the roadway segments intended for calibration to the EXOR measures
    if devorprod == 'prod':
        database = prodDataSourceSDE

        print("running on " + devorprod)
    else:
        database = devDataSourceSDE

        print("running on " + devorprod)
    from arcpy import FeatureClassToFeatureClass_conversion, Delete_management, FeatureVerticesToPoints_management, LocateFeaturesAlongRoutes_lr, CreateFileGDB_management, env, MakeFeatureLayer_management, SelectLayerByAttribute_management, DeleteRows_management, MakeTableView_management
    env.overwriteOutput = 1
    try:
        CreateFileGDB_management(localProFileGDBWorkspace,
                                 "KhubRoadCenterlinesTemp" + fileformatDateStr,
                                 "CURRENT")
    except:
        Delete_management(localProFileGDBWorkspace + '\\' +
                          "KhubRoadCenterlinesTemp" + fileformatDateStr + '.gdb')
        CreateFileGDB_management(localProFileGDBWorkspace,
                                 "KhubRoadCenterlinesTemp" + fileformatDateStr,
                                 "CURRENT")

    #stopped using in_memory after the upgrade to ArcGIS Pro; it doesn't work like it used to.
    #consider using in_memory in the non-Pro script environment, but for this process it probably will not make much difference

    localfilegdb = localProFileGDBWorkspace + '\\' + 'KhubRoadCenterlinesTemp' + fileformatDateStr + '.gdb'
    sdegdb = KDOTConnections + r'\\' + database + r'\\' + dbname + "." + dbownername
    firstCut = sdegdb + ".All_Road_Centerlines"
    NextIter = "_D1"
    nextCut = firstCut + NextIter
    RoadsToCalibrate = [firstCut, nextCut]

    CMLRS = sdegdb + "." + Cmlrs
    Lrm_Dict = {'COUNTY': CMLRS}
    for sderoads in RoadsToCalibrate:
        if sderoads[-3:] == "_D1":
            MakeFeatureLayer_management(
                sderoads, "lyrStateSystemSource" + NextIter,
                "LRS_ROUTE_PREFIX IN ('I', 'U', 'K') And LRS_ROUTE_SUFFIX NOT IN ('Z', 'G')",
                None, "")
            RoadCenterlines = localfilegdb + "/lyrStateSystemSource" + NextIter
            End_List = ['START', 'END']
            # First,  create points at the begin and end of each road centerline segment using Vertices to Points.
            for end in End_List:
                end_name = end + NextIter
                i_end_output = localfilegdb + "/CalibrationPoint" + end_name
                try:
                    FeatureVerticesToPoints_management(RoadCenterlines,
                                                       i_end_output, str(end))
                    #this works in Pro
                except:
                    FeatureVerticesToPoints_management(
                        "lyrStateSystemSource" + NextIter, i_end_output,
                        str(end))
        #and this is the beginning and end of a line, for which we are going to create a vertex point
        #Iterate through the LRMs to bring them into memory and do the processing for each segment begin and end point!
                for key, value in Lrm_Dict.items():
                    FeatureClassToFeatureClass_conversion(
                        value, localfilegdb, "LRM" + str(key))
                    for end in End_List:
                        outtable = localfilegdb + r"/" + str(
                            end_name) + "_" + str(key)
                        outproperties = str(key) + "_LRS POINT MEAS_" + str(
                            key)
                        if key == "STATE":
                            lrskey = str(key) + "_NQR_DESCRIPTION"
                        else:
                            lrskey = "NQR_DESCRIPTION"
                        try:
                            LocateFeaturesAlongRoutes_lr(
                                localfilegdb + r"/CalibrationPoint" +
                                str(end_name), "LRM" + str(key), lrskey,
                                "500 Feet", outtable, outproperties, "ALL",
                                "DISTANCE", "ZERO", "FIELDS", "M_DIRECTON")
                            #this works in Pro
                        except:
                            LocateFeaturesAlongRoutes_lr(
                                localfilegdb + "/CalibrationPoint" +
                                str(end_name),
                                localfilegdb + r"/LRM" + str(key), lrskey,
                                "500 Feet", outtable, outproperties, "ALL",
                                "DISTANCE", "ZERO", "FIELDS", "M_DIRECTON")
                            #this works in non-Pro script environment

                        #that LFAR function located begin/end segment points to ALL ROUTES within 500 feet of the segment endpoint

                        #for calibrating, we are only interested in the points and LFAR results where this query is NOT true:
                        qNotThisRoad = 'SUBSTRING("COUNTY_LRS",0,10) <> SUBSTRING("KDOT_LRS_KEY",0,10)'
                        #so we will delete the records where this query is true
                        try:
                            SelectLayerByAttribute_management(
                                str(end_name) + "_" + str(key),
                                "NEW_SELECTION", qNotThisRoad)
                            DeleteRows_management(
                                str(end_name) + "_" + str(key))
                            #this works in Pro Environment
                        except:
                            #SelectLayerByAttribute_management(localfilegdb+"/"+str(end)+"_"+str(key), "NEW_SELECTION", qNotThisRoad)
                            MakeTableView_management(
                                localfilegdb + "/" + str(end_name) + "_" +
                                str(key), "deleterows", qNotThisRoad, None, "")
                            DeleteRows_management("deleterows")
                            #this works in non-Pro script environment
                #this works in non-Pro script environment
        else:
            MakeFeatureLayer_management(
                sderoads, "lyrStateSystemSource",
                "LRS_ROUTE_PREFIX IN ('I', 'U', 'K') And LRS_ROUTE_SUFFIX NOT IN ('Z', 'G')",
                None, "")
            RoadCenterlines = localfilegdb + "/lyrStateSystemSource"
            End_List = ['START', 'END']
            for end in End_List:
                end_name = end
                print(end_name)
                i_end_output = localfilegdb + "/CalibrationPoint" + str(end)
                try:
                    FeatureVerticesToPoints_management(RoadCenterlines,
                                                       i_end_output, str(end))
                    #this works in Pro
                except:
                    FeatureVerticesToPoints_management("lyrStateSystemSource",
                                                       i_end_output, str(end))
        #and this is the beginning and end of a line, for which we are going to create a vertex point
        #Iterate through the LRMs to bring them into memory and do the processing for each segment begin and end point!
                for key, value in Lrm_Dict.items():
                    FeatureClassToFeatureClass_conversion(
                        value, localfilegdb, "LRM" + str(key))
                    for end in End_List:
                        outtable = localfilegdb + r"/" + str(
                            end_name) + "_" + str(key)
                        outproperties = str(key) + "_LRS POINT MEAS_" + str(
                            key)
                        if key == "STATE":
                            lrskey = str(key) + "_NQR_DESCRIPTION"
                        else:
                            lrskey = "NQR_DESCRIPTION"
                        try:
                            LocateFeaturesAlongRoutes_lr(
                                localfilegdb + r"/CalibrationPoint" +
                                str(end_name), "LRM" + str(key), lrskey,
                                "500 Feet", outtable, outproperties, "ALL",
                                "DISTANCE", "ZERO", "FIELDS", "M_DIRECTON")
                            #this works in Pro
                        except:
                            LocateFeaturesAlongRoutes_lr(
                                localfilegdb + "/CalibrationPoint" +
                                str(end_name),
                                localfilegdb + r"/LRM" + str(key), lrskey,
                                "500 Feet", outtable, outproperties, "ALL",
                                "DISTANCE", "ZERO", "FIELDS", "M_DIRECTON")
                            #this works in non-Pro script environment

                        #that LFAR function located begin/end segment points to ALL ROUTES within 500 feet of the segment endpoint

                        #for calibrating, we are only interested in the points and LFAR results where this query is NOT true:
                        qNotThisRoad = 'SUBSTRING("COUNTY_LRS",0,10) <> SUBSTRING("KDOT_LRS_KEY",0,10)'
                        #so we will delete the records where this query is true
                        #It is possible that there will be multiple rows where this query is true, this result
                        #will calculate one value, not conditional on distance, min/max, just takes the first true result I guess
                        #in situations where there are multiple results, consider dissolving these points and keeping some stats
                        #then review stats to determine appropriate value, probably the closest result
                        try:
                            SelectLayerByAttribute_management(
                                str(end_name) + "_" + str(key),
                                "NEW_SELECTION", qNotThisRoad)
                            DeleteRows_management(
                                str(end_name) + "_" + str(key))
                            #this works in Pro Environment
                        except:
                            #SelectLayerByAttribute_management(localfilegdb+"/"+str(end)+"_"+str(key), "NEW_SELECTION", qNotThisRoad)
                            MakeTableView_management(
                                localfilegdb + "/" + str(end_name) + "_" +
                                str(key), "deleterows", qNotThisRoad, None, "")
                            DeleteRows_management("deleterows")
                            #this works in non-Pro script environment
    print(
        'Calibration process completed in {} hours, minutes, seconds.'.format(
            datetime.datetime.now() - startDateTime))
Example #17
def check_exists(output, name):
    from arcpy import Exists, CreateFileGDB_management, AddMessage
    from os.path import join
    if not Exists(join(output, name)):
        AddMessage('GDB does not exist, creating...')
        CreateFileGDB_management(output, name)
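A short usage sketch for check_exists with a hypothetical output folder and GDB name:

# Creates C:/temp/results.gdb only when it does not already exist.
check_exists(r"C:/temp", "results.gdb")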
Example #18
    return ''


if __name__ == "__main__":
    print returnGDBOrSDEPath(r'C:\test.gdb')
    print returnGDBOrSDEPath(r'C:\sdeTest.sde')
    print returnGDBOrSDEPath('C:\\user\\other\\arcgdb.gdb')
    print returnGDBOrSDEPath('C:\\user\\other\\arcsde.sde')
    print returnGDBOrSDEPath(r'C:\this\is\a\shapefile.shp')

    print returnGDBOrSDEName(r'C:\test.gdb\testFeature')
    print returnGDBOrSDEName(r'C:\sdeTest.sde\sdeRoads')
    print returnGDBOrSDEName('C:\\user\\other\\arcgdb.gdb\\arcFeature')
    print returnGDBOrSDEName('C:\\user\\other\\arcsde.sde\\arcFeature2')

    print returnGDBOrSDEFolder(r'C:\test.gdb\testFeature')
    print returnGDBOrSDEFolder(r'C:\sdeTest.sde\sdeRoads')
    print returnGDBOrSDEFolder('C:\\user\\other\\arcgdb.gdb\\arcFeature')
    print returnGDBOrSDEFolder('C:\\user\\other\\arcsde.sde\\arcFeature2')

    print returnFeatureClass(r'C:\test.gdb\testFeature')
    print returnFeatureClass(r'C:\sdeTest.sde\sdeRoads')
    print returnFeatureClass('C:\\user\\other\\arcgdb.gdb\\arcFeature')
    print returnFeatureClass('C:\\user\\other\\arcsde.sde\\arcFeature2')
    print returnFeatureClass(r'C:\this\is\a\shapefile.shp')

    CreateFileGDB_management(returnGDBOrSDEFolder(r'C:\test.gdb\testFeature'),
                             returnGDBOrSDEName(r'C:\test.gdb\testFeature'))

else:
    pass
Example #19
def routeCreation():
    env.workspace = returnGDBOrSDEPath(createRoutesOutputFC)
    env.overwriteOutput = 1
    '''
    # Need to match what Transcend used. -- Done.
    routeId = 'SourceRouteId'
    measureSource = 'TWO_FIELDS'
    fMeasureField = 'SourceFromMeasure'
    tMeasureField = 'SourceToMeasure'
    coordinatePriority = 'UPPER_LEFT'
    measureFactor = 1
    measureOffset = 0
    ignoreGaps = True
    buildIndex = True
    '''
    # Modifications for County Target Network.
    routeId = 'TargetCountyLRSKey'
    measureSource = 'Shapelength'
    fMeasureField = 'SourceFromMeasure'
    tMeasureField = 'SourceToMeasure'
    coordinatePriority = 'LOWER_LEFT'
    measureFactor = 1
    measureOffset = 0
    ignoreGaps = True
    buildIndex = True

    routesOutputGDB = returnGDBOrSDEPath(createRoutesOutputFC)
    routesOutputGDBName = returnGDBOrSDEName(routesOutputGDB)
    # Need to implement a new path function to get the GDB's folder.
    routesOutputGDBFolder = mainFolder
    if Exists(routesOutputGDB):
        Delete_management(routesOutputGDB)
    else:
        pass
    CreateFileGDB_management(routesOutputGDBFolder, routesOutputGDBName)

    # Checking to see if the copy for routes output exists.
    # If so, remove it.
    #if Exists(createRoutesOutputFC):
    #    Delete_management(createRoutesOutputFC)
    #else:
    #    pass

    print("Creating the lrs routes.")
    # CreateRoutes_lr GP Tool
    CreateRoutes_lr(createRoutesInputFC, routeId, createRoutesOutputFC,
                    measureSource, fMeasureField, tMeasureField,
                    coordinatePriority, measureFactor, measureOffset,
                    ignoreGaps, buildIndex)

    print("Adding date fields to " + returnFeatureClass(createRoutesOutputFC) +
          ".")
    #Addfields:
    AddField_management(createRoutesOutputFC, "F_Date", "DATE", "", "", "",
                        "F_Date", nullable)
    pyDateExpression = '''def pyFindTheDate():
        import time
        return time.strftime("%Y/%m/%d")'''

    CalculateField_management(createRoutesOutputFC, "F_Date",
                              "pyFindTheDate()", "PYTHON_9.3",
                              pyDateExpression)
    # T_Date (Date)
    AddField_management(createRoutesOutputFC, "T_Date", "DATE", "", "", "",
                        "T_Date", nullable)

    # ---- Add route calibration point creation steps for Start & End points. ----
    MakeFeatureLayer_management(createRoutesInputFC, 'tempFeatureLayer')

    # Checking to see if the output already exists.
    # If so, remove it so that it can be recreated.
    if Exists(startCalibrationPoints):
        Delete_management(startCalibrationPoints)
    else:
        pass
    if Exists(endCalibrationPoints):
        Delete_management(endCalibrationPoints)
    else:
        pass

    # Create 2 fieldInfo objects. Turn off all the fields in each one.
    featureDesc = Describe('tempFeatureLayer')
    if featureDesc.dataType == "FeatureLayer":
        fieldInfo_For_Start_CP_Fields = featureDesc.fieldInfo
        fieldInfo_For_End_CP_Fields = featureDesc.fieldInfo
        # Use the count property to iterate through all the fields
        for index in range(0, fieldInfo_For_Start_CP_Fields.count):
            fieldInfo_For_Start_CP_Fields.setVisible(index, 'HIDDEN')
            fieldInfo_For_End_CP_Fields.setVisible(index, 'HIDDEN')

    # Turn on the needed fields.
    visibile_Fields_For_Start_CP_Layer = [routeId, 'SourceFromMeasure']
    for visibile_Field in visibile_Fields_For_Start_CP_Layer:
        tempIndex = fieldInfo_For_Start_CP_Fields.findFieldByName(
            visibile_Field)
        fieldInfo_For_Start_CP_Fields.setVisible(tempIndex, 'VISIBLE')
    # Create a feature layer that only shows the needed fields.
    MakeFeatureLayer_management(createRoutesInputFC,
                                featureLayerCL_For_Start_CP, "", "",
                                fieldInfo_For_Start_CP_Fields)
    # Use that feature layer to create the 1st calibration point set.
    FeatureVerticesToPoints_management(featureLayerCL_For_Start_CP,
                                       startCalibrationPoints, "START")

    # Turn on the needed fields.
    visibile_Fields_For_End_CP_Layer = [routeId, 'SourceToMeasure']
    for visibile_Field in visibile_Fields_For_End_CP_Layer:
        tempIndex = fieldInfo_For_End_CP_Fields.findFieldByName(visibile_Field)
        fieldInfo_For_End_CP_Fields.setVisible(tempIndex, 'VISIBLE')
    # Create a feature layer that only shows the needed fields.
    MakeFeatureLayer_management(createRoutesInputFC, featureLayerCL_For_End_CP,
                                "", "", fieldInfo_For_End_CP_Fields)
    # Use that feature layer to create the 2nd calibration point set.
    FeatureVerticesToPoints_management(featureLayerCL_For_End_CP,
                                       endCalibrationPoints, "END")

    # ---- Merge the Start & End calibration points. ----
    # Checking to see if the output already exists.
    # If so, remove it so that it can be recreated.
    if Exists(mergedCalibrationPoints):
        Delete_management(mergedCalibrationPoints)
    else:
        pass
    # RoutesSource_Start_CP.SourceRouteId to CalPts_Merge.RouteId
    # RoutesSource_End_CP.SourceRouteId to CalPts_Merge.RouteId
    mcp_Field1 = FieldMap()
    mcp_Field1.addInputField(startCalibrationPoints, routeId)
    mcp_Field1.addInputField(endCalibrationPoints, routeId)
    mcp_Field1_OutField = mcp_Field1.outputField
    mcp_Field1_OutField.name = 'RouteId'
    mcp_Field1_OutField.aliasName = 'RouteId'
    mcp_Field1_OutField.type = 'String'
    mcp_Field1_OutField.length = 50
    mcp_Field1.outputField = mcp_Field1_OutField

    # RoutesSource_Start_CP.SourceFromMeasure to CalPts_Merge.Measure
    mcp_Field2 = FieldMap()
    mcp_Field2.addInputField(startCalibrationPoints, 'SourceFromMeasure')
    mcp_Field2.addInputField(endCalibrationPoints, 'SourceToMeasure')
    mcp_Field2_OutField = mcp_Field2.outputField
    mcp_Field2_OutField.name = 'Measure'
    mcp_Field2_OutField.aliasName = 'Measure'
    mcp_Field2_OutField.type = 'Double'
    mcp_Field2.outputField = mcp_Field2_OutField

    # Create a fieldMappings object for the layer merge.
    calibrationPointsMappings = FieldMappings()
    calibrationPointsMappings.addFieldMap(mcp_Field1)
    calibrationPointsMappings.addFieldMap(mcp_Field2)

    #Merge the points together into a single feature class.
    inputMergeLayers = [startCalibrationPoints, endCalibrationPoints]
    Merge_management(inputMergeLayers, mergedCalibrationPoints,
                     calibrationPointsMappings)

    MakeFeatureLayer_management(mergedCalibrationPoints, 'tempMergedPoints')

    dissolveFields = ["RouteId", "Measure"]
    print('Dissolving points.')
    Dissolve_management('tempMergedPoints', dissolvedCalibrationPoints,
                        dissolveFields, "#", "SINGLE_PART")
Example #20
def exportErrorsToFeatureClasses(reviewTable, originGDB, errorOutputGDB,
                                 errorOutputGDBFolder):
    # Checking to see if the output already exists.
    # If so, remove it.
    if Exists(errorOutputGDB):
        Delete_management(errorOutputGDB)
    else:
        pass

    CreateFileGDB_management(errorOutputGDBFolder,
                             returnGDBOrSDEName(errorOutputGDB))

    previousWorkspace = env.workspace
    env.workspace = errorOutputGDB

    tableFields = ['ORIGINTABLE', 'CHECKTITLE', 'OBJECTID']
    newCursor = daSearchCursor(reviewTable, tableFields)

    revRows = list()

    for rowItem in newCursor:
        revRows.append(list(rowItem))

    try:
        del newCursor
    except:
        pass

    originTableList = list()
    checkTitleList = list()

    for revRowItem in revRows:
        originTableList.append(revRowItem[0])
        checkTitleList.append(revRowItem[1])

    print('Creating sets from the originTable and checkTitle lists.')
    originTableSet = set(originTableList)
    checkTitleSet = set(checkTitleList)
    print('Finished set creation.')

    originTableList = list(originTableSet)
    checkTitleList = list(checkTitleSet)

    tableAndCheckDataObjects = list()
    csvDictOfErrorFeatures = dict()

    for originTableItem in originTableList:
        print('Origin table = ' + originTableItem + '.')
        completeOriginTablePath = os.path.join(originGDB, originTableItem)
        print('The full path to the origin table is ' +
              str(completeOriginTablePath) + '.')
        tableViewName = "ReviewTable_View_" + str(originTableItem)
        originTableWhereClause = """"ORIGINTABLE" = '""" + str(
            originTableItem) + """'"""
        try:
            Delete_management(tableViewName)
        except:
            pass
        MakeTableView_management(reviewTable, tableViewName,
                                 originTableWhereClause)

        for checkTitleItem in checkTitleList:
            print('Check title = ' + checkTitleItem + '.')
            selectionWhereClause = """"CHECKTITLE" = '""" + str(
                checkTitleItem) + """'"""
            SelectLayerByAttribute_management(tableViewName, "NEW_SELECTION",
                                              selectionWhereClause)
            countResult = GetCount_management(tableViewName)
            intCount = int(countResult.getOutput(0))

            if intCount >= 1:
                tempTableAndCheckData = tableAndCheckData(
                    originTableItem, checkTitleItem)
                tableViewFields = ["RECORDID", "OBJECTID"]

                newCursor = daSearchCursor(tableViewName, tableViewFields,
                                           selectionWhereClause)

                newOIDList = list()

                for cursorItem in newCursor:
                    newOIDList.append(cursorItem[1])

                try:
                    del newCursor
                except:
                    pass

                tempTableAndCheckData.listOfOIDsToUse = newOIDList

                tableAndCheckDataObjects.append(tempTableAndCheckData)
            else:
                print("There were no features selected for the " +
                      tableViewName + " table.")

    print("There are " + str(len(tableAndCheckDataObjects)) +
          " different items in the tableAndCheckDataObjects list.")

    for listObject in tableAndCheckDataObjects:

        featureLayerForErrorOutput = 'FeatureClassAsFeatureLayer'

        if Exists(featureLayerForErrorOutput):
            Delete_management(featureLayerForErrorOutput)
        else:
            pass

        fullPathToFeatureClass = os.path.join(originTablesGDB,
                                              listObject.tableName)

        MakeFeatureLayer_management(fullPathToFeatureClass,
                                    featureLayerForErrorOutput)

        # build the selection list & select up to but not more than 999 features at a time
        OIDTotalCounter = 0
        errorOutputWhereClause = """ "OBJECTID" IN ("""

        for errorOID in listObject.listOfOIDsToUse:
            if OIDTotalCounter <= 998:
                errorOutputWhereClause = errorOutputWhereClause + str(
                    errorOID) + """, """
                OIDTotalCounter += 1
            else:
                # Remove the trailing ", " and add a closing parenthesis.
                errorOutputWhereClause = errorOutputWhereClause[:-2] + """) """
                SelectLayerByAttribute_management(featureLayerForErrorOutput,
                                                  "ADD_TO_SELECTION",
                                                  errorOutputWhereClause)

                OIDTotalCounter = 0
                errorOutputWhereClause = """ "OBJECTID" IN ("""
                errorOutputWhereClause = errorOutputWhereClause + str(
                    errorOID) + """, """

        # Remove the trailing ", " and add a closing parenthesis.
        errorOutputWhereClause = errorOutputWhereClause[:-2] + """) """
        SelectLayerByAttribute_management(featureLayerForErrorOutput,
                                          "ADD_TO_SELECTION",
                                          errorOutputWhereClause)

        ##print "Counting..."
        selectedErrorsResult = GetCount_management(featureLayerForErrorOutput)
        selectedErrorsCount = int(selectedErrorsResult.getOutput(0))

        # export the selected data with the correct tableName & checkTitle
        outputFeatureClassName = formatCheckTitle(
            listObject.checkTitle) + "ErrorsFrom_" + listObject.tableName
        fullPathToOutputFeatureClass = os.path.join(errorOutputGDB,
                                                    outputFeatureClassName)

        csvDictOfErrorFeatures[outputFeatureClassName] = str(
            selectedErrorsCount)

        print(
            str(selectedErrorsCount) + "\t features will be written to \t" +
            outputFeatureClassName)
        if selectedErrorsCount >= 1:
            CopyFeatures_management(featureLayerForErrorOutput,
                                    fullPathToOutputFeatureClass)
            time.sleep(25)
            AddField_management(outputFeatureClassName, "OptionalInfo", "TEXT",
                                "", "", 250, "ReviewingInfo", nullable)
        else:
            pass

    # Need to write a short CSV here that tells the number and type of errors.
    print('Writing error information to an error reports file called ' +
          str(errorReportCSVName) + '.')
    try:
        with open(errorReportCSV, 'w') as fHandle:
            for errorFeature in errorReportRowsOrder:
                if errorFeature in csvDictOfErrorFeatures:
                    errorFeatureCount = csvDictOfErrorFeatures[errorFeature]
                    fHandle.write(
                        str(errorFeature) + ', ' + str(errorFeatureCount) +
                        '\n')
                else:
                    fHandle.write(str(errorFeature) + ', ' + str(0) + '\n')
            # Add a blank line to match previous formatting.
            fHandle.write('\n')
    except:
        print("There was an error writing to the file.")

    # Modify this so that it just checks for the existence of the Roads
    # & Highways check output, rather than relying on the config
    # file for whether or not this check should be run.
    # The config file can tell the full process whether or not
    # to run the R&H check, but the error report should give
    # details on the R&H check regardless of whether the config file
    # currently states that the R&H check should be run again
    # were the full process to run.

    env.workspace = previousWorkspace
Example #21
import datetime

print str(datetime.datetime.now()) + " starting arcpy function imports"
from arcpy import DefineProjection_management, CreateFileGDB_management, Append_management, TruncateTable_management, AddJoin_management, Project_management, CalculateField_management, MakeTableView_management, Exists, Delete_management, MakeFeatureLayer_management, env, FeatureClassToFeatureClass_conversion, AddField_management

print str(datetime.datetime.now()) + " setting variables"
env.overwriteOutput = True
labmertCC = "PROJCS['NAD_83_Kansas_Lambert_Conformal_Conic_Meters',GEOGCS['GCS_North_American_1983',DATUM['D_North_American_1983',SPHEROID['GRS_1980',6378137.0,298.257222101]],PRIMEM['Greenwich',0.0],UNIT['Degree',0.0174532925199433]],PROJECTION['Lambert_Conformal_Conic'],PARAMETER['false_easting',0.0],PARAMETER['false_northing',0.0],PARAMETER['central_meridian',-98.0],PARAMETER['standard_parallel_1',38.0],PARAMETER['standard_parallel_2',39.0],PARAMETER['scale_factor',1.0],PARAMETER['latitude_of_origin',38.5],UNIT['Meter',1.0]]"

stagews = stageDB + "\\CDRS.gdb"

print str(datetime.datetime.now()) + " refreshing the staging geodatabase"
if Exists(stagews):
    Delete_management(stagews)
CreateFileGDB_management(stageDB, "CDRS.gdb")
env.workspace = stagews

print str(datetime.datetime.now()) + " manipulating the Oracle CDRS layers"
MakeFeatureLayer_management(sdeCDRS, "Construction", '#')
FeatureClassToFeatureClass_conversion(
    "Construction", stagews, "CDRS_RAW", "#",
    """CDRS_ALERT_ROUTE_ID "CDRS_ALERT_ROUTE_ID" true false false 8 Double 10 38 ,First,#,KANROAD.CDRS_ALERT_ROUTE,CDRS_ALERT_ROUTE_ID,-1,-1;AlertID "AlertID" true true false 8 Double 10 38 ,First,#,KANROAD.CDRS_ALERT_ROUTE,ALERT_ID,-1,-1;AlertDate "AlertDate" true true false 36 Date 0 0 ,First,#,KANROAD.CDRS_ALERT_ROUTE,ALERT_DATE,-1,-1;AlertStatus "AlertStatus" true true false 8 Double 10 38 ,First,#,KANROAD.CDRS_ALERT_ROUTE,ALERT_STATUS,-1,-1;FeaClosed "FeaClosed" true true false 8 Double 10 38 ,First,#,KANROAD.CDRS_ALERT_ROUTE,FEA_CLOSED,-1,-1;District "District" true true false 8 Double 10 38 ,First,#,KANROAD.CDRS_ALERT_ROUTE,DISTRICT,-1,-1;Area "Area" true true false 8 Double 10 38 ,First,#,KANROAD.CDRS_ALERT_ROUTE,AREA,-1,-1;LRSKey "LRSKey" true true false 19 Text 0 0 ,First,#,KANROAD.CDRS_ALERT_ROUTE,BEG_LRS_KEY,-1,-1;LRSRoute "LRSRoute" true true false 12 Text 0 0 ,First,#,KANROAD.CDRS_ALERT_ROUTE,BEG_LRS_ROUTE,-1,-1;County "County" true true false 20 Text 0 0 ,First,#,KANROAD.CDRS_ALERT_ROUTE,BEG_COUNTY_NAME,-1,-1;CountyNumber "CountyNumber" true true false 8 Double 10 38 ,First,#,KANROAD.CDRS_ALERT_ROUTE,BEG_COUNTY_NUMBER,-1,-1;AlertType "AlertType" true true false 50 Text 0 0 ,First,#,KANROAD.CDRS_ALERT_ROUTE,ALERT_TYPE_TXT,-1,-1;AlertDescription "AlertDescription" true true false 50 Text 0 0 ,First,#,KANROAD.CDRS_ALERT_ROUTE,ALERT_DESC_TXT,-1,-1;BeginMP "BeginMP" true true false 8 Double 0 38 ,First,#,KANROAD.CDRS_ALERT_ROUTE,BEG_STATE_LOGMILE,-1,-1;BegRP "BegRP" true true false 8 Double 0 38 ,First,#,KANROAD.CDRS_ALERT_ROUTE,BEG_REF_POST,-1,-1;EndMP "EndMP" true true false 8 Double 0 38 ,First,#,KANROAD.CDRS_ALERT_ROUTE,END_STATE_LOGMILE,-1,-1;EndRP "EndRP" true true false 8 Double 10 38 ,First,#,KANROAD.CDRS_ALERT_ROUTE,END_REF_POST,-1,-1;Direction "Direction" true true false 12 Text 0 0 ,First,#,KANROAD.CDRS_ALERT_ROUTE,ALERT_DIREC_TXT,-1,-1;StartDate "StartDate" true true false 36 Date 0 0 ,First,#,KANROAD.CDRS_ALERT_ROUTE,START_DATE,-1,-1;CompDate "CompDate" true true false 36 Date 0 0 ,First,#,KANROAD.CDRS_ALERT_ROUTE,COMP_DATE,-1,-1;ExpireDate "ExpireDate" true true false 36 Date 0 0 ,First,#,KANROAD.CDRS_ALERT_ROUTE,EXPIRE_DATE,-1,-1;TimeDelay "TimeDelay" true true false 30 Text 0 0 ,First,#,KANROAD.CDRS_ALERT_ROUTE,TIME_DELAY_TXT,-1,-1;WZDetailId "WZDetailId" true true false 8 Double 10 38 ,First,#,KANROAD.CDRS_ALERT_ROUTE,CDRS_WZ_DETAIL_ID,-1,-1;WidthLimit "WidthLimit" true true false 8 Double 0 38 ,First,#,KANROAD.CDRS_ALERT_ROUTE,WIDTH_RESTRICTION,-1,-1;HeightLimit "HeightLimit" true true false 8 Double 0 38 ,First,#,KANROAD.CDRS_ALERT_ROUTE,VERT_RESTRICTION,-1,-1;WeightLimit "WeightLimit" true true false 8 Double 0 38 ,First,#,KANROAD.CDRS_ALERT_ROUTE,WEIGHT_RESTRICTION,-1,-1;SpeedLimit "SpeedLimit" true true false 8 Double 0 38 ,First,#,KANROAD.CDRS_ALERT_ROUTE,SPEED_RESTRICTION,-1,-1;INTERNAL_COMMENT "INTERNAL_COMMENT" true true false 4000 Text 0 0 ,First,#,KANROAD.CDRS_ALERT_ROUTE,INTERNAL_COMMENT,-1,-1;Comments "Comments" true true false 4000 Text 0 0 ,First,#,KANROAD.CDRS_ALERT_ROUTE,PUBLIC_COMMENT,-1,-1;PUBLIC_VIEW "PUBLIC_VIEW" true true false 8 Double 10 38 ,First,#,KANROAD.CDRS_ALERT_ROUTE,PUBLIC_VIEW,-1,-1;RPT_BY_NAME "RPT_BY_NAME" true true false 50 Text 0 0 ,First,#,KANROAD.CDRS_ALERT_ROUTE,RPT_BY_NAME,-1,-1;RPT_BY_PHONE "RPT_BY_PHONE" true true false 15 Text 0 0 ,First,#,KANROAD.CDRS_ALERT_ROUTE,RPT_BY_PHONE,-1,-1;RPT_BY_EMAIL "RPT_BY_EMAIL" true true false 40 Text 0 0 ,First,#,KANROAD.CDRS_ALERT_ROUTE,RPT_BY_EMAIL,-1,-1;ContactName "ContactName" true 
true false 50 Text 0 0 ,First,#,KANROAD.CDRS_ALERT_ROUTE,CONTACT_NAME,-1,-1;ContactPhone "ContactPhone" true true false 15 Text 0 0 ,First,#,KANROAD.CDRS_ALERT_ROUTE,CONTACT_PHONE,-1,-1;ContactEmail "ContactEmail" true true false 30 Text 0 0 ,First,#,KANROAD.CDRS_ALERT_ROUTE,CONTACT_EMAIL,-1,-1;OfficeName "OfficeName" true true false 30 Text 0 0 ,First,#,KANROAD.CDRS_ALERT_ROUTE,OFFICE_NAME,-1,-1;NEW_NOTIFICATION "NEW_NOTIFICATION" true true false 8 Double 10 38 ,First,#,KANROAD.CDRS_ALERT_ROUTE,NEW_NOTIFICATION,-1,-1;ALERT_INSERT_DT "ALERT_INSERT_DT" true true false 36 Date 0 0 ,First,#,KANROAD.CDRS_ALERT_ROUTE,ALERT_INSERT_DT,-1,-1;WebLink "WebLink" true true false 500 Text 0 0 ,First,#,KANROAD.CDRS_ALERT_ROUTE,ALERT_HYPERLINK,-1,-1;SITE_CR "SITE_CR" true true false 18 Text 0 0 ,First,#,KANROAD.CDRS_ALERT_ROUTE,SITE_CR,-1,-1;LINE_COLOR "LINE_COLOR" true true false 8 Double 0 32 ,First,#,KANROAD.CDRS_ALERT_ROUTE,LINE_COLOR,-1,-1;GIS_VIEW "GIS_VIEW" true true false 2 Short 0 2 ,First,#,KANROAD.CDRS_ALERT_ROUTE,GIS_VIEW,-1,-1;DCAM_COMMENT "DCAM_COMMENT" true true false 1024 Text 0 0 ,First,#,KANROAD.CDRS_ALERT_ROUTE,DCAM_COMMENT,-1,-1;DCAM_DATE "DCAM_DATE" true true false 12 Text 0 0 ,First,#,KANROAD.CDRS_ALERT_ROUTE,DCAM_DATE,-1,-1;DISPLAY_MAP "DISPLAY_MAP" true true false 2 Short 0 1 ,First,#,KANROAD.CDRS_ALERT_ROUTE,DISPLAY_MAP,-1,-1;OBJECTID "OBJECTID" true false false 4 Long 0 38 ,First,#,KANROAD.CDRS_ALERT_ROUTE,OBJECTID,-1,-1;ROUTE "ROUTE" true true false 10 Text 0 0 ,First,#,KANROAD.CDRS_ALERT_ROUTE,ROUTE,-1,-1""",
    "DEFAULTS")
MakeTableView_management(sdeCDRSWZ, "detail", "#", "#")
MakeFeatureLayer_management(stagews + "//CDRS_RAW", "ConstructionJoin")

print str(datetime.datetime.now()) + " Joining the Oracle CDRS WZ table"

AddJoin_management("ConstructionJoin", "CDRS_WZ_DETAIL_ID", "detail",
                   "CDRS_WZ_DETAIL_ID", "KEEP_ALL")
Example #22
from CONFIG import ws, tempgdb
inws = ws + "/" + tempgdb
inws = r'\\gisdata\arcgis\GISdata\KDOT\BTP\CANSYSTEST\CANSYSNet2013_7_29.gdb'
#mxd = arcpy.mapping.MapDocument(r"\\GISDATA\ArcGIS\GISDATA\MXD\NewGISNetworkSeed.mxd")
#df = arcpy.mapping.ListDataFrames(mxd, "Layers")[0]

outws = r'\\gisdata\arcgis\GISdata\KDOT\BTP\CANSYSTEST\AASHTOPROC.gdb'
outfinal = r'\\gisdata\arcgis\GISdata\KDOT\BTP\CANSYSTEST\AASHTORoutes.gdb'
route = "SRND"
routelyr = inws + "/" + route
IDfield = "NE_UNIQUE"

if Exists(outws):
    Delete_management(outws)

CreateFileGDB_management(r"\\gisdata\arcgis\GISdata\KDOT\BTP\CANSYSTEST",
                         "AASHTOPROC.gdb")

if Exists(outfinal):
    Delete_management(outfinal)

CreateFileGDB_management(r"\\gisdata\arcgis\GISdata\KDOT\BTP\CANSYSTEST",
                         "AASHTOROUTES.gdb")

FeatureClassToFeatureClass_conversion(routelyr, outws, "USRoutes",
                                      "NE_UNIQUE LIKE 'U%'", "#")

routelyr = outws + "/USroutes"
rows = SearchCursor(routelyr)

for row in rows:
    print(row.getValue(IDfield))
Example #23
import os
from arcpy import (CopyFeatures_management, CreateFileGDB_management,
                   MakeFeatureLayer_management,
                   SelectLayerByAttribute_management)
from config import (advancedErrorReportingDict, gdbBasePart1, gdbBasePart2,
                    gdbBasePart3, gdbForSourceCreation, gdbForSourceNamePart,
                    mainFolder, otherFCsToCopyList, routesSource2,
                    routesSourceFeatureLayer, routesSourceName)
routesSource2FeatureLayer = routesSourceFeatureLayer + '_2'

for dictKey in advancedErrorReportingDict:
    # Need to create a _Source and a base gdb where
    # the _Source includes at least Calpts and Routes from
    # the full gdb for the current data mirroring version.

    # For each dictKey in the advancedErrorReportingDict
    # Create a new GDB in mainFolder (1st dictKey gdb) called gdbBasePart1 + '_' + gdbBasePart2 + '_' +  dictKey + '_' + gdbForSourceNamePart + '.gdb'
    dictKey_SourceGDB = gdbBasePart1 + '_' + gdbBasePart2 + '_' + dictKey + '_' + gdbForSourceNamePart + '.gdb'
    CreateFileGDB_management(mainFolder, dictKey_SourceGDB)
    # and create a new gdb in the same folder (2nd dictKey gdb) called gdbBasePart1 + '_' + gdbBasePart2 + '_' +  dictKey + '.gdb'
    dictKey_GDB = gdbBasePart1 + '_' + gdbBasePart2 + '_' + dictKey + '.gdb'
    CreateFileGDB_management(mainFolder, dictKey_GDB)
    # Then, load a feature layer of the RoutesSource from the gdbForSourceCreation so that you can do a selection on it
    MakeFeatureLayer_management(routesSource2, routesSource2FeatureLayer)
    # Use the value associated with the dict key as the selection where clause for the routesSourceFeatureLayer
    dictWhereClause = advancedErrorReportingDict[dictKey]
    SelectLayerByAttribute_management(routesSource2FeatureLayer,
                                      "CLEAR_SELECTION")
    SelectLayerByAttribute_management(routesSource2FeatureLayer,
                                      "NEW_SELECTION", dictWhereClause)
    # Copy the selected features from the feature layer to the dictKey_SourceGDB.
    dictKeyRoutesSource = os.path.join(dictKey_SourceGDB, routesSourceName)
    CopyFeatures_management(routesSource2FeatureLayer, dictKeyRoutesSource)
Example #24
def mainProcessFeatureSimplification(inputFeatures, maxCount, outputFeatures):
    # Split the input features into intermediary features:
    # Add each intermediary feature class to a list and
    # pass one feature class of the intermediary features
    # to each subprocess.
    
    # When all of the subprocesses complete, use the
    # list of the intermediary feature classes to append
    # the data into the output features.
    
    countResult = GetCount_management(inputFeatures)
    intCount = int(countResult.getOutput(0))
    # debug print
    print("Counted " + str(intCount) + " features in the " + inputFeatures + " feature class.")
    
    if maxCount > 15000:
        maxCount = 15000
    elif maxCount < 2000:
        maxCount = 7000
    else:
        pass
    
    neededMirrors = intCount / maxCount + 1
    
    # debug print
    print("Will create " + str(neededMirrors) + " reflection gdbs.")
    
    infoForSubprocess = list()
    gdbToCreateList = list()
    
    for countItem in xrange(0, neededMirrors):
        gdbMirrorName = mirrorBaseName + '_' + '0' + str(countItem) + '.gdb'
        gdbMirrorFullPath = os.path.join(mainFolder, gdbMirrorName)
        gdbToCreateList.append(gdbMirrorFullPath)
        try:
            if Exists(gdbMirrorFullPath):
                try:
                    Delete_management(gdbMirrorFullPath)
                except:
                    pass
            else:
                pass
        except:
            pass
        
        CreateFileGDB_management(mainFolder, gdbMirrorName)
        
        # do a selection on the input features here
        # then copyfeatures to get the selected features
        # output to the target gdb.
        
        if Exists(simplifyTempLayer):
            try:
                Delete_management(simplifyTempLayer)
            except:
                pass
        else:
            pass
        
        MakeFeatureLayer_management(inputFeatures, simplifyTempLayer)
        
        currentSelectMin = int(countItem * maxCount) 
        currentSelectMax = int((countItem + 1) * maxCount)
        
        dynSelectClause = """"OBJECTID" >= """ + str(currentSelectMin) + """ AND "OBJECTID" < """ + str(currentSelectMax) + """"""
        
        SelectLayerByAttribute_management(simplifyTempLayer, "NEW_SELECTION", dynSelectClause)
        
        selectedSimplifyFeatures = os.path.join(gdbMirrorFullPath, simplifyInputName)
        
        CopyFeatures_management(simplifyTempLayer, selectedSimplifyFeatures)
        
        subprocessInfoItem = [mainFolder, gdbMirrorFullPath, simplifyAlgorithm, simplifyDistance]
        
        infoForSubprocess.append(subprocessInfoItem)
    
    # Predivide the list of data driven pages that each process needs to run
    # and pass it as a list of exportItems.
    
    coreCount = mp.cpu_count()
    
    # To support running this on the slow AR60, reduce the coreCount used to try to keep
    # this script from crashing there.
    if coreCount >= 3 and useMultithreading == True:
        coreCount = coreCount - 1
        
        print("Starting a multi-threaded job which will use (up to) " + str(coreCount) + " cores at once.")
        
        workPool = mp.Pool(processes=coreCount)
        # Note: This is a different usage of the word map than the one generally used in GIS.
        workPool.map(subProcessFeatureSimplification, infoForSubprocess)
        print("Multi-threaded job's done!")
        
        print("Waiting a few moments before closing down the worker processes...")
        time.sleep(20)
        workPool.close()
        time.sleep(20)
        workPool.join()
        
        print("Worker processes closed.")
        
    else:
        # Don't use multithreading here.
        print("Using the single threaded process for feature simplification.")
        print("This will be slower than the multi-threaded version,")
        print("but it should also be less likely to crash on slower machines")
        print("or those with low core counts.")
        for singleThreadedProcessInfoListItem in infoForSubprocess:
            singleThreadedProcessForSlowMachines(singleThreadedProcessInfoListItem)
        
        print("Waiting a few moments before continuing to the next part of the script...")
        time.sleep(20)
    
    # Delete the output target prior to recreating it and appending data into it.
    if Exists(outputFeatures):
        try:
            Delete_management(outputFeatures)
        except:
            pass
    else:
        pass
    
    # Need the gdb and fc name here from outputFeatures.
    outGDB = returnGDBOrSDEPath(outputFeatures)
    outGDBName = returnGDBOrSDEName(outGDB)
    outGDBFolder = returnGDBOrSDEFolder(outGDB)
    outFCName = returnFeatureClass(outputFeatures)
    
    if not Exists(outGDB):
        CreateFileGDB_management(outGDBFolder, outGDBName)
    
    # Use the inputFeatures as a template.
    CreateFeatureclass_management(outGDB, outFCName, "", inputFeatures)
    
    appendOutputFCList = list()
    
    for gdbToCreate in gdbToCreateList:
        appendOutputFC = os.path.join(gdbToCreate, 'simplificationOutput')
        appendOutputFCList.append(appendOutputFC)
    
    # Do appends here, then sleep again for a bit.
    # Shouldn't need a field mapping since they should all be the same.
    Append_management(appendOutputFCList, outputFeatures, "NO_TEST")
    
    print "Waiting a few moments to be sure that all of the locks have been removed prior to deleting the reflection gdbs..."
    time.sleep(20)
    
    # Then remove the mirror gdbs.
    for gdbToCreate in gdbToCreateList:
        try:
            if Exists(gdbToCreate):
                try:
                    Delete_management(gdbToCreate)
                except:
                    pass
            else:
                pass
        except:
            pass