Code Example #1
    @classmethod
    def Open(cls, path):
        """
        Open the geodatabase at ``path`` and return a Geodatabase object
        containing its datasets, feature classes, rasters, and tables.
        """
        # change the arcpy workspace for listing, but save the current setting
        workspace = env.workspace
        env.workspace = path

        cls.validate_geodatabase(path)

        # TODO: Need a generic workspace class, and a dataset class
        datasets = ListDatasets()
        fcs_names = ListFeatureClasses()
        rasters_names = ListRasters()
        tables_names = ListTables()

        # take all the found layers and make into layer objects
        fcs = []
        for fc in fcs_names:
            fcs.append(Layer(os.path.join(path, fc)))

        rasters = []
        for raster in rasters_names:
            rasters.append(Layer(os.path.join(path, raster)))

        tables = []
        for table in tables_names:
            tables.append(Layer(os.path.join(path, table)))

        # set the workspace back for the user
        env.workspace = workspace

        return Geodatabase(path, datasets, fcs, rasters, tables)
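Note on the pattern above: the method saves env.workspace, switches it to the target geodatabase for the List* calls, and restores it before returning. A minimal standalone sketch of the same listing logic using plain arcpy follows; list_gdb_contents is an illustrative name and the try/finally hardening is an assumption, not code from the project above.

import os
from arcpy import env, ListDatasets, ListFeatureClasses, ListRasters, ListTables

def list_gdb_contents(path):
    """Return the datasets, feature classes, rasters, and tables found in a geodatabase."""
    saved_workspace = env.workspace   # remember the caller's workspace
    env.workspace = path
    try:
        contents = {
            "datasets": ListDatasets() or [],
            "feature_classes": [os.path.join(path, fc) for fc in (ListFeatureClasses() or [])],
            "rasters": [os.path.join(path, r) for r in (ListRasters() or [])],
            "tables": [os.path.join(path, t) for t in (ListTables() or [])],
        }
    finally:
        env.workspace = saved_workspace   # always restore the workspace for the user
    return contents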
Code Example #2
File: NG911_Aggregate.py  Project: KDOTGIS/pydot
def crawlFinal():
    env.workspace = currentPathSettings.FinalPath
    print str(currentPathSettings.FinalPath)
    # list the file geodatabases in the final directory
    workspace = ListWorkspaces("*", "FileGDB")
    for ws in workspace:
        print ws
        env.workspace = ws
        #print the tables in the geodatabase
        tablelist = ListTables()
        print "tables"
        for table in tablelist:
            print "   " + table
        #print the features classes stored in feature datasets
        Datasets = ListDatasets("*")
        for fd in Datasets:
            print fd
            print "feature classes - Polygon"
            FCListPoly = ListFeatureClasses("*", "Polygon", fd)
            for fc in FCListPoly:
                print "    " + fc
            print "feature classes - Lines"
            FCListLine = ListFeatureClasses("*", "Polyline", fd)
            for fc in FCListLine:
                print "    " + fc
            print "feature classes - points"
            FCListPoint = ListFeatureClasses("*", "Point", fd)
            for fc in FCListPoint:
                print "    " + fc
Code Example #3
File: fExamine.py  Project: cschmeissner/NG911
def ExamineGDB(gdb):
    import ntpath, re
    reviewpath = ntpath.basename(gdb)

    from arcpy import env, ListWorkspaces, ListDatasets, ListTables, ListFeatureClasses, GetCount_management, Compact_management, ListFields
    #set the workspace from the config file
    env.workspace = ntpath.dirname(gdb)
    ng911 = gdb
    print "geodatabases"
    print ng911
    env.workspace = ng911
    datasets = ListDatasets()
    print "Datasets:"
    for dataset in datasets:
        print "     " + str(dataset)
    tables = ListTables()
    print " tables:"
    for table in tables:
        fcc = GetCount_management(table)
        print "     " + str(table)
    fd = datasets[0]
    fcs = ListFeatureClasses("", "", fd)
    for fc in fcs:
        fields = ListFields(fc)
        fcc = GetCount_management(fc)
        print fc + ", " + str(fcc) + " features"
        for field in fields:
            print "        " + str(field.name) + ", " + str(field.type)
    checkfile = reviewpath + "/" + ntpath.basename(ng911)
    topo = fd + "/NG911_Topology"
    Compact_management(ng911)
Code Example #4
def LoadARegion():
    from arcpy import Append_management, ListFeatureClasses, ListDatasets, env, ListTables
    #importGDB = r"//gisdata/planning/Cart/projects/Conflation/GIS_DATA/GEO_COMM/REGION3_20151002/REGION3_20151002.gdb"
    #importGDB = r"\\gisdata\planning\Cart\projects\Conflation\GIS_DATA\GEO_COMM\REGION4_20151021\REGION4_20151021\REGION4_20151021.gdb"
    importGDB = r"\\gisdata\planning\Cart\projects\Conflation\GIS_DATA\GEO_COMM\REGION5_20151211\REGION5_20151211.gdb"
    LoadTarget = r"\\gisdata\planning\Cart\projects\Conflation\Workflow\conflation_sqlgis_geo.sde\Conflation.GEO."
    env.workspace = importGDB
    ### There are no tables in the conflated dataset products - handle similarly and separately
    skiplist = [
        'Stitch_Lines', 'RoadCenterlines', 'Overlaps_Gaps_MunicipalBoundary',
        'Overlaps_Gaps_FIRE', 'Overlaps_Gaps_LAW', 'Overlaps_Gaps_PSAP',
        'Overlaps_Gaps_ESZ', 'Overlaps_Gaps_EMS',
        'Overlaps_Gaps_CountyBoundary', 'Overlaps_Gaps_AuthoritativeBoundary'
    ]
    tables = ListTables()
    for table in tables:
        print table
        target = LoadTarget + table
        print target

    datasets = ListDatasets("*")
    for fd in datasets:
        print fd
        featureClasses = ListFeatureClasses("*", "All", fd)
        for fc in featureClasses:
            print fc
            if fc in skiplist:
                print 'skipping'
            else:
                target = LoadTarget + fd + "/" + fc
                print "loading to " + target
                Append_management(fc, target, schema_type="NO_TEST")
Code Example #5
def iteratorprocess():
    
    locatorsPath = r'\\gisdata\ArcGIS\GISdata\Accident Geocode\Python\AccidentLocators'
    locatorsFileList = os.listdir(locatorsPath)
    
    env.workspace = extractDataGDBPath
    accDataTablesList = ListTables("CrashLocation.GEO.ACC_*")
    
    NumbersList = set(range(1000))
    
    # Consider replacing the main if/elif here with regex.
    for locatorFile in locatorsFileList:
        if (len(locatorFile) > 5 and locatorFile[0] != "x" and locatorFile[-7:].lower() == "_nk.loc" and
             int(locatorFile[-10:-7]) in NumbersList):
            
            useKDOTIntersect = False
            print "Found a county _NK locator: " + locatorFile
            countyNoToCheckFor = locatorFile[-10:-7]
            print "The county number to check for an accident data table is: " + countyNoToCheckFor
            accDataTableToUse = "CrashLocation.GEO.ACC_" + countyNoToCheckFor
            
            if accDataTableToUse in accDataTablesList:
                print "Calling the Acc Locator with " + countyNoToCheckFor + " for the acc data table."
                # Added CrashLocation.GEO.
                # Changed from just "ACC_PTS_" + countyNoToCheckFor + "_NK" 2015-07-14
                pointFeaturesEnd = "CrashLocation.GEO.ACC_PTS_" + countyNoToCheckFor + "_NK"
                locatedPointsOutput = os.path.join(pointsFeaturesGDB, pointFeaturesEnd)
                locatorFileNoExt = locatorFile[:-4]
                locatorFullPath = os.path.join(locatorsPath, locatorFileNoExt)
                useanaccidentlocatorcaller(extractDataGDBPath, accDataTableToUse, locatorFullPath, locatedPointsOutput, useKDOTIntersect)
            else:
                pass
        elif (len(locatorFile) > 5  and locatorFile[0] != "x" and locatorFile[-4:].lower() == ".loc" and
             int(locatorFile[-7:-4]) in NumbersList):
            
            useKDOTIntersect = True
            print "Found a county locator: " + locatorFile
            countyNoToCheckFor = locatorFile[-7:-4]
            print "The county number to check for an accident data table is: " + countyNoToCheckFor
            accDataTableToUse = "CrashLocation.GEO.ACC_" + countyNoToCheckFor
            
            if accDataTableToUse in accDataTablesList:
                print "Calling Acc Locator with " + countyNoToCheckFor + " for the acc data table."
                # Added CrashLocation.GEO.
                # Changed from just "ACC_PTS_"  + countyNoToCheckFor 2015-07-14
                pointFeaturesEnd = "CrashLocation.GEO.ACC_PTS_" + countyNoToCheckFor
                locatedPointsOutput = os.path.join(pointsFeaturesGDB, pointFeaturesEnd)
                locatorFileNoExt = locatorFile[:-4]
                locatorFullPath = os.path.join(locatorsPath, locatorFileNoExt)
                useanaccidentlocatorcaller(extractDataGDBPath, accDataTableToUse, locatorFullPath, locatedPointsOutput, useKDOTIntersect)
            else:
                pass
        else:
            #print "locatorFile did not pass the test: " + locatorFile
            pass
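The comment near the top of this example suggests replacing the main if/elif filename checks with a regular expression. A rough sketch of that idea, kept consistent with the slicing logic above; the pattern and group names are assumptions, not code from the project:

import re

# County locators end in "<3-digit county number>.loc"; the no-KDOT-intersect
# variants end in "<3-digit county number>_NK.loc" (case-insensitive).
LOCATOR_PATTERN = re.compile(r'(?P<county>\d{3})(?P<nk>_NK)?\.loc$', re.IGNORECASE)

for locatorFile in locatorsFileList:
    if locatorFile.startswith("x"):
        continue  # excluded locators, as in the original locatorFile[0] != "x" check
    match = LOCATOR_PATTERN.search(locatorFile)
    if match is None:
        continue
    countyNoToCheckFor = match.group('county')
    useKDOTIntersect = match.group('nk') is None  # _NK locators skip the KDOT intersect
    print "Found a locator for county " + countyNoToCheckFor + ": " + locatorFile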
Code Example #6
def MapHPMS_Events():
    env.workspace = HPMS_DATA
    tableList = ListTables()

    for table in tableList:
        intable = HPMS_DATA + r"/" + table
        outname = str(table)
        print "table " + table + " mapping"
        MakeRouteEventLayer_lr("C_Primary_Route", "NQR_DESCRIPTION", intable,
                               "ROUTE_ID LINE END_POINT END_POINT", outname,
                               "", "ERROR_FIELD", "NO_ANGLE_FIELD", "NORMAL",
                               "ANGLE", "LEFT", "POINT")
Code Example #7
def iteratorprocess():
    # Get a list of the *_RoadChecks.gdb folders here.
    roadChecksPath = r'\\gisdata\ArcGIS\GISdata\Accident Geocode\Python\RoadChecks'
    newLocatorPath = r'\\gisdata\ArcGIS\GISdata\Accident Geocode\Python\AccidentLocators'
    roadChecksFileList = os.listdir(roadChecksPath)
    env.workspace = extractDataOutGDB
    accDataTablesList = ListTables("CrashLocation.GEO.ACC_*")
    useKDOTIntersect = True

    for gdbName in roadChecksFileList:
        if (len(gdbName) > 23 and gdbName[0] != "x" and
                gdbName[-15] == "_" and gdbName[-15:].lower() == "_roadchecks.gdb"):
            countyAbbr = gdbName[-23] + gdbName[-22]

            #try:
            for countyItem in coAbbrAndNoList:
                if countyAbbr.lower() == countyItem[0].lower():
                    #print countyAbbr + " : " + countyItem[1]
                    countyNumber = countyItem[1]

                    accDataTableOutName = "CrashLocation.GEO.ACC_" + countyNumber
                    newLocatorName = "Crash_Locator_CO_" + countyNumber
                    #newLocatorName = "TEST_004"
                    if (accDataTableOutName) in accDataTablesList:
                        roadChecksGDBPath = os.path.join(
                            roadChecksPath, gdbName)
                        print(
                            "Calling CreateAccidentLocatorCaller function for "
                            + roadChecksGDBPath + "\n" + " to be called " +
                            newLocatorName + " in " + newLocatorPath + ".")
                        useKDOTIntersect = True
                        CreateAccidentLocatorCaller(roadChecksGDBPath,
                                                    newLocatorPath,
                                                    newLocatorName,
                                                    useKDOTIntersect)

                        newLocatorName = newLocatorName + "_NK"
                        useKDOTIntersect = False
                        CreateAccidentLocatorCaller(roadChecksGDBPath,
                                                    newLocatorPath,
                                                    newLocatorName,
                                                    useKDOTIntersect)
                    else:
                        print "accDataTableOutName: " + accDataTableOutName + " not found in the tableNameList"
                else:
                    pass
            #except:
            #print "An error occurred."
            #print ""

        else:
            pass
Code Example #8
File: NG911_Aggregate.py  Project: KDOTGIS/pydot
def Restart():
    from arcpy import DeleteRows_management
    targetpath = currentPathSettings.EntDB + '/' + currentPathSettings.EDBName + '.' + currentPathSettings.EDBO + '.' + currentPathSettings.EFD
    print targetpath
    env.workspace = targetpath
    fclist = ListFeatureClasses()
    for fc in fclist:
        print fc
        #DeleteRows_management(fc)
    targetpath = currentPathSettings.EntDB
    env.workspace = targetpath
    tablelist = ListTables()
    for table in tablelist:
        print table
        DeleteRows_management(table)
Code Example #9
File: NG911_Aggregate.py  Project: KDOTGIS/pydot
def LoadFinalStreets(inFile, inFeatureclass, inTable):
    LoadThis = inFeatureclass
    FromThis = inFile
    LoadTable = inTable
    targetpath = currentPathSettings.EntDB + '/' + currentPathSettings.EDBName + '.' + currentPathSettings.EDBO
    env.workspace = currentPathSettings.FinalPath
    print str(currentPathSettings.FinalPath)
    # list the file geodatabases in the final directory
    workspace = ListWorkspaces(FromThis, "FileGDB")
    for ws in workspace:
        print ws
        env.workspace = ws
        #print the tables in the geodatabase
        tablelist = ListTables(LoadTable)
        print "tables"
        for table in tablelist:
            print "   " + table
            print "loading " + ws + "/" + table + " to " + targetpath + table
            #CalledUpon(ws)
            # Try to load/append the rows in the alias table to the aggregated geodatabase
            try:
                Append_management(ws + "/" + table, targetpath + "." + table,
                                  "NO_TEST", "#")
            except:
                print 'there was a problem loading the alias table for ' + ws

        #print the features classes stored in feature datasets
        Datasets = ListDatasets("*")
        for fd in Datasets:
            #print fd
            #print "feature classes - Polygon"
            #FCListPoly = ListFeatureClasses("*", "Polygon", fd)
            #for fc in FCListPoly:
            #    print "    "+fc
            #print "feature classes - Lines"
            FCListLine = ListFeatureClasses(LoadThis, "Polyline", fd)
            for fc in FCListLine:
                #print "    "+fc
                print "loading " + ws + "/" + fc + " to " + targetpath + '.' + currentPathSettings.EFD + "." + fc
                try:
                    Append_management(
                        fc,
                        targetpath + '.' + currentPathSettings.EFD + "/" + fc,
                        "NO_TEST", "#")
                except:
                    print 'there was a problem loading centerlines for ' + ws
Code Example #10
def iteratorprocess():
    
    # Get a list of the *_RoadChecks.gdb folders here.
    roadChecksPath = r'\\gisdata\ArcGIS\GISdata\Accident Geocode\Python\RoadChecks'
    roadChecksFileList = os.listdir(roadChecksPath)
    env.workspace = extractDataOutGDB
    accDataTablesList = ListTables("CrashLocation.GEO.ACC_*")
    
    
    print "The tableNameItems are:"
    for tableNameItem in accDataTablesList:
        print "Table Name: " + tableNameItem
    
    for gdbName in roadChecksFileList:
        # Guard on length 23 so the gdbName[-23] / gdbName[-22] county-abbreviation lookups below are safe.
        if len(gdbName) > 23 and gdbName[0] != "x" and gdbName[-15] == "_" and gdbName[-15:].lower() == "_roadchecks.gdb":
            countyAbbr = gdbName[-23] + gdbName[-22]
            
            try:
                for countyItem in coAbbrAndNoList:
                    if countyAbbr.lower() == countyItem[0].lower():
                        #print countyAbbr + " : " + countyItem[1]
                        countyNumber = countyItem[1]
                        
                        accDataTableOutName = "CrashLocation.GEO.ACC_" + countyNumber
                        if (accDataTableOutName) in accDataTablesList:
                            roadChecksGDBPath = os.path.join(roadChecksPath, gdbName)
                            roadChecksCenterlinePath = os.path.join(roadChecksGDBPath, r"NG911\RoadCenterline")
                            accDataTablePath = os.path.join(extractDataOutGDB, accDataTableOutName)
                            print ("Calling RoadNameFixCaller function for " + roadChecksCenterlinePath + "\n" +
                                   " and "+ accDataTablePath + ".")
                        
                            RoadNameFixCaller(roadChecksCenterlinePath, accDataTablePath)
                        else:
                            print "accDataTableOutName: " + accDataTableOutName + " not found in the tableNameList"
                    else:
                        pass
            except:
                print "An error occurred."
                print ""
            
        else:
            pass
Code Example #11
File: NG911_Aggregate.py  Project: KDOTGIS/pydot
def ProcessFMLRS(inFile, inFeatureclass, inTable):
    LoadThis = inFeatureclass
    FromThis = inFile
    LoadTable = inTable
    targetpath = currentPathSettings.EntDB + '/' + currentPathSettings.EDBName + '.' + currentPathSettings.EDBO
    print targetpath
    env.workspace = currentPathSettings.FinalPath
    print str(currentPathSettings.FinalPath)
    # list the file geodatabases in the final directory
    workspace = ListWorkspaces(FromThis, "FileGDB")
    for ws in workspace:
        print ws
        env.workspace = ws
        #print the tables in the geodatabase
        tablelist = ListTables(LoadTable)
        print "tables"
        for table in tablelist:
            print "   " + table
            print "loading " + ws + "/" + table + " to " + targetpath + table
            CalledUpon(ws)
Code Example #12
def LoadAliasTables():
    from arcpy import Append_management, env, ListTables, ListWorkspaces, CalculateField_management
    importFolder = r"\\gisdata\planning\Cart\projects\Conflation\GIS_DATA\R1\Alias3"
    LoadTarget = r"Database Connections\Conflation2012_sde.sde\Conflation.SDE.RoadAlias"
    env.workspace = importFolder
    GDBList = []
    for gdb in ListWorkspaces("*", "FileGDB"):
        GDBList.append(gdb)

    for geodatabase in GDBList:
        env.workspace = geodatabase
        tables = ListTables("RoadAlias")
        for table in tables:
            print table
            CalculateField_management(table,
                                      "SEGID",
                                      expression="""[STEWARD]&" "& [SEGID]""",
                                      expression_type="VB",
                                      code_block="")
            Append_management(table, LoadTarget, schema_type="NO_TEST")
        print geodatabase
Code Example #13
def copy_tables(input_ws, output_ws, foreach_table=None):
    """
    copies tables or sends each table to a function
        input_ws - the input database
        output_ws - the output database
        foreach_table - the optional function to process each table
    """
    from arcpy import env, ListTables, AddMessage, AddWarning, TableToGeodatabase_conversion
    from os.path import join

    env.workspace = input_ws
    for table in ListTables():
        AddMessage('Processing table: {}'.format(table))
        try:
            if foreach_table:
                foreach_table(input_ws, output_ws, table)
            else:
                output_path = join(output_ws, get_name(table))
                delete_existing(output_path)
                TableToGeodatabase_conversion(table, output_ws)
        except Exception as e:
            AddWarning('Error on table: {} - {}'.format(table, e))
            pass
Code Example #14
def copy_tables(input_ws, output_ws, foreach_table = None):
    """
    copies tables or sends each table to a function
        input_ws - the input database
        output_ws - the output database
        foreach_table - the optional function to process each table
    """
    from arcpy import env, ListTables, AddMessage, AddWarning, \
        TableToGeodatabase_conversion, GetCount_management, \
        TableToTable_conversion
    from os.path import join 

    env.workspace = input_ws
    for table in ListTables():
        AddMessage('Processing table: {}'.format(table))
        
        if env.skipAttach and '_attach' in table.lower():
            AddWarning('Skipping attachments table {}'.format(table))
            continue
        
        if env.skipEmpty:
            count = int(GetCount_management(table)[0])
            if count == 0:
                AddWarning('Skipping empty table: {}'.format(table))
                continue
        
        try:
            if foreach_table:
                foreach_table(input_ws, output_ws, table)
            else:
                output_path = join(output_ws, get_name(table))
                delete_existing(output_path)
                TableToTable_conversion(table, output_ws, get_name(table))
        except Exception as e:
            AddWarning('Error on table: {} - {}'.format(table, e))
            pass
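As the docstring describes, each table is either copied into the output workspace or handed to an optional per-table callback; this version also assumes env.skipAttach and env.skipEmpty have been set elsewhere. A minimal usage sketch with placeholder paths and an illustrative callback (neither is from the project):

def report_table(input_ws, output_ws, table):
    # Illustrative callback: report the table instead of copying it.
    from arcpy import AddMessage
    AddMessage('Would copy {} from {} to {}'.format(table, input_ws, output_ws))

# Copy every table into the target geodatabase (placeholder paths):
copy_tables(r'C:\data\source.gdb', r'C:\data\target.gdb')

# Or route each table through the callback instead of copying it:
copy_tables(r'C:\data\source.gdb', r'C:\data\target.gdb', foreach_table=report_table)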
Code Example #15
def Analyzer(startworkspace):
    env.workspace = startworkspace
    dataList = ListTables() + ListFeatureClasses()

    for dataset in ListDatasets("*", "Feature"):
        env.workspace = os.path.join(startworkspace, dataset)
        dataList += ListFeatureClasses() + ListDatasets()
        AnalyzeDatasets_management(startworkspace,
                                   include_system="NO_SYSTEM",
                                   in_datasets=dataList,
                                   analyze_base="ANALYZE_BASE",
                                   analyze_delta="ANALYZE_DELTA",
                                   analyze_archive="ANALYZE_ARCHIVE")
        print "analyzed " + str(dataList)
    env.workspace = SDE
    AnalyzeDatasets_management(SDE,
                               include_system="SYSTEM",
                               in_datasets="",
                               analyze_base="ANALYZE_BASE",
                               analyze_delta="ANALYZE_DELTA",
                               analyze_archive="ANALYZE_ARCHIVE")
    print "analyzed system tables"

    env.workspace = startworkspace
Code Example #16
File: np2tbl.py  Project: majid-saeed/tools_pro
# .... final code section producing the featureclass and extendtable
if len(sys.argv) == 1:
    testing = True
    pth = script.split("/")[:-2]
    pth = "/".join(pth) + "/Data/sample_20.npy"
    a = np.load(pth)
    frmt = "Result...\n{}"
    print(frmt.format(a))
else:
    testing = False
    in_arr = sys.argv[1]
    out_name = sys.argv[2]
    out_gdb = sys.argv[3]
    make_tbl_view = sys.argv[4]
    env.workspace = out_gdb
    tbls = ListTables()
    out_name = ValidateTableName(out_name)
    if tbls is not None:
        if out_name in tbls:
            out_name += '_dup'
    out_tbl = out_gdb + "/" + out_name
    # ---- call section for processing function
    #
    a = np.load(in_arr)
    NumPyArrayToTable(a, out_tbl)  # create the table
    if make_tbl_view in (True, 'True', 1):
        MakeTableView_management(out_tbl, out_name)
    args = [in_arr, out_gdb, out_name]
    msg = """
    :------------------------------------------------------------