Example #1
def ReturnStreetstoTopology():
    """Adds the RoadCenterline feature class to the NG911 topology dataset and applies the street topology rules."""
    import os
    from arcpy import (AddFeatureClassToTopology_management, ListDatasets,
                       AddRuleToTopology_management, Delete_management,
                       Describe, env)
    env.workspace = currentPathSettings.gdbPath
    fd = ListDatasets("*", "Feature")
    gdbw = os.path.join(currentPathSettings.gdbPath, fd[0])
    env.workspace = gdbw
    topoDatasetList = ListDatasets("*", "TOPOLOGY")
    geonetDatasetList = ListDatasets("*", "GeometricNetwork")
    authbnd = os.path.join(gdbw, "AuthoritativeBoundary")
    ESZBnd = os.path.join(gdbw, "ESZ")
    if geonetDatasetList == []:
        print "no geometric network created yet"
    else:
        Delete_management(geonetDatasetList[0])
    desc = Describe(topoDatasetList[0])
    print "%-27s %s" % ("FeatureClassNames:", desc.featureClassNames)
    if lyr in desc.featureClassNames:
        print "Road Centerlines already exist in topology dataset"
    else:
        print "adding road centerlines to topology"
        inputTopology = os.path.join(gdbw, topoDatasetList[0])
        inputRoadCenterline = os.path.join(gdbw, "RoadCenterline")
        AddFeatureClassToTopology_management(inputTopology,
                                             inputRoadCenterline, "1", "1")
        AddRuleToTopology_management(inputTopology, "Must Not Overlap (line)",
                                     inputRoadCenterline, "", "", "")
        AddRuleToTopology_management(inputTopology,
                                     "Must Not Intersect (line)",
                                     inputRoadCenterline, "", "", "")
        AddRuleToTopology_management(inputTopology,
                                     "Must Not Have Dangles (Line)",
                                     inputRoadCenterline, "", "", "")
        AddRuleToTopology_management(inputTopology,
                                     "Must Not Self-Overlap (Line)",
                                     inputRoadCenterline, "", "", "")
        AddRuleToTopology_management(inputTopology,
                                     "Must Not Self-Intersect (Line)",
                                     inputRoadCenterline, "", "", "")
        AddRuleToTopology_management(inputTopology,
                                     "Must Be Single Part (Line)",
                                     inputRoadCenterline, "", "", "")
        AddRuleToTopology_management(
            inputTopology, "Must Not Intersect Or Touch Interior (Line)",
            inputRoadCenterline, "", "", "")
        AddRuleToTopology_management(inputTopology,
                                     "Must Be Inside (Line-Area)",
                                     inputRoadCenterline, "", authbnd, "")
        # TODO: write a loop so centerlines must also be inside the ESZ boundaries (see the sketch after this example)
        AddRuleToTopology_management(
            inputTopology, "Boundary Must Be Covered By (Area-Line)", authbnd,
            "", inputRoadCenterline, "")
        AddRuleToTopology_management(
            inputTopology, "Boundary Must Be Covered By (Area-Line)", ESZBnd,
            "", inputRoadCenterline, "")
Example #2
    def Open(cls, path):
        """
        Opens the geodatabase at the given path and returns a Geodatabase
        object describing its datasets, feature classes, rasters, and tables.
        """
        # change the arcpy workspace for listing, but save the current setting
        workspace = env.workspace
        env.workspace = path

        cls.validate_geodatabase(path)

        # TODO: Need a generic workspace class, and a dataset class
        datasets = ListDatasets()
        fcs_names = ListFeatureClasses()
        rasters_names = ListRasters()
        tables_names = ListTables()

        # take all the found layers and make into layer objects
        fcs = []
        for fc in fcs_names:
            fcs.append(Layer(os.path.join(path, fc)))

        rasters = []
        for raster in rasters_names:
            rasters.append(Layer(os.path.join(path, raster)))

        tables = []
        for table in tables_names:
            tables.append(Layer(os.path.join(path, table)))

        # set the workspace back for the user
        env.workspace = workspace

        return Geodatabase(path, datasets, fcs, rasters, tables)
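A minimal usage sketch, assuming Open is declared as a @classmethod on the Geodatabase class and that the path below (an example value) points at an existing file geodatabase:

# Hypothetical usage; the path is an example value only
gdb = Geodatabase.Open(r"C:\temp\sample.gdb")
print(gdb)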
Example #3
def ExamineGDB(gdb):
    """Prints the datasets, tables, and feature classes (with feature counts and field definitions) in a geodatabase, then compacts it."""
    import ntpath
    reviewpath = ntpath.basename(gdb)

    from arcpy import (env, ListDatasets, ListTables, ListFeatureClasses,
                       GetCount_management, Compact_management, ListFields)
    #set the workspace from the config file
    env.workspace = ntpath.dirname(gdb)
    ng911 = gdb
    print "geodatabases"
    print ng911
    env.workspace = ng911
    datasets = ListDatasets()
    print "Datasets:"
    for dataset in datasets:
        print "     " + str(dataset)
    tables = ListTables()
    print "Tables:"
    for table in tables:
        fcc = GetCount_management(table)
        print "     " + str(table) + ", " + str(fcc) + " rows"
    fd = datasets[0]
    fcs = ListFeatureClasses("", "", fd)
    for fc in fcs:
        fields = ListFields(fc)
        fcc = GetCount_management(fc)
        print fc + ", " + str(fcc) + " features"
        for field in fields:
            print "        " + str(field.name) + ", " + str(field.type)
    checkfile = reviewpath + "/" + ntpath.basename(ng911)
    topo = fd + "/NG911_Topology"
    Compact_management(ng911)
Example #4
def LoadARegion():
    """Appends the feature classes from a regional conflation geodatabase into the target enterprise geodatabase, skipping the classes in skiplist."""
    from arcpy import Append_management, ListFeatureClasses, ListDatasets, env, ListTables
    #importGDB = r"//gisdata/planning/Cart/projects/Conflation/GIS_DATA/GEO_COMM/REGION3_20151002/REGION3_20151002.gdb"
    #importGDB = r"\\gisdata\planning\Cart\projects\Conflation\GIS_DATA\GEO_COMM\REGION4_20151021\REGION4_20151021\REGION4_20151021.gdb"
    importGDB = r"\\gisdata\planning\Cart\projects\Conflation\GIS_DATA\GEO_COMM\REGION5_20151211\REGION5_20151211.gdb"
    LoadTarget = r"\\gisdata\planning\Cart\projects\Conflation\Workflow\conflation_sqlgis_geo.sde\Conflation.GEO."
    env.workspace = importGDB
    ### There are no tables in the conflated dataset products - handle similarly and separately
    skiplist = [
        'Stitch_Lines', 'RoadCenterlines', 'Overlaps_Gaps_MunicipalBoundary',
        'Overlaps_Gaps_FIRE', 'Overlaps_Gaps_LAW', 'Overlaps_Gaps_PSAP',
        'Overlaps_Gaps_ESZ', 'Overlaps_Gaps_EMS',
        'Overlaps_Gaps_CountyBoundary', 'Overlaps_Gaps_AuthoritativeBoundary'
    ]
    tables = ListTables()
    for table in tables:
        print table
        target = LoadTarget + table
        print target

    datasets = ListDatasets("*")
    for fd in datasets:
        print fd
        featureClasses = ListFeatureClasses("*", "All", fd)
        for fc in featureClasses:
            print fc
            if fc in skiplist:
                print 'skipping'
            else:
                target = LoadTarget + fd + "/" + fc
                print "loading to " + target
                Append_management(fc, target, schema_type="NO_TEST")
Example #5
def StreetNetworkCheck():
    """Removes street centerlines from the topology, creates a geometric network, and then checks the geometric network connectivity."""
    import os
    from arcpy import (VerifyAndRepairGeometricNetworkConnectivity_management,
                       RemoveFeatureClassFromTopology_management,
                       CreateGeometricNetwork_management,
                       FindDisconnectedFeaturesInGeometricNetwork_management,
                       ListDatasets, Exists, env)
    print gdb
    env.workspace = gdb
    fd = ListDatasets("*", "Feature")
    fdNG = fd[0]
    print fd[0]
    topo = gdb + "/" + fdNG + "/NG911_Topology"
    #topo = ListDatasets("*")  # NOTE: the topology could not be retrieved via the List functions in v10.2.2
    geonet = gdb + "/" + fdNG + "/RoadCenterlineGeoNet"
    print topo
    if Exists(geonet):
        print "Street Geometric Network Already Exists"
    else:
        try:
            RemoveFeatureClassFromTopology_management(topo, "RoadCenterline")
        except:
            print "could not remove road centerlines from topology"
        CreateGeometricNetwork_management(gdb + "/" + fdNG,
                                          "RoadCenterlineGeoNet",
                                          "RoadCenterline SIMPLE_EDGE NO", "#",
                                          "#", "#", "#", "#")
    FindDisconnectedFeaturesInGeometricNetwork_management(
        gdb + "/" + fdNG + "/RoadCenterline", "Roads_Disconnected")
    StreetLogfile = os.path.join(
        os.path.dirname(currentPathSettings.gdbPath),
        os.path.basename(currentPathSettings.gdbPath)[:-4] + "_Centerline.log")
    VerifyAndRepairGeometricNetworkConnectivity_management(
        geonet, StreetLogfile, "VERIFY_ONLY", "EXHAUSTIVE_CHECK",
        "0, 0, 10000000, 10000000")
Example #6
def crawlFinal():
    """Walks the file geodatabases in the final directory and prints their tables and the feature classes in each feature dataset."""
    env.workspace = currentPathSettings.FinalPath
    print str(currentPathSettings.FinalPath)
    #list the file geodatabases in the final directory
    workspace = ListWorkspaces("*", "FileGDB")
    for ws in workspace:
        print ws
        env.workspace = ws
        #print the tables in the geodatabase
        tablelist = ListTables()
        print "tables"
        for table in tablelist:
            print "   " + table
        #print the features classes stored in feature datasets
        Datasets = ListDatasets("*")
        for fd in Datasets:
            print fd
            print "feature classes - Polygon"
            FCListPoly = ListFeatureClasses("*", "Polygon", fd)
            for fc in FCListPoly:
                print "    " + fc
            print "feature classes - Lines"
            FCListLine = ListFeatureClasses("*", "Polyline", fd)
            for fc in FCListLine:
                print "    " + fc
            print "feature classes - points"
            FCListPoint = ListFeatureClasses("*", "Point", fd)
            for fc in FCListPoint:
                print "    " + fc
Example #7
def RemoveGpHistory_fd(sdeconn, remove_gp_history_xslt, out_xml):
    """Strips the geoprocessing history from the metadata of every feature class in each feature dataset of an SDE workspace."""
    import os
    from arcpy import (env, ListDatasets, ListFeatureClasses,
                       ClearWorkspaceCache_management,
                       XSLTransform_conversion, MetadataImporter_conversion)
    ClearWorkspaceCache_management()
    env.workspace = sdeconn  # list the feature datasets in the SDE workspace
    for fd in ListDatasets():
        env.workspace = sdeconn + os.sep + fd
        for fc in ListFeatureClasses():
            name_xml = out_xml + os.sep + str(fc) + ".xml"
            #Process: XSLT Transformation
            XSLTransform_conversion(sdeconn + os.sep + fd + os.sep + fc,
                                    remove_gp_history_xslt, name_xml, "")
            print "Completed xml conversion on {0} {1}".format(fd, fc)
            # Process: Metadata Importer
            MetadataImporter_conversion(name_xml, sdeconn + os.sep + fd + os.sep + fc)
            print "Imported XML on {0}".format(fc)
Example #8
def Analyzer(startworkspace):
    """Analyzes the tables, feature classes, and feature datasets in the given workspace, then analyzes the SDE system tables."""
    env.workspace = startworkspace
    dataList = ListTables() + ListFeatureClasses()

    for dataset in ListDatasets("*", "Feature"):
        env.workspace = os.path.join(startworkspace, dataset)
        dataList += ListFeatureClasses() + ListDatasets()
        AnalyzeDatasets_management(startworkspace,
                                   include_system="NO_SYSTEM",
                                   in_datasets=dataList,
                                   analyze_base="ANALYZE_BASE",
                                   analyze_delta="ANALYZE_DELTA",
                                   analyze_archive="ANALYZE_ARCHIVE")
        print "analyzed " + str(dataList)
    env.workspace = SDE
    AnalyzeDatasets_management(SDE,
                               include_system="SYSTEM",
                               in_datasets="",
                               analyze_base="ANALYZE_BASE",
                               analyze_delta="ANALYZE_DELTA",
                               analyze_archive="ANALYZE_ARCHIVE")
    print "analyzed system tables"

    env.workspace = startworkspace
Example #9
    def __init__(self, usar_data):
        self.log = log()
        self.dashboard_db = config["dashboard_database"]
        self.hqiis = self.dashboard_db + os.sep + config["hqiis"]
        self.current_use_catcode_table = self.dashboard_db + os.sep + config["catcodes"]
        self.sites = self.dashboard_db + os.sep + config["sites"]
        self.usar_data = usar_data
        env.workspace = self.usar_data
        self.feature_classes = set()
        feature_types = config["feature_types"].split(", ")
        self.layer_status = LayerStatus(self.dashboard_db + os.sep + config["CIP_Layer_Status"])
        self.layer_status.baseline_the_table(self.__get_insts_sites(), feature_types)
        for ds in ListDatasets():
            for fc in ListFeatureClasses(feature_dataset=ds):
                self.feature_classes.add(fc)
        read_cache = File("CheckForNeeds")
        self.previous_rpuids = read_cache.read()
Example #10
def LoadFinalStreets(inFile, inFeatureclass, inTable):
    """Appends the road alias tables and street centerline feature classes from the final file geodatabases into the enterprise geodatabase."""
    LoadThis = inFeatureclass
    FromThis = inFile
    LoadTable = inTable
    targetpath = currentPathSettings.EntDB + '/' + currentPathSettings.EDBName + '.' + currentPathSettings.EDBO
    env.workspace = currentPathSettings.FinalPath
    print str(currentPathSettings.FinalPath)
    #list the file geodatabases in the final directory
    workspace = ListWorkspaces(FromThis, "FileGDB")
    for ws in workspace:
        print ws
        env.workspace = ws
        #print the tables in the geodatabase
        tablelist = ListTables(LoadTable)
        print "tables"
        for table in tablelist:
            print "   " + table
            print "loading " + ws + "/" + table + " to " + targetpath + table
            #CalledUpon(ws)
            #Try to load/append the rows i nthe alias table  the aggregated geodatabase
            try:
                Append_management(ws + "/" + table, targetpath + "." + table,
                                  "NO_TEST", "#")
            except:
                print 'there was a problem loading alias table for' + ws

        #print the features classes stored in feature datasets
        Datasets = ListDatasets("*")
        for fd in Datasets:
            #print fd
            #print "feature classes - Polygon"
            #FCListPoly = ListFeatureClasses("*", "Polygon", fd)
            #for fc in FCListPoly:
            #    print "    "+fc
            #print "feature classes - Lines"
            FCListLine = ListFeatureClasses(LoadThis, "Polyline", fd)
            for fc in FCListLine:
                #print "    "+fc
                print "loading " + ws + "/" + fc + " to " + targetpath + '.' + currentPathSettings.EFD + "." + fc
                try:
                    Append_management(
                        fc,
                        targetpath + '.' + currentPathSettings.EFD + "/" + fc,
                        "NO_TEST", "#")
                except:
                    print 'there was a problem loading centerlines for' + ws
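A hedged usage sketch; the three arguments act as wildcards passed to ListWorkspaces, ListFeatureClasses, and ListTables respectively, and the names below are assumptions:

# Hypothetical call; "RoadCenterline" and "RoadAlias" are assumed layer/table names
LoadFinalStreets("*", "RoadCenterline", "RoadAlias")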
Example #11
    def checker(self):
        # type: () -> bool
        try:
            env.workspace = self.in_db
            for dataset in ListDatasets():
                for fc in ListFeatureClasses(feature_dataset=dataset):
                    self.__fc = fc
                    self.__fc_fields = ListFields(self.__fc)
                    for installation in self.__get_installations(fc):
                        if installation:
                            self.__installation_field_check(installation)

        except Exception as e:
            self.log.exception(e.message)
            raise Exit()
        else:
            self.__write_result_to_table(self.__attributes)
            return True
Example #12
def process_datasets(from_db,
                     to_db=None,
                     foreach_layer=None,
                     foreach_table=None,
                     foreach_dataset=None):
    """
    creates the projected datasets necessary and then calls the function
    to perform additional functions on each layer and table
    from_db - the input database to pull from
    to_db - the output database to place the processed data
    foreach_layer - the function to process each layer with
    foreach_table - the function to process each table with
    """
    #get the datasets in the input workspace
    from arcpy import AddMessage, AddWarning, CreateFeatureDataset_management, ListDatasets, Exists, env, ExecuteError
    AddMessage('Workspace: {}'.format(env.workspace))

    #handle feature classes at the top level. these are moved into _top dataset for
    #automatic projection handling
    copy_tables(from_db, to_db, foreach_table)

    process_feature_classes(from_db, to_db, foreach_layer)

    in_datasets = ListDatasets()
    if len(in_datasets):
        for dataset in in_datasets:
            to_dataset = get_name(dataset)
            from_dataset_path = '{}/{}'.format(from_db, dataset)
            to_dataset_path = '{}/{}'.format(to_db, to_dataset)
            AddMessage('Processing Dataset: {}'.format(from_dataset_path))
            try:
                if foreach_dataset:
                    foreach_dataset(from_db, to_db, dataset)
                else:
                    CreateFeatureDataset_management(to_db, to_dataset,
                                                    env.outputCoordinateSystem)
            except ExecuteError as e:
                AddWarning('Could not create dataset {}, {}'.format(
                    to_dataset, e))

            process_feature_classes(from_dataset_path, to_dataset_path,
                                    foreach_layer)
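A minimal usage sketch; the paths are placeholders and the callback only logs each dataset, matching the (from_db, to_db, dataset) call shown above (copy_tables and process_feature_classes must already be defined in the module):

# Hypothetical usage; paths and callback are placeholders
from arcpy import AddMessage, env

def log_dataset(from_db, to_db, dataset):
    # stand-in callback: report the dataset instead of creating it
    AddMessage('Would process {0}/{1} into {2}'.format(from_db, dataset, to_db))

env.workspace = r"C:\temp\source.gdb"
process_datasets(r"C:\temp\source.gdb", r"C:\temp\target.gdb",
                 foreach_dataset=log_dataset)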
Example #13
gdbdesc = Describe(OriginalGDB)
gdbpath = gdbdesc.Path
gdbbasename = gdbdesc.Basename
gdbname = gdbdesc.Name
gdbexts = gdbdesc.Extension
del gdb

gdb = gdbpath + "/" + gdbbasename + "_RoadChecks." + gdbexts
print gdb
if Exists(gdb):
    Delete(gdb)
Copy_management(OriginalGDB, gdb)  #copy

env.workspace = gdb

fd = ListDatasets("NG*", "Feature")
try:
    fdNG = fd[0]
except:
    fd = ListDatasets("*", "Feature")
    fdNG = fd[0]

uniqueIdInFields = [
    "OBJECTID", "COUNTY_L", "COUNTY_R", "STATE_L", "STATE_R", "L_F_ADD",
    "L_T_ADD", "R_F_ADD", "R_T_ADD", "UniqueNo", "LRSKEY", "SHAPE_MILES"
]
uniqueIdOutFields = ["OBJECTID", "UniqueNo", "LRSKEY"]

MakeFeatureLayer_management(gdb + r"/" + fdNG + "/RoadCenterline",
                            "RoadCenterline", "#", "#", "#")
lyr = "RoadCenterline"
Example #14
def main():
    from arcpy import CheckExtension, CheckOutExtension, CheckInExtension, ExecuteError, GetMessages, AddError,\
        ListDatasets, env, SetProgressor, SetProgressorLabel, SetProgressorPosition, ResetProgressor, Exists
    from arcpy.management import CreateFileGDB, CreateMosaicDataset, AddRastersToMosaicDataset
    from arcpy import Describe
    from os.path import join, exists, dirname
    from os import mkdir, makedirs

    class LicenseError(Exception):
        pass

    try:
        if CheckExtension("ImageAnalyst") == "Available":
            CheckOutExtension("ImageAnalyst")
        else:
            # raise a custom exception
            raise LicenseError
        try:
            from PIL import Image
        except ModuleNotFoundError:
            AddError(
                "PILLOW Library Not Detected. Install using Python Manager in ArcGIS Pro"
            )
            print(
                "PILLOW Library Not Detected. Install using Python Manager in ArcGIS Pro"
            )
            exit()

        env.workspace = in_mosaic_gdb
        mosaics = ListDatasets("*", "Mosaic")
        file_count = len(mosaics)
        count = 0
        SetProgressor("step", "Begin Processing Files...", 0, file_count, 1)
        if not exists(out_folder):
            makedirs(out_folder)
        fileGDB = join(out_folder, "ortho_mosaics.gdb")
        if not Exists(fileGDB):
            CreateFileGDB(out_folder, "ortho_mosaics.gdb")
        for mosaic in mosaics:
            print("processing mosaic {0} of {1}".format(count + 1, file_count))
            in_mosaic = join(in_mosaic_gdb, mosaic)
            i_list, extent = get_images_and_stats(
                in_mosaic
            )  # Obtain image statistics and info from mosaic for processing
            for i in i_list:  # Check that output folder is not the path of i
                if out_folder == dirname(i[0]):
                    AddError(
                        "outFolder cannot be the same folder/directory as images referenced in the mosaic dataset"
                    )
                    exit()
            if not exists(out_folder):
                makedirs(out_folder)
            out_tile_folder = join(out_folder, "tiles{}".format(count))
            mkdir(out_tile_folder)
            SetProgressorLabel("Texturing Mosaic {0}...".format(count))
            texture_images(i_list, extent, in_texture, in_polygon,
                           out_tile_folder, method,
                           blur_distance)  # Generate Texture-Masked tiles

            mosaic_name = "tiles{}_".format(count)
            mosaic_dataset = join(fileGDB, mosaic_name)
            SetProgressorLabel(
                "Creating Mosaic Dataset for Tiles of {0}...".format(mosaic))
            sr = Describe(in_mosaic).spatialReference
            CreateMosaicDataset(fileGDB, mosaic_name, sr, num_bands,
                                pixel_depth, product_definition,
                                product_band_definitions)
            SetProgressorLabel(
                "Adding of {0} to Mosaic Dataset...".format(mosaic))
            AddRastersToMosaicDataset(mosaic_dataset, "Raster Dataset",
                                      out_tile_folder, "UPDATE_CELL_SIZES",
                                      "UPDATE_BOUNDARY", "NO_OVERVIEWS", None,
                                      0, 1500, None, '', "SUBFOLDERS",
                                      "ALLOW_DUPLICATES", "NO_PYRAMIDS",
                                      "NO_STATISTICS", "NO_THUMBNAILS", '',
                                      "NO_FORCE_SPATIAL_REFERENCE",
                                      "NO_STATISTICS", None, "NO_PIXEL_CACHE")
            SetProgressorPosition()
            count += 1
        ResetProgressor()
        CheckInExtension("ImageAnalyst")
    except LicenseError:
        AddError("Image Analyst license is unavailable")
        print("Image Analyst license is unavailable")
    except ExecuteError:
        print(GetMessages(2))