def replicate_database(db_connection, target_gdb):
    """Replicates the SDE database at `db_connection` to the FGDB at the path `target_gdb`."""
    log = logging.getLogger("script_log")

    if arcpy.Exists(db_connection):
        cnt_sde = len(get_database_items(db_connection))
        log.info("Geodatabase being copied: %s -- Feature Count: %s",
                 db_connection, cnt_sde)
        if arcpy.Exists(target_gdb):
            cnt_gdb = len(get_database_items(target_gdb))
            log.info("Old Target Geodatabase: %s -- Feature Count: %s",
                     target_gdb, cnt_gdb)
            try:
                shutil.rmtree(target_gdb)
                log.info("Deleted Old %s", os.path.split(target_gdb)[-1])
            except Exception as ex:
                log.error(ex)

        (gdb_path, gdb_name) = os.path.split(target_gdb)
        log.info("Now Creating New %s", gdb_name)
        arcpy.CreateFileGDB_management(gdb_path, gdb_name)

        arcpy.env.workspace = db_connection

        try:
            datasets = [arcpy.Describe(a).name for a in arcpy.ListDatasets()]
        except Exception as ex:
            datasets = []
            log.error(ex)
        try:
            feature_classes = [
                arcpy.Describe(a).name for a in arcpy.ListFeatureClasses()
            ]
        except Exception as ex:
            feature_classes = []
            log.error(ex)
        try:
            tables = [arcpy.Describe(a).name for a in arcpy.ListTables()]
        except Exception as ex:
            tables = []
            log.error(ex)

        # Combine the three lists above into a single list to iterate over
        all_db_data = datasets + feature_classes + tables

        for source_path in all_db_data:
            target_name = source_path.split(".")[-1]  # strip any SDE owner/schema prefix
            target_path = os.path.join(target_gdb, target_name)
            if not arcpy.Exists(target_path):
                try:
                    log.info("Attempting to Copy %s to %s", target_name,
                             target_path)
                    arcpy.Copy_management(source_path, target_path)
                    log.info("Finished copying %s to %s", target_name,
                             target_path)
                except Exception as ex:
                    log.error("Unable to copy %s to %s", target_name,
                              target_path)
                    log.error(ex)
            else:
                log.warning("%s already exists....skipping.....", target_name)

        cnt_gdb = len(get_database_items(target_gdb))
        log.info("Completed replication of %s -- Feature Count: %s",
                 db_connection, cnt_gdb)

    else:
        log.warning(
            "%s does not exist or is not supported! \
            Please check the database path and try again.",
            db_connection,
        )
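
# `get_database_items` is defined elsewhere in the original script. A minimal
# sketch, assuming it returns every feature class and table in a workspace
# (replicate_database only uses its length for the feature counts):
def get_database_items(workspace):
    original = arcpy.env.workspace
    arcpy.env.workspace = workspace
    items = (arcpy.ListFeatureClasses() or []) + (arcpy.ListTables() or [])
    for dataset in arcpy.ListDatasets(feature_type='Feature') or []:
        items += arcpy.ListFeatureClasses(feature_dataset=dataset) or []
    arcpy.env.workspace = original
    return items
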
Example #2
def main(thisDB, coordSystem, nCrossSections):
    # create feature dataset GeologicMap
    addMsgAndPrint('  Creating feature dataset GeologicMap...')
    try:
        arcpy.CreateFeatureDataset_management(thisDB, 'GeologicMap',
                                              coordSystem)
    except:
        addMsgAndPrint(arcpy.GetMessages(2))

    # create feature classes in GeologicMap
    # poly feature classes
    featureClasses = ['MapUnitPolys']
    for fc in ['DataSourcePolys', 'MapUnitOverlayPolys', 'OverlayPolys']:
        if fc in OptionalElements:
            featureClasses.append(fc)
    for featureClass in featureClasses:
        fieldDefs = tableDict[featureClass]
        createFeatureClass(thisDB, 'GeologicMap', featureClass, 'POLYGON',
                           fieldDefs)

    # line feature classes
    featureClasses = ['ContactsAndFaults']
    for fc in ['GeologicLines', 'CartographicLines', 'IsoValueLines']:
        if fc in OptionalElements:
            featureClasses.append(fc)
    if debug:
        addMsgAndPrint('Feature classes = ' + str(featureClasses))
    for featureClass in featureClasses:
        fieldDefs = tableDict[featureClass]
        if featureClass in ['ContactsAndFaults', 'GeologicLines'] and addLTYPE:
            fieldDefs.append(['LTYPE', 'String', 'NullsOK', 50])
        createFeatureClass(thisDB, 'GeologicMap', featureClass, 'POLYLINE',
                           fieldDefs)

    # point feature classes
    featureClasses = []
    for fc in [
            'OrientationPoints', 'GeochronPoints', 'FossilPoints', 'Stations',
            'GenericSamples', 'GenericPoints'
    ]:
        if fc in OptionalElements:
            featureClasses.append(fc)
    for featureClass in featureClasses:
        if featureClass == 'MapUnitPoints':  # not in the candidate list above, so this branch is normally skipped
            fieldDefs = tableDict['MapUnitPolys']
            if addLTYPE:
                fieldDefs.append(['PTYPE', 'String', 'NullsOK', 50])
        else:
            fieldDefs = tableDict[featureClass]
            if addLTYPE and featureClass in ['OrientationPoints']:
                fieldDefs.append(['PTTYPE', 'String', 'NullsOK', 50])
        createFeatureClass(thisDB, 'GeologicMap', featureClass, 'POINT',
                           fieldDefs)

    # create feature dataset CorrelationOfMapUnits
    if 'CorrelationOfMapUnits' in OptionalElements:
        addMsgAndPrint('  Creating feature dataset CorrelationOfMapUnits...')
        # no coordinate system: the CMU is a page-layout diagram, not a map
        arcpy.CreateFeatureDataset_management(thisDB, 'CorrelationOfMapUnits')
        fieldDefs = tableDict['CMUMapUnitPolys']
        createFeatureClass(thisDB, 'CorrelationOfMapUnits', 'CMUMapUnitPolys',
                           'POLYGON', fieldDefs)
        fieldDefs = tableDict['CMULines']
        createFeatureClass(thisDB, 'CorrelationOfMapUnits', 'CMULines',
                           'POLYLINE', fieldDefs)
        fieldDefs = tableDict['CMUPoints']
        createFeatureClass(thisDB, 'CorrelationOfMapUnits', 'CMUPoints',
                           'POINT', fieldDefs)

    # create CrossSections
    if nCrossSections > 26:
        nCrossSections = 26
    if nCrossSections < 0:
        nCrossSections = 0
    # note space in position 0
    alphabet = ' ABCDEFGHIJKLMNOPQRSTUVWXYZ'

    for n in range(1, nCrossSections + 1):
        xsLetter = alphabet[n]
        xsName = 'CrossSection' + xsLetter
        xsN = 'CS' + xsLetter
        #create feature dataset CrossSectionA
        addMsgAndPrint('  Creating feature data set CrossSection' + xsLetter +
                       '...')
        arcpy.CreateFeatureDataset_management(thisDB, xsName)
        # copy the field definitions so the shared tableDict entries are not
        # renamed (and LTYPE/PTTYPE not re-appended) on every pass of this loop
        fieldDefs = [list(row) for row in tableDict['MapUnitPolys']]
        fieldDefs[0][0] = xsN + 'MapUnitPolys_ID'
        createFeatureClass(thisDB, xsName, xsN + 'MapUnitPolys', 'POLYGON',
                           fieldDefs)
        fieldDefs = [list(row) for row in tableDict['ContactsAndFaults']]
        if addLTYPE:
            fieldDefs.append(['LTYPE', 'String', 'NullsOK', 50])
        fieldDefs[0][0] = xsN + 'ContactsAndFaults_ID'
        createFeatureClass(thisDB, xsName, xsN + 'ContactsAndFaults',
                           'POLYLINE', fieldDefs)
        fieldDefs = [list(row) for row in tableDict['OrientationPoints']]
        if addLTYPE:
            fieldDefs.append(['PTTYPE', 'String', 'NullsOK', 50])
        fieldDefs[0][0] = xsN + 'OrientationPoints_ID'
        createFeatureClass(thisDB, xsName, xsN + 'OrientationPoints', 'POINT',
                           fieldDefs)

    # create tables
    tables = ['DescriptionOfMapUnits', 'DataSources', 'Glossary']
    for tb in [
            'RepurposedSymbols', 'StandardLithology', 'GeologicEvents',
            'MiscellaneousMapInformation'
    ]:
        if tb in OptionalElements:
            tables.append(tb)
    for table in tables:
        addMsgAndPrint('  Creating table ' + table + '...')
        try:
            arcpy.CreateTable_management(thisDB, table)
            fieldDefs = tableDict[table]
            for fDef in fieldDefs:
                try:
                    if fDef[1] == 'String':
                        arcpy.AddField_management(thisDB + '/' + table,
                                                  fDef[0], transDict[fDef[1]],
                                                  '#', '#', fDef[3], '#',
                                                  transDict[fDef[2]])
                    else:
                        arcpy.AddField_management(thisDB + '/' + table,
                                                  fDef[0], transDict[fDef[1]],
                                                  '#', '#', '#', '#',
                                                  transDict[fDef[2]])
                except:
                    addMsgAndPrint('Failed to add field ' + fDef[0] +
                                   ' to table ' + table)
                    addMsgAndPrint(arcpy.GetMessages(2))
        except:
            addMsgAndPrint(arcpy.GetMessages())

    ### GeoMaterials
    addMsgAndPrint('  Setting up GeoMaterials table and domains...')
    #  Copy GeoMaterials table
    arcpy.Copy_management(
        os.path.dirname(sys.argv[0]) +
        '/../Resources/GeMS_lib.gdb/GeoMaterialDict',
        thisDB + '/GeoMaterialDict')
    #   make GeoMaterials domain
    arcpy.TableToDomain_management(thisDB + '/GeoMaterialDict', 'GeoMaterial',
                                   'IndentedName', thisDB, 'GeoMaterials')
    #   attach it to DMU field GeoMaterial
    arcpy.AssignDomainToField_management(thisDB + '/DescriptionOfMapUnits',
                                         'GeoMaterial', 'GeoMaterials')
    #  Make GeoMaterialConfs domain, attach it to DMU field GeoMaterialConf
    arcpy.CreateDomain_management(thisDB, 'GeoMaterialConfidenceValues', '',
                                  'TEXT', 'CODED')
    for val in GeoMaterialConfidenceValues:
        arcpy.AddCodedValueToDomain_management(thisDB,
                                               'GeoMaterialConfidenceValues',
                                               val, val)
    arcpy.AssignDomainToField_management(thisDB + '/DescriptionOfMapUnits',
                                         'GeoMaterialConfidence',
                                         'GeoMaterialConfidenceValues')

    #Confidence domains, Glossary entries, and DataSources entry
    if addConfs:
        addMsgAndPrint(
            '  Adding standard ExistenceConfidence and IdentityConfidence domains'
        )
        #  create domain, add domain values, and link domain to appropriate fields
        addMsgAndPrint(
            '    Creating domain, linking domain to appropriate fields')
        arcpy.CreateDomain_management(thisDB, 'ExIDConfidenceValues', '',
                                      'TEXT', 'CODED')
        for item in DefaultExIDConfidenceValues:  # items are [term, definition, source]
            code = item[0]
            arcpy.AddCodedValueToDomain_management(thisDB,
                                                   'ExIDConfidenceValues',
                                                   code, code)
        arcpy.env.workspace = thisDB
        dataSets = arcpy.ListDatasets()
        for ds in dataSets:
            arcpy.env.workspace = thisDB + '/' + ds
            fcs = arcpy.ListFeatureClasses()
            for fc in fcs:
                fieldNames = fieldNameList(fc)
                for fn in fieldNames:
                    if fn in ('ExistenceConfidence', 'IdentityConfidence',
                              'ScientificConfidence'):
                        #addMsgAndPrint('    '+ds+'/'+fc+':'+fn)
                        arcpy.AssignDomainToField_management(
                            thisDB + '/' + ds + '/' + fc, fn,
                            'ExIDConfidenceValues')
        # add definitions of domain values to Glossary
        addMsgAndPrint('    Adding domain values to Glossary')
        ## create insert cursor on Glossary
        cursor = arcpy.da.InsertCursor(
            thisDB + '/Glossary', ['Term', 'Definition', 'DefinitionSourceID'])
        for item in DefaultExIDConfidenceValues:
            cursor.insertRow((item[0], item[1], item[2]))
        del cursor
        # add definitionsource to DataSources
        addMsgAndPrint('    Adding definition source to DataSources')
        ## create insert cursor on DataSources
        cursor = arcpy.da.InsertCursor(thisDB + '/DataSources',
                                       ['DataSources_ID', 'Source', 'URL'])
        cursor.insertRow((
            'FGDC-STD-013-2006',
            'Federal Geographic Data Committee [prepared for the Federal Geographic Data Committee by the U.S. Geological Survey], 2006, FGDC Digital Cartographic Standard for Geologic Map Symbolization: Reston, Va., Federal Geographic Data Committee Document Number FGDC-STD-013-2006, 290 p., 2 plates.',
            'https://ngmdb.usgs.gov/fgdc_gds/geolsymstd.php'))
        del cursor

    # if cartoReps, add cartographic representations to all feature classes
    # trackEdits, add editor tracking to all feature classes and tables
    if cartoReps or trackEdits:
        arcpy.env.workspace = thisDB
        tables = arcpy.ListTables()
        datasets = arcpy.ListDatasets()
        for dataset in datasets:
            addMsgAndPrint('  Dataset ' + dataset)
            arcpy.env.workspace = thisDB + '/' + dataset
            fcs = arcpy.ListFeatureClasses()
            for fc in fcs:
                hasReps, repLyr = cartoRepsExistAndLayer(fc)
                if cartoReps and hasReps:
                    addMsgAndPrint(
                        '    Adding cartographic representations to ' + fc)
                    try:
                        arcpy.AddRepresentation_cartography(
                            fc, fc + '_rep1', 'RuleID', 'Override1', default,
                            repLyr, 'NO_ASSIGN')
                        """
                            Note the 1 suffix on the representation name (fc+'_rep1') and the RuleID and Override1 fields.
                        If at some later time we wish to add additional representations to a feature class, each will
                        require it's own RuleID and Override fields which may be identified, and tied to the appropriate
                        representation, by suffixes 2, 3, ...
                            Naming representations fc+'_rep'+str(n) should be sufficient to identify each representation in a 
                        geodatabase uniquely, and allow for multiple representations within a single feature class.
                            It appears that ArcGIS provides no means of scripting an inventory of representations within
                        feature class or geodatabase. So, the convenience of establishing a coded-value domain that ties
                        representation rule IDs (consecutive integers) to some sort of useful text identifier becomes a
                        necessity for flagging the presence of a representation: One CAN script the inventory of domains
                        in a geodatabase. Run arcpy.da.ListDomains. Check the result for names of the form
                        <featureClassName>_rep??_Rule and voila, you've got a list of representations (and their associated
                        feature classes) in the geodatabase.
                            Moral: If you add a representation, be sure to add an associated coded-value domain and name
                        it appropriately!
                        """
                    except:
                        addMsgAndPrint(arcpy.GetMessages(2))
                if trackEdits:
                    addTracking(fc)
        if trackEdits:
            addMsgAndPrint('  Tables ')
            arcpy.env.workspace = thisDB
            for aTable in tables:
                if aTable != 'GeoMaterialDict':
                    addTracking(aTable)
Example #3
        infile.write("An error occured processing parcel " + str(value) +
                     ".\n")
        infile.write(arcpy.GetMessages() + "\n")
        arcpy.SelectLayerByAttribute_management(PointsFL, "CLEAR_SELECTION")
        arcpy.SelectLayerByLocation_management(footprintFL, "CLEAR_SELECTION")
        del row
        del sc
    del row
del sc

#Merge all summary tables into a single table
arcpy.SetProgressorLabel("Creating final viewshed table")

arcpy.env.workspace = SummaryTables
FinalTable = outputWorkspace + "\\Final_Viewsheds_" + Year + ".dbf"
Tables = arcpy.ListTables()
arcpy.Merge_management(Tables, FinalTable)

#Delete unneeded fields from final table
arcpy.DeleteField_management(FinalTable, ["FREQUENCY", "SUM_GRIDCO"])

print "Final viewshed table for", Year, "is located in", outputWorkspace, "\n"
arcpy.AddMessage("Final viewshed table for " + Year + " is located in " +
                 outputWorkspace + "\n")

#save copy of table to CSV format

import win32com.client
try:

    excel = win32com.client.Dispatch('Excel.Application')
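
    # A dependency-free sketch (commented out, not part of the original) of the
    # CSV copy announced above, using FinalTable from earlier:
    # import csv
    # fields = [f.name for f in arcpy.ListFields(FinalTable)]
    # with open(FinalTable.replace(".dbf", ".csv"), "wb") as out:
    #     writer = csv.writer(out)
    #     writer.writerow(fields)
    #     with arcpy.da.SearchCursor(FinalTable, fields) as rows:
    #         for row in rows:
    #             writer.writerow(list(row))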
Example #4
def crs9_check_preprod_feature_counts(args):
    # parameters
    labelsPath = args[0]
    preprodPath = args[1]
    preprodPrefix = args[2]
    stagingPath = args[3]
    stagingPrefix = args[4]

    # log function
    log_msg('calling {}'.format(script_name))

    err_message = None

    try:

        ### Get lists of tables and FCs
        # Get tables and FCs from labels GDB
        log_msg('Getting lists of tables and FCs from labels GDB...')

        arcpy.env.workspace = labelsPath
        fcllblg = arcpy.ListFeatureClasses()
        tblllblg = arcpy.ListTables()
        # Get tables and FCs from staging SDE
        log_msg('Getting lists of tables and FCs from staging SDE...')
        arcpy.env.workspace = stagingPath
        fclstgs = arcpy.ListFeatureClasses()
        tbllstgs = arcpy.ListTables()
        # Get tables and FCs from preprod SDE
        log_msg('Getting lists of tables and FCs from preprod SDE...')
        arcpy.env.workspace = preprodPath
        fclpprs = arcpy.ListFeatureClasses()
        tbllpprs = arcpy.ListTables()
        ### Work through lists of feature classes
        log_msg('==> Checking FC counts for preprod...')
        arcpy.env.workspace = preprodPath
        # Get preprod values
        for pprsfc in fclpprs:
            # Ignore views and old data
            pprsfcname = pprsfc[len(preprodPrefix):]
            if pprsfcname.endswith("_1") or pprsfcname.endswith(
                    "_o") or pprsfcname.endswith("_vw") or pprsfcname.endswith(
                        "_oo"):
                log_msg('{} - ignoring...'.format(pprsfc))
            else:
                # Set prelim values
                pprsfccount = dummyVal
                stgsfccount = dummyVal
                lblgfccount = dummyVal
                # crsgfccount = dummyVal
                # Get preprod count
                pprsfccount = arcpy.GetCount_management(pprsfc).getOutput(0)
                log_msg('{} - preprod count = {}'.format(
                    pprsfcname, pprsfccount))
                # Find comparable staging FC
                for stgsfc in fclstgs:
                    stgsfcname = stgsfc[len(stagingPrefix):]
                    if stgsfcname == pprsfcname:
                        # Get staging count
                        stgsfcpath = os.path.join(stagingPath, stgsfc)
                        stgsfccount = arcpy.GetCount_management(
                            stgsfcpath).getOutput(0)
                        stgsfccountname = stgsfcname
                        break  # match found; stop scanning
                # Report staging count status
                if stgsfccount != dummyVal and stgsfccount != pprsfccount:
                    log_msg(
                        '*****ERROR!!!***** preprod count = {0} but staging count = {1}'
                        .format(pprsfccount, stgsfccount))
                elif stgsfccount == dummyVal:
                    log_msg('{} not found in staging SDE'.format(pprsfcname))
                else:
                    log_msg('{0} - staging count = {1}'.format(
                        stgsfccountname, stgsfccount))
                # Find comparable labels FC
                for lblgfc in fcllblg:
                    if lblgfc == pprsfcname:
                        # Get labels count
                        lblgfcpath = os.path.join(labelsPath, lblgfc)
                        lblgfccount = arcpy.GetCount_management(
                            lblgfcpath).getOutput(0)
                        lblgfccountname = lblgfc
                        break  # match found; stop scanning
                # Report labels count status
                if lblgfccount != dummyVal and lblgfccount != pprsfccount:
                    log_msg(
                        '*****ERROR!!!***** preprod count = {0} but labels count = {1}'
                        .format(pprsfccount, lblgfccount))
                elif lblgfccount == dummyVal:
                    log_msg('{} not found in labels GDB'.format(pprsfcname))
                else:
                    log_msg('{0} - labels count = {1}'.format(
                        lblgfccountname, lblgfccount))

        ### Work through lists of tables
        log_msg('==> Checking table counts for preprod...')
        # Get preprod values
        for pprstbl in tbllpprs:
            # Ignore views and old data
            pprstblname = pprstbl[len(preprodPrefix):]
            if pprstblname.endswith("_o") or pprstblname.endswith("_vw") or pprstblname.endswith("_oo") or \
            pprstblname.startswith("mv_") or pprstblname.startswith("vw") or pprstblname.startswith("VW_"):
                log_msg('{} - ignoring...'.format(pprstblname))
            else:
                # Set prelim values
                pprstblcount = dummyVal
                stgstblcount = dummyVal
                lblgtblcount = dummyVal
                # Get preprod count
                #pprstblname = pprstbl[len(preprodPrefix):]
                if pprstblname in tblIgnoreList:
                    log_msg(
                        'WARNING: ignoring {} ***** manual check required *****'
                        .format(pprstblname))
                    continue
                else:
                    pprstblcount = arcpy.GetCount_management(
                        pprstbl).getOutput(0)
                    log_msg('{} - preprod count = {}'.format(
                        pprstblname, pprstblcount))
                    # Find comparable staging table
                    for stgstbl in tbllstgs:
                        stgstblname = stgstbl[len(stagingPrefix):]
                        if stgstblname == pprstblname:
                            # Get staging count
                            stgstblpath = os.path.join(stagingPath, stgstbl)
                            stgstblcount = arcpy.GetCount_management(
                                stgstblpath).getOutput(0)
                            stgstblcountname = stgstblname
                            break  # match found; stop scanning
                    # Report staging count status
                    if stgstblcount != dummyVal and stgstblcount != pprstblcount:
                        log_msg(
                            '*****ERROR!!!***** preprod count = {0} but staging count = {1}'
                            .format(pprstblcount, stgstblcount))
                    elif stgstblcount == dummyVal:
                        log_msg(
                            '{} not found in staging SDE'.format(pprstblname))
                    else:
                        log_msg('{0} - staging count = {1}'.format(
                            stgstblcountname, stgstblcount))
                    # Find comparable labels table
                    for lblgtbl in tblllblg:
                        if lblgtbl == pprstblname:
                            # Get labels count
                            lblgtblpath = os.path.join(labelsPath, lblgtbl)
                            lblgtblcount = arcpy.GetCount_management(
                                lblgtblpath).getOutput(0)
                            lblgtblcountname = lblgtbl
                            break  # match found; stop scanning
                    # Report labels count status
                    if lblgtblcount != dummyVal and lblgtblcount != pprstblcount:
                        log_msg(
                            '*****ERROR!!!***** preprod count = {0} but labels count = {1}'
                            .format(pprstblcount, lblgtblcount))
                    elif lblgtblcount == dummyVal:
                        log_msg(
                            '{} not found in labels GDB'.format(pprstblname))
                    else:
                        log_msg('{0} - labels count = {1}'.format(
                            lblgtblcountname, lblgtblcount))

        log_msg("Process time: %s \n" %
                str(datetime.datetime.now() - starttime))

    except Exception as e:
        err_message = "ERROR while running {0}: {1}".format(script_name, e)

    return err_message, log_messages
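
# A minimal sketch (not part of the original) factoring out the repeated
# suffix/prefix tests used above to skip views and old data:
def is_ignorable(name):
    return (name.endswith(('_1', '_o', '_oo', '_vw'))
            or name.startswith(('mv_', 'vw', 'VW_')))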
Example #5
        except:
            print 'I got an error; skipping this part'
        arcpy.Delete_management("selected" + str(i) + ".shp")
    arcpy.Merge_management(tableList, outputTable)
    
    print 'tables are merged'
    for i in range(0, len(tableList)):
        try:    
            arcpy.Delete_management("tblPart" +str(i))
        except:
            pass
    print "Table Created for Raster: " + os.sep + inValueRaster
    
print "joining tables"
env.workspace = outFolder
tList = arcpy.ListTables()
print str(tList) + " = tables in outfolder"
masterTableGDB = arcpy.CreateFileGDB_management(outFolder, "masterTableGDB", "CURRENT")
print str(masterTableGDB) + " = masterTableGDB"
arcpy.TableToGeodatabase_conversion(tList, masterTableGDB)
env.workspace = outFolder + os.sep + "masterTableGDB.gdb"
tList = arcpy.ListTables()
tbl = tList[0]    
masterTableGDB = str(masterTableGDB) + os.sep + "masterTableGDB"
arcpy.Copy_management(tbl,masterTableGDB)

stats = ["MIN","MEAN","MAX","RANGE","STD","SUM"]
for t in tList:
    for stat in stats:
        varName = t # formerly this was: #varName = t[:-4]
        varNameStat = varName[:3]+"_%s" %stat
Example #6
ODlv1_time_name = arcpy.GetParameterAsText(8)  # "Total_TravelTime"
ODlv1_mile_name = arcpy.GetParameterAsText(9)  #'Total_Miles'

ODlv2_time_name = arcpy.GetParameterAsText(10)  #"Total_Time"
ODlv2_mile_name = arcpy.GetParameterAsText(11)  #'Total_Miles'

speed_Geodesic = arcpy.GetParameterAsText(12)
if speed_Geodesic == '#' or speed_Geodesic == '':
    speed_Geodesic = 50
else:
    speed_Geodesic = int(speed_Geodesic)

# Handles lv1 returns (within 3 hrs drive, snapped to non-pedestrian roads)
arcpy.env.workspace = ODlv1
all_tbs = arcpy.ListTables()
for each_tb in all_tbs:
    print(each_tb)
    sc = arcpy.SearchCursor(each_tb)
    row = sc.next()
    O_id = ''
    zcta_dict = dict()
    while row:
        od_id_row = row.getValue(OD_id)
        cO_id = od_id_row.split(' - ')[0]
        D_id = od_id_row.split(' - ')[1]
        if cO_id != O_id:
            if len(zcta_dict) != 0:
                #print(outputfolder+'\\'+O_id[0:3]+'\\'+O_id+'.txt')
                f = open(
                    outputfolder + '\\' + O_id[0:3] + '\\' + O_id + '.txt',
def prep_data(fdhs):
    """
    This function helps in data preparation
    PARAMETERS
    ----------
    fdhs : list
        A list of FDH IDs for which you need the BOMs
    """
    crs = arcpy.SpatialReference(2231)
    arcpy.env.overwriteOutput = True
    arcpy.env.workspace = scratch
    print("FILTERING DATA")
    """
    If there are any Feature classes or Tables present in the scratch GDB,
    remove all of them
    """
    fcs = arcpy.ListFeatureClasses()
    for fc in fcs:
        arcpy.Delete_management(scratch + '/' + fc)
    tables = arcpy.ListTables()
    for table in tables:
        arcpy.Delete_management(scratch + '/' + table)

    # The keys present in the following dictionary are the feature classes
    # Data from these feature classes are gathered to generate BOM
    # and the values are the attributes present in those feature classes.
    # These attributes are later used (lines 147 - 166) in filtering the data
    name_dict = {
        'FiberLine': 'cablename',
        'FC_Structure': 'layer',
        'StructureLine': 'layer',
        'fdhpoint': 'fdhid',
        'SplicePoint': 'locationdescription',
        'FiberSlackLoop': 'designid'
    }
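
    # `codeblock` (used with CalculateField_management further below) is defined
    # outside this excerpt. A hypothetical sketch, assuming getfdh() derives an
    # FDH ID such as 'DIX101d-F31' from the attribute value:
    # codeblock = """
    # def getfdh(value):
    #     if value:
    #         parts = value.split('-')
    #         return parts[0] + '-' + parts[1][:3]
    #     return None
    # """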

    # The following fdh expression helps in generating a query of below form
    # ("fdhid" = 'DIX101d-F31' or "fdhid" = 'DIX101d-F32' or "fdhid" = 'DIX101d-F33')
    # which can later be used to select only the required FD Boundaries
    fdh_exp = "(" + " or ".join(["fdhid = '{0}'".format(x)
                                 for x in fdhs]) + ")"
    fdh_exp = fdh_exp.encode('utf-8').strip()
    # Select only those FDH Boundaries for which the BOMs needs to be generated
    arcpy.Select_analysis(gdb + "\\fdhboundary", scratch + "\\fdhs", fdh_exp)
    """ Exlanations for Queries used inside select_analysis for the for loop part that comes next

    # Query for Structure and Conduit
    # 		Select only those structures and conduits for which the status is 'Preliminary'
    #       and the ones which are present inside the FDH Boundaries we are working on (This part is
    #       handled using Intersect_analysis)
    # Then for the next elif part, the queries are much similar to the above queries and so are self explanatory
    # Same goes for final else part
    """

    for fc in name_dict:  # ["FiberOpticCable", "FC_Structure", "FC_Conduit", "fdhpoint", "SplicePoint"]
        fieldnames = [
            field.name for field in arcpy.ListFields(gdb + "\\" + fc)
        ]

        if fc == "SplicePoint":
            arcpy.Select_analysis(gdb + "\\" + fc, scratch + "\\" + fc)

        elif fc in ['FC_Structure', 'StructureLine']:
            arcpy.Select_analysis(gdb + "\\" + fc, scratch + "\\temp_" + fc,
                                  "inventory_status_code = 'Preliminary'")
            arcpy.Intersect_analysis(
                [scratch + "\\temp_" + fc, scratch + "\\fdhs"],
                scratch + "\\" + fc)
        elif "inventory_status_code" in fieldnames:
            arcpy.Select_analysis(
                gdb + "\\" + fc, scratch + "\\pre_" + fc, "(" + " or ".join(
                    ["{0} like '{1}%'".format(name_dict[fc], x)
                     for x in fdhs]) +
                ") and inventory_status_code = 'Preliminary'")
            arcpy.Select_analysis(
                gdb + "\\" + fc, scratch + "\\" + fc, "(" + " or ".join(
                    ["{0} like '{1}%'".format(name_dict[fc], x)
                     for x in fdhs]) + ")")
            arcpy.AddField_management(scratch + "\\pre_" + fc, "fdhid", "TEXT")
            arcpy.CalculateField_management(
                scratch + "\\pre_" + fc, "fdhid",
                "getfdh(!{0}!)".format(name_dict[fc]), "PYTHON_9.3", codeblock)
        else:
            arcpy.Select_analysis(
                gdb + "\\" + fc, scratch + "\\" + fc, "(" + " or ".join(
                    ["{0} like '{1}%'".format(name_dict[fc], x)
                     for x in fdhs]) + ")")

        # Make sure there is an 'fdhid' column in every feature class; this is not
        # strictly required, it just makes some later geoprocessing operations faster.
        fieldnames = [
            field.name for field in arcpy.ListFields(scratch + "\\" + fc)
        ]
        if "fdhid" not in fieldnames:
            arcpy.AddField_management(scratch + "\\" + fc, "fdhid", "TEXT")
            arcpy.CalculateField_management(
                scratch + "\\" + fc, "fdhid",
                "getfdh(!{0}!)".format(name_dict[fc]), "PYTHON_9.3", codeblock)

    # Select only Access Fiber, changed 12/07 to grab all fiber intersecting an FDH, and included 'Lateral' infrastructure class query so that 288 cts are counted.
    arcpy.Intersect_analysis([gdb + "\\FiberLine", scratch + "\\fdhs"],
                             scratch + "\\af_1", '', '', 'LINE')
    arcpy.Select_analysis(
        scratch + "\\af_1", scratch + "\\af",
        "infrastructureclass = 'Access' OR infrastructureclass = 'Lateral'")

    # Get the end points of the Access Fiber
    get_end_points(scratch + "\\af", scratch + "\\af_ends", "BOTH_ENDS")

    # Get those fiber ends which intersects with Splice Point
    arcpy.SpatialJoin_analysis(scratch + "\\SplicePoint",
                               scratch + "\\af_ends", scratch + "\\af_sc_join",
                               "JOIN_ONE_TO_MANY", "KEEP_ALL", "", "INTERSECT",
                               "")

    # Dissolve the output from the previous step so there is only one entry even at
    # points where multiple fiber cables intersect a splice point. Only the cable
    # with the maximum fiber count is considered, hence ["fibercount", "MAX"].

    arcpy.Dissolve_management(
        scratch + "\\af_sc_join", scratch + "\\final_scs", [
            "locationdescription", "splice_type", "splice_count", "fdhid",
            "fiber_assignments", "spliceenclosuremodelnumber"
        ], [["fibercount", "MAX"]])  # "cable_size",
    arcpy.AlterField_management(scratch + "\\final_scs", "MAX_fibercount",
                                "fcount", "fcount")
    arcpy.AlterField_management(scratch + "\\final_scs",
                                "spliceenclosuremodelnumber", "sc_size",
                                "sc_size")

    # The below set of lines (220-227) creates a feature class named final_vaults.
    # A new attribute named 'pvault' is added and its value is either 'Y' or 'N' -
    # changed 12/07/2020 to only include preliminary structures (pvault = 'N')

    # Added prelim_vaults 12/07/2020
    arcpy.Select_analysis(gdb + "\\FC_Structure", scratch + "\\prelim_vaults",
                          "inventory_status_code = 'Preliminary'")

    arcpy.AddField_management(scratch + "\\FC_Structure", "pvault", "TEXT")
    arcpy.MakeFeatureLayer_management(scratch + "\\FC_Structure", "vaults")
    # arcpy.CalculateField_management("vaults", "pvault", "'N'", "PYTHON_9.3", "")
    arcpy.SelectLayerByLocation_management("vaults", "INTERSECT",
                                           scratch + "\\prelim_vaults", "",
                                           "NEW_SELECTION")
    arcpy.CalculateField_management("vaults", "pvault", "'N'", "PYTHON_9.3",
                                    "")
    arcpy.SelectLayerByAttribute_management("vaults", "CLEAR_SELECTION")
    arcpy.CopyFeatures_management("vaults", scratch + "\\final_vaults")

    # The following set of lines (234-240) determines whether an access fiber cable is an FDH cable.
    # Any access fiber cable that intersects an FDH point is an 'FDH cable.'
    # So we add a new field named 'fdhcable' whose value is 'Y' or 'N':
    # 'Y' means the fiber is an FDH cable; otherwise it is not.
    # The final result is copied into the scratch GDB, just like the vaults.

    arcpy.AddField_management(scratch + "\\af", "fdhcable", "TEXT")
    arcpy.MakeFeatureLayer_management(scratch + "\\af", "fiber")
    arcpy.SelectLayerByLocation_management("fiber", "INTERSECT",
                                           scratch + "\\fdhpoint", "",
                                           "NEW_SELECTION")
    arcpy.CalculateField_management("fiber", "fdhcable", "'Y'", "PYTHON_9.3",
                                    "")
    arcpy.SelectLayerByAttribute_management("fiber", "CLEAR_SELECTION")
    arcpy.CopyFeatures_management("fiber", scratch + "\\final_fiber")

    arcpy.AddGeometryAttributes_management(scratch + "\\final_fiber",
                                           "LENGTH_GEODESIC", "FEET_US", "",
                                           crs)

    arcpy.Select_analysis(scratch + "\\StructureLine", scratch + "\\all_con",
                          "diameter = '2inch' or diameter = '1.25inch'")
    arcpy.AddField_management(scratch + "\\all_con", "shared", "TEXT")
    arcpy.CalculateField_management(scratch + "\\all_con", "shared", "'N'",
                                    "PYTHON_9.3", "")
    arcpy.SplitLine_management(scratch + "\\all_con", scratch + "\\con_split")
    get_end_points(scratch + "\\con_split", scratch + "\\con_mids", "MID")
    arcpy.AddField_management(scratch + "\\con_mids", "trench", "SHORT")
    arcpy.CalculateField_management(scratch + "\\con_mids", "trench", "1",
                                    "PYTHON_9.3", "")
    arcpy.Buffer_analysis(scratch + "\\con_mids", scratch + "\\con_mid_buff",
                          "1.5 FEET", "FULL", "ROUND")
    arcpy.Dissolve_management(scratch + "\\con_mid_buff",
                              scratch + "\\con_mid_diss", "", "",
                              "SINGLE_PART", "")
    arcpy.AddField_management(scratch + "\\con_mid_diss", "mid_id", "LONG")
    arcpy.CalculateField_management(scratch + "\\con_mid_diss", "mid_id",
                                    "!objectid!", "PYTHON_9.3", "")
    arcpy.SpatialJoin_analysis(scratch + "\\con_mid_buff",
                               scratch + "\\con_mid_diss",
                               scratch + "\\con_join_temp", "JOIN_ONE_TO_ONE",
                               "KEEP_ALL", "", "INTERSECT", "")
    arcpy.Dissolve_management(scratch + "\\con_join_temp",
                              scratch + "\\con_mid_diss_temp", ["mid_id"],
                              [["trench", "SUM"]], "SINGLE_PART", "")
    arcpy.AlterField_management(scratch + "\\con_mid_diss_temp", "SUM_trench",
                                "trench", "trench")
    arcpy.SpatialJoin_analysis(scratch + "\\con_split",
                               scratch + "\\con_mid_diss_temp",
                               scratch + "\\con_join", "JOIN_ONE_TO_ONE",
                               "KEEP_ALL", "", "INTERSECT", "")

    arcpy.Select_analysis(scratch + "\\con_join", scratch + "\\con2",
                          "diameter = '2inch'")
    arcpy.Select_analysis(scratch + "\\con_join", scratch + "\\con125",
                          "diameter = '1.25inch'")
    arcpy.Buffer_analysis(scratch + "\\con2", scratch + "\\con2_buff",
                          "2 FEET", "FULL", "ROUND", "ALL")
    arcpy.MakeFeatureLayer_management(scratch + "\\con125", "con125")
    arcpy.SelectLayerByLocation_management("con125", "WITHIN",
                                           scratch + "\\con2_buff", "",
                                           "NEW_SELECTION")
    arcpy.CalculateField_management("con125", "shared", "'Y'", "PYTHON_9.3",
                                    "")
    arcpy.SelectLayerByAttribute_management("con125", "CLEAR_SELECTION")
    arcpy.Merge_management([scratch + "\\con2", "con125"],
                           scratch + "\\final_con")
    arcpy.AddGeometryAttributes_management(scratch + "\\final_con",
                                           "LENGTH_GEODESIC", "FEET_US", "",
                                           crs)

    arcpy.Dissolve_management(scratch + "\\final_con", scratch + "\\trench",
                              ["fdhid"])
    arcpy.AddGeometryAttributes_management(scratch + "\\trench",
                                           "LENGTH_GEODESIC", "FEET_US", "",
                                           crs)

    print("DATA FILTERATION DONE..")
def crs1_repair_crs_data(args):
    # script name
    script_name = os.path.basename(__file__)

    # script parameters
    gdb = args[0]    
    log = args[1]

    # Set environment
    arcpy.env.workspace = gdb
    
    # log function
    etgLib.log_info(log, 'calling {}'.format(script_name), True)

    # start time
    starttime = datetime.datetime.now()

    # variables
    err_message = None
    fc_cadastre = 'CADASTRE'
    fld_cadastre = 'F_issues'
    fc_ownerfixed = 'OWNER_FIXED'
    fc_plan = 'PLAN'
    fc_parcelplan = 'PARCEL_PLAN'

    try:
              
        ## Delete any extraneous data
        etgLib.log_info(log, 'Delete any extraneous data',True)
        etgLib.delete_layer(fc_ownerfixed)

        # list all the feature classes
        pointfcl = []
        linefcl = []
        polyfcl = [] 
        tbll = []           

        fcs = arcpy.ListFeatureClasses()
        for fc in fcs:                
            desc = arcpy.Describe(fc)
            if desc.shapeType == 'Point':
                pointfcl.append(fc)
            elif desc.shapeType == 'Polyline':
                linefcl.append(fc)
            elif desc.shapeType == 'Polygon':
                polyfcl.append(fc)
            else:
                pass
        
        etgLib.log_info(log, 'Point feature classes:',True)
        print_list(pointfcl,log)
        etgLib.log_info(log, 'Polyline feature classes:',True)
        print_list(linefcl, log)
        etgLib.log_info(log, 'Polygon feature classes:',True)
        print_list(polyfcl, log)

        etgLib.log_info(log, 'tables:',True)
        tbll = arcpy.ListTables()
        print_list(tbll, log)
                    
        # add field to CADASTRE
        if not etgLib.field_exist(fc_cadastre, fld_cadastre):
            etgLib.log_info(log, 'adding field [{0}] in {1}'.format(fld_cadastre,fc_cadastre))
            arcpy.AddField_management(fc_cadastre,fld_cadastre,"TEXT","","",250)

        # Repair polygon geometry
        etgLib.log_info(log,'Repairing polygon geometries...')
        for polyfc in polyfcl:                
            preCount = arcpy.GetCount_management(polyfc).getOutput(0)
            arcpy.RepairGeometry_management(polyfc)
            postCount = arcpy.GetCount_management(polyfc).getOutput(0)
            etgLib.log_info(log,'{0}: features pre-repair {1} - post-repair {2}'.format(polyfc, preCount, postCount))
            
        # Rename PLAN to PARCEL_PLAN
        if arcpy.Exists(fc_plan):
            arcpy.Rename_management(fc_plan,fc_parcelplan)
            etgLib.log_info(log,'Renamed {0} to {1}'.format(fc_plan,fc_parcelplan))
        else:               
            etgLib.log_info(log,'ERROR: feature class {} not found'.format(fc_plan))

        etgLib.log_process_time(log,starttime)

        # ### Note that number of features pre- and post-repair should be emailed to Technical Specialist
        # print('***NOTE: next step/s = email pre- and post-repair feature counts to Technical Specialist...')

    except Exception as e:        
        err_message =  "ERROR while running {0}: {1}" .format(script_name,e)

    return err_message      
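
# Hypothetical sketch of the print_list helper used above (defined elsewhere in
# the original script): log each item on its own line.
def print_list(items, log):
    for item in items:
        etgLib.log_info(log, '   {}'.format(item))
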
Example #9
# Set variables
connection_file = raw_input('Enter SDE connection filename: ')

# Set the workspace
arcpy.env.workspace = 'Database Connections/' + connection_file

# Set a variable for the workspace
workspace = arcpy.env.workspace

# Enumerate Database elements
datasets = arcpy.ListDatasets(wild_card=None,
                              feature_type=None)

edit_items = arcpy.ListFeatureClasses() \
             + arcpy.ListTables() \
             + arcpy.ListRasters()

# Add features from inside datasets to edit_items list
for dataset in datasets:
    edit_items += arcpy.ListFeatureClasses(wild_card=None,
                                           feature_type=None,
                                           feature_dataset=dataset)

for item in edit_items:
    arcpy.EnableEditorTracking_management(in_dataset=item,
                                          creator_field="created_user",
                                          creation_date_field="created_date",
                                          last_editor_field="last_edited_user",
                                          last_edit_date_field="last_edited_date",
                                          add_fields="ADD_FIELDS",
Example #10
        def goNational():
            geocodeOption = raw_input("Geocode Results? (Yes or No): ")
            if geocodeOption.lower() == 'n' or geocodeOption.lower() == 'no':
                fGDBnational(projectFolder, csvName)
                for stateX in stateList:
                    listX = []
                    http = url_1 + what + url_2 + stateX
                    url = http
                    nationalScrapeNOGeocode(url, listX)
                    page_number = 2
                    while page_number < (pages + 1):
                        url = http + "&page=" + str(page_number)
                        nationalScrapeNOGeocode(url, listX)
                        page_number += 1
                    writeCSV(tempFolder, stateX, listX)
                # All CSVs added to fgdb as a table
                for stateX in stateList:
                    arcpy.TableToGeodatabase_conversion(
                        tempFolder + '/' + stateX + '.csv',
                        projectFolder + '/' + csvName + '_National.gdb')
                del env.workspace
                env.workspace = projectFolder + '/' + csvName + "_National.gdb"

                tableList = arcpy.ListTables()
                tableMerge = []
                for table in tableList:
                    tableMerge.append(table)
                arcpy.Merge_management(tableMerge, csvName + "_National")

                inTable = csvName + "_National"
                out_xls = projectFolder + '/' + csvName + "_National.xls"
                arcpy.TableToExcel_conversion(inTable, out_xls)

            elif geocodeOption.lower() == 'y' or geocodeOption.lower(
            ) == 'yes':
                fGDBnational(projectFolder, csvName)
                for stateX in stateList:
                    listX = []
                    http = url_1 + what + url_2 + stateX
                    url = http
                    nationalScrape(url, listX)
                    page_number = 2
                    while page_number < (pages + 1):
                        url = http + "&page=" + str(page_number)
                        nationalScrape(url, listX)
                        page_number += 1
                    writeCSV(tempFolder, stateX, listX)

                for stateX in stateList:
                    nationalCreatePoints(tempFolder, stateX, projectFolder,
                                         csvName)

                del env.workspace
                env.workspace = projectFolder + '/' + csvName + "_National.gdb"

                fcList = arcpy.ListFeatureClasses()
                fcMerge = []
                for fc in fcList:
                    fcMerge.append(fc)
                arcpy.Merge_management(fcMerge, csvName)
            else:
                print '\nPlease answer with Yes or No if you would like the results to be geocoded.\n'
                goNational()
    # index_dic = {
    #     'RainGate': '15-雨篦子探查表'
    #     }

    # Path to the centerline survey results spreadsheet
    xlspath_coordinate = r"D:\智能化管线项目\雨污水管线数据处理\原始文件\上交资料0730\总厂雨水最新补测数据\3-中线控制点探查表.xlsx".decode('utf-8')
    data_coordinate = xlrd.open_workbook(xlspath_coordinate)
    table_coor = data_coordinate.sheet_by_index(0)
    sheetname_coor = data_coordinate.sheet_names()[0]

    # Geodatabase file
    arcpy.env.workspace = r'D:\智能化管线项目\雨污水管线数据处理\提交成果\JLSHYWS0730雨补.gdb'
    # Folder path containing the data-table Excel files
    wks = r'D:\智能化管线项目\雨污水管线数据处理\原始文件\上交资料0730\总厂雨水最新补测数据'.decode('utf-8')
    feature_classes = arcpy.ListFeatureClasses(feature_dataset='JLYWS')
    table_classes = arcpy.ListTables()
    for ft_cls in (feature_classes + table_classes):
        # print ft_cls
        for root, dirs, filenames in os.walk(wks):  # os.walk yields (dirpath, dirnames, filenames)
            for filename in filenames:
                pre_filename = os.path.splitext(filename)[0]
                # print os.path.join(root, filename)
                try:
                    data = xlrd.open_workbook(os.path.join(root, filename))
                    table = data.sheet_by_index(0)
                    sheetname = data.sheet_names()[0]
                    table_1 = data.sheet_by_name('Domain')
                    nrows = table.nrows
                    ncols = table.ncols
                    featuredata = r'JLYWS\{}'.format(ft_cls)
                    if ft_cls.strip().upper() == table_1.cell(0,0).value.strip().upper() and ft_cls != 'ControlPoint':
Example #12
tblTempArea1 = outGDB + os.path.sep + "tbl_temp_web_prov_tot_area_merge"
tblTempArea2 = outGDB + os.path.sep + "tbl_temp_web_prov_tot_area_merge_summary"
tblTempArea3 = outGDB + os.path.sep + "tbl_temp_web_prov_tot_area_merge_summary_sort"
tblTempArea4 = outGDB + os.path.sep + "tbl_temp_web_prov_tot_area_merge_summary_sort_view"
tblFinalChgArea = outFolder + os.path.sep + "tbl_final_WEB_change_detection_prov_tot_speed_area.dbf"
tempTblProv1 = outGDB + os.path.sep + "tbl_temp_web_prov_tot_merge"
tempTblProv2 = outGDB + os.path.sep + "tbl_temp_web_prov_tot_merge_summary"
tempTblProv3 = outGDB + os.path.sep + "tbl_temp_web_prov_tot_merge_summary_view"
tblFinalChgProvTot = outFolder + os.path.sep + "tbl_final_WEB_change_detection_prov_tot.dbf"

#Create a Table of the current and previous Web_ProviderAndTechnology Layers
arcpy.TableToTable_conversion(inWebCur, outGDB, tblWebCur)
arcpy.TableToTable_conversion(inWebPrev, outGDB, tblWebPrev)

arcpy.env.workspace = outGDB
tblWebList = arcpy.ListTables("tbl_temp_web_prov_tot_*", "")
for tbl in tblWebList:
    desc = arcpy.Describe(tbl)
    tblName = desc.name
    arcpy.AddField_management(tbl, "SUMAREA", "TEXT", "", "", "200", "",
                              "NULLABLE", "NON_REQUIRED", "")
    arcpy.CalculateField_management(tbl, "SUMAREA", calcFldProvTotSpd,
                                    "PYTHON", "")
    arcpy.AddField_management(tbl, "SUMPROV", "TEXT", "", "", "150", "",
                              "NULLABLE", "NON_REQUIRED", "")
    arcpy.CalculateField_management(tbl, "SUMPROV", calcSumFldProv, "PYTHON",
                                    "")
    createSumTbl(tbl)

#Create the final web change detection table by Provider TOT Speed Area
arcpy.Merge_management([
Example #13
dropField = [e for e in fieldNames if e not in requiredFieldPlus]

arcpy.DeleteField_management(copyStatus, dropField)

#######################################################
# 3. Convert MasterList.xlsx to Enterprise geodatabase table
#######################################################
# 3. Convert MasterList.xlsx to gdb table (use 'Table to Table' geoprocessing tool)
arcpy.TableToTable_conversion(MasterList_xlsx, workSpace, 'MasterList')

#######################################################
# 4. Join Master List to Copied Feature Layer
#######################################################
## Extract a list of geodatabase tables
tableList = arcpy.ListTables("*")

## Extract only the MasterList geodatabase table
r = re.compile(".*Master")
rList = list(filter(r.match, tableList))
MasterList = ''.join(rList)

# 4. Join Field
## Get Join Field from MasterList gdb table: gain all fields except 'Id'
inputField = [f.name for f in arcpy.ListFields(MasterList)]
joinFields = [e for e in inputField if e not in (joinFieldID, joinFieldID.upper(), 'OBJECTID')]

## Extract a Field from MasterList and the Feature Layer to be used to join the two tables
t = [f.name for f in arcpy.ListFields(copyStatus)]
in_field = ' '.join(map(str, [f for f in t if f in (joinFieldID, joinFieldID.upper())]))
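
## A hedged sketch (hypothetical call, not part of the original) of the join
## these field lists are being prepared for:
# arcpy.JoinField_management(copyStatus, in_field, MasterList, joinFieldID, joinFields)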
 
Example #14
                    ])
                    arcpy.AddMessage("***Writing " + str(len(myValues)) +
                                     " unique values in the " +
                                     str(field.name) +
                                     " field of feature class " + str(fc) +
                                     " to Glossary***")
                    cursor = arcpy.da.InsertCursor(glossary,
                                                   ("Term", "TermSrcFld"))
                    for value in myValues:
                        cursor.insertRow([value, field.name])
                    del cursor

# Same process as above, but for all tables in the geodatabase.
# For basic GeMS databases, results will return from AT LEAST the DescriptionOfMapUnits table.
arcpy.AddMessage("Searching Tables")
tables = arcpy.ListTables("*", "All")
for table in tables:
    fields = arcpy.ListFields(table, "*", "All")
    for field in fields:
        if field.name in list:  # `list` here is a list of target field names defined earlier in the script
            fldName = field.name
            tableName = table
            myValues = set([
                row.getValue(fldName)
                for row in arcpy.SearchCursor(tableName, fields=fldName)
            ])
            arcpy.AddMessage("***Writing " + str(len(myValues)) +
                             " unique values in the " + str(field.name) +
                             " field of table " + str(table) +
                             " to Glossary***")
            cursor = arcpy.da.InsertCursor(glossary, ("Term", "TermSrcFld"))
            # mirror the feature-class loop above: write each unique value
            for value in myValues:
                cursor.insertRow([value, fldName])
            del cursor

def between(value, a, b):
    # Find and validate before-part.
    pos_a = value.find(a)
    if pos_a == -1: return ""
    # Find and validate after part.
    pos_b = value.rfind(b)
    if pos_b == -1: return ""
    # Return middle part.
    adjusted_pos_a = pos_a + len(a)
    if adjusted_pos_a >= pos_b: return ""
    return value[adjusted_pos_a:pos_b]
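
# Usage sketch for between(): it returns the text strictly between the first
# occurrence of `a` and the last occurrence of `b`, e.g.
#   between('dis_sp_foo_pr_2020', 'dis_sp_', '_pr')  ->  'foo'
# which is how the node-importance table names are parsed below.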


tableList = arcpy.ListTables("*_node_importances*")
tableList2 = arcpy.ListTables("*_execution_events*")
nameList = list()


def xstr(s):
    if s is None:
        return '0'
    return str(s)


for table in tableList:
    wCard1 = "dis_sp_"
    wCard2 = "_pr"
    nameList.append(between(table, wCard1, wCard2))
    outtbl = os.path.join(GPS_tbl_fp, tablename)
    statsFields = [["Trackname", "FIRST"], ["CycID", "FIRST"],
                   ["DURATION_s", "SUM"], ["DISTANCE_m", "SUM"],
                   ["SPEED_mps", "MEAN"]]
    arcpy.Statistics_analysis(points, outtbl, statsFields)
    arcpy.AddField_management(outtbl, "Trackname2", "TEXT")
    arcpy.CalculateField_management(outtbl, "Trackname2", "'" + points + "'",
                                    "PYTHON")
    # The duration sum is in seconds; add a field with the time computed in minutes
    arcpy.AddField_management(outtbl, "DURATION_mins", "DOUBLE")
    arcpy.CalculateField_management(outtbl, "DURATION_mins",
                                    '!SUM_DURATION_s!/60', "PYTHON")

# List the GPS point summary tables
arcpy.env.workspace = GPS_tbl_fp
GPSsumtables = arcpy.ListTables()
# Merge them into one table
GPStable = "Validointi_GPSsummary"
arcpy.Merge_management(GPSsumtables, GPStable)

# Select the traveled route from the road network with Select By Location
tieverkko = r"{fp}"
tieverkko_lyr = "tieverkko_lyr"
arcpy.MakeFeatureLayer_management(tieverkko, "tieverkko_lyr")

arcpy.env.workspace = reitti_fp
for route in reittilista:
    arcpy.SelectLayerByLocation_management(tieverkko_lyr,
                                           "HAVE_THEIR_CENTER_IN", route)
    # Build a summary table from the road-network selection
    route_tbl_fp = os.path.join(ws_dir, "Validointi_reittiajat.gdb")
Example #17
def updateFGDBfromSDE(fgdb, sde, logger=None):
    """
    fgdb: file geodatabase
    sde: sde geodatabase connection
    logger: agrc.logging.Logger (optional)

    returns: (String[], String[]) - the lists of errors and changed datasets

    Loops through the file geodatabase feature classes and looks for
    matches in the SDE database. If there is a match, it does a schema check
    and then updates the data.
    """
    global changes
    def log(msg):
        if logger:
            logger.logMsg(msg)
        else:
            print msg

    def updateData(isTable):
        try:
            # validate that there was not a schema change
            arcpy.env.workspace = fgdb
            layer = sdeFC + '_Layer'
            if not isTable:
                arcpy.MakeFeatureLayer_management(sdeFC, layer, '1 = 2')
            else:
                arcpy.MakeTableView_management(sdeFC, layer, '1 = 2')

            try:
                arcpy.Append_management(layer, f, 'TEST')
                log('schema test passed')
                passed = True
            except arcpy.ExecuteError as e:
                if '000466' in e.message:
                    log(e.message)
                    msg = 'schema change detected'
                    msg += '\n\n{0}'.format(getFieldDifferences(sdeFC, f))
                    errors.append('{}: {}'.format(f, msg))
                    log(msg)
                    passed = False
                    return passed
                else:
                    raise e
            arcpy.Delete_management(layer)

            log('checking for changes...')
            if checkForChanges(f, sdeFC, isTable) and passed:
                log('updating data...')
                arcpy.TruncateTable_management(f)

                # edit session required for data that participates in relationships
                editSession = arcpy.da.Editor(fgdb)
                editSession.startEditing(False, False)
                editSession.startOperation()

                fields = [fld.name for fld in arcpy.ListFields(f)]
                fields = filter_fields(fields)
                if not isTable:
                    fields.append('SHAPE@')
                    outputSR = arcpy.Describe(f).spatialReference
                else:
                    outputSR = None
                with arcpy.da.InsertCursor(f, fields) as icursor, \
                    arcpy.da.SearchCursor(sdeFC, fields, sql_clause=(None, 'ORDER BY OBJECTID'),
                                          spatial_reference=outputSR) as cursor:
                    for row in cursor:
                        icursor.insertRow(row)

                editSession.stopOperation()
                editSession.stopEditing(True)

                changes.append(f.upper())
            else:
                log('no changes found')
        except:
            errors.append('Error updating: {}'.format(f))
            if logger:
                logger.logError()

    log('** Updating {} from {}'.format(fgdb, sde))
    errors = []

    # loop through local feature classes
    arcpy.env.workspace = fgdb
    fcs = arcpy.ListFeatureClasses() + arcpy.ListTables()
    totalFcs = len(fcs)
    i = 0
    for f in fcs:
        i = i + 1
        log('{} of {} | {}'.format(i, totalFcs, f))

        found = False

        # search for match in stand-alone feature classes
        arcpy.env.workspace = sde
        matches = arcpy.ListFeatureClasses(
            '*.{}'.format(f)) + arcpy.ListTables('*.{}'.format(f))
        if matches is not None and len(matches) > 0:
            match = matches[0]
            sdeFC = join(sde, match)
            found = True
        else:
            # search in feature datasets
            datasets = arcpy.ListDatasets()
            if len(datasets) > 0:
                # loop through datasets
                for ds in datasets:
                    matches = arcpy.ListFeatureClasses('*.{}'.format(f), None,
                                                       ds)
                    if matches is not None and len(matches) > 0:
                        match = matches[0]
                        sdeFC = join(sde, match)
                        found = True
                        break
        if not found:
            msg = 'no match found in sde'
            errors.append("{}: {}".format(f, msg))
            log(msg)
            continue

        updateData(arcpy.Describe(join(fgdb, f)).datasetType == 'Table')

    return (errors, changes)
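
# A minimal usage sketch for the function above (not part of the original
# example): the geodatabase paths are hypothetical stand-ins, and the
# module-level `changes` list the function appends to must exist before the call.
if __name__ == '__main__':
    changes = []
    errors, changed = updateFGDBfromSDE(r'C:\temp\cache.gdb',
                                        r'C:\connections\prod.sde')
    print 'errors: {}'.format(errors)
    print 'changed: {}'.format(changed)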
Example #18
def compare_models():
    add_tables = []
    remove_tables = []
    add_fields = []
    remove_fields = []
    update_fields = []

    models = [
        basename(f)[:-3] for f in glob(join(getcwd(), 'models', "*.py"))
        if isfile(f) and not f.endswith('__init__.py')
    ]

    # collect existing data
    existing_fc = [
        table.split('.')[-1].upper() for table in arcpy.ListFeatureClasses()
    ]
    existing_tables = [
        table.split('.')[-1].upper() for table in arcpy.ListTables()
    ]

    for model in models:
        logging.debug('Checking {}'.format(model))
        table = getattr(import_module('models.{}'.format(model)), model)
        table_name = getattr(table, '_name')
        field_names = [f for f in dir(table) if not f.startswith('_')]

        # collect table props
        table_props = {}
        for prop in [key for key in dir(table) if key in table_keys]:
            table_props[prop] = getattr(table, prop)
        table_props['fields'] = []

        # sanity check
        for field in field_names:
            field_props = getattr(table, field)
            config_name = field_props.get('name', None)
            if not config_name:
                field_props['name'] = field
            if field != field_props.get('name'):
                logging.warning(
                    '{}.{} has mismatched field name ({}) configured. {} will be used.'
                    .format(table_name, field, field_props.get('name'), field))
                field_props['name'] = field

        # see if we need to create the table
        found_table = False
        if '_geometry' not in table_props or not table_props['_geometry']:
            found_table = table_props['_name'].upper() in existing_tables
        else:
            found_table = table_props['_name'].upper() in existing_fc
        if not found_table:
            logging.info('Add: {}'.format(table_props['_name']))
            add_tables.append(table_props)
            continue

        existing_fields = list(
            filter(filter_fields, map(map_fields,
                                      arcpy.ListFields(table_name))))

        # check for new or updated fields
        for field_name in field_names:
            field_obj = getattr(table, field_name)
            field_obj['name'] = field_name
            existing = find_field(existing_fields, field_name)
            if not existing:
                logging.info('Add: {}.{}'.format(table_name, field_name))
                add_fields.append({'table': table_name, 'field': field_obj})
            else:
                should_update = False
                for key in filter(filter_field_keys, field_obj.keys()):
                    if not compare_key(field_obj, existing, key):
                        logging.debug(
                            'Difference: {}.{}, Old Value: {}, New Value: {}'.
                            format(
                                table_name,
                                key,
                                existing[key]
                                if key in existing else 'Undefined',
                                field_obj[key]
                                if key in field_obj else 'Undefined',
                            ))
                        should_update = True
                if should_update:
                    logging.info('Update: {}.{}'.format(
                        table_name, field_name))
                    update_fields.append({
                        'table': table_name,
                        'field': field_obj
                    })
                else:
                    logging.debug('Match: {}.{}'.format(
                        table_name, field_name))

        # check for fields that need to be deleted
        for field in existing_fields:
            field_name = field['name']
            try:
                getattr(table, field_name)
            except Exception as e:
                # field doesn't exist
                logging.debug(e)
                logging.info('Remove: {}.{}'.format(table_name, field_name))
                remove_fields.append({'table': table_name, 'field': field})

    logging.info("""
        Updates Summary: 
            Fields:
                Add: {}
                Update: {}
                Remove: {}
            Tables:
                Add: {}
                Remove: {}
    """.format(
        len(add_fields),
        len(update_fields),
        len(remove_fields),
        len(add_tables),
        len(remove_tables),
    ))
    return {
        'add_fields': add_fields,
        'update_fields': update_fields,
        'remove_fields': remove_fields,
        'add_tables': add_tables,
        'remove_tables': remove_tables,
    }
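
# A hedged sketch (not part of the original) of consuming the diff that
# compare_models returns; apply_changes and its log-only behaviour are
# assumptions for illustration.
def apply_changes(diff):
    for table_props in diff['add_tables']:
        logging.info('would create table {}'.format(table_props['_name']))
    for item in diff['add_fields'] + diff['update_fields']:
        logging.info('would alter {}.{}'.format(item['table'],
                                                item['field']['name']))
    for item in diff['remove_fields']:
        logging.info('would drop {}.{}'.format(item['table'],
                                               item['field']['name']))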
Example #19
CameraInput = arcpy.GetParameterAsText(0)
SinglePhotos = arcpy.GetParameterAsText(1)
Location = arcpy.GetParameterAsText(2)
PassengerPhotos = arcpy.GetParameterAsText(3)
DriverPhotos = arcpy.GetParameterAsText(4)
AngleField = arcpy.GetParameterAsText(5)
Geodatabase = arcpy.GetParameterAsText(6)
Parcels = arcpy.GetParameterAsText(7)
ParcelPIN = arcpy.GetParameterAsText(8)
TemplateGDB = arcpy.GetParameterAsText(9)

# Retrieve Template Feature Class and Template Questions Table from Template Geodatabase
arcpy.env.workspace = TemplateGDB
TemplateFC = arcpy.ListFeatureClasses()
TemplateQTable = arcpy.ListTables()
TemplateFC = TemplateGDB + "\\" + TemplateFC[0]
TemplateQTable = TemplateGDB + "\\" + TemplateQTable[0]

arcpy.AddMessage("Step 1:  Loading input parameters")

if str(AngleField) == 'true':
    AngleField = 'Direction'
else:
    AngleField = ''

if CameraInput == 'Associate Photo with Parcel':

    # ______________________________________________________________________________#
    #
    # Convert Passenger Photos to Points
Example #20
# Name: CreateFeatureclass_Example2.py
# Description: Create a feature class to store the gnatcatcher habitat zones

# Import system modules
import arcpy

# Set workspace
arcpy.env.workspace = r'W:\XXX\arcgis\egdb\Phase1\ACQUIREFC\XXXX'  # insert connection

views = [v for v in arcpy.ListTables() if v.endswith("_VW")]
# Print the matched view names
for table in views:
    print(table)
Example #21
import arcpy, os

for i in range(5):
    arcpy.CreateTable_management(os.getcwd(), "a" + str(i))
    arcpy.CreateTable_management(os.getcwd(), "b" + str(i) + ".dbf")
arcpy.env.workspace = os.getcwd()
print "These are all tables:"
for table in arcpy.ListTables():
    print table
print "These are all dbfTables:"
for table in arcpy.ListTables("", "dBASE"):
    print table
for table in arcpy.ListTables():
    arcpy.Delete_management(os.getcwd() + os.sep + table)
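
# A hedged aside (not part of the original): ListTables also accepts a
# wild-card filter. Run before the cleanup loop above, this would list only
# the tables named a0-a4; after the loop the workspace is already empty.
for table in arcpy.ListTables("a*"):
    print table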
Example #22
def run_job(esri_job):
    """Determines the data type and each dataset is sent to the worker to be processed."""
    status_writer.send_percent(0.0, "Initializing... 0.0%", 'esri_worker')
    job = esri_job

    # if job.path.startswith('http'):
    if job.service_connection:
        global_job(job)
        worker(job.service_connection, esri_service=True)
        return

    dsc = arcpy.Describe(job.path)
    # A single feature class or table.
    if dsc.dataType in ('DbaseTable', 'FeatureClass', 'ShapeFile', 'Shapefile',
                        'Table'):
        global_job(job, int(arcpy.GetCount_management(job.path).getOutput(0)))
        job.tables_to_keep()  # This will populate field mapping.
        worker(job.path)
        return

    # A folder (for shapefiles).
    elif dsc.dataType == 'Folder':
        arcpy.env.workspace = job.path
        tables = []
        tables_to_keep = job.tables_to_keep()
        tables_to_skip = job.tables_to_skip()
        if tables_to_keep:  # the filter list built above, not the bound method
            for t in tables_to_keep:
                [
                    tables.append(os.path.join(job.path, fc))
                    for fc in arcpy.ListFeatureClasses(t)
                ]
        else:
            [
                tables.append(os.path.join(job.path, fc))
                for fc in arcpy.ListFeatureClasses()
            ]

        if tables_to_skip:
            for t in tables_to_skip:
                [
                    tables.remove(os.path.join(job.path, fc))
                    for fc in arcpy.ListFeatureClasses(t)
                ]

    # A geodatabase (.mdb, .gdb, or .sde).
    elif dsc.dataType == 'Workspace':
        # Create a geodatabase entry with links to tables.
        gdb_links = []
        gdb_entry = {}
        gdb_properties = {}
        gdb_properties['id'] = job.location_id + os.path.splitext(dsc.name)[0]
        gdb_properties['name'] = dsc.name
        gdb_properties['path'] = dsc.catalogPath
        gdb_properties['_discoveryID'] = job.discovery_id
        gdb_properties['format'] = dsc.workspaceFactoryProgID
        if hasattr(dsc, 'domains'):
            if dsc.domains:
                gdb_properties['meta_has_domains'] = True
            else:
                gdb_properties['meta_has_domains'] = 'false'
        if dsc.release == '3,0,0':
            gdb_properties[
                'fs_arcgis_version'] = "10.0, 10.1, 10.2, 10.3, 10.4, 10.5 or ArcGIS Pro 1.0, 1.1, 1.2"
        elif dsc.release == '2,3,0':
            gdb_properties['fs_arcgis_version'] = "9.3, 9.3.1"
        else:
            gdb_properties['fs_arcgis_version'] = "9.2"
        if hasattr(dsc.connectionProperties, 'version'):
            cp = dsc.connectionProperties
            gdb_properties['fs_server'] = cp.server
            gdb_properties['fs_instance'] = cp.instance
            gdb_properties['fs_database'] = cp.database
            gdb_properties['fs_version'] = cp.version
        gdb_entry['location'] = job.location_id
        gdb_entry['action'] = job.action_type
        gdb_entry['entry'] = {'fields': gdb_properties}

        arcpy.env.workspace = job.path
        feature_datasets = arcpy.ListDatasets('*', 'Feature')
        tables = []
        tables_to_keep = job.tables_to_keep()
        tables_to_skip = job.tables_to_skip()
        if tables_to_keep:
            for t in tables_to_keep:
                [
                    tables.append(os.path.join(job.path, tbl))
                    for tbl in arcpy.ListTables(t)
                ]
                [
                    tables.append(os.path.join(job.path, fc))
                    for fc in arcpy.ListFeatureClasses(t)
                ]
                for fds in feature_datasets:
                    [
                        tables.append(os.path.join(job.path, fds, fc))
                        for fc in arcpy.ListFeatureClasses(wild_card=t,
                                                           feature_dataset=fds)
                    ]
        else:
            [
                tables.append(os.path.join(job.path, tbl))
                for tbl in arcpy.ListTables()
            ]
            [
                tables.append(os.path.join(job.path, fc))
                for fc in arcpy.ListFeatureClasses()
            ]
            for fds in feature_datasets:
                [
                    tables.append(os.path.join(job.path, fds, fc))
                    for fc in arcpy.ListFeatureClasses(feature_dataset=fds)
                ]

        if tables_to_skip:
            for t in tables_to_skip:
                [
                    tables.remove(os.path.join(job.path, tbl))
                    for tbl in arcpy.ListTables(t)
                ]
                [
                    tables.remove(os.path.join(job.path, fc))
                    for fc in arcpy.ListFeatureClasses(t)
                ]
                for fds in feature_datasets:
                    [
                        tables.remove(os.path.join(job.path, fds, fc))
                        for fc in arcpy.ListFeatureClasses(wild_card=t,
                                                           feature_dataset=fds)
                    ]

    # A geodatabase feature dataset, SDC data, or CAD dataset.
    elif dsc.dataType == 'FeatureDataset' or dsc.dataType == 'CadDrawingDataset':
        tables_to_keep = job.tables_to_keep()
        tables_to_skip = job.tables_to_skip()
        arcpy.env.workspace = job.path
        if tables_to_keep:
            tables = []
            for tbl in tables_to_keep:
                [
                    tables.append(os.path.join(job.path, fc))
                    for fc in arcpy.ListFeatureClasses(tbl)
                ]
                tables = list(set(tables))
        else:
            tables = [
                os.path.join(job.path, fc)
                for fc in arcpy.ListFeatureClasses()
            ]
        if tables_to_skip:
            for tbl in tables_to_skip:
                [
                    tables.remove(os.path.join(job.path, fc))
                    for fc in arcpy.ListFeatureClasses(tbl) if fc in tables
                ]

    # Not a recognized data type.
    else:
        sys.exit(1)

    if job.multiprocess:
        # Multiprocess larger databases and feature datasets.
        multiprocessing.log_to_stderr()
        logger = multiprocessing.get_logger()
        logger.setLevel(logging.INFO)
        pool = multiprocessing.Pool(initializer=global_job, initargs=(job, ))
        for i, _ in enumerate(pool.imap_unordered(worker, tables), 1):
            # float() guards against integer division under Python 2
            status_writer.send_percent(float(i) / len(tables),
                                       "{0:%}".format(float(i) / len(tables)),
                                       'esri_worker')
        # Synchronize the main process with the job processes to ensure proper cleanup.
        pool.close()
        pool.join()
    else:
        for i, tbl in enumerate(tables, 1):
            try:
                global_job(job)
                te = worker(tbl)
                if te and dsc.dataType == 'Workspace':
                    gdb_links.append({'relation': 'contains', 'id': te['id']})
                status_writer.send_percent(
                    float(i) / len(tables),
                    "{0} {1:%}".format(tbl, float(i) / len(tables)),
                    'esri_worker')
            except Exception:
                continue
    # gdb_entry and gdb_links are only built for the geodatabase branch
    if dsc.dataType == 'Workspace':
        gdb_entry['entry']['links'] = gdb_links
        job.send_entry(gdb_entry)
    return
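
# A minimal sketch (not from the original) of the Describe-based dispatch
# that run_job relies on; the sample path is a hypothetical stand-in.
dsc = arcpy.Describe(r'C:\data\sample.gdb')
if dsc.dataType == 'Workspace':
    print('geodatabase: list tables, feature classes and feature datasets')
elif dsc.dataType == 'Folder':
    print('folder: list shapefiles')
else:
    print('single dataset: hand straight to the worker')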
Example #23
def ProcessReplica(conn):

    #Get the Extracted Geodatabase Name
    for file in os.listdir(wksp + "\\Temp\\"):
        if file.endswith(".gdb"):
            extracted_gdb = wksp + "\\Temp\\" + file

    arcpy.env.workspace = extracted_gdb
    surveytable = arcpy.ListTables()
    surveyfc = arcpy.ListFeatureClasses()

    #Trim All Field & Tables Here
    for fc in surveyfc:
        TrimFields(fc)

    for table in surveytable:
        TrimFields(table)

    arcpy.env.workspace = conn
    sde_tables = arcpy.ListTables()
    sde_fcs = arcpy.ListFeatureClasses()

    #if validation_survey:
    if validation:
        print "Validation needed for " + surveyName + " will export to Excel"
        arcpy.AddMessage("Validation needed for " + surveyName +
                         " will export to Excel")
        for fc in surveyfc:
            print fc
            if arcpy.Exists(conn + "\\" + fc):
                arcpy.AddMessage("Feature Class Exists: " + fc)
                print "Feature Class Exists"
                arcpy.env.workspace = conn
                RowsToDelete = getExistingRecords(conn + "\\" + fc)
                arcpy.env.workspace = extracted_gdb
                DeleteExistingRows(extracted_gdb + "\\" + fc, RowsToDelete)
                arcpy.AddField_management(fc, "X_Coord", "DOUBLE")
                arcpy.AddField_management(fc, "Y_Coord", "DOUBLE")
                arcpy.CalculateField_management(fc, "X_Coord",
                                                "!SHAPE!.firstPoint.X",
                                                "PYTHON_9.3")
                arcpy.CalculateField_management(fc, "Y_Coord",
                                                "!SHAPE!.firstPoint.Y",
                                                "PYTHON_9.3")
                arcpy.TableToExcel_conversion(fc,
                                              CSVLocation + '\\' + fc + ".xls")
                #arcpy.Append_management(fc, conn + "\\" + sde_fc, "NO_TEST")
                for table in surveytable:
                    if arcpy.Exists(conn + "\\" + table):
                        arcpy.TableToExcel_conversion(
                            table, CSVLocation + '\\' + table + ".xls")
            else:
                print "Feautre Class Does not Exists"
                arcpy.AddMessage("Feature Class Does Not Exists: " + fc)
                arcpy.AddMessage(
                    "Feature Class: " + fc +
                    " not found.  Exporting to Excel.  Schema will be created upon uploading..."
                )
                arcpy.Copy_management(fc, conn + "\\" + fc)
                arcpy.TableToExcel_conversion(fc,
                                              CSVLocation + '\\' + fc + ".xls")

    #if no_validation_survey:
    if not validation:
        print "No validation needed for " + surveyName
        arcpy.AddMessage("No validation needed for " + surveyName)

        for fc in surveyfc:
            if arcpy.Exists(conn + "\\" + fc):
                print "Feature Class Exists"
                arcpy.env.workspace = conn
                RowsToDelete = getExistingRecords(conn + "\\" + fc)
                arcpy.env.workspace = extracted_gdb
                DeleteExistingRows(extracted_gdb + "\\" + fc, RowsToDelete)
                print "Appending remaining rows..."
                arcpy.Append_management(fc, conn + "\\" + fc, "NO_TEST")

                for table in surveytable:
                    if arcpy.Exists(conn + "\\" + table):
                        print "Table already exists: " + table
                        arcpy.Append_management(table, conn + "\\" + table,
                                                "NO_TEST")
            else:
                print "Feautre Class Does not Exists"
                arcpy.AddMessage("Feature Class: " + fc + " not found.")
                arcpy.Copy_management(extracted_gdb + "\\" + fc,
                                      conn + "\\" + surveyName)

                # TODO: decide how to handle this case
    return
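
# A hedged call sketch (not from the original): ProcessReplica reads several
# module-level globals (wksp, validation, surveyName, CSVLocation) that are
# set elsewhere in the script, so only the connection path is passed; the
# path below is a hypothetical stand-in.
ProcessReplica(wksp + "\\Connections\\prod.sde")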
Example #24
                                     == "true" else "_AllComponents.txt")
    if os.path.isfile(textFilePath):
        os.remove(textFilePath)

    # record basic user inputs and settings to log file for future purposes
    logBasicSettings()

    # Get the database and location of the SSURGO mapunit
    #theDB = GetWorkspace(ssurgoInput) # more than likely it will return a GDB or FD path
    #theDir = os.path.dirname(theDB)

    if setScratchWorkspace():

        arcpy.env.workspace = ssurgoFGDB
        """ ------------------------------------ Prepare Component Table --------------------------"""
        compTable = arcpy.ListTables("component", "ALL")

        # Exit if compTable doesn't exist
        if not len(compTable):
            raise ExitError, "\tComponent table was not found in " + os.path.basename(
                ssurgoFGDB)

        # compTable absolute path
        compTablePath = arcpy.env.workspace + os.sep + compTable[0]

        # make sure all of the following compTable fields exist
        compMukey = FindField(compTable[0], "mukey")
        compSlpLow = FindField(compTable[0], "slope_l")
        compSlpHigh = FindField(compTable[0], "slope_h")
        compPctRv = FindField(compTable[0], "comppct_r")
        majorField = FindField(compTable[0], "majcompflag")
Example #25
    #-------- CREATE FINAL Combined BG TABLE ---------------------------------
    """ Create the blank FC """
    arcpy.FeatureClassToFeatureClass_conversion(finalDir + '/' + city + '_BG', finalDir, city + '_BG_AllData')
    BGFields = [f.name for f in arcpy.ListFields(finalDir + '/' + city + '_BG_AllData')]
    for field in BGFields:
        if field not in ['bgrp']:
            try:
                arcpy.DeleteField_management(finalDir + '/' + city + '_BG_AllData', field)
            except:
                pass
        else:
            pass


    # Keep only the 13 expected summary tables; removing items from a list
    # while iterating over it skips elements, so filter into a new list instead
    keepTbls = [str(city + "_BenMap"), str(city + "_BG_Pop"), str(city + "_Floodplain"), str(city + "_DWDemand"), str(city + "_EduLowGS"), str(city + "_historical_places"), str(city + "_iTree"), str(city + "_LCSum"), str(city + "_NrRd_Pop"), str(city + "_Park_Pop"), str(city + "_RB_LC"), str(city + "_TreeWV"), str(city + "_WaterWV")]
    finalTbls = []
    for t in arcpy.ListTables():
        if str(t) in keepTbls:
            finalTbls.append(t)
        else:
            print("Removed extraneous table: " + str(t))
    """ Add the fields for each final table """
    for i,tbl in enumerate(finalTbls,1):
        print("Joining table " + str(i) + " / 13: " + tbl)
        fields = [f.name for f in arcpy.ListFields(tbl)]
        try:
            fields.remove('bgrp')
Example #26
import arcpy
from arcpy import env

parentGeodatabase = arcpy.GetParameterAsText(0)
outGeodb = arcpy.GetParameterAsText(1)
replicaName = arcpy.GetParameterAsText(2)
query = arcpy.GetParameterAsText(3)

env.workspace = parentGeodatabase
allFeatureClasses = arcpy.ListTables()
dataSets = arcpy.ListDatasets()
for dataset in dataSets:
    featureClasses = arcpy.ListFeatureClasses('*', '', dataset)
    print dataset
    arcpy.AddMessage(dataset)
    for featureClass in featureClasses:
        try:
            print featureClass
            featureNames = featureClass.split(".")
            pureName = featureNames[-1]
            arcpy.AddMessage("    " + pureName)
            arcpy.MakeFeatureLayer_management(featureClass, pureName + "_New",
                                              query, dataset)
            allFeatureClasses.append(pureName + "_New")
        except Exception, e:
            arcpy.AddError(e.message)
            # arcpy.MakeFeatureLayer_management(featureClass, featureClass + "_New", workspace=dataset)
            print('query was not applied to this layer ' + featureClass +
                  " " + arcpy.GetMessages())
            pass
if len(allFeatureClasses) == 0:
Example #27
def crs4_copy_tbls_stage(args):
    # script name
    script_name = os.path.basename(__file__)

    # script parameters
    sdePath = args[0]  
    gdbPath = args[1]
    sdePrefix = args[2]
   
    # Set environment
    arcpy.env.workspace = gdbPath
    arcpy.env.overwriteOutput = True
    arcpy.env.configKeyword = "GEOMETRY"

    # log function
    log_msg('calling {}'.format(script_name))

    # variables
    err_message = None    
    # excludeList = ["INSTRUMENT"]
    excludeList = []

    try:
    
        if arcpy.Exists(gdbPath):
            ### Copy tables from local GDB to database
            #*** NOTE: tables have been deleted from SDE previously via 
            #*** CRS2_emptyNDCSTGsde.py - but still check for existence
            # List tables in GDB
            tbll = arcpy.ListTables()
            # Loop through the tables
            log_msg('Copying tables to staging SDE...')
            for tbl in tbll:
                inTBLpath = os.path.join(gdbPath,tbl)
                outTBLname = sdePrefix + tbl
                outTBLpath = os.path.join(sdePath,outTBLname)
                # Check whether table exists in SDE, if so - print warning
                if arcpy.Exists(outTBLpath):
                    log_msg('WARNING: {} exists in staging SDE'.format(outTBLname))
                # Otherwise, copy
                else:
                    # Ignore tables in exclude list
                    if tbl in excludeList:
                        log_msg('Ignoring {}'.format(tbl))
                    else:
                        # Copy table from GDB to SDE
                        arcpy.Copy_management(inTBLpath,outTBLpath,"Table") 
                        # Count features and report number - warn if not equal
                        inCount = arcpy.GetCount_management(inTBLpath).getOutput(0)
                        outCount = arcpy.GetCount_management(outTBLpath).getOutput(0)
                        if inCount == outCount:
                            log_msg('{0} - Copied {1} entries to {2}'.format(tbl, inCount, outTBLname))
                        else:
                            log_msg('ERROR: {0} entries copied from {1} - {2} entries resultant in {3}'.format(inCount, tbl, outCount, outTBLname))
        else:
            err_message = 'ERROR: GDB not found - {}'.format(gdbPath)
            
        log_msg ( "Process time: %s \n" % str(datetime.datetime.now()-starttime))  

    except Exception as e: 
        err_message =  "ERROR while running {0}: {1}" .format(script_name,e)

    return err_message, log_messages
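
# A hedged usage sketch (not from the original): the function expects its
# three parameters packed into a single args sequence; the paths and table
# prefix below are hypothetical stand-ins.
err, logs = crs4_copy_tbls_stage([r'C:\connections\stage.sde',
                                  r'C:\data\source.gdb',
                                  'CRS_'])
if err:
    print(err)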
Example #28
    def _CopyDataTypeProcess(self, type="FeatureClasses", ds="", fc=""):
        try:
            #Set workspaces
            arcpy.env.workspace = self.start_db
            wk2 = self.end_db
            result = {}
            if (self.calledFromApp):
                if isinstance(self.standaloneFeatures, dict):
                    for key, featClass in self.standaloneFeatures.items():
                        if arcpy.Exists(dataset=featClass):

                            fcName = os.path.basename(featClass)
                            if '.' in fcName:
                                fcSplit = fcName.split('.')
                                fcName = fcSplit[len(fcSplit) - 1]

                            #fcDes = arcpy.Describe(featClass)
                            #workspace =featClass.replace(featClassBase,"")
                            #fullName = arcpy.ParseTableName(name=featClassBase,workspace=fcDes.workspace)
                            #nameList = fullName.split(",")
                            #databaseName = str(nameList[0].encode('utf-8')).strip()
                            #ownerName = str(nameList[1].encode('utf-8')).strip()
                            #fcName = str(nameList[2].encode('utf-8')).strip()

                            fcRes = arcpy.FeatureClassToFeatureClass_conversion(
                                featClass, wk2, fcName)
                            result[key] = str(fcRes)

                            print "Completed copy on {0}".format(fcName)
                        else:
                            result[key] = featClass

                else:
                    for featClass in self.standaloneFeatures:
                        if featClass.upper().find(".SDE") != -1:
                            featName = featClass.split('.')[-1]
                        else:
                            featName = featClass.split('/')[-1]
                        if arcpy.Exists(dataset=featClass):
                            arcpy.FeatureClassToFeatureClass_conversion(
                                featClass, wk2, featName)
                        print "Completed copy on {0}".format(featName)
            else:

                # if ds passed value exist then this call came from a copy dataset child object request.
                if ds != "":
                    if arcpy.Exists(wk2 + os.sep + ds.split('.')[-1] + os.sep +
                                    fc.split('.')[-1]) == False:
                        if type == "FeatureClasses":
                            arcpy.FeatureClassToFeatureClass_conversion(
                                self.start_db + os.sep + ds + os.sep + fc,
                                wk2 + os.sep + ds.split('.')[-1],
                                fc.split('.')[-1])
                            #arcpy.Copy_management(self.start_db + os.sep + ds + os.sep + fc, wk2 + os.sep + ds.split('.')[-1] + os.sep + fc.split('.')[-1])
                            print "Completed copy on {0}".format(fc)
                else:
                    # This function was called independently
                    #Check GDB if not created already, create it now
                    if self._CheckCreateGDBProcess():
                        #Determine the object type and List out
                        if type == "Tables":
                            dataTypeList = arcpy.ListTables()
                        else:
                            dataTypeList = arcpy.ListFeatureClasses()

                        for dtl in dataTypeList:
                            name = arcpy.Describe(dtl)
                            new_data = name.name.split('.')[-1]

                            # Checks to see if user wants to copy all features or just the ones that match the supplied list.
                            if "*" in self.standaloneFeatures and len(
                                    self.standaloneFeatures) == 1:
                                #print "Reading: {0}".format(dtl)
                                if arcpy.Exists(wk2 + os.sep +
                                                new_data) == False:
                                    if type == "Tables":
                                        arcpy.TableToTable_conversion(
                                            dtl, wk2, new_data)
                                    else:
                                        arcpy.FeatureClassToFeatureClass_conversion(
                                            dtl, wk2, new_data)
                                    print "Completed copy on {0}".format(
                                        new_data)
                            else:
                                if new_data in self.standaloneFeatures:
                                    print "Reading here: {0}".format(dtl)
                                    if arcpy.Exists(wk2 + os.sep +
                                                    new_data) == False:
                                        if type == "Tables":
                                            arcpy.TableToTable_conversion(
                                                dtl, wk2, new_data)
                                        else:
                                            arcpy.FeatureClassToFeatureClass_conversion(
                                                dtl, wk2, new_data)
                                        print "Completed copy on {0}".format(
                                            new_data)
                                    else:
                                        print "Feature class {0} already exists in the end_db so skipping".format(
                                            new_data)
                    #Clear memory
                    del dtl
            return True
        except arcpy.ExecuteError:
            line, filename, synerror = trace()
            raise DataPrepError({
                "function": "CopyData",
                "line": line,
                "filename": filename,
                "synerror": synerror,
                "arcpyError": arcpy.GetMessages(2),
            })
        except:
            line, filename, synerror = trace()
            raise DataPrepError({
                "function": "CopyData",
                "line": line,
                "filename": filename,
                "synerror": synerror,
            })
                                         "ANALYZE_BASE", "ANALYZE_DELTA",
                                         "ANALYZE_ARCHIVE")
        '''
        *********************
        Data Owner(s) Section
        *********************
        '''
        # Get a list of datasets owned by the data owner user (requires second connection file)
        # Get the user name for the workspace
        # this assumes you are using database authentication.
        # OS authentication connection files do not have a 'user' property.
        userName = userList

        # Get a list of all the datasets the user has access to.
        # First, get all the stand alone tables, feature classes and rasters owned by the current user.
        oDataList = arcpy.ListTables(
            '*.' + userName + '.*') + arcpy.ListFeatureClasses(
                '*.' + userName + '.*') + arcpy.ListRasters('*.' + userName +
                                                            '.*')

        # Next, for feature datasets owned by the current user
        # get all of the featureclasses and add them to the master list.
        for dataset in arcpy.ListDatasets('*.' + userName + '.*'):
            oDataList += arcpy.ListFeatureClasses(feature_dataset=dataset)

        # Rebuild indexes and analyze the data owner tables
        arcpy.RebuildIndexes_management(TMworkspace, "NO_SYSTEM", oDataList,
                                        "ALL")

        arcpy.AnalyzeDatasets_management(TMworkspace, "NO_SYSTEM", oDataList,
                                         "ANALYZE_BASE", "ANALYZE_DELTA",
                                         "ANALYZE_ARCHIVE")
Example #30
def main(gdbCopy, outWS, oldgdb):
    #
    # Simple version
    #
    isOpen = False
    addMsgAndPrint('')
    outputDir, logfile = makeOutputDir(oldgdb, outWS, isOpen)
    # point feature classes
    arcpy.env.workspace = gdbCopy
    if 'StandardLithology' in arcpy.ListTables():
        stdLithDict = makeStdLithDict()
    else:
        stdLithDict = 'None'
    mapUnitPolys(stdLithDict, outputDir, logfile)
    arcpy.env.workspace = gdbCopy + '/GeologicMap'
    pointfcs = arcpy.ListFeatureClasses('', 'POINT')
    linefcs = arcpy.ListFeatureClasses('', 'LINE')
    arcpy.env.workspace = gdbCopy
    for fc in linefcs:
        linesAndPoints('GeologicMap/' + fc, outputDir, logfile)
    for fc in pointfcs:
        linesAndPoints('GeologicMap/' + fc, outputDir, logfile)
    logfile.close()
    #
    # Open version
    #
    isOpen = True
    addMsgAndPrint('')
    outputDir, logfile = makeOutputDir(oldgdb, outWS, isOpen)
    # list featuredatasets
    arcpy.env.workspace = gdbCopy
    fds = arcpy.ListDatasets()
    addMsgAndPrint('datasets = ' + str(fds))
    # for each featuredataset
    for fd in fds:
        arcpy.env.workspace = gdbCopy
        addMsgAndPrint('  Processing feature data set ' + fd + '...')
        logfile.write('Feature data set ' + fd + ' \n')
        try:
            spatialRef = arcpy.Describe(fd).spatialReference
            logfile.write('  spatial reference framework\n')
            logfile.write('    name = ' + spatialRef.name + '\n')
            logfile.write('    spheroid = ' + spatialRef.spheroidName + '\n')
            logfile.write('    projection = ' + spatialRef.projectionName +
                          '\n')
            logfile.write('    units = ' + spatialRef.linearUnitName + '\n')
        except:
            logfile.write(
                '  spatial reference framework appears to be undefined\n')
        # generate featuredataset prefix
        pfx = ''
        # build the prefix from the feature dataset's uppercase characters
        for c in fd:
            if c == c.upper():
                pfx = pfx + c
        # for each featureclass in dataset
        arcpy.env.workspace = gdbCopy + '/' + fd
        fcList = arcpy.ListFeatureClasses()
        if fcList is not None:
            for fc in fcList:
                # don't dump Anno classes
                if arcpy.Describe(fc).featureType != 'Annotation':
                    outName = pfx + '_' + fc + '.shp'
                    dumpTable(fc, outName, True, outputDir, logfile, isOpen,
                              fc)
                else:
                    addMsgAndPrint('    Skipping annotation feature class ' +
                                   fc + '\n')
        else:
            addMsgAndPrint('   No feature classes in this dataset!')
        logfile.write('\n')
    # list tables
    arcpy.env.workspace = gdbCopy
    for tbl in arcpy.ListTables():
        outName = tbl + '.csv'
        dumpTable(tbl, outName, False, outputDir, logfile, isOpen, tbl)
    logfile.close()
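
    # dumpTable is defined elsewhere in this script; a rough stand-in for the
    # CSV case (an assumption, not the author's implementation) would be:
    #   arcpy.TableToTable_conversion(tbl, outputDir, tbl + '.csv')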