Exemple #1
0
def list_feature(gdb, dataset):
    """Return the full path of *dataset* inside the workspace named *gdb*.

    Scans every ``*.gdb`` / ``*.mdb`` workspace under the current
    ``arcpy.env.workspace``; when one's basename equals *gdb*, looks for a
    dataset whose name matches *dataset* ('' selects the workspace root).
    Returns ``None`` when no match is found.
    """
    # Union of file (.gdb) and personal (.mdb) geodatabases found in the
    # current workspace.
    for fgdb in list(
            set(arcpy.ListWorkspaces("*gdb"))
            | set(arcpy.ListWorkspaces("*.mdb"))):
        wk = os.path.basename(fgdb)

        if str(wk) == gdb:
            print(wk)
            env.workspace = fgdb
            datasets = arcpy.ListDatasets()
            # '' stands for feature classes stored at the workspace root;
            # ListDatasets returns None (not []) when the GDB has none.
            datasets = [''] + datasets if datasets is not None else []

            for ds in datasets:
                if str(ds).strip() == str(dataset).strip():
                    return fgdb + os.sep + ds
Exemple #2
0
def exec_batch_convert(infolder, outfolder):
    '''
    Convert per-municipality shapefile folders to file geodatabases with a
    pool of worker processes, then merge the results into one "forge"
    feature class.

    infolder  -- root folder containing one sub-folder per municipality
    outfolder -- destination for the per-municipality FGDBs and the merged
                 output FGDB
    '''
    try:
        start = datetime.datetime.now()
        print(u"-- Start: Multiprocess_Forge_ShapefileToFeatureClass --:{0}".
              format(start))
        cpu_cnt = multiprocessing.cpu_count()
        arcpy.env.workspace = infolder
        inwss = arcpy.ListWorkspaces("*", "Folder")
        # Build the (input folder, output FGDB) pairs handed to the pool.
        params = []
        for inws in inwss:
            param1 = inws  # municipality folder (holds the shapefiles)
            gdbname = u"{0}.gdb".format(os.path.basename(inws))
            param2 = os.path.join(outfolder, gdbname)  # municipality FGDB
            params.append((param1, param2))
        if not params:
            # Pool(0) would raise ValueError; nothing to convert anyway.
            print(u"  No input folders found - nothing to do.")
            return
        # Don't start more processes than there are folders to convert.
        if len(inwss) < cpu_cnt:
            cpu_cnt = len(inwss)
        pool = multiprocessing.Pool(cpu_cnt)
        results = pool.map(multi_run_batch_convert, params)
        pool.close()
        pool.join()
        # Report each worker's result.
        for r in results:
            print(u"{0}".format(r))

        # Merge the per-process outputs into a single feature class.
        arcpy.env.workspace = outfolder
        outwss = arcpy.ListWorkspaces("*", "FileGDB")
        foldername = "{0}.gdb".format(os.path.basename(outfolder))
        forgefc = "forge"
        print(u"  Merge to FeatureClass:{1} in FGDB:{0} ".format(
            foldername, forgefc))
        arcpy.CreateFileGDB_management(outfolder, foldername, "CURRENT")
        forgews = os.path.join(outfolder, foldername)
        for outws in outwss:
            arcpy.env.workspace = outws
            # Each FGDB holds exactly one feature class, so [0] is safe.
            fc = arcpy.ListFeatureClasses()[0]
            print(u"    merge: {0} ⇒ {1}".format(fc, forgefc))
            if arcpy.Exists(os.path.join(forgews, forgefc)):
                outfc = os.path.join(forgews, forgefc)
                arcpy.Append_management(fc, outfc)
            else:
                arcpy.FeatureClassToFeatureClass_conversion(
                    fc, forgews, forgefc)

        # Cleanup: delete the per-municipality FGDBs - uncomment if needed.
        #for outws in outwss:
        #    arcpy.Delete_management(outws)

        fin = datetime.datetime.now()
        print(u"-- Finish: Multiprocess_Forge_ShapefileToFeatureClass --:{0}".
              format(fin))
        print(u"     Elapsed time:{0}".format(fin - start))
    except Exception:
        # sys.exc_info()[1] is the exception instance; index 2 is only the
        # traceback object, which prints as an unhelpful repr.
        print(u"Exception:{0}".format(sys.exc_info()[1]))
    def set_dbfilepath(self,filepath):
        # Record the database location: *filepath* is either a single
        # geodatabase path or a directory to scan for geodatabases.
        if self.isSingleFile(filepath):
            # Split "dir/name" into directory prefix (with trailing '/')
            # and the database name itself.
            paths = filepath.rpartition('/')
            self.dbpath = paths[0] + paths[1]
            self.currDb = paths[2]
            self.dbs = [self.currDb]
        else:
            # Treat filepath as a folder: collect file (.gdb) and personal
            # (Access .mdb) geodatabases found directly inside it.
            arcpy.env.workspace = filepath
            gdbs = arcpy.ListWorkspaces('*','FileGDB')
            mdbs = arcpy.ListWorkspaces('*','Access')

            # NOTE(review): only the FileGDB list is printed here; mdbs is
            # collected but otherwise unused in this block - confirm intent.
            for gdb in gdbs:
                print gdb
def recursive_list_fcs(workspace,
                       theMergeFile,
                       wild_card=None,
                       feature_type=None):
    """Merge every feature class found under *workspace* into *theMergeFile*.

    Walks the folder tree, collecting feature classes that live directly in
    folders, inside non-folder workspaces (e.g. coverages), and inside
    feature datasets.  Results can be limited by *wild_card* and
    *feature_type*.  The previous arcpy workspace is always restored.
    """
    preexisting_wks = arcpy.env.workspace
    arcpy.env.workspace = workspace
    print(workspace)

    try:
        list_fcs = []
        for root, dirs, files in os.walk(workspace):
            arcpy.env.workspace = root
            fcs = arcpy.ListFeatureClasses(wild_card, feature_type)
            if fcs:
                list_fcs += [os.path.join(root, fc) for fc in fcs]

            # Pick up workspace types that don't have a folder structure
            # (coverages; file geodatabases do have one).
            # BUG FIX: the loop variable below used to be named
            # "workspace", shadowing the parameter and leaking into the
            # dataset-path join further down.
            workspaces = set(arcpy.ListWorkspaces()) - \
                         set(arcpy.ListWorkspaces('', 'FILEGDB')) -\
                         set(arcpy.ListWorkspaces('', 'COVERAGE'))

            for sub_ws in workspaces:
                arcpy.env.workspace = os.path.join(root, sub_ws)
                fcs = arcpy.ListFeatureClasses(wild_card, feature_type)

                if fcs:
                    list_fcs += [
                        os.path.join(root, sub_ws, fc) for fc in fcs
                    ]

            # Feature datasets are listed relative to the current folder,
            # so point the workspace back at root first.  ListDatasets
            # returns None (not []) when nothing matches.
            arcpy.env.workspace = root
            for dataset in arcpy.ListDatasets('', 'FEATURE') or []:
                ds_fcs = arcpy.ListFeatureClasses(wild_card, feature_type,
                                                  dataset)
                if ds_fcs:
                    list_fcs += [
                        os.path.join(root, dataset, fc)
                        for fc in ds_fcs
                    ]

    finally:
        arcpy.env.workspace = preexisting_wks

    arcpy.Merge_management(list_fcs, theMergeFile)
Exemple #5
0
def fcswpath_in_workspace(workspace):
    """Yield the full path of every feature class under *workspace*,
    recursing into any nested workspaces.
    """
    arcpy.env.workspace = workspace
    for fc in arcpy.ListFeatureClasses():
        yield os.path.join(workspace, fc)
    # BUG FIX: this recursion used to be nested inside the loop above (so
    # it ran once per feature class) and called fcs_in_workspace, which
    # yields bare names rather than the full paths this function promises.
    for ws in arcpy.ListWorkspaces():
        for fcpath in fcswpath_in_workspace(os.path.join(workspace, ws)):
            yield fcpath
Exemple #6
0
def Do(indir, outdir, outfile):
    """Write feature-class record counts for every file GDB under *indir*.

    Produces one summary file (*outfile* in *outdir*) covering all GDBs and
    one per-GDB text file named after each geodatabase.  Counts cover both
    root-level feature classes and those inside feature datasets.
    """
    # "with" guarantees the report files are closed even if arcpy raises
    # (the originals leaked handles on any exception).
    with open(os.path.join(outdir, outfile), "w") as f:
        env.workspace = indir
        gdblist = arcpy.ListWorkspaces("*", "FileGDB")

        for gdb in gdblist:
            print(gdb)
            f.write(gdb + "\r\n")

            # Strip the ".gdb" extension to name the per-GDB report.
            gdbname = os.path.split(gdb)[1][:-4]
            with open(os.path.join(outdir, gdbname + ".txt"), "w") as f_gdb:
                env.workspace = gdb
                fcs_in_gdb = arcpy.ListFeatureClasses()
                for fc in fcs_in_gdb:
                    fc_count = arcpy.GetCount_management(env.workspace + os.sep + fc)
                    print(fc, "\t", fc_count)
                    f.write(fc + "\t" + str(fc_count) + "\r\n")
                    f_gdb.write(fc + "\t" + str(fc_count) + "\r\n")
                dss_in_gdb = arcpy.ListDatasets()
                for ds in dss_in_gdb:
                    env.workspace = gdb + os.sep + ds
                    fcs_in_ds = arcpy.ListFeatureClasses()
                    for fc in fcs_in_ds:
                        fc_count = arcpy.GetCount_management(env.workspace + os.sep + fc)
                        print(ds, fc, fc_count)
                        f.write(ds + "\t" + fc + "\t" + str(fc_count) + "\r\n")
                        f_gdb.write(ds + "\t" + fc + "\t" + str(fc_count) + "\r\n")
            print("===================")
            f.write("===================\r\n")
Exemple #7
0
def gdb_to_shp(workspace, outworkspace):
    """
    Export every feature class (inside feature datasets) of each File
    Geodatabase found in the current workspace to a shapefile in
    *outworkspace*.  Output names are "<gdb name>_<feature class>.shp".
    """

    import os
    from glass.mng.gen import copy_feat

    # List GeoDatabases
    lst_gdb = arcpy.ListWorkspaces(workspace_type="FileGDB")

    # {gdb path: {dataset name: [feature class names]}}
    dic_gdb = {}

    for gdb in lst_gdb:
        arcpy.env.workspace = gdb

        dic_gdb[gdb] = {}

        # arcpy List* helpers return None (not []) when nothing matches;
        # guard both listings so the loops below don't crash.
        dts = arcpy.ListDatasets() or []

        for dt in dts:
            dic_gdb[gdb][dt] = arcpy.ListFeatureClasses(feature_dataset=dt) or []

    for gdb in dic_gdb:
        for dt in dic_gdb[gdb]:
            for fc in dic_gdb[gdb][dt]:
                copy_feat(os.path.join(gdb, dt, fc),
                          os.path.join(
                              outworkspace, "{}_{}.shp".format(
                                  os.path.splitext(os.path.basename(gdb))[0],
                                  fc)),
                          gisApi='arcpy')
    def tearDown(self):
        """Remove every dataset, workspace and remaining file under the
        temporary data folder used by the tests."""
        arcpy.env.workspace = self.temp_data_folder
        done = True
        # delete all datasets
        datasets = arcpy.ListDatasets()
        for dataset in datasets:
            arcpy.Delete_management(dataset)

        # delete all workspaces
        workspaces = arcpy.ListWorkspaces()
        for workspace in workspaces:

            # Exists/Compact/Exists is a workaround to coax ArcGIS into
            # releasing schema locks before the delete attempt below.
            arcpy.Exists(workspace)
            arcpy.Compact_management(workspace)
            arcpy.Exists(workspace)
            try:
                arcpy.Delete_management(workspace)
            except arcpy.ExecuteError:
                print("cannot delete {0} due to lock".format(workspace))
                done = False

        # delete directory with all remaining files (skipped when any
        # workspace is still locked, so a later run can retry)
        if done:
            distutils.dir_util.remove_tree(self.temp_data_folder)
def fcs_in_workspace(workspace):
    """Generator yielding every feature-class name in *workspace* and,
    recursively, in each workspace nested beneath it."""
    arcpy.env.workspace = workspace
    for feature_class in arcpy.ListFeatureClasses():
        yield feature_class
    for child_ws in arcpy.ListWorkspaces():
        for feature_class in fcs_in_workspace(child_ws):
            yield feature_class
Exemple #10
0
def mergeSamples(samples_dir="D:/projects/ksu/samples",
                 out_fc="D:/projects/ksu/attributes.gdb/tryit2"):
    """Merge every feature class from every file GDB under *samples_dir*
    into the single feature class *out_fc*.

    The previously hard-coded paths are now overridable parameters; the
    defaults preserve the original behavior for existing callers.
    """
    arcpy.env.workspace = samples_dir

    # List all file geodatabases in the current workspace
    workspaces = arcpy.ListWorkspaces("*", "FileGDB")

    completelist = []
    for workspace in workspaces:
        print(workspace)
        arcpy.env.workspace = workspace

        featureclasses = arcpy.ListFeatureClasses()
        print(featureclasses)

        for fc in featureclasses:
            print(fc)
            full_path = workspace + '/' + fc
            print(full_path)
            completelist.append(full_path)

    print(completelist)
    arcpy.Merge_management(completelist, out_fc)
Exemple #11
0
def rasters_in_workspace(workspace):
    """Generator yielding every raster name in *workspace*, then recursing
    into each nested workspace."""
    arcpy.env.workspace = workspace
    for raster_name in arcpy.ListRasters():
        yield raster_name
    for nested_ws in arcpy.ListWorkspaces():
        for raster_name in rasters_in_workspace(nested_ws):
            yield raster_name
Exemple #12
0
def listfolder(path):
    # Inventory the contents of *path*: feature classes, rasters, map
    # documents and nested workspaces, logging a row to the module-level
    # csv writer for each category found.
    arcpy.env.workspace = path

    featureclass = arcpy.ListFeatureClasses()
    raster = arcpy.ListRasters()
    workspace = arcpy.ListWorkspaces()
    mxd = arcpy.ListFiles("*.mxd")

    # NOTE(review): csvfile is a module-level csv writer defined elsewhere
    # in this file - confirm it is initialized before this is called.
    csvfile.writerow(["hello world"])

    for fc in featureclass:
        desc = arcpy.Describe(fc)
        print("there was a feature class")
        csvfile.writerow(["feature class goes here"])

    for ras in raster:
        desc = arcpy.Describe(ras)
        print("there was a raster")
        csvfile.writerow(["raster data goes here"])

    for maps in mxd:
        desc = arcpy.Describe(maps)
        csvfile.writerow(["maps data will go here"])
        #need to do some other stuff before printing

    for work in workspace:
        if work.endswith(".gdb"):
            print(work + " is a file geodatabase will run function")
            #call list file geodatabase function
        elif os.path.isdir(work):
            print(work + " Is a folder will call again to run recursively")
            #make recursive call to funtion to start again :)
    # NOTE(review): "file" here resolves to the Python builtin, not an open
    # file object, so file.flush() will raise - this probably meant a
    # module-level output handle. TODO confirm and fix.
    file.flush()
Exemple #13
0
def main():
    """Merge state parcel GDBs, join parcels to building footprints, and
    copy the joined results into a final date-stamped file GDB.

    Relies on module-level globals defined elsewhere in this file:
    PARCEL_DIR, PROJECT_FOLDER, BUILDING_FP_GDB, state_list, state_abbr,
    mp, merge_parcels, bldg_fp_join.
    """
    todays_date = str(datetime.now()).split(' ')[0].replace('-', '')

    # MERGE PARCELS
    print('MERGING PARCELS...')
    gdb_list = []
    arcpy.env.workspace = PARCEL_DIR
    workspaces = arcpy.ListWorkspaces()
    for workspace in workspaces:
        print(workspace)
        if os.path.basename(workspace).split('.')[0] in state_list:
            gdb = os.path.join(PARCEL_DIR, workspace)  # STATES
            gdb_list.append(gdb)
    print(gdb_list)
    num_processes = 4
    p = mp.Pool(num_processes)
    output = p.map(merge_parcels, gdb_list)
    p.close()
    p.join()  # BUG FIX: wait for the workers (the second pool already did)
    for line in output:
        print(line)

    # JOIN BLDG FP AND PARCELS
    print('JOINING PARCELS TO BUILDING FOOTPRINTS...')
    num_processes = 4
    p = mp.Pool(num_processes)
    output = p.map(bldg_fp_join, state_list)
    p.close()
    p.join()
    for line in output:
        print(line)

    # COPY FEATURES TO GDB and compare input and output building footprints
    print('COPYING TO FINAL GDB...')
    final_gdb_name = 'BldgFP_Parcels_{}.gdb'.format(todays_date)
    final_gdb_path = os.path.join(PROJECT_FOLDER, final_gdb_name)
    arcpy.env.workspace = final_gdb_path
    arcpy.env.overwriteOutput = True
    if not os.path.exists(final_gdb_path):
        arcpy.CreateFileGDB_management(PROJECT_FOLDER, final_gdb_name)
    for state in state_list:
        in_bfp = os.path.join(BUILDING_FP_GDB, '{}_poly'.format(state.replace('_', '')))
        state_gdb = os.path.join(PROJECT_FOLDER, '{}.gdb'.format(state))
        out_bfp = os.path.join(PROJECT_FOLDER, '{}.gdb\Bldg_FP_Parcels'.format(state))
        in_count = int(arcpy.GetCount_management(in_bfp).getOutput(0))
        out_count = int(arcpy.GetCount_management(out_bfp).getOutput(0))
        if in_count == out_count:
            print('{}: EQUAL!'.format(state))
            out_feature = '{}_Bldg_FP_Parcels'.format(state_abbr[state])
            arcpy.FeatureClassToFeatureClass_conversion(out_bfp, final_gdb_path, out_feature)
            if in_count != int(arcpy.GetCount_management(out_feature).getOutput(0)):
                print('ERROR COPYING FEATURE: {}'.format(out_feature))
            else:
                # deletes intermediate feature classes/gdb after the final
                # BLDG FP PARCEL join is saved in the final gdb
                print('Deleting gdb')
                arcpy.Delete_management(state_gdb)
        else:  # counts differ - report the mismatch instead of re-testing
            print('{}: UNEQUAL...'.format(state))
            print('In count = {}'.format(in_count))
            print('Out count = {}'.format(out_count))
class Find_GDB_In_Folders(object):
    """Collect the workspaces under a fixed ArcTutor folder.

    BUG FIX: this class previously inherited from the ``arcpy`` module
    itself, which raises TypeError at class-creation time (modules cannot
    be base classes).
    """

    # Evaluated once at class-definition time, as before.
    location = r"C:\arcgis\ArcTutor"
    arcpy.env.workspace = location
    wslist = arcpy.ListWorkspaces()

    @staticmethod
    def findgDB():
        # Placeholder - not yet implemented.
        pass
def fcs_in_workspace(workspace):
    """Recursively walk *workspace* and yield the name of every feature
    class found in it or in any nested workspace."""
    arcpy.env.workspace = workspace
    for name in arcpy.ListFeatureClasses():
        yield name
    for nested_ws in arcpy.ListWorkspaces():
        for name in fcs_in_workspace(nested_ws):
            yield name
Exemple #16
0
def getWorkSpaces(rootdir):
    """Return the file geodatabases directly under ``rootpath + rootdir``.

    NOTE(review): relies on a module-level ``rootpath`` string; plain
    concatenation is kept, so rootpath presumably ends with a path
    separator - confirm against its definition.
    """
    arcpy.env.workspace = rootpath + rootdir

    # All file geodatabases directly inside the workspace.
    return arcpy.ListWorkspaces("*", "FileGDB")
Exemple #17
0
def fcs_in_workspace(workspace):
    """Yield each feature class in *workspace*; descend into every nested
    workspace and yield theirs as well."""
    arcpy.env.workspace = workspace
    for current_fc in arcpy.ListFeatureClasses():
        yield current_fc
    for sub_workspace in arcpy.ListWorkspaces():
        for current_fc in fcs_in_workspace(sub_workspace):
            yield current_fc
Exemple #18
0
def fcs_in_workspace(workspace):
    """Yield every feature class under *workspace*, recursing into nested
    workspaces.  A TypeError (e.g. iterating a None listing when a species
    geodatabase is absent) is reported rather than propagated."""
    try:
        arcpy.env.workspace = workspace
        for item in arcpy.ListFeatureClasses():
            yield item
        for child_ws in arcpy.ListWorkspaces():
            for item in fcs_in_workspace(child_ws):
                yield item
    except TypeError:
        print ("A species geodatabase maybe missing from your species library; confirm all species groups have a geodatabase")
def rasters_in_workspace(workspace):
    """Yield ``(raster_name, full_path)`` for every raster found under
    *workspace*, recursing into nested workspaces.

    BUG FIX: the original accumulated ``path = path + os.sep + ws`` across
    loop iterations, so every workspace after the first got a corrupted
    prefix.  The recursive call already derives each child's paths from the
    child workspace itself, so no accumulation is needed.
    """
    arcpy.env.workspace = workspace
    for raster in arcpy.ListRasters():
        out_path = str(workspace) + os.sep + str(raster)
        yield raster, out_path
    for ws in arcpy.ListWorkspaces():
        for raster, out_path in rasters_in_workspace(ws):
            yield raster, out_path
Exemple #20
0
def create_nonFP_byYear():
    ##Description: merge all the couties for a given year and then use erase() function with clu_[year]_crop to get non crop dataset
    # For each year 2005-2015: collect the county codes that have CLU data
    # in any state GDB, build a county layer restricted to those codes, and
    # erase the year's footprint from it to produce the non-crop dataset.

    yearlist = range(2005, 2016)

    ##create an array to hold all files from all state gdbs for a given year
    for year in yearlist:
        print year
        countylist = []

        # Root folder holding one file GDB per state.
        arcpy.env.workspace = "D:\\projects\\intactland\\intact_clu\\main\\states"

        # List all file geodatabases in the current workspace
        workspaces = arcpy.ListWorkspaces("*", "FileGDB")

        #get each state geodtabase
        for workspace in workspaces:
            print workspace
            arcpy.env.workspace = workspace

            ##list features for a given year in each state geodatabase
            featureclasses = arcpy.ListFeatureClasses("*_acea_{}*".format(
                str(year)))

            for fc in featureclasses:
                print 'fc:', fc
                # Names look like "<a>_<b>_<county>_..."; the third token
                # is the county code, quoted here for use in a SQL clause.
                substring_list = fc.split("_")
                countylist.append("'" + substring_list[2] + "'")

        print "number of counties for {}: {}".format(str(year),
                                                     len(countylist))

        def createWhereString(countylist):
            # Build "atlas_stco=<c1> OR atlas_stco=<c2> ..." for the layer
            # definition query.
            print countylist
            cntyString = ' OR atlas_stco='.join(countylist)
            cond = 'atlas_stco={}'.format(cntyString)
            return cond

        if len(countylist) > 0:
            arcpy.env.workspace = "G:\\ancillary_storage\\intactland\\intact_clu\\main\\years"
            in_features = 'C:\\Users\\Bougie\\Desktop\\Gibbs\\data\\usxp\\ancillary\\vector\\shapefiles.gdb\\counties'
            layer = 'counties_{}'.format(str(year))
            where_clause = createWhereString(countylist)

            # # # Make a layer from the feature class
            arcpy.MakeFeatureLayer_management(in_features, layer, where_clause)

            #create a feature class containing a counties with data for a given year
            # arcpy.FeatureClassToFeatureClass_conversion(layer, "D:\\projects\\intact_land\\years\\{}.gdb".format(str(year)), "clu_{}_counties".format(str(year)))

            ##------------create to noncrop dataset per year ---------------------------------------------------------------------------
            arcpy.Erase_analysis(
                in_features=layer,
                erase_features="{0}.gdb\\fp_{0}".format(str(year)),
                out_feature_class="{0}.gdb\\nfp_{0}".format(str(year)))
Exemple #21
0
def gift_wrap(location):
    '''location: string

    Scrubs the hash field from all data and compacts the geodatabases.
    '''
    arcpy.env.workspace = location

    workspaces = arcpy.ListWorkspaces('*', 'FileGDB')

    # Plain loops: the original used list comprehensions purely for their
    # side effects, building throwaway lists of None.
    for workspace in workspaces:
        _remove_hash_from_workspace(workspace)
    for workspace in workspaces:
        arcpy.management.Compact(workspace)
Exemple #22
0
def walk( key, directory, wildcard = None ):
    '''Walks through root DIRECTORY and lists all KEY files
KEY - is the type of data to list (ex: RASTER, TABLE, SHP, MXD)
DIRECTORY - is the root directory for the walk (i.e. the starting point)
WILDCARD - is a pattern to select against in the file name

EX:  walk( 'SHP', r'C:\\', '*BON*' )
*This will capture all SHPs with BON in the title that live on your C drive*
'''
    # Preserve the old workspace so it can be restored before returning.
    oldWKSP = arcpy.env.workspace

    # Set the workspace to the directory input.
    arcpy.env.workspace = directory

    retList = []

    # Recurse into any sub-workspaces first.
    # BUG FIX: arcpy List* functions return None - not [] - when nothing
    # matches, so every listing below is guarded with "or []" (the
    # original crashed on len(None) in an empty directory).
    for wksp in arcpy.ListWorkspaces() or []:
        retList = retList + walk( key , wksp , wildcard )

    kind = key.upper().strip()
    if kind == 'RASTER':
        for raster in arcpy.ListRasters( wildcard ) or []:
            retList.append( os.path.join(directory , raster ) )

    elif kind == 'TABLE':
        for table in arcpy.ListTables( wildcard ) or []:
            retList.append( os.path.join(directory, table ) )

    elif kind == 'SHP':
        for shp in arcpy.ListFeatureClasses( wildcard ) or []:
            retList.append( os.path.join(directory , shp ) )

    elif kind == 'MXD':
        for fileName in arcpy.ListFiles( wildcard ) or []:
            if fileName[-3:].lower() == 'mxd':
                retList.append(os.path.join(directory, fileName ) )

    arcpy.env.workspace = oldWKSP

    return retList
    def openFileFolder(self):
        # Open a folder picker; each .gdb or .mdb inside the chosen folder
        # is meant to get its own DataChecker.
        # (Alternative single-file picker, kept for reference:)
        #fileName = QFileDialog.getOpenFileName(self,'Open file','/home')
        filedir = str(QFileDialog.getExistingDirectory()).replace('\\','/')
        print 'filedir', filedir,type(filedir)

        arcpy.env.workspace = filedir
        # Only personal geodatabases (.mdb / Access) are listed here.
        datasets = arcpy.ListWorkspaces("*", "Access")

        for dataset in datasets:
            print (dataset)

        print ("openfilefolder")
Exemple #24
0
def Do(gdb_dir_in):
    """Compact every file geodatabase directly under *gdb_dir_in*.

    Non-GDB workspaces are reported and skipped.
    """
    env.workspace = gdb_dir_in
    GDBs = arcpy.ListWorkspaces()
    print(str(len(GDBs)) + ' gdbs to be compacted')
    # enumerate() replaces the hand-maintained counter; endswith() replaces
    # the "gdb[-4:] in ['.gdb']" membership test.
    for gdb_index, gdb in enumerate(GDBs, start=1):
        if gdb.endswith(".gdb"):
            arcpy.Compact_management(gdb)
            print(str(gdb_index) + '\t' + gdb + ' done')
        else:
            print(str(gdb_index) + '\t' + gdb + ' is not a gdb file')
Exemple #25
0
def SIF_AdmTrack():
    """Print the workspaces under the module-level ``final`` path, then the
    tables, feature datasets and per-dataset feature classes of the current
    workspace."""
    sif = arcpy.ListWorkspaces(final, "All")
    print(sif)
    tbls = arcpy.ListTables()
    for tbl in tbls:
        print(tbl)
    fds = arcpy.ListDatasets()
    for fd in fds:
        print(fd)
        # BUG FIX: ListFeatureClasses(fd) passed the dataset name as the
        # wild_card argument; it must go to feature_dataset.
        fcs = arcpy.ListFeatureClasses(feature_dataset=fd)
        for fc in fcs:
            print(fc)
def log_workspace_contents(logger):
    """Log every workspace, dataset, feature class and table visible from
    the current arcpy workspace through *logger*."""
    # arcpy List* helpers return None when nothing matches, so each
    # iterated result is guarded with "or []".
    for workspace in arcpy.ListWorkspaces() or []:
        logger.info("Workspace: {}".format(workspace))

    logger.info("arcpy.env.workspace: {}".format(arcpy.env.workspace))

    for dataset in arcpy.ListDatasets() or []:
        logger.info("  dataset: {}".format(dataset))
        logger.info("    feature classes: {} ".format(arcpy.ListFeatureClasses(feature_dataset=dataset)))

    logger.info("  feature classes: {} ".format(arcpy.ListFeatureClasses()))
    logger.info("  tables: {} ".format(arcpy.ListTables()))
Exemple #27
0
def create(name, path):
    """Create file geodatabase *name* (no extension) under folder *path*.

    Returns an "already exists" message when a GDB of that name is present,
    otherwise creates it and returns 'succes' (spelling kept for callers).

    BUG FIX: the original called CreateFileGDB once for every non-matching
    GDB in the folder (failing on the second attempt) and had unreachable
    code after its early return.
    """
    env.workspace = path
    existing = arcpy.ListWorkspaces("*", "FileGDB") or []
    for fgdb in existing:
        if os.path.basename(fgdb)[:-4] == name:
            # Same message as before: "<name>.gdb already exists!"
            return name + '.gdb' + '已经存在!'
    arcpy.CreateFileGDB_management(path, name)
    return 'succes'
def cloudMaskingLandsatScenes(curDir, destinationpath):
    # Apply a cloud mask to clipped NDMI rasters in each workspace found
    # under the current arcpy workspace, saving masked copies under
    # *destinationpath*.
    # NOTE(review): curDir is unused - the folder list comes from
    # arcpy.env.workspace; confirm whether curDir was meant to be set here.
    folders = arcpy.ListWorkspaces()
    for folder in folders:
        arcpy.env.workspace = folder
        rasters = arcpy.ListRasters("*", "TIF")
        # assumes the cloud-mask raster sorts first and all remaining TIFs
        # are scene rasters - TODO confirm this naming convention
        selectedRasters = rasters[1:]
        cloudmask = rasters[0]
        for raster in selectedRasters:
            if raster.endswith("ndmi_clipped.tif"):
                # Keep pixels where the mask satisfies "VALUE < 4"; others
                # take the false raster ("").
                outNulled = Con(cloudmask, raster, "", "VALUE < 4")
                outputFileName = raster.split(".")[0] + "_cloudMask.tif"
                newFolderName = createFolderFromFilename(
                    destinationpath, outputFileName)
                outputRaster = os.path.join(destinationpath, newFolderName)
                outputMaskedRaster = os.path.join(outputRaster, outputFileName)
                outNulled.save(outputMaskedRaster)
Exemple #29
0
def Do(in_path, out_path, out_gdb, fcs):
    """For each name in *fcs*, merge that feature class from every GDB
    under *in_path* into a freshly created GDB *out_gdb* inside *out_path*,
    reporting input/output record counts along the way.
    """
    # List the input geodatabases.
    env.workspace = in_path
    gdblist = arcpy.ListWorkspaces()

    # Absolute path of the output GDB.
    outGDBpath = os.path.join(out_path, out_gdb)

    # If the output GDB already exists, remove it first.
    if os.path.exists(outGDBpath):
        delete_file_folder(outGDBpath)

    # Create the output GDB.
    arcpy.CreateFileGDB_management(out_path, out_gdb)

    for fc in fcs:
        # Absolute location of the merged output feature class.
        fc_out = outGDBpath + os.sep + fc
        # Matching inputs and a running record count.
        fclist = []
        fc_cnt1 = 0
        # Walk each input geodatabase.
        for gdb in gdblist:
            if not gdb.endswith(".gdb"):
                continue
            fc_from = gdb + os.sep + fc
            if arcpy.Exists(fc_from):
                fclist.append(fc_from)
                fc_count = int(arcpy.GetCount_management(fc_from).getOutput(0))
                fc_cnt1 = fc_cnt1 + fc_count
                print(fc_from + ': ' + str(fc_count))
            else:
                print(fc_from + ' is missing')
        # Total number of input records.
        print(str(fc_cnt1) + ' fcs input')
        # Merge all inputs into the output feature class.
        arcpy.Merge_management(fclist, fc_out)
        # Output record count (should match fc_cnt1).
        fc_cnt2 = int(arcpy.GetCount_management(fc_out).getOutput(0))
        print(str(fc_cnt2) + ' fcs output')
Exemple #30
0
def post_process_databases(basic_features_gdb, build_dir):
    """
    Copies in basic features gdb to build dir and cleans up FCs and Tables
    with SummID to RowID. Finally deletes the TEMP folder generated in the
    build process

    Args:
        basic_features_gdb (str): path to the basic features geodatabase
        build_dir (str): path to the build directory

    Returns:
        None
    """
    print("Postprocessing build directory...")

    # copy BasicFeatures into Build
    print("--- Overwriting basic features in BUILD dir with current version")
    path, basename = os.path.split(basic_features_gdb)
    out_basic_features = PMT.make_path(build_dir, basename)
    PMT.check_overwrite_output(output=out_basic_features, overwrite=True)
    arcpy.Copy_management(in_data=basic_features_gdb, out_data=out_basic_features)

    # reset SummID to RowID
    print("--- updating SummID to RowID project wide...")
    arcpy.env.workspace = build_dir
    for gdb in arcpy.ListWorkspaces(workspace_type="FileGDB"):
        print(f"--- Cleaning up {gdb}")
        arcpy.env.workspace = gdb
        # update feature classes and standalone tables together;
        # ListTables returns None when a GDB holds no tables, which would
        # make "fcs + tbls" raise - hence the "or []" guard.
        fcs = list(list_fcs_in_gdb())
        tbls = arcpy.ListTables() or []
        all_tbls = fcs + tbls
        alter_fields(
            table_list=all_tbls,
            field=p_conf.SUMMARY_AREAS_COMMON_KEY,
            new_field_name=b_conf.SUMMARY_AREAS_FINAL_KEY)
    # TODO: incorporate a more broad AlterField protocol for Popup configuration

    # delete TEMP folder
    temp = PMT.make_path(build_dir, "TEMP")
    if arcpy.Exists(temp):
        print("--- deleting TEMP folder from previous build steps")
        arcpy.Delete_management(temp)