Example #1
def ConvertMDBtoGDB(mdb, gdb):
    tables = []
    features = []
    FCs = ListFCinGDBorMDB(mdb)
    #print(FCs)
    for fc in FCs:
        ft = arcpy.Describe(mdb + '\\' + fc).dataElementType
        print(ft)
        if ft == 'DEFeatureClass':
            features.append(mdb + '\\' + fc)
        elif ft == 'DETable':
            tables.append(mdb + '\\' + fc)
    if os.path.exists(gdb):
        try:
            shutil.rmtree(gdb)
        except Exception as e:
            print(e)
    # note: the new gdb is named after the mdb, so gdb is expected to be
    # os.path.dirname(gdb) joined with the mdb's basename plus '.gdb'
    arcpy.CreateFileGDB_management(os.path.dirname(gdb),
                                   os.path.basename(mdb).split('.')[0])
    if features:
        arcpy.FeatureClassToGeodatabase_conversion(features, gdb)
    if tables:
        arcpy.TableToGeodatabase_conversion(tables, gdb)
    return features + tables
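A minimal usage sketch (the paths are hypothetical; assumes arcpy, os, and shutil are imported and that ListFCinGDBorMDB is defined alongside this function). Per the note above, the output gdb shares the mdb's basename:

copied = ConvertMDBtoGDB(r'C:\data\parcels.mdb', r'C:\data\parcels.gdb')
print(copied)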
Example #2
def get_count(csv_fname, ws, countyName, parcel_name_field, map_name_field):
    '''Converts the csv that was created into a dict with
    PARCELID as key and MAP_NAME count as value,
    then counts the number of pages for each PARCELID.'''

    print('Now going to get the count \n')

    try:
        conversion_fname = os.path.abspath(os.path.dirname(csv_fname))
    except TypeError:
        csv_fname = str(csv_fname)  # apparently csv_fname can arrive as a list-like object
        conversion_fname = os.path.abspath(os.path.dirname(csv_fname))

    # creating dict with no duplicates
    counts_dict = dict()
    df = pd.read_csv(csv_fname)
    df1 = df[[parcel_name_field, map_name_field]]  # data frame with just ParcelID and MAP_NAME
    print('Number of rows is: ', df1.shape[0])
    df2 = df1.drop_duplicates()
    print('After removing duplicates, number of rows is: {}'.format(df2.shape[0]) + '\n')

    for item in df2[parcel_name_field]:  # loops through the PARCELID column
        PARCELID = str(item)
        counts_dict[PARCELID] = counts_dict.get(PARCELID, 0) + 1  # increments the count for each ParcelID key

    out_csv = conversion_fname + '\\' + countyName + '_poly_count.csv'
    with open(out_csv, 'w', newline='') as op_file:  # writes poly_count.csv
        dict_writer = csv.writer(op_file)
        dict_writer.writerow([parcel_name_field, 'Poly_Count'])  # header row
        for key, value in counts_dict.items():
            dict_writer.writerow(["'" + key + "'", value])  # writes each PARCELID (quoted) and its Poly_Count

    csv_to_geodb = arcpy.TableToGeodatabase_conversion(out_csv, ws)
    print("All Done.")
Example #3
def archive():

    # DCA - this should really be broken out into inputs vs outputs folders so its not all lumped together

    log("Archiving WHI inputs and outputs")

    # create new geodatabase
    archive_gdb = "WHI_archive_" + datetime.datetime.now().strftime('%Y%m%d')
    full_path = os.path.join(config.archive_loc, archive_gdb + ".gdb")
    if not arcpy.Exists(full_path):
        arcpy.CreateFileGDB_management(config.archive_loc, archive_gdb)

    # copy input files into geodatabase
    log("...archiving inputs")

    # vector sources
    log("......vectors")
    missing = [fc for fc in config.vect_archive_list if not arcpy.Exists(fc)]
    if missing:
        # bail out, reporting every source that could not be found
        return ", ".join(str(fc) for fc in missing) + " not found"
    arcpy.FeatureClassToGeodatabase_conversion(
        config.vect_archive_list, full_path)


# exclude these for now - they are going to be pretty static and take forever to copy
##    # raster sources
##    for fc in config.rast_archive_list:
##        if arcpy.Exists(fc) == True:
##            log("......rasters")
##            arcpy.RasterToGeodatabase_conversion(config.rast_archive_list, full_path)
##        else:
##            return str(fc) + " not found"

    # copy output files into geodatabase
    log("...archiving outputs")

    # table outputs
    log("......tables")
    arcpy.env.workspace = config.primary_output
    tables = arcpy.ListTables()
    if tables:
        arcpy.TableToGeodatabase_conversion(tables, full_path)

    # feature class outputs
    log("......feature class(es)")
    fcs = arcpy.ListFeatureClasses()
    if fcs:
        arcpy.FeatureClassToGeodatabase_conversion(fcs, full_path)

    log("Archiving complete")
Example #4
def tableTo_primaryOutput(input_object):
    log("Copy result table to primary output gdb")
    desc = arcpy.Describe(input_object)
    if desc.dataElementType != 'DETable':
        # if not a table - convert fc to table
        table_view = arcpy.MakeTableView_management(input_object,
                                                    desc.basename)
        # move table to primary output gdb
        arcpy.TableToGeodatabase_conversion(table_view, config.primary_output)
    else:
        # if already a table - copy table to primary output gdb
        full_output_name = os.path.join(config.primary_output, desc.basename)
        arcpy.Copy_management(input_object, full_output_name)
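A sketch of calling tableTo_primaryOutput (the path is hypothetical; config.primary_output must point at the destination gdb). The input can be either a feature class or a table:

tableTo_primaryOutput(r'C:\data\results.gdb\stream_summary')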
Example #5
def select_and_copy_table(self, gdb_full_path_name, id_list, input_data, rename_basename, test_field):
    table_view_name = "table_view_selection"
    table_view_selection = arcpy.MakeTableView_management(input_data,
                                                          table_view_name,
                                                          "{0} in ({1})".format(
                                                              test_field,
                                                              self.utility.format_list_for_where_clause(id_list))
                                                          )
    if int(arcpy.GetCount_management(table_view_selection)[0]) > 0:
        arcpy.AddMessage("...Copying table to gdb")
        arcpy.TableToGeodatabase_conversion(table_view_selection, gdb_full_path_name)
        arcpy.Rename_management(os.path.join(gdb_full_path_name, table_view_name), rename_basename)
        del table_view_selection
Example #6
def ImportReDevTable(InTable, UPlanGDB, LongName, PopField, EmpField):
    '''
    *Imports a redevelopment table to the GDB
    *Adds Redev keys to the upc_key table
    *Adds a record to the upc_layers table for this redev table
    
    Calls: 
    AddToUPC_Layers
    
    Called by:
    Import Redevelopment Table Toolbox
    
    Arguments:
    InTable: The table to be added to the GDB
    UPlanGDB: The UPlan GDB (where the table will be imported)
    LongName: The descriptive name of the table
    PopField: Field that contains the number of people
    EmpField: Field that contains the number of employees
    
    Returns: None
    '''
    # Set workspace
    env.workspace = UPlanGDB

    RedevTableName = os.path.basename(InTable)

    #Add table to geodatabase
    arcpy.TableToGeodatabase_conversion(InTable, UPlanGDB)

    #update upc_key table with redev info
    UpdateUPCKeyTable(UPlanGDB, 'Redev', RedevTableName)
    UpdateUPCKeyTable(UPlanGDB, 'Redev_pop', PopField)
    UpdateUPCKeyTable(UPlanGDB, 'Redev_emp', EmpField)

    #add redev table to layer tracker table
    AddToUPC_Layers(UPlanGDB, RedevTableName, LongName, 'RedevTable')
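A hypothetical invocation, assuming the UPlan GDB and the input table exist and that UpdateUPCKeyTable and AddToUPC_Layers are importable:

ImportReDevTable(InTable=r'C:\data\redev_sites.dbf',
                 UPlanGDB=r'C:\projects\uplan.gdb',
                 LongName='Redevelopment sites',
                 PopField='POP',
                 EmpField='EMP')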
Example #7
#Note: 2015 census shapefile cannot be used to join data as some county names and FIPS have been changed since census 2010
#And Devineni et al. 2015 used 2010 census counties
county = projdir + "data/flood/Population/gz_2010_us_050_00_5m/gz_2010_us_050_00_5m.shp"
NDImax = projdir + "data/scarcity/Devineni_et_al_2015/NDC_NDImax.csv"
db_scarcity = "water_Scarcity/Gage_analysis_scarcity.gdb"
NDImax_db = db_scarcity + "\\NDC_NDImax"

#Generate FIPS for counties to have a common key with Devineni's data
arcpy.AddField_management(county, "FIPS", "TEXT")
with arcpy.da.UpdateCursor(county, ['STATE', "COUNTY", 'FIPS']) as cursor:
    for row in cursor:
        row[2] = row[0] + row[1]
        cursor.updateRow(row)

#Export csv to geodatabase
arcpy.TableToGeodatabase_conversion(NDImax, db_scarcity)

#Convert FIPS to string in NDImax table
arcpy.AddField_management(NDImax_db, "FIPS_str", "TEXT")
with arcpy.da.UpdateCursor(NDImax_db, ['FIPS', 'FIPS_str']) as cursor:
    for row in cursor:
        if row[0] < 10000:
            row[1] = "0" + str(row[0])
        else:
            row[1] = str(row[0])
        cursor.updateRow(row)

#NDImax and NDC are strangely formatted, include "NaN" and 095.02E-05 and things of the like, so need to be formatted.
arcpy.AddField_management(NDImax_db, "NDImax_numb", "DOUBLE")
arcpy.AddField_management(NDImax_db, "NDC_numb", "DOUBLE")
with arcpy.da.UpdateCursor(NDImax_db, ['NDImax', 'NDC', 'NDImax_numb', 'NDC_numb']) as cursor:
    for row in cursor:
        # reconstructed: the source snippet is truncated here; the field names
        # are assumed from the table name and the formatting comment above
        row[2] = None if row[0] == 'NaN' else float(row[0])
        row[3] = None if row[1] == 'NaN' else float(row[1])
        cursor.updateRow(row)
Example #8
        row = rows.newRow()
        row.setValue(table_field_name1, i[0])
        row.setValue(table_field_name2, i[1])
        row.setValue(table_field_name3, i[2])
        row.setValue(table_field_name4, i[3])
        row.setValue(table_field_name5, i[4])
        row.setValue(table_field_name6, i[5])
        row.setValue(table_field_name7, i[6])
        rows.insertRow(row)
    del row
    del rows

    # Export files to new geodatabase
    arcpy.FeatureClassToGeodatabase_conversion(polygon_name, output_gdb_path)
    arcpy.RasterToGeodatabase_conversion(depth_grid_name, output_gdb_path)
    arcpy.TableToGeodatabase_conversion(table_name, output_gdb_path)

    # Delete files from workspace
    arcpy.Delete_management(polygon_name)
    arcpy.Delete_management(table_name)
    arcpy.Delete_management(depth_grid_name)

    #Delete temporary files and variables
    if delete_intermediate_data == True:
        arcpy.AddMessage("Deleting temporary files for {0}".format(stage))
        arcpy.Delete_management(TIN)
        del TIN
        arcpy.Delete_management(RasterFromTIN)
        del RasterFromTIN
        arcpy.Delete_management(Subtracted)
        del Subtracted
Example #9
destination_path = "D:\Progetti_python\Verona\Sentinel1\BACKSCATTER\ImmaginiBackScatter\MergeIw1Iw2\Statistica_zonale\VH"
for k in rasters:
    outTable = os.path.join(destination_path, "D_" + k[0:11] + ".dbf")
    z = arcpy.gp.ZonalStatisticsAsTable_sa(
        "D:\\ArcGISProjects\\Verona\\Backscatter.gdb\\Uso_suolo_backscatter_iw1_VH",
        "Id", k, outTable, "DATA", "MEAN")
    print(str(k[0:8]), "_____done.")

arcpy.env.workspace = "D:\Progetti_python\Verona\Sentinel1\BACKSCATTER\ImmaginiBackScatter\MergeIw1Iw2\Statistica_zonale\VH"
outLocation = "D:\ArcGISProjects\Verona\Backscatter.gdb"
tablesmove = arcpy.ListTables()

for table in tablesmove:
    if "D_2018" in table:
        arcpy.TableToGeodatabase_conversion(table, outLocation)
    else:
        print(table)

# Rename fields
arcpy.env.workspace = "D:\\ArcGISProjects\\Verona\\Backscatter.gdb"
tables = arcpy.ListTables()
for fc in tables:
    if "D_2018" in fc:
        fieldList = arcpy.ListFields(fc)
        new_name = fc
        for field in fieldList:
            if field.name == 'MEAN':
                arcpy.AlterField_management(fc, field.name, new_name,
                                            new_name)
Example #10
    for reach in reach_list:
        PercentArea = [
            row[0] for row in arcpy.da.SearchCursor(
                "{0}_Hypsometry".format(reach), "Percent_Area")
        ]
        with arcpy.da.UpdateCursor(
                "hypsometry_all",
                ["{0}_Percent_Area".format(reach)]) as cursor:
            n = 0
            for row in cursor:
                row[0] = PercentArea[n]
                cursor.updateRow(row)
                n += 1

# Export files to new geodatabase
arcpy.TableToGeodatabase_conversion("Hypsometry_all", output_gdb_path)
arcpy.RasterToGeodatabase_conversion(depth_grid_name, output_gdb_path)
arcpy.RasterToGeodatabase_conversion("Hypsometry", output_gdb_path)
for reach in reach_list:
    arcpy.RasterToGeodatabase_conversion("{0}_Hypsometry".format(reach),
                                         output_gdb_path)

# Delete files from workspace
arcpy.Delete_management("Hypsometry_all")
arcpy.Delete_management(depth_grid_name)
arcpy.Delete_management("Hypsometry")
for reach in reach_list:
    arcpy.Delete_management("{0}_Hypsometry".format(reach))

# Delete temporary files and variables
if delete_intermediate_data:
Example #11
    writer.writerow(['OBJECTID', 'filename'])
    for path, dirs, files in os.walk("P:/zip"):
        for filename in files:
            x = x + 1
            writer.writerow([x, filename])

############# Copy text data into the default geodatabase #############
# Set local variables to copy
ws = 'C:/Users/' + usr + '/Documents/'
db = 'C:/Users/' + usr + '/Documents/ArcGIS/Default.gdb'

# create a list of new tables
arcpy.env.workspace = ws
newTables = arcpy.ListTables('*cleanup*')

# create a list of old tables
arcpy.env.workspace = db
oldTables = arcpy.ListTables('*cleanup*')

# delete existing tables
print('Deleting current tables.')
for i in oldTables:
    print('Deleting ' + i)
    arcpy.Delete_management(i)

# import both tables
print("Importing tables to gdb: " + db)
for table in newTables:
    table = ws + table
    arcpy.TableToGeodatabase_conversion(table, db)
Example #12
# Copyright © Scott Stopyak 2013
# Licence: Distributed under the terms of GNU GPL
#_________________________________________________________________________________

import os, sys, arcpy

inTable = arcpy.GetParameterAsText(0)
prefix = arcpy.GetParameterAsText(1)
outfolder = arcpy.GetParameterAsText(2)
arcpy.env.overwriteOutput = True
try:
    arcpy.CreateFileGDB_management(outfolder, "PrefixFields")
    gdb = os.path.join(outfolder, "PrefixFields.gdb")
    arcpy.RefreshCatalog(outfolder)
    inTableName = os.path.splitext(os.path.basename(inTable))[0]
    arcpy.TableToGeodatabase_conversion(inTable, gdb)
    table = os.path.join(gdb, inTableName)
    arcpy.RefreshCatalog(outfolder)
except arcpy.ExecuteError:
    arcpy.AddError(arcpy.GetMessages(2))
    raise  # a bare "except: pass" here would leave "table" undefined below

fields = arcpy.ListFields(table)
for f in fields:
    try:
        name = prefix + f.name
        ftype = f.type  # note: Describe field types (e.g. 'String') differ from AddField keywords (e.g. 'TEXT')
        arcpy.AddField_management(table, name, ftype)
        exp = "!%s!" % (f.name)
        arcpy.CalculateField_management(table, name, exp, "PYTHON")
        arcpy.RefreshCatalog(outfolder)
        arcpy.DeleteField_management(table, [f.name])
Example #13
        def goNational():
            geocodeOption = raw_input("Geocode Results? (Yes or No): ")
            if geocodeOption.lower() in ('n', 'no'):
                fGDBnational(projectFolder, csvName)
                for stateX in stateList:
                    listX = []
                    http = url_1 + what + url_2 + stateX
                    url = http
                    nationalScrapeNOGeocode(url, listX)
                    page_number = 2
                    while page_number < (pages + 1):
                        url = http + "&page=" + str(page_number)
                        nationalScrapeNOGeocode(url, listX)
                        page_number += 1
                    writeCSV(tempFolder, stateX, listX)
                # All CSVs added to fgdb as a table
                for stateX in stateList:
                    arcpy.TableToGeodatabase_conversion(
                        tempFolder + '/' + stateX + '.csv',
                        projectFolder + '/' + csvName + '_National.gdb')
                del env.workspace
                env.workspace = projectFolder + '/' + csvName + "_National.gdb"

                tableList = arcpy.ListTables()
                tableMerge = []
                for table in tableList:
                    tableMerge.append(table)
                arcpy.Merge_management(tableMerge, csvName + "_National")

                inTable = csvName + "_National"
                out_xls = projectFolder + '/' + csvName + "_National.xls"
                arcpy.TableToExcel_conversion(inTable, out_xls)

            elif geocodeOption.lower() in ('y', 'yes'):
                fGDBnational(projectFolder, csvName)
                for stateX in stateList:
                    listX = []
                    http = url_1 + what + url_2 + stateX
                    url = http
                    nationalScrape(url, listX)
                    page_number = 2
                    while page_number < (pages + 1):
                        url = http + "&page=" + str(page_number)
                        nationalScrape(url, listX)
                        page_number += 1
                    writeCSV(tempFolder, stateX, listX)

                for stateX in stateList:
                    nationalCreatePoints(tempFolder, stateX, projectFolder,
                                         csvName)

                del env.workspace
                env.workspace = projectFolder + '/' + csvName + "_National.gdb"

                fcList = arcpy.ListFeatureClasses()
                fcMerge = []
                for fc in fcList:
                    fcMerge.append(fc)
                arcpy.Merge_management(fcMerge, csvName)
            else:
                print '\nPlease answer with Yes or No if you would like the results to be geocoded.\n'
                goNational()
Example #14
    Land_csv = Datafolder + r"\Land.csv"
    Owner_Address_csv = Datafolder + r"\Owner_Address.csv"
    Permits_csv = Datafolder + r"\Permits.csv"
    Sales_csv = Datafolder + r"\Sales.csv"
    Tax_Areas_csv = Datafolder + r"\Tax_Areas.csv"
    Tax_Authorities_csv = Datafolder + r"\Tax_Authorities.csv"
    Values_csv = Datafolder + r"\Values.csv"

    Parcels_shp = Datafolder + r"\Parcels.shp"
    County_Boundary_shp = Datafolder + r"\County_Boundary.shp"
    Municipalities_shp = Datafolder + r"\Municipalities.shp"

    # Process: Table to Table (Account_Parcels)
    if arcpy.Exists("Account_Parcels"):
        arcpy.Delete_management("Account_Parcels")
    arcpy.TableToGeodatabase_conversion(Account_Parcels_csv, WorkingGDB)
    arcpy.AddField_management("Account_Parcels",
                              field_name="PARCEL_NO",
                              field_type='Text')
    arcpy.CalculateField_management(in_table="Account_Parcels",
                                    field="PARCEL_NO",
                                    expression="!Parcelno!")

    # Process: Table to Table (Buildings)
    if arcpy.Exists("Buildings"):
        arcpy.Delete_management("Buildings")
    arcpy.TableToGeodatabase_conversion(Buildings_csv, WorkingGDB)

    # Process: Table to Table (Owner_Address)
    if arcpy.Exists("Owner_Address"):
        arcpy.Delete_management("Owner_Address")
Example #15
FMSS_Assets = os.path.join(new_fgdb, "FMSSExport_Asset")

# Process: Delete
if arcpy.Exists(new_fgdb):
    arcpy.Delete_management(new_fgdb, "Workspace")
if arcpy.Exists(saved_fgdb):
    arcpy.Delete_management(saved_fgdb, "Workspace")

# Process: Saved published fgdb
arcpy.Copy_management(xdrive_fgdb, saved_fgdb, "Workspace")
# Process: Create File GDB
folder, fgdb = os.path.split(new_fgdb)
arcpy.CreateFileGDB_management(folder, fgdb, "CURRENT")

# Process: Table To Geodatabase (multiple)
arcpy.TableToGeodatabase_conversion(sde_tables, new_fgdb)
# Process: Feature Class to Geodatabase (multiple)
arcpy.FeatureClassToGeodatabase_conversion(sde_fcs, new_fgdb)

# Process: Add Field
arcpy.AddField_management(PARKLOTS_py, "Perim_Feet", "DOUBLE", "", "", "", "",
                          "NULLABLE", "NON_REQUIRED", "")
# Process: Calculate Field
arcpy.CalculateField_management(PARKLOTS_py, "Perim_Feet",
                                "!shape.length@feet!", "PYTHON_9.3", "")

# Process: Add Field (2)
arcpy.AddField_management(PARKLOTS_py, "Area_SqFt", "DOUBLE", "", "", "", "",
                          "NULLABLE", "NON_REQUIRED", "")
# Process: Calculate Field (2)
arcpy.CalculateField_management(PARKLOTS_py, "Area_SqFt",
Example #16
# Provide a default value if unspecified
pkt_adr_intersect_gminy = "pkt_adr_intersect_gminy"

# Process: Intersect
arcpy.Intersect_analysis(Input_Features, pkt_adr_intersect_gminy, "ALL", "",
                         "POINT")
arcpy.AddField_management(pkt_adr_intersect_gminy,
                          'nowaUlica',
                          "TEXT",
                          field_length=2048)
arcpy.AddField_management(pkt_adr_intersect_gminy,
                          'nowaMiejscowosc',
                          "TEXT",
                          field_length=2048)
arcpy.TableToGeodatabase_conversion(gminyDir, arcpy.env.workspace)
print('start')
# Replace values in database;
updateCursor = arcpy.da.UpdateCursor(pkt_adr_intersect_gminy,
                                     cursorAccessFields)
for row in updateCursor:
    if row[0] is None:
        row[2] = row[1]
        row[3] = row[4]
    else:
        row[2] = row[0]
        row[3] = row[1]

    updateCursor.updateRow(row)

field_leave = [
Example #17
def topology_repair(
        inFile=path_to_shapefile,
        dissolve_field="",
        gap_threshold=10000):  # gap_threshold is the max area of gaps that are treated as errors

    # create variables for necessary paths, create gdb, import inFile into feature dataset
    gdb = os.path.basename(inFile[:-3] + 'gdb')
    gdbDir= os.path.dirname(inFile)
    arcpy.CreateFileGDB_management(gdbDir, gdb)
    arcpy.env.workspace = gdbDir + '/' + gdb
    feature_ds = arcpy.env.workspace + '/topology_ds'
    data = arcpy.env.workspace + '/topology_ds/' + os.path.basename(inFile[:-4])
    topology = feature_ds + '/Topology'
    arcpy.CreateFeatureDataset_management(arcpy.env.workspace, "topology_ds", inFile[:-3] + 'prj')
    arcpy.FeatureClassToGeodatabase_conversion([inFile], "topology_ds")

    # Create topology, add feature class, define rules
    arcpy.CreateTopology_management(feature_ds, "Topology")
    arcpy.AddFeatureClassToTopology_management(topology, data)
    arcpy.AddRuleToTopology_management(topology, "Must Not Overlap (Area)",data,"","","")
    arcpy.ValidateTopology_management(topology)

    # create polygon inFile from errors and delete
    arcpy.ExportTopologyErrors_management(topology, "", "overlapErrors")
    arcpy.AddField_management("overlapErrors_poly", dissolve_field, "STRING")
    o = "o"
    arcpy.CalculateField_management('overlapErrors_poly', dissolve_field,o)

    # Create topology, add feature class, define rules
    arcpy.CreateTopology_management(feature_ds, "Topology")
    arcpy.AddFeatureClassToTopology_management(topology, data)
    arcpy.AddRuleToTopology_management(topology, "Must Not Have Gaps (Area)",data,"","","")
    arcpy.ValidateTopology_management(topology)

    # create polygon inFile from errors and merge with original data
    arcpy.ExportTopologyErrors_management(topology, "", "gapErrors")
    arcpy.FeatureToPolygon_management("gapErrors_line","topo_errors_gaps")
    arcpy.SelectLayerByAttribute_management ("topo_errors_gaps", "NEW_SELECTION", '"Shape_Area" < ' + str(gap_threshold))
    arcpy.AddField_management("topo_errors_gaps", dissolve_field, "STRING")
    g = "g"
    arcpy.CalculateField_management('topo_errors_gaps', dissolve_field,g )
    arcpy.SelectLayerByAttribute_management ("topo_errors_gaps", "SWITCH_SELECTION")
    arcpy.DeleteRows_management("topo_errors_gaps")
    arcpy.Merge_management(["overlapErrors_poly", "topo_errors_gaps" ,inFile],"topomerged")

    # Get neighbor table and export to gdb
    arcpy.PolygonNeighbors_analysis('topomerged', 'topo_errors',['OBJECTID', dissolve_field])  # doesn't always find neighbors on all sides of polygon
    arcpy.TableToGeodatabase_conversion('topo_errors',arcpy.env.workspace)

    #table to array and array to dataframe
    nbr_field = 'nbr_' + dissolve_field
    arr = arcpy.da.TableToNumPyArray("topo_errors", ("src_OBJECTID", nbr_field, "LENGTH"))  # PolygonNeighbors output is a standalone table
    index = [str(i) for i in range(1, len(arr)+1)]
    df = pd.DataFrame(arr, index=index)
    df = df.groupby(['src_OBJECTID','nbr_TYPE'],as_index = False)['LENGTH'].sum()   #sum in case several sides of polygon have same neighbor

    #select rows from df and export to csv and to gdb
    idx = df.groupby(['src_OBJECTID'])['LENGTH'].transform(max) == df['LENGTH']
    df_select = df[idx]
    df_select.to_csv(gdbDir+'/joinme.csv', index=False)
    arcpy.TableToTable_conversion(gdbDir+'/joinme.csv', arcpy.env.workspace, "joinme")

    # Merge error polygons, join field, delete overlaps from infile, assign type to error polygons, merge all and dissolve
    arcpy.JoinField_management('topomerged', 'OBJECTID', 'joinme', 'src_OBJECTID', 'nbr_TYPE')
    arcpy.FeatureClassToFeatureClass_conversion('topomerged', "", 'topo_errors_join')
    arcpy.SelectLayerByAttribute_management("topo_errors_join", "NEW_SELECTION", "TYPE = 'o'")
    arcpy.SelectLayerByAttribute_management("topo_errors_join", "ADD_TO_SELECTION", "TYPE = 'g'")
    arcpy.SelectLayerByAttribute_management ("topo_errors_join", "SWITCH_SELECTION")
    arcpy.DeleteRows_management("topo_errors_join")   #leave only error polygons
    arcpy.AlterField_management('topo_errors_join', 'TYPE', 'orig_TYPE','orig_TYPE')
    arcpy.AlterField_management('topo_errors_join', 'nbr_TYPE', 'TYPE','TYPE')
    arcpy.Erase_analysis(inFile,'overlapErrors_poly','infile_overlaps_erased')
    arcpy.Merge_management(["topo_errors_join","infile_overlaps_erased"],"merged_neighbors")
    arcpy.Dissolve_management('merged_neighbors', 'dissolved_neighbors', 'TYPE')
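A hedged usage sketch (hypothetical shapefile path; gap_threshold is an area in the units of the data's spatial reference). Since the function hardcodes 'TYPE' in several selections, dissolve_field='TYPE' is the safe choice:

topology_repair(inFile=r'C:\data\landuse.shp',
                dissolve_field='TYPE',
                gap_threshold=10000)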
Example #18
tables = arcpy.ListTables()
print('   -- Tables: ', tables)

print('   -- Process complete. Check for errors and move to next cell.')
print(' ')

#-----------------------------------------------------------------------------------------
# 5.0 - Execute TableToGeodatabase
#-----------------------------------------------------------------------------------------

print('Step 5.0 - Execute TableToGeodatabase: ')
print('   -- Executing...')

try:
    print('   -- Importing tables to gdb: ' + outLocation)
    arcpy.TableToGeodatabase_conversion(csv_name, outLocation)
except arcpy.ExecuteError:
    print(arcpy.GetMessages())

print('   -- Process complete. Check for errors and move to next cell.')
print(' ')

#-----------------------------------------------------------------------------------------
# 6.0 - Convert Table to Feature Class
#-----------------------------------------------------------------------------------------

print('Step 6.0 - Convert Table to Feature Class...')
print('   -- Executing...')

arcpy.env.workspace = working_dir
arcpy.env.overwriteOutput = True
Example #19
def import_vicmap_fgdb_data(vicmap_version):

    logging.info('local variables')
    vm = gis.VICMAP(vicmap_version)

    imported_fcs = []
    imported_tables = []

    logging.info('exclusion list:')
    exclude_list = [
                    # VMELEV
                    'EL_CONTOUR_1TO5M', 
                    'EL_GRND_SURFACE_POINT_1TO5M',
                    # VMPROP
                    'ANNOTATION_TEXT', 
                    'CAD_AREA_BDY',
                    'EASEMENT',
                    'CENTROID',
                    'PARCEL_CAD_AREA_BDY',
                    'PROPERTY_CAD_AREA_BDY',
                    ]
    for exclude in exclude_list:
        logging.info(exclude)

    logging.info('starting import')
    for root, gdb_names, files in arcpy.da.Walk(vm.path, datatype='Container'):

        for gdb_name in gdb_names:

            if gdb_name.lower().endswith('gdb'):
                logging.info(gdb_name)

                gdb_folder = os.path.join(vm.path, gdb_name)
                arcpy.env.workspace = gdb_folder
                
                fcs = []
                fc_names = arcpy.ListFeatureClasses()
                for fc in fc_names:
                    if fc.upper() in exclude_list:
                        continue
                    if arcpy.Exists(os.path.join(vm.path, vm.sde, fc)):
                        logging.info('exists: {}'.format(fc))
                        continue
                    logging.info('loading feature class: ' + fc)
                    fcs.append(os.path.join(gdb_folder, fc))
                if fcs:
                    arcpy.FeatureClassToGeodatabase_conversion(Input_Features=fcs,
                                                               Output_Geodatabase=os.path.join(vm.path, vm.sde))
                    imported_fcs.extend(fcs)

                tables = []
                table_names = arcpy.ListTables()
                for table in table_names:
                    if table.upper() in exclude_list:
                        continue
                    if arcpy.Exists(os.path.join(vm.path, vm.sde, table)):
                        logging.info('exists: {}'.format(table))
                        continue
                    logging.info('loading table: ' + table)
                    tables.append(os.path.join(gdb_folder, table))
                if tables:
                    arcpy.TableToGeodatabase_conversion(Input_Table=tables,
                                                        Output_Geodatabase=os.path.join(vm.path, vm.sde))
                    imported_tables.extend(tables)
    return [imported_fcs, imported_tables]
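A hypothetical call (the version string and the gis.VICMAP helper are assumptions carried over from the snippet):

imported_fcs, imported_tables = import_vicmap_fgdb_data('20210301')
logging.info('imported %s feature classes, %s tables',
             len(imported_fcs), len(imported_tables))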
Example #20
for tiff in tiffs:
    tiffSplit = tiff.split("_")
    year = tiffSplit[1]
    month = tiffSplit[2]

    #Extract Values to Points   *** Change GPS point here***
    arcpy.gp.ExtractValuesToPoints_sa(
        gpsPoints + "GPS_final.shp", fs_rasters + "fc_" + year + "_" + month,
        out_extract + "out_" + year + "_" + month + ".shp", "NONE",
        "VALUE_ONLY")
    print 'finished values to Table run: %s\n\n' % (datetime.datetime.now() -
                                                    start)

    #Copy extract by points table to Geodatabase
    arcpy.TableToGeodatabase_conversion(
        Input_Table=out_extract + "out_" + year + "_" + month + ".shp",
        Output_Geodatabase='C:/workspace/chirps/processing/data.gdb/')
    century_year = (int(year) - 1900) * 12 + int(month)
    arcpy.AddField_management(dataGDB + "out_" + year + "_" + month,
                              "CenturyMonth", "FLOAT", "", "", "7", "",
                              "NULLABLE", "NON_REQUIRED", "")
    arcpy.CalculateField_management(in_table=dataGDB + "out_" + year + "_" +
                                    month,
                                    field="CenturyMonth",
                                    expression=str(century_year),
                                    expression_type="VB",
                                    code_block="")

    #Table to CSV
    arcpy.TableToTable_conversion(dataGDB + "out_" + year + "_" + month,
                                  out_csv, "csv" + year + "_" + month + ".csv")
Example #21
arcpy.env.XYTolerance = '0.000000001798644 Degrees'
arcpy.env.MResolution = 0.0001
arcpy.env.MTolerance = 0.0002  # set the M tolerance to the cansys tolerance, tolerance = resx2
arcpy.env.overwriteOutput = True
incrnd = ws + "\\" + tempmdb + "\\CRND"
insrnd = ws + "\\" + tempmdb + "\\SRND"
qclean = "NETWORK_DIRECTION in ( 'EB' , 'NB' ) AND ROUTE NOT LIKE '999%' AND UNIQUE_NUMBER NOT LIKE '9'"
arcpy.MakeFeatureLayer_management(incrnd, "CRND", qclean)
arcpy.MakeFeatureLayer_management(insrnd, "SRND", qclean)
arcpy.FeatureClassToGeodatabase_conversion("CRND",
                                           ws + "\\" + tempgdb)  #10 sec
arcpy.FeatureClassToGeodatabase_conversion("SRND", ws + "\\" + tempgdb)  #9 sec
arcpy.MakeTableView_management(
    r"Database Connections\atlasprod.odc\KDOT.MV_map_extract", "MAP_EXTRACT",
    "DIRECTION <=2")
arcpy.TableToGeodatabase_conversion("MAP_EXTRACT", ws + "\\" + tempgdb)
del mxd
mxd = arcpy.mapping.MapDocument(
    r"\\GISDATA\ArcGIS\GISDATA\MXD\NewGISNetworkSeed.mxd")
arcpy.MakeFeatureLayer_management(ws + "\\" + tempgdb + "\\CRND", "CRND")
arcpy.MakeFeatureLayer_management(ws + "\\" + tempgdb + "\\SRND", "SRND")
arcpy.MakeTableView_management(ws + "\\" + tempgdb + "\\MAP_EXTRACT",
                               "MAP_EXTRACT", "DIRECTION <=2")
arcpy.AddField_management(ws + "\\" + tempgdb + "/MAP_EXTRACT", "SRND", "TEXT",
                          "#", "#", "24")
arcpy.CalculateField_management(ws + "\\" + tempgdb + "/MAP_EXTRACT", "SRND",
                                """Mid([NQR_DESCRIPTION],4,16)""", "VB", "#")
arcpy.AddField_management(ws + "\\" + tempgdb + "/MAP_EXTRACT", "CRND", "TEXT",
                          "#", "#", "24")
arcpy.CalculateField_management(ws + "\\" + tempgdb + "/MAP_EXTRACT", "CRND",
                                """[NQR_DESCRIPTION]""", "VB", "#")
Example #22
 gdb = outLocation + g + ".gdb"
 arcpy.CopyFeatures_management(inFeatures,
                               gdb + "/" + inFeatures[:-4])
 arcpy.CopyFeatures_management(inFeatures2,
                               gdb + "/" + inFeatures2[:-4])
 arcpy.CopyFeatures_management(inFeatures4,
                               gdb + "/" + inFeatures4[:-4])
 arcpy.CopyFeatures_management(inFeatures5,
                               gdb + "/" + inFeatures5[:-4])
 arcpy.CopyFeatures_management(inFeatures6,
                               gdb + "/" + inFeatures6[:-4])
 arcpy.CopyFeatures_management(inFeatures7,
                               gdb + "/" + inFeatures7[:-4])
 arcpy.CopyFeatures_management(inFeatures8,
                               gdb + "/" + inFeatures8[:-4])
 arcpy.TableToGeodatabase_conversion([joinTable, joinTable2],
                                     gdb)
 if joinTable3 == "":
     arcpy.CopyFeatures_management(inFeatures3,
                                   gdb + "/" + inFeatures3[:-4])
 else:
     arcpy.CopyFeatures_management(inFeatures3,
                                   gdb + "/" + inFeatures3[:-4])
     arcpy.TableToGeodatabase_conversion(joinTable3, gdb)
 print("przekopiowalem pliki do geobazy w: " + g)
 inFeatures = ""
 inFeatures2 = ""
 inFeatures3 = ""
 inFeatures4 = ""
 inFeatures5 = ""
 inFeatures6 = ""
 inFeatures7 = ""
Example #23
        for i in range(0, len(tableList)):
            try:
                arcpy.Delete_management("tblPart" + str(i))
            except:
                pass
        print "Table Created for Raster: " + os.sep + inValueRaster

    print "joining tables"
    env.workspace = outFolder
    tList = arcpy.ListTables()
    print str(tList) + " = tables in outfolder"
    gdbName = zoneName + "masterTableGDB"
    masterTableGDB = arcpy.CreateFileGDB_management(outFolder, gdbName,
                                                    "CURRENT")
    print str(masterTableGDB) + "= masterTableGDB"
    arcpy.TableToGeodatabase_conversion(tList, masterTableGDB)
    env.workspace = outFolder + os.sep + gdbName + ".gdb"
    tList = arcpy.ListTables()
    tbl = tList[0]
    masterTableGDB = str(masterTableGDB) + os.sep + "masterTableGDB"
    arcpy.Copy_management(tbl, masterTableGDB)

    for t in tList:
        varName = t[:-4]
        varNameMean = varName[:3] + "_MEAN"
        print varName + " = varName"
        arcpy.JoinField_management(masterTableGDB, "TARGET_FID", t,
                                   "TARGET_FID")
        arcpy.AddField_management(masterTableGDB, varNameMean, "FLOAT", "20",
                                  "4", "", varNameMean, "NULLABLE",
                                  "NON_REQUIRED", "")  # Process: Add Field