def mhello():
    foldername = path_directory.rsplit('\\', 1)[0]
    arcpy.CreateFolder_management(foldername, 'input')
    arcpy.CreateFolder_management(foldername, 'output')
    arcpy.CreateFolder_management(foldername, 'tmp')
    inputpath = str(foldername + '/input/')
    # Raw string avoids invalid escape sequences such as \U in the path.
    start_county_layer = r"C:\Users\zwhitman\Documents\census\psu_app\input\us_counties.shp"
    global input_county
    input_county = inputpath + 'us_counties_joined_3857.shp'
    if os.path.isfile(input_county):
        controller.show_frame(PageState)
    else:
        arcpy.Copy_management(start_county_layer, input_county)
        arcpy.TableToDBASE_conversion(variable_file, inputpath)
        dbf_varfile = variable_file.rsplit('/', 1)[1]
        dbf_varfile = dbf_varfile[:-3] + "dbf"
        dbf_varfile = inputpath + dbf_varfile
        arcpy.AddField_management(dbf_varfile, "GEOID_2", "TEXT", "#", "#", "#",
                                  "#", "NULLABLE", "NON_REQUIRED", "#")
        arcpy.CalculateField_management(
            dbf_varfile, "GEOID_2", "calc(!GEOID!)", "PYTHON_9.3",
            "def calc(a):\n    x = a[1:-1]\n    return x\n")
        arcpy.JoinField_management(input_county, "GEOID", dbf_varfile, "GEOID_2", "#")
        controller.show_frame(PageState)
    return
def main():
    arcpy.env.overwriteOutput = True
    inspace = sys.argv[1]
    arcpy.env.workspace = inspace
    outspace = sys.argv[2]
    files = arcpy.ListTables()
    arcpy.TableToDBASE_conversion(files, outspace)
    print('{} contains:'.format(outspace))
    arcpy.env.workspace = outspace
    print(arcpy.ListTables('*', 'dBASE'))
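# A possible entry point for the converter above (hypothetical script name;
# sys.argv[1] is the input workspace, sys.argv[2] the output folder):
#   python tables_to_dbase.py C:\data\input.gdb C:\data\dbf_out
if __name__ == "__main__":
    main()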
def exportGriddedInvDBF(self):
    pp = self.ProgressPrinter.newProcess(inspect.stack()[0][3], 1, 1).start()
    arcpy.env.workspace = self.inventory.getWorkspace()
    # Temporarily silence stdout while the conversion runs.
    prev = sys.stdout
    silenced = open('nul', 'w')
    sys.stdout = silenced
    arcpy.TableToDBASE_conversion("inventory_gridded", self.output_dbf_dir)
    sys.stdout = prev
    silenced.close()
    self.inventory.setLayerName("inventory_gridded")
    pp.finish()
arcpy.gp.Slope_sa(Avkat_DBO_dem_quickbird_extent, slope__2_, "DEGREE", "1")

# Process: Extract Values to Points (2)
arcpy.gp.ExtractValuesToPoints_sa(d2h2od2avkat_sitepoints, slope__2_,
                                  slope_sitepoints, "NONE", "VALUE_ONLY")

# Process: Add Field (3)
arcpy.AddField_management(slope_sitepoints, "slope", "FLOAT", "", "", "", "",
                          "NULLABLE", "NON_REQUIRED", "")

# Process: Calculate Field (3)
arcpy.CalculateField_management(slope_sitepoints__2_, "slope", "[RASTERVALU]", "VB", "")

# Process: Delete Field (3)
arcpy.DeleteField_management(
    slope_sitepoints__3_,
    "Join_Count;TARGET_FID;JOIN_FID;Id;OP;SU;d2water;d2avkat;RASTERVALU")

# Process: Table to dBASE (multiple)
arcpy.TableToDBASE_conversion(
    "O:\\cross_regional\\data\\databases\\functional_model.gdb\\exposure_sitepoints;"
    "O:\\cross_regional\\data\\databases\\functional_model.gdb\\density_sitepoints;"
    "O:\\cross_regional\\data\\databases\\functional_model.gdb\\slope_sitepoints",
    excel)

# Process: Extract Values to Points (4)
arcpy.gp.ExtractValuesToPoints_sa(sample_points, cum_viewshed_final,
                                  exposure_samplepoints, "NONE", "VALUE_ONLY")

# Process: Add Field (4)
arcpy.AddField_management(exposure_samplepoints, "exposure", "FLOAT", "", "", "", "",
                          "NULLABLE", "NON_REQUIRED", "")

# Process: Calculate Field (4)
arcpy.CalculateField_management(exposure_samplepoints__2_, "exposure",
                                "[RASTERVALU]", "VB", "")

# Process: Delete Field (4)
arcpy.DeleteField_management(exposure_samplepoints__3_, "RASTERVALU")
        new_row.append("AC_" + str(a + 1))
    flows.append(new_row)

# Create dataframe of combinations for Flowfile
flows_df = pd.DataFrame(flows)
# Export dataframe of combinations to csv
flows_df.to_csv(export_fl, mode="w", index=False, header=None)

# Create DBF flowfile
export_dbf = os.path.join(save, "F" + str(number) + "AC" + str(a) + "2.csv")
flow_dbf = os.path.join(save, "F" + str(number) + "AC" + str(a) + "2.dbf")
if not arcpy.Exists(flow_dbf):
    arcpy.TableToDBASE_conversion(export_dbf, save)

# Find shortest route Origin and Destination in Flowmap
import_fm = os.path.join(start, "FM_Route.flg")
export_fm = os.path.join(save, "FM_Route_" + str(number) + "AC" + str(a) + ".txt")
shutil.copy(import_fm, export_fm)

# Feed in Flowfile
f1 = open(export_fm, 'r')
f2 = open(os.path.join(export_fm[:-4] + ".flg"), 'w')
for line in f1:
    f2.write(line.replace("001AC1", str(number) + "AC" + str(a)))
f1.close()
f2.close()  # make sure the rewritten .flg is flushed to disk
# Geocode Addresses to Parcel Data
print "Working on Geocoding"
arcpy.GeocodeAddresses_geocoding(Addresses, add_locator, address_fields,
                                 Stu_Parcel_Points)

# After Geocode, Attach Points to Parcel Flood Data
print "Working on Attaching Points to Parcels"
Stu_Parcel_Damage = path + "\\Shapefiles\\Results_92517.gdb\\Stu_Parcel_Damage"

# Attach Student points to Parcel Damage
arcpy.SpatialJoin_analysis(Orig_Parcel, Stu_Parcel_Points, Stu_Parcel_Damage,
                           "JOIN_ONE_TO_ONE", "KEEP_ALL", "#", "INTERSECT")

# Select Only Geocoded Features
print "Working on Creating New Selection"
arcpy.env.workspace = path + "\\Shapefiles\\Results_92517.gdb"
# First make a temp layer to select features
arcpy.MakeFeatureLayer_management(Stu_Parcel_Damage, "temp")
# Then select the features
arcpy.SelectLayerByAttribute_management("temp", "", '"Status" = \'M\' OR "Status" = \'T\'')
# Finally save the new feature class
arcpy.CopyFeatures_management("temp", "Stu_Parcel_Damage_1")

# Output to DBF for Analysis in R
print "Converting to DBF"
dbf = path + "\\Shapefiles\\DBF\\"
arcpy.TableToDBASE_conversion(["Stu_Parcel_Damage_1"], dbf)
arcpy.TableToDBASE_conversion(["Stu_Parcel_Points_1"], dbf)

# Delete Unneeded Files
arcpy.Delete_management(Stu_Parcel_Damage)
arcpy.Delete_management("temp")
print "Finished"
import arcpy

arcpy.env.workspace = "c:/data/output"

# Convert directly to an Excel workbook. ArcMap can only export 65535 rows, though.
arcpy.TableToExcel_conversion("100x", "C:/Users/Guest Admin/Desktop/table.xlsx")

# Convert to a dBASE table.
arcpy.TableToDBASE_conversion(["TABLE"], "C:/output")
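# TableToDBASE_conversion also accepts several tables per call -- either a
# Python list (as above) or a semicolon-delimited string, as other snippets in
# this collection do. A minimal sketch with placeholder table names:
arcpy.TableToDBASE_conversion(["TABLE_A", "TABLE_B"], "C:/output")
arcpy.TableToDBASE_conversion("TABLE_A;TABLE_B", "C:/output")  # equivalent form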
def execute(self, parameters, messages):
    arcpy.env.overwriteOutput = True
    arcpy.CheckOutExtension('Spatial')
    arcpy.AddMessage("Orientation of species distributions")
    for param in parameters:
        arcpy.AddMessage("Parameter: %s = %s" % (param.name, param.valueAsText))

    # Read in variables for the tool
    input_line = parameters[0].valueAsText
    input_points = parameters[1].valueAsText
    attribute_process = parameters[2].valueAsText
    flag_field = parameters[3].valueAsText
    distance = parameters[4].value
    angle = parameters[5].value
    output_directory = parameters[6].valueAsText
    clean_up = parameters[7].valueAsText

    # Make output directory if it does not exist
    output_directory = output_directory.strip()
    arcpy.AddMessage(output_directory)
    if not os.path.exists(str(output_directory)):
        os.makedirs(output_directory)
    arcpy.env.workspace = output_directory

    # 0 Describe files to set coordinate systems
    desc_input = arcpy.Describe(input_points)
    coord_system = desc_input.spatialReference
    arcpy.env.outputCoordinateSystem = coord_system

    # 1 Convert island line to a polygon - numpy work around due to lack of license
    if not arcpy.Exists(os.path.join(output_directory, "Island_Poly.shp")):

        def polygon_to_line_no_gap(input_line_, output_polygon):
            array = arcpy.da.FeatureClassToNumPyArray(
                input_line_, ["SHAPE@X", "SHAPE@Y"],
                spatial_reference=coord_system, explode_to_points=True)
            if array.size == 0:
                arcpy.AddError("Line has no features, check to ensure it is OK")
            else:
                array2 = arcpy.Array()
                for x, y in array:
                    pnt = arcpy.Point(x, y)
                    array2.add(pnt)
                polygon = arcpy.Polygon(array2)
                arcpy.CopyFeatures_management(polygon, output_polygon)
            return

        polygon_to_line_no_gap(input_line,
                               os.path.join(output_directory, "Island_Poly.shp"))

        # 2 Create Fishnet for random sampling of points within the cells of the net
        extent = arcpy.Describe(input_points).extent
        origin_coord = str(extent.XMin) + " " + str(extent.YMin)
        y_coord = str(extent.XMin) + " " + str(extent.YMin + 1)
        corner_coord = str(extent.XMax) + " " + str(extent.YMax)

        island_area = 0
        with arcpy.da.SearchCursor(
                os.path.join(output_directory, "Island_Poly.shp"),
                "SHAPE@") as rows:
            for row in rows:
                island_area += row[0].getArea("GEODESIC", "SQUAREKILOMETERS")

        island_area_polygon = sqrt(island_area * 0.1) * 100
        arcpy.AddMessage("....fishnet size is: " +
                         str(round(island_area_polygon, 2)) + " m x " +
                         str(round(island_area_polygon, 2)) +
                         " m. Island area is: " +
                         str(round(island_area, 0)) + " km2.")

        arcpy.CreateFishnet_management(
            out_feature_class=os.path.join(output_directory, "Fishnet.shp"),
            origin_coord=origin_coord,
            y_axis_coord=y_coord,
            cell_width=island_area_polygon,
            cell_height=island_area_polygon,
            number_rows="",
            number_columns="",
            corner_coord=corner_coord,
            labels="",
            template="",
            geometry_type="POLYGON")

        arcpy.Intersect_analysis(
            in_features=os.path.join(output_directory, "Fishnet.shp") + " #;" +
            os.path.join(output_directory, "Island_Poly.shp") + " #",
            out_feature_class=os.path.join(output_directory, "FishClip.shp"),
            join_attributes="ONLY_FID",
            cluster_tolerance="-1 Unknown",
            output_type="INPUT")

        arcpy.DefineProjection_management(
            os.path.join(output_directory, "FishClip.shp"), coord_system)
        arcpy.AddField_management(
            os.path.join(output_directory, "FishClip.shp"), "Shape_Area", "DOUBLE")
        arcpy.CalculateField_management(
            os.path.join(output_directory, "FishClip.shp"), "Shape_Area",
            "!SHAPE.AREA@SQUAREMETERS!", "PYTHON_9.3")

        # Largest cell area, via a cursor sorted descending on Shape_Area.
        maxvalue = arcpy.SearchCursor(
            os.path.join(output_directory, "FishClip.shp"),
            "", "", "", "Shape_Area" + " D").next().getValue("Shape_Area")
        maxvalue = str(int(maxvalue - 1))
        where = '"Shape_Area" > ' + "%s" % maxvalue

        arcpy.Select_analysis(
            in_features=os.path.join(output_directory, "FishClip.shp"),
            out_feature_class=os.path.join(output_directory, "FishClipInner.shp"),
            where_clause=where)

        # 3 Create n random points within the cells of the fishnet
        arcpy.CreateRandomPoints_management(
            out_path=output_directory,
            out_name="RndPts.shp",
            constraining_feature_class=os.path.join(output_directory,
                                                    "FishClipInner.shp"),
            constraining_extent="0 0 250 250",
            number_of_points_or_field="5",
            minimum_allowed_distance="0 Meters",
            create_multipoint_output="POINT",
            multipoint_size="0")

        arcpy.DefineProjection_management(
            os.path.join(output_directory, "RndPts.shp"), coord_system)
    else:
        arcpy.AddMessage("....skipping building polygons as they already exist")

    # 3 Create spatial bootstrapping circle polygons
    rows = arcpy.SearchCursor(os.path.join(output_directory, "RndPts.shp"))
    desc = arcpy.Describe(os.path.join(output_directory, "RndPts.shp"))
    shapefieldname = desc.ShapeFieldName

    if not arcpy.Exists(os.path.join(output_directory, "SectorPoly.shp")):
        arcpy.AddMessage("....now conducting spatial bootstrap.")
        featureclass = os.path.join(output_directory, "SectorPoly.shp")
        arcpy.CreateFeatureclass_management(os.path.dirname(featureclass),
                                            os.path.basename(featureclass),
                                            "Polygon")
        arcpy.AddField_management(featureclass, str("FID_Fishne"), "TEXT",
                                  "", "", "150")
        arcpy.AddField_management(featureclass, "BEARING", "SHORT", "", "", "4")
        arcpy.DeleteField_management(featureclass, ["Id"])
        arcpy.DefineProjection_management(featureclass, coord_system)

        finalfeatureclass = os.path.join(output_directory, "Final.shp")
        arcpy.CreateFeatureclass_management(os.path.dirname(finalfeatureclass),
                                            os.path.basename(finalfeatureclass),
                                            "Polygon")
        arcpy.AddField_management(finalfeatureclass, str("FID_Fishne"), "TEXT",
                                  "", "", "150")
        arcpy.AddField_management(finalfeatureclass, "BEARING", "SHORT",
                                  "", "", "4")
        arcpy.DeleteField_management(finalfeatureclass, ["Id"])
        arcpy.DefineProjection_management(finalfeatureclass, coord_system)

        featureclass_in_mem = arcpy.CreateFeatureclass_management(
            "in_memory", "featureclass_in_mem", "Polygon")
        arcpy.AddField_management(featureclass_in_mem, "OriginID", "TEXT",
                                  "", "", "150")
        arcpy.AddField_management(featureclass_in_mem, "BEARING", "SHORT",
                                  "", "", "4")
        arcpy.DeleteField_management(featureclass_in_mem, ["Id"])
        arcpy.DefineProjection_management(featureclass_in_mem, coord_system)

        for row in rows:
            angles = range(0, 360, angle)
            feat = row.getValue(shapefieldname)
            columnValue = row.getValue(str("FID"))
            pnt = feat.getPart()
            origin_x = pnt.X
            origin_y = pnt.Y
            for ang in angles:
                angleorigin = float(int(ang))
                # Point 1
                (disp_x, disp_y) = (distance * sin(radians(angleorigin)),
                                    distance * cos(radians(angleorigin)))
                (end_x, end_y) = (origin_x + disp_x, origin_y + disp_y)
                # Point 2
                anglestep = float(int(ang) + int(angle))
                (disp2_x, disp2_y) = (distance * sin(radians(anglestep)),
                                      distance * cos(radians(anglestep)))
                (end2_x, end2_y) = (origin_x + disp2_x, origin_y + disp2_y)
                # Create a polygon geometry
                array = arcpy.Array([
                    arcpy.Point(origin_x, origin_y),
                    arcpy.Point(end_x, end_y),
                    arcpy.Point(end2_x, end2_y),
                ])
                polygon = arcpy.Polygon(array)
                with arcpy.da.InsertCursor(
                        featureclass_in_mem,
                        ['OriginID', 'BEARING', 'SHAPE@']) as cur:
                    cur.insertRow([columnValue, ang, polygon])
                array.removeAll()

        arcpy.CopyFeatures_management(r"in_memory\featureclass_in_mem",
                                      featureclass)
    else:
        arcpy.AddMessage("....using previous spatial bootstrap.")

    arcpy.AddMessage("....now joining with observations")
    query = '"' + str(flag_field) + '" = ' + str(0)
    arcpy.MakeFeatureLayer_management(input_points, "input_points_query_sub")
    arcpy.Select_analysis("input_points_query_sub",
                          r"in_memory/input_points_query", query)

    count_records = arcpy.GetCount_management(
        r"in_memory/input_points_query").getOutput(0)
    arcpy.AddMessage("....total number of records to process: " +
                     str(count_records))

    if int(count_records) > 500:
        arcpy.AddMessage("....spatial join will fail due to memory error, "
                         "working around this limitation...")
        count_records = arcpy.GetCount_management(
            os.path.join(output_directory, "SectorPoly.shp")).getOutput(0)
        # Split the sectors into quarters by FID so each join stays small.
        query_1_range = '"' + str("FID") + '" <= ' + str(int(count_records) / 4)
        query_2_range = ('"' + str("FID") + '" > ' + str(int(count_records) / 4) +
                         ' And "' + str("FID") + '" < ' +
                         str(int(count_records) / 2))
        query_3_range = ('"' + str("FID") + '" >= ' + str(int(count_records) / 2) +
                         ' And "' + str("FID") + '" < ' +
                         str(int(count_records) / 2 + int(count_records) / 4))
        query_4_range = ('"' + str("FID") + '" >= ' +
                         str(int(count_records) / 2 + int(count_records) / 4))
        query_list = [query_1_range, query_2_range, query_3_range, query_4_range]

        count = 1
        for i in query_list:
            if not arcpy.Exists(os.path.join(output_directory,
                                             "SectorPoly" + str(count) + ".shp")):
                arcpy.Select_analysis(
                    os.path.join(output_directory, "SectorPoly.shp"),
                    os.path.join(output_directory,
                                 "SectorPoly" + str(count) + ".shp"), i)
                arcpy.SpatialJoin_analysis(
                    os.path.join(output_directory,
                                 "SectorPoly" + str(count) + ".shp"),
                    r"in_memory/input_points_query",
                    os.path.join(output_directory,
                                 "SpatialJoin" + str(count) + ".shp"),
                    "JOIN_ONE_TO_MANY", "KEEP_ALL", "", "INTERSECT")
                with arcpy.da.UpdateCursor(
                        os.path.join(output_directory,
                                     "SpatialJoin" + str(count) + ".shp"),
                        "Join_Count") as cursor:
                    for row in cursor:
                        if row[0] == 0:
                            cursor.deleteRow()
            if not arcpy.Exists(os.path.join(output_directory,
                                             "SpatialJoin" + str(count) + ".csv")):
                dbf2csv(
                    os.path.join(output_directory,
                                 "SpatialJoin" + str(count) + ".dbf"),
                    os.path.join(output_directory,
                                 "SpatialJoin" + str(count) + ".csv"))
            count += 1
    else:
        arcpy.SpatialJoin_analysis(
            os.path.join(output_directory, "SectorPoly.shp"),
            r"in_memory/input_points_query",
            r"in_memory/points_SpatialJoin",
            "JOIN_ONE_TO_MANY", "KEEP_ALL", "", "INTERSECT")
        with arcpy.da.UpdateCursor(r"in_memory/points_SpatialJoin",
                                   "Join_Count") as cursor:
            for row in cursor:
                if row[0] == 0:
                    cursor.deleteRow()
        arcpy.CopyFeatures_management(
            r"in_memory/points_SpatialJoin",
            os.path.join(output_directory,
                         os.path.splitext(os.path.basename(input_points))[0] +
                         "_join.shp"))

    attribute_process = attribute_process.split(",")

    if arcpy.Exists(r"in_memory/points_SpatialJoin"):
        for i in attribute_process:
            arcpy.AddMessage("....calculating statistics for " + str(i))
            stats = [[i, "MEAN"], [i, "STD"]]
            arcpy.Statistics_analysis(
                r"in_memory/points_SpatialJoin",
                os.path.join(output_directory,
                             os.path.splitext(os.path.basename(input_points))[0] +
                             "_" + i + ".dbf"),
                stats, "BEARING")
    else:
        header_saved = False
        if not arcpy.Exists(os.path.join(output_directory,
                                         "SpatialJoin_Merge" + ".csv")):
            with open(os.path.join(output_directory,
                                   "SpatialJoin_Merge" + ".csv"), 'wb') as fout:
                for num in range(1, 5):
                    with open(os.path.join(output_directory,
                                           "SpatialJoin" + str(num) +
                                           ".csv")) as fin:
                        header = next(fin)
                        if not header_saved:
                            fout.write(header)
                            header_saved = True
                        for line in fin:
                            fout.write(line)

        for i in attribute_process:
            arcpy.AddMessage("....calculating statistics for " + str(i) +
                             " using pandas1.")
            chunks = pd.read_csv(os.path.join(output_directory,
                                              "SpatialJoin_Merge" + ".csv"),
                                 chunksize=100000)
            pieces = [
                x.groupby('BEARING', as_index=False)[i].agg(
                    ['count', 'mean', 'std']) for x in chunks
            ]
            result = pd.concat(pieces)
            result.columns = result.columns.droplevel(0)
            result = result.reset_index()
            name_mean = "MEAN_" + str(i)
            name_std = "STD_" + str(i)
            result.rename(columns={'count': 'FREQUENCY'}, inplace=True)
            result.rename(columns={'mean': name_mean[0:10]}, inplace=True)
            result.rename(columns={'std': name_std[0:10]}, inplace=True)
            f = {
                'FREQUENCY': ['sum'],
                name_mean[0:10]: ['mean'],
                name_std[0:10]: ['mean']
            }
            result_2 = result.groupby('BEARING').agg(f)
            result_2 = result_2.reset_index()
            result_2 = result_2[['BEARING', 'FREQUENCY',
                                 name_mean[0:10], name_std[0:10]]]

            out_csv = os.path.join(
                output_directory,
                os.path.splitext(os.path.basename(input_points))[0] +
                "_" + i + ".csv")
            result_2.to_csv(out_csv, index=False)

            if os.path.exists(out_csv):
                with open(out_csv, "r") as f:
                    reader = list(csv.reader(f, delimiter=","))
                # Remove the two extra header rows written for the
                # multi-level aggregation columns.
                reader.pop(1)
                reader.pop(1)
                with open(out_csv, "w") as out:
                    writer = csv.writer(out, delimiter=",")
                    for row in reader:
                        writer.writerow(row)

            result = arcpy.TableToDBASE_conversion(out_csv, output_directory)

    try:
        arcpy.Delete_management(r"in_memory/points_SpatialJoin")
        arcpy.Delete_management(r"in_memory/input_points_query")
    except:
        pass

    if clean_up == "true":
        arcpy.Delete_management(os.path.join(output_directory, "Island_Line.shp"))
        arcpy.CopyFeatures_management(
            os.path.join(output_directory, "Island_Poly.shp"),
            os.path.join(output_directory,
                         os.path.splitext(os.path.basename(input_points))[0] +
                         "_poly.shp"))
        arcpy.Delete_management(os.path.join(output_directory, "Island_Poly.shp"))
        arcpy.Delete_management(os.path.join(output_directory, "SectorPoly.shp"))
        arcpy.Delete_management(os.path.join(output_directory, "Fishnet.shp"))
        arcpy.Delete_management(os.path.join(output_directory, "Fishnet_label.shp"))
        arcpy.Delete_management(os.path.join(output_directory, "FishClip.shp"))
        arcpy.Delete_management(os.path.join(output_directory, "FishClipInner.shp"))
        arcpy.Delete_management(os.path.join(output_directory, "RndPts.shp"))
        if int(count_records) > 500:
            for num in range(1, 5):
                arcpy.Delete_management(
                    os.path.join(output_directory,
                                 "SectorPoly" + str(num) + ".shp"))
                arcpy.Delete_management(
                    os.path.join(output_directory,
                                 "SpatialJoin" + str(num) + ".shp"))

    arcpy.AddMessage("....completed: " +
                     os.path.splitext(os.path.basename(input_points))[0] + ".")
    arcpy.CheckInExtension('Spatial')
    return
"%A, %B %d %Y %I:%M:%S%p") ## --------------------------------------------------------------------------- ## 4. Shapefile To Table ## Description: Convert shapefile to dbf files for MX. print "\nStep 4 Shapefile To Table starts at", datetime.datetime.now( ).strftime("%A, %B %d %Y %I:%M:%S%p") yearList = ["90", "00", "10"] fcList = ["ingmun90gw.dbf", "ingmun00gw.dbf", "ingmun10gw.dbf"] for year in yearList: for fc in fcList: in_fc = "MX\\" + fc if fc.startswith("ingmun" + year): arcpy.TableToDBASE_conversion(in_fc, interFolder) in_data = os.path.join(interFolder, fc) out_data = os.path.join(interFolder, "IncomeMX_" + year + ".dbf") arcpy.Rename_management(in_data, out_data) arcpy.AddField_management(out_data, "ISO_GEOID", "TEXT", "", "", "8") if out_data.endswith("90.dbf"): arcpy.CalculateField_management(out_data, "ISO_GEOID", "'484' + !CLAVE!", "PYTHON_9.3") elif out_data.endswith("00.dbf"): arcpy.CalculateField_management(out_data, "ISO_GEOID", "'484' + !CVE_MUN!", "PYTHON_9.3") elif out_data.endswith("10.dbf"): arcpy.CalculateField_management(out_data, "ISO_GEOID",
    if rasters:
        if var == 'TAVG':
            Table = TableDir + os.sep + 'TEM' + str(yr)
        else:
            Table = TableDir + os.sep + var + str(yr)
        # for raster in rasters:
        #     rst = arcpy.Raster(raster)
        #     fltname = raster[:-4]
        #     strname = os.path.basename(raster)[:-4]
        print Table
        # HdrFile = fltname + '.hdr'
        # PrjFile = fltname + '.prj'
        # # HdrName = FltFile + '*.hdr'
        # if not os.path.exists(HdrFile):
        #     copyfile(HdrName, HdrFile)
        # if not os.path.exists(PrjFile):
        #     copyfile(PrjName, PrjFile)

        # Execute Sample
        Sample(rasters, ShpFile, Table, sampMethod)
        arcpy.TableToDBASE_conversion(Table, TableDir)
        arcpy.Delete_management(Table)
        # outTable = TableDir + Var + str(yr)
        # ID = yr * 100 + i
        # i = i + 1
csvtables = [
    os.path.join(path, f) for f in os.listdir(path) if f.endswith(".csv")
]
combined = pd.concat([pd.read_csv(f, delimiter=";") for f in csvtables], axis=1)
# combined = combined.drop(["OID_", "Value"], axis=1)

# convert to csv for later use in R
combined.to_csv(
    r"F:\Studium_Trier\Masterarbeit\Datensaetze\combinedglcm_20200807.csv",
    sep=";")

path = r"F:\Studium_Trier\Masterarbeit\Datensaetze"
inTables = r"F:\Studium_Trier\Masterarbeit\Datensaetze\combinedglcm_20200807.csv"

# Execute TableToDBASE
arcpy.TableToDBASE_conversion(inTables, path)

# get dates
path = r"F:\Studium_Trier\Masterarbeit\Datensaetze\tables\zonalStatistics"
tables = [
    os.path.join(path, f) for f in os.listdir(path) if f.endswith(".dbf")
]
names = [os.path.basename(f)[0:6] for f in tables]
namesUnique = sorted(set(names))
print(namesUnique)

# columns for all dates with NDVI
table = r"F:\Studium_Trier\Masterarbeit\Datensaetze\tables\combined.dbf"
for name in namesUnique:
    fcPath = os.path.join(arcpy.env.workspace, fc)
    print fcPath
    memFC = r'in_memory\memoryFeature'
    expression = """ {0} > 0 """.format(fcNodeField)
    print expression
    arcpy.MakeFeatureLayer_management(fcPath, memFC, where_clause=expression)

    arcpy.env.workspace = outFolder
    # arcpy.env.workspace = rawFolder1
    fieldList = ["Node", "dA", "dPC", "varPC"]
    txtFileDBF = txtFile[:-4] + ".dbf"
    print txtFileDBF
    if os.path.exists(txtFileDBF):
        arcpy.Delete_management(txtFileDBF)
    arcpy.TableToDBASE_conversion(Input_Table=txtFile, Output_Folder=outFolder)
    arcpy.AddJoin_management(in_layer_or_view=memFC, in_field=fcNodeField,
                             join_table=txtFileDBF, join_field=tableNodeField,
                             join_type="KEEP_ALL")

    def getFieldNames(shp):
        fieldnames = [f.name for f in arcpy.ListFields(shp)]
        return fieldnames

    print "Copying shapefile with joined importances to: " + "impJoin_" + fc
    arcpy.env.workspace = outFolder
    arcpy.CopyFeatures_management(memFC, "impJoin_" + fc)
    arcpy.Delete_management(memFC)
    del memFC
    fieldNames = getFieldNames("impJoin_" + fc)
# List extent feature classes
fcs = []
for root, dirs, files in arcpy.da.Walk(infolder):
    for file in files:
        fcs.append(os.path.join(root, file))

# Set workspace
arcpy.env.workspace = mem

# Spatial Join the wetlands to each extent
for fc in fcs:
    name = os.path.basename(fc)
    fms = arcpy.FieldMappings()
    fmid = arcpy.FieldMap()
    fmha = arcpy.FieldMap()
    fmid.addInputField(fc, idfield)
    fmha.addInputField("wetlands", "WetlandHa")
    fmha.mergeRule = 'Sum'
    fms.addFieldMap(fmid)
    fms.addFieldMap(fmha)
    arcpy.SpatialJoin_analysis(fc, wetlands,
                               os.path.join(outfolder,
                                            "Table_" + name + "_Wetlands"),
                               '', '', fms)

# Export feature classes to dbfs
outlist = []
for root, dirs, files in arcpy.da.Walk(outfolder):
    for file in files:
        outlist.append(os.path.join(root, file))
for f in outlist:
    arcpy.TableToDBASE_conversion(f, topoutfolder)
    'State' <None> VISIBLE NONE;'Street ID' <None> VISIBLE NONE;
    'Display X' <None> VISIBLE NONE;'Display Y' <None> VISIBLE NONE;
    'Min X value for extent' <None> VISIBLE NONE;'Max X value for extent' <None> VISIBLE NONE;
    'Min Y value for extent' <None> VISIBLE NONE;'Max Y value for extent' <None> VISIBLE NONE;
    'Additional Field' <None> VISIBLE NONE;'Altname JoinID' <None> VISIBLE NONE"""

print "Working on Locator"
# arcpy.CreateAddressLocator_geocoding(in_address_locator_style="US Address - Single House", in_reference_data=Orig_Parcel_Table, in_field_map=field_map, out_address_locator=add_locator)

# Geocode Addresses to Parcel Data
print "Working on Geocoding"
arcpy.GeocodeAddresses_geocoding(Addresses, add_locator, address_fields,
                                 Stu_Points_int)

# Output to DBF for Analysis in R
print "Converting to DBF"
arcpy.TableToDBASE_conversion([Stu_Points_int], dbf)

# Run R Script to Get Unmatched Count
print "Running Initial_Geocode.R"
script_secondgeo = rpath + "Initial_Geocode.R"
process = subprocess.call([script_path, script_secondgeo], shell=True)
print "Finished Geocode 1"

# Start your count and count_f values as not equal; the script will update them
# when necessary. The count originates from the initial geocode. Start the
# iteration at 2 since the initial geocode is considered iteration 1.
iteration = 2
count = 2
count_f = 1
while count != count_f:
    iteration_string = str(iteration)
from arcpy import env

env.overwriteOutput = True
env.workspace = r"D:\myDocuments\Desktop\DataReorder\bsfCI.gdb\zjCity_bsfCI"
layerName = "zjCity_bsfCI_chemical_tanks"
fieldLists = arcpy.ListFields(layerName)
fieldListText = [
    'OBJECTID', 'building_id', 'type', 'name', 'Height', 'type_name',
    'storage_mode', 'storage_location', 'design_stock', 'key_danger',
    'single_volume', 'storage_medium', 'single_stock', 'emergency_plan',
    'tank_mark'
]
# for field in fieldLists:
#     fieldListText.append(field.name.encode('utf-8'))  # encode to drop the u'' prefix; skip "Shape"
# print fieldListText

# CSV import
# building_csv = r"D:\myDocuments\Desktop\DataReorder\bsfCI_AttributeUpdate\zjCity_bsfCI_chemical_tanks.csv"
# building_field = pd.read_csv(building_csv, encoding='gb2312')
# print building_field.objectid

path = r"D:\myDocuments\Desktop\DataReorder\bsfCI_AttributeUpdate"
csv_Input = path + "/" + layerName + ".csv"
arcpy.TableToDBASE_conversion(csv_Input, path)
dbf_Input = path + "/" + layerName + ".dbf"
# building_dbf = r"D:\myDocuments\Desktop\DataReorder\bsfCI_AttributeUpdate\zjCity_bsfCI_chemical_tanks.dbf"
arcpy.JoinField_management(layerName, 'OBJECTID', dbf_Input, 'objectid')
for field in fieldLists:
    print field.name
    # arcpy.CalculateField_management(layerName,)
arcpy.SpatialJoin_analysis(target_features, join_features, out_feature_class,
                           "JOIN_ONE_TO_MANY", "KEEP_ALL", "", "INTERSECT")
print("Spatial join worked", out_feature_class)

# create folder to store output tables
folder_name = outdir + os.path.normpath("/dbf_output")
if not os.path.exists(folder_name):
    os.makedirs(folder_name)

# Define the list of files to export as tables:
# proj_files contains the projected communities and police districts;
# out_feature_class is the spatial join output file.
to_export = [proj_files, out_feature_class]
for file in to_export:
    arcpy.TableToDBASE_conversion(file, folder_name)

# Run the script with ArcGIS Pro's Python:
#   "C:/Program Files/ArcGIS/Pro/bin/Python/Scripts/propy.bat" "C:/Users/econspare/Dropbox/teaching/pp4rs_2018/2018-uzh-course-material/12-python-arcgis/python_script.py"
"GEOGCS['GCS_WGS_1984',DATUM['D_WGS_1984',SPHEROID['WGS_1984',6378137.0,298.257223563]],PRIMEM['Greenwich',0.0],UNIT['Degree',0.0174532925199433]]", "8_BIT_UNSIGNED", "", "1", "LAST", "FIRST") #Clipping inputras = "F:\\wtmp\\hdf\\output\\mosaic\\m_" + str(date) + "_" + str( i) clipshp = "F:\\Uni\\Projects\\P020_Temprature_NE_MIA\\1_Raw_data\\GIS\\NE_MIA_20kbuf.shp" outras = "F:\\wtmp\\hdf\\output\\clip\\m_" + str(date) + "_" + str(i) # Process: Extract by Mask arcpy.gp.ExtractByMask_sa(inputras, clipshp, outras) print "Layer" + str(i) + ": 2. Mosaic & Clip - Finished" #3. Extract XYZ values to .DBF tables env.workspace = "F:\\wtmp\\hdf\\output\\clip\\" rasterList = arcpy.ListRasters("*", "GRID") for raster in rasterList: tmptbl = "f:\\wtmp\\hdf\\output\\tbl" + str(raster) Outlocation = "f:\\wtmp\\hdf\\output\\xytable" # Process: Sample arcpy.gp.Sample_sa(raster, raster, tmptbl, "NEAREST") # Process: Table to dBASE (multiple) arcpy.TableToDBASE_conversion(tmptbl, Outlocation) print "End of Layer of " + str(i) print "REAL End!!"
def EBK_ga(out_file, zField):
    normaltime = time.strftime('%Y-%m-%d %H:%M:%S', time.localtime(time.time()))
    arcpy.AddMessage(normaltime + ":" + out_file +
                     " running Empirical Bayesian Kriging interpolation...")
    # os.path.splitext drops the .xls suffix; the original str.strip(".xls")
    # would strip those characters from both ends instead.
    outTableName = arcpy.ValidateTableName(
        os.path.splitext(os.path.basename(out_file))[0], out_gdb)
    print(outTableName)
    outTable = os.path.join(out_gdb, outTableName)
    print('Converting sheet1 to {}'.format(outTable))

    # Perform the conversion
    dbfTable = os.path.join(outdBASEPath, outTableName + '.dbf')
    try:
        arcpy.ExcelToTable_conversion(out_file, outTable, "Sheet1")  # Excel to Table
        arcpy.TableToDBASE_conversion(outTable, outdBASEPath)  # Table to dbf
    except Exception as err:
        print("{} is existing".format(dbfTable))
        arcpy.AddMessage(err.message)

    # dbaseTableName = filename.strip(".xls")
    # print(dbaseTableName)
    # outTable = os.path.join(outgdb, dbaseTableName)
    # print(outTable)
    # arcpy.ExcelToTable_conversion(xlsTable, outTable, "Sheet1")

    x_coords = 'Long'  # list(date[u'Long'].head())
    y_coords = 'Lat'   # list(date[u'Lat'].values)
    outLayerName = outTableName + '.lyr'
    outLayer = os.path.join(outLayerPath, outLayerName)
    spRef = "Coordinate Systems\\Geographic Coordinate Systems\\World\\WGS 1984.prj"
    try:
        arcpy.MakeXYEventLayer_management(dbfTable, x_coords, y_coords,
                                          outLayerName, spRef)
    except Exception as err:
        arcpy.AddMessage("MakeXYEventLayer_management: " + outLayerName +
                         " creation failed")
        arcpy.AddMessage(err.message)
    try:
        arcpy.SaveToLayerFile_management(outLayerName, outLayer)
    except Exception as err:
        arcpy.AddMessage("SaveToLayerFile_management: " + outLayer +
                         " creation failed")
        arcpy.AddMessage(err.message)
    try:  # lyr to shp
        arcpy.FeatureClassToShapefile_conversion(outLayer, outShpPath)
    except Exception as err:
        arcpy.AddMessage("FeatureClassToShapefile_conversion: " + outShpPath +
                         " creation failed")
        arcpy.AddMessage(err.message)

    # Set local variables
    inPointFeatures = os.path.join(outShpPath, outTableName + '_lyr.shp')
    Output_geostatistical_layer = ""
    outRasNa = outTableName + '.tif'
    nt = time.strftime('%Y%m%d', time.localtime(time.time()))
    dt = time.strftime('%m%d%H', time.localtime(time.time()))
    outFilePath = "F:\\xiaju\\" + nt + "\\" + houtime + "\\" + zField + "\\" + "tif"
    try:
        os.makedirs(outFilePath)
    except OSError:
        pass  # directory already exists
    outRaster = os.path.join(outFilePath, outRasNa)
    cellSize = 0.001
    transformation = "NONE"
    maxLocalPoints = 50
    overlapFactor = 0.5
    numberSemivariograms = 100

    # Set variables for search neighborhood
    radius = 0.3
    smooth = 0.1
    try:
        searchNeighbourhood = arcpy.SearchNeighborhoodSmoothCircular(radius, smooth)
    except Exception as err:
        arcpy.AddMessage("SearchNeighborhoodSmoothCircular: Failed")
        arcpy.AddMessage(err.message)

    outputType = "PREDICTION"
    quantileValue = ""
    thresholdType = ""
    probabilityThreshold = ""
    semivariogram = "POWER"

    tempEnvironment0 = arcpy.env.extent
    arcpy.env.extent = Extent
    # Execute EmpiricalBayesianKriging
    try:
        arcpy.EmpiricalBayesianKriging_ga(
            inPointFeatures, zField, Output_geostatistical_layer, outRaster,
            cellSize, transformation, maxLocalPoints, overlapFactor,
            numberSemivariograms, searchNeighbourhood, outputType,
            quantileValue, thresholdType, probabilityThreshold)
        print('Converting {} to {}'.format(inPointFeatures, outRasNa))
        arcpy.AddMessage(normaltime + ":" +
                         "Empirical Bayesian Kriging interpolation finished")
    except Exception as err:
        arcpy.AddMessage("EmpiricalBayesianKriging_ga: Failed")
        arcpy.AddMessage(err.message)
    arcpy.env.extent = tempEnvironment0
import arcpy
import unicodedata

## set up the workspace
theDirectory = r"e:\work2\wrap_up2"
arcpy.env.workspace = theDirectory

## name files to variables
CsvState = "nhgis0002_ds172_2010_state.csv"
CsvCounty = "nhgis0002_ds172_2010_county.csv"
US_state_2010 = "US_state_2010.shp"
US_county_2010 = "US_county_2010.shp"

## convert both csv files to dbf tables
arcpy.TableToDBASE_conversion(CsvState, theDirectory)
arcpy.TableToDBASE_conversion(CsvCounty, theDirectory)

# name dbase files to variables
dBase_US_state = "nhgis0002_ds172_2010_state.dbf"
dBase_US_county = "nhgis0002_ds172_2010_county.dbf"

# Join dbf tables to shapefiles
arcpy.JoinField_management(US_state_2010, "GISJOIN", dBase_US_state, "GISJOIN")
arcpy.JoinField_management(US_county_2010, "GISJOIN", dBase_US_county, "GISJOIN")

# create new columns to store population density
arcpy.AddField_management(US_state_2010, "POP_dense", "FLOAT")
arcpy.AddField_management(US_county_2010, "POP_dense", "FLOAT")
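# The excerpt stops after adding the POP_dense fields; the natural next step
# is a field calculation. A sketch -- the population field name H7V001 is a
# guess at an NHGIS column, not taken from this source:
arcpy.CalculateField_management(US_state_2010, "POP_dense",
                                "!H7V001! / !shape.area!", "PYTHON_9.3")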
# If there is no user-defined scratch workspace, use the neighbor file
# directory.
if arcpy.env.scratchWorkspace is None:
    tempdir = descNB.path
else:
    tempdir = arcpy.env.scratchWorkspace

make_temp_dbf = False
# If the input file is a shapefile, then just swap the file extension.
if descDB.dataType == "ShapeFile":
    data = data.replace(".shp", ".dbf")
elif descDB.dataType == "FeatureClass":
    # If it is a feature class, a temporary DBF file will need to be created.
    make_temp_dbf = True
    arcpy.TableToDBASE_conversion([data], tempdir)
    data = tempdir + "\\" + descDB.basename + ".dbf"

start_time = time.time()
try:
    out, selVec = spatialfiltering.spatialfiltering(
        dependent_var, independent_vars, spatial_lag, data, neighbor_list,
        style, zero_policy, tolerance, zero_value, exact_EV, symmetric,
        alpha, alternative)

    # Print summary table header.
    np.set_printoptions(precision=3, suppress=True)
    hdr = " Step SelEvec Eval MinMi"
    hdr += " ZMinMi Pr(ZI) R2 tgamma"
    arcpy.AddMessage(hdr)
    # Print summary table.
    arcpy.AddMessage(np.array_str(np.array(out)))
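# Nothing in this excerpt removes the temporary DBF afterwards. A minimal
# cleanup sketch, assuming data still points at the temporary file and it is
# no longer needed once spatialfiltering returns (not in the original):
if make_temp_dbf:
    arcpy.Delete_management(data)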
HAZARD_File = r'/ForPortal/00_RawIMSMA/HAZARD.xls'
wb = xlrd.open_workbook(HAZARD_File)
for name in wb.sheet_names():
    out = file('%s.csv' % name, 'wb')
    writer = csv.writer(out)
    sheet = wb.sheet_by_name(name)
    for row in xrange(sheet.nrows):
        writer.writerow([sheet.cell_value(row, col)
                         for col in xrange(sheet.ncols)])
    out.close()

inTables = ['/ForPortal/HR.csv', '/ForPortal/HZ.csv']
outLocation = '/ForPortal/02_TABLES/'
arcpy.TableToDBASE_conversion(inTables, outLocation)

print "Joining shapefiles with related tables"
"""
Checking if the output folder is empty before processing.
If there are files available they will be deleted.
"""
if not os.path.exists("/ForPortal/03_JOINED"):
    print "Creating 03_JOINED Folder..."
    os.makedirs("/ForPortal/03_JOINED")
else:
    print "03_JOINED Folder already exists. Contents will be removed"
joinedirPath = "/ForPortal/03_JOINED"
joinedfileList = os.listdir(joinedirPath)