yExpression = '!SHAPE.CENTROID.Y!' arcpy.CalculateField_management(outshp, 'xCentroid', xExpression, "PYTHON_9.3") arcpy.CalculateField_management(outshp, 'yCentroid', yExpression, "PYTHON_9.3") #3. Create a shapefile of centroid in_rows = arcpy.SearchCursor(outshp) outPointSHP = os.path.join(scratch, outPointFileName) #outPointSHP = r'in_memory/SiteMarker' point1 = arcpy.Point() array1 = arcpy.Array() arcpy.CreateFeatureclass_management(scratch, outPointFileName, "POINT", "", "DISABLED", "DISABLED", srGCS83) #arcpy.CreateFeatureclass_management('in_memory', 'SiteMarker', "POINT", "", "DISABLED", "DISABLED", srGCS83) cursor = arcpy.InsertCursor(outPointSHP) feat = cursor.newRow() for in_row in in_rows: # Set X and Y for start and end points point1.X = in_row.xCentroid point1.Y = in_row.yCentroid array1.add(point1) centerpoint = arcpy.Multipoint(array1) array1.removeAll() feat.shape = point1
arcpy.AddCodedValueToDomain_management(workspace_path, domain_name, 2, 'ImpactMelt') arcpy.AddCodedValueToDomain_management(workspace_path, domain_name, 3, 'ImpactBreccia') arcpy.AddCodedValueToDomain_management(workspace_path, domain_name, 4, 'Regolith') print('Done adding Domains') #create new feature class arcpy.CreateFeatureclass_management(out_path=workspace_path, out_name=fc_name, geometry_type="POLYGON", template="", has_m="DISABLED", has_z="DISABLED", spatial_reference=SRDefinition, config_keyword="", spatial_grid_1="0", spatial_grid_2="0", spatial_grid_3="0") print('Done creating feature class') #add a field to feature class #AddField_management (in_table, field_name, field_type, {field_precision}, #{field_scale}, {field_length}, {field_alias}, {field_is_nullable}, {field_is_required}, {field_domain}) arcpy.AddField_management(fc_path, new_field, 'TEXT', '', '', '500', 'new_field', 'NULLABLE', 'NON_REQUIRED', '') #set domain to the field #AssignDomainToField_management (in_table, field_name, domain_name, {subtype_code})
traj = sys.argv[3] # Normtrajectnummer kol1 = "AFSTAND_TOT_DIJKPAAL" kol2 = "RFTOMSCH" #------------------------------------------------------- arcpy.AddMessage("\n >>> START PROFIELNAAM BEPALEN... <<<") # describe de geometry lineDesc = arcpy.Describe(inLine) shapefieldname = lineDesc.ShapeFieldName # cursor lines = arcpy.UpdateCursor(inLine) for line in lines: # lijn uitlezen en begin en eindpunt selecteren. lineGeom = line.getValue(shapefieldname) endPoints = lineGeom #, lineGeom.lastPoint arcpy.AddMessage("\n Punt: " + str(line.OBJECTID)) uitPNT = arcpy.CreateFeatureclass_management("in_memory", "BeginEindPNT", "POINT", "", "", "", inLine) ptRows = arcpy.InsertCursor(uitPNT) ptRow = ptRows.newRow() ptRow.shape = endPoints ptRows.insertRow(ptRow) # Nu locatie op route zoeken tol = "5 Meters" # Zoekafstand 5 meter tbl = "locate_points" props = "RID POINT MEASPnt" # uitvoer kolommen # Execute LocateFeaturesAlongRoutes Mtabel = arcpy.LocateFeaturesAlongRoutes_lr(uitPNT, inRT, Rkol, tol, tbl, props) meas = arcpy.SearchCursor(Mtabel) VanTot = [] for r in meas: naam = r.RID
def workLines(lineNo):
    """Compute the least-cost centerline for one forest-line segment.

    Parameters:
        lineNo -- integer segment number; used to name the per-segment
                  temporary shapefiles/rasters in the shared workspace.

    Reads processing parameters (input feature class, cost raster,
    processing radius) from params.txt in the workspace, builds
    origin/destination point feature classes from the first/last vertex
    of the global ``segment_list``, buffers the segment, clips the cost
    raster to the buffer, runs a cost-distance / cost-path analysis, and
    finally deletes the per-segment temporary files.

    NOTE(review): ``segment_list`` and ``workspaceName`` are supplied as
    module-level globals by the calling framework -- confirm they are
    populated before this worker runs.
    """
    # Temporary files
    outWorkspace = flmc.GetWorkspace(workspaceName)

    # Read params from text file; the with-statement guarantees the
    # handle is closed even if a readline fails (the original left the
    # file open on error).
    with open(outWorkspace + "\\params.txt") as f:
        Forest_Line_Feature_Class = f.readline().strip()
        Cost_Raster = f.readline().strip()
        Line_Processing_Radius = f.readline().strip()

    # Per-segment scratch file names, keyed by the segment number.
    fileSeg = outWorkspace + "\\FLM_CL_Segment_" + str(lineNo) + ".shp"
    fileOrigin = outWorkspace + "\\FLM_CL_Origin_" + str(lineNo) + ".shp"
    fileDestination = outWorkspace + "\\FLM_CL_Destination_" + str(
        lineNo) + ".shp"
    fileBuffer = outWorkspace + "\\FLM_CL_Buffer_" + str(lineNo) + ".shp"
    fileClip = outWorkspace + "\\FLM_CL_Clip_" + str(lineNo) + ".tif"
    fileCostDist = outWorkspace + "\\FLM_CL_CostDist_" + str(lineNo) + ".tif"
    fileCostBack = outWorkspace + "\\FLM_CL_CostBack_" + str(lineNo) + ".tif"
    fileCenterLine = outWorkspace + "\\FLM_CL_CenterLine_" + str(
        lineNo) + ".shp"

    # Find origin and destination coordinates (first / last vertex)
    x1 = segment_list[0].X
    y1 = segment_list[0].Y
    x2 = segment_list[-1].X
    y2 = segment_list[-1].Y

    # Create origin feature class
    try:
        arcpy.CreateFeatureclass_management(outWorkspace, PathFile(fileOrigin),
                                            "POINT", Forest_Line_Feature_Class,
                                            "DISABLED", "DISABLED",
                                            Forest_Line_Feature_Class)
        cursor = arcpy.da.InsertCursor(fileOrigin, ["SHAPE@XY"])
        xy = (float(x1), float(y1))
        cursor.insertRow([xy])
        del cursor
    except Exception as e:
        print("Creating origin feature class failed: at X, Y" + str(xy) + ".")
        print(e)

    # Create destination feature class
    try:
        arcpy.CreateFeatureclass_management(outWorkspace,
                                            PathFile(fileDestination), "POINT",
                                            Forest_Line_Feature_Class,
                                            "DISABLED", "DISABLED",
                                            Forest_Line_Feature_Class)
        cursor = arcpy.da.InsertCursor(fileDestination, ["SHAPE@XY"])
        xy = (float(x2), float(y2))
        cursor.insertRow([xy])
        del cursor
    except Exception as e:
        print("Creating destination feature class failed: at X, Y" + str(xy) +
              ".")
        print(e)

    try:
        # Buffer around line
        arcpy.Buffer_analysis(fileSeg, fileBuffer, Line_Processing_Radius,
                              "FULL", "ROUND", "NONE", "", "PLANAR")

        # Clip cost raster using buffer
        DescBuffer = arcpy.Describe(fileBuffer)
        SearchBox = str(DescBuffer.extent.XMin) + " " + str(
            DescBuffer.extent.YMin) + " " + str(
            DescBuffer.extent.XMax) + " " + str(DescBuffer.extent.YMax)
        arcpy.Clip_management(Cost_Raster, SearchBox, fileClip, fileBuffer,
                              "", "ClippingGeometry", "NO_MAINTAIN_EXTENT")

        # Least cost path
        arcpy.gp.CostDistance_sa(fileOrigin, fileClip, fileCostDist, "",
                                 fileCostBack, "", "", "", "", "TO_SOURCE")
        arcpy.gp.CostPathAsPolyline_sa(fileDestination, fileCostDist,
                                       fileCostBack, fileCenterLine,
                                       "BEST_SINGLE", "")
    except Exception as e:
        # BUGFIX: the end point is (x2, y2); the original printed the
        # start coordinates twice, making failure diagnostics misleading.
        print("Problem with line starting at X " + str(x1) + ", Y " +
              str(y1) + "; and ending at X " + str(x2) + ", Y " + str(y2) +
              ".")
        print(e)

    # Clean temporary files
    arcpy.Delete_management(fileSeg)
    arcpy.Delete_management(fileOrigin)
    arcpy.Delete_management(fileDestination)
    arcpy.Delete_management(fileBuffer)
    arcpy.Delete_management(fileClip)
    arcpy.Delete_management(fileCostDist)
    arcpy.Delete_management(fileCostBack)
# Date of the last day of the previous month (curDay is defined earlier in
# the file as the current day-of-month).
lastMonthEnd = datetime.datetime.now()-datetime.timedelta(days=curDay)

# NOTE(review): this chunk is Python 2 (print statements, `except Exception,e`).
if __name__ == '__main__':
    try:
        print "creat table start..."
        logging.info("creat table start...")
        print time.strftime('%Y-%m-%d %H:%M:%S',startTime)
        logging.info(time.strftime('%Y-%m-%d %H:%M:%S',startTime))
        # SDE connection file is resolved through the local ArcCatalog profile.
        ArcCatalogPath = "C:\\Users\\Administrator\\AppData\\Roaming\\ESRI\\Desktop10.3\\ArcCatalog"
        GISDBPath = "PDB_PMSDB.sde"
        # Monthly feature class name, suffixed with YYYYMM of last month.
        gisFeatureOrg = ArcCatalogPath+"\\"+GISDBPath+"\\SDE.gis_app_measured_data"+lastMonthEnd.strftime('%Y%m')
        # Template feature class whose schema is copied into the new one.
        gisTemp = ArcCatalogPath+"\\"+GISDBPath+"\\SDE.gis_app_measured_data"
        print "create feature-->"+gisFeatureOrg
        logging.info("create feature-->"+gisFeatureOrg)
        # Recreate from scratch: drop any existing feature class first.
        if(arcpy.Exists(gisFeatureOrg)):
            arcpy.Delete_management(gisFeatureOrg)
        gisFeatureOrg = arcpy.CreateFeatureclass_management(os.path.dirname(gisFeatureOrg),os.path.basename(gisFeatureOrg),"Polygon",gisTemp,"DISABLED","DISABLED",arcpy.SpatialReference(4326))
        # Drop the SDE-managed geometry attribute fields copied from the template.
        arcpy.DeleteField_management (gisFeatureOrg, "SHAPE_STArea__")
        arcpy.DeleteField_management (gisFeatureOrg, "SHAPE_STLength__")
        print "creat table end ..."
        logging.info("creat table end...")
    except Exception,e:
        # Log any failure and hard-exit the process.
        print e
        logging.error(e)
        os._exit(0)
def exportFeatureSet(out_fc, feature_set):
    """export features (JSON result) to shapefile or feature class

    Required:
        out_fc -- output feature class or shapefile
        feature_set -- JSON response (feature set) obtained from a query

    at minimum, feature set must contain these keys:
        [u'features', u'fields', u'spatialReference', u'geometryType']

    Returns the created feature class / shapefile path.

    NOTE(review): this function is Python 2 only (`basestring`, `xrange`,
    list-concatenation of ``SKIP_FIELDS.keys()``).
    """
    # validate features input (should be list or dict, preferably list)
    if isinstance(feature_set, basestring):
        try:
            feature_set = json.loads(feature_set)
        except:
            # NOTE(review): bare except also swallows KeyboardInterrupt.
            raise IOError('Not a valid input for "features" parameter!')

    if not isinstance(feature_set, dict) or not 'features' in feature_set:
        raise IOError('Not a valid input for "features" parameter!')

    def find_ws_type(path):
        """determine output workspace (feature class if not FileSystem)
        returns a tuple of workspace path and type

        NOTE(review): if no ancestor directory describes as a workspace,
        this falls off the loop and implicitly returns None, which makes
        the caller's tuple-unpack raise TypeError -- confirm intended.
        """
        # try original path first
        if not arcpy.Exists(path):
            path = os.path.dirname(path)
        desc = arcpy.Describe(path)
        if hasattr(desc, 'workspaceType'):
            return path, desc.workspaceType

        # search until finding a valid workspace
        SPLIT = filter(None, path.split(os.sep))
        # Re-attach the UNC prefix that split() strips from \\server paths.
        if path.startswith('\\\\'):
            SPLIT[0] = r'\\{0}'.format(SPLIT[0])

        # find valid workspace by walking up the path one level at a time
        for i in xrange(1, len(SPLIT)):
            sub_dir = os.sep.join(SPLIT[:-i])
            desc = arcpy.Describe(sub_dir)
            if hasattr(desc, 'workspaceType'):
                return sub_dir, desc.workspaceType

    # find workspace type and path
    ws, wsType = find_ws_type(out_fc)
    if wsType == 'FileSystem':
        # Shapefile output: build in memory first, copy to disk at the end.
        isShp = True
        shp_name = out_fc
        out_fc = r'in_memory\temp_xxx'
    else:
        isShp = False

    # make new feature class
    fields = [Field(f) for f in feature_set['fields']]

    # Prefer the latest WKID when the service reports both.
    sr_dict = feature_set['spatialReference']
    if 'latestWkid' in sr_dict:
        outSR = int(sr_dict['latestWkid'])
    else:
        outSR = int(sr_dict['wkid'])

    g_type = G_DICT[feature_set['geometryType']]
    path, fc_name = os.path.split(out_fc)
    arcpy.CreateFeatureclass_management(path, fc_name, g_type,
                                        spatial_reference=outSR)

    # add all fields
    cur_fields = []
    fMap = []
    if not isShp:
        # Existing geodatabase domains, so we only create missing ones below.
        gdb_domains = arcpy.Describe(ws).domains
    for field in fields:
        if field.type not in [OID, SHAPE] + SKIP_FIELDS.keys():
            # SDE-qualified names like "db.owner.field" -> "field".
            field_name = field.name.split('.')[-1]
            if field.domain and not isShp:
                if field.domain['name'] not in gdb_domains:
                    # Create the missing domain (coded-value or range).
                    if 'codedValues' in field.domain:
                        dType = 'CODED'
                    else:
                        dType = 'RANGE'
                    arcpy.management.CreateDomain(ws, field.domain['name'],
                                                  field.domain['name'],
                                                  FTYPES[field.type], dType)
                    if dType == 'CODED':
                        for cv in field.domain['codedValues']:
                            arcpy.management.AddCodedValueToDomain(
                                ws, field.domain['name'], cv['code'],
                                cv['name'])
                    else:
                        _min, _max = field.domain['range']
                        arcpy.management.SetValueForRangeDomain(
                            ws, field.domain['name'], _min, _max)
                    gdb_domains.append(field.domain['name'])
                    print('added domain "{}" to geodatabase: "{}"'.format(
                        field.domain['name'], ws))
                field_domain = field.domain['name']
            else:
                field_domain = ''

            # need to filter even more as SDE sometimes yields weird field names...sigh
            if not any([
                    'shape_' in field.name.lower(),
                    'shape.' in field.name.lower(),
                    '(shape)' in field.name.lower(),
                    'objectid' in field.name.lower(),
                    field.name.lower() == 'fid'
            ]):
                arcpy.management.AddField(out_fc, field_name,
                                          FTYPES[field.type],
                                          field_length=field.length,
                                          field_alias=field.alias,
                                          field_domain=field_domain)
                cur_fields.append(field_name)
                fMap.append(field)

    # insert cursor to write rows (using arcpy.FeatureSet() is too buggy)
    cur_fields.append('SHAPE@')
    fMap += [f for f in fields if f.type == SHAPE]
    with arcpy.da.InsertCursor(out_fc, cur_fields) as irows:
        for feat in feature_set['features']:
            irows.insertRow(Row(feat, fMap, outSR).values)

    # if output is a shapefile, copy the in_memory result to disk
    if isShp:
        out_fc = arcpy.management.CopyFeatures(out_fc, shp_name)
    print('Created: "{0}"'.format(out_fc))
    return out_fc
def workLinesMem(segment_info):
    """
    New version of worklines. It uses memory workspace instead of shapefiles.
    The refactoring is to accelerate the processing speed.

    segment_info -- sequence where [0] is the segment geometry (iterable of
    parts of points), [1] is the line number and [2] is pass-through
    metadata returned to the caller.

    Returns (centerline_list, segment_info[2]) on success; an empty list
    when the cost-path analysis fails (NOTE(review): callers must accept
    both return shapes -- confirm this asymmetry is intended); None when
    the input is corrupted.
    """
    # input verification
    if segment_info is None or len(segment_info) <= 1:
        print("Input segment is corrupted, ignore")
        # BUGFIX: the original printed the warning but fell through,
        # crashing later on segment_info[1]. Bail out instead.
        return

    # Temporary files
    outWorkspace = flmc.GetWorkspace(workspaceName)

    # Read params from text file; the with-statement guarantees the
    # handle is closed even on a read error.
    with open(outWorkspace + "\\params.txt") as f:
        Forest_Line_Feature_Class = f.readline().strip()
        Cost_Raster = f.readline().strip()
        Line_Processing_Radius = float(f.readline().strip())

    lineNo = segment_info[1]  # second element is the line No.
    outWorkspaceMem = r"memory"
    arcpy.env.workspace = r"memory"

    # Per-segment scratch dataset names in the memory workspace.
    fileSeg = os.path.join(outWorkspaceMem, "FLM_CL_Segment_" + str(lineNo))
    fileOrigin = os.path.join(outWorkspaceMem, "FLM_CL_Origin_" + str(lineNo))
    fileDestination = os.path.join(outWorkspaceMem,
                                   "FLM_CL_Destination_" + str(lineNo))
    fileBuffer = os.path.join(outWorkspaceMem, "FLM_CL_Buffer_" + str(lineNo))
    fileClip = os.path.join(outWorkspaceMem,
                            "FLM_CL_Clip_" + str(lineNo) + ".tif")
    fileCostDist = os.path.join(outWorkspaceMem,
                                "FLM_CL_CostDist_" + str(lineNo) + ".tif")
    fileCostBack = os.path.join(outWorkspaceMem,
                                "FLM_CL_CostBack_" + str(lineNo) + ".tif")
    fileCenterline = os.path.join(outWorkspaceMem,
                                  "FLM_CL_Centerline_" + str(lineNo))

    # Load segment list: flatten every vertex of every part into one list.
    segment_list = []
    for line in segment_info[0]:
        for point in line:  # loops through every point in a line
            # loops through every vertex of every segment
            if point:  # adds all the vertices to segment_list
                segment_list.append(point)

    # Find origin and destination coordinates (first / last vertex)
    x1 = segment_list[0].X
    y1 = segment_list[0].Y
    x2 = segment_list[-1].X
    y2 = segment_list[-1].Y

    # Create segment feature class
    try:
        arcpy.CreateFeatureclass_management(outWorkspaceMem,
                                            os.path.basename(fileSeg),
                                            "POLYLINE",
                                            Forest_Line_Feature_Class,
                                            "DISABLED", "DISABLED",
                                            Forest_Line_Feature_Class)
        cursor = arcpy.da.InsertCursor(fileSeg, ["SHAPE@"])
        cursor.insertRow([segment_info[0]])
        del cursor
    except Exception as e:
        print("Create feature class {} failed.".format(fileSeg))
        print(e)
        return

    # Create origin feature class
    # TODO: not in use, delete later
    try:
        arcpy.CreateFeatureclass_management(outWorkspaceMem,
                                            os.path.basename(fileOrigin),
                                            "POINT",
                                            Forest_Line_Feature_Class,
                                            "DISABLED", "DISABLED",
                                            Forest_Line_Feature_Class)
        cursor = arcpy.da.InsertCursor(fileOrigin, ["SHAPE@XY"])
        xy = (float(x1), float(y1))
        cursor.insertRow([xy])
        del cursor
    except Exception as e:
        print("Creating origin feature class failed: at X, Y" + str(xy) + ".")
        print(e)
        return

    # Create destination feature class
    # TODO: not in use, delete later
    try:
        arcpy.CreateFeatureclass_management(outWorkspaceMem,
                                            os.path.basename(fileDestination),
                                            "POINT",
                                            Forest_Line_Feature_Class,
                                            "DISABLED", "DISABLED",
                                            Forest_Line_Feature_Class)
        cursor = arcpy.da.InsertCursor(fileDestination, ["SHAPE@XY"])
        xy = (float(x2), float(y2))
        cursor.insertRow([xy])
        del cursor
    except Exception as e:
        print("Creating destination feature class failed: at X, Y" + str(xy) +
              ".")
        print(e)
        return

    try:
        # Buffer around line
        arcpy.Buffer_analysis(fileSeg, fileBuffer, Line_Processing_Radius,
                              "FULL", "ROUND", "NONE", "", "PLANAR")

        # Clip cost raster using buffer
        DescBuffer = arcpy.Describe(fileBuffer)
        SearchBox = str(DescBuffer.extent.XMin) + " " + str(DescBuffer.extent.YMin) + " " + \
            str(DescBuffer.extent.XMax) + " " + str(DescBuffer.extent.YMax)
        arcpy.Clip_management(Cost_Raster, SearchBox, fileClip, fileBuffer,
                              "", "ClippingGeometry", "NO_MAINTAIN_EXTENT")

        # Least cost path: cost distance from origin, then trace the best
        # single path back from the destination.
        fileCostDist = CostDistance(arcpy.PointGeometry(arcpy.Point(x1, y1)),
                                    fileClip, "", fileCostBack)
        CostPathAsPolyline(arcpy.PointGeometry(arcpy.Point(x2, y2)),
                           fileCostDist, fileCostBack, fileCenterline,
                           "BEST_SINGLE", "")

        # get centerline polyline out of feature class file
        centerline = []
        with arcpy.da.SearchCursor(fileCenterline, ["SHAPE@"]) as cursor:
            for row in cursor:
                centerline.append(row[0])
    except Exception as e:
        print("Problem with line starting at X " + str(x1) + ", Y " +
              str(y1) + "; and ending at X " + str(x2) + ", Y " + str(y2) +
              ".")
        print(e)
        centerline = []
        return centerline

    # Clean temporary files
    arcpy.Delete_management(fileSeg)
    arcpy.Delete_management(fileOrigin)
    arcpy.Delete_management(fileDestination)
    arcpy.Delete_management(fileBuffer)
    arcpy.Delete_management(fileClip)
    arcpy.Delete_management(fileCostDist)
    arcpy.Delete_management(fileCostBack)

    # Return centerline
    print("Processing line {} done".format(fileSeg))
    return centerline, segment_info[2]
line = arcpy.Polyline( arcpy.Array([arcpy.Point(i[0], i[1]) for i in coordset]), 4326) cursor.insertRow([line]) @timing def create_geom_with_coords(fc, coords, sr): with arcpy.da.InsertCursor(fc, 'SHAPE@') as cursor: for coordset in coords: cursor.insertRow([coordset]) if __name__ == "__main__": fc = arcpy.CreateFeatureclass_management(arcpy.env.scratchGDB, 'line_fc', 'POLYLINE', spatial_reference=4326)[0] coords = [[(-137.7129, 27.5053), (-137.6948, 27.5068), (-137.7486, 27.8083), (-137.7296, 27.7905)], [(-138.1582, 27.6142), (-138.1167, 27.5895), (-137.4899, 27.7587), (-137.4865, 27.7584)], [(-137.4865, 27.7584), (-137.4779, 27.7575), (-137.4779, 27.7575), (-137.4664, 27.7567)], [(-137.4664, 27.7567), (-137.4617, 27.7563), (-137.4617, 27.7563), (-137.4446, 27.7560)], [(-137.4446, 27.7560), (-137.4403, 27.7559), (-137.4403, 27.7559), (-137.4355, 27.7558)], [(-137.4355, 27.7558), (-137.4316, 27.7557), (-137.4316, 27.7557), (-137.4146, 27.7558)]]
isClosed = True # Only start the process of creating Voxel if the Multipatch-Feature is closed if isClosed is True: # Create a Point-Feature Class that holds the Voxel arcpy.AddMessage("Creating Voxel Feature Class for: {}".format( desc.baseName)) logging_file_opened.write( str(datetime.now()) + " Creating Voxel Feature Class for: {}\n".format(desc.baseName)) mp_name = desc.baseName spatial_reference = desc.spatialReference.factoryCode if spatial_reference == 0: spatial_reference = None voxelFC = arcpy.CreateFeatureclass_management(gdb_for_voxel, mp_name + "_voxelFC", "POINT", "", "DISABLED", "ENABLED", spatial_reference) # Add a "TYPE"-field to manage which Voxel are in the Multipatch and which are not arcpy.AddField_management(voxelFC, "TYPE", "SHORT") # Mathematical calculations to determine extent and distance between Voxel extent = desc.extent xMin, xMax, yMin, yMax, zMin, zMax = extent.XMin, extent.XMax, extent.YMin, extent.YMax, extent.ZMin, extent.ZMax # arcpy.AddMessage((xMin, xMax, yMax, yMax, zMin, zMax)) xDist = xMax - xMin yDist = yMax - yMin zDist = zMax - zMin xStep = xDist / vX
def rainfall_above_threshold(returnperiod_folder, rainfall_folder,
                             above_threshold_folder):
    """Build per-return-period rainfall-above-threshold products.

    For every return-period raster (.tif/.tiff) in `returnperiod_folder`:
    extract exceeding rainfall rasters, convert them to point shapefiles,
    export those to CSV, clean the CSVs, and merge everything into one
    sorted CSV per return period in `above_threshold_folder`.

    NOTE(review): relies on globals/imports defined elsewhere in the file
    (`extract_per_returnperiods`, `year`, arcpy, numpy, pandas, glob), and
    mixes Python 2 print statements with print() calls -- this chunk runs
    under Python 2 only. `os.mkdir` raises if a folder already exists.
    """
    print("processing rainfall rate above threshold")
    print("creating folder to save rainfall rate above threshold ")
    os.mkdir(above_threshold_folder)
    for returnperiodfile in os.listdir(returnperiod_folder):
        if returnperiodfile.endswith(".tif") or returnperiodfile.endswith(
                ".tiff"):
            print("processing return period for " + returnperiodfile)
            print("create folder to stored result on this return period...")
            output_folder = os.path.join(above_threshold_folder,
                                         returnperiodfile)
            os.mkdir(output_folder)
            print("create folder to stored raster above return period...")
            rasterfolder = os.path.join(output_folder,
                                        '1_raster_above_threshold')
            os.mkdir(rasterfolder)
            print("Folder 1_raster_above_threshold is created...")
            print("Start Extracting Data.....")
            # Extract rainfall rasters exceeding this return-period raster.
            extract_per_returnperiods(
                rainfall_folder,
                os.path.join(returnperiod_folder, returnperiodfile),
                rasterfolder)
            # ----Creating shapefile----#
            # ----Preparing folder to save shapefile----#
            print("create folder to stored shapefile...")
            shapefile_folder = os.path.join(output_folder, '2_shapefile')
            os.mkdir(shapefile_folder)
            print("Folder shapefile is created...")
            print("start processing raster to shapefile.....")
            for raster_file in os.listdir(rasterfolder):
                if raster_file.endswith(".tif") or raster_file.endswith(
                        ".tiff"):
                    inRaster = os.path.join(rasterfolder, raster_file)
                    # Shapefile name from the first four dot-separated
                    # pieces of the raster file name.
                    shapefilename = raster_file.split('.')[0] + '_' + raster_file.split('.')[1] + '_' + \
                        raster_file.split('.')[2] \
                        + '_' + raster_file.split('.')[3] + '.shp'
                    field = "VALUE"
                    output_point = os.path.join(shapefile_folder,
                                                shapefilename)
                    if arcpy.Exists(
                            os.path.join(shapefile_folder, shapefilename)):
                        print("shapefile " + shapefilename + " is available")
                    else:
                        array = arcpy.RasterToNumPyArray(inRaster)
                        # Only rasters with positive cells become point
                        # shapefiles; otherwise create an empty placeholder.
                        if numpy.max(array) > 0:
                            arcpy.RasterToPoint_conversion(
                                inRaster, output_point, field)
                            print("shapefile " + shapefilename +
                                  " is created")
                        else:
                            arcpy.CreateFeatureclass_management(
                                shapefile_folder,
                                shapefilename,
                                geometry_type="POINT")
                            print("empty shapefile " + shapefilename +
                                  " is created")
                    print(
                        "Start adding X and Y coloumn to Shapefiles......")
                    arcpy.AddXY_management(output_point)
                    # Rainfall column named after the third dot-piece of
                    # the raster file name.
                    field_name = 'r_' + raster_file.split('.')[2]
                    field_type = "FLOAT"
                    if arcpy.ListFields(output_point, field_name):
                        print "Field exists"
                    else:
                        arcpy.AddField_management(output_point, field_name,
                                                  field_type)
                    arcpy.CalculateField_management(
                        output_point, field_name, "!GRID_CODE!",
                        "PYTHON_9.3")
                    # add new x and y point to 3 digit decimal
            print("Creating Shapefile is Done")
            print("create folder to stored csv file...")
            csv_folder = os.path.join(output_folder, '3_shp_to_csv')
            os.mkdir(csv_folder)
            print("Folder shp_to_csv is created...")
            print("start processing shapefile to csv.....")
            arcpy.env.workspace = shapefile_folder
            # Export each shapefile's attribute table to CSV.
            for i in os.listdir(shapefile_folder):
                if i.endswith(".shp"):
                    print(i)
                    print("processing " + i)
                    new_name = i.split('.')[0]
                    new_name_csv = '{0}.csv'.format(new_name)
                    arcpy.TableToTable_conversion(in_rows=i,
                                                  out_path=csv_folder,
                                                  out_name=new_name_csv)
                    print("csv file " + i + " is created")
            print("Shapefile to csv are completed...")
            print("create folder to stored cleaned csv file...")
            cleaned_folder = os.path.join(output_folder, '4_cleaned_csv')
            os.mkdir(cleaned_folder)
            print("Folder cleaned_csv is created...")
            print("start processing cleaning csv files.....")
            # Drop the arcpy-generated OID column from each CSV.
            for k in os.listdir(csv_folder):
                if k.endswith(".csv"):
                    b = pd.read_csv(os.path.join(csv_folder, k), sep=',')
                    drop_other_column = b.drop(['OID'], axis=1)
                    drop_other_column.to_csv(os.path.join(cleaned_folder, k),
                                             sep=',',
                                             index=False)
                    print("cleaned column csv file " + k + " is created")
            print("Cleaning unused coloumn are completed...")
            print("start Merging csv files.....")
            # Merge all cleaned CSVs side by side on (POINT_X, POINT_Y).
            path = cleaned_folder + "/*.csv"
            file_concat = pd.concat([
                pd.read_csv(f, sep=',').set_index(['POINT_X', 'POINT_Y'])
                for f in glob.glob(path)
            ],
                                    axis=1).reset_index()
            file_concat.to_csv(os.path.join(output_folder,
                                            "temp_final_output.csv"),
                               index=False,
                               sep=',')
            csv_to_sort = pd.read_csv(os.path.join(output_folder,
                                                   "temp_final_output.csv"),
                                      sep=',')
            # NOTE(review): `year` is a module-level global -- confirm set.
            final_input_name = 'rainfall_above_threshold_{0}_{1}.csv'.format(
                returnperiodfile.split('_')[5], year)
            # NOTE(review): reindex_axis was removed in pandas 1.0; this
            # requires an old pandas -- confirm pinned version.
            csv_sorted = csv_to_sort.reindex_axis(sorted(csv_to_sort.columns),
                                                  axis=1)
            csv_sorted.to_csv(os.path.join(above_threshold_folder,
                                           final_input_name),
                              sep=',',
                              index=False)
            print("final result is created")
    print("processing rainfall rate above threshold are done")
    print(".......")
    print(".......")
    print(".......")
    print(".......")
arcpy.env.workspace = "C:\\Users\\esther\\Desktop\\EBowlin_Final" arcpy.env.overwriteOutput = True # define variables outFolder = "C:\\Users\\esther\\Desktop\\EBowlin_Final" fClass = "anomalies.shp" inputPath = "C:\\Users\\esther\\Desktop\\EBowlin_Final\\anomalies.txt" try: # open text file for reading inputFile = open(inputPath) # create point shapefile arcpy.CreateFeatureclass_management(outFolder, fClass, "POINT") print "Created point shapefile." # add name field to point shapefile arcpy.AddField_management(fClass, "anom_ID", "SHORT") print "Added 'name' field to point shapefile." # create insert cursor to add names to name field and x,y coordinates cursor = arcpy.da.InsertCursor(fClass, ["anom_ID", "SHAPE@"]) print "Opened point shapefile for editing." except: print "Feature Class creation failed." try:
for raster_file in arcpy.ListRasters(): print("processing " + raster_file + " into shapefile....") inRaster = os.path.join(gdb_folder, raster_file) shapefilename = raster_file + '.shp' field = "VALUE" output_point = os.path.join(max_FM_shapefile_folder, shapefilename) if arcpy.Exists(os.path.join(max_FM_shapefile_folder, shapefilename)): print("shapefile " + shapefilename + " is available") else: array = arcpy.RasterToNumPyArray(inRaster) if numpy.max(array) > 0: arcpy.RasterToPoint_conversion(inRaster, output_point, field) print("shapefile " + shapefilename + " is created") else: arcpy.CreateFeatureclass_management(max_FM_shapefile_folder, shapefilename, geometry_type="POINT") print("empty shapefile " + shapefilename + " is created") print("Start adding X and Y coloumn to Shapefiles......") arcpy.AddXY_management(output_point) field_name = 'r_' + raster_file.split('_')[4] field_type = "FLOAT" if arcpy.ListFields(output_point, field_name): print "Field exists" else: arcpy.AddField_management(output_point, field_name, field_type) arcpy.CalculateField_management(output_point, field_name, "!GRID_CODE!", "PYTHON_9.3") #add new x and y point to 3 digit decimal print("Creating Shapefile is Done")
##--------------------------------------------------------------------- # Import modules import sys, os, arcpy #Allow arcpy to overwrite outputs arcpy.env.overwriteOutput = True # Set input variables (user input) inputFolder = arcpy.GetParameterAsText(0) outputFC = arcpy.GetParameterAsText(1) outputSR = arcpy.GetParameterAsText(2) #Create an empty feature class to which we'll add features outPath, outName = os.path.split(outputFC) arcpy.CreateFeatureclass_management(outPath, outName, "POINT", "", "", "", outputSR) # Add TagID, LC, IQ, and Date fields to the output feature class arcpy.AddField_management(outputFC, "TagID", "LONG") arcpy.AddField_management(outputFC, "LC", "TEXT") arcpy.AddField_management(outputFC, "Date", "TEXT") # Create the insert cursor cur = arcpy.da.InsertCursor(outputFC, ['Shape@', 'TagID', 'LC', 'Date']) #Iterate through each ARGOS file inputFiles = os.listdir(inputFolder) for inputFile in inputFiles: #Don't process README.txt file if inputFile == "README.txt": continue
def new_DCE(srs_template, project_path, AP_fold, DCE_fold, image_path):
    """Create the folder structure and empty shapefiles for a new DCE.

    Parameters:
        srs_template -- dataset whose spatial reference is copied
        project_path -- root folder of the project
        AP_fold -- name of the aerial-photo subfolder under 01_Inputs/01_Imagery
        DCE_fold -- name of the DCE subfolder under 02_Mapping / 03_Analysis
        image_path -- source raster copied into the AP folder as imagery.tif
    """
    # LayerTypes = {
    #     # RSLayer(name, id, tag, rel_path)
    #     'AP_new': RSLayer(date_name, AP_fold, 'Raster', os.path.join('01_Inputs/01_Imagery', AP_fold, 'imagery.tif')),
    #     'INUN_new': RSLayer('Inundation', 'DCE_01_inun', 'Vector', os.path.join('03_Analysis', DCE_fold, 'Shapefiles/inundation.shp')),
    #     'DAM_CREST_new': RSLayer('Dam Crests', 'DCE_01_damcrests', 'Vector', os.path.join('03_Analysis', DCE_fold, 'Shapefiles/dam_crests.shp')),
    #     'TWG_new': RSLayer('Thalwegs', 'DCE_01_thalwegs', 'Vector', os.path.join('03_Analysis', DCE_fold, 'Shapefiles/thalwegs.shp'))
    # }
    #log = Logger('edit_xml')
    #log.info('Loading the XML to make edits...')
    # Load up a new RSProject class
    #project = RSProject(cfg, project_path)
    log = Logger('new_DCE')

    # Set local variables
    has_m = "DISABLED"
    has_z = "DISABLED"

    log.info('before getting spatial reference')
    # Use Describe to get a SpatialReference object
    spatial_reference = arcpy.Describe(srs_template).spatialReference

    log.info('checking if project folders exist')
    # check if Inputs, Mapping, and Analysis folders exist, if not create them
    folder_list = ['01_Inputs', '02_Mapping', '03_Analysis']
    for folder in folder_list:
        if not os.path.exists(os.path.join(project_path, folder)):
            os.makedirs(os.path.join(project_path, folder))
    log.info('Inputs, Mapping, Analysis folders exist')

    # set pathway to imagery folder
    image_folder = os.path.join(project_path, '01_Inputs/01_Imagery')
    # create new AP folder (both branches yield the same AP_path)
    if not os.path.exists(os.path.join(image_folder, AP_fold)):
        os.makedirs(os.path.join(image_folder, AP_fold))
        AP_path = os.path.join(image_folder, AP_fold)
    else:
        AP_path = os.path.join(image_folder, AP_fold)
    log.info('copying image to project folder...')

    def add_image(image_path, AP_folder):
        # put input imagery in folder
        # NOTE(review): existence is tested against 'imagery.png' but the
        # copy writes 'imagery.tif' -- confirm which extension is intended.
        if not os.path.exists(os.path.join(AP_folder, 'imagery.png')):
            arcpy.CopyRaster_management(image_path,
                                        os.path.join(AP_folder, 'imagery.tif'))
        else:
            print("existing image already exists in this AP folder")
    add_image(image_path, AP_path)

    # set pathway to mapping folder
    map_path = os.path.join(project_path, '02_Mapping')
    # check if RS folder exists, if not make one
    if not os.path.exists(os.path.join(map_path, 'RS_01')):
        os.makedirs(os.path.join(map_path, 'RS_01'))
    # create new DCE folder
    if not os.path.exists(os.path.join(map_path, DCE_fold)):
        log.info('creating new DCE shapefiles...')
        os.makedirs(os.path.join(map_path, DCE_fold))
        # inundation
        arcpy.CreateFeatureclass_management(os.path.join(map_path, DCE_fold),
                                            "inundation.shp", "POLYGON", "",
                                            has_m, has_z, spatial_reference)
        # add field for inundation type
        arcpy.AddField_management(
            os.path.join(map_path, DCE_fold, 'inundation.shp'), 'type',
            "TEXT")
        # dam crests
        arcpy.CreateFeatureclass_management(os.path.join(map_path, DCE_fold),
                                            "dam_crests.shp", "POLYLINE", "",
                                            has_m, has_z, spatial_reference)
        # add fields for dam state and crest type
        arcpy.AddField_management(
            os.path.join(map_path, DCE_fold, 'dam_crests.shp'), 'dam_state',
            "TEXT")
        arcpy.AddField_management(
            os.path.join(map_path, DCE_fold, 'dam_crests.shp'), 'crest_type',
            "TEXT")
        arcpy.AddField_management(
            os.path.join(map_path, DCE_fold, 'dam_crests.shp'), 'dam_id',
            "DOUBLE")
        # thalwegs
        arcpy.CreateFeatureclass_management(os.path.join(map_path, DCE_fold),
                                            "thalwegs.shp", "POLYLINE", "",
                                            has_m, has_z, spatial_reference)
        arcpy.AddField_management(
            os.path.join(map_path, DCE_fold, 'thalwegs.shp'), 'type', "TEXT")
    else:
        print("this DCE already exists")

    log.info('updating xml with new DCE...')

    # create a folder in Analysis for this DCE
    analysis_path = os.path.join(project_path, '03_Analysis')
    if not os.path.exists(os.path.join(analysis_path, DCE_fold)):
        os.makedirs(os.path.join(analysis_path, DCE_fold))
    DCEout = os.path.join(analysis_path, DCE_fold)
    # NOTE(review): checks lowercase 'shapefiles' but creates 'Shapefiles';
    # on case-sensitive filesystems the check never matches -- confirm.
    if not os.path.exists(os.path.join(DCEout, 'shapefiles')):
        os.makedirs(os.path.join(DCEout, 'Shapefiles'))
newpath2 = r"F:\01- Maps & Imagery\MXD_Templates\shapefiles\\" if not os.path.exists(newpath2): os.makedirs(newpath2) #-------------------------------Creating Polygon in Empty Feature Class within a blank GeoDatabase-------------------------------# arcpy.env.workspace = r"F:\01- Maps & Imagery\MXD_Templates\shapefiles" arcpy.env.OverwriteOutput = True #Creating Empty Geodatabase in folder path GDBName = "digpolytesting" FileGDB = arcpy.CreateFileGDB_management(newpath2, GDBName) print(arcpy.Exists(FileGDB)) sr = arcpy.SpatialReference(4326) fc = arcpy.CreateFeatureclass_management(FileGDB, "dpExtent", "POINT", "", "DISABLED", "DISABLED", sr) #Adding 3 different fields into the feature class fieldOne = arcpy.AddField_management(fc, 'NAME', 'STRING') fieldTwo = arcpy.AddField_management(fc, 'LAT', 'DOUBLE') fieldThree = arcpy.AddField_management(fc, 'LONG', 'DOUBLE') #opening GeoJSON.IO so user can find lat/lon coordinates print("Opening GeoJson.io...\n ...specify your boundary") webbrowser.open("http://geojson.io/#map=15/38.8047/-76.8726") #adding user input to all the copying/pasting list of each coordinate value print("\nEnter 1 coordinate value [X,Y] in decimal degrees...") print("\nCoordinate 1: ") coordOne = input() print("\nCoordinate 2: ")
def convert_to_kml(input_items, out_workspace, extent, show_progress=False):
    """Convert each input item to a KMZ file in out_workspace.

    Handles several item kinds per iteration: HTTP feature-service layers,
    lists of geometry rows (GeoJSON dicts), feature classes, shapefiles,
    rasters, layer files, feature/CAD datasets and map documents.

    :param input_items: mapping of data source -> output name (or list of
        geometry rows for ad-hoc geometries); Py2 dict (uses iteritems)
    :param out_workspace: folder receiving the .kmz outputs
    :param extent: arcpy extent-like object used to clip the export
    :param show_progress: unused here; progress goes through status_writer
    :return: (converted, errors, skipped) counts
    """
    converted = 0
    errors = 0
    skipped = 0
    # Globals shared with other tasks in this module (progress + field cache).
    global processed_count
    global layer_name
    global existing_fields
    global new_fields
    global field_values
    arcpy.env.overwriteOutput = True
    for ds, out_name in input_items.iteritems():
        try:
            # -----------------------------------------------
            # If the item is a service layer, process and continue.
            # -----------------------------------------------
            if ds.startswith('http'):
                try:
                    service_layer = task_utils.ServiceLayer(
                        ds, extent.JSON, 'esriGeometryPolygon')
                    arcpy.env.overwriteOutput = True
                    oid_groups = service_layer.object_ids
                    out_features = None
                    g = 0.  # float so the percent division below is non-integer
                    group_cnt = service_layer.object_ids_cnt
                    # Reuse a scratch file GDB for downloaded feature batches.
                    if not arcpy.Exists(os.path.join(out_workspace, 'temp.gdb')):
                        temp_gdb = arcpy.CreateFileGDB_management(
                            out_workspace, 'temp.gdb')
                        temp_gdb = temp_gdb[0]
                    else:
                        temp_gdb = os.path.join(out_workspace, 'temp.gdb')
                    # Download the service features in OID batches and append
                    # them into a single feature class.
                    for group in oid_groups:
                        g += 1
                        group = [oid for oid in group if oid]
                        where = '{0} IN {1}'.format(
                            service_layer.oid_field_name, tuple(group))
                        url = ds + "/query?where={}&outFields={}&returnGeometry=true&f=json&".format(
                            where, '*')
                        feature_set = arcpy.FeatureSet()
                        try:
                            feature_set.load(url)
                        except Exception:
                            # Skip a batch that fails to download.
                            continue
                        if not out_features:
                            out_features = arcpy.CopyFeatures_management(
                                feature_set,
                                task_utils.create_unique_name(out_name, temp_gdb))
                        else:
                            features = arcpy.CopyFeatures_management(
                                feature_set,
                                task_utils.create_unique_name(out_name, temp_gdb))
                            arcpy.Append_management(features, out_features, 'NO_TEST')
                            try:
                                arcpy.Delete_management(features)
                            except arcpy.ExecuteError:
                                pass  # best-effort cleanup of the batch copy
                        status_writer.send_percent(
                            float(g) / group_cnt * 100, '', 'convert_to_kml')
                    arcpy.MakeFeatureLayer_management(out_features, out_name)
                    arcpy.LayerToKML_conversion(
                        out_name,
                        '{0}.kmz'.format(os.path.join(out_workspace, out_name)),
                        1,
                        boundary_box_extent=extent)
                    processed_count += 1.
                    converted += 1
                    status_writer.send_percent(processed_count / result_count,
                                               _('Converted: {0}').format(ds),
                                               'convert_to_kml')
                    continue
                except Exception as ex:
                    status_writer.send_state(status.STAT_WARNING, str(ex))
                    errors += 1
                    errors_reasons[ds] = ex.message  # Py2-only attribute
                    continue
            # Is the input a mxd data frame.
            map_frame_name = task_utils.get_data_frame_name(ds)
            if map_frame_name:
                # Strip the "path|frame" suffix so ds is just the mxd path.
                ds = ds.split('|')[0].strip()
            # -------------------------------
            # Is the input a geometry feature
            # -------------------------------
            if isinstance(out_name, list):
                increment = task_utils.get_increment(result_count)
                for row in out_name:
                    try:
                        name = arcpy.ValidateTableName(ds, 'in_memory')
                        name = os.path.join('in_memory', name)
                        # Clip the geometry.
                        geo_json = row['[geo]']
                        geom = arcpy.AsShape(geo_json)
                        row.pop('[geo]')
                        if not arcpy.Exists(name):
                            # First row for this source: create the in-memory
                            # feature class (defaulting the SR to 4326).
                            if arcpy.env.outputCoordinateSystem:
                                layer_name = arcpy.CreateFeatureclass_management(
                                    'in_memory', os.path.basename(name),
                                    geom.type.upper())
                            else:
                                arcpy.env.outputCoordinateSystem = 4326
                                layer_name = arcpy.CreateFeatureclass_management(
                                    'in_memory', os.path.basename(name),
                                    geom.type.upper())
                            # layer_name = arcpy.MakeFeatureLayer_management(name, 'flayer')
                            existing_fields = [
                                f.name for f in arcpy.ListFields(layer_name)
                            ]
                            new_fields = []
                            field_values = []
                            # Every remaining row key becomes a TEXT attribute.
                            for field, value in row.iteritems():
                                valid_field = arcpy.ValidateFieldName(
                                    field, 'in_memory')
                                new_fields.append(valid_field)
                                field_values.append(value)
                                arcpy.AddField_management(
                                    layer_name, valid_field, 'TEXT')
                        else:
                            # Feature class exists; if the geometry type differs,
                            # start a fresh uniquely-named one.
                            if not geom.type.upper() == arcpy.Describe(
                                    name).shapeType.upper():
                                name = arcpy.CreateUniqueName(
                                    os.path.basename(name), 'in_memory')
                                if arcpy.env.outputCoordinateSystem:
                                    layer_name = arcpy.CreateFeatureclass_management(
                                        'in_memory', os.path.basename(name),
                                        geom.type.upper())
                                else:
                                    arcpy.env.outputCoordinateSystem = 4326
                                    layer_name = arcpy.CreateFeatureclass_management(
                                        'in_memory', os.path.basename(name),
                                        geom.type.upper())
                            existing_fields = [
                                f.name for f in arcpy.ListFields(layer_name)
                            ]
                            new_fields = []
                            field_values = []
                            for field, value in row.iteritems():
                                valid_field = arcpy.ValidateFieldName(
                                    field, 'in_memory')
                                new_fields.append(valid_field)
                                field_values.append(value)
                                # Only add fields not already present.
                                if not valid_field in existing_fields:
                                    arcpy.AddField_management(
                                        layer_name, valid_field, 'TEXT')
                        # Insert the geometry plus its attribute values.
                        with arcpy.da.InsertCursor(layer_name,
                                                   ["SHAPE@"] + new_fields) as icur:
                            icur.insertRow([geom] + field_values)
                        arcpy.MakeFeatureLayer_management(
                            layer_name, os.path.basename(name))
                        arcpy.LayerToKML_conversion(
                            os.path.basename(name),
                            '{0}.kmz'.format(
                                os.path.join(out_workspace,
                                             os.path.basename(name))),
                            1,
                            boundary_box_extent=extent)
                        if (processed_count % increment) == 0:
                            status_writer.send_percent(
                                float(processed_count) / result_count,
                                _('Converted: {0}').format(row['name']),
                                'convert_to_kml')
                        processed_count += 1
                        converted += 1
                    except KeyError:
                        processed_count += 1
                        skipped += 1
                        skipped_reasons[ds] = 'Invalid input type'
                        status_writer.send_state(
                            _(status.STAT_WARNING,
                              'Invalid input type: {0}').format(ds))
                    except Exception as ex:
                        processed_count += 1
                        errors += 1
                        errors_reasons[ds] = ex.message
                        continue
                del icur
                continue
            dsc = arcpy.Describe(ds)
            # Avoid clobbering an existing kmz by generating a unique name.
            if os.path.exists(
                    os.path.join('{0}.kmz'.format(
                        os.path.join(out_workspace, out_name)))):
                out_name = os.path.basename(
                    arcpy.CreateUniqueName(out_name + '.kmz', out_workspace))[:-4]
            if dsc.dataType == 'FeatureClass':
                arcpy.MakeFeatureLayer_management(ds, dsc.name)
                if out_name == '':
                    out_name = dsc.name
                arcpy.LayerToKML_conversion(
                    dsc.name,
                    '{0}.kmz'.format(os.path.join(out_workspace, out_name)),
                    1,
                    boundary_box_extent=extent)
                converted += 1
            elif dsc.dataType == 'ShapeFile':
                # [:-4] strips the ".shp" suffix.
                arcpy.MakeFeatureLayer_management(ds, dsc.name[:-4])
                if out_name == '':
                    out_name = dsc.name[:-4]
                arcpy.LayerToKML_conversion(
                    dsc.name[:-4],
                    '{0}.kmz'.format(os.path.join(out_workspace, out_name)),
                    1,
                    boundary_box_extent=extent)
                converted += 1
            elif dsc.dataType == 'RasterDataset':
                arcpy.MakeRasterLayer_management(ds, dsc.name)
                if out_name == '':
                    out_name = dsc.name
                arcpy.LayerToKML_conversion(
                    dsc.name,
                    '{0}.kmz'.format(os.path.join(out_workspace, out_name)),
                    1,
                    boundary_box_extent=extent)
                converted += 1
            elif dsc.dataType == 'Layer':
                if out_name == '':
                    if dsc.name.endswith('.lyr'):
                        out_name = dsc.name[:-4]
                    else:
                        out_name = dsc.name
                arcpy.LayerToKML_conversion(
                    ds,
                    '{0}.kmz'.format(os.path.join(out_workspace, out_name)),
                    1,
                    boundary_box_extent=extent)
                converted += 1
            elif dsc.dataType == 'FeatureDataset':
                # Export every feature class contained in the dataset.
                arcpy.env.workspace = ds
                for fc in arcpy.ListFeatureClasses():
                    arcpy.MakeFeatureLayer_management(fc, 'tmp_lyr')
                    arcpy.LayerToKML_conversion(
                        'tmp_lyr',
                        '{0}.kmz'.format(os.path.join(out_workspace, fc)),
                        1,
                        boundary_box_extent=extent)
                    converted += 1
            elif dsc.dataType == 'CadDrawingDataset':
                # CAD annotation needs a conversion step before export.
                arcpy.env.workspace = dsc.catalogPath
                for cad_fc in arcpy.ListFeatureClasses():
                    if cad_fc.lower() == 'annotation':
                        try:
                            cad_anno = arcpy.ImportCADAnnotation_conversion(
                                cad_fc,
                                arcpy.CreateUniqueName('cadanno',
                                                       arcpy.env.scratchGDB))
                        except arcpy.ExecuteError:
                            # Older signature requires a reference scale arg.
                            cad_anno = arcpy.ImportCADAnnotation_conversion(
                                cad_fc,
                                arcpy.CreateUniqueName('cadanno',
                                                       arcpy.env.scratchGDB), 1)
                        arcpy.MakeFeatureLayer_management(cad_anno, 'cad_lyr')
                        name = '{0}_{1}'.format(dsc.name[:-4], cad_fc)
                        arcpy.LayerToKML_conversion(
                            'cad_lyr',
                            '{0}.kmz'.format(os.path.join(out_workspace, name)),
                            1,
                            boundary_box_extent=extent)
                        converted += 1
                    else:
                        arcpy.MakeFeatureLayer_management(cad_fc, 'cad_lyr')
                        name = '{0}_{1}'.format(dsc.name[:-4], cad_fc)
                        arcpy.LayerToKML_conversion(
                            'cad_lyr',
                            '{0}.kmz'.format(os.path.join(out_workspace, name)),
                            1,
                            boundary_box_extent=extent)
                        converted += 1
            # Map document to KML.
            elif dsc.dataType == 'MapDocument':
                mxd = arcpy.mapping.MapDocument(ds)
                if map_frame_name:
                    data_frames = arcpy.mapping.ListDataFrames(
                        mxd, map_frame_name)
                else:
                    data_frames = arcpy.mapping.ListDataFrames(mxd)
                for df in data_frames:
                    name = '{0}_{1}'.format(dsc.name[:-4], df.name)
                    arcpy.MapToKML_conversion(
                        ds, df.name,
                        '{0}.kmz'.format(os.path.join(out_workspace, name)),
                        extent_to_export=extent)
                converted += 1
            else:
                # Unsupported data type: record as skipped.
                processed_count += 1
                status_writer.send_percent(
                    processed_count / result_count,
                    _('Invalid input type: {0}').format(dsc.name),
                    'convert_to_kml')
                skipped += 1
                skipped_reasons[ds] = _('Invalid input type: {0}').format(
                    dsc.dataType)
                continue
            processed_count += 1
            status_writer.send_percent(processed_count / result_count,
                                       _('Converted: {0}').format(ds),
                                       'convert_to_kml')
            status_writer.send_status(_('Converted: {0}').format(ds))
        except Exception as ex:
            # Any unexpected failure counts as an error but does not abort
            # the remaining items.
            processed_count += 1
            status_writer.send_percent(processed_count / result_count,
                                       _('Skipped: {0}').format(ds),
                                       'convert_to_kml')
            status_writer.send_status(_('WARNING: {0}').format(repr(ex)))
            errors_reasons[ds] = repr(ex)
            errors += 1
            pass
    return converted, errors, skipped
# Build a fan (wedge) polygon path from a center point, sweeping from
# leftAngle to rightAngle at the given radius.  Python 2 script
# (print statements, xrange).
centerPointX = centerPoint[0]
centerPointY = centerPoint[1]
path.append([centerPointX, centerPointY])  # add first point (fan apex)
step = 1  # step in degrees
print "Left Angle, Right Angle"
print leftAngle, rightAngle
# NOTE(review): `range` here is a radius variable that shadows the builtin —
# presumably set earlier in the script; rename would need a wider change.
for d in xrange(int(leftAngle), int(rightAngle), step):
    x = centerPointX + (range * math.cos(math.radians(d)))
    y = centerPointY + (range * math.sin(math.radians(d)))
    path.append([x, y])
path.append([centerPointX, centerPointY])  # add last point (close back to apex)
paths.append(path)

# Create the output polygon feature class (Web Mercator) with fan attributes.
arcpy.AddMessage("Creating target feature class ...")
arcpy.CreateFeatureclass_management(os.path.dirname(outFeature),
                                    os.path.basename(outFeature),
                                    "Polygon", "#", "DISABLED", "DISABLED",
                                    webMercator)
arcpy.AddField_management(outFeature, "Range", "DOUBLE")
arcpy.AddField_management(outFeature, "Bearing", "DOUBLE")

arcpy.AddMessage("Buiding " + str(len(paths)) + " fans ...")
cur = arcpy.InsertCursor(outFeature)  # classic (pre-da) insert cursor
for outPath in paths:
    # Convert the [x, y] vertex list into an arcpy Array of Points.
    lineArray = arcpy.Array()
    for vertex in outPath:
        pnt = arcpy.Point()
        pnt.X = vertex[0]
        pnt.Y = vertex[1]
        lineArray.add(pnt)
        del pnt
    feat = cur.newRow()
# Paths for the street-intersection workflow (notebook cells In[8]-In[10]).
destGDB = r'C:\Users\friendde\Documents\ArcGIS\Projects\NAStreets\NAStreets.gdb'
stIntersection = r'C:\Users\friendde\Documents\ArcGIS\Projects\NAStreets\NAStreets.gdb\StreetIntersection'
stVertices = r'C:\Users\friendde\Documents\ArcGIS\Projects\NAStreets\NAStreets.gdb\StreetVertices'
identEnds = r'C:\Users\friendde\Documents\ArcGIS\Projects\NAStreets\NAStreets.gdb\IdenticalStreetEnds'
# field name -> field alias for the intersection attribute fields
fldNames = {'IntersectingStreets':'Intersecting Streets','StreetName1':'Street Name 1','StreetName2':'Street Name 2','StreetName3':'Street Name 3','StreetName4':'Street Name 4'}

# In[8]:
# Clear everything out of the destination GDB so reruns start clean.
arcpy.env.workspace = destGDB
for fc in arcpy.ListFeatureClasses():
    arcpy.Delete_management(fc)
for tbl in arcpy.ListTables():
    arcpy.Delete_management(tbl)

# Recreate the intersection point feature class in the source streets' SR.
# NOTE(review): alias 'Steet Intersection' is a typo for 'Street Intersection'
# (left unchanged here; it is a runtime string).
sr = arcpy.Describe(sourceStreets).spatialReference
arcpy.CreateFeatureclass_management(destGDB,'StreetIntersection','POINT',spatial_reference=sr,out_alias='Steet Intersection')
for fName,fAlias in fldNames.items():
    #print(fName,fAlias)
    arcpy.AddField_management (stIntersection,fName,'TEXT',field_length=100,field_alias=fAlias,)

# In[9]:
# Extract both ends of every street segment, then find coincident endpoints
# (candidate intersections) by identical SHAPE.
arcpy.FeatureVerticesToPoints_management(sourceStreets,stVertices,'BOTH_ENDS')
arcpy.FindIdentical_management(stVertices,identEnds,'SHAPE',output_record_option='ONLY_DUPLICATES')

# In[10]:
# ---- Tool parameters: create points along lines ----------------------------
line = arcpy.GetParameterAsText(0)          # input polyline features
create_from = arcpy.GetParameterAsText(1)   # where to generate points from
choice = arcpy.GetParameterAsText(2)
use_field = arcpy.GetParameterAsText(3)     # "YES" to carry a field value over
field = arcpy.GetParameterAsText(4)
distance = float(arcpy.GetParameterAsText(5))  # spacing along the line
end_points = arcpy.GetParameterAsText(6)
output = arcpy.GetParameterAsText(7)

# Work in in_memory; reuse the caller's in_memory name if one was given.
if "in_memory" in output:
    mem_name = output.split("\\")[-1]
else:
    mem_name = "mem_point"

# Template the point fc on the input line so it inherits the spatial reference.
mem_point = arcpy.CreateFeatureclass_management("in_memory", mem_name, "POINT",
                                                "", "DISABLED", "DISABLED",
                                                line)
arcpy.AddField_management(mem_point, "LineOID", "TEXT")  # source line OID
arcpy.AddField_management(mem_point, "Value", "TEXT")    # optional carried value

# Progress bar sized by the number of input lines.
result = arcpy.GetCount_management(line)
features = int(result.getOutput(0))
arcpy.SetProgressor("step", "Creating Points on Lines...", 0, features, 1)

fields = ["SHAPE@", "OID@"]
if use_field == "YES":
    fields.append(field)

reverse = False
import arcpy

# Create an empty point feature class for survey-record country centroids,
# inheriting the spatial reference from the WorldCountries feature class.
feature_type = 'POINT'
out_path = r'E:\Documents2\ArcGIS\Projects\RSD_Impacts\RSD_Impacts.gdb'
out_name = r'SurveyRecords_CountryCentroids'
sr = r'E:\Documents2\ArcGIS\Projects\RSD_Impacts\RSD_Impacts.gdb\WorldCountries'

# Field definitions for AddFields: [name, type, alias, (length for TEXT)].
fields = [['RecordID', 'LONG', 'ID'],
          ['ActivityType', 'TEXT', 'Activity Type', 50],
          ['AcademicLeadEmail', 'TEXT', 'Academic Lead Email', 100],
          ['StartDate', 'DATE', 'Start Date'],
          ['EndDate', 'DATE', 'End Date'],
          ['ActivityDetails', 'TEXT', 'Activity Details', 1000],
          ['NIGGoals', 'TEXT', 'NIG Strategic Goals', 200],
          ['FieldOfResearch', 'TEXT', 'Field of Research', 100],
          ['Stakeholder', 'TEXT', 'Stakeholder Name', 250],
          ['StakeholderLocation', 'TEXT', 'Stakeholder Location', 100],
          ['Region', 'TEXT', 'Region', 50],
          ['Country', 'TEXT', 'Country', 100]]

fc0 = arcpy.CreateFeatureclass_management(out_path=out_path,
                                          out_name=out_name,
                                          geometry_type=feature_type,
                                          spatial_reference=sr)
# Layer wrapper so AddFields operates on an in-session layer.
fl0 = arcpy.MakeFeatureLayer_management(fc0, 'tmp_lyr')
arcpy.management.AddFields(fl0, fields)
def main(argv=None):
    """Entry point for the FLM centerline tool.

    Reads tool arguments (from *argv* or FLM_CL_params.txt), splits the input
    forest lines into segments, computes centerlines in parallel worker
    processes, and writes the merged result to the output shapefile.

    :param argv: optional list of argument strings; falls back to the
        params file when None/empty
    """
    # Setup script path and workspace folder
    global workspaceName
    # workspaceName = "FLM_CL_output"
    global outWorkspace
    outWorkspace = flmc.SetupWorkspace(workspaceName)
    # outWorkspace = flmc.GetWorkspace(workspaceName)
    arcpy.env.workspace = outWorkspace
    arcpy.env.overwriteOutput = True

    # Load arguments from file
    if argv:
        args = argv
    else:
        args = flmc.GetArgs("FLM_CL_params.txt")

    # Tool arguments (globals so the worker function can read them).
    global Forest_Line_Feature_Class
    Forest_Line_Feature_Class = args[0].rstrip()
    global Cost_Raster
    Cost_Raster = args[1].rstrip()
    global Line_Processing_Radius
    Line_Processing_Radius = args[2].rstrip()
    ProcessSegments = args[3].rstrip() == "True"
    Out_Centerline = args[4].rstrip()

    # write params to text file (record of this run's inputs)
    f = open(outWorkspace + "\\params.txt", "w")
    f.write(Forest_Line_Feature_Class + "\n")
    f.write(Cost_Raster + "\n")
    f.write(Line_Processing_Radius + "\n")
    f.close()

    # Prepare input lines for multiprocessing
    fields = flmc.GetAllFieldsFromShp(Forest_Line_Feature_Class)
    segment_all = flmc.SplitLines(Forest_Line_Feature_Class, outWorkspace,
                                  "CL", ProcessSegments, fields)

    # Fan the segments out over all available cores.
    pool = multiprocessing.Pool(processes=flmc.GetCores())
    flmc.log("Multiprocessing center lines...")
    flmc.log("Using {} CPU cores".format(flmc.GetCores()))
    centerlines = pool.map(workLinesMem, segment_all)
    pool.close()
    pool.join()
    flmc.logStep("Center line multiprocessing done.")

    # No line generated, exit
    if len(centerlines) <= 0:
        print("No lines generated, exit")
        return

    # Create output centerline shapefile, templated on the input lines so it
    # carries the same attribute schema and spatial reference.
    flmc.log("Create centerline shapefile...")
    try:
        arcpy.CreateFeatureclass_management(os.path.dirname(Out_Centerline),
                                            os.path.basename(Out_Centerline),
                                            "POLYLINE",
                                            Forest_Line_Feature_Class,
                                            "DISABLED", "DISABLED",
                                            Forest_Line_Feature_Class)
    except Exception as e:
        print("Create feature class {} failed.".format(Out_Centerline))
        print(e)
        return

    # Flatten centerlines which is a list of list:
    # each worker returns (geometries, attribute-row) pairs.
    flmc.log("Writing centerlines to shapefile...")
    cl_list = []
    for sublist in centerlines:
        if len(sublist) > 0:
            for item in sublist[0]:
                cl_list.append([item, sublist[1]])

    # arcpy.Merge_management(cl_list, Out_Centerline)
    with arcpy.da.InsertCursor(Out_Centerline, ["SHAPE@"] + fields) as cursor:
        for line in cl_list:
            row = []
            for i in fields:
                row.append(line[1][i])
            cursor.insertRow([line[0]] + row)

    # TODO: inspect CorridorTh
    # CorridorTh is added to footprint tool as new parameter
    # This can be removed after testing
    if arcpy.Exists(Out_Centerline):
        arcpy.AddField_management(Out_Centerline, "CorridorTh", "DOUBLE")
        arcpy.CalculateField_management(Out_Centerline, "CorridorTh", "3")
    flmc.log("Centerlines shapefile done")
#04 COMBINED CLASSIFIED FEATURECLASSES #Set Variables arcpy.AddMessage("Combining Classified FeatureClasses") VelocCombFC= "FRM_FH_VELOCITY_RIVER_"+HA+"_v"+Version VelocDirCombFC= "FRM_FH_VELOCITY_DIRECTION_RIVER_"+HA+"_v"+Version ExtentCombFC= "FRM_FH_EXTENT_RIVER_"+HA+"_v"+Version DepthCombFC= "FRM_FH_DEPTH_RIVER_"+HA+"_v"+Version arcpy.AddMessage("Adding attribute fields to Classified FeatureClasses") CombiListPoly= [VelocCombFC, ExtentCombFC, DepthCombFC] CombiListPoint=[VelocDirCombFC] Release= datetime.datetime(2019, 12, 19) for i in CombiListPoly: arcpy.CreateFeatureclass_management(str(R4FINALstr), str(i), "POLYGON","#","#","#",str(Velocity10D)) arcpy.AddField_management(str(R4FINALstr)+"\\"+str(i),"MAP_TYPE", "TEXT", "", "", "","", "NULLABLE") arcpy.AddField_management(str(R4FINALstr)+"\\"+str(i),"SOURCE", "TEXT", "", "", "","", "NULLABLE") arcpy.AddField_management(str(R4FINALstr)+"\\"+str(i),"METRIC", "TEXT", "", "", "","", "NULLABLE") arcpy.AddField_management(str(R4FINALstr)+"\\"+str(i),"PROB", "TEXT", "", "", "","", "NULLABLE") arcpy.AddField_management(str(R4FINALstr)+"\\"+str(i),"BAND_DESC", "TEXT", "", "", "","", "NULLABLE") arcpy.AddField_management(str(R4FINALstr)+"\\"+str(i),"GRIDCODE", "LONG", "", "", "","", "NULLABLE") arcpy.AddField_management(str(R4FINALstr)+"\\"+str(i),"REFERENCE", "TEXT", "", "", "","", "NULLABLE") arcpy.AddField_management(str(R4FINALstr)+"\\"+str(i),"VERSION", "TEXT", "", "", "","", "NULLABLE") arcpy.AddField_management(str(R4FINALstr)+"\\"+str(i),"Issue_Date", "DATE", "", "", "","", "NULLABLE") arcpy.CalculateField_management(str(R4FINALstr)+"\\"+str(i),"Issue_Date", Release, "PYTHON_9.3") for i in CombiListPoint: arcpy.CreateFeatureclass_management(str(R4FINALstr), str(i), "POINT","#","#","#",str(Velocity10D)) arcpy.AddField_management(str(R4FINALstr)+"\\"+str(i),"MAP_TYPE", "TEXT", "", "", "","", "NULLABLE") arcpy.AddField_management(str(R4FINALstr)+"\\"+str(i),"SOURCE", "TEXT", "", "", "","", "NULLABLE")
# Buffer the final road tile, then merge all tile buffers into one layer.
print('Now buffering, be patient...')
arcpy.Buffer_analysis("roads_tile_08", roads_Buffer_08, buffer_value, "FULL",
                      "ROUND", "ALL", "")
print('Buffering Layer 08 completed')

# Append tiles to final layer
# (a tuple of the eight per-tile buffer outputs)
fcList = roads_Buffer_01, roads_Buffer_02, roads_Buffer_03, roads_Buffer_04, roads_Buffer_05, roads_Buffer_06, roads_Buffer_07, roads_Buffer_08
#fcList = arcpy.ListFeatureClasses()
spatial_reference = arcpy.Describe(roads_Buffer_01).spatialReference
schemaType = "NO_TEST"  # append without schema matching
fieldMappings = ""
subtype = ""
# NOTE(review): Append targets `out_name` (a bare name, not a path) — this
# relies on arcpy.env.workspace pointing at the output location; confirm.
arcpy.CreateFeatureclass_management(out_path, out_name, geometry_type,
                                    template, has_m, has_z, spatial_reference)
print('Now appending layers, be patient...')
arcpy.Append_management(fcList, out_name, schemaType, fieldMappings, subtype)
print('Layers appended')

# Delete tiles (intermediate per-tile buffers are no longer needed)
arcpy.Delete_management(roads_Buffer_01)
arcpy.Delete_management(roads_Buffer_02)
arcpy.Delete_management(roads_Buffer_03)
arcpy.Delete_management(roads_Buffer_04)
arcpy.Delete_management(roads_Buffer_05)
arcpy.Delete_management(roads_Buffer_06)
arcpy.Delete_management(roads_Buffer_07)
arcpy.Delete_management(roads_Buffer_08)
print('Intermediate layers deleted')
## apply d3 (if True)- filter points if apply_d3 == 1: poi_filtered = r'in_memory/poifiltered' delimfield = arcpy.AddFieldDelimiters(poif, 'dist') arcpy.Select_analysis(poif, poi_filtered, "{0} <= {1}".format(delimfield, d3)) poif = poi_filtered ## Main functionalities # applying buffer around home and activity points arcpy.Buffer_analysis(home, 'bufhom', hombufd, "FULL", "ROUND", "NONE", "", "PLANAR") arcpy.Buffer_analysis(poif, 'bufpoi', poibufd, "FULL", "ROUND", "NONE", "", "PLANAR") # creating a temporary working feature class in Memory arcpy.CreateFeatureclass_management('in_memory', 'tempnb', "POLYGON", 'bufhom', "DISABLED", "DISABLED", home, '#', '#', '#', '#') # Cursor iterating through individuals using unique numerical identifier field 'uid' cursor3 = arcpy.SearchCursor('bufhom') for row in cursor3: u = row.uid print u # identifying the proper Field delimiters for the follwoing Select step delimfield = arcpy.AddFieldDelimiters('bufhom', 'uid') # Selecting activity and home points for each individual for iteration arcpy.Select_analysis('bufhom', 'homsel', "{0} = {1}".format(delimfield, u)) arcpy.Select_analysis('bufpoi', 'poisel', "{0} = {1}".format(delimfield, u)) # Uniting and dissolving the home and activity points buffers to apply the bounding geometry (convex hull) arcpy.Union_analysis(['poisel', 'homsel'], 'union1', "ALL", "", "GAPS")
# Web Mercator output SR for the in-memory comparison feature classes.
spatial_reference = arcpy.SpatialReference('Projected Coordinate Systems/World/WGS 1984 Web Mercator (auxiliary sphere)')
arcpy.env.workspace = "C:\\data\\" + connectionname
# Build the SDE connection path to the elec dataset.
# NOTE(review): `databse` is a (misspelled) variable presumably defined
# earlier in the script — confirm it exists upstream.
sdeaddress = "C:\\Users\\" + user + "\\AppData\\Roaming\\Esri\\Desktop10.2\\ArcCatalog\\" + connectionname+"\\"+ databse + ".SDE.elec"
fullPath1=sdeaddress + "\\" + layerName1
fullPath2=sdeaddress + "\\" + layerName2

def get_a_uuid():
    # Random UUID4 as a plain string, used as a marker GUID below.
    return str(uuid.uuid4())

MtGuid = get_a_uuid()

desc1 = arcpy.Describe(fullPath1)
desc2 = arcpy.Describe(fullPath2)

# In-memory copy #1, matching the first layer's geometry type.
# NOTE(review): iterating a geoprocessing Result yields its output values;
# the loop runs once over the created feature class path — confirm intent.
FC1 = arcpy.CreateFeatureclass_management("in_memory","fc1q",desc1.shapeType,"","DISABLED","DISABLED",spatial_reference)
for FeatureClass in FC1:
    arcpy.AddField_management(FeatureClass, "TableName", "TEXT", "", "", "80", "", "NULLABLE", "NON_REQUIRED", "")
    arcpy.AddField_management(FeatureClass, "Obj_ID", "TEXT", "", "", "80", "", "NULLABLE", "NON_REQUIRED", "")
arcpy.AddMessage(" FC1 created")

# In-memory copy #2, matching the second layer's geometry type.
FC2 = arcpy.CreateFeatureclass_management("in_memory","fc2q",desc2.shapeType,"","DISABLED","DISABLED",spatial_reference)
for FeatureClass in FC2:
    arcpy.AddField_management(FeatureClass, "TableName", "TEXT", "", "", "80", "", "NULLABLE", "NON_REQUIRED", "")
    arcpy.AddField_management(FeatureClass, "Obj_ID", "TEXT", "", "", "80", "", "NULLABLE", "NON_REQUIRED", "")
arcpy.AddMessage(" FC2 created")

# Result container for the comparison output.
FCResult = arcpy.CreateFeatureclass_management("in_memory","fcresult",desc1.shapeType,"","DISABLED","DISABLED",spatial_reference)
arcpy.AddMessage(" Add CreateFeatureclases ")
#get spatial reference from data frame frame = arcpy.mapping.ListDataFrames(thisMap)[0] inRef = frame.spatialReference #get page name field if "Name" option is selected if nameOrNumber == True: nameField = str(ddPages.pageNameField.name) else: pass inPath = os.path.split(inName)[0] inTable = os.path.split(inName)[1] #create feature class to hold overview polygons and add label field arcpy.CreateFeatureclass_management(inPath,inTable,"POLYGON",'',"DISABLED","DISABLED",inRef) arcpy.AddField_management(inName,"label",'TEXT') with arcpy.da.InsertCursor(inName,['SHAPE@','label']) as polyCursor: for pageNum in range(1, ddPages.pageCount + 1): ddPages.currentPageID = pageNum #set overview polygon labels as either page number or page name if nameOrNumber == True: zone = ddPages.pageRow.getValue(nameField) else: zone = ddPages.currentPageID #create polygon from extents of Data Driven Page Extent Ext = ddPages.dataFrame.extent
#change the Spatial Reference to projected System (UTM 47 N) arcpy.CreateFileGDB_management("C:/", "fGDB.gdb") fc = r"C:\fGDB.gdb\Temp_fc" outCS = arcpy.SpatialReference("WGS 1984 UTM Zone 47N") arcpy.Project_management(fcOrig, fc, outCS) if name.endswith(".shp"): name = name else: name = name + ".shp" file_name = path + "\\" + name #creating feature class arcpy.CreateFeatureclass_management(path, name, "POINT", has_z="ENABLED", spatial_reference=fc) #adding new columns arcpy.AddField_management(file_name, "X", "DOUBLE") arcpy.AddField_management(file_name, "Y", "DOUBLE") arcpy.AddField_management(file_name, "threeD_len", "DOUBLE") arcpy.AddField_management(file_name, "twoD_len", "DOUBLE") arcpy.AddField_management(file_name, "phName", "TEXT") arcpy.AddField_management(file_name, "rdId", "TEXT") #what attributes will be added when every new feature is added cursor0 = arcpy.da.InsertCursor( file_name, ["SHAPE@XY", "X", "Y", "twoD_len", "phName", "rdId"])
# NOTE(review): r'\server\folder' has a single leading backslash — a UNC
# share would be r'\\server\folder'; confirm the intended path.
basepath = r'\server\folder'
fgdb_path = basepath + r'\test.gdb'
arcpy.env.workspace = fgdb_path

# Drop and recreate the feature class so reruns start from an empty table.
fc = fgdb_path + r'\residential_mixed_and_open_spaces'
if arcpy.Exists(fc):
    # arcpy.DeleteFeatures_management(fc)  # doesn't delete the actual feature class
    arcpy.Delete_management(fc)

# `spatial_reference` is assumed to be defined earlier in the script.
arcpy.CreateFeatureclass_management(
    out_path=fgdb_path,
    out_name='residential_mixed_and_open_spaces',
    geometry_type='POINT',
    template=None,
    has_m='DISABLED',
    has_z='DISABLED',
    spatial_reference=spatial_reference)

# add attributes to the newly created feature class
# (bare name resolves against arcpy.env.workspace set above)
arcpy.AddField_management('residential_mixed_and_open_spaces', 'HOUSENUMBER',
                          'TEXT', field_alias='HOUSENUMBER',
                          field_is_nullable='NULLABLE')
arcpy.AddField_management('residential_mixed_and_open_spaces', 'STREETNAME',
                          'TEXT', field_alias='STREETNAME',
                          field_is_nullable='NULLABLE')
def gis_processors_setup_fc(the_scenario, logger):
    """Create or import the scenario's processors feature class.

    If no baseline processors layer is configured ("null"/"none"), an empty
    point feature class is created in the main GDB; otherwise the baseline
    layer is projected into the working GDB.  Processors with no commodity
    data in the CSV are then removed, locations are sanity-checked against a
    rough North America bounding box, and any candidate processors are merged
    in.

    :param the_scenario: FTOT scenario object (paths, layers, commodity CSV)
    :param logger: FTOT logger (info/debug/warning/error/config levels)
    :raises IOError: when the configured baseline layer does not exist
    :raises Exception: when a facility lies outside North America
    """
    logger.info("start: gis_processors_setup_fc")
    start_time = datetime.datetime.now()

    base_layer = str(the_scenario.base_processors_layer).lower()
    if base_layer == "null" or base_layer == "none":
        # create an empty processors layer
        # -------------------------
        processors_fc = the_scenario.processors_fc
        if arcpy.Exists(processors_fc):
            arcpy.Delete_management(processors_fc)
            logger.debug("deleted existing {} layer".format(processors_fc))
        arcpy.CreateFeatureclass_management(
            the_scenario.main_gdb, "processors", "POINT", "#", "DISABLED",
            "DISABLED", ftot_supporting_gis.LCC_PROJ, "#", "0", "0", "0")
        arcpy.AddField_management(processors_fc, "Facility_Name", "TEXT", "#",
                                  "#", "25", "#", "NULLABLE", "NON_REQUIRED",
                                  "#")
        arcpy.AddField_management(processors_fc, "Candidate", "SHORT")
    else:
        # copy the processors from the baseline data to the working gdb
        # ----------------------------------------------------------------
        if not arcpy.Exists(the_scenario.base_processors_layer):
            error = "can't find baseline data processors layer {}".format(
                the_scenario.base_processors_layer)
            raise IOError(error)
        processors_fc = the_scenario.processors_fc
        arcpy.Project_management(the_scenario.base_processors_layer,
                                 processors_fc, ftot_supporting_gis.LCC_PROJ)
        arcpy.AddField_management(processors_fc, "Candidate", "SHORT")

    # Delete features with no data in csv -- cleans up GIS output and
    # eliminates unnecessary GIS processing.
    # --------------------------------------------------------------
    # temp dict of facility_name -> True for facilities with a positive value.
    temp_facility_commodities_dict = {}
    counter = 0

    # read through facility_commodities input CSV
    import csv
    with open(the_scenario.processors_commodity_data, 'rb') as f:
        reader = csv.DictReader(f)
        for row in reader:
            facility_name = str(row["facility_name"])
            commodity_quantity = row["value"]
            if facility_name not in temp_facility_commodities_dict:
                # BUG FIX: the CSV value is a string; under Python 2 the old
                # `commodity_quantity > 0` compared str to int, which is
                # always True, so the "no data" filter never removed anything.
                # Compare numerically; keep the facility if the value cannot
                # be parsed (matches the old permissive outcome for odd rows).
                try:
                    has_commodity = float(commodity_quantity) > 0
                except (TypeError, ValueError):
                    has_commodity = True
                if has_commodity:
                    temp_facility_commodities_dict[facility_name] = True

    # Remove processors that never appeared with a positive commodity value.
    with arcpy.da.UpdateCursor(processors_fc, ['Facility_Name']) as cursor:
        for row in cursor:
            if row[0] not in temp_facility_commodities_dict:
                cursor.deleteRow()
                counter += 1
    del cursor
    logger.config(
        "Number of processors removed due to lack of commodity data: \t{}".
        format(counter))

    # Sanity-check locations against a rough North America bounding box
    # (coordinates in the LCC projection's meters).
    with arcpy.da.SearchCursor(
            processors_fc, ['Facility_Name', 'SHAPE@X', 'SHAPE@Y']) as scursor:
        for row in scursor:
            if not (-6500000 < row[1] < 6500000 and -3000000 < row[2] < 5000000):
                logger.warning(
                    "Facility: {} is not located in North America.".format(
                        row[0]))
                logger.info(
                    "remove the facility from the scenario or make adjustments to the facility's location "
                    "in the processors feature class: {}".format(
                        the_scenario.base_processors_layer))
                error = "Facilities outside North America are not supported in FTOT"
                logger.error(error)
                raise Exception(error)
    del scursor

    # check for candidates or other processors specified in either XML or
    layers_to_merge = []

    # add the candidates_for_merging if they exist.
    if arcpy.Exists(the_scenario.processor_candidates_fc):
        logger.info(
            "adding {} candidate processors to the processors fc".format(
                gis_get_feature_count(the_scenario.processor_candidates_fc,
                                      logger)))
        layers_to_merge.append(the_scenario.processor_candidates_fc)
        gis_merge_processor_fc(the_scenario, layers_to_merge, logger)

    result = gis_get_feature_count(processors_fc, logger)
    logger.config("Number of Processors: \t{}".format(result))

    logger.debug("finish: gis_processors_setup_fc: Runtime (HMS): \t{}".format(
        ftot_supporting.get_total_runtime_string(start_time)))
arcpy.Delete_management(CellShanXingPoints, "FeatureClass") print "delete exists CellThiessFinal" logging.info("delete exists CellThiessFinal") if (arcpy.Exists(CellThiessFinal)): arcpy.Delete_management(CellThiessFinal, "FeatureClass") print "delete exists CellThiessCache" logging.info("delete exists CellThiessCache") if (arcpy.Exists(CellThiessCache)): arcpy.Delete_management(CellThiessCache, "FeatureClass") print "delete exists PointInCache" logging.info("delete exists PointInCache") if (arcpy.Exists(PointInCache)): arcpy.Delete_management(PointInCache, "FeatureClass") XiaoQuFields = arcpy.ListFields(infc) PointInCache = arcpy.CreateFeatureclass_management( os.path.dirname(PointInCache), os.path.basename(PointInCache), "Point", PointTemplate, "DISABLED", "DISABLED", arcpy.SpatialReference(4326)) print "筛选室内站生成要素" ShiNeiCell_LTE_ALL = arcpy.TableSelect_analysis( infc, "in_memory/ShiNeiCell_LTE_ALL", "HONEYCOMB_TYPE='室内' and TIME_STAMP = TO_DATE('" + currentDoTimeStr + "','YYYY-MM-DD HH24:MI:SS')") createFeatureFromXY(ShiNeiCell_LTE_ALL, "LONGITUDE", "LATITUDE", PointInCache, XiaoQuFields) print "生成室内站圆形" arcpy.Buffer_analysis(PointInCache, CellShanXing, "30 Meters", "FULL", "ROUND", "NONE", "") CellThiessFinal = arcpy.CreateFeatureclass_management( os.path.dirname(CellThiessFinal), os.path.basename(CellThiessFinal), "Polygon", CellThiessFinalTemp, "DISABLED", "DISABLED",