def log_jaccard(im_id: str, cls: int, true_mask: np.ndarray, mask: np.ndarray, poly_mask: np.ndarray, true_poly: MultiPolygon, poly: MultiPolygon, valid_polygons=False):
    """Compute and log pixel-level and polygon-level jaccard stats for one image/class.

    Returns a pair of (tp, fp, fn) triples: one derived from the raster masks
    and one from either the polygon geometries themselves (valid_polygons=True)
    or the rasterized polygon mask.
    """
    assert len(mask.shape) == 2
    pixel_jc = utils.mask_tp_fp_fn(mask, true_mask, 0.5)
    if not valid_polygons:
        poly_jc = utils.mask_tp_fp_fn(poly_mask, true_mask, 0.5)
    else:
        # Repair invalid geometries with the zero-width buffer trick first.
        if not true_poly.is_valid:
            true_poly = utils.to_multipolygon(true_poly.buffer(0))
        if not poly.is_valid:
            poly = utils.to_multipolygon(poly.buffer(0))
        poly_jc = (
            true_poly.intersection(poly).area,  # true positive area
            poly.difference(true_poly).area,    # false positive area
            true_poly.difference(poly).area,    # false negative area
        )
    logger.info(
        '{} cls-{} pixel jaccard: {:.5f}, polygon jaccard: {:.5f}'.format(
            im_id, cls, jaccard(pixel_jc), jaccard(poly_jc)))
    return pixel_jc, poly_jc
def glyph_to_mp(g: fontforge.glyph) -> MultiPolygon:
    """Convert a fontforge glyph into a shapely MultiPolygon.

    The first contour of the foreground layer seeds the shape; every later
    contour is either subtracted when it lies inside the shape built so far
    (i.e. it is a hole) or appended as an additional disjoint part.

    Parameters
    ----------
    g: fontforge glyph
        Glyph to be converted

    Returns
    -------
    mp: shapely MultiPolygon
        MultiPolygon representing the whole glyph
    """
    foreground = g.layers[1]
    contours = list(MultiPolygon(
        [Polygon([(pt.x, pt.y) for pt in contour]) for contour in foreground]))
    mp = MultiPolygon(contours[:1])
    for piece in contours[1:]:
        if mp.contains(piece):
            # A contained contour is a hole: carve it out.
            mp = mp.difference(piece)
        else:
            mp = MultiPolygon([*mp, piece])
    if type(mp) == Polygon:
        # difference() may collapse the result down to a bare Polygon.
        mp = MultiPolygon([mp])
    return mp
def difference_polygon(left: geometry.MultiPolygon, right: geometry.MultiPolygon) -> float:
    """Area of ``left`` that does not overlap ``right``; 0 on topology errors."""
    try:
        return left.difference(right).area
    except TopologicalError:
        # Invalid/degenerate geometry: log the failure and treat as zero area.
        logger.error(f"Polygonal error")
        return 0
def row_geometry(row):
    """Build a shapely geometry from a row of coordinate arrays.

    ``row[0]`` holds latitudes and ``row[1]`` longitudes; they are stacked
    into (lon, lat) pairs.  When the ring spans the antimeridian (longitude
    extent > 180 degrees), the polygon is replaced by the difference between
    two thin strips hugging +/-180 degrees and the original ring.

    Returns a shapely Polygon (or the MultiPolygon difference result for
    antimeridian-crossing rows).
    """
    coordinates = np.column_stack((row[1], row[0]))
    polygon = Polygon(coordinates)
    if coordinates[:, 0].max() - coordinates[:, 0].min() > 180:
        # BUG FIX: MultiPolygon takes ONE sequence of (shell, holes) tuples
        # (or Polygon objects). The previous code passed the two bare
        # coordinate rings as separate positional arguments, which is not a
        # valid constructor call.
        dateline = MultiPolygon([
            (((180.0, 89), (179.95, 89), (179.95, -89), (180.0, -89),
              (180.0, 89)), []),
            (((-180.0, 89), (-179.95, 89), (-179.95, -89), (-180.0, -89),
              (-180.0, 89)), []),
        ])
        polygon = dateline.difference(polygon)
    return polygon
def plot_poloidal_cross_section(ax, geo_axis=True, mag_axis=True, x_axis=True, y_axis=True, ports=()):
    """Draw a poloidal cross-section of the shell onto the given axes.

    Arguments:
    ax       -- Target matplotlib axes object.
    geo_axis -- Mark the geometric axis.
    mag_axis -- Mark the magnetic axis.
    x_axis   -- Draw the dashed x-axis.
    y_axis   -- Draw the dashed y-axis.
    ports    -- Iterable of port objects to carve out of the shell.

    Note: This code will draw the cross-section incorrectly if the
    poloidal angle is zero.
    """
    from shapely.geometry import MultiPolygon

    # Equal aspect ratio.
    ax.set_aspect('equal')

    # Start from the full shell and subtract each port opening.
    shell = MultiPolygon((shapely_shell(), ))
    for port in ports:
        shell = shell.difference(shapely_port_cutter(port))

    # difference() may hand back a bare Polygon; normalise for iteration.
    if shell.type != 'MultiPolygon':
        shell = MultiPolygon((shell, ))

    # Outline every remaining piece in black.
    for piece in shell:
        xs, ys = piece.exterior.xy
        ax.plot(xs, ys, 'k-')

    if geo_axis:
        ax.plot(0, 0, 'ko')
    if mag_axis:
        ax.plot(0.06, 0, 'ko')
    if x_axis:
        ax.plot((-mp.a0, mp.a0), (0, 0), 'k--')
    if y_axis:
        ax.plot((0, 0), (-mp.a0, mp.a0), 'k--')
def DiagnoseAndFixMultiPolygon(attributeName, boundariesList, printDiagnostic=False):
    """Diagnose an invalid MultiPolygon built from boundariesList and return a repaired one.

    Each entry of boundariesList is an (outerBoundary, innerBoundaries) pair of
    coordinate lists. Polygons that combine cleanly are gathered into one
    MultiPolygon; the remainder are unioned in individually (with invalid holes
    dropped), or omitted with a warning when even the outer ring is invalid.
    """
    print "---"
    print "*** %s is invalid. Running diagnostic... ***" % (attributeName)
    # Note: we have already tested each polygon, so the problem is when we add them to a multipolygon
    # the likely cause is overlapping polygons, so we will automatically union those polygons
    #printDiagnostic = True
    goodPolygons = []  # polygons that combine into a valid MultiPolygon
    badPolygons = []   # polygons that break validity when added
    useSlowerCode = False
    numPolygons = len(boundariesList)
    if useSlowerCode:
        # Simple O(n^2) scan: re-test the whole accumulated MultiPolygon
        # after adding each polygon one at a time.
        for i in range(0, numPolygons):
            if i % 20 == 0:
                print "processing polygon", i
            thisPolygon = boundariesList[i]
            shapelyMultiPoly = MultiPolygon(goodPolygons + [thisPolygon])
            if shapelyMultiPoly.is_valid:
                goodPolygons.append(thisPolygon)
            else:
                print "*** problem when adding polygon %d ***" % (i)
                print thisPolygon
                badPolygons.append(thisPolygon)
    else:
        '''
        it can be very slow trying these one at a time, so lets try jumping when
        we can and dropping back to one at a time when we have a problem
        '''
        numPolygons = len(boundariesList)
        if printDiagnostic:
            print "numPolygons", numPolygons
        i = 0
        lineReported = 0
        while True:
            if i >= numPolygons:
                if printDiagnostic:
                    print "reached end of list"
                break  # end of list
            # Progress report roughly every 100 polygons.
            lineToReport = (i / 100) * 100
            if lineToReport != lineReported:
                lineReported = lineToReport
                if i > 0:
                    print "processing polygon %d of %d" % (i, numPolygons)
            continueToTop = False
            # Try progressively smaller batch sizes; take the first that
            # keeps the accumulated MultiPolygon valid.
            for numToJump in [100, 50, 20, 10]:
                if continueToTop:
                    continue
                if i + numToJump > numPolygons:
                    numToJump = numPolygons - i
                    if printDiagnostic:
                        print "adjusting numToJump to", numToJump
                #try the jump
                if printDiagnostic:
                    print "trying jump"
                polygonsToAdd = boundariesList[i:i + numToJump]
                shapelyMultiPoly = MultiPolygon(goodPolygons + polygonsToAdd)
                if shapelyMultiPoly.is_valid:
                    goodPolygons = goodPolygons + polygonsToAdd
                    i = i + numToJump
                    if printDiagnostic:
                        print "jumped to %i" % (i)
                    continueToTop = True
            if continueToTop:
                continue
            #resort to one at a time
            if printDiagnostic:
                print "resorting to one at a time. i = %d" % (i)
            # No jump size worked: test the next numToJump polygons singly.
            for j in range(0, numToJump):
                thisPolygon = boundariesList[i]
                shapelyMultiPoly = MultiPolygon(goodPolygons + [thisPolygon])
                if shapelyMultiPoly.is_valid:
                    goodPolygons.append(thisPolygon)
                else:
                    if True:
                        print "polygon %d is bad" % (i)
                    badPolygons.append(thisPolygon)
                i = i + 1
    # now automatically generate a "fixed" multipolygon by unioning the bad polygons
    shapelyMultiPoly = MultiPolygon(goodPolygons)
    if len(badPolygons) > 0:
        print "---"
        print "*** Handling bad polygons ***"
        for poly in badPolygons:
            print "Bad polygon:", poly
            islandPoly = MultiPolygon([poly])
            if islandPoly.is_valid:
                # Polygon is fine on its own; it only clashed with the others.
                print "Fixed: The bad polygon was a valid polygon, it has been unioned to the whole."
                shapelyMultiPoly = shapelyMultiPoly.union(islandPoly)
            else:
                # try to fix this polygon
                '''
                Outer boundary problems:
                Sometimes with clipping of shoreline, there are cases of the
                outer boundary of the shoreline crossing itself.
                There is not really anything we can do about such cases.
                '''
                # check to see if the outer boundary is valid
                outerBoundary, innerBoundaries = poly
                islandPoly = MultiPolygon([(outerBoundary, [])])
                if not islandPoly.is_valid:
                    print "Outer boundary is not valid."
                    print "Unable to fix this polygon."
                    if attributeName == "MAPLAND":
                        print "This is a minor error, it just means the oil contours will not be clipped to this part of the land."
                    else:
                        print "This is a serious error. Part of the %s area will be missing." % (
                            attributeName)
                    print "---"
                    continue  # we will omit this polygon
                print "The outer boundary of the polygon is valid."
                ##############################
                '''
                Hole problems:
                There are two kinds of problems that can occur with GNOME Analyst
                contours. Sometimes the holes stick slightly out of the
                outerboundary. In such a case we will rely on the fact that the
                holes were just areas to be removed. Sometimes there seem to be
                holes within holes. I'm not sure why GNOME Analyst is doing that,
                but we will assume that the holes were just areas to be removed.
                '''
                numHoles = len(innerBoundaries)
                if numHoles > 0:
                    print "Examing the %d holes..." % (numHoles)
                    assert numHoles > 0  # the only way to get to this part of the code is for a hole to be causing the problem
                numOmittedHoles = 0
                for innerBoundary in innerBoundaries:
                    hole = Polygon(innerBoundary)
                    if not hole.is_valid:
                        numOmittedHoles = numOmittedHoles + 1
                        print "Hole is not valid:", hole
                        print "Omitting this hole. This is a minor error."
                    else:
                        # subtract this hole from the islandPolygon
                        islandPoly = islandPoly.difference(hole)
                if numOmittedHoles > 0:
                    print "Partially fixed: %d invalid holes were not subtracted from this polygon, but this polygon has been unioned to the whole." % (
                        numOmittedHoles)
                elif numHoles > 0:
                    print "Fixed: all holes successfully subtracted from this polygon and the polygon unioned to the whole."
                else:
                    print "Fixed: polygon unioned to the whole."
                shapelyMultiPoly = shapelyMultiPoly.union(islandPoly)
    print "---"
    return shapelyMultiPoly
def ReadMossPolygons(mossBaseFileName, printDiagnostic=False):
    '''
    Read MOSS polygon data from <mossBaseFileName>.ms1 and return shapely
    MultiPolygons (landPolygons, heavyPolygons, mediumPolygons,
    lightPolygons, uncertaintyPolygons); each element is None when the file
    held no polygons for that attribute.

    .ms1 file is fixed format
    Header lines 56 characters
    char 1-5 : Item Number(NEGATIVE IF THE COORDINATES ARE LON/LAT)
    char 16-45: Attribute Name
    char 51-55: Number of coord. pairs
    X,Y, Coordinate pairs 23 characters
    01-11: x coordinate
    12-22: y coordinate
    Long,Lat pairs
    char 01-10: LONGITUDE
    char 11-20: LATITUDE
    char 21-22: FLAG 0-NORMAL 1-INDICATES FIRST POINT OF ISLAND POLYGON
    '''
    # import shapely here so it won't be imported if not needed
    from shapely.geometry import Polygon, MultiPolygon
    # what about MAPBOUND, EXTENDEDOUTLOOKTHREAT
    attributesToRead = [
        "MAPLAND", "FORECASTHEAVY", "FORECASTMEDIUM", "FORECASTLIGHT",
        "FORECASTUNCERTAINTY"
    ]
    # One list of (outerBoundary, innerBoundaries) pairs per attribute.
    landBoundariesList = []
    heavyBoundariesList = []
    mediumBoundariesList = []
    lightBoundariesList = []
    uncertaintyBoundariesList = []
    extension = ".ms1"
    if os.path.exists(mossBaseFileName + extension):
        inFile = file(mossBaseFileName + extension, 'rU')
        alreadyReadNextLine = False
        while True:
            # read the header lines
            if not alreadyReadNextLine:
                line = inFile.readline()
            alreadyReadNextLine = False
            if not line:
                break  # end of this file
            if line.strip() == "":
                continue  # blank line
            itemNum = int(line[0:5])
            assert itemNum < 0  # we expect long/lat values, so the itemNum should be negative
            attributeName = line[15:45].strip()
            numCoordinates = int(line[50:55])
            # note: for some versions of GNOME analyst
            # the number of coordinates in MAPLAND overflows and is reported as a negative number or incorrect number.
            # To support those files, we will ignore the number and just read lines based on the length of the lines
            if printDiagnostic:
                print itemNum, attributeName, numCoordinates
            readingOuterBoundary = True
            coordList = []
            outerBoundary = None
            innerBoundaries = []
            if numCoordinates <= 0:
                print "*** ignoring bad %s header line value: numCoordinates: %d ***" % (
                    attributeName, numCoordinates)
            # read the points for the polygon for this header
            while True:
                #for i in range(0,numCoordinates):
                # since we don't want to rely on numCoordinates
                # we need to look to see if this is the end of this block of coordinates
                ##################
                line = inFile.readline()
                # The lines are fixed format,
                # read until we find a header line
                # header lines lines are longer than coordinate lines
                if (not line) or len(line.strip()) > 50:
                    # must be end of file or a header line
                    alreadyReadNextLine = True
                    break  # out of this while loop
                if attributeName in attributesToRead:
                    #process this line
                    longitudeStr = line[0:10].strip()
                    latitudeStr = line[10:20].strip()
                    flag = line[20:22].strip()
                    if flag == "1":
                        #then we are starting a new "inner hole"
                        # enforce having the last point of the polygon equal the first point
                        if len(coordList) > 0 and coordList[0] != coordList[-1]:
                            coordList.append(coordList[0])
                        # save the previous coordList
                        if len(coordList) >= 4:  # less than 4 would be a degenerate case
                            if readingOuterBoundary:
                                outerBoundary = coordList
                            else:
                                innerBoundaries.append(coordList)
                        # reset the coordinate list
                        coordList = []
                        readingOuterBoundary = False
                    coordList.append((float(longitudeStr), float(latitudeStr)))
            ############# end of while loop
            # finished reading the header
            # record the lists we have filled in
            if not attributeName in attributesToRead:
                continue  # on to the next header line
            # enforce having the last point of the polygon equal the first point
            if len(coordList) > 0 and coordList[0] != coordList[-1]:
                coordList.append(coordList[0])
            #filter out degenerate cases
            if len(coordList) < 4:  # less than 4 would be a degenerate case
                print "*** ignoring degenerate polygon ***"
                continue  # on to the next header line
            # save the coordinate list
            if readingOuterBoundary:
                outerBoundary = coordList
            else:
                innerBoundaries.append(coordList)
            #save thisPolygon
            # NOTE(review): if only holes were read, outerBoundary may still be
            # None here and len(outerBoundary) would raise TypeError — confirm
            # against real .ms1 files.
            if len(outerBoundary) > 0:
                # outerBoundary,innerBoundaries = VerifyAndFixGnomeAnalystPolygon(attributeName,outerBoundary,innerBoundaries)
                if len(outerBoundary) > 0:
                    thisPolygon = (outerBoundary, innerBoundaries)
                    if attributeName == "MAPLAND":
                        landBoundariesList.append(thisPolygon)
                    elif attributeName == "FORECASTHEAVY":
                        heavyBoundariesList.append(thisPolygon)
                    elif attributeName == "FORECASTMEDIUM":
                        mediumBoundariesList.append(thisPolygon)
                    elif attributeName == "FORECASTLIGHT":
                        lightBoundariesList.append(thisPolygon)
                    elif attributeName == "FORECASTUNCERTAINTY":
                        uncertaintyBoundariesList.append(thisPolygon)
        inFile.close()
    # convert the lists of MossPolygons to shapely MultiPolygons
    # (an empty list maps to None; an invalid MultiPolygon goes through the
    # diagnostic/repair path)
    if len(landBoundariesList) == 0:
        landPolygons = None
    else:
        landPolygons = MultiPolygon(landBoundariesList)
        if not landPolygons.is_valid:
            # try analysing and fixing the problem
            landPolygons = DiagnoseAndFixMultiPolygon("MAPLAND",
                                                      landBoundariesList)
    if len(heavyBoundariesList) == 0:
        heavyPolygons = None
    else:
        heavyPolygons = MultiPolygon(heavyBoundariesList)
        if not heavyPolygons.is_valid:
            heavyPolygons = DiagnoseAndFixMultiPolygon("FORECASTHEAVY",
                                                       heavyBoundariesList)
    if len(mediumBoundariesList) == 0:
        mediumPolygons = None
    else:
        mediumPolygons = MultiPolygon(mediumBoundariesList)
        if not mediumPolygons.is_valid:
            mediumPolygons = DiagnoseAndFixMultiPolygon(
                "FORECASTMEDIUM", mediumBoundariesList)
    if len(lightBoundariesList) == 0:
        lightPolygons = None
    else:
        lightPolygons = MultiPolygon(lightBoundariesList)
        if not lightPolygons.is_valid:
            lightPolygons = DiagnoseAndFixMultiPolygon("FORECASTLIGHT",
                                                       lightBoundariesList)
    if len(uncertaintyBoundariesList) == 0:
        uncertaintyPolygons = None
    else:
        uncertaintyPolygons = MultiPolygon(uncertaintyBoundariesList)
        if not uncertaintyPolygons.is_valid:
            uncertaintyPolygons = DiagnoseAndFixMultiPolygon(
                "FORECASTUNCERTAINTY", uncertaintyBoundariesList)
    # clip the oil contours to the shoreline
    # note: we need to check that the polygons are valid before trying to clip to prevent shapely from crashing
    if landPolygons != None:
        if landPolygons.is_valid == False:
            print "*** landPolygons is not valid. We will not clip to the shoreline. ***"
        else:
            if heavyPolygons != None:
                if heavyPolygons.is_valid == False:
                    print "*** heavyPolygons is not valid. It will not be clipped to the shoreline. ***"
                elif heavyPolygons.intersects(landPolygons):
                    print "clipping heavyPolygons to shoreline"
                    heavyPolygons = heavyPolygons.difference(landPolygons)
            if mediumPolygons != None:
                if mediumPolygons.is_valid == False:
                    print "*** mediumPolygons is not valid. It will not be clipped to the shoreline. ***"
                elif mediumPolygons.intersects(landPolygons):
                    print "clipping mediumPolygons to shoreline"
                    mediumPolygons = mediumPolygons.difference(landPolygons)
            if lightPolygons != None:
                if lightPolygons.is_valid == False:
                    print "*** lightPolygons is not valid. It will not be clipped to the shoreline. ***"
                elif lightPolygons.intersects(landPolygons):
                    print "clipping lightPolygons to shoreline"
                    lightPolygons = lightPolygons.difference(landPolygons)
    # note: JerryM wonders we should clip the uncertainty to the shoreline. That it looks better as simple polygons going over the land.
    if uncertaintyPolygons != None:
        if uncertaintyPolygons.is_valid == False:
            print "*** uncertaintyPolygons is not valid. It will not be clipped to the shoreline or oil polygons ***"
        else:
            print "clipping uncertaintyPolygons to shoreline and oil polygons"
            for polygons, nameOfPolygons in [
                (lightPolygons, "lightPolygons"),
                (mediumPolygons, "mediumPolygons"),
                (heavyPolygons, "heavyPolygons"),
                (landPolygons, "landPolygons")
            ]:
                if polygons != None:
                    print "taking difference with", nameOfPolygons
                    newUncertaintyPolygons = uncertaintyPolygons.difference(
                        polygons)
                    print "finished taking difference"
                    if not newUncertaintyPolygons.is_valid:
                        #print "Uncertainty Polygon is no longer valid after taking difference with",polygons,nameOfPolygons
                        # keep the previous (valid) geometry rather than the broken difference
                        s = "*** uncertaintyPolygons have not been clipped to %s ***" % (
                            nameOfPolygons)
                        print s
                    else:
                        uncertaintyPolygons = newUncertaintyPolygons
    return (landPolygons, heavyPolygons, mediumPolygons, lightPolygons,
            uncertaintyPolygons)
def main():
    """Generate random survey plots within a buffered project boundary.

    Buffers the project shapefile, subtracts buffered exclusion layers to form
    an area of interest, scatters random points inside it, optionally joins
    landcover attributes and a raster or vector classification, and writes the
    resulting shapefiles and a log file next to PROJECT_SHAPEFILE.
    """
    # setup outputs
    # global is required as this out of scope variable is modified within the main function sometimes:
    global RASTER_CLASSIFICATION
    dst_file = f'{os.path.splitext(PROJECT_SHAPEFILE)[0]}_plots_SEED-{SEED}{os.path.splitext(PROJECT_SHAPEFILE)[1]}'
    area_dst_file = f'{os.path.splitext(PROJECT_SHAPEFILE)[0]}_AOI{os.path.splitext(PROJECT_SHAPEFILE)[1]}'
    all_values_file = f'{os.path.splitext(PROJECT_SHAPEFILE)[0]}_random_plots_all{os.path.splitext(PROJECT_SHAPEFILE)[1]}'
    target_only_file = f'{os.path.splitext(PROJECT_SHAPEFILE)[0]}_plots_target{os.path.splitext(PROJECT_SHAPEFILE)[1]}'
    # setup logging
    dst_log_file = f'{os.path.splitext(PROJECT_SHAPEFILE)[0]}_plots_SEED-{SEED}.log'
    logger = logging.getLogger(__name__)
    logger.setLevel(logging.INFO)
    logging.basicConfig(filename=dst_log_file, level=logging.INFO,
                        filemode='w', format='%(asctime)s %(message)s')
    logger.info(f'\nPROJECT_SHAPEFILE:\t{PROJECT_SHAPEFILE}\n' + \
                f'SEED:\t\t\t\t{SEED}\n' + \
                f'PROJECT BOUNDARY BUFFER:\t{BOUNDARY_BUFFER}\n' + \
                f'EXCLUSION BUFFER:\t{EXCLUSION_BUFFER}\n')
    # set the random seed
    random.seed(SEED)
    # get some metadata from the PROJECT boundary file
    check_projection(PROJECT_SHAPEFILE, reference='GDA94')
    with fiona.open(PROJECT_SHAPEFILE) as project_boundary:
        meta = project_boundary.meta
        initial_crs = meta['crs']
        bounds = project_boundary.bounds
        # UTM zone at the centroid of the bounding box
        utm_crs = find_utm_zone((bounds[2] + bounds[0]) / 2,
                                (bounds[3] + bounds[1]) / 2)
    # Buffer the project boundary
    project_boundary = get_utm_geoms(PROJECT_SHAPEFILE)
    buffered_project = my_buffer(project_boundary, BOUNDARY_BUFFER)
    print(f'Buffered {len(buffered_project)} project polygons. Empty ones excluded.')
    # Buffer the exclusion
    if not EXCLUSION1 == '':
        check_projection(EXCLUSION1, reference='GDA94')
        exclu1 = get_utm_geoms(EXCLUSION1)
        buffered_exclu1 = my_buffer(exclu1, EXCLUSION_BUFFER)
        print(f'Buffered {len(buffered_exclu1)} exclu1.')
        logger.info(f'\nExcluded exclusion file {EXCLUSION1}\n')
    else:
        buffered_exclu1 = []
    # NOTE(review): EXCLUSION2 feeds the *exclu3* variables and EXCLUSION3
    # feeds *exclu2* — the numbering is swapped but all three are combined
    # below, so behavior is unaffected.
    if not EXCLUSION2 == '':
        check_projection(EXCLUSION2, reference='GDA94')
        exclu3 = get_utm_geoms(EXCLUSION2)
        buffered_exclu3 = my_buffer(exclu3, EXCLUSION_BUFFER)
        print(f'Buffered {len(buffered_exclu3)} exclu3.')
        logger.info(f'\nExcluded exclusion file {EXCLUSION2}\n')
    else:
        buffered_exclu3 = []
    if not EXCLUSION3 == '':
        check_projection(EXCLUSION3, reference='GDA94')
        exclu2 = get_utm_geoms(EXCLUSION3)
        buffered_exclu2 = my_buffer(exclu2, EXCLUSION_BUFFER)
        print(f'Buffered {len(buffered_exclu2)} exclu2.')
        logger.info(f'\nExcluded exclusion file {EXCLUSION3}\n')
    else:
        buffered_exclu2 = []
    # Combine the buffered exclusion layers
    all_cut_features = buffered_exclu1 + buffered_exclu2 + buffered_exclu3
    print(f'Total number of features to cut out of project is {len(all_cut_features)}.')
    # Clip our areas to make one 'area of interest':
    # NB: Make features Multipolygon objects to process the unarary union easier
    # NB: inputs to the MultiPolygon function should not already be multipolygons...
    # This conditional statement to deal with the case where no exclusion inputs
    if len(all_cut_features) == 0:
        area_of_interest = MultiPolygon(buffered_project)
    else:
        if all_cut_features[0].geom_type == 'Polygon':
            all_cut_features_multi = MultiPolygon(all_cut_features)
        # NOTE(review): the assignment below overwrites the MultiPolygon built
        # above — unary_union is applied to the raw feature list either way.
        all_cut_features_multi = unary_union(all_cut_features)
        buffered_project_multi = MultiPolygon(buffered_project)
        area_of_interest = unary_union(
            buffered_project_multi.difference(all_cut_features_multi))
    # output the AOI shapefile for records.
    output_poly_to_file(area_dst_file, area_of_interest, utm_crs, initial_crs)
    # generate the random points,
    # the 'generate_random' function will position them in the center of a hypothetical Sentinel pixel.
    # This is useful as these points will be used in conjunction with a landcover classification using Sentinel data.
    logger.info(f'\nStarted generation of random plots within area of interest...\n')
    points = generate_random(NUM_POINTS, area_of_interest)
    logger.info(f'\nGenerated {len(points)} points.\n')
    # Plot the output for review
    plt.title("Area of Interest and Points")
    for geom in area_of_interest:
        plt.plot(*geom.exterior.xy)
    plt.scatter([point.x for point in points],
                [point.y for point in points], s=1)
    plt.show()
    # output the points
    output_to_file(dst_file, points, utm_crs, initial_crs)
    # Join attributes from landcover
    # TODO: deal with bad landcover filename
    if not LAND_COVER == '':
        check_projection(LAND_COVER, reference='GDA94')
        points_for_join = gpd.read_file(dst_file)
        polys_for_join = gpd.read_file(LAND_COVER)
        print("Adding attributes from Landcover layer...")
        pts_landcover_added = spatial_join(
            points_for_join, polys_for_join,
            fields_to_add=FIELDS_TO_ADD)  # specified above as a constant
        if pts_landcover_added.empty:
            print('WARNING: landcover values not successfully added')
            pts_landcover_added = points_for_join
        else:
            logger.info(f'\nLandcover values added from {LAND_COVER}\n')
    else:
        pts_landcover_added = gpd.read_file(dst_file)
    ###################################
    # GET CLASSIFICATION VALUES SECTION
    # Check if PROJECT_NAME has been specified
    if not PROJECT_NAME == '':
        print('PROJECT_NAME detected... Looking up Classification file...')
        # give RASTER_CLASSIFICATION a value.
        # If it is already defined, it will be overwritten by the file from the lookup table
        try:
            RASTER_CLASSIFICATION = PROJECTS_DICT[PROJECT_NAME]
        except KeyError:
            print(f'({PROJECT_NAME}) does not exist in lookup table, script will continue and check for other options.')
    # now checks if RASTER_CLASSIFICATION has a value yet, either manually specified or looked up...
    if not RASTER_CLASSIFICATION == '':
        check_projection(RASTER_CLASSIFICATION, reference='GDA94')
        try:
            # open() used purely to verify the file exists and is readable
            with open(RASTER_CLASSIFICATION, 'r') as raster_classification:
                print(f'{os.path.basename(RASTER_CLASSIFICATION)} successfully opened')
                pts_classification_added = get_raster_vals(
                    pts_landcover_added, RASTER_CLASSIFICATION)
                # export file of all points, and one with just those in the TARGET area (classification ==1)
                print('Writing final output shapefiles - TARGET status from raster')
                pts_classification_added.to_file(all_values_file, encoding='utf-8')
                pts_target_only = pts_classification_added.loc[
                    pts_classification_added['Class'] == 1]
                pts_target_only.to_file(target_only_file, encoding='utf-8')
                logger.info(f'\nClassification values added from {RASTER_CLASSIFICATION}\n')
        except FileNotFoundError:
            print('Raster Class filepath is invalid, script will continue and check for other options.')
    # VECTOR VERSION
    elif not VECTOR_CLASSIFICATION == '':
        check_projection(VECTOR_CLASSIFICATION, reference='GDA94')
        classification_polys = gpd.read_file(VECTOR_CLASSIFICATION)
        # find classification column
        classification_polys_columns = classification_polys.columns.to_list()
        classification_field = None
        while classification_field not in classification_polys_columns:
            print(f'The columns of the VECTOR_CLASSIFICATION file are {classification_polys_columns}')
            classification_field = input(
                'Please enter the exact name of the column that contains the relevant classification values:')
        # Add that field:
        pts_classification_added = spatial_join(
            pts_landcover_added, classification_polys,
            fields_to_add=['geometry', classification_field])
        logger.info(f'\nClassification values added from {VECTOR_CLASSIFICATION}\n')
        # find points in target and export target only
        classification_field_values = classification_polys[
            classification_field].unique().tolist()
        # remember data type of the TARGET field(to deal with user specifying integers as strings)
        classification_column_dtype = type(classification_field_values[0])
        # convert our list of the classification field values to strings, so that the user can choose between them
        classification_field_values = [str(i) for i in classification_field_values]
        # make a blank list, for the user to specify values that mean a polygon is TARGET
        # this will be built as a list of strings, and later converted back to a numeric type if they were initially.
        target_designator_list = []
        target_designator = None
        # NOTE(review): initialised to the *string* 'True' so the loop body
        # runs at least once; it is replaced by a real bool below.
        add_another_value = 'True'
        while add_another_value:
            print('The unique values of the classification column are:')
            for s in classification_field_values:
                print(s)
            # keeps asking until user provides a valid input, will also accept a blank string (return)
            while target_designator not in classification_field_values + ['']:
                target_designator = input(
                    'Please enter the exact name of the value that indicates a polygon is TARGET\n'
                    '(hit return to skip):')
            # allow a user to hit return in the last step and skip addition of values. The lists are not modified.
            if not target_designator == '':
                target_designator_list.append(target_designator)  # Add the value to the list
                classification_field_values.remove(
                    target_designator)  # Remove the value from the list of options to add
            # while loop limits the user responses to 'Y' or ''
            while not add_another_value in ['', 'Y']:
                print(f'Your specified TARGET values are {target_designator_list}')
                add_another_value = input('Please type Y if you would like to add another value to specify TARGET\n'
                                          '(hit return to skip):')
            if add_another_value == '':
                add_another_value = False
            elif add_another_value == 'Y':
                add_another_value = True
        # export file of all points, and one with just those in the TARGET area (classification == target_designator)
        print('Writing final output shapefiles - TARGET status from vector')
        pts_classification_added.to_file(all_values_file, encoding='utf-8')
        # convert the TARGET designator list back to its original datatype
        # (stored in classification_field_dtype variable)
        # this happens if the classification field datatype was numeric in the original shapefile
        target_designator_list = [classification_column_dtype(i)
                                  for i in target_designator_list]
        pts_target_only = pts_classification_added.loc[
            pts_classification_added[classification_field].isin(target_designator_list)]
        pts_target_only.to_file(target_only_file, encoding='utf-8')
    # NO CLASSIFICATION PROVIDED
    else:
        print("No Classification provided as raster or vector layer. TARGET subset not exported.")
        pts_landcover_added.to_file(all_values_file, encoding='utf-8')
def main():
    """Erase the ERASE_SHP geometry from POLYS_SHP and write the result.

    Repairs/unions the erase layer with arcpy, converts both layers to shapely
    MultiPolygons via pyshp, subtracts one from the other, and saves the
    difference to OUT_FOLDER/OUT_SHP.shp (holes preserved as interior rings).
    Relies on module-level ERASE_SHP, POLYS_SHP, OUT_FOLDER, OUT_SHP and
    ConvertPolys.
    """
    global ERASE_SHP
    # Ensure the erase layer is a shapefile; copy feature classes to in_memory.
    if not str(ERASE_SHP).endswith('.shp'):
        arcpy.AddMessage("Converting Erase feature class to shapefile...")
        NEW_ERASE_SHP = os.path.join("in_memory", OUT_SHP + "_1")
        arcpy.CopyFeatures_management(ERASE_SHP, NEW_ERASE_SHP)
        ERASE_SHP = NEW_ERASE_SHP
    # fix veg polys... there is likely bad geometry (self intersecting rings, overlaping polys, etc.)
    arcpy.AddMessage("Repairing potential invalid geometry with Erase polys...")
    ERASE_SHP_UNION = os.path.join("in_memory", OUT_SHP + "_union")
    arcpy.Union_analysis(ERASE_SHP, ERASE_SHP_UNION, "ALL", "1 FEET", "GAPS")
    arcpy.DeleteIdentical_management(ERASE_SHP_UNION, "Shape")
    # ERASE_SHP_UNION_DISS = os.path.join(OUT_FOLDER, OUT_SHP + '_union_diss_repair.shp')
    ERASE_SHP_UNION_DISS = os.path.join(OUT_FOLDER,
                                        OUT_SHP + '_union_diss_repair.shp')
    arcpy.Dissolve_management(ERASE_SHP_UNION, ERASE_SHP_UNION_DISS, "", "",
                              "SINGLE_PART")
    arcpy.RepairGeometry_management(ERASE_SHP_UNION_DISS, 'DELETE_NULL')
    ERASE_SHP = ERASE_SHP_UNION_DISS
    arcpy.AddMessage("Created:\n" + str(ERASE_SHP))
    # ESRI -> PYSHP
    arcpy.AddMessage("Reading shapefiles...")
    shpA = shapefile.Reader(POLYS_SHP)
    shpB = shapefile.Reader(ERASE_SHP)
    # PYSHP -> SHAPELY
    arcpy.AddMessage("Converting IVM Polygons...")
    shpA_polys = ConvertPolys(shpA)
    shpA_multipolys = MultiPolygon(shpA_polys)
    arcpy.AddMessage("Converting Erase Polygons...")
    shpB_polys = ConvertPolys(shpB)
    shpB_multipolys = MultiPolygon(shpB_polys)
    # SHAPELY
    arcpy.AddMessage("Performing Erase...")
    arcpy.AddMessage(time.strftime("%H:%M"))
    try:
        shpC = shpA_multipolys.difference(shpB_multipolys)  # SHAPELY [(x,y),(x,y),...]
    except Exception as error:
        # NOTE(review): `error.message` is a Python-2-era attribute; under
        # Python 3 this line itself would raise AttributeError — confirm the
        # intended runtime.
        message = error.message
        args = error.args
        raise error
    arcpy.AddMessage(time.strftime("%H:%M"))
    # SHAPELY -> PYSHP
    FINAL_SHP = os.path.join(OUT_FOLDER, OUT_SHP + ".shp")
    arcpy.AddMessage("Saving: " + os.path.basename(FINAL_SHP))
    w = shapefile.Writer(shapefile.POLYGON)
    w.field('ID')
    for i, geom in enumerate(list(shpC.geoms)):
        shpC_exterior = []
        shpC_pyshp_fmt = []
        # get exterior rings
        for coord in geom.exterior.coords:
            x_y = [coord[0], coord[1]]  # PYSHP [[[x,y],[x,y],...]]
            shpC_exterior.append(x_y)
        shpC_pyshp_fmt.append(shpC_exterior)
        # get interior rings
        if len(list(geom.interiors)) > 0:
            # NOTE(review): this inner loop reuses the index name `i` from the
            # outer loop; harmless here since `i` is not read afterwards.
            for i, ring in enumerate(list(geom.interiors)):
                shpC_interior = []
                for coord in list(ring.coords):
                    x_y = [coord[0], coord[1]]
                    shpC_interior.append(x_y)
                ##check sign, counter clockwise point order creates hole, else overlapping poly
                #if shapefile.signed_area(list(ring.coords)) >= 0:
                #shpC_interior.reverse()
                shpC_pyshp_fmt.append(shpC_interior)
        w.poly(shpC_pyshp_fmt)
        w.record(ID='0')
    w.save(FINAL_SHP)
    arcpy.AddMessage("Done!")
def DiagnoseAndFixMultiPolygon(attributeName,boundariesList, printDiagnostic = False):
    """Diagnose an invalid MultiPolygon built from boundariesList and return a repaired one.

    NOTE(review): this is a second, near-identical copy of the earlier
    DiagnoseAndFixMultiPolygon definition; at import time the later definition
    shadows the earlier one.

    Each entry of boundariesList is an (outerBoundary, innerBoundaries) pair of
    coordinate lists. Polygons that combine cleanly are gathered into one
    MultiPolygon; the rest are unioned in individually (invalid holes dropped),
    or omitted with a warning when even the outer ring is invalid.
    """
    print "---"
    print "*** %s is invalid. Running diagnostic... ***"%(attributeName)
    # Note: we have already tested each polygon, so the problem is when we add them to a multipolygon
    # the likely cause is overlapping polygons, so we will automatically union those polygons
    #printDiagnostic = True
    goodPolygons = []  # polygons that combine into a valid MultiPolygon
    badPolygons = []   # polygons that break validity when added
    useSlowerCode = False
    numPolygons = len(boundariesList)
    if useSlowerCode:
        # Simple one-at-a-time scan, re-validating after every addition.
        for i in range(0,numPolygons) :
            if i % 20 == 0:
                print "processing polygon", i
            thisPolygon = boundariesList[i]
            shapelyMultiPoly = MultiPolygon(goodPolygons + [thisPolygon])
            if shapelyMultiPoly.is_valid:
                goodPolygons.append(thisPolygon)
            else:
                print "*** problem when adding polygon %d ***"%(i)
                print thisPolygon
                badPolygons.append(thisPolygon)
    else:
        '''
        it can be very slow trying these one at a time,
        so lets try jumping when we can
        and dropping back to one at a time when we have a problem
        '''
        numPolygons = len(boundariesList)
        if printDiagnostic:
            print "numPolygons",numPolygons
        i = 0
        lineReported = 0
        while True:
            if i >= numPolygons:
                if printDiagnostic:
                    print "reached end of list"
                break # end of list
            # Progress report roughly every 100 polygons.
            lineToReport = (i/100)*100
            if lineToReport != lineReported:
                lineReported = lineToReport
                if i > 0:
                    print "processing polygon %d of %d"%(i,numPolygons)
            continueToTop = False
            # Try progressively smaller batches; accept the first valid jump.
            for numToJump in [100,50,20,10]:
                if continueToTop:
                    continue
                if i + numToJump > numPolygons:
                    numToJump = numPolygons - i;
                    if printDiagnostic:
                        print "adjusting numToJump to",numToJump
                #try the jump
                if printDiagnostic:
                    print "trying jump"
                polygonsToAdd = boundariesList[i:i+numToJump]
                shapelyMultiPoly = MultiPolygon(goodPolygons + polygonsToAdd)
                if shapelyMultiPoly.is_valid:
                    goodPolygons = goodPolygons + polygonsToAdd
                    i = i + numToJump
                    if printDiagnostic:
                        print "jumped to %i"%(i)
                    continueToTop = True
            if continueToTop:
                continue
            #resort to one at a time
            if printDiagnostic:
                print "resorting to one at a time. i = %d"%(i)
            # No jump size worked: test the next numToJump polygons singly.
            for j in range(0,numToJump):
                thisPolygon = boundariesList[i]
                shapelyMultiPoly = MultiPolygon(goodPolygons + [thisPolygon])
                if shapelyMultiPoly.is_valid:
                    goodPolygons.append(thisPolygon)
                else:
                    if True:
                        print "polygon %d is bad"%(i)
                    badPolygons.append(thisPolygon)
                i = i+1
    # now automatically generate a "fixed" multipolygon by unioning the bad polygons
    shapelyMultiPoly = MultiPolygon(goodPolygons)
    if len(badPolygons) > 0:
        print "---"
        print "*** Handling bad polygons ***"
        for poly in badPolygons:
            print "Bad polygon:", poly
            islandPoly = MultiPolygon([poly])
            if islandPoly.is_valid:
                # Polygon is fine alone; it only clashed with the others.
                print "Fixed: The bad polygon was a valid polygon, it has been unioned to the whole."
                shapelyMultiPoly = shapelyMultiPoly.union(islandPoly)
            else:
                # try to fix this polygon
                '''
                Outer boundary problems:
                Sometimes with clipping of shoreline, there are cases of the
                outer boundary of the shoreline crossing itself.
                There is not really anything we can do about such cases.
                '''
                # check to see if the outer boundary is valid
                outerBoundary,innerBoundaries = poly
                islandPoly = MultiPolygon([(outerBoundary,[])])
                if not islandPoly.is_valid:
                    print "Outer boundary is not valid."
                    print "Unable to fix this polygon."
                    if attributeName == "MAPLAND":
                        print "This is a minor error, it just means the oil contours will not be clipped to this part of the land."
                    else:
                        print "This is a serious error. Part of the %s area will be missing."%(attributeName)
                    print "---"
                    continue # we will omit this polygon
                print "The outer boundary of the polygon is valid."
                ##############################
                '''
                Hole problems:
                There are two kinds of problems that can occur with GNOME Analyst
                contours. Sometimes the holes stick slightly out of the
                outerboundary. In such a case we will rely on the fact that the
                holes were just areas to be removed. Sometimes there seem to be
                holes within holes. I'm not sure why GNOME Analyst is doing that,
                but we will assume that the holes were just areas to be removed.
                '''
                numHoles = len(innerBoundaries)
                if numHoles > 0:
                    print "Examing the %d holes..."%(numHoles)
                    assert numHoles > 0 # the only way to get to this part of the code is for a hole to be causing the problem
                numOmittedHoles = 0
                for innerBoundary in innerBoundaries:
                    hole = Polygon(innerBoundary)
                    if not hole.is_valid:
                        numOmittedHoles = numOmittedHoles + 1
                        print "Hole is not valid:", hole
                        print "Omitting this hole. This is a minor error."
                    else:
                        # subtract this hole from the islandPolygon
                        islandPoly = islandPoly.difference(hole)
                if numOmittedHoles > 0:
                    print "Partially fixed: %d invalid holes were not subtracted from this polygon, but this polygon has been unioned to the whole."%(numOmittedHoles)
                elif numHoles > 0:
                    print "Fixed: all holes successfully subtracted from this polygon and the polygon unioned to the whole."
                else :
                    print "Fixed: polygon unioned to the whole."
                shapelyMultiPoly = shapelyMultiPoly.union(islandPoly)
    print "---"
    return shapelyMultiPoly
def ReadMossPolygons(mossBaseFileName, printDiagnostic = False):
    '''
    Read a GNOME Analyst MOSS ``.ms1`` file and return shapely MultiPolygons.

    Parameters
    ----------
    mossBaseFileName : str
        Path of the MOSS file set without the extension; ".ms1" is appended.
    printDiagnostic : bool
        If True, print each header's item number, attribute name and
        coordinate count as it is read.

    Returns
    -------
    tuple of (landPolygons, heavyPolygons, mediumPolygons, lightPolygons,
    uncertaintyPolygons) -- each a shapely MultiPolygon, or None when no
    boundaries of that attribute were found (or the file does not exist).
    The oil contours are clipped to the shoreline, and the uncertainty
    polygons are additionally clipped to the oil contours, where validity
    allows.

    File format (fixed width)
    -------------------------
    Header lines, 56 characters:
        char  1-5  : item number (NEGATIVE if the coordinates are lon/lat)
        char 16-45 : attribute name
        char 51-55 : number of coordinate pairs
    X,Y coordinate pairs, 23 characters:
        char  1-11 : x coordinate
        char 12-22 : y coordinate
    Long,Lat pairs:
        char  1-10 : longitude
        char 11-20 : latitude
        char 21-22 : flag; 0 = normal, 1 = first point of an island polygon
    '''
    # import shapely here so it won't be imported if not needed
    from shapely.geometry import Polygon, MultiPolygon
    # what about MAPBOUND, EXTENDEDOUTLOOKTHREAT
    attributesToRead = ["MAPLAND","FORECASTHEAVY","FORECASTMEDIUM","FORECASTLIGHT","FORECASTUNCERTAINTY"]
    # one (outerBoundary, innerBoundaries) tuple per polygon, per attribute
    landBoundariesList = []
    heavyBoundariesList = []
    mediumBoundariesList = []
    lightBoundariesList = []
    uncertaintyBoundariesList = []
    extension = ".ms1"
    if os.path.exists(mossBaseFileName + extension):
        inFile = file(mossBaseFileName + extension, 'rU')
        # True when the coordinate-reading loop below has already consumed the
        # next header line as its lookahead, so the outer loop must not read.
        alreadyReadNextLine = False
        while True:
            # read the header lines
            if not alreadyReadNextLine:
                line = inFile.readline()
            alreadyReadNextLine = False
            if not line:
                break  # end of this file
            if line.strip() == "" :
                continue;  # blank line
            itemNum = int(line[0:5])
            assert itemNum < 0  # we expect long/lat values, so the itemNum should be negative
            attributeName = line[15:45].strip()
            numCoordinates = int(line[50:55])
            # note: for some versions of GNOME analyst the number of
            # coordinates in MAPLAND overflows and is reported as a negative
            # number or incorrect number. To support those files, we will
            # ignore the number and just read lines based on the length of
            # the lines.
            if printDiagnostic:
                print itemNum,attributeName,numCoordinates
            # state for assembling one polygon: the first ring read is the
            # outer boundary, subsequent rings (flag "1") are holes/islands
            readingOuterBoundary = True
            coordList = []
            outerBoundary = None
            innerBoundaries = []
            if numCoordinates <= 0:
                print "*** ignoring bad %s header line value: numCoordinates: %d ***"%(attributeName,numCoordinates)
            # read the points for the polygon for this header
            while True:  # for i in range(0,numCoordinates):
                # since we don't want to rely on numCoordinates we need to
                # look to see if this is the end of this block of coordinates
                line = inFile.readline()
                # The lines are fixed format; read until we find a header
                # line. Header lines are longer than coordinate lines.
                if (not line) or len(line.strip()) > 50 :
                    # must be end of file or a header line
                    alreadyReadNextLine = True
                    break  # out of this while loop
                if attributeName in attributesToRead:
                    # process this line
                    longitudeStr = line[0:10].strip()
                    latitudeStr = line[10:20].strip()
                    flag = line[20:22].strip()
                    if flag == "1" :
                        # then we are starting a new "inner hole"
                        # enforce having the last point of the polygon equal the first point
                        if len(coordList) > 0 and coordList[0] != coordList[-1]:
                            coordList.append(coordList[0])
                        # save the previous coordList
                        if len(coordList) >= 4:  # less than 4 would be a degenerate case
                            if readingOuterBoundary:
                                outerBoundary = coordList
                            else:
                                innerBoundaries.append(coordList)
                        # reset the coordinate list
                        coordList = []
                        readingOuterBoundary = False
                    coordList.append((float(longitudeStr),float(latitudeStr)))
            # ############ end of while loop
            # finished reading the header
            # record the lists we have filled in
            if not attributeName in attributesToRead:
                continue  # on to the next header line
            # enforce having the last point of the polygon equal the first point
            if len(coordList) > 0 and coordList[0] != coordList[-1]:
                coordList.append(coordList[0])
            # filter out degenerate cases
            if len(coordList) < 4:  # less than 4 would be a degenerate case
                print "*** ignoring degenerate polygon ***"
                continue  # on to the next header line
            # save the coordinate list
            if readingOuterBoundary:
                outerBoundary = coordList
            else:
                innerBoundaries.append(coordList)
            # save thisPolygon
            # NOTE(review): if the first (outer) ring was degenerate but a
            # later ring was kept, outerBoundary may still be None here and
            # len(None) would raise TypeError -- relies on well-formed input.
            if len (outerBoundary) > 0 :
                # outerBoundary,innerBoundaries = VerifyAndFixGnomeAnalystPolygon(attributeName,outerBoundary,innerBoundaries)
                if len (outerBoundary) > 0 :
                    # (outer ring, list of holes) -- the shape MultiPolygon expects
                    thisPolygon = (outerBoundary,innerBoundaries)
                    if attributeName == "MAPLAND":
                        landBoundariesList.append(thisPolygon)
                    elif attributeName == "FORECASTHEAVY":
                        heavyBoundariesList.append(thisPolygon)
                    elif attributeName == "FORECASTMEDIUM":
                        mediumBoundariesList.append(thisPolygon)
                    elif attributeName == "FORECASTLIGHT":
                        lightBoundariesList.append(thisPolygon)
                    elif attributeName == "FORECASTUNCERTAINTY":
                        uncertaintyBoundariesList.append(thisPolygon)
        inFile.close()
    # convert the lists of MossPolygons to shapely MultiPolygons;
    # invalid geometry is handed to DiagnoseAndFixMultiPolygon for repair
    if len(landBoundariesList) == 0:
        landPolygons = None
    else:
        landPolygons = MultiPolygon(landBoundariesList)
        if not landPolygons.is_valid:
            # try analysing and fixing the problem
            landPolygons = DiagnoseAndFixMultiPolygon("MAPLAND",landBoundariesList)
    if len(heavyBoundariesList) == 0:
        heavyPolygons = None
    else:
        heavyPolygons = MultiPolygon(heavyBoundariesList)
        if not heavyPolygons.is_valid:
            heavyPolygons = DiagnoseAndFixMultiPolygon("FORECASTHEAVY",heavyBoundariesList)
    if len(mediumBoundariesList) == 0:
        mediumPolygons = None
    else:
        mediumPolygons = MultiPolygon(mediumBoundariesList)
        if not mediumPolygons.is_valid:
            mediumPolygons = DiagnoseAndFixMultiPolygon("FORECASTMEDIUM",mediumBoundariesList)
    if len(lightBoundariesList) == 0:
        lightPolygons = None
    else:
        lightPolygons = MultiPolygon(lightBoundariesList)
        if not lightPolygons.is_valid:
            lightPolygons = DiagnoseAndFixMultiPolygon("FORECASTLIGHT",lightBoundariesList)
    if len(uncertaintyBoundariesList) == 0:
        uncertaintyPolygons = None
    else:
        uncertaintyPolygons = MultiPolygon(uncertaintyBoundariesList)
        if not uncertaintyPolygons.is_valid:
            uncertaintyPolygons = DiagnoseAndFixMultiPolygon("FORECASTUNCERTAINTY",uncertaintyBoundariesList)
    # clip the oil contours to the shoreline
    # note: we need to check that the polygons are valid before trying to
    # clip to prevent shapely from crashing
    if landPolygons != None :
        if landPolygons.is_valid == False:
            print "*** landPolygons is not valid. We will not clip to the shoreline. ***"
        else :
            if heavyPolygons != None:
                if heavyPolygons.is_valid == False:
                    print "*** heavyPolygons is not valid. It will not be clipped to the shoreline. ***"
                elif heavyPolygons.intersects(landPolygons):
                    print "clipping heavyPolygons to shoreline"
                    heavyPolygons = heavyPolygons.difference(landPolygons)
            if mediumPolygons != None:
                if mediumPolygons.is_valid == False:
                    print "*** mediumPolygons is not valid. It will not be clipped to the shoreline. ***"
                elif mediumPolygons.intersects(landPolygons):
                    print "clipping mediumPolygons to shoreline"
                    mediumPolygons = mediumPolygons.difference(landPolygons)
            if lightPolygons != None:
                if lightPolygons.is_valid == False:
                    print "*** lightPolygons is not valid. It will not be clipped to the shoreline. ***"
                elif lightPolygons.intersects(landPolygons):
                    print "clipping lightPolygons to shoreline"
                    lightPolygons = lightPolygons.difference(landPolygons)
            # note: JerryM wonders we should clip the uncertainty to the
            # shoreline. That it looks better as simple polygons going over
            # the land.
            if uncertaintyPolygons != None:
                if uncertaintyPolygons.is_valid == False:
                    print "*** uncertaintyPolygons is not valid. It will not be clipped to the shoreline or oil polygons ***"
                else:
                    print "clipping uncertaintyPolygons to shoreline and oil polygons"
                    # subtract each oil contour and the land in turn; a
                    # difference that yields invalid geometry is discarded
                    # (best-effort clipping, keeps the last valid result)
                    for polygons,nameOfPolygons in [(lightPolygons,"lightPolygons"),(mediumPolygons,"mediumPolygons"),(heavyPolygons,"heavyPolygons"),(landPolygons,"landPolygons")]:
                        if polygons != None:
                            print "taking difference with",nameOfPolygons
                            newUncertaintyPolygons = uncertaintyPolygons.difference(polygons)
                            print "finished taking difference"
                            if not newUncertaintyPolygons.is_valid:
                                # print "Uncertainty Polygon is no longer valid after taking difference with",polygons,nameOfPolygons
                                s = "*** uncertaintyPolygons have not been clipped to %s ***"%(nameOfPolygons)
                                print s
                            else:
                                uncertaintyPolygons = newUncertaintyPolygons
    return (landPolygons,heavyPolygons,mediumPolygons,lightPolygons,uncertaintyPolygons)