def test_suite():
    try:
        patches = [Point((0, 0)).buffer(0.05)]
        unary_union(patches)
    except KeyError:
        return lambda x: None
    return unittest.TestLoader().loadTestsFromTestCase(UnionTestCase)
def createUnaryUnion(self, allAreas):
    '''Given a set of areas, this method constructs a unary union for them'''
    try:
        # Construct a unary_union, assuming there are no errors in the
        # geometry.
        allDsgnPlygons = unary_union(allAreas)
    except Exception as e1:
        # If there are errors while constructing the union, examine the
        # geometries further to keep just the valid polygons. To avoid this
        # error, ensure that the evaluation features are topologically
        # correct, e.g. by using a geometry checker in GIS tools.
        s1All = []
        try:
            s1Polygons = MultiPolygon(
                [x for x in allAreas
                 if (x.geom_type == 'Polygon' or x.geom_type == 'MultiPolygon')
                 and x.is_valid])
            if s1Polygons:
                s1All.append(s1Polygons)
        except Exception as e:
            print('Error in CreateUnaryUnion Polygon: %s' % e)
        else:
            if s1All:
                allDsgnPlygons = unary_union(s1All)
            else:
                allDsgnPlygons = ''
    return allDsgnPlygons
def test_unary_union_multi(self):
    # Test of multipart input based on comment by @schwehr at
    # https://github.com/Toblerity/Shapely/issues/47#issuecomment-21809308
    patches = MultiPolygon([Point(xy).buffer(0.05) for xy in self.coords])
    self.assertAlmostEqual(unary_union(patches).area, 0.71857254056)
    self.assertAlmostEqual(unary_union([patches, patches]).area, 0.71857254056)
def move(self, displacement):
    sweep = aa_get_sweep(self.agent, displacement)
    if self.attached is not None:
        poly = self.poly[:self.attached] + self.poly[self.attached+1:]
        config_obstacle = unary_union(self.config_poly[:self.attached]
                                      + self.config_poly[self.attached+1:])
    else:
        poly = self.poly
        config_obstacle = self.config_objects
    self.intersect = unary_union(poly).intersection(sweep)
    self.config_intersect = config_obstacle.intersection(
        LineString([self.agent.exterior.coords[0],
                    np.array(self.agent.exterior.coords[0]) + np.array(displacement)]))
    # print(self.config_intersect)
    if self.config_intersect.is_empty or self.config_intersect.geom_type == 'Point':
        self.intersect = None
        self.config_intersect = None
    else:
        if self.config_intersect.geom_type in ('GeometryCollection', 'MultiLineString'):
            points = np.vstack([np.array(p.coords)
                                for p in self.config_intersect
                                if p.geom_type == 'LineString'])
        elif self.config_intersect.geom_type == 'LineString':
            points = np.array(self.config_intersect.coords)
        i = np.argmin(np.linalg.norm(
            points - np.array(self.agent.exterior.coords[0])[None, :], axis=1))
        point = points[i]
        displacement = point - np.array(self.agent.exterior.coords[0])
    if self.attached is not None:
        index = self.attached
        poly_intersect = unary_union(self.obs_config[index]).intersection(
            LineString([self.poly[index].exterior.coords[0],
                        np.array(self.poly[index].exterior.coords[0]) + np.array(displacement)]))
        # print('obs', poly_intersect)
        if poly_intersect.is_empty or poly_intersect.geom_type == 'Point':
            pass
        else:
            if poly_intersect.geom_type in ('GeometryCollection', 'MultiLineString'):
                points = np.vstack([np.array(p.coords)
                                    for p in poly_intersect
                                    if p.geom_type == 'LineString'])
            elif poly_intersect.geom_type == 'LineString':
                points = np.array(poly_intersect.coords)
            i = np.argmin(np.linalg.norm(
                points - np.array(self.poly[index].exterior.coords[0])[None, :], axis=1))
            point = points[i]
            obs_pos = point
            displacement = point - np.array(self.poly[index].exterior.coords[0])
            self.move_obs(index, displacement)
    self.agent = translate(self.agent, xoff=displacement[0], yoff=displacement[1])
    self.last_sweep = sweep
def createPhotoFoursects():
    """Create foursects out of smaller section units for areas covered by the
    aerial flight defined in the 'photo_qtr_sects' shapefile"""

    # the mapping must be reversed to be used as a tool to define groups
    reverse_mapping = {i: k for k, v in foursect_mapping.items() for i in v}

    # union the photo quarter sections into a single geometry
    qtr_sect_geoms = []
    with fiona.open(photo_qtr_sects) as q_sects:
        for qs in q_sects:
            qtr_sect_geoms.append(shape(qs['geometry']))
    unioned_qs = unary_union(qtr_sect_geoms)

    foursect_dict = {}
    with fiona.open(sections) as sects:
        meta_data = sects.meta.copy()
        for s in sects:
            geom = shape(s['geometry'])
            if geom.centroid.intersects(unioned_qs):
                # derive the foursect id of each section based on its section
                # id and the foursect mapping dictionary
                sect_id = s['properties']['SECTION']
                township = sect_id[:4]
                foursect = reverse_mapping[int(sect_id[4:])]
                foursect_id = '{0}-{1}'.format(township, foursect)

                if foursect_id not in foursect_dict:
                    foursect_dict[foursect_id] = [geom]
                else:
                    foursect_dict[foursect_id].append(geom)

    # modify properties such that only the field 'foursect' exists
    new_properties = [('foursect', 'str')]
    meta_data['schema']['properties'] = new_properties

    with fiona.open(foursects, 'w', **meta_data) as f_sects:
        for fid, geoms in foursect_dict.items():
            # 'mapping' converts the geometry returned by unary_union into a
            # GeoJSON-like dict that fiona can write
            fs_feat = {
                'geometry': mapping(unary_union(geoms)),
                'properties': {'foursect': fid}}
            f_sects.write(fs_feat)
def test_1(self):
    # Instead of random points, use deterministic, pseudo-random Halton
    # sequences for repeatability's sake.
    patches = [Point(xy).buffer(0.05) for xy in self.coords]
    u = unary_union(patches)
    self.assertEqual(u.geom_type, 'MultiPolygon')
    self.assertAlmostEqual(u.area, 0.71857254056)
def test_unary_union(self):
    p1 = self.t1
    p2 = Polygon([(2, 0), (3, 0), (3, 1)])
    expected = unary_union([p1, p2])
    g = GeoSeries([p1, p2])
    self._test_unary_topological('unary_union', expected, g)
def get_intersections(roads):
    """Calculates the intersection points of all roads
    :param roads: List of shapely geometries representing road segments
    """
    intersections = []
    for road1, road2 in itertools.combinations(roads, 2):
        if road1.intersects(road2):
            intersection = road1.intersection(road2)
            if 'Point' == intersection.type:
                intersections.append(intersection)
            elif 'MultiPoint' == intersection.type:
                intersections.extend([pt for pt in intersection])
            elif 'MultiLineString' == intersection.type:
                multiLine = [line for line in intersection]
                first_coords = multiLine[0].coords[0]
                last_coords = multiLine[-1].coords[1]
                intersections.append(Point(first_coords[0], first_coords[1]))
                intersections.append(Point(last_coords[0], last_coords[1]))
            elif 'GeometryCollection' == intersection.type:
                intersections.extend(get_intersections(intersection))

    # The unary_union removes duplicate points
    unioned = unary_union(intersections)

    # Ensure the result is a MultiPoint, since calling functions expect an iterable
    if 'Point' == unioned.type:
        unioned = MultiPoint([unioned])

    return unioned
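# A minimal usage sketch for get_intersections above (added for illustration,
# not part of the original source); it assumes a Shapely version where the
# deprecated geom.type alias used by the function is still available (1.x).
def _demo_get_intersections():
    from shapely.geometry import LineString
    # two hypothetical crossing segments share exactly one point, (1, 1)
    roads = [LineString([(0, 0), (2, 2)]), LineString([(0, 2), (2, 0)])]
    points = get_intersections(roads)
    for pt in points.geoms:  # a MultiPoint with duplicates already removed
        print(pt.x, pt.y)    # prints "1.0 1.0" once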
def get_extents_from_huc(huc_data_shp=None, extents_output_shp=None, extents_huc_list=None):
    '''Extracts a user-specified HUC or list of HUCs from the national dataset
    and writes it to a shapefile. 'huc_data_shp' is the shapefile that includes
    the HUC polygons to be extracted.'''
    extents_huc_scale = len(extents_huc_list[0])
    huc_field = 'HUC' + str(extents_huc_scale)

    with fiona.open(huc_data_shp) as vin:
        schema = vin.schema
        crs = vin.crs
        driver = vin.driver

    # Reduce the extract schema to only the huc id field
    schema['properties'] = {huc_field: 'str'}

    # Now write the model domain shapefile
    with fiona.open(huc_data_shp) as vect_in:
        polygon_list = []
        for feature in vect_in:
            if feature['properties'][huc_field] in extents_huc_list:
                polygon_list.append(shape(feature['geometry']))
        merged = unary_union(polygon_list)

    with fiona.open(extents_output_shp, 'w', driver=driver, crs=crs, schema=schema) as extract_out:
        extract_out.write({'geometry': mapping(merged),
                           'properties': {huc_field: 'Merged'}})
    return
def _compute_limits(self, region_id=None):
    """WARNING: probably not yet completely implemented.

    Parameters
    ----------
    region_id: integer or None (default)
        the region id information.

    Returns
    -------
    limits: array_like
        the limits information.

    """
    if region_id is None:
        polygons = tesselation(self.regionlocs)
        whole = ops.unary_union(polygons)
        limits = np.array(whole.bounds).reshape((2, 2)).T
    else:
        polygons = tesselation(self.regionlocs)
        i_r = match_regions(polygons, self.regionlocs)
        regionsid = self.regions_id[i_r]
        p = polygons[np.where(regionsid == region_id)[0]]
        limits = np.array(p.bounds).reshape((2, 2)).T
    return limits
def smooth_layer(record_ls, buf_val, scale_km_to_degrees=0.009,
                 delta_km=-0.5,  # ## STATE
                 cfg=None):
    """Buffer out, dissolve and buffer back. *Mutation*

    Args:
        record_ls: <list> A list of fiona records (from a fiona collection)
        buf_val: <float> The value to buffer out each polygon, in units of km
        scale_km_to_degrees: <float> Conversion based on *some* latitude
        delta_km: <float> Difference between buffer-in and buffer-out values;
            a negative delta indicates the buffer-in is smaller than the
            buffer-out
    Returns:
        <MultiPolygon of shapely Polygons>
    """
    assert isinstance(buf_val, (float, int))
    b_out = buf_val * scale_km_to_degrees  # ## STATE
    b_in = -(b_out + delta_km * scale_km_to_degrees)
    vert_ls = [r['geometry']['coordinates'][0] for r in record_ls]
    # Transform to shapely Polygons, guarding against empty polygons
    polygons = (Polygon(v) for v in vert_ls if len(v) > 3)
    dilated = (p.buffer(b_out) for p in polygons if p.is_valid)
    dissolved = unary_union(list(dilated))
    eroded = dissolved.buffer(b_in)
    if isinstance(eroded, Polygon):
        eroded = MultiPolygon([eroded])
    logging.debug("Leaving func:smooth_layer")
    return eroded
def createCenterline(self):
    """
    Calculates the centerline of a polygon.

    Densifies the border of a polygon which is then represented by a Numpy
    array of points necessary for creating the Voronoi diagram. Once the
    diagram is created, the ridges located within the polygon are joined
    and returned.

    Returns:
        a union of lines that are located within the polygon.
    """
    minx = int(min(self.inputGEOM.envelope.exterior.xy[0]))
    miny = int(min(self.inputGEOM.envelope.exterior.xy[1]))

    border = np.array(self.densifyBorder(self.inputGEOM, minx, miny))

    vor = Voronoi(border)
    vertex = vor.vertices

    lst_lines = []
    for j, ridge in enumerate(vor.ridge_vertices):
        if -1 not in ridge:
            line = LineString([
                (vertex[ridge[0]][0] + minx, vertex[ridge[0]][1] + miny),
                (vertex[ridge[1]][0] + minx, vertex[ridge[1]][1] + miny)])
            if line.within(self.inputGEOM) and len(line.coords[0]) > 1:
                lst_lines.append(line)

    return unary_union(lst_lines)
def create_t6_deserts(desert_geom, b_box, mask_metadata):
    """"""
    geom_list = list()
    with fiona.open(t6_block_groups) as block_groups:
        t6_metadata = block_groups.meta.copy()
        with fiona.open(t6_desert_feats, 'w', **t6_metadata) as t6_deserts:
            for bg in block_groups:
                geom = shape(bg['geometry'])
                props = bg['properties']
                # 'neither' is misspelled in the dataset, hence the (sic) below
                if props['min_pov'] != 'niether' and \
                        geom.intersects(desert_geom):
                    geom_list.append(geom)
                    new_geom = geom.intersection(desert_geom)
                    bg['geometry'] = mapping(new_geom)
                    t6_deserts.write(bg)

    t6_geom = unary_union(geom_list)
    t6_desert_geom = t6_geom.intersection(desert_geom)
    t6_mask_geom = b_box.difference(t6_desert_geom)

    with fiona.open(t6_desert_mask, 'w', **mask_metadata) as t6_mask:
        feat = {
            'geometry': mapping(t6_mask_geom),
            'properties': {'id': 1}
        }
        t6_mask.write(feat)
def add_polis(self, additional_polis, union=True):
    if union:
        newpolis = unary_union(self.POLIS + additional_polis)
        newpolis = list(flattenMultipoly(newpolis))
    else:
        newpolis = self.POLIS + additional_polis
    self.POLIS = newpolis  # list(np.hstack(newpolis))
def generate_deserts_feature(stops, desert_dist, t6=None):
    """"""
    b_box = get_pg_table_b_box('load.county')

    stops_buffs = []
    with fiona.open(stops) as dist_stops:
        metadata = dist_stops.meta.copy()
        for feat in dist_stops:
            geom = shape(feat['geometry'])
            fields = feat['properties']
            dist = fields['vend_dist']
            if dist > desert_dist:
                buff = geom.buffer(desert_dist)
                stops_buffs.append(buff)

    desert_area = unary_union(stops_buffs)

    vendor_buffs = []
    with fiona.open(rc_vendors) as vendors:
        for feat in vendors:
            geom = shape(feat['geometry'])
            buff = geom.buffer(desert_dist)
            vendor_buffs.append(buff)

    vendor_area = unary_union(vendor_buffs)
    desert_trim = desert_area.difference(vendor_area)
    desert_mask = b_box.difference(desert_trim)

    schema = metadata['schema']
    schema['geometry'] = desert_mask.geom_type
    schema['properties'] = {'id': 'int'}

    if t6:
        create_t6_deserts(desert_trim, b_box, metadata)
        return

    with fiona.open(desert_gaps, 'w', **metadata) as gaps_shp:
        feat = {
            'geometry': mapping(desert_mask),
            'properties': {'id': 1}
        }
        gaps_shp.write(feat)
def __init__(self, rectangles, state_range, agent_start=None, agent_size=None):
    poly = []
    for rect in rectangles:
        poly.append(Polygon(rect))
    self.config_range = state_range
    limits = LinearRing([state_range[0],
                         (state_range[1][0], state_range[0][1]),
                         state_range[1],
                         (state_range[0][0], state_range[1][1])])
    if agent_start is None:
        agent_start = np.array(self.DEFAULT_AGENT_START)
    else:
        agent_start = np.array(agent_start)
    if agent_size is None:
        self.agent_size = np.array(self.DEFAULT_AGENT_SIZE)
    else:
        self.agent_size = np.array(agent_size)
    end_point = np.zeros(2) + self.agent_size
    self.agent = Polygon([agent_start, (end_point[0], agent_start[1]),
                          end_point, (agent_start[0], end_point[1])])
    self.poly = poly
    self.config_poly = compute_config_all_obst(self.agent, poly)
    self.config_objects = unary_union(self.config_poly)
    self.obs_config = {
        i: compute_config_all_obst(self.poly[i], self.poly[:i] + self.poly[i+1:])
        for i in range(len(self.poly))}
    # self.union_obs_config = {k: unary_union(v) for k, v in self.obs_config.items()}
    self.objects = unary_union(poly)
    self.last_sweep = None
    self.intersect = None
    self.config_intersect = None
    self.agent = translate(self.agent, xoff=agent_start[0], yoff=agent_start[1])
    self.attached = None
def test_1(self):
    # Instead of random points, use deterministic, pseudo-random Halton
    # sequences for repeatability's sake.
    coords = zip(list(islice(halton(5), 20, 120)),
                 list(islice(halton(7), 20, 120)))
    patches = [Point(xy).buffer(0.05) for xy in coords]
    u = unary_union(patches)
    self.assertEqual(u.geom_type, "MultiPolygon")
    self.assertAlmostEqual(u.area, 0.71857254056)
def move_obs(self, index, displacement):
    for i in range(0, index):
        self.obs_config[i][index-1] = translate(self.obs_config[i][index-1],
                                                xoff=displacement[0],
                                                yoff=displacement[1])
    for i in range(index+1, len(self.poly)):
        self.obs_config[i][index] = translate(self.obs_config[i][index],
                                              xoff=displacement[0],
                                              yoff=displacement[1])
    self.config_poly[index] = translate(self.config_poly[index],
                                        xoff=displacement[0],
                                        yoff=displacement[1])
    self.config_objects = unary_union(self.config_poly)
    self.poly[index] = translate(self.poly[index],
                                 xoff=displacement[0], yoff=displacement[1])
    self.objects = unary_union(self.poly)
def region2features(region):
    features = []
    print(region)  # e.g. 1_4741
    m = re.search(r'/([\d_r]+).kmz', region)
    kml_name = m.group(1) + ".kml"
    with ZipFile(region, 'r') as myzip:
        root = etree.parse(myzip.open(kml_name))
        for placemark in xpath(root, "n:Document/n:Placemark"):
            name = xpatht(placemark, "n:name")
            lon = xpathf(placemark, "n:Model/n:Location/n:longitude")
            lat = xpathf(placemark, "n:Model/n:Location/n:latitude")
            link = xpatht(placemark, "n:Model/n:Link/n:href")
            # assumes the location transformation is the identity
            xml = myzip.open(link)
            model = etree.parse(xml)
            repairDAE(model)
            # etree.tostring returns bytes, hence BytesIO
            c = Collada(BytesIO(etree.tostring(model)))
            tris = []
            height = None
            for geom in c.geometries:
                assert len(geom.primitives) == 1
                triset = geom.primitives[0]
                heights = np.unique(np.hstack(
                    [[point[2] for point in tri]
                     for tri in triset.vertex[triset.vertex_index]]))
                if len(heights) == 1:
                    # the mesh is parallel to the xy plane
                    height = float(heights[0]) * inch_to_meters
                for tri in triset.vertex[triset.vertex_index]:
                    tri_p = Polygon([(tri[0][0], tri[0][1]),
                                     (tri[1][0], tri[1][1]),
                                     (tri[2][0], tri[2][1])])
                    tris.append(tri_p)

            def r(t):
                try:
                    if t.area > 0:
                        return True
                except Exception:
                    pass
                return False

            valid_tris = list(filter(r, tris))
            footprint = unary_union(valid_tris)
            assert height is not None
            # http://spatialreference.org/ref/epsg/3826/html/
            twd97 = Proj(init='epsg:3826')

            def unproject_twd97(x, y):
                x_meters = x * inch_to_meters
                y_meters = y * inch_to_meters
                # convert the model origin to TWD97
                x0, y0 = twd97(lon, lat)
                return twd97(x0 + x_meters, y0 + y_meters, inverse=True)

            unprojected = transform(unproject_twd97, footprint)
            cur.execute(
                "INSERT INTO buildings(id, geom, height) "
                "VALUES (%s, ST_SetSRID(%s::geometry, 4326), %s)",
                (name, unprojected.wkt, height))
    conn.commit()
    return features
def from_system(cls, system):
    """Convert a `System` instance to a `Polygons` instance.

    Parameters
    ----------
    system : System

    Returns
    -------
    Polygons
    """
    obj = cls()
    for e in system:
        if not hasattr(e, "paths"):
            continue
        # assert isinstance(e, PolygonPixelElectrode), (e, e.name)
        exts, ints = [], []
        for pi in e.paths:
            # shapely ignores f-contiguous arrays so copy
            # https://github.com/sgillies/shapely/issues/26
            ei = area_centroid(pi)[0]
            pi = geometry.LinearRing(pi.copy("C"))
            if ei < 0:
                ints.append((abs(ei), pi))
            elif ei > 0:
                exts.append((abs(ei), pi))
        if not exts:
            continue
        ints.sort(key=operator.itemgetter(0))
        exts.sort(key=operator.itemgetter(0))
        # the following needs to be complicated to cover
        # onion-like "ext in int in ext" cases.
        groups = []
        done = set()
        for exta, exterior in exts:
            ep = geometry.Polygon(exterior)
            gint = []
            for i, (inta, interior) in enumerate(ints):
                if i in done:
                    continue
                if inta >= exta:
                    break
                if ep.contains(interior):
                    gint.append(interior)
                    done.add(i)
            pi = geometry.Polygon(exterior, gint)
            if pi.is_valid and pi.area > 0:
                groups.append(pi)
            else:
                logger.warning("polygon %s failed %s/%s",
                               e.name, pi.is_valid, pi.area)
        # remaining interiors must be top level or "free"
        # assert not ints, ints
        # mp = geometry.MultiPolygon(groups)
        # mp = mp.union(geometry.Point())
        mp = ops.unary_union(groups)
        obj.append((e.name, mp))
    return obj
def get_intersection_parts(roads, int_buffers, max_line_units):
    """Finds all segments that intersect the buffers, and all that don't
    :param roads: List of shapely geometries representing road segments
    :param int_buffers: List of shapely polygons representing intersection buffers
    :param max_line_units: The maximum allowed length of the line
    """
    # Create a spatial index for intersection buffers to efficiently find
    # intersecting segments
    int_buffers_index = rtree.index.Index()
    for idx, intersection_buffer in enumerate(int_buffers):
        int_buffers_index.insert(idx, intersection_buffer.bounds)

    segments_map = {}
    non_int_lines = []
    for road in roads:
        road_int_buffers = []
        for idx in int_buffers_index.intersection(road.bounds):
            int_buffer = int_buffers[idx]
            if int_buffer.intersects(road):
                if idx not in segments_map:
                    segments_map[idx] = []
                segments_map[idx].append(int_buffer.intersection(road))
                road_int_buffers.append(int_buffer)

        # Collect the non-intersecting segments
        if len(road_int_buffers) > 0:
            diff = road.difference(unary_union(road_int_buffers))
            if 'LineString' == diff.type:
                non_int_lines.append(diff)
            elif 'MultiLineString' == diff.type:
                non_int_lines.extend([line for line in diff])
        else:
            non_int_lines.append(road)

    # Union all lines found within a buffer, treating them as a single unit
    int_multilines = [unary_union(lines) for _, lines in segments_map.items()]

    # Split any long non-intersecting segments. It's not important that they
    # be equal lengths, just that none of them are exceptionally long.
    split_non_int_lines = []
    for line in non_int_lines:
        split_non_int_lines.extend(split_line(line, max_line_units))

    # Return the intersection multilines together with the non-intersecting segments
    return int_multilines + split_non_int_lines
def savedxf_polylist(list_of_polygons, filename=None, debug=False,
                     save_as='POLYGON', union=False):
    """Saves a list_of_polygons to a dxf file.

    The polygons have a HATCH property, which is not supported by AutoCAD and
    LinkCAD. It can be viewed in e.g. KLayout. To convert the polygons into
    ones which do not have the HATCH property, use the built-in convert
    function from ShapelyChipDesigns.

    .. plot::

       import ShapelyChipDesigns as SD
       C = SD.Point(0,0).buffer(5)
       SD.savedxf_polylist([C], 'acircle')
       C
    """
    try:
        os.remove('buffer.geojson')
    except OSError:
        pass

    GNEW = []
    for p in list_of_polygons:
        if p.is_valid:
            GNEW += [p]
        else:
            pnew = p.buffer(0)
            if pnew.is_valid:
                GNEW += [pnew]
                if debug:
                    print('new polygon made from self-intersecting polygon,'
                          ' is valid:', pnew.is_valid)
            elif debug:
                print('self-intersecting polygon thrown out.')

    if not GNEW:
        GNEW = [empty()]
    if union:
        buffer_obj = unary_union(GNEW)
    else:
        buffer_obj = MultiPolygon(GNEW)

    if debug:
        print('started writing file ...')
    # json.dumps returns str, so open the file in text mode
    with open('buffer.geojson', 'w') as f:
        f.write(json.dumps(mapping(buffer_obj)))
    if debug:
        print('finished.')
        print('started conversion of geojson to dxf ...')
    if filename is None:
        filename = 'buffer'
    if debug:
        print('save as MULTILINESTRING or POLYGON...')
    # --config("DXF_WRITE_HATCH", "NO")
    os.system('ogr2ogr -f DXF ' + filename + '.dxf buffer.geojson')
    if debug:
        print('finished.')
    print('saved ' + filename + '.dxf')
def union(self):
    """Compute the union of all geometries in the layer and return a
    Shapely object."""
    from shapely.ops import unary_union
    shapes = []
    # for i in range(self.num_features):
    #     feat = self.layer.GetFeature(i)
    for feature in self.layer:
        shapes.append(loads(feature.GetGeometryRef().ExportToWkb()))
    return unary_union(shapes)
def is_valid_state(self, state):
    s = self.state
    self.state = state
    if self.attached is not None:
        config_obstacle = unary_union(self.config_poly[:self.attached]
                                      + self.config_poly[self.attached+1:])
    else:
        config_obstacle = self.config_objects
    collide = config_obstacle.intersects(Point(self.agent.exterior.coords[0]))
    for i in range(len(self.poly)):
        if collide:
            break
        collide |= unary_union(self.obs_config[i]).intersects(
            Point(self.poly[i].exterior.coords[0]))
    self.state = s
    return not collide
def unary_union(shapes):
    # behaves like cascaded_union except that it also handles multipart geometries
    if shapely_version < '1.2.16':
        raise Exception("shapely 1.2.16 or higher needed for unary_union; "
                        "upgrade shapely or try cascaded_union instead")
    o = []
    for shape in shapes:
        if not hasattr(shape, '__geo_interface__'):
            raise TypeError("%r does not appear to be a shape" % shape)
        o.append(geom.asShape(shape))
    res = shops.unary_union(o)
    return asShape(res)
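# A short sketch of calling the wrapper above (illustrative, not from the
# source). Any objects exposing __geo_interface__ work as input; the wrapper
# itself relies on Shapely 1.x, where geometry.asShape is still available.
def _demo_unary_union_wrapper():
    from shapely.geometry import box
    squares = [box(0, 0, 2, 2), box(1, 1, 3, 3)]
    merged = unary_union(squares)
    print(merged.area)  # 7.0: two 2x2 squares overlapping in a 1x1 square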
def get_airbridge_poly1(width=36, length=16, spacing=34):
    '''Returns a Polylist.

    Usage: RES.add_STRUC1(S1a+S1b)
    '''
    b = mybox((0, 0), width, length)
    b1 = translate(b, xoff=spacing/2., yoff=-length/2.)
    b2 = translate(b, xoff=-spacing/2 - width, yoff=-length/2.)
    A10 = unary_union([b1, b2])
    return A10
def clip_altitudes(self, new_geometry, new_altitude=None):
    # Register new geometry with an altitude.
    # A geometry with no altitude will reset the altitude information of its
    # area as if nothing was ever there.
    if self.last_altitude is not None and self.last_altitude > new_altitude:
        raise ValueError('Altitudes have to be ascending.')

    if new_altitude in self.altitudes:
        self.altitudes[new_altitude] = unary_union(
            [self.altitudes[new_altitude], new_geometry])
    else:
        self.altitudes[new_altitude] = new_geometry
def save(self, last_update, new_update):
    self.finalize()
    for level_id, geometries in self._geometries_by_level.items():
        geometries = unary_union(geometries)
        if geometries.is_empty:
            continue
        history = MapHistory.open_level(level_id, mode='base',
                                        default_update=last_update)
        history.add_geometry(geometries.buffer(1), new_update)
        history.save()
    self.reset()
def contiguous_outline(countyshp):
    fips = set([state.fips for state in us.STATES_CONTIGUOUS])
    shapes = []
    with fiona.open(countyshp) as f:
        for county in f:
            statefp = county['properties']['STATEFP']
            if statefp in fips:
                shapes.append(asShape(county['geometry']))
    outline = unary_union(shapes)
    return outline
def get_intersection_buffers(intersections, intersection_buffer_units, debug=False):
    """Buffers intersections according to projection units
    Args:
        intersections
        intersection_buffer_units - in meters
        debug - if True, will output the buffers to file for debugging
    Returns:
        a list of polygons buffering the intersections;
        these are circles, or groups of overlapping circles
    """
    buffered_intersections = [
        intersection['geometry'].buffer(intersection_buffer_units)
        for intersection in intersections
    ]

    buffered_intersections = unary_union(buffered_intersections)
    if debug:
        util.output_from_shapes(
            [(x, {}) for x in buffered_intersections],
            os.path.join(MAP_FP, 'int_buffers.geojson'))

    results = []

    # Index the intersection points for fast lookup
    inter_index = rtree.index.Index()
    for idx, inter_point in enumerate(intersections):
        inter_index.insert(idx, inter_point['geometry'].bounds)

    # Get the points that overlap with the buffers
    for buff in buffered_intersections:
        matches = []
        for idx in inter_index.intersection(buff.bounds):
            if intersections[idx]['geometry'].within(buff):
                matches.append(Record(intersections[idx]['properties'],
                                      point=intersections[idx]['geometry']))
        results.append(IntersectionBuffer(buff, matches))
    return results
def __init__(self, filename):
    self.gbr = gerber.read(filename)
    self._populateShapelyPrimitives()
    union = sop.unary_union(self.shapelyPrimitives)
    self.closedPolygons = [geo.polygon.orient(poly) for poly in union]
    self.polygonLines = []
    self.pads = []
    self.tolerance = 1e-6
    for poly in self.closedPolygons:
        boundary = poly.boundary
        for i in range(len(boundary.coords) - 1):
            self.polygonLines.append(
                geo.LineString([boundary.coords[i], boundary.coords[i + 1]]))
def _dilate_and_unify_object_bounds(
        object_bounds_list: List[List[Dict[str, float]]],
        dilation_amount: float,
        source: Tuple[float, float] = None,
        target: Tuple[float, float] = None) -> Optional[List[Polygon]]:
    """Dilate the given object bounds by the given amount and return the
    resulting coordinates. Fall back to the original bounds if the new
    bounds would overlap the given source or target point."""
    source_point = Point(source) if source else None
    target_point = Point(target) if target else None

    # Expand the rects by the dilation into bigger polys with 8 points.
    poly_list = []
    for bounds in object_bounds_list:
        poly = Polygon([(point['x'], point['z']) for point in bounds])
        logging.debug(f'original poly {poly}')
        modified_poly = poly.buffer(dilation_amount, resolution=1, cap_style=3)
        logging.debug(f'modified poly {modified_poly}')
        # Use the original poly if dilation would overlap with source/target.
        if ((source and not poly.contains(source_point) and
             modified_poly.contains(source_point)) or
                (target and not poly.contains(target_point) and
                 modified_poly.contains(target_point))):
            poly_list.append(poly)
        else:
            poly_list.append(modified_poly)

    # Merge any intersecting polys.
    merged_poly_list = (unary_union(poly_list)
                        if len(poly_list) > 1 else poly_list)
    if isinstance(merged_poly_list, Polygon):
        merged_poly_list = [merged_poly_list]
    poly_coords_list = [
        list(poly.exterior.coords) for poly in merged_poly_list
    ]
    # The polys returned by unary_union have the same first and last point,
    # but the shortest-path code doesn't want the repeated point.
    for coords in poly_coords_list:
        if coords[0] == coords[-1]:
            del coords[-1]
    return poly_coords_list
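# A hedged usage sketch for _dilate_and_unify_object_bounds (illustrative;
# the bounds data below are hypothetical): one unit square dilated by 0.5.
def _demo_dilate_bounds():
    bounds = [[{'x': 0, 'z': 0}, {'x': 1, 'z': 0},
               {'x': 1, 'z': 1}, {'x': 0, 'z': 1}]]
    coords_list = _dilate_and_unify_object_bounds(bounds, dilation_amount=0.5)
    # one dilated poly; per the comment in the function, roughly 8 corners
    print(len(coords_list), len(coords_list[0]))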
def mergeLayer(self, layer, path):
    # Convert all polygons into shapely.geometry.Polygon
    _sPolygons = []
    for _polygon in layer:
        _sPolygon = Polygon(shell=_polygon).buffer(0)
        _sPolygons.append(_sPolygon)

    try:
        _outPolygons = unary_union(_sPolygons)
    except ValueError as ve:
        print("ERROR : Cannot merge this layer")
        print("        " + str(ve))
        return

    # Return value within layer
    if isinstance(_outPolygons, Polygon):
        path.append(self.polygonToPath(_outPolygons))
    else:
        for _polygon in _outPolygons.geoms:
            path.append(self.polygonToPath(_polygon))
def polygonizeFeatures(features, fields=None):
    lineList = []
    for inFeat in features:
        inGeom = inFeat.geometry()
        if inGeom is None:
            pass
        elif inGeom.isMultipart():
            lineList.extend(inGeom.asMultiPolyline())
        else:
            lineList.append(inGeom.asPolyline())
    allLines = MultiLineString(lineList)
    allLines = unary_union(allLines)
    polygons = list(polygonize([allLines]))
    outList = []
    for polygon in polygons:
        outFeat = QgsFeature(fields)
        outFeat.setGeometry(QgsGeometry.fromWkt(polygon.wkt))
        outList.append(outFeat)
    return outList
def regrouperEntite(a):
    """Merges the features of the vector layer obtained as the union of the
    two layers. The parameter a is the path of the layer to merge."""
    polygons = [shape(feature['geometry']) for feature in fiona.open(a)]
    union_poly = unary_union(polygons)  # or cascaded_union(polygons)
    schema = {
        'geometry': 'Polygon',
        'properties': {'id': 'int'},
    }
    with fiona.open('shp/my_shp6.shp', 'w', 'ESRI Shapefile', schema) as c:
        c.write({
            'geometry': mapping(union_poly),
            'properties': {'id': 1},
        })
def prepare_earth_geometry(geometry_resolution: str = "50m"):
    """
    Preparations necessary for determining whether a point is over land or water.
    This code may need to download a ZIP containing Earth geometry data the first
    time it runs.
    Code borrowed from https://stackoverflow.com/a/48062502

    :param geometry_resolution: The resolution of the NaturalEarth shapereader
        to use. Valid values are '10m', '50m' or '110m'. Default '50m'.
    :return: The PreparedGeometry object that can be used for point-land checking.
    :raises ValueError: If geometry_resolution is not '10m', '50m', or '110m'.
    """
    if geometry_resolution not in ["10m", "50m", "110m"]:
        raise ValueError("Argument 'geometry_resolution' must be either "
                         "'10m', '50m', or '110m'.")

    print("-- Preparing Earth geometry...")
    land_shp_fname = shpreader.natural_earth(resolution=geometry_resolution,
                                             category='physical', name='land')
    land_geom = unary_union(list(shpreader.Reader(land_shp_fname).geometries()))
    land = prep(land_geom)
    print("-- Earth geometry prepared.")
    return land
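# A brief usage sketch (not from the original source), assuming cartopy and
# its NaturalEarth data are available; coordinates are (lon, lat).
def _demo_prepare_earth_geometry():
    from shapely.geometry import Point
    land = prepare_earth_geometry("110m")
    print(land.contains(Point(2.35, 48.86)))   # Paris -> True
    print(land.contains(Point(-30.0, 45.0)))   # mid-Atlantic -> False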
def detect_holes(self):
    # DETECT HOLES
    selected_tiles = [
        self.complete_graph.tiles[self.inverse_index[i]].tile_poly.buffer(1e-7)
        for i in range(len(self.predict)) if self.predict[i] == 1
    ]
    unioned_shape = unary_union(selected_tiles)
    if isinstance(unioned_shape, shapely.geometry.polygon.Polygon):
        if len(list(unioned_shape.interiors)) > 0:
            return True
    elif isinstance(unioned_shape, shapely.geometry.multipolygon.MultiPolygon):
        if any([
            len(list(unioned_shape[i].interiors)) > 0
            for i in range(len(unioned_shape))
        ]):
            return True
    return False
def test_decompose_polygon(self):
    polygon1 = box(0, 0, 10, 5)
    lines1 = self.decomposer1.decompose(geometry=polygon1)
    self.assertEqual(4, len(lines1))
    self._test_objects_are_all_of_type(lines1, LineString)
    self._test_objects_are_of_lens(lines1, [10, 10, 5, 5])

    polygon2 = Polygon([(0, 0), (1, 1), (0, 1)])
    lines2 = self.decomposer1.decompose(geometry=polygon2)
    self.assertEqual(3, len(lines2))
    self._test_objects_are_all_of_type(lines2, LineString)
    self._test_objects_are_of_lens(lines2, [1, 1, 2**0.5])

    circle = Point(0.5, 0.5).buffer(0.5)
    polygon3 = unary_union([circle, box(0.5, 0, 1.5, 1)])
    line3 = self.decomposer_angle_tol.decompose(polygon3)
    self.assertEqual(2, len(line3))
    self._test_objects_are_all_of_type(line3, LineString)
    self._test_objects_are_of_lens(line3, [1, math.pi / 2 + 2])
def find_fit_corners(tile1, tile2, left):
    # Draw missed corners polygons
    c = can_be_corner(tile1, tile2, left)
    ret = None
    if c is not None:
        x1 = c[0][0]
        x2 = c[1][0]
        y1 = c[0][1]
        y2 = c[1][1]
        pol = fit_third_order(x1, x2, y1, y2, c[0][2], c[1][2])
        if pol is not None:
            pol_points = []
            if left:
                pol_points.append((min(y1, y2), min(x1, x2)))
            else:
                pol_points.append((min(y1, y2), max(x1, x2)))
            pol_points_a = []
            for x_i in range(min(x1, x2), max(x1, x2) + 1):
                y_i = int(poly_3(*pol, x_i))
                pol_points_a.append((int(y_i), int(x_i)))
            if all([0 <= x[0] <= img_side_len for x in pol_points_a]) \
                    and all([0 <= x[1] <= img_side_len for x in pol_points_a]):
                pol_points.extend(pol_points_a)
                if left:
                    pol_points.append((max(y1, y2), max(x1, x2)))
                    pol_points.append((img_side_len, 0))
                else:
                    pol_points.append((max(y1, y2), min(x1, x2)))
                    pol_points.append((img_side_len, img_side_len))
                square_points = [(0, 0), (img_side_len, 0),
                                 (img_side_len, img_side_len),
                                 (0, img_side_len)]
                square_poly = Polygon(square_points)
                line = LineString(pol_points)
                merged = linemerge([square_poly.boundary, line])
                borders = unary_union(merged)
                polygons = []
                for poly in polygonize(borders):
                    polygons.append(poly)
                if len(polygons) > 1:
                    return mask_for_polygons([polygons[1]],
                                             (img_side_len, img_side_len))
def get_nodata_zones(wfs_hsy_url: str, layer: str, hma_mask: str, export_gpkg: str):
    """1) Downloads a polygon layer of the municipalities of the Helsinki
    Metropolitan Area, 2) creates buffered polygons from the boundary lines of
    these polygons, and 3) exports the boundary buffers to a geopackage.
    """
    mask_poly: Polygon = geom_utils.project_geom(
        gpd.read_file(hma_mask)['geometry'][0]).buffer(500)
    municipalities = get_wfs_feature(wfs_hsy_url, layer)
    municipalities.to_file(export_gpkg, layer='hma_municipalities', driver='GPKG')
    boundaries = []
    for municipality in municipalities.itertuples():
        for poly in municipality.geometry.geoms:
            boundaries.append(poly.boundary.buffer(22))
    dissolved_buffer: Polygon = unary_union(boundaries)
    intersected_buffer = dissolved_buffer.intersection(mask_poly)
    boundary_gdf = gpd.GeoDataFrame(data=[{'nodata_zone': 1}],
                                    geometry=[intersected_buffer],
                                    crs=CRS.from_epsg(3879))
    boundary_gdf.to_file(export_gpkg, layer='municipal_boundaries', driver='GPKG')
def add_boundary_perimeters(graph, geometries):
    """Add shared perimeter between nodes and the total geometry boundary.

    :param graph: NetworkX graph
    :param geometries: GeoSeries containing geometry information.
    :return: The updated graph.
    """
    prepared_boundary = prep(unary_union(geometries).boundary)
    boundary_nodes = geometries.boundary.apply(prepared_boundary.intersects)

    for node in graph:
        graph.nodes[node]["boundary_node"] = bool(boundary_nodes[node])
        if boundary_nodes[node]:
            total_perimeter = geometries[node].boundary.length
            shared_perimeter = sum(neighbor_data["shared_perim"]
                                   for neighbor_data in graph[node].values())
            boundary_perimeter = total_perimeter - shared_perimeter
            graph.nodes[node]["boundary_perim"] = boundary_perimeter
def dissolve_overlapping_shapes(shapes_with_props, buffer_size=None):
    _logger.info('Dissolve overlapping shapes')
    res = []

    if buffer_size:
        shapes_with_props = apply_buffer(shapes_with_props, buffer_size)

    _logger.info('Enumerate shapes')
    for i, shape in enumerate(shapes_with_props):
        shape.props['id'] = i

    _logger.info('Create index for shapes')
    ix = create_index(shapes_with_props)

    total = len(shapes_with_props)
    with tqdm(total=total) as pbar:
        new_shape_with_props = None
        while shapes_with_props:
            s = shapes_with_props.pop()
            pbar.update(total - len(shapes_with_props))
            while True:
                intersecting_shape_ids = set(ix.intersection(s.shape.bounds))
                ss = [
                    x for x in shapes_with_props
                    if x != s and x.props['id'] in intersecting_shape_ids
                ]
                if not ss:
                    break
                new_shape = unary_union([s.shape] + [x.shape for x in ss])
                new_props = {'prob': np.mean([x.props['prob'] for x in ss])}
                s = ShapeWithProps(shape=new_shape, props=new_props)
                shapes_with_props = [
                    x for x in shapes_with_props if x not in ss
                ]
                pbar.update(total - len(shapes_with_props))
            res.append(s)

    return res
def getCaustics(self, lc_ord):
    x1, x2 = self.getCritPoints(lc_ord, pixel_units=True)
    y1, y2 = self.mapCrit2Cau(x1, x2)
    if y1.size > 1:
        points = list(zip(y1, y2))
        points = np.asarray(points)
    else:
        points = [(0, 0), (0, 0)]
    ls = shapely.geometry.LineString(points)
    lr = shapely.geometry.LineString(ls.coords[:] + ls.coords[0:1])
    mls = unary_union(lr)
    mp = shapely.geometry.MultiPolygon(list(polygonize(mls)))
    cau = Caustic(lc_ord.ID, mp)
    cau.principale = lc_ord.principale
    cau.passsize = lc_ord.passsize
    cau.setPoints(points)
    A = mp.area
    cau.setArea(A)
    return cau
def intersect_products(self):
    print('Found ' + str(len(self.products)) + ' products')
    S2_geojson_path = (self.directory / 'orders' /
                       FLAGS.s2_order_id).with_suffix('.geojson')
    ground_geojsons = read_geojson(S2_geojson_path)
    products_geojsons = self.queried_products_as_geojson()
    ground_polygon = ground_geojsons.get('features')[0].get(
        'geometry').get('coordinates')
    ground_polygon = geometry.Polygon(ground_polygon[0][0])
    titles = []
    ids = []
    for item in products_geojsons.get('features'):
        id = item.get('properties').get('id')
        item = item.get('properties').get('title')
        item = (item[17:25] + item[48:55])
        titles.append(item)
        ids.append([item, id])
    unique = list(set(titles))
    unique.sort()
    union_list = []
    for i, element in enumerate(unique):
        local_polygon = Polygon()
        for j in range(len(titles)):
            if titles[j] == element:
                item = products_geojsons.get('features')[j]
                item = item.get('geometry').get('coordinates')
                item = geometry.Polygon(item[0][0])
                item = affinity.scale(item, xfact=1.01, yfact=1.01)
                polygons = [item, local_polygon]
                local_polygons = unary_union(polygons)
                local_polygon = item
        union_list.append([local_polygons, element])
    found_id = None
    for index, element in enumerate(union_list):
        wkt = element[0].wkt
        if ground_polygon.within(element[0]):
            found_id = element[1]
            break
    for i in ids:
        if found_id != i[0]:
            del self.products[i[1]]
    print('Reduced the products to ' + str(len(self.products)) + ' products')
def flatten_geometries(geometry):
    """
    Flatten polygon collections into a single polygon by using their union

    :param geometry: Type of Shapely geometry
    :return:
    """
    from shapely.ops import unary_union
    if geometry.type == 'Polygon':
        # ignore Polygons
        return geometry
    else:
        joined = unary_union(list(geometry))
        if joined.type == 'MultiPolygon':
            # some MultiPolygons could not be combined;
            # just return the first polygon
            return joined[0]
        elif joined.type != 'Polygon':
            # discard geometry if it is still not a Polygon
            print('Discarding geometry of type: {geometry_type}'.format(
                geometry_type=joined.type))
            return None
        else:
            return joined
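# A small sketch of flatten_geometries (illustrative only); the function
# relies on Shapely 1.x semantics, where multipart geometries are directly
# iterable via list(geometry).
def _demo_flatten_geometries():
    from shapely.geometry import MultiPolygon, box
    mp = MultiPolygon([box(0, 0, 1, 1), box(0.5, 0, 1.5, 1)])
    # the two overlapping boxes union into a single 'Polygon'
    print(flatten_geometries(mp).geom_type)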
def generate_obstacles(
        num_obstacles,
        bounds=(-10., -10., 10., 10.),
        min_radius=2.,
        max_radius=10.,
        num_vertices_range=list(range(6, 15))):
    obstacles = []
    x_min, y_min, x_max, y_max = bounds
    for i in range(num_obstacles):
        vertices = []
        x_center = np.random.uniform(x_min, x_max)
        y_center = np.random.uniform(y_min, y_max)
        center = Point(x_center, y_center)
        radius = np.random.uniform(min_radius, max_radius)
        num_vertices = np.random.choice(num_vertices_range)
        obstacles.append(generate_obstacle(center, radius, num_vertices))
    union = unary_union(obstacles)
    return union if isinstance(union, MultiPolygon) else MultiPolygon([union])
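# A usage sketch for generate_obstacles (illustrative; generate_obstacle,
# singular, is assumed to be defined elsewhere in the same module).
def _demo_generate_obstacles():
    np.random.seed(0)  # make the random layout reproducible
    obstacles = generate_obstacles(num_obstacles=5)
    print(obstacles.geom_type)  # always 'MultiPolygon' by construction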
def load_coordinates(self):
    with open(self.file_name) as f:
        folder = parser.parse(f).getroot().Document.Folder

    # Need to check for multiple LineString elements
    _lines = []
    alts = []
    for pm in folder.Placemark:
        _line = []
        alt = None
        for points in pm.LineString.coordinates.text.split():
            lon, lat, alt = points.split(",")
            _line.append((float(lon), float(lat)))
        _lines.append(_line)
        alts.append(float(alt))
    ml = MultiLineString(_lines)

    self.region_data, regions = self.find_regions(ml, alts)

    mlp = MultiPolygon(regions)
    self.boundary = unary_union(mlp.buffer(0.001)).exterior.xy
def get_rings_around_cell(cell_idx, vcs, width):
    """
    Given the Voronoi cells `vcs`, the index of the cell `cell_idx` and the
    number of layers `width`, creates a list of lists, where the first list
    contains the indexes of `vcs` for the zero layer, the second those for
    the first layer, etc.
    """
    result = [vcs.iloc[cell_idx:cell_idx + 1]]
    t = [[cell_idx]]
    neighbours_dis = result[0].iloc[0].geometry
    for layer in range(width):
        neighbours = vcs[vcs.geometry.touches(neighbours_dis)]
        t.append(neighbours.index.to_list())
        result.append(neighbours)
        neighbours = pd.concat(result)
        neighbours_dis = unary_union(neighbours.geometry)
    return t
def railway_barriers(place, download_method, distance=500.0, epsg=None,
                     keep_light_rail=False):
    """
    The function downloads overground railway structures from OSM. Such
    structures can be considered barriers which shape the Image of the City
    and obstruct sight and movement.

    Parameters
    ----------
    place: string
        name of cities or areas in OSM: when using "OSMpolygon" please provide
        the name of a "relation" in OSM as an argument of "place"; when using
        "distance_from_address" provide an existing OSM address; when using
        "OSMplace" provide an OSM place name
    download_method: string {"polygon", "distance_from_address", "OSMplace"}
        it indicates the method that should be used for downloading the data
    distance: float
        it is used only if download_method == "distance_from_address"
    epsg: int
        epsg of the area considered; if None, OSMnx is used for the projection
    keep_light_rail: boolean
        whether to keep light rail structures, e.g. tramways

    Returns
    -------
    railway_barriers: LineString GeoDataFrame
        the railway barriers GeoDataFrame
    """
    crs = 'EPSG:' + str(epsg)
    tags = {"railway": "rail"}
    railways = _download_geometries(place, download_method, tags, crs, distance)
    # removing light rail, in case
    if not keep_light_rail:
        railways = railways[railways.railway != 'light_rail']
    if "tunnel" in railways.columns:
        railways["tunnel"].fillna(0, inplace=True)
        railways = railways[railways["tunnel"] == 0]

    r = railways.unary_union
    p = polygonize_full(r)
    railways = unary_union(p).buffer(10).boundary  # to simplify a bit
    railways = _simplify_barrier(railways)

    df = pd.DataFrame({'geometry': railways,
                       'type': ['railway'] * len(railways)})
    railway_barriers = gpd.GeoDataFrame(df, geometry=df['geometry'], crs=crs)

    return railway_barriers
def park_barriers(place, download_method, distance=500.0, epsg=None,
                  min_area=100000):
    """
    The function downloads park areas with a certain extent and converts them
    to LineString features. Parks may break continuity in the urban structure,
    besides being attractive areas for pedestrians.

    Parameters
    ----------
    place: string
        name of cities or areas in OSM: when using "OSMpolygon" please provide
        the name of a "relation" in OSM as an argument of "place"; when using
        "distance_from_address" provide an existing OSM address; when using
        "OSMplace" provide an OSM place name
    download_method: string {"polygon", "distance_from_address", "OSMplace"}
        it indicates the method that should be used for downloading the data
    distance: float
        it is used only if download_method == "distance_from_address"
    epsg: int
        epsg of the area considered; if None, OSMnx is used for the projection
    min_area: double
        parks with an extent smaller than this parameter are disregarded

    Returns
    -------
    park_barriers: LineString GeoDataFrame
        the park barriers GeoDataFrame
    """
    crs = 'EPSG:' + str(epsg)
    tags = {"leisure": True}
    parks_poly = _download_geometries(place, download_method, tags, crs, distance)

    parks_poly = parks_poly[parks_poly.leisure == 'park']
    parks_poly = parks_poly[~parks_poly['geometry'].is_empty]
    parks_poly['area'] = parks_poly.geometry.area
    parks_poly = parks_poly[parks_poly.area >= min_area]

    pp = parks_poly['geometry'].unary_union
    pp = polygonize_full(pp)
    parks = unary_union(pp).buffer(10).boundary  # to simplify a bit
    parks = _simplify_barrier(parks)

    df = pd.DataFrame({'geometry': parks, 'type': ['park'] * len(parks)})
    park_barriers = gpd.GeoDataFrame(df, geometry=df['geometry'], crs=crs)

    return park_barriers
def __init__(self, boundary_file, names=None, field='NAME', fmt=None,
             time_bnds=None):
    """PolygonBounds constructor.

    Parameters
    ----------
    boundary_file : str
        Path to a boundary file (GeoJSON or shapefile).
    names : list of str, optional
        List of elements (eg, countries, states, provinces, or counties)
        to load from the boundary file. If None, all features are used to
        define the overall boundary mask.
    field : str, optional
        Name of field to load from boundary file, default: NAME
    fmt : {None, 'shp', 'json'}
        Boundary file type. Can be shp (shapefile), json (GeoJSON), or
        inferred from the extension.
    time_bnds : tuple of floats, optional
        Start and end times.
    """
    self._initialize_domains(time_bnds=time_bnds)
    fmt = boundary_file.split('.')[-1] if not fmt else fmt

    def geom_sel(name, names):
        if not names:
            return True
        else:
            return name in names

    if fmt == 'shp':
        reader = shapereader.Reader(boundary_file)
        features = reader.records()
        shapes = [feature.geometry for feature in features
                  if geom_sel(feature.attributes[field], names)]
    elif fmt == 'json':
        with open(boundary_file) as json_file:
            features = json.load(json_file)['features']
        shapes = [shape(feature['geometry']) for feature in features
                  if geom_sel(feature['properties'][field], names)]
    else:
        raise ValueError('Unrecognized boundary file format.')

    self.shapes = unary_union(shapes)
    if self.shapes.boundary.type == 'LineString':
        self.shapes = MultiPolygon([self.shapes])
def unitity(agent, other):
    assert isinstance(agent, Polygon) and isinstance(other, Polygon)
    interset = agent.intersection(other)
    union = ops.unary_union([agent, other])
    interset_coords = union.boundary.intersection(interset.boundary)
    if isinstance(interset_coords, geometry.MultiLineString):
        coord1 = interset_coords[0].coords[0]
        coord2 = (interset_coords[1].coords[0]
                  if interset_coords[1].coords[0][0] != coord1[0] and
                  interset_coords[1].coords[0][1] != coord1[1]
                  else interset_coords[1].coords[1])
        line = LineString([coord1, coord2])
    else:
        line = LineString(interset_coords)
    unitity = ops.split(union, line)
    index = 0
    for i in range(len(unitity)):
        if np.abs(agent.intersection(unitity[i]).area - unitity[i].area) < 1e-4:
            index = i
    return unitity[index], unitity[1 - index], line
def mergePolygons(self):
    '''
    Description: Turns 2 or more polygons into a single polygon (hopefully)
    Params: None
    Returns: None
    '''
    # This line merges the polygons
    new_geometry = mapping(unary_union(self.polygons))

    self.newFeatures = dict(
        type='Feature',
        id="",
        properties=dict(Name=""),
        geometry=dict(type=new_geometry['type'],
                      coordinates=new_geometry['coordinates']))

    if self.filterKey and self.filterVal:
        self.newFeatures['properties'][self.filterKey] = self.filterVal
def fill(lines: vp.LineCollection, pen_width: float, tolerance: float,
         keep_open: bool) -> vp.LineCollection:
    new_lines = vp.LineCollection()
    polys = []
    for line in lines:
        if np.abs(line[0] - line[-1]) <= tolerance:
            polys.append(Polygon([(pt.real, pt.imag) for pt in line]))
        elif keep_open:
            new_lines.append(line)

    # merge all polygons and fill the result
    mp = unary_union(polys)
    if mp.geom_type == "Polygon":
        mp = [mp]
    for p in mp:
        new_lines.extend(_generate_fill(p, pen_width))

    return new_lines
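# A hedged sketch of calling fill above (illustrative; _generate_fill is a
# module helper not shown here). vpype stores each line as a numpy array of
# complex numbers, so a closed unit square looks like this:
def _demo_fill():
    square = np.array([0 + 0j, 1 + 0j, 1 + 1j, 0 + 1j, 0 + 0j])
    lc = vp.LineCollection([square])
    return fill(lc, pen_width=0.1, tolerance=1e-6, keep_open=False)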
def load_polygon(name, query):
    api = overpy.Overpass()
    result = api.query(query)

    # convert ways to LineStrings
    lss = []
    for ii_w, way in enumerate(result.ways):
        ls_coords = []
        for node in way.nodes:
            # create a list of node coordinates
            ls_coords.append((node.lon, node.lat))
        # create a LineString from coords
        lss.append(geometry.LineString(ls_coords))

    merged = linemerge([*lss])     # merge LineStrings
    borders = unary_union(merged)  # LineStrings to a MultiLineString
    polygons = list(polygonize(borders))
    return geometry.MultiPolygon(polygons)
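# A hedged usage sketch for load_polygon; the Overpass QL below is
# illustrative only -- any query that returns the closed ways of a single
# boundary works, and the relation named here is just an example.
def _demo_load_polygon():
    query = """
    rel["boundary"="administrative"]["name"="Liechtenstein"];
    way(r);
    (._;>;);
    out body;
    """
    return load_polygon("Liechtenstein", query)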
def nuzzle_poly(poly, neighbors, dilate_multiplier_min, dilate_multiplier_max,
                erode_multiplier=-1.1, n_iters=1,
                dilate_join_style=1, dilate_cap_style=1,
                erode_join_style=1, erode_cap_style=1):
    rad = get_rad(poly)
    neighbor_union = so.unary_union(neighbors)
    for i in range(n_iters):
        d = rad * np.random.uniform(dilate_multiplier_min, dilate_multiplier_max)
        bc = poly.buffer(d, join_style=dilate_join_style,
                         cap_style=dilate_cap_style)
        if bc.intersects(neighbor_union):
            e = d * erode_multiplier
            bc = bc.difference(neighbor_union).buffer(
                e, join_style=erode_join_style, cap_style=erode_cap_style)
        poly = bc
    return poly
def aggregation(ds, groups):
    dsc = ds.copy()
    sums_vars = ['power', 'population', 'heat', 'power_plants', 'onshore_wind',
                 'offshore_wind', 'rooftop_pv', 'utility_pv', 'hydro_capacity',
                 'hydro_storage']
    area_weighted_vars = ['wind_cf', 'pv_cf', 'wind_offshore_cf', 'cop_air',
                          'hydro_inflow']
    sep = ','
    new_coords = dsc.coords['nuts_2'].values
    for nuts in groups:
        if nuts[0] in dsc.coords['nuts_2']:
            ds_is = dsc.sel(nuts_2=nuts)
            group_area = sum([i.area for i in ds_is['geometry'].values])
            offshore_area_sum = ds_is['offshore_wind'].sum()
            for var in area_weighted_vars:
                for n in nuts:
                    if var == 'wind_offshore_cf':
                        ds_is[var].loc[n] = (dsc[var].loc[n]
                                             * dsc['offshore_wind'].loc[n]
                                             / offshore_area_sum)
                    else:
                        ds_is[var].loc[n] = (dsc[var].loc[n]
                                             * dsc['geometry'].loc[n].values.item().area
                                             / group_area)
                dsc[var].loc[nuts[0]] = ds_is[var].sum(axis=0)
            for var in sums_vars:
                dsc[var].loc[nuts[0]] = ds_is[var].sum(axis=0)
            dsc['geometry'].loc[nuts[0]] = np.array(
                str(unary_union(ds_is['geometry'].loc[nuts].values)))
            new_coords = np.where(new_coords == nuts[0], sep.join(nuts),
                                  new_coords)
            dsc = dsc.assign_coords(nuts_2=new_coords)
            for n in nuts[1:]:
                new_coords = new_coords[new_coords != n]
            dsc = dsc.sel(nuts_2=new_coords)
    for nuts_2_id in dsc.coords['nuts_2']:
        dsc['geometry'].loc[nuts_2_id] = str(
            dsc['geometry'].loc[nuts_2_id].values.item())
    dsc['geometry'] = (('nuts_2'), pd.Series(dsc['geometry']).apply(wkt.loads))
    return dsc
def create_nyc_custom_fips(self, bdict):
    """Create a custom FIPS dataset for NYC alone, FIPS #00001."""
    from shapely.geometry import Polygon, MultiPolygon
    from shapely.ops import unary_union

    def _create_poly(shape):
        lngs = shape[:, 0]
        lats = shape[:, 1]
        p = Polygon(list(zip(lngs, lats)))
        return p

    # ## these are the FIPS for the FIVE NYC BOROUGHS
    # fips_five_boroughs = fips_missing_2019 & data_nyc['fips']
    # ## first get a boundary dict: fips -> points
    # bdict = get_boundary_dict(fips_five_boroughs)

    # ## second, construct a list of all the Polygons corresponding to these boroughs
    all_polys = list(map(_create_poly, chain.from_iterable(bdict.values())))

    # ## third, make a collection of MultiPolygon from the unary_union of these Polygons
    newpolys = unary_union(MultiPolygon(all_polys))

    # ## fourth, get the new shapes, ordered by area from largest to smallest
    newshapes = list(
        map(
            # take a Polygon, convert it into the shape format we understand
            lambda poly: numpy.array(poly.exterior.coords.xy).T,
            sorted(newpolys.geoms, key=lambda poly: poly.area)[::-1]
        ))

    # ## fifth (and finally), return this new FIPS data structure:
    # ## {'bbox': bbox, 'points': list-of-shapes}; FIPS # is 00001
    bbox = gis.calculate_total_bbox(newshapes)
    geom_nyc = {'bbox': bbox, 'points': newshapes}
    return geom_nyc
def merge_counties():
    with urlopen(
            r'https://raw.githubusercontent.com/rstudio/leaflet/master/docs/json/nycounties.geojson'
    ) as ny_response:
        ny_counties = json.load(ny_response)

    indices = [56, 58, 59, 60, 61]
    nyc_polygons = [
        asShape(ny_counties['features'][i]['geometry']) for i in indices
    ]

    # get the metadata for the first county
    properties = ny_counties['features'][indices[0]]['properties']
    properties['county'] = 'New York City'
    properties['id'] = 36998
    properties['pop'] = 8443713

    # get the union of the polygons
    joined = unary_union(nyc_polygons)

    # delete the merged counties
    counties_ny = ny_counties
    for i in reversed(sorted(indices)):
        del counties_ny['features'][i]

    # add the new polygon to the features
    feature = Feature(geometry=joined, properties=properties)
    counties_ny['features'].append(feature)

    ny_counties = []
    for i in range(len(counties_ny['features'])):
        current_county = counties_ny['features'][i]['properties']['county']
        county_dict = {'label': current_county, 'value': current_county}
        ny_counties.append(county_dict)
    ny_counties = sorted(ny_counties, key=lambda k: k['label'])

    ny_counties_list = []
    for i in range(len(ny_counties)):
        ny_counties_list.append(ny_counties[i]['value'])

    return counties_ny
def dissolve_df(in_df, dissolve_attribute=None):
    if dissolve_attribute is not None:
        print("dissolving DataFrame on {}".format(dissolve_attribute))
        # unique attributes on which to make the dissolve
        dissolved_items = list(np.unique(in_df[dissolve_attribute]))
        # go through unique attributes, combine the geometries, and
        # populate the new DataFrame
        df_out = pd.DataFrame()
        length = len(dissolved_items)
        knt = 0
        for item in dissolved_items:
            df_item = in_df[in_df[dissolve_attribute] == item]
            geometries = list(df_item.geometry)
            dissolved = unary_union(geometries)
            dict = {dissolve_attribute: item, 'geometry': dissolved}
            df_out = df_out.append(dict, ignore_index=True)
            knt += 1
            # integer division keeps the {:d} format specifier valid
            print('\r{:d}%'.format(100 * knt // length))
    else:
        dissolved = unary_union(in_df.geometry.values)
        df_out = pd.DataFrame([{'geometry': dissolved}])
    return df_out