def test_dumps_srid():
    """Plain dumps omits the SRID; passing srid=4326 embeds the EWKB SRID header."""
    point = Point(1.2, 3.4)
    encoded = dumps(point)
    assert bin2hex(encoded) == "0101000000333333333333F33F3333333333330B40"
    encoded = dumps(point, srid=4326)
    expected = "0101000020E6100000333333333333F33F3333333333330B40"
    assert bin2hex(encoded) == expected
def test_simple_post_existing(self):
    """POST to an existing row id overwrites that row; other rows stay unchanged."""
    # Create rows 1 and 2 up front so row 2 exists before the overwrite.
    self.test_simple_post_new()
    self.test_simple_post_new(rid=2)
    row = {
        'id': 2,
        'name': 'John Doe',
        'address': "John's Street",
        'geom': 'POINT(42.258729 -71.160281)'
    }
    self.check_api_post(
        '/api/v0/schema/{schema}/tables/{table}/rows/2'.format(
            schema=self.test_schema, table=self.test_table),
        data={'query': row})
    # The API serves geometries as hex-encoded WKB, so convert the WKT
    # before comparing with the GET result.
    row['geom'] = wkb.dumps(wkt.loads(row['geom']), hex=True)
    self.check_api_get(
        '/api/v0/schema/{schema}/tables/{table}/rows/2'.format(
            schema=self.test_schema, table=self.test_table),
        expected_result=row)
    # Check whether other rows remained unchanged
    row = {
        'id': 1,
        'name': 'Mary Doe',
        'address': "Mary's Street",
        'geom': 'POINT(-71.160281 42.258729)'
    }
    row['geom'] = wkb.dumps(wkt.loads(row['geom']), hex=True)
    self.check_api_get(
        '/api/v0/schema/{schema}/tables/{table}/rows/1'.format(
            schema=self.test_schema, table=self.test_table),
        expected_result=row)
def test_dumps_srid(some_point):
    """WKB output gains an embedded SRID header only when srid is passed."""
    plain = dumps(some_point)
    assert bin2hex(plain) == hostorder(
        "BIdd", "0101000000333333333333F33F3333333333330B40")
    with_srid = dumps(some_point, srid=4326)
    assert bin2hex(with_srid) == hostorder(
        "BIIdd", "0101000020E6100000333333333333F33F3333333333330B40")
def test_dumps_endianness(some_point):
    """Default output follows host byte order; big_endian forces a fixed order."""
    assert bin2hex(dumps(some_point)) == hostorder(
        "BIdd", "0101000000333333333333F33F3333333333330B40")
    little = dumps(some_point, big_endian=False)
    assert bin2hex(little) == "0101000000333333333333F33F3333333333330B40"
    big = dumps(some_point, big_endian=True)
    assert bin2hex(big) == "00000000013FF3333333333333400B333333333333"
def test_dumps_endianness():
    """Little-endian is the default; big_endian=True flips the whole encoding."""
    point = Point(1.2, 3.4)
    little_hex = "0101000000333333333333F33F3333333333330B40"
    assert bin2hex(dumps(point)) == little_hex
    assert bin2hex(dumps(point, big_endian=False)) == little_hex
    big_hex = "00000000013FF3333333333333400B333333333333"
    assert bin2hex(dumps(point, big_endian=True)) == big_hex
def test_wkb(self):
    """loads() must recover the geometry regardless of the dump byte order."""
    origin = Point(0.0, 0.0)
    for endian in (True, False):
        payload = wkb.dumps(origin, big_endian=endian)
        self.assertTrue(origin.equals(wkb.loads(payload)))
def _dumps(x, hex=True, srid=4326):
    """Serialise a geometry to (hex-encoded) WKB, or None on failure.

    Embeds the SRID only when the installed geopandas/shapely pair
    supports it (checked via versionCompare against 0.7.2 — presumably
    a minimum-version test; confirm against versionCompare's contract).
    """
    from shapely.wkb import dumps
    try:
        if versionCompare(gpd.__version__, '0.7.2'):
            return dumps(x, hex=hex, srid=srid)
        return dumps(x, hex=hex)
    except AttributeError:
        # Non-geometry input (no WKB interface): signal with None.
        return None
def test_simple_post_existing(self):
    """POST to an existing row id overwrites it and leaves other rows intact."""
    # Create rows 1 and 2 first so row 2 exists before the overwrite.
    self.test_simple_post_new()
    self.test_simple_post_new(rid=2)
    row = {
        'id': 2,
        'name': 'John Doe',
        'address': "John's Street",
        'geom': 'POINT(42.258729 -71.160281)'
    }
    response = self.__class__.client.post(
        '/api/v0/schema/{schema}/tables/{table}/rows/2'.format(
            schema=self.test_schema, table=self.test_table),
        data=json.dumps({'query': row}),
        HTTP_AUTHORIZATION='Token %s' % self.__class__.token,
        content_type='application/json')
    self.assertEqual(response.status_code, 200,
                     response.json().get('reason', 'No reason returned'))
    response = self.__class__.client.get(
        '/api/v0/schema/{schema}/tables/{table}/rows/2'.format(
            schema=self.test_schema, table=self.test_table))
    self.assertEqual(response.status_code, 200,
                     response.json().get('reason', 'No reason returned'))
    # The API serves geometries as hex WKB, so convert the WKT before comparing.
    row['geom'] = wkb.dumps(wkt.loads(row['geom']), hex=True)
    self.assertDictEqualKeywise(response.json(), row)
    # Check whether other rows remained unchanged
    row = {
        'id': 1,
        'name': 'Mary Doe',
        'address': "Mary's Street",
        'geom': 'POINT(-71.160281 42.258729)'
    }
    response = self.__class__.client.get(
        '/api/v0/schema/{schema}/tables/{table}/rows/1'.format(
            schema=self.test_schema, table=self.test_table))
    self.assertEqual(response.status_code, 200,
                     response.json().get('reason', 'No reason returned'))
    row['geom'] = wkb.dumps(wkt.loads(row['geom']), hex=True)
    self.assertDictEqualKeywise(response.json(), row)
def test_simple_post_new(self, rid=1):
    """POST a new row at id *rid* (default 1) and verify it reads back.

    Also reused by other tests as a fixture to seed rows.
    """
    row = {
        'id': rid,
        'name': 'Mary Doe',
        'address': "Mary's Street",
        'geom': 'POINT(-71.160281 42.258729)'
    }
    response = self.__class__.client.post(
        '/api/v0/schema/{schema}/tables/{table}/rows/new'.format(
            schema=self.test_schema, table=self.test_table),
        data=json.dumps({'query': row}),
        HTTP_AUTHORIZATION='Token %s' % self.__class__.token,
        content_type='application/json')
    # 201: row created.
    self.assertEqual(response.status_code, 201,
                     response.json().get('reason', 'No reason returned'))
    response = self.__class__.client.get(
        '/api/v0/schema/{schema}/tables/{table}/rows/{rid}'.format(
            schema=self.test_schema, table=self.test_table, rid=rid))
    self.assertEqual(response.status_code, 200,
                     response.json().get('reason', 'No reason returned'))
    # The API serves geometries as hex WKB, so convert before comparing.
    row['geom'] = wkb.dumps(wkt.loads(row['geom']), hex=True)
    self.assertDictEqualKeywise(response.json(), row)
def test_loads_srid():
    """An SRID embedded in EWKB is kept on load and re-exported only on request."""
    geom = loads(hex2bin("0101000020E6100000333333333333F33F3333333333330B40"))
    assert isinstance(geom, Point)
    assert geom.coords[:] == [(1.2, 3.4)]
    # Plain dumps drops the SRID header.
    assert bin2hex(dumps(geom)) == \
        "0101000000333333333333F33F3333333333330B40"
    # include_srid=True re-emits the geometry's stored SRID.
    assert bin2hex(dumps(geom, include_srid=True)) == \
        "0101000020E6100000333333333333F33F3333333333330B40"
    # An explicit srid argument overrides the stored one (27700 = 0x6C34 LE).
    assert bin2hex(dumps(geom, srid=27700)) == \
        "0101000020346C0000333333333333F33F3333333333330B40"
def make_valid(element, drop_z=True):
    """
    Attempt to make a geometry valid. Returns `None` if the geometry
    cannot be made valid.

    Example:
    .. code-block:: python

        p | beam.Map(geobeam.fn.make_valid) | beam.Map(geobeam.fn.filter_invalid)
    """
    from shapely.geometry import shape
    from shapely import validation, wkb

    props, raw_geom = element
    geom_obj = shape(raw_geom)

    if not geom_obj.is_valid:
        geom_obj = validation.make_valid(geom_obj)

    # Optionally flatten 3D geometries by round-tripping through 2D WKB.
    if drop_z and geom_obj.has_z:
        geom_obj = wkb.loads(wkb.dumps(geom_obj, output_dimension=2))

    if geom_obj is None:
        return None
    return (props, geom_obj.__geo_interface__)
def render_buildings(x1, y1, x2, y2): cursor = db.cursor() # fetch ground polygons, they will be extruded on the client cursor.execute( """ SELECT geometry FROM osm_buildings WHERE geometry && ST_Transform(ST_MakeEnvelope(%s, %s, %s, %s, 4326), 3857) """, (x1, y1, x2, y2)) p = Proj(init='EPSG:3857') px, py = p(x1, y2) px1, py1 = p(x1, y1) px2, py2 = p(x2, y2) bounding = box(px1, py1, px2, py2) # load the geometry, intersect with bounding box and translate to origin # to be able to tile load in the client later on polys = [] for item in cursor: geo = loads(item['geometry'], hex=True).intersection(bounding) geo = translate(geo, -px, -py, 0) polys.append(geo) # join everything into a collection and dump WKB to the response collection = GeometryCollection(polys) response = make_response(dumps(collection), 200) response.headers['Content-Type'] = 'application/octet-stream' return response
def write_locations_postgis(locations, name, con, schema=None, if_exists="fail",
                            index=True, index_label=None, chunksize=None,
                            dtype=None):
    """Write a locations frame to PostGIS, handling the extra 'extent' column.

    geopandas.to_postgis supports only one geometry column, so a secondary
    'extent' polygon column is serialised to hex EWKB by hand and given an
    explicit Geometry dtype.
    """
    # Assumes that "extent" is not the geometry column but center is.
    # May build additional check for that.
    if "extent" in locations.columns:
        # geopandas.to_postgis can only handle one geometry column -> do it manually
        srid = _get_srid_from_crs(locations)
        extent_schema = Geometry("POLYGON", srid)
        if dtype is None:
            dtype = {"extent": extent_schema}
        else:
            dtype["extent"] = extent_schema
        # Copy so the caller's frame keeps its shapely 'extent' objects.
        locations = locations.copy()
        locations["extent"] = locations["extent"].apply(
            lambda x: wkb.dumps(x, srid=srid, hex=True))
    locations.to_postgis(
        name,
        con,
        schema=schema,
        if_exists=if_exists,
        index=index,
        index_label=index_label,
        chunksize=chunksize,
        dtype=dtype,
    )
def from_shape(shape, srid=-1, extended=False):
    """
    Build a :class:`geoalchemy2.types.WKBElement` from a Shapely geometry.

    ``srid``
        Integer spatial reference system id, e.g. 4326. The default -1
        means no/unknown reference system.

    ``extended``
        When True produce EWKB (SRID embedded in the bytes) instead of
        plain WKB. Default is False.

    Example::

        from shapely.geometry import Point
        wkb_element = from_shape(Point(5, 45), srid=4326)
        ewkb_element = from_shape(Point(5, 45), srid=4326, extended=True)
    """
    # Only EWKB carries the SRID inside the serialised bytes.
    payload = dumps(shape, srid=srid if extended else None)
    return WKBElement(buffer(payload), srid=srid, extended=extended)
def np_to_wkb_point(np_point,in_server):
    """Cast a numpy coordinate array as a Point, then dump it as WKB.

    Python 2 code (print statement). hex=in_server: hex-encode the WKB
    when in_server is truthy.
    """
    from shapely import wkb
    from shapely.geometry import Point
    # Debug output of the raw input array.
    print 'np_point ',np_point
    pt = Point(np_point)
    return wkb.dumps(pt, hex=in_server)
def np_to_wkb_line(np_line,in_server):
    """Cast a numpy coordinate array as a LineString and dump it as WKB.

    The output is hex-encoded when in_server is truthy.
    """
    from shapely import wkb
    from shapely.geometry import LineString
    geom = LineString(np_line)
    return wkb.dumps(geom, hex=in_server)
def np_to_wkb_line(np_line, in_server):
    """Build a LineString from a numpy coordinate array and serialise to WKB.

    hex=in_server: hex-encode the result when in_server is truthy.
    """
    from shapely import wkb
    from shapely.geometry import LineString
    path = LineString(np_line)
    return wkb.dumps(path, hex=in_server)
def polySaveWkb(poly, wkbName):
    """Write the hex-encoded WKB of *poly* to the file *wkbName*.

    Fixes two defects in the original: the file handle was never closed on
    error (no context manager), and ``bytes.encode('hex')`` is Python-2
    only — ``binascii.hexlify`` produces the same hex text portably.
    """
    import binascii
    polyWkb = wkb.dumps(poly)
    with open(wkbName, 'w') as wkbFile:
        wkbFile.write(binascii.hexlify(polyWkb).decode('ascii'))
def np_to_wkb_point(np_point, in_server):
    """Cast a numpy coordinate array as a Point, then dump it as WKB.

    Python 2 code (print statement). The WKB is hex-encoded when
    in_server is truthy.
    """
    from shapely import wkb
    from shapely.geometry import Point
    # Debug output of the raw input array.
    print 'np_point ', np_point
    pt = Point(np_point)
    return wkb.dumps(pt, hex=in_server)
def _encode_geom(geom):
    """Encode geometries into hex-encoded wkb

    Falsy inputs (None, or an empty geometry) map to None.
    """
    from shapely import wkb
    if not geom:
        return None
    return ba.hexlify(wkb.dumps(geom)).decode()
def test_load_dataframe_w_wkb(bigquery_client, dataset_id):
    """Loading WKB bytes into a GEOGRAPHY column must round-trip as WKT."""
    wkt = pytest.importorskip("shapely.wkt")
    from shapely import wkb
    from google.cloud.bigquery.schema import SchemaField

    df = pandas.DataFrame(
        dict(name=["foo", "bar"], geo=[None, wkb.dumps(wkt.loads("Point(1 1)"))])
    )
    table_id = f"{dataset_id}.lake_from_wkb"
    # We create the table first, to inform the interpretation of the wkb data
    bigquery_client.query(
        f"create table {table_id} (name string, geo GEOGRAPHY)"
    ).result()
    bigquery_client.load_table_from_dataframe(df, table_id).result()
    table = bigquery_client.get_table(table_id)
    assert table.schema == [
        SchemaField("name", "STRING", "NULLABLE"),
        SchemaField("geo", "GEOGRAPHY", "NULLABLE"),
    ]
    # Sorted so row order from BigQuery does not matter.
    assert sorted(map(list, bigquery_client.list_rows(table_id))) == [
        ["bar", "POINT(1 1)"],
        ["foo", None],
    ]
def way(self, w):
    """Handle an OSM way: write it to every output for each matching LINE theme.

    Untagged ways are skipped; closed ways whose tags make them polygons
    are left for area(). The geometry is built lazily on the first
    matching theme and reused for the rest.
    """
    if len(w.tags) == 0:
        return
    if w.is_closed() and closed_way_is_polygon(
            w.tags):  # this will be handled in area()
        return
    try:
        # NOTE: it is possible this is actually a MultiLineString
        # in the case where a LineString is clipped by the clipping geom,
        # or the way is self-intersecting
        # but GDAL and QGIS seem to handle it OK.
        linestring = None
        for theme in self.mapping.themes:
            if theme.matches(GeomType.LINE, w.tags):
                if not linestring:
                    wkb = fab.create_linestring(w)
                    if self.clipping_geom:
                        sg = loads(bytes.fromhex(wkb))
                        # Fully outside the clip region: drop the way entirely.
                        if not self.prepared_clipping_geom.intersects(sg):
                            return
                        # Partially inside: clip to the clipping geometry.
                        if not self.prepared_clipping_geom.contains_properly(
                                sg):
                            sg = self.clipping_geom.intersection(sg)
                        linestring = ogr.CreateGeometryFromWkb(dumps(sg))
                    else:
                        linestring = create_geom(wkb)
                for output in self.outputs:
                    output.write(w.id, theme.name, GeomType.LINE, linestring,
                                 w.tags)
    except RuntimeError:
        # Geometry construction can fail on ways with missing nodes.
        print("Incomplete way: {0}".format(w.id))
def test_simple_post_new(self, rid=1):
    """POST a new row at id *rid* and verify it reads back (geometry as hex WKB)."""
    row = {
        "id": rid,
        "name": "Mary Doe",
        "address": "Mary's Street",
        "geom": "POINT(-71.160281 42.258729)",
    }
    response = self.__class__.client.post(
        "/api/v0/schema/{schema}/tables/{table}/rows/new".format(
            schema=self.test_schema, table=self.test_table),
        data=json.dumps({"query": row}),
        HTTP_AUTHORIZATION="Token %s" % self.__class__.token,
        content_type="application/json",
    )
    # 201: row created.
    # NOTE(review): this assertion reads the body via load_content_as_json()
    # while the one below uses response.json() — presumably equivalent;
    # confirm and unify.
    self.assertEqual(
        response.status_code,
        201,
        load_content_as_json(response).get("reason", "No reason returned"),
    )
    response = self.__class__.client.get(
        "/api/v0/schema/{schema}/tables/{table}/rows/{rid}".format(
            schema=self.test_schema, table=self.test_table, rid=rid))
    self.assertEqual(
        response.status_code,
        200,
        response.json().get("reason", "No reason returned"),
    )
    # The API serves geometries as hex WKB, so convert before comparing.
    row["geom"] = wkb.dumps(wkt.loads(row["geom"]), hex=True)
    self.assertDictEqualKeywise(response.json(), row)
def area(self, a):
    """Handle an OSM area: write it to every output for each matching POLYGON theme.

    Areas built from ways keep a positive id; others get a negated id to
    avoid collisions (presumably relations — confirm against the osmium
    area handler docs). The multipolygon is built lazily on the first
    matching theme and reused.
    """
    if len(a.tags) == 0:
        return
    if not closed_way_is_polygon(a.tags):
        return
    osm_id = a.orig_id() if a.from_way() else -a.orig_id()
    try:
        geom_type = GeomType.POLYGON
        multipolygon = None
        for theme in self.mapping.themes:
            if theme.matches(GeomType.POLYGON, a.tags):
                if not multipolygon:
                    wkb = fab.create_multipolygon(a)
                    if self.clipping_geom:
                        sg = loads(bytes.fromhex(wkb))
                        # Fully outside the clip region: drop the area.
                        if not self.prepared_clipping_geom.intersects(sg):
                            return
                        # Partially inside: clip to the clipping geometry.
                        if not self.prepared_clipping_geom.contains_properly(
                                sg):
                            sg = self.clipping_geom.intersection(sg)
                        multipolygon = ogr.CreateGeometryFromWkb(dumps(sg))
                    else:
                        multipolygon = create_geom(wkb)
                geom = multipolygon
                # Optionally collapse polygons to centroid points.
                if self.polygon_centroid is True:
                    geom = multipolygon.Centroid()
                    geom_type = GeomType.POINT
                for output in self.outputs:
                    output.write(osm_id, theme.name, geom_type, geom, a.tags)
    except RuntimeError:
        print('Invalid area: {0}'.format(a.orig_id()))
def getxy_proj(df: pd.DataFrame, proj_epsg: int = None, city: str = None,
               new_x: str = 'x', new_y: str = 'y', curr_epsg: int = 4326):
    """
    Project the data by city name or target EPSG code and attach the
    projected centroid x/y coordinates; the original geometry column is
    re-serialised in its original CRS, so the stored data is unchanged.

    :param df: data with geographic info; must have either a 'geometry'
        column or lng/lat columns
    :param proj_epsg: EPSG code of the projection/CRS to transform to
    :param city: city name used to pick a projection
    :param new_x: column name for the projected x coordinate
    :param new_y: column name for the projected y coordinate
    :param curr_epsg: EPSG code of the data's current CRS
    :return: frame with new_x/new_y columns and geometry as hex WKB
    """
    if 'geometry' in df.columns:
        gdf = geom_wkb2gpd(df, epsg_code=curr_epsg)
    else:
        gdf = point_to_geo(df, epsg_code=curr_epsg)
    gdf = projection(gdf, proj_epsg=proj_epsg, city=city)
    gdf[new_x] = gdf.centroid.x
    gdf[new_y] = gdf.centroid.y
    # Project back to the original CRS and re-serialise geometry as hex WKB.
    gdf = projection(gdf, proj_epsg=curr_epsg)
    gdf['geometry'] = gdf['geometry'].apply(lambda x: wkb.dumps(x, hex=True, srid=curr_epsg))
    return gdf
def reproject(geom, transform):
    '''
    Reproject a `shapely` geometry using an instance of
    `osr.CoordinateTransformation`.

    ARGUMENTS:
    ---------
    geom (shapely.geometry.base.BaseGeometry): The geometry to reproject.
    transform (osr.CoordinateTransformation): The coordinate transformation
        to apply; its source CRS must match that of the input geometry.

    RETURNS:
    -------
    shapely.geometry.base.BaseGeometry: The reprojected geometry.
    '''
    # Shapely -> OGR via WKB, transform in place, then OGR -> Shapely.
    ogr_shape = ogr.CreateGeometryFromWkb(wkb.dumps(geom))
    ogr_shape.Transform(transform)
    return wkb.loads(ogr_shape.ExportToWkb())
def save(datasource, filename):
    """ Save a Datasource instance to a named OGR datasource.

    Python 2 code (print statement, iterator .next()). The output driver
    is chosen by file extension; each shape is rebuilt by polygonizing its
    surviving boundary segments.
    """
    ext = splitext(filename)[1]
    out_driver = ogr.GetDriverByName(drivers.get(ext))
    out_source = out_driver.CreateDataSource(filename)
    if out_source is None:
        raise Exception('Failed creation of %s - is there one already?' %
                        filename)
    out_layer = out_source.CreateLayer('default', datasource.srs,
                                       ogr.wkbMultiPolygon)
    for field in datasource.fields:
        field_defn = ogr.FieldDefn(field.name, field.type)
        field_defn.SetWidth(field.width)
        out_layer.CreateField(field_defn)
    for i in datasource._indexes():
        # Collect the not-yet-removed boundary segments touching shape i.
        segments = datasource.db.execute(
            """SELECT x1, y1, x2, y2 FROM segments
                         WHERE (src1_id = ? OR src2_id = ?)
                           AND removed = 0""", (i, i))
        lines = [
            datasource.memo_line(x1, y1, x2, y2)
            for (x1, y1, x2, y2) in segments
        ]
        try:
            # Python 2 iterator protocol: take the first polygon produced.
            poly = polygonize(lines).next()
        except StopIteration:
            lost_area = datasource.shapes[i].area
            lost_portion = lost_area / (datasource.tolerance**2)
            if lost_portion < 4:
                # It's just small.
                print >> stderr, 'Skipped small feature #%(i)d' % locals()
                continue
            # This is a bug we don't understand yet.
            raise Exception(
                'Failed to get a meaningful polygon out of large feature #%(i)d'
                % locals())
        feat = ogr.Feature(out_layer.GetLayerDefn())
        for (j, field) in enumerate(datasource.fields):
            feat.SetField(field.name, datasource.values[i][j])
        geom = ogr.CreateGeometryFromWkb(dumps(poly))
        feat.SetGeometry(geom)
        out_layer.CreateFeature(feat)
def load_test_case(case_file):
    """Load a test-case JSON, generating and caching shapes/altitudes on first use.

    On the first run (no 'shapes' key) the raster is vectorized and the
    results are written to gen/shapes/ and recorded back into the case
    file; later runs load the cached files instead.
    """
    test_dict = json.load(open(case_file))
    path, pro = read_init_path(test_dict['path'])
    tif, tif_proj = read_tif(test_dict['tif'])
    if "shapes" not in test_dict:
        test_dict['shapes'] = "gen/shapes/{0}.shapes".format(
            splitext(case_file)[0])
        test_dict['alts'] = "gen/shapes/{0}.alt.json".format(
            splitext(case_file)[0])
        # Persist the new cache paths into the case file itself.
        save_test_case(case_file, test_dict)
        vecs = vectorize_raster(test_dict['tif'])
        shapes, alt = shapelify_vector(vecs, test_dict['proj'])
        # Cache the shapes as binary WKB and the altitudes as JSON.
        binary = dumps(MultiPolygon(shapes))
        with open(test_dict['shapes'], "wb") as wkb_file:
            wkb_file.write(binary)
        with open(test_dict['alts'], "w") as alt_dict_file:
            json.dump(alt, alt_dict_file)
    else:
        shapes = load_shapefile(test_dict['shapes'])
        alt = load_altfile(test_dict['alts'])
    return path, alt, shapes, tif, pro, tif_proj, test_dict
def test_put_geometry_wtb(self):
    """PUT a row whose geometry is supplied directly as hex WKB and read it back."""
    row = {
        "id": 1,
        "name": "Mary Doe",
        "address": "Mary's Street",
        # Geometry is sent already hex-WKB-encoded, unlike the WKT tests.
        "geom": wkb.dumps(wkt.loads("POINT(-71.160281 42.258729)"), hex=True),
    }
    response = self.__class__.client.put(
        "/api/v0/schema/{schema}/tables/{table}/rows/1".format(
            schema=self.test_schema, table=self.test_table),
        data=json.dumps({"query": row}),
        HTTP_AUTHORIZATION="Token %s" % self.__class__.token,
        content_type="application/json",
    )
    # 201: the PUT created the row.
    self.assertEqual(
        response.status_code,
        201,
        response.json().get("reason", "No reason returned"),
    )
    response = self.__class__.client.get(
        "/api/v0/schema/{schema}/tables/{table}/rows/1".format(
            schema=self.test_schema, table=self.test_table))
    self.assertEqual(
        response.status_code,
        200,
        response.json().get("reason", "No reason returned"),
    )
    self.assertDictEqualKeywise(response.json(), row)
def geo_centroid(df, geom):
    """Attach centroid lng/lat columns; the geometry column stays hex WKB."""
    # Decode hex WKB so geopandas can compute centroids.
    df[geom] = df[geom].apply(lambda wkb_hex: loads(wkb_hex, hex=True))
    frame = gpd.GeoDataFrame(df, crs='epsg:4326')
    frame["lng"] = frame.centroid.x
    frame["lat"] = frame.centroid.y
    # Re-encode so callers get the same serialised representation back.
    frame[geom] = frame[geom].apply(lambda shp: dumps(shp, hex=True))
    return frame
def objectwise_f1_score(gt: List[Polygon], pred, format, iou=0.5, v: bool = True):
    """
    Measures objectwise f1-score for two sets of polygons.
    The algorithm description can be found on
    https://medium.com/the-downlinq/the-spacenet-metric-612183cc2ddb
    If the format = 'point' True Positive counts when the prediction point
    lies within the polygon of GT
    :param gt: list of shapely Polygons, represents ground truth;
    :param pred: list of shapely Polygons or Points (according to the
        'format' param, represents prediction;
    :param format: 'vector' or 'point', means format of prediction and
        corresponding variant of algorithm;
    :param v: is_verbose
    :return: float, f1-score and string, log
    """
    log = ''
    groundtruth_rtree_index = rtree.index.Index()
    # for some reason builtin pickling doesn't work
    # The WKB is stored as the rtree entry payload so matchers can rebuild
    # the geometry from the index alone.
    for i, polygon in enumerate(gt):
        groundtruth_rtree_index.insert(i, polygon.bounds, dumps(polygon))
    if format == 'vector':
        tp = sum(
            map(_has_match_rtree, (dumps(polygon) for polygon in pred),
                [iou] * len(pred), [groundtruth_rtree_index] * len(pred)))
    else:  # format = 'point'
        tp = sum(
            map(_lies_within_rtree, (point for point in pred),
                [groundtruth_rtree_index] * len(pred)))
    # Every unmatched prediction is a false positive; every unmatched GT
    # polygon is a false negative.
    fp = len(pred) - tp
    fn = len(gt) - tp
    # to avoid zero-division
    if tp == 0:
        f1 = 0.
    else:
        precision = tp / (tp + fp)
        recall = tp / (tp + fn)
        f1 = 2 * (precision * recall) / (precision + recall)
    if v:
        log += 'True Positive = ' + str(tp) + ', False Negative = ' + str(
            fn) + ', False Positive = ' + str(fp) + '\n'
    return f1, log
def geom_wkt2wkb(df, geometry='geometry', epsg_code: int = 4326):
    """Convert a WKT geometry column to hex-encoded EWKB with the given SRID.

    (The original comment said WKB -> WKT; the code actually loads WKT and
    dumps WKB.)
    """
    from shapely import wkb
    from shapely import wkt
    df[geometry] = df[geometry].apply(lambda x: wkt.loads(x))
    df = gpd.GeoDataFrame(df, geometry=geometry, crs=epsg_code)
    df[geometry] = df[geometry].apply(lambda x: wkb.dumps(x, hex=True, srid=epsg_code))
    return df
def transform_feature_layers_shape(
        feature_layers, format, scale, unpadded_bounds, meters_per_pixel_dim,
        buffer_cfg):
    """Apply per-format coordinate transforms and clipping to feature layers.

    Returns new feature-layer dicts whose features are (geom, props, id)
    tuples; geom stays a shapely shape when the format supports it and is
    serialised to WKB otherwise.
    """
    if format in (json_format, topojson_format):
        # Lon/lat output formats get unprojected coordinates.
        transform_fn = apply_to_all_coords(mercator_point_to_lnglat)
    elif format == vtm_format:
        transform_fn = apply_to_all_coords(
            rescale_point(unpadded_bounds, scale))
    else:
        # mvt and unknown formats get no geometry transformation
        transform_fn = _noop

    # shape_unpadded_bounds = geometry.box(*unpadded_bounds)

    transformed_feature_layers = []
    for feature_layer in feature_layers:
        layer_name = feature_layer['name']
        transformed_features = []
        layer_datum = feature_layer['layer_datum']
        is_clipped = layer_datum['is_clipped']
        clip_factor = layer_datum.get('clip_factor', 1.0)

        for shape, props, feature_id in feature_layer['features']:
            # Empty shapes and bare collections cannot be rendered.
            if shape.is_empty or shape.type == 'GeometryCollection':
                continue

            # Bounds are padded per format/layer/geometry type.
            buffer_padded_bounds = calc_buffered_bounds(
                format, unpadded_bounds, meters_per_pixel_dim, layer_name,
                shape.type, buffer_cfg)

            shape = _clip_shape(
                shape, buffer_padded_bounds, is_clipped, clip_factor)
            if shape is None or shape.is_empty:
                continue

            # perform the format specific geometry transformations
            shape = transform_fn(shape)

            if format.supports_shapely_geometry:
                geom = shape
            else:
                geom = dumps(shape)

            transformed_features.append((geom, props, feature_id))

        transformed_feature_layer = dict(
            name=feature_layer['name'],
            features=transformed_features,
            layer_datum=layer_datum,
        )
        transformed_feature_layers.append(transformed_feature_layer)

    return transformed_feature_layers
def test_wkb_dumps_endianness(self):
    """The WKB byte-order flag and the coordinate doubles follow big_endian."""
    p = Point(0.5, 2.0)
    wkb_big_endian = wkb.dumps(p, big_endian=True)
    wkb_little_endian = wkb.dumps(p, big_endian=False)
    self.assertNotEqual(wkb_big_endian, wkb_little_endian)
    # According to WKB specification in section 3.3 of OpenGIS
    # Simple Features Specification for SQL, revision 1.1, the
    # first byte of a WKB representation indicates byte order.
    # Big-endian is 0, little-endian is 1.
    self.assertEqual(wkb_big_endian[0], self._byte(0))
    self.assertEqual(wkb_little_endian[0], self._byte(1))
    # Check that the doubles (0.5, 2.0) are in correct byte order:
    # the last two doubles of a point record are its x and y.
    double_size = struct.calcsize('d')
    self.assertEqual(wkb_big_endian[(-2 * double_size):],
                     struct.pack('>2d', p.x, p.y))
    self.assertEqual(wkb_little_endian[(-2 * double_size):],
                     struct.pack('<2d', p.x, p.y))
def save(datasource, filename):
    """ Save a Datasource instance to a named OGR datasource.

    Python 2 code (print statement, iterator .next()). The output driver
    is picked from the filename extension; each shape is rebuilt by
    polygonizing its surviving boundary segments.
    """
    ext = splitext(filename)[1]
    out_driver = ogr.GetDriverByName(drivers.get(ext))
    out_source = out_driver.CreateDataSource(filename)
    if out_source is None:
        raise Exception('Failed creation of %s - is there one already?' % filename)
    out_layer = out_source.CreateLayer('default', datasource.srs, ogr.wkbMultiPolygon)
    for field in datasource.fields:
        field_defn = ogr.FieldDefn(field.name, field.type)
        field_defn.SetWidth(field.width)
        out_layer.CreateField(field_defn)
    for i in datasource._indexes():
        # Collect the not-yet-removed boundary segments touching shape i.
        segments = datasource.db.execute("""SELECT x1, y1, x2, y2
                                            FROM segments
                                            WHERE (src1_id = ? OR src2_id = ?)
                                              AND removed = 0""", (i, i))
        lines = [datasource.memo_line(x1, y1, x2, y2) for (x1, y1, x2, y2) in segments]
        try:
            # Python 2 iterator protocol: take the first polygon produced.
            poly = polygonize(lines).next()
        except StopIteration:
            lost_area = datasource.shapes[i].area
            lost_portion = lost_area / (datasource.tolerance ** 2)
            if lost_portion < 4:
                # It's just small.
                print >> stderr, 'Skipped small feature #%(i)d' % locals()
                continue
            # This is a bug we don't understand yet.
            raise Exception('Failed to get a meaningful polygon out of large feature #%(i)d' % locals())
        feat = ogr.Feature(out_layer.GetLayerDefn())
        for (j, field) in enumerate(datasource.fields):
            feat.SetField(field.name, datasource.values[i][j])
        geom = ogr.CreateGeometryFromWkb(dumps(poly))
        feat.SetGeometry(geom)
        out_layer.CreateFeature(feat)
def np_to_wkb(np_array,in_server):
    """Dump a numpy coordinate array as (hex-)WKB.

    A 1-D array becomes a Point; a 2-D array becomes a MultiPoint. The
    output is hex-encoded when in_server is truthy.

    Uses the Point/MultiPoint constructors instead of the deprecated
    asPoint/asMultiPoint array adapters, which were removed in Shapely 2.0.
    """
    from shapely import wkb
    np_array = np.array(np_array)
    if len(np_array.shape) <= 1:
        from shapely.geometry import Point
        np_shapely = Point(np_array)
    else:
        from shapely.geometry import MultiPoint
        np_shapely = MultiPoint(np_array)
    return wkb.dumps(np_shapely, hex=in_server)
def writeShp(self, filePath):
    """Write the collected geometries and attributes to an ESRI shapefile,
    deleting any existing datasource at filePath first."""
    driver = ogr.GetDriverByName("ESRI Shapefile")
    self.logger.debug('File %s exitst? %s' % (filePath, os.path.exists(filePath)))
    if os.path.exists(filePath):
        driver.DeleteDataSource(filePath)
        self.logger.info('Deleting %s... ' % (filePath))
    source = driver.CreateDataSource(filePath)
    # The layer is named after the file (sans extension).
    layerName = os.path.splitext(os.path.basename(filePath))[0]
    self.logger.debug('Layername: %s' % layerName)
    self.logger.debug('SRS: %s' % self.srs)
    self.logger.debug('GeomType: %s' % GeomTypesOgr[self.geometryType].value)
    layer = source.CreateLayer(layerName, self.srs,
                               GeomTypesOgr[self.geometryType].value)
    for fieldName, fieldType in self.fields.items():
        field = ogr.FieldDefn(fieldName, fieldType)
        if fieldType == ogr.OFTString:
            # Shapefile string fields need an explicit width.
            field.SetWidth(80)
        layer.CreateField(field)
    for fid, geometry in self.geometries.items():
        feature = ogr.Feature(layer.GetLayerDefn())
        # Shapely -> WKB -> OGR geometry conversion.
        geom = ogr.CreateGeometryFromWkb(wkb.dumps(geometry.geom))
        feature.SetGeometry(geom)
        self.logger.debug('Feature ID (Geometry): %s' % type(fid))
        feature.SetFID(int(fid))
        #feature.SetField('FID',ogr.OFTInteger)
        self.logger.debug('Feature ID (OGR): %i' % feature.GetFID())
        for attributeName, attributeValue in geometry.attributes.items():
            self.logger.debug('Attribute: %s Value:%s' % (attributeName, attributeValue))
            feature.SetField(attributeName, attributeValue)
        layer.CreateFeature(feature)
        feature.Destroy()
    # Dropping the datasource reference flushes and closes the file.
    del source
def skeleton_medials_from_postgis(connection, way):
    """Return (straight skeleton, medial axis) of *way* as computed by PostGIS.

    connection must be a psycopg2 connection and way must be a shapely
    geometry; it is shipped to the server as WKB.
    """
    payload = wkb.dumps(way)
    cursor = connection.cursor()
    cursor.execute("SELECT ST_StraightSkeleton(%s), ST_ApproximateMedialAxis(%s);",
                   (payload, payload))
    skel, medials = [decode(column) for column in cursor.fetchone()]
    cursor.close()
    # medial comes as a series of segments, convert to lines as much as possible
    medials = shapely.ops.linemerge(medials)
    # linemerge() returns a single LineString if it can, but the rest of the algos
    # work with MultiLineString
    if type(medials) == LineString:
        medials = MultiLineString([medials])
    return skel, medials
def get_features(dbinfo, query, geometry_types, transform_fn, sort_fn, zoom,
                 n_try=1):
    """Run *query* and build (wkb, props, id) feature tuples from the rows.

    Python 2 code (print statement). Retries the whole fetch up to 5
    times on TransactionRollbackError, then re-raises.
    """
    features = []
    with Connection(dbinfo) as db:
        try:
            db.execute(query)
        except TransactionRollbackError:
            if n_try >= 5:
                print 'TransactionRollbackError occurred 5 times'
                raise
            else:
                # Retry recursively with an incremented attempt counter.
                return get_features(dbinfo, query, geometry_types,
                                    transform_fn, sort_fn, zoom,
                                    n_try=n_try + 1)
        for row in db.fetchall():
            assert '__geometry__' in row, 'Missing __geometry__ in feature result'
            assert '__id__' in row, 'Missing __id__ in feature result'
            wkb = bytes(row.pop('__geometry__'))
            id = row.pop('__id__')
            shape = loads(wkb)
            if geometry_types is not None:
                # Skip rows whose geometry type is not wanted.
                if shape.type not in geometry_types:
                    #print 'found %s which is not in: %s' % (geom_type, geometry_types)
                    continue
            # Drop NULL columns from the feature properties.
            props = dict((k, v) for k, v in row.items() if v is not None)
            if transform_fn:
                shape, props, id = transform_fn(shape, props, id, zoom)
                # Re-serialise since the transform may have changed the shape.
                wkb = dumps(shape)
            features.append((wkb, props, id))
    if sort_fn:
        features = sort_fn(features, zoom)
    return features
def to_wkb(self):
    """Return this geometry serialised as binary WKB via shapely."""
    return wkb.dumps(self)
pols.append(subgeom) return pols return [] layer = processing.getobject(input) provider = layer.dataProvider() fields = provider.fields() buffer_dist = distance / 2 inFeat = QgsFeature() inGeom = QgsGeometry() outFeat = QgsFeature() writer = VectorWriter(output, None, fields, provider.geometryType(), layer.crs()) feats = processing.getfeatures(layer) for inFeat in feats: inGeom = inFeat.geometry() if not inGeom is None: poly = loads(inGeom.asWkb()) buff = buffer(poly, -buffer_dist) buff = buffer(buff, buffer_dist) pols = extract_pols(buff) for pol in pols: outGeom = QgsGeometry() outGeom.fromWkb(dumps(pol)) outFeat.setGeometry(outGeom) writer.addFeature(outFeat) del writer
# -*- coding: utf-8 -*- import os from shapely.geometry import Point Point(0,0).wkt # 下面这个,在Python 3中执行有问题。 # Point(0,0).wkb.encode('hex') from shapely.wkb import dumps,loads wkb = dumps(Point(0,0)) # print(wkb.encode('hex')) loads(wkb).wkt wkt = dumps(Point(0,0)) print(wkt) loads(wkt).wkt def Test(): assert True
def to_geometry(shp, copy=False, proj=None):
    """Convert shp to a ogr.Geometry.

    Parameters
    ----------
    shp:  ogr.Geometry, ogr.Feature, or shapely.BaseGeometry
        The shape you want to convert
    copy: boolean (default=False)
        Return a copy of the shape instead of a reference
    proj: str or osr.SpatialReference (default=None)
        The projection of the shape to define (if the shape is not
        projection aware), or transform to (if projection aware). If a
        string is provided, it assumes that it is in PROJ4.

    Returns
    -------
    ogr.Geometry

    Python 2 code (`unicode` check).
    """
    target_proj = None
    source_proj = None
    # Check shape type
    if isinstance(shp, ogr.Geometry):
        geom = shp
    elif isinstance(shp, ogr.Feature):
        geom = shp.geometry()
    elif isinstance(shp, BaseGeometry):
        # Shapely -> OGR via WKB.
        geom = ogr.CreateGeometryFromWkb(wkb.dumps(shp))
    else:
        raise ValueError("Unable to convert to ogr.Geometry object")
    # Check projection
    if isinstance(proj, str) or isinstance(proj, unicode):
        target_proj = SpatialReference()
        target_proj.ImportFromProj4(proj)
    elif isinstance(proj, SpatialReference):
        target_proj = proj
    elif proj is None:
        target_proj = geom.GetSpatialReference()
        if target_proj is None:
            raise ValueError("shp does not have a SpatialReference")
    else:
        raise ValueError("Unable to set projction.")
    # Return shapely
    if isinstance(shp, BaseGeometry):
        # BUG FIX: assign the parsed SpatialReference object, not the raw
        # `proj` argument (which may be a PROJ4 string).
        geom.AssignSpatialReference(target_proj)
        return geom
    if copy:
        geom = geom.Clone()
    if proj is not None:
        source_proj = geom.GetSpatialReference()
        if source_proj is None:
            raise ValueError("shp does not have a SpatialReference")
        ct = CoordinateTransformation(source_proj, target_proj)
        geom.Transform(ct)
    geom.AssignSpatialReference(target_proj)
    return geom
# Export each unique route (VUORO_LISA) from the shapefile reader `sf` into
# the database, batching inserts every 1000 records. Python 2 code (print
# statement, bytes.encode('hex')); sf, known_vuorot, execlist, curs, conn
# and linjaukset_sql are defined earlier in the file.
headers = [f[0] for f in sf.fields[1:]]
i = 0
for sr in sf.iterShapeRecords():
    r = dict(zip(headers,sr.record))
    # Skip duplicate routes.
    if r['VUORO_LISA'] in known_vuorot:
        continue
    known_vuorot.add(r['VUORO_LISA'])
    #x,y = map(lambda v: round(v,1),sr.shape.points[0])
    # Simplify the route geometry (2.0 units tolerance) before storing.
    ls = LineString(sr.shape.points)
    ls = ls.simplify(2.0)
    r['geomwkb'] = wkb.dumps(ls).encode('hex')
    #curs.execute('INSERT INTO pysakkiketjut (vuoro_lisa,vuoro_pys,jarj_nro,saapumisa,lahtoaika,aikapiste,etaisyys,pysakki_gid) VALUES (%(VUORO_LISA)s,%(VUORO_PYS)s,%(JARJ_NRO)s,%(SAAPUMISA)s,%(LAHTOAIKA)s,%(AIKAPISTE)s,%(ETAISYYS)s,%(PYSAKKI_ID)s)',r)
    execlist.append(r)
    # Flush a batch every 1000 unique records (also fires at i == 0).
    if i % 1000 == 0:
        curs.executemany(linjaukset_sql,execlist)
        execlist = []
        print i
    i+=1
# Flush the final partial batch and commit.
curs.executemany(linjaukset_sql,execlist)
curs.close()
conn.commit()
curs = conn.cursor()
#plpy.notice(theta) ; #computing the position of the _center # E + ( EF + EG )/(norm(EF+EG)) * norm(ET1)/cos(theta) _center = _e + (_ef+_eg) / (np.linalg.norm(_ef+_eg) ) * np.linalg.norm(_t1-_e) / (np.dot(_ef,_eg)/(np.linalg.norm(_ef)*np.linalg.norm(_eg)) ); #plpy.notice(_center) ; t2_g = _e + ((_eg)/np.linalg.norm(_eg)) * np.linalg.norm(_t1-_e) ; t2_f = _e + ((_ef)/np.linalg.norm(_ef)) * np.linalg.norm(_t1-_e) ; if np.linalg.norm(t2_g - _t1) < np.linalg.norm(t2_f - _t1) : t2__ = t2_f ; else : t2__ = t2_g ; center = wkb.dumps(asPoint(_center), hex=in_server) ; radius = _radius ; t1 = _t1 ; t2 = wkb.dumps(asPoint(t2__), hex=in_server) ; #plpy.notice(t2) ; if in_server != True : print( center , radius , t1 , t2) else : return [center,radius, t1,t2 ]; #return { "_center": _center, "_radius": _radius , "t1": t1, "t2":t2} #return { "_center": _center, "_radius": _radius , "t1": t1, "t2":t2} #$$ LANGUAGE plpythonu;
def test_wkb(self):
    """A dumps/loads WKB round trip must reproduce the original point."""
    source = Point(0.0, 0.0)
    restored = wkb.loads(wkb.dumps(source))
    self.assertTrue(restored.equals(source))
def geojson_to_wkb(geojson):
    """Convert a GeoJSON-like mapping to WKB bytes.

    Fixes the original's redundant second call to geometry.shape(): the
    first result was assigned and then ignored.
    """
    shp = geometry.shape(geojson)
    return wkb.dumps(shp)
def transform_feature_layers_shape(
        feature_layers, format, scale, unpadded_bounds, coord,
        meters_per_pixel_dim, buffer_cfg):
    """Apply the format-specific coordinate transform (and optional
    per-layer clipping) to every feature of every layer.

    Returns a new list of feature-layer dicts whose geometries have
    been filtered to the buffered tile bounds, clipped where the layer
    requests it, transformed for the output format, and serialized to
    WKB when the format does not accept shapely geometries directly.
    """
    # Pick the coordinate transform appropriate for the output format.
    if format in (json_format, topojson_format):
        to_output_coords = apply_to_all_coords(mercator_point_to_lnglat)
    elif format == vtm_format:
        to_output_coords = apply_to_all_coords(
            rescale_point(unpadded_bounds, scale))
    else:
        # mvt and unknown formats get no geometry transformation
        to_output_coords = _noop

    result = []
    for layer in feature_layers:
        name = layer['name']
        datum = layer['layer_datum']
        should_clip = datum['is_clipped']
        clip_factor = datum.get('clip_factor', 1.0)

        kept = []
        for shape, props, feature_id in layer['features']:
            # Drop empty shapes and bare geometry collections.
            if shape.is_empty or shape.type == 'GeometryCollection':
                continue

            padded_bounds = calc_buffered_bounds(
                format, unpadded_bounds, meters_per_pixel_dim, name,
                shape.type, buffer_cfg)
            # Skip features entirely outside the buffered tile bounds.
            if not geometry.box(*padded_bounds).intersects(shape):
                continue

            if should_clip:
                # The geometry should be included, but clipped to the
                # layer-specific padded bounds.
                shape = shape.intersection(
                    calculate_padded_bounds(clip_factor, padded_bounds))

            # Perform the format-specific geometry transformation.
            shape = to_output_coords(shape)
            geom = shape if format.supports_shapely_geometry \
                else dumps(shape)
            kept.append((geom, props, feature_id))

        result.append(dict(
            name=name,
            features=kept,
            layer_datum=datum,
        ))

    return result
# Python 2 benchmark fragment: parse the WKT passed as argv[1] and
# serialize it to WKB num_runs times with three libraries (GDAL/OGR,
# Shapely, Mapnik), printing wall time for each to stderr.
# Relies on num_runs / imports defined earlier in the file.
print "GDAL/OGR ...."
ogr.UseExceptions()
gdal.ErrorReset()
start = time.clock()
for i in range(num_runs):
    geometry = ogr.CreateGeometryFromWkt(sys.argv[1])
    wkb = geometry.ExportToWkb()
    # Explicit Destroy() was required by old GDAL bindings.
    geometry.Destroy()
elapsed = (time.clock() - start)
print>>sys.stderr,"elapsed=",elapsed
print "Shapely ...."
start = time.clock()
for i in range(num_runs):
    geometry = loads(sys.argv[1])
    wkb = dumps(geometry)
elapsed = (time.clock() - start)
print>>sys.stderr,"elapsed=",elapsed
print "Mapnik ...."
start = time.clock()
# NOTE(review): reader construction is inside the timed region, so the
# Mapnik figure includes one-off setup cost -- presumably intentional.
reader = mapnik.WKTReader()
for i in range(num_runs):
    #geometry = mapnik.Path.from_wkt(sys.argv[1])
    geometry = reader.read(sys.argv[1])
    # XDR = big-endian byte order for the WKB output.
    wkb = geometry.to_wkb(mapnik.wkbByteOrder.XDR)
elapsed = (time.clock() - start)
print>>sys.stderr,"elapsed=",elapsed
# QGIS Processing script: buffer every feature of the selected layer
# by `buffer` units and write the results to `output`.
# "input" contains the location of the selected layer.
# We get the actual object,
layer = processing.getobject(input)
provider = layer.dataProvider()
fields = provider.fields()
inFeat = QgsFeature()
outFeat = QgsFeature()
inGeom = QgsGeometry()
nElement = 0
writer = VectorWriter(output, None, fields, provider.geometryType(),
                      layer.crs())
feats = processing.getfeatures(layer)
nFeat = len(feats)
for inFeat in feats:
    # Report progress as a percentage of processed features.
    progress.setPercentage(int((100 * nElement) / nFeat))
    inGeom = inFeat.geometry()
    # Round-trip through shapely (via WKB) to compute the buffer.
    pol = loads(inGeom.asWkb())
    buff = pol.buffer(buffer)
    outGeom = QgsGeometry()
    outGeom.fromWkb(dumps(buff))
    outFeat.setGeometry(outGeom)
    outFeat.setAttributes(inFeat.attributes())
    writer.addFeature(outFeat)
    # BUG FIX: was `nElement += nElement`, which keeps the counter at 0
    # forever (0 + 0 == 0), so the progress bar never advanced.
    nElement += 1
del writer
def getCatchmentFeaturesForReaches(config, outputDir, catchmentFilename,
                                   reaches, format=OGR_SHAPEFILE_DRIVER_NAME):
    """ Get features (in WGS 84) for the drainage area associated with a
        set of NHD (National Hydrography Dataset) stream reaches.

        @param config A Python ConfigParser containing the following
        sections and options:
            'NHDPLUS2' / 'PATH_OF_NHDPLUS2_CATCHMENT' (absolute path to
            NHD catchment shapefile)
        @param outputDir String representing the absolute/relative path
        of the directory into which output rasters should be written
        @param catchmentFilename String representing name of file to
        save catchment features to. The appropriate extension will be
        added to the file name
        @param reaches List representing catchment features to be output
        @param format String representing OGR driver to use

        @return String representing the name of the dataset in outputDir
        created to hold the features

        @raise ConfigParser.NoSectionError
        @raise ConfigParser.NoOptionError
        @raise IOError(errno.ENOTDIR) if outputDir is not a directory
        @raise IOError(errno.EACCESS) if outputDir is not writable
        @raise Exception if output format is not known

        @todo Detect and fix non-closed geometries; unioning can raise
        "TopologyException: found non-noded intersection" for some
        inputs (e.g. COMID 10462287).
    """
    catchmentFeatureDBPath = config.get('NHDPLUS2',
                                        'PATH_OF_NHDPLUS2_CATCHMENT')
    if not os.access(catchmentFeatureDBPath, os.R_OK):
        raise IOError(errno.EACCES,
                      "The catchment feature DB at %s is not readable" %
                      catchmentFeatureDBPath)
    catchmentFeatureDBPath = os.path.abspath(catchmentFeatureDBPath)

    if not os.path.isdir(outputDir):
        raise IOError(errno.ENOTDIR,
                      "Output directory %s is not a directory" % (outputDir,))
    if not os.access(outputDir, os.W_OK):
        raise IOError(errno.EACCES,
                      "Not allowed to write to output directory %s" %
                      (outputDir,))
    outputDir = os.path.abspath(outputDir)

    if not format in OGR_DRIVERS.keys():
        raise Exception("Output format '%s' is not known" % (format,))
    catchmentFilename = "%s%s%s" % (catchmentFilename, os.extsep,
                                    OGR_DRIVERS[format])
    catchmentFilepath = os.path.join(outputDir, catchmentFilename)

    # Open input layer
    ogr.UseExceptions()
    poDS = ogr.Open(catchmentFeatureDBPath, OGR_UPDATE_MODE)
    if not poDS:
        # BUG FIX: the original was missing the '%' operator here
        # ("...%s" (path,)), which raised TypeError ("'str' object is
        # not callable") instead of the intended Exception message.
        raise Exception("Unable to open catchment feature database %s" %
                        (catchmentFeatureDBPath,))
    assert(poDS.GetLayerCount() > 0)
    poLayer = poDS.GetLayer(0)
    assert(poLayer)

    # Create output data source
    poDriver = ogr.GetDriverByName(format)
    assert(poDriver)
    poODS = poDriver.CreateDataSource(catchmentFilepath)
    assert(poODS != None)
    # poOLayer = poODS.CreateLayer("catchment", poLayer.GetSpatialRef(), poLayer.GetGeomType())
    poOLayer = poODS.CreateLayer("catchment", poLayer.GetSpatialRef(),
                                 ogr.wkbMultiPolygon)
    # poOLayer = poODS.CreateLayer("catchment", poLayer.GetSpatialRef(), ogr.wkbPolygon )

    # Create fields in output layer
    layerDefn = poLayer.GetLayerDefn()
    i = 0
    fieldCount = layerDefn.GetFieldCount()
    while i < fieldCount:
        fieldDefn = layerDefn.GetFieldDefn(i)
        poOLayer.CreateField(fieldDefn)
        i = i + 1

    # Create single geometry to hold catchment polygon in output shapefile
    outGeom = ogr.Geometry(poOLayer.GetGeomType())
    # polygon = Polygon()

    # Copy features, unioning them as we go
    numReaches = len(reaches)
    # Copy features in batches of UPSTREAM_SEARCH_THRESHOLD to overcome
    # limit in OGR driver for input layer
    start = 0
    end = UPSTREAM_SEARCH_THRESHOLD
    while end < numReaches:
        whereFilter = "featureid=%s" % (reaches[start],)
        for reach in reaches[start+1:end]:
            whereFilter = whereFilter + " OR featureid=%s" % (reach,)
        # Copy features
        assert(poLayer.SetAttributeFilter(whereFilter) == 0)
        inFeature = poLayer.GetNextFeature()
        # Union geometry of input feature to output feature
        while inFeature:
            # inGeom = inFeature.GetGeometryRef().SimplifyPreserveTopology(0.0001)
            inGeom = inFeature.GetGeometryRef()
            outGeom = outGeom.Union(inGeom)
            # polygon = polygon.union( loads( inGeom.ExportToWkb() ) )
            # polygon = cascaded_union( [polygon, loads( inGeom.ExportToWkb() )] )
            inFeature.Destroy()
            inFeature = poLayer.GetNextFeature()
        start = end
        end = end + UPSTREAM_SEARCH_THRESHOLD

    # Copy remaining features
    whereFilter = "featureid=%s" % (reaches[start],)
    for reach in reaches[start+1:end]:
        whereFilter = whereFilter + " OR featureid=%s" % (reach,)
    # FIX: the original called SetAttributeFilter twice in a row with
    # the same filter; one checked call is sufficient.
    assert(poLayer.SetAttributeFilter(whereFilter) == 0)
    inFeature = poLayer.GetNextFeature()
    while inFeature:
        # inGeom = inFeature.GetGeometryRef().SimplifyPreserveTopology(0.0001)
        inGeom = inFeature.GetGeometryRef()
        outGeom = outGeom.Union(inGeom)
        inFeature.Destroy()
        inFeature = poLayer.GetNextFeature()

    # Create a new polygon that only contains exterior points
    outGeom = ogr.ForceToPolygon(outGeom)
    polygon = loads(outGeom.ExportToWkb())
    if polygon.exterior:
        coords = polygon.exterior.coords
        newPolygon = Polygon(coords)
    else:
        newPolygon = Polygon()

    # Write new feature to output feature data source
    outFeat = ogr.Feature(poOLayer.GetLayerDefn())
    outFeat.SetGeometry(ogr.CreateGeometryFromWkb(dumps(newPolygon)))
    poOLayer.CreateFeature(outFeat)

    return catchmentFilename
def __qgPntFromShplyPnt(self, shapelyPnt):
    """Convert a shapely point to a QgsPoint via its WKB encoding."""
    geom = QgsGeometry()
    geom.fromWkb(dumps(shapelyPnt))
    return geom.asPoint()
def repair_shapefile(self):
    """Copy self.shapefilePath to a new "rep_"-prefixed shapefile,
    repairing invalid geometries along the way via shapely's
    buffer(0.0) trick. Invalid-but-unrepairable geometries are copied
    unchanged. NOTE(review): on any geometry error the feature loop
    `break`s, so remaining features are silently dropped."""
    try:
        shpDriver = ogr.GetDriverByName('ESRI Shapefile')
        print("Drivers are ready")
        shpdata = self.shapefilePath
        # Output name drops the first 4 chars of the input basename --
        # presumably stripping a fixed prefix; TODO confirm.
        output = os.path.dirname(shpdata) + os.sep + "rep_" + os.path.basename(shpdata)[4:]
        if os.path.exists(output):
            shpDriver.DeleteDataSource(output)
        ds = shpDriver.CreateDataSource(output)
        shpSource = shpDriver.Open(shpdata, 0)
        if shpSource is None:
            print('Could not open ' + shpdata)
            sys.exit(1) #exit with an error code
        layer = shpSource.GetLayer()
        numFeatures = layer.GetFeatureCount()
        print('Feature count:' + str(numFeatures))
        inputSR = layer.GetSpatialRef()
        shplayer = ds.CreateLayer(output, inputSR, ogr.wkbPolygon)
        # Clone the attribute schema from the first feature.
        inFeature = layer.GetNextFeature()
        for index in range(0, inFeature.GetFieldCount()):
            #fieldList.append(inFeature.GetFieldDefnRef(index))
            shplayer.CreateField(inFeature.GetFieldDefnRef(index))
            print('Created field: ' + inFeature.GetFieldDefnRef(index).name)
        cnt = 0
        while inFeature:
            cnt = cnt + 1
            f = ogr.Feature(shplayer.GetLayerDefn())
            # Copy attribute values, skipping any that fail to read.
            for index in range(0, f.GetFieldCount()):
                try:
                    attValue = inFeature.GetField(inFeature.GetFieldDefnRef(index).name)
                    if attValue:
                        f.SetField(f.GetFieldDefnRef(index).name, inFeature.GetField(inFeature.GetFieldDefnRef(index).name))
                except:
                    print(inFeature.GetFieldDefnRef(index).name + ' - skipping attribute value adding null')
            try:
                # Validate the geometry with shapely; repair invalid
                # ones with the zero-width buffer trick.
                geom = loads(inFeature.GetGeometryRef().ExportToWkb())
                if geom.is_valid:
                    newGeom = inFeature.geometry()
                else:
                    print("Bad geom: " + str(inFeature.GetFID()))
                    print(explain_validity(geom))
                    cleanGeom = geom.buffer(0.0)
                    if cleanGeom.is_valid:
                        print("Geometry is clean")
                        wkbGeom = dumps(cleanGeom)
                        newGeom = ogr.CreateGeometryFromWkb(wkbGeom)
                    else:
                        # Repair failed: keep the original geometry.
                        newGeom = inFeature.geometry()
                f.SetGeometry(newGeom)
                if shplayer.CreateFeature(f) != 0:
                    print("Failed to create feature in shapefile.\n")
                    sys.exit(1)
                f.Destroy()
            except:
                # Typically a null geometry; abandons the whole loop.
                print('Skipping null geometry')
                config.run_error_message('repair geom feature failure null geom..')
                break
            inFeature = layer.GetNextFeature()
        print('Feature count from loop:' + str(cnt))
        ds.Destroy()
    except:
        config.run_error_message("Repair failure")
def shape_to_wkb(clz, shape):
    """Serialize *shape* to its well-known-binary representation."""
    encoded = wkb.dumps(shape)
    return encoded
normal = normal/np.linalg.norm(normal) ; print(normal) ; #creating the upper point and down point for first and second (hopefully last) point in segment p1u = p[0] + normal * r1 ; p1d = p[0] - normal * r1 ; p2u = p[1] + normal * r2 ; p2d = p[1] - normal * r2 ; output_line = (p1u,p2u,p2d,p1d,p1u) ; ogeom = asPolygon(output_line) ; print(ogeom) ; pp1.pprint( str(geom)); #outputing for postgis : @NOTE : if inside postgres, hex =False, if outside, hex=True output = wkb.dumps(ogeom, hex=True); print(output) ; # ##emulation of postgres output # #return None ; # #return { "center": center, "radius": radius , "t1": t1, "t2":t2} #==============================================================================
# Script fragment: copy TIGER/Line address-range features into a new
# shapefile, truncating coordinates and reprojecting to spherical
# mercator. Relies on names defined earlier in the file: input_gt,
# input_fn, input_lyr, output_fn, sref_sm, sref_loc, define_fields,
# features, truncate, xform_loc2sm.
if input_gt != ogr.wkbLineString:
    raise ValueError('Wrong geometry type in %s: %d' % (input_fn, input_gt))
output_dr = ogr.GetDriverByName('ESRI Shapefile')
output_ds = output_dr.CreateDataSource(output_fn)
output_lyr = output_ds.CreateLayer('', sref_sm, ogr.wkbLineString)
# Mirror the input layer's attribute schema onto the output layer.
define_fields(input_lyr, output_lyr)
for (index, details) in enumerate(features(input_lyr, sref_loc)):
    # NOTE(review): `zip` here shadows the builtin within this loop.
    shape, statefp, countyfp, fullname, tlid, fromadd, toadd, offset, zip = details
    feature = ogr.Feature(output_lyr.GetLayerDefn())
    feature.SetField('STATEFP', statefp)
    feature.SetField('COUNTYFP', countyfp)
    feature.SetField('FULLNAME', fullname)
    feature.SetField('TLID', tlid)
    feature.SetField('FROMADD', fromadd)
    feature.SetField('TOADD', toadd)
    feature.SetField('OFFSET', offset)
    feature.SetField('ZIP', zip)
    # Truncate coordinates, then reproject to spherical mercator.
    truncated_shape = truncate(shape, 15)
    geometry = ogr.CreateGeometryFromWkb(wkb.dumps(truncated_shape))
    geometry.Transform(xform_loc2sm)
    feature.SetGeometry(geometry)
    output_lyr.CreateFeature(feature)
# QGIS Processing script: buffer each feature outward by distance/2,
# union the buffers, then buffer the union back inward by the same
# amount -- a standard close/dissolve operation -- and write the
# resulting polygons to `output`.
# "input" contains the location of the selected layer.
# We get the actual object,
layer = processing.getobject(input)
provider = layer.dataProvider()
fields = provider.fields()
buffer_dist = distance / 2
inFeat = QgsFeature()
inGeom = QgsGeometry()
buffered = []
feats = processing.getfeatures(layer)
for inFeat in feats:
    inGeom = inFeat.geometry()
    # Idiom fix: was `if not inGeom is None` -- same behavior.
    if inGeom is not None:
        pol = loads(inGeom.asWkb())
        buff = buffer(pol, buffer_dist)
        buffered.append(buff)
union = cascaded_union(buffered)
outFeat = QgsFeature()
writer = VectorWriter(output, None, fields, provider.geometryType(),
                      layer.crs())
for pol in union:
    outGeom = QgsGeometry()
    # Shrink each unioned polygon back by the buffer distance.
    outGeom.fromWkb(dumps(buffer(pol, -buffer_dist)))
    outFeat.setGeometry(outGeom)
    writer.addFeature(outFeat)
del writer