def encode_geometry(geom: BasePolygon) -> str:
    """Encode a geometry into a compressed hex string of geobuf bytes.

    If the encoded payload is larger than the Lambda async payload limit,
    the geometry is re-encoded at progressively coarser simplification
    tolerances until it fits (or the tolerance list is exhausted).

    Parameters
    ----------
    geom : BasePolygon
        Shapely-compatible geometry supporting ``simplify``.

    Returns
    -------
    str
        Hex-encoded geobuf representation of (possibly simplified) geom.
    """
    encoded_geom = geobuf.encode(mapping(geom)).hex()
    # NOTE(review): sys.getsizeof measures the Python str object, not the raw
    # payload byte length — presumably intended as a conservative upper bound;
    # confirm against the actual Lambda limit semantics.
    # Same tolerance steps as the original hand-unrolled version.
    for tolerance in (0.005, 0.01):
        if sys.getsizeof(encoded_geom) <= LAMBDA_ASYNC_PAYLOAD_LIMIT_BYTES:
            break
        encoded_geom = geobuf.encode(
            mapping(geom.simplify(tolerance, preserve_topology=False))).hex()
    return encoded_geom
def to_representation(self, obj):
    """Build the plain-dict API representation of a route object.

    The short route geometry is shipped compressed: GeoJSON -> geobuf ->
    base64. Stop coordinates are reversed into (lat, lng) order.
    """
    stops = [
        {
            "latlng": parada.latlng.coords[::-1],
            "codigo": parada.codigo,
            "nombre": parada.nombre,
        }
        for parada in obj.paradas.all()
    ]
    compressed_route = base64.b64encode(
        geobuf.encode(json.loads(obj.ruta_corta_geojson)))
    return {
        "id": obj.id,
        "osm_id": obj.osm_id,
        "ruta_corta": compressed_route,
        "long_bondi": obj.long_ruta,
        "color_polilinea": obj.color_polilinea,
        "inicio": obj.inicio,
        "fin": obj.fin,
        "nombre": obj.nombre,
        "foto": obj.foto,
        "paradas": stops,
    }
def togeobuf(infile):
    """Convert a GeoJSON file to a geobuf (.pbf) file next to it.

    The output path is derived by replacing the '.json' suffix with '.pbf'.

    Parameters
    ----------
    infile : str
        Path to the input GeoJSON file.
    """
    out = infile.replace('.json', '.pbf')
    # Context managers close both files even on error — the original
    # leaked both handles (neither file was ever closed).
    with open(infile) as src:
        data = json.load(src)
    pbf = geobuf.encode(data)
    with open(out, 'wb') as outfile:
        outfile.write(pbf)
def supercluster(markers, *args, **kwargs):
    """Build a dl.SuperCluster component from marker data.

    Markers are converted to GeoJSON, packed with geobuf, and
    base64-encoded so the component can consume them.
    """
    feature_collection = markers_to_geojson(markers)
    encoded = base64.b64encode(geobuf.encode(feature_collection)).decode()
    # Construct the cluster component with geobuf-formatted payload.
    return dl.SuperCluster(data=encoded, format="geobuf", *args, **kwargs)
def build_zip_donation_pbf_from_geojson(
    contribution_df,
    contest_name,
    candidate_mec_ids,
    contest_pac_ids,
    polygons_geojson_data,
    output_geobuf_path,
):
    """Aggregate donations by ZIP into ZCTA polygons and write a geobuf file.

    For each ZIP polygon, sums per-candidate donations (with and without PAC
    money folded into the backing candidate) and writes the enriched polygon
    layer as geobuf protobuf bytes to ``output_geobuf_path``.

    Relies on the module-level ``candidate_pac_dict`` mapping PAC ids to
    their backing candidate's MEC id.
    """
    polygons = gpd.read_file(polygons_geojson_data)
    # Donation totals per (ZIP, committee) and per ZIP overall.
    zip_df = contribution_df.groupby(by=["ZIP5", "MECID"]).agg(
        {"Amount": "sum"})
    zip_total_df = contribution_df.groupby(by=["ZIP5"]).agg({"Amount": "sum"})
    for index, polygon in polygons.iterrows():
        this_zip = polygon.ZCTA5CE10
        no_pac_total = 0
        if this_zip in zip_df.index:
            mec_donations = zip_df.loc[this_zip].to_dict()
            candidate_geography_totals = {}
            for mec_id in candidate_mec_ids:
                if mec_id in mec_donations["Amount"]:
                    this_candidate_donations = mec_donations["Amount"][mec_id]
                else:
                    this_candidate_donations = 0
                polygons.loc[index, "mec_donations_" +
                             mec_id] = this_candidate_donations
                candidate_geography_totals[mec_id] = this_candidate_donations
                no_pac_total = no_pac_total + this_candidate_donations
            # Fold each PAC's donations into its backing candidate's total.
            for pac_id in contest_pac_ids:
                if pac_id in candidate_pac_dict and pac_id in mec_donations[
                        "Amount"]:
                    this_pac = candidate_pac_dict[pac_id]
                    candidate_geography_totals[this_pac["Candidate MECID"]] = (
                        mec_donations["Amount"][pac_id]
                        + candidate_geography_totals[this_pac["Candidate MECID"]]
                    )
            for cand_mec_id in candidate_mec_ids:
                if cand_mec_id in candidate_geography_totals:
                    polygons.loc[
                        index, "mec_donations_" + cand_mec_id +
                        "_with_pacs"] = candidate_geography_totals[cand_mec_id]
                else:
                    # BUG FIX: the original wrote to "mec_dotations_" (typo)
                    # keyed by the stale `mec_id` loop variable; use the
                    # correct prefix and the current candidate id.
                    polygons.loc[index, "mec_donations_" + cand_mec_id +
                                 "_with_pacs"] = 0
            total_monetary_donations = zip_total_df.loc[this_zip].Amount
        else:
            total_monetary_donations = 0
        polygons.loc[index,
                     "total_monetary_donations_" + contest_name] = no_pac_total
        polygons.loc[index, "total_monetary_donations_" + contest_name +
                     "_with_pacs"] = total_monetary_donations
    polygons_json = polygons.to_json()
    polygon_geojson_data = json.loads(polygons_json)
    pbf = geobuf.encode(polygon_geojson_data)
    with open(output_geobuf_path, "wb") as write_file:
        write_file.write(pbf)
def geojson2geobuf(layer):
    """Geojson to Geobuf conversion.

    Reads '<layer>.geojson', encodes it with geobuf, and writes
    '<layer>.pbf' alongside it.

    Parameters
    ----------
    layer : str
        Basename (without extension) of the layer files.
    """
    # Renamed the input handle from `json` — the original shadowed the
    # stdlib json module name.
    with open('{layer}.geojson'.format(layer=layer), 'r') as src:
        with open('{layer}.pbf'.format(layer=layer), 'wb') as buf:
            data = geojson.load(src)
            pbf = geobuf.encode(data)
            buf.write(pbf)
    return 'Successfully wrote geobuf.'
def build_donation_pbf_from_geojson(
    contribution_gdf,
    contest_name,
    candidate_mec_ids,
    contest_pac_ids,
    polygons_geojson_paths,
    output_geobuf_path
):
    """Aggregate point contributions into polygons and write a geobuf file.

    For each polygon, sums monetary ("M") contributions per candidate MEC id,
    with and without PAC money folded into the backing candidate, then writes
    the enriched polygon layer as geobuf protobuf bytes.

    Relies on the module-level ``candidate_pac_dict`` mapping PAC ids to
    their backing candidate's MEC id.
    """
    # Concatenate all polygon layers into a single GeoDataFrame.
    polygons = gpd.GeoDataFrame(
        pd.concat(
            [gpd.read_file(path) for path in polygons_geojson_paths],
            ignore_index=True,
        )
    )
    mec_ids = list(candidate_mec_ids) + list(contest_pac_ids)
    for index, polygon in polygons.iterrows():
        total_monetary_donations = 0
        donations_this_geography = {}
        no_pac_total = 0
        candidate_geography_totals = {}
        # Point-in-polygon mask: contributions falling inside this shape.
        pip = contribution_gdf.within(polygon.geometry)
        for _, row in contribution_gdf[pip].iterrows():
            # Only monetary ("M") contributions count here.
            if row.contribution_type != "M":
                continue
            total_monetary_donations = total_monetary_donations + row.amount
            if row.mec_id in candidate_mec_ids:
                donations_this_geography[row.mec_id] = (
                    donations_this_geography.get(row.mec_id, 0) + row.amount
                )
                no_pac_total = no_pac_total + row.amount
            elif row.mec_id in contest_pac_ids:
                donations_this_geography[row.mec_id] = (
                    donations_this_geography.get(row.mec_id, 0) + row.amount
                )
        polygons.loc[
            index, "total_monetary_donations_" + contest_name
        ] = no_pac_total
        polygons.loc[
            index, "total_monetary_donations_" + contest_name + "_with_pacs"
        ] = total_monetary_donations
        for mec_id in candidate_mec_ids:
            if mec_id in donations_this_geography and donations_this_geography[mec_id] > 0:
                this_candidate_donations = donations_this_geography[mec_id]
            else:
                this_candidate_donations = 0
            polygons.loc[
                index, "mec_donations_" + mec_id
            ] = this_candidate_donations
            candidate_geography_totals[mec_id] = this_candidate_donations
        # Fold each PAC's donations into its backing candidate's total.
        for pac_id in contest_pac_ids:
            this_pac = candidate_pac_dict[pac_id]
            if pac_id in donations_this_geography:
                candidate_geography_totals[this_pac['candidate_mec_id']] = (
                    candidate_geography_totals[this_pac['candidate_mec_id']]
                    + donations_this_geography[pac_id]
                )
        for mec_id in candidate_mec_ids:
            polygons.loc[
                index, "mec_donations_" + mec_id + "_with_pacs"
            ] = candidate_geography_totals[mec_id]
    polygon_geojson_data = json.loads(polygons.to_json())
    pbf = geobuf.encode(polygon_geojson_data)
    with open(output_geobuf_path, "wb") as write_file:
        write_file.write(pbf)
def encode(precision, with_z):
    """Given GeoJSON on stdin, writes a geobuf file to stdout."""
    logger = logging.getLogger('geobuf')
    source = click.get_text_stream('stdin')
    sink = click.get_binary_stream('stdout')
    try:
        data = json.load(source)
        # Negative precision means "use the default of 6 digits";
        # dimensions are 3 when a Z coordinate is requested.
        digits = precision if precision >= 0 else 6
        dimensions = 3 if with_z else 2
        sink.write(geobuf.encode(data, digits, dimensions))
    except Exception:
        logger.exception("Failed. Exception caught")
        sys.exit(1)
    else:
        sys.exit(0)
def wfs_geojson_service(request, layer_name):
    """Serve every feature of a named layer as a geobuf-encoded response.

    Looks up the Django model backing ``layer_name`` via LayerInfo,
    serializes all of its rows to GeoJSON, and returns the geobuf
    encoding as an octet-stream HTTP response.
    """
    def report_size(template, value):
        # Debug trace of intermediate payload sizes (KB).
        print(template % str(Common_Utils.get_memory_size(value) / 1000))

    # Removed the dead `res = {}` assignment and the commented-out
    # serializer call from the original.
    layer_info = LayerInfo.objects.filter(layer_name=layer_name).first()
    layer_model = apps.get_model(layer_info.app_label, layer_info.model_name)
    str_geojson = serialize('geojson', layer_model.objects.all())
    report_size('Size of serialized string:%s', str_geojson)
    geojson = json.loads(str_geojson)
    report_size('Size of geojson:%s', geojson)
    res = geobuf.encode(geojson)
    report_size('Size of geobuf encode:%s', res)
    return HttpResponse(res, content_type='application/octet-stream')
def return_geojson_or_geobuf(
    features: Any,
    return_type: str = "geojson",
) -> Any:
    """Return geojson or geobuf.

    Dispatches on ``return_type``: plain geojson, geobuf-encoded bytes,
    or raw bytes already encoded by the database; any other value is a
    client error (HTTP 400).
    """
    if return_type == "geojson":
        # Round-trip through the json module to coerce the features into
        # plain serializable types.
        return json.loads(json.dumps(features))
    if return_type == "geobuf":
        payload = bytes(geobuf.encode(features))
        return Response(payload, media_type=MimeTypes.geobuf.value)
    if return_type == "db_geobuf":
        return Response(bytes(features))
    raise HTTPException(status_code=400, detail="Invalid return type")
def post(self, request, layer_name):
    """Return filtered layer rows as a geobuf protobuf HTTP response.

    Filters either by boundary ids or by primary keys, depending on
    which key the request body carries.
    """
    print("POST LayerFiltersView layer_name={layer_name}".format(layer_name=layer_name))
    data = None
    columns = get_layer_table_columns(layer_name)
    payload = request.data
    if 'boundaryIds' in payload:
        requested = payload['boundaryIds']
        # 'PH000000000' alone acts as the whole-country sentinel: no filter.
        if len(requested) == 1 and requested[0] == 'PH000000000':
            boundary_ids = None
        else:
            # NOTE(review): ids are quoted by string concatenation before
            # being handed to the query helper — confirm they are validated
            # upstream (SQL-injection risk otherwise).
            boundary_ids = ','.join(
                ["'" + _id + "'" for _id in requested])
        data = get_layer_data_by_boundary(layer_name, columns, boundary_ids)
    elif 'dataPks' in payload:
        ids = ','.join([str(id) for id in payload['dataPks']])
        data = get_geojson_layer_data(layer_name, columns, ids)
    pbf = geobuf.encode(data)
    return HttpResponse(pbf, content_type="application/x-protobuf")
def __enter__(self):
    """Open MemoryFile, write data and return."""
    if self.driver.lower() == "geobuf":
        import geobuf
        # Wrap the stored features into a FeatureCollection and return
        # the geobuf bytes directly — no fiona memory file needed.
        feature_collection = dict(
            type="FeatureCollection",
            features=[dict(f, type="Feature") for f in self.features],
        )
        return geobuf.encode(feature_collection)
    # All other drivers go through a fiona in-memory file.
    self.fio_memfile = MemoryFile()
    with self.fio_memfile.open(
        schema=self.schema, driver=self.driver, crs=self.tile.crs
    ) as dst:
        dst.writerecords(self.features)
    return self.fio_memfile.getbuffer()
def for_web(self, data):
    """
    Convert data to web output (raster only).

    Parameters
    ----------
    data : array

    Returns
    -------
    web data : array
    """
    import geobuf
    # Rebuild each feature with a repaired geometry before encoding.
    features = [
        dict(f, geometry=mapping(_repair(shape(f["geometry"]))), type="Feature")
        for f in data
    ]
    payload = geobuf.encode(dict(type="FeatureCollection", features=features))
    return payload, "application/octet-stream"
def to_representation(self, obj):
    """Serialize a route — or a two-leg transfer route — for the API.

    Plain routes produce a single-leg ``itinerario``; objects carrying an
    ``id2`` attribute represent a transfer and produce two legs, the second
    one partly backed by a separately fetched Recorrido.
    """
    if not hasattr(obj, 'id2'):
        # Single-leg route: one itinerary entry built from obj alone.
        return {
            "id": obj.id,
            "itinerario": [{
                "id": obj.id,
                # Geometry is shipped compressed: GeoJSON -> geobuf -> base64.
                "ruta_corta": base64.b64encode(
                    geobuf.encode(json.loads(obj.ruta_corta_geojson))),
                "ruta": base64.b64encode(
                    geobuf.encode(json.loads(obj.ruta_larga_geojson))),
                "long_bondi": obj.long_ruta,
                "long_pata": obj.long_pata,
                "inicio": obj.inicio,
                "fin": obj.fin,
                "nombre": obj.nombre,
                "type": obj.type,
                "p1": getParada(obj.p1),
                "p2": getParada(obj.p2),
                # Stop coordinates are reversed into (lat, lng) order.
                "paradas": [{
                    "latlng": p.latlng.coords[::-1],
                    "codigo": p.codigo,
                    "nombre": p.nombre
                } for p in obj.paradas.all()],
                "url": obj.get_absolute_url(),
            }]
        }
    else:
        # Transfer route: fetch the second leg's Recorrido with its stops
        # prefetched to avoid a per-stop query.
        obj2 = Recorrido.objects.prefetch_related('paradas').get(
            pk=obj.id2)
        return {
            "id": str(obj.id) + str(obj.id2),
            "long_pata_transbordo": obj.long_pata_transbordo,
            "itinerario": [{
                "id": obj.id,
                "ruta_corta": base64.b64encode(
                    geobuf.encode(json.loads(obj.ruta_corta_geojson))),
                "ruta": base64.b64encode(
                    geobuf.encode(json.loads(obj.ruta_larga_geojson))),
                "long_bondi": obj.long_ruta,
                "long_pata": obj.long_pata,
                "inicio": obj.inicio,
                "fin": obj.fin,
                "nombre": obj.nombre,
                "type": obj.type,
                "p1": getParada(obj.p11ll),
                "p2": getParada(obj.p12ll),
                "paradas": [{
                    "latlng": p.latlng.coords[::-1],
                    "codigo": p.codigo,
                    "nombre": p.nombre
                } for p in obj.paradas.all()],
                "url": obj.get_absolute_url(),
            }, {
                "id": obj.id2,
                # Second leg reads the "2"-suffixed fields denormalized on
                # obj itself...
                "ruta_corta": base64.b64encode(
                    geobuf.encode(json.loads(obj.ruta_corta_geojson2))),
                "ruta": base64.b64encode(
                    geobuf.encode(json.loads(obj.ruta_larga_geojson2))),
                "long_bondi": obj.long_ruta2,
                "long_pata": obj.long_pata2,
                "inicio": obj.inicio2,
                "fin": obj.fin2,
                "nombre": obj.nombre2,
                "type": obj.type2,
                "p1": getParada(obj.p21ll),
                "p2": getParada(obj.p22ll),
                # ...but its stop list and URL come from the fetched obj2.
                "paradas": [{
                    "latlng": p.latlng.coords[::-1],
                    "codigo": p.codigo,
                    "nombre": p.nombre
                } for p in obj2.paradas.all()],
                "url": obj2.get_absolute_url(),
            }]
        }
def serialize(self, dct: dict) -> bytes:
    """Encode a GeoJSON-like mapping into geobuf protobuf bytes."""
    encoded = geobuf.encode(dct)
    return encoded
def to_representation(self, obj):
    """Serialize a route — or a two-leg transfer route — summary for the API.

    Unlike the full serializer, this variant omits the stop lists and the
    long route geometry; the short route is shipped compressed
    (GeoJSON -> geobuf -> base64).
    """
    if not hasattr(obj, 'id2'):
        # Single-leg route: one itinerary entry built from obj alone.
        return {
            "id": obj.id,
            "itinerario": [{
                "id": obj.id,
                "ruta_corta": base64.b64encode(
                    geobuf.encode(json.loads(obj.ruta_corta_geojson))),
                "long_bondi": obj.long_ruta,
                "long_pata": obj.long_pata,
                "color_polilinea": obj.color_polilinea,
                "inicio": obj.inicio,
                "fin": obj.fin,
                "nombre": obj.nombre,
                "foto": obj.foto,
                "p1": getParada(obj.p1),
                "p2": getParada(obj.p2),
                # "url": obj.get_absolute_url()
            }]
        }
    else:
        # Transfer route: both legs are read from "2"-suffixed fields
        # denormalized on obj itself.
        return {
            "id": str(obj.id) + str(obj.id2),
            "itinerario": [
                {
                    "id": obj.id,
                    "ruta_corta": base64.b64encode(
                        geobuf.encode(json.loads(obj.ruta_corta_geojson))),
                    "long_bondi": obj.long_ruta,
                    "long_pata": obj.long_pata,
                    "color_polilinea": obj.color_polilinea,
                    "inicio": obj.inicio,
                    "fin": obj.fin,
                    "nombre": obj.nombre,
                    "foto": obj.foto,
                    "p1": getParada(obj.p11ll),
                    "p2": getParada(obj.p12ll),
                    # "url": obj.get_absolute_url(None, None, obj.slug)
                },
                {
                    "id": obj.id2,
                    "ruta_corta": base64.b64encode(
                        geobuf.encode(json.loads(
                            obj.ruta_corta_geojson2))),
                    "long_bondi": obj.long_ruta2,
                    "long_pata": obj.long_pata2,
                    "color_polilinea": obj.color_polilinea2,
                    "inicio": obj.inicio2,
                    "fin": obj.fin2,
                    "nombre": obj.nombre2,
                    "foto": obj.foto2,
                    "p1": getParada(obj.p21ll),
                    "p2": getParada(obj.p22ll),
                    # "url": obj.get_absolute_url(None, None, obj.slug2)
                }
            ]
        }
def render(self, data, media_type=None, renderer_context=None):
    """Render response data as a geobuf-encoded FeatureCollection.

    Features are extracted by the parent renderer, wrapped in a
    FeatureCollection, and encoded to geobuf protobuf bytes
    (GeoJSON or TopoJSON -> Geobuf string).
    """
    features = super(GeoBufRenderer, self).get_features(data)
    feature_collection = FeatureCollection(features=features)
    # The original also serialized to a GeoJSON string (geojson.dumps) but
    # never used the result; geobuf encodes the mapping directly.
    return geobuf.encode(feature_collection)
def to_representation(self, value):
    """Compress the value's GeoJSON to geobuf and return it hex-encoded."""
    packed = geobuf.encode(loads(value.geojson))
    return packed.hex()
def main():
    """Round-trip GEO through geobuf: encode, parse the raw protobuf, decode."""
    buf = encode(GEO)
    # Parse the encoded bytes back into the protobuf message for inspection.
    message = Data()
    message.ParseFromString(buf)
    print(message)
    print(decode(buf))
def geojson_to_geobuf(geojson):
    """Encode a GeoJSON mapping as base64 text of its geobuf bytes."""
    packed = geobuf.encode(geojson)
    return base64.b64encode(packed).decode()
def post(self):
    """Run the visualization query for the requested table and return it.

    Dispatches on ``table_name`` to a prepared SQL statement, executes it
    with the request body as parameters, and returns the result either as-is
    or geobuf-encoded (sent as an 'application/geobuf.pbf' file) depending
    on ``return_type``.
    """
    body = request.get_json()
    table_name = body.get('table_name')
    return_type = body.get('return_type')
    # Prepared-statement dispatch table; placeholders are filled from the
    # request body by the db helper.
    if table_name == 'pois' and "geom" not in body:
        prepared_query = '''SELECT * FROM pois_visualization(%(scenario_id)s,%(amenities)s,%(routing_profile)s,%(modus)s)'''
    elif table_name == 'aois':
        prepared_query = '''SELECT gid, a.amenity, a.name, a.geom FROM aois a WHERE a.amenity IN(SELECT UNNEST(%(amenities)s))'''
    elif table_name == 'edges':
        prepared_query = '''SELECT id AS gid, %(modus_input)s AS modus, cost, geom FROM edges WHERE objectid = %(objectid)s'''
    elif table_name == 'pois':
        # Same as the first 'pois' branch but clipped to the passed geometry.
        prepared_query = '''SELECT * FROM pois_visualization(%(scenario_id)s,%(amenities)s,%(routing_profile)s,%(modus)s) WHERE ST_Intersects(geom, ST_SETSRID(ST_GEOMFROMTEXT(%(geom)s), 4326))'''
    elif table_name == 'mapping_pois_opening_hours':
        prepared_query = '''SELECT osm_id, amenity, amenity || '_accessible' as amenity_icon, name, CASE WHEN origin_geometry = 'point' THEN 'node' ELSE 'way' END as osm_type, geom FROM pois_mapping WHERE opening_hours IS NULL AND amenity IN (SELECT UNNEST(%(amenities)s))'''
    elif table_name == 'ways':
        prepared_query = '''SELECT id as gid, * FROM ways WHERE ST_Intersects(geom, ST_SETSRID(ST_GEOMFROMTEXT(%(geom)s), 4326)) AND class_id NOT IN (0,101,102,103,104,105,106,107,501,502,503,504,701,801)'''
    elif table_name == 'buildings':
        prepared_query = '''SELECT * FROM buildings WHERE ST_Intersects(geom, ST_SETSRID(ST_GEOMFROMTEXT(%(geom)s), 4326))'''
    elif table_name == 'study_area_crop':
        prepared_query = '''SELECT * FROM study_area_crop'''
    elif table_name == 'study_area_union':
        prepared_query = '''SELECT * FROM study_area_union'''
    elif table_name == 'study_area':
        prepared_query = '''SELECT * FROM study_area'''
    elif table_name == 'modeshare':
        prepared_query = '''SELECT * FROM modeshare'''
    else:
        return {"Error": "No valid table was selected."}
    _body = body.copy()
    # Workaround to avoid the accuracy loss caused by a bug in the
    # ST_AsGeobuf method: fetch geojson from the DB and encode to geobuf
    # in Python instead.
    if (return_type == 'geobuf'):
        sql_return_type = 'geojson'
        _body["return_type"] = 'geojson'
    else:
        sql_return_type = return_type
    result = db.select(prepared_query, params=_body,
                       return_type=sql_return_type)
    if (return_type == 'geobuf'):
        # Single-row, single-column geojson result -> geobuf bytes.
        result = geobuf.encode(result[0][0])
    if body["return_type"] == 'geobuf':
        result_bytes = io.BytesIO(result)
        return send_file(result_bytes, mimetype='application/geobuf.pbf')
    else:
        return result