def data_manipulation_sh(network):
    """Add a manual 220 kV connection from Luebeck to Siems to *network*.

    Creates one new bus, one transformer (to bus "25536") and one line
    (to bus "26387"), then fills in their geom/topo columns. Modifies
    ``network`` in place and returns ``None``.
    """
    from shapely.geometry import Point, LineString, MultiLineString
    from geoalchemy2.shape import from_shape, to_shape

    # New component ids: one past the current maximum integer index.
    new_bus = str(network.buses.index.astype(np.int64).max() + 1)
    new_trafo = str(network.transformers.index.astype(np.int64).max() + 1)
    new_line = str(network.lines.index.astype(np.int64).max() + 1)

    network.add("Bus", new_bus, carrier='AC', v_nom=220,
                x=10.760835, y=53.909745)
    network.add("Transformer", new_trafo, bus0="25536", bus1=new_bus,
                x=1.29960, tap_ratio=1, s_nom=1600)
    network.add("Line", new_line, bus0="26387", bus1=new_bus,
                x=0.0001, s_nom=1600)
    network.lines.loc[new_line, 'cables'] = 3.0

    # bus geometry
    point_bus1 = Point(10.760835, 53.909745)
    # DataFrame.set_value was deprecated in pandas 0.21 and removed in
    # 1.0; .at is the supported scalar setter.
    network.buses.at[new_bus, 'geom'] = from_shape(point_bus1, 4326)

    # line geometry / topology (same path, stored as multi and single line)
    line_path = LineString([to_shape(network.buses.geom['26387']), point_bus1])
    network.lines.at[new_line, 'geom'] = from_shape(
        MultiLineString([line_path]), 4326)
    network.lines.at[new_line, 'topo'] = from_shape(line_path, 4326)

    # transformer geometry / topology
    trafo_path = LineString([to_shape(network.buses.geom['25536']), point_bus1])
    network.transformers.at[new_trafo, 'geom'] = from_shape(
        MultiLineString([trafo_path]), 4326)
    network.transformers.at[new_trafo, 'topo'] = from_shape(trafo_path, 4326)

    return
def get_occurrence_data_1(data_provider):
    """Return four test occurrence records bound to *data_provider*."""
    coordinates = [
        (0, 0, 166.5521, -22.0939),
        (1, 1, 166.551, -22.098),
        (2, 2, 166.552, -22.097),
        (3, 5, 166.553, -22.099),
    ]
    return [
        {
            'id': occ_id,
            'provider_id': data_provider.db_id,
            'provider_pk': provider_pk,
            'location': from_shape(Point(lon, lat), srid=4326),
            'properties': {},
        }
        for occ_id, provider_pk, lon, lat in coordinates
    ]
def test_sync_insert(self):
    """Syncing plots into an empty provider must insert them all."""
    self.tearDownClass()
    self.setUpClass()
    provider = TestDataProvider('test_data_provider_3')
    with Connector.get_connection() as connection:
        plot_provider = BasePlotProvider(provider)
        self.assertEqual(
            len(plot_provider.get_niamoto_plot_dataframe(connection)), 0)
        records = [
            {
                'id': 0,
                'name': 'plot_3_1',
                'location': from_shape(Point(166.5521, -22.0939), srid=4326),
                'properties': '{}',
            },
            {
                'id': 1,
                'name': 'plot_3_2',
                'location': from_shape(Point(166.551, -22.098), srid=4326),
                'properties': '{}',
            },
        ]
        plots = pd.DataFrame.from_records(records, index='id')
        inserted, updated, deleted = plot_provider._sync(plots, connection)
        self.assertEqual(len(inserted), 2)
        self.assertEqual(len(updated), 0)
        self.assertEqual(len(deleted), 0)
        self.assertEqual(
            len(plot_provider.get_niamoto_plot_dataframe(connection)), 2)
def collect_legend_entries_by_bbox(self, session, bbox):
    """
    Extracts all legend entries in the topic which have spatial relation with the passed
    bounding box of visible extent.

    Args:
        session (sqlalchemy.orm.Session): The requested clean session instance ready for use
        bbox (shapely.geometry.base.BaseGeometry): The bbox to search the records.

    Returns:
        list: The result of the related geometries unique by the public law restriction id
    """
    # Build the bbox geometry once instead of three times.
    bbox_geom = from_shape(bbox, srid=Config.get('srid'))
    geometries = session.query(self._model_).filter(
        or_(
            self._model_.point.ST_Intersects(bbox_geom),
            self._model_.line.ST_Intersects(bbox_geom),
            self._model_.surface.ST_Intersects(bbox_geom))).distinct(
        self._model_.public_law_restriction_id).all()
    # A set gives O(1) de-duplication instead of the original O(n^2)
    # list-membership scan; order is irrelevant to the IN clause below.
    distinct_legend_entry_ids = {
        geometry.public_law_restriction.legend_entry_id
        for geometry in geometries
    }
    return session.query(self.legend_entry_model).filter(
        self.legend_entry_model.t_id.in_(
            list(distinct_legend_entry_ids))).all()
def __init__(self, meta, name, osmtables, subset=None, change=None,
             column_geom='geom', geom_change=None):
    # Tag sub-table over ways with an attached geometry column.
    # `column_geom` may be either a ready-made SQLAlchemy Column (adopted
    # as-is) or a column name (a new Geometry column is created for it).
    TagSubTable.__init__(self, meta, name, osmtables.way,
                         subset=subset, change=change)
    # SRID of the source node geometries; used below to decide whether
    # inserts need an ST_Transform.
    src_srid = osmtables.node.data.c.geom.type.srid
    # need a geometry column
    if isinstance(column_geom, Column):
        # caller supplied a ready-made column: adopt it and its SRID
        self.column_geom = column_geom
        srid = column_geom.type.srid
    else:
        # otherwise create one, preferring an explicitly configured SRID
        # and falling back to the node table's SRID
        srid = meta.info.get('srid', osmtables.node.data.c.geom.type.srid)
        self.column_geom = Column(column_geom, Geometry('GEOMETRY', srid=srid))
    self.data.append_column(self.column_geom)
    self.osmtables = osmtables
    self.geom_change = geom_change
    # add an additional transform to the insert statement if the srid changes
    params = {}
    for c in self.data.c:
        if c == self.column_geom:
            # XXX This ugly from_shape hack is here to be able to inject
            # the geometry into the compiled expression later. This can't
            # be the right way to go about this. Better ideas welcome.
            # (The Point(0, 0) is a placeholder, not real data.)
            if src_srid != srid:
                params[c.name] = ST_Transform(from_shape(Point(0, 0), srid=0), srid)
            else:
                params[c.name] = from_shape(Point(0, 0), srid=0)
        else:
            params[c.name] = bindparam(c.name)
    self.stm_insert = self.stm_insert.values(params)
def save_place(ptolemy_id):
    """Persist form edits for the place identified by *ptolemy_id*.

    Reads names, coordinates and disposition from the submitted form,
    updates the Place row and redirects back to its detail view. Errors
    are logged rather than propagated.
    """
    try:
        if request.form['submit'] == 'Submit':
            session = models.create_session(models.DB_URL)
            place = session.query(models.Place).get(ptolemy_id)
            place.ptolemy_name = request.form['ptolemy_name']
            place.modern_name = request.form['modern_name']
            # coordinates arrive as "lat lon" pairs
            ptolemy_lat, ptolemy_lon = [
                float(s) for s in request.form['ptolemy_coords'].split(' ')
            ]
            place.ptolemy_point = from_shape(Point(ptolemy_lon, ptolemy_lat))
            if len(request.form['modern_coords'].split(' ')) > 1:
                modern_lat, modern_lon = [
                    float(s) for s in request.form['modern_coords'].split(' ')
                ]
                place.modern_point = from_shape(Point(modern_lon, modern_lat))
            else:
                # no modern coordinates supplied
                place.modern_point = None
            place.disposition = request.form['disposition']
            session.add(place)
            session.commit()
            session.close()
            return redirect(url_for('main.show_place', ptolemy_id=ptolemy_id))
    except Exception as e:
        # Exception.message was removed in Python 3 and is unset on many
        # exception types even in Python 2; str(e) is always safe.
        current_app.logger.error(str(e), exc_info=e)
def test_sync_insert(self):
    """Syncing occurrences into an empty provider must insert them all."""
    self.tearDownClass()
    self.setUpClass()
    provider = TestDataProvider('test_data_provider_3')
    with Connector.get_connection() as connection:
        occ_provider = BaseOccurrenceProvider(provider)
        self.assertEqual(
            len(occ_provider.get_niamoto_occurrence_dataframe(connection)), 0)
        records = [
            {
                'id': 0,
                'taxon_id': None,
                'provider_taxon_id': None,
                'location': from_shape(Point(166.551, -22.039), srid=4326),
                'properties': '{}',
            },
            {
                'id': 1,
                'taxon_id': None,
                'provider_taxon_id': None,
                'location': from_shape(Point(166.551, -22.098), srid=4326),
                'properties': '{}',
            },
        ]
        inserted, updated, deleted = occ_provider._sync(
            pd.DataFrame.from_records(records, index='id'), connection)
        self.assertEqual(len(inserted), 2)
        self.assertEqual(len(updated), 0)
        self.assertEqual(len(deleted), 0)
        self.assertEqual(
            len(occ_provider.get_niamoto_occurrence_dataframe(connection)), 2)
def add_intersections(self, toponyms):
    """Add intersections between roads.

    For every pair of road toponyms whose geometries intersect, append a
    synthetic junction Point toponym to *toponyms*. Returns the mutated
    list.
    """
    ROAD_TYPE = ['ARTIFICIAL FEATURE', 'TRANSPORT', 'ROAD']
    # A set gives O(1) membership tests; the original list scan was
    # O(n) per pair.
    processed = set()
    junctions = []

    def make_junction(toponym1, toponym2, part):
        # Build a synthetic junction toponym; negative ids mark it as
        # derived rather than a real OSM object.
        return (Point(gid=-toponym1.gid,
                      osm_id=-toponym1.osm_id,
                      name='%s and %s' % (toponym1.name, toponym2.name),
                      way=shape.from_shape(part, 900913)),
                {'type': ['ARTIFICIAL FEATURE', 'TRANSPORT', 'ROAD',
                          'JUNCTION']})

    for idx, (toponym1, classification1) in enumerate(toponyms):
        if isinstance(toponym1, (Polygon, Point)) \
                or not type_match(classification1['type'], ROAD_TYPE):
            continue
        for toponym2, classification2 in toponyms[idx + 1:]:
            if (toponym1.osm_id, toponym2.osm_id) in processed \
                    or (toponym2.osm_id, toponym1.osm_id) in processed \
                    or not type_match(classification2['type'], ROAD_TYPE):
                continue
            geom1 = shape.to_shape(toponym1.way)
            geom2 = shape.to_shape(toponym2.way)
            if geom1.intersects(geom2):
                processed.add((toponym1.osm_id, toponym2.osm_id))
                geom = geom1.intersection(geom2)
                if isinstance(geom, geometry.MultiPoint):
                    # Iterate .geoms: direct iteration over multi-part
                    # geometries was removed in Shapely 2.0 (and .geoms
                    # also works on 1.x).
                    for part in geom.geoms:
                        junctions.append(
                            make_junction(toponym1, toponym2, part))
                else:
                    junctions.append(make_junction(toponym1, toponym2, geom))
    toponyms.extend(junctions)
    return toponyms
def test_insert_geog_poi(self):
    """Inserting the same point in four representations must round-trip
    identically through the geography column."""
    conn = self.conn
    equivalent_points = [
        {'geog': 'SRID=4326;POINT(1 1)'},
        {'geog': WKTElement('POINT(1 1)', srid=4326)},
        {'geog': WKTElement('SRID=4326;POINT(1 1)', extended=True)},
        {'geog': from_shape(Point(1, 1), srid=4326)},
    ]
    conn.execute(Poi.__table__.insert(), equivalent_points)
    rows = conn.execute(Poi.__table__.select()).fetchall()
    for row in rows:
        geog = row[2]
        assert isinstance(geog, WKBElement)
        assert session.execute(geog.ST_AsText()).scalar() == 'POINT(1 1)'
        assert session.execute(geog.ST_SRID()).scalar() == 4326
        assert geog == from_shape(Point(1, 1), srid=4326)
def _prepare_samples_fields():
    """Build a fake sample-table payload with three class-1 points."""
    coords = [(0, 0), (0, 1), (1, 2)]
    features = [
        dict(class_id=1, geometry=from_shape(shape=Point(x, y), srid=4326))
        for x, y in coords
    ]
    return dict(table_name='FakeSample', version="1", features=features)
def create_iris(polygon: Polygon, iris_code: str = "123456789") -> IrisFrance:
    """Build an IrisFrance entity from *polygon*, with shape and centroid
    stored in the WGS spatial reference system."""
    srid = WGS_SPATIAL_REFERENCE_IDENTIFIER
    iris = IrisFrance()
    iris.irisCode = iris_code
    iris.shape = from_shape(polygon, srid=srid)
    iris.centroid = from_shape(create_centroid_from_polygon(polygon), srid=srid)
    return iris
def test_update_synonym_mapping(self):
    """update_synonym_mapping must resolve provider taxon ids to niamoto
    taxon ids via the provider's synonym key, leaving unknown ids unmapped.
    """
    self.tearDownClass()
    self.setUpClass()
    data_provider_3 = TestDataProvider('test_data_provider_3')
    with Connector.get_connection() as connection:
        op3 = BaseOccurrenceProvider(data_provider_3)
        # Three occurrences with provider taxon ids 20, 30 and 60; the
        # taxonomy fixture loaded below presumably maps gbif ids 20 and
        # 30 but not 60 (verified by the assertions at the end).
        occ = pd.DataFrame.from_records([
            {
                'id': 0,
                'taxon_id': None,
                'provider_taxon_id': 20,
                'location': from_shape(Point(166.551, -22.039), srid=4326),
                'properties': '{}',
            },
            {
                'id': 1,
                'taxon_id': None,
                'provider_taxon_id': 30,
                'location': from_shape(Point(166.551, -22.098), srid=4326),
                'properties': '{}',
            },
            {
                'id': 2,
                'taxon_id': None,
                'provider_taxon_id': 60,
                'location': from_shape(Point(166.551, -22.098), srid=4326),
                'properties': '{}',
            },
        ], index='id')
        op3._sync(occ, connection)
        # Load the taxonomy fixture the synonym mapping is resolved against.
        taxonomy_csv_path = os.path.join(
            NIAMOTO_HOME, 'data', 'taxonomy', 'taxonomy_1.csv',
        )
        taxonomy_api.set_taxonomy(taxonomy_csv_path)
        # Re-create the provider with the 'gbif' synonym key so mapping
        # can take place.
        data_provider_3 = TestDataProvider.update_data_provider(
            "test_data_provider_3", synonym_key='gbif')
        op3 = BaseOccurrenceProvider(data_provider_3)
        op3.update_synonym_mapping(connection)
        df = op3.get_niamoto_occurrence_dataframe(connection)
        # provider taxon 60 has no synonym: its taxon_id stays null
        r1 = df[df['taxon_id'].isnull()]
        self.assertEqual(len(r1), 1)
        self.assertEqual(r1.iloc[0]['provider_taxon_id'], 60)
        # provider taxa 20 and 30 map to niamoto taxa 1 and 2
        r2 = df[~df['taxon_id'].isnull()]
        self.assertEqual(len(r2), 2)
        self.assertEqual(set(r2['taxon_id']), {1, 2})
        self.assertEqual(
            r2[r2['taxon_id'] == 1]['provider_taxon_id'].iloc[0],
            20,
        )
        self.assertEqual(
            r2[r2['taxon_id'] == 2]['provider_taxon_id'].iloc[0],
            30,
        )
def test_get_insert_dataframe(self):
    """get_insert_dataframe must return only records absent from niamoto."""
    provider = TestDataProvider('test_data_provider_1')
    with Connector.get_connection() as connection:
        plot_provider = BasePlotProvider(provider)
        existing = plot_provider.get_niamoto_plot_dataframe(connection)

        def make_df(records):
            # helper: provider dataframe indexed on 'id'
            return pd.DataFrame.from_records(records, index='id')

        # 1. Nothing to insert
        already_there = make_df([
            {
                'id': 0,
                'name': 'plot_1_1',
                'location': from_shape(Point(166.5521, -22.0939), srid=4326),
                'properties': '{}',
            },
        ])
        ins = plot_provider.get_insert_dataframe(existing, already_there)
        self.assertEqual(len(ins), 0)

        # 2. Everything to insert
        all_new = make_df([
            {
                'id': 10,
                'name': 'plot_1_11',
                'location': from_shape(Point(166.5521, -22.0939), srid=4326),
                'properties': '{}',
            },
            {
                'id': 11,
                'name': 'plot_1_12',
                'location': from_shape(Point(166.551, -22.098), srid=4326),
                'properties': '{}',
            },
        ])
        ins = plot_provider.get_insert_dataframe(existing, all_new)
        self.assertIn('provider_pk', ins.columns)
        self.assertIn('provider_id', ins.columns)
        self.assertEqual(len(ins[pd.isnull(ins['provider_pk'])]), 0)
        self.assertEqual(len(ins[pd.isnull(ins['provider_id'])]), 0)
        self.assertEqual(len(ins), 2)

        # 3. Partial insert
        mixed = make_df([
            {
                'id': 0,
                'name': 'plot_1_1',
                'location': from_shape(Point(166.5521, -22.0939), srid=4326),
                'properties': '{}',
            },
            {
                'id': 11,
                'name': 'plot_1_12',
                'location': from_shape(Point(166.551, -22.098), srid=4326),
                'properties': '{}',
            },
        ])
        ins = plot_provider.get_insert_dataframe(existing, mixed)
        self.assertEqual(len(ins), 1)
def test_find_position_by_query(self):
    """A RootLocation must be retrievable by filtering on its position."""
    origin = Point(10, 20)
    # the simplest
    db.session.add(RootLocation(origin, 100))
    matches = db.session.query(RootLocation).filter_by(
        position=from_shape(Point(10, 20))).all()
    misses = db.session.query(RootLocation).filter_by(
        position=from_shape(Point(20, 20))).all()
    self.assertEqual(1, len(matches))
    self.assertEqual(0, len(misses))
def fill_iris_from(iris_row: GeoSeries) -> IrisFrance:
    """Map one IRIS GeoSeries row onto a new IrisFrance entity."""
    srid = WGS_SPATIAL_REFERENCE_IDENTIFIER
    geometry = iris_row["geometry"]
    iris = IrisFrance()
    iris.irisCode = iris_row["CODE_IRIS"]
    iris.shape = from_shape(geometry, srid=srid)
    iris.centroid = from_shape(create_centroid_from_polygon(geometry), srid=srid)
    return iris
def __set__(self, instance, v):
    """Descriptor setter: accept a shapely Point (stored with srid 4326)
    or raw coordinates, optionally as a ((x, y, ...), srid) pair."""
    if isinstance(v, Point):
        self.property.__set__(instance, from_shape(v, srid=4326))
        return
    if len(v) == 2 and isinstance(v[0], Sequence):
        # (coordinate sequence, srid) pair
        coords, srid = v[0], v[1]
    else:
        # bare coordinate sequence, no srid
        coords, srid = v, None
    self.property.__set__(instance, from_shape(Point(*coords), srid=srid))
def test_sync_update(self):
    """Syncing plot records whose ids already exist must update all of
    them and insert/delete nothing."""
    self.tearDownClass()
    self.setUpClass()
    data_provider_1 = TestDataProvider('test_data_provider_1')
    with Connector.get_connection() as connection:
        pp1 = BasePlotProvider(data_provider_1)
        self.assertEqual(len(pp1.get_niamoto_plot_dataframe(connection)), 4)
        # NOTE: the original records each listed 'properties' twice;
        # Python keeps only the last value ('{}'), so the dead duplicate
        # keys are dropped here without changing behavior.
        # NOTE(review): records 0 and 5 build WKTElement differently
        # (record 0 passes .wkt, record 5 passes the Point object) —
        # preserved as-is; confirm upstream whether that is intended.
        pl = pd.DataFrame.from_records([
            {
                'id': 0,
                'name': "plot_1",
                'location': WKTElement(Point(166.5521, -22.0939).wkt,
                                       srid=4326),
                'properties': '{}',
            },
            {
                'id': 1,
                'name': 'plot_b',
                'location': from_shape(Point(166.551, -22.098), srid=4326),
                'properties': '{}',
            },
            {
                'id': 2,
                'name': 'plot_c',
                'location': from_shape(Point(166.552, -22.097), srid=4326),
                'properties': '{}',
            },
            {
                'id': 5,
                'name': 'plot_d',
                'location': WKTElement(Point(166.553, -22.099), srid=4326),
                'properties': '{}',
            },
        ], index='id')
        i, u, d = pp1._sync(pl, connection)
        self.assertEqual(len(i), 0)
        self.assertEqual(len(u), 4)
        self.assertEqual(len(d), 0)
        self.assertEqual(len(pp1.get_niamoto_plot_dataframe(connection)), 4)
def test_sync_update(self):
    """Syncing occurrence records whose ids already exist must update all
    of them and insert/delete nothing."""
    self.tearDownClass()
    self.setUpClass()
    provider = TestDataProvider('test_data_provider_1')
    with Connector.get_connection() as connection:
        occ_provider = BaseOccurrenceProvider(provider)
        self.assertEqual(
            len(occ_provider.get_niamoto_occurrence_dataframe(connection)), 4)
        records = [
            {
                'id': 0,
                'taxon_id': None,
                'provider_taxon_id': None,
                'location': WKTElement(Point(166.5521, -22.0939).wkt,
                                       srid=4326),
                'properties': '{}',
            },
            {
                'id': 1,
                'taxon_id': None,
                'provider_taxon_id': None,
                'location': from_shape(Point(166.551, -22.098), srid=4326),
                'properties': '{}',
            },
            {
                'id': 2,
                'taxon_id': None,
                'provider_taxon_id': None,
                'properties': '{"yo": "yo"}',
                'location': from_shape(Point(166.552, -22.097), srid=4326),
            },
            {
                'id': 5,
                'taxon_id': None,
                'provider_taxon_id': None,
                'properties': '{}',
                'location': WKTElement(Point(166.553, -22.099), srid=4326),
            },
        ]
        inserted, updated, deleted = occ_provider._sync(
            pd.DataFrame.from_records(records, index='id'), connection)
        self.assertEqual(len(inserted), 0)
        self.assertEqual(len(updated), 4)
        self.assertEqual(len(deleted), 0)
        self.assertEqual(
            len(occ_provider.get_niamoto_occurrence_dataframe(connection)), 4)
def main(places):
    # Upsert Ptolemy places from a pandas DataFrame into the database.
    # NOTE: Python 2 code (print statements, basestring, e.message).
    valid_dispositions = ('known', 'unknown', 'tentative')
    try:
        #connection = psycopg2.connect("dbname='ptolemy' user='******' host='localhost' password='******' port='5433'")
        session = models.create_session()
        print 'connected'
        #cursor = connection.cursor()
        #cursor.execute('''DELETE FROM places''')
        #query = '''INSERT INTO places (ptolemy_id, ptolemy_name, modern_name, ptolemy_point, modern_point, disposition) VALUES (%s, %s, %s, ST_GeogFromText(%s), ST_GeogFromText(%s), %s)'''
        for index, row in places.iterrows():
            print index
            try:
                # fetch existing row by primary key; None means we insert
                place = session.query(models.Place).get(row.ptol_id)
                if place == None:
                    print 'inserting %s' % (row.ptol_id)
                    place = models.Place()
                    place.ptolemy_id = row.ptol_id
                else:
                    print 'updating %s' % (row.ptol_id)
                place.ptolemy_name = row.ptol_name
                # non-string (NaN) modern_name means "no modern name"
                if isinstance(row.modern_name, basestring):
                    place.modern_name = row.modern_name
                else:
                    place.modern_name = None
                place.ptolemy_point = from_shape(
                    Point(row.ptol_lon, row.ptol_lat))
                # modern coordinates are optional; NaN in either axis
                # clears the point
                if np.isnan(row.modern_lat) or np.isnan(row.modern_lon):
                    place.modern_point = None
                else:
                    place.modern_point = from_shape(
                        Point(row.modern_lon, row.modern_lat))
                if row.disposition not in valid_dispositions:
                    place.disposition = None
                else:
                    place.disposition = row.disposition
                session.add(place)
                #cursor.execute(query, place_data)
            except Exception as e:
                # best-effort: a bad row is reported and skipped
                print 'unable to insert %s: %s' % (row.ptol_id, e.message)
        #connection.commit()
        #cursor.close()
        #connection.close()
        session.commit()
        session.close()
    except Exception as e:
        print 'unable to connect: %s' % (e.message, )
def _initTestData(session):
    """
    Initialize Test Scenario with 2 demand nodes and 2 supply_nodes
    Assumes:
      - Reference Data (i.e. NodeTypes) are loaded
      - Database functions/triggers are loaded
    """
    from next.model.models import Scenario, Phase, Node, get_node_type, BASE_SRID

    scenario = Scenario("Test")
    session.add(scenario)
    phase1 = Phase(scenario)
    # add a 2nd phase to test against
    phase2 = Phase(scenario, phase1)
    session.add_all([phase1, phase2])

    # (coordinates, node type, phase) for each node to create;
    # the last demand node goes into phase2
    node_specs = [
        ((0, 0), 'supply', phase1),
        ((-1, -1), 'supply', phase1),
        ((1, 1), 'demand', phase1),
        ((-2, -2), 'demand', phase2),
    ]
    for (x, y), kind, phase in node_specs:
        session.add(Node(
            from_shape(Point(x, y), srid=BASE_SRID),
            1,
            get_node_type(kind, session),
            phase,
        ))
    session.flush()
def difference(self):
    """Return the shapely difference of the two geometries posted in the
    request body as ``{"geometries": [geomA, geomB]}``."""
    body = loads(self.request.body)
    geometries = body.get("geometries")
    if not isinstance(geometries, list) or \
            len(geometries) != 2:  # pragma: no cover
        raise HTTPBadRequest("""Wrong body, it should be like that:
{
    "geometries": [geomA, geomB]
}
""")
    first = from_shape(asShape(geometries[0]))
    second = from_shape(asShape(geometries[1]))
    result = models.DBSession.query(
        func.ST_Difference(first, second)).scalar()
    return to_shape(result)
def difference(self):
    """Return the shapely difference of the two geometries posted in the
    request body as ``{"geometries": [geomA, geomB]}``."""
    body = loads(self.request.body)
    # isinstance instead of exact type comparison: accepts list
    # subclasses and matches the sibling implementation of this view.
    if "geometries" not in body or \
            not isinstance(body["geometries"], list) or \
            len(body["geometries"]) != 2:  # pragma: no cover
        raise HTTPBadRequest("""Wrong body, it should be like that:
{
    "geometries": [geomA, geomB]
}
""")
    return to_shape(DBSession.query(func.ST_Difference(
        from_shape(asShape(body["geometries"][0])),
        from_shape(asShape(body["geometries"][1]))
    )).scalar())
def generate_shp(self):
    """
    Export the data to shapefiles and save them as a zip archive.

    (Translated from the original French docstring: "transformation des
    données au format shp et sauvegarde sous forme d'une archive".)
    Returns True on completion.
    """
    # Create the shapefile structure for the export's columns and SRID
    FionaShapeService.create_shapes_struct(
        db_cols=self.columns,
        srid=self.export.get('geometry_srid'),
        dir_path=self.export_dir,
        file_name=self.file_name)
    items = self.data.get('items')
    for feature in items['features']:
        geom, props = (feature.get(field)
                       for field in ('geometry', 'properties'))
        FionaShapeService.create_feature(
            props, from_shape(asShape(geom),
                              self.export.get('geometry_srid')))
    FionaShapeService.save_and_zip_shapefiles()
    # Remove the generated, uncompressed files (translated from French:
    # "Suppression des fichiers générés et non compressé")
    for gtype in ['POINT', 'POLYGON', 'POLYLINE']:
        file_path = Path(self.export_dir, gtype + '_' + self.file_name)
        if file_path.is_dir():
            shutil.rmtree(file_path)
    return True
def read_one(self):
    """Read a single feature of a layer, enforcing restriction areas.

    Public layers are returned unchecked. For private layers the request
    must be authenticated and the feature's geometry must fall inside at
    least one restriction area granted to the user's role for this layer
    (or an area-less restriction must exist); otherwise 403 is raised.
    """
    set_common_headers(self.request, "layers", NO_CACHE, add_cors=True)
    layer = self._get_layer_for_request()
    protocol = self._get_protocol_for_layer(layer)
    feature_id = self.request.matchdict.get("feature_id", None)
    feature = protocol.read(self.request, id=feature_id)
    if not isinstance(feature, Feature):
        # protocol.read returned something other than a feature
        # (e.g. an error response); pass it through unchanged
        return feature
    if layer.public:
        return feature
    if self.request.user is None:
        raise HTTPForbidden()
    geom = feature.geometry
    if not geom or isinstance(geom, geojson.geometry.Default):  # pragma: no cover
        # no concrete geometry to check against restriction areas
        return feature
    shape = asShape(geom)
    srid = self._get_geom_col_info(layer)[1]
    spatial_elt = from_shape(shape, srid=srid)
    # Count restriction areas linked to the user's role and this layer
    # that either have no area limit or spatially contain the feature.
    allowed = DBSession.query(func.count(RestrictionArea.id))
    allowed = allowed.join(RestrictionArea.roles)
    allowed = allowed.join(RestrictionArea.layers)
    allowed = allowed.filter(Role.id == self.request.user.role.id)
    allowed = allowed.filter(Layer.id == layer.id)
    allowed = allowed.filter(or_(
        RestrictionArea.area.is_(None),
        RestrictionArea.area.ST_Contains(spatial_elt)
    ))
    if allowed.scalar() == 0:
        raise HTTPForbidden()
    return feature
def test_to_archive(self):
    """to_archive must copy document fields and locales into archive
    objects whose own ids are left unset."""
    waypoint = Waypoint(
        document_id=1,
        waypoint_type="summit",
        elevation=2203,
        locales=[
            WaypointLocale(id=2, lang="en", title="A", description="abc"),
            WaypointLocale(id=3, lang="fr", title="B", description="bcd"),
        ],
        geometry=DocumentGeometry(document_id=1,
                                  geom=from_shape(Point(1, 1), srid=3857)),
    )

    archive = waypoint.to_archive()
    self.assertIsNone(archive.id)
    for attr in ('document_id', 'waypoint_type', 'elevation'):
        self.assertEqual(getattr(archive, attr), getattr(waypoint, attr))

    archive_locales = waypoint.get_archive_locales()
    self.assertEqual(len(archive_locales), 2)
    source_locale = waypoint.locales[0]
    archived_locale = archive_locales[0]
    self.assertIsNot(archived_locale, source_locale)
    self.assertIsNone(archived_locale.id)
    for attr in ('lang', 'title', 'description'):
        self.assertEqual(getattr(archived_locale, attr),
                         getattr(source_locale, attr))

    archive_geometry = waypoint.get_archive_geometry()
    self.assertIsNone(archive_geometry.id)
    self.assertIsNotNone(archive_geometry.document_id)
    self.assertEqual(archive_geometry.document_id, waypoint.document_id)
    self.assertIsNotNone(archive_geometry.geom)
def run(self):
    """Load a synapse annotation file into a new SynapseCollection and
    commit it to the database."""
    dataset = DataSet.query.filter_by(name=self.args['dataset']).first()
    volume = Volume.query.filter_by(name=self.args['volume'],
                                    dataset=dataset).first()
    collection = SynapseCollection(
        name=self.args['synapse_collection_name'],
        volume=volume,
        synapse_collection_type=self.args['synapse_collection_type'])

    with open(self.args['synapse_file'], 'r') as fp:
        annotations = json.load(fp)['area_lists']

    for ann in annotations:
        rings = []
        for area in ann['areas']:
            # 2-D path extended with a constant z column
            path = np.array(area['global_path'])
            path = np.concatenate(
                (path, area['z'] * np.ones((path.shape[0], 1))), axis=1)
            rings.append(geometry.Polygon(path))
        db.session.add(Synapse(
            oid=ann['oid'],
            areas=from_shape(geometry.MultiPolygon(rings), srid=SYNAPSE_SRID),
            collection=collection))

    db.session.add(collection)
    db.session.commit()
def update_flight_path(flight):
    """Rebuild the FlightPathChunks rows for *flight*.

    Splits the detailed flight path into chunks of up to 100 fixes,
    deleting any existing chunks first. Returns False when the path has
    fewer than two fixes, True on success.
    """
    from skylines.lib.xcsoar_ import flight_path
    from skylines.lib.datetime import from_seconds_of_day

    # Now populate the FlightPathChunks table with the (full) flight path
    path_detailed = flight_path(flight.igc_file, max_points=3000,
                                qnh=flight.qnh)
    if len(path_detailed) < 2:
        return False

    # Number of points in each chunk.
    num_points = 100

    # Interval of the current chunk: [i, j] (-> path_detailed[i:j + 1])
    i = 0
    j = min(num_points - 1, len(path_detailed) - 1)

    # Ensure that the last chunk contains at least two fixes
    if j == len(path_detailed) - 2:
        j = len(path_detailed) - 1

    FlightPathChunks.query().filter(
        FlightPathChunks.flight == flight).delete()
    date_utc = flight.igc_file.date_utc

    while True:
        # Renamed from the original's local `flight_path`, which shadowed
        # the imported flight_path() function.
        chunk = FlightPathChunks(flight=flight)

        # Save the timestamps of the coordinates
        chunk.timestamps = [
            from_seconds_of_day(date_utc, c.seconds_of_day)
            for c in path_detailed[i:j + 1]
        ]
        chunk.start_time = path_detailed[i].datetime
        chunk.end_time = path_detailed[j].datetime

        # Convert the coordinates into a list of (lon, lat) tuples and
        # save the chunk's path as WKB
        coordinates = [(c.location['longitude'], c.location['latitude'])
                       for c in path_detailed[i:j + 1]]
        chunk.locations = from_shape(LineString(coordinates), srid=4326)
        db.session.add(chunk)

        if j == len(path_detailed) - 1:
            break
        # advance the window; keep at least two fixes in the final chunk
        i = j + 1
        j = min(j + num_points, len(path_detailed) - 1)
        if j == len(path_detailed) - 2:
            j = len(path_detailed) - 1

    db.session.commit()
    return True
def project_new_grid(request):
    """Create a new gridded project from posted GeoJSON, or render the
    creation page when no zoom parameter was submitted."""
    if 'zoom' in request.params:
        owner = DBSession.query(User).get(authenticated_userid(request))
        project = Project(u'Untitled project', owner)
        DBSession.add(project)
        DBSession.flush()

        zoom = int(request.params['zoom'])
        polygons = parse_geojson(request.params['geometry'])
        from shapely.geometry import MultiPolygon
        footprint = shape.from_shape(MultiPolygon(list(polygons)), 4326)
        project.area = Area(footprint)
        project.auto_fill(zoom)

        _ = request.translate
        request.session.flash(
            _("Project #${project_id} created successfully",
              mapping={'project_id': project.id}),
            'success')
        return HTTPFound(
            location=route_path('project_edit', request, project=project.id))
    return dict(page_id='project_new_grid')
def get_elevation(fixes):
    # Attach ground elevation (from the Elevation raster) to a copy of
    # *fixes*, writing it into index 11 of each fix.
    # NOTE: Python 2 code (uses xrange).
    # Sample at most ~1000 points from the fix list.
    shortener = int(max(1, len(fixes) / 1000))
    coordinates = [(fix[2]['longitude'], fix[2]['latitude']) for fix in fixes]
    points = MultiPoint(coordinates[::shortener])
    # Explode the multipoint server-side; location_id is the 1-based
    # index of each dumped point.
    locations = from_shape(points, srid=4326).ST_DumpPoints()
    locations_id = extract_array_item(locations.path, 1)
    subq = db.session.query(locations_id.label('location_id'),
                            locations.geom.label('location')).subquery()
    elevation = Elevation.rast.ST_Value(subq.c.location)
    # Prepare main query: sampled points that hit the raster and have a
    # non-NULL elevation ("!= None" is the SQLAlchemy IS NOT NULL idiom)
    q = db.session.query(literal_column('location_id'),
                         elevation.label('elevation')) \
        .filter(and_(subq.c.location.ST_Intersects(Elevation.rast),
                     elevation != None)).all()
    fixes_copy = [list(fix) for fix in fixes]
    # Spread sampled elevations back over the in-between fixes.
    for i in xrange(1, len(q)):
        prev = q[i - 1].location_id - 1
        current = q[i].location_id - 1
        for j in range(prev * shortener, current * shortener):
            # NOTE(review): this looks like it intends linear
            # interpolation between samples, but the delta is not divided
            # by the sample spacing, so values overshoot between samples
            # -- confirm whether this is intentional.
            elev = q[i - 1].elevation + (q[i].elevation - q[i - 1].elevation) * (j - prev * shortener)
            fixes_copy[j][11] = elev
    return fixes_copy
def create_xfers(features):
    # generator for Transfer objects
    # NOTE: Python 2 code (print statement). Relies on module-level
    # DEBUG and week_start being defined elsewhere in the file.
    for feature in features:
        if DEBUG >= 2:
            print json.dumps(feature, indent=2)
        props = feature['properties']
        if props['__change__'] == 'DELETE':
            # ignore DELETEs since title splits/etc will show up as INSERTs too.
            continue
        action = 'new' if props['__change__'] == 'INSERT' else 'existing'
        # use a point within the title (centroid normally)
        if not feature['geometry']:
            # no geometry at all: cannot derive a location, skip
            continue
        location = from_shape(
            shape(feature['geometry']).representative_point(), srid=4326)
        owners = props['owners']
        #DEV SECTION#
        # crude owner classification from the free-text owners field
        owner_type = ''
        if bool(re.search(r"\bHer Majesty The Queen\b", props['owners'], re.I)) \
                or bool(re.search(r"\bCouncil\b", props['owners'], re.I)):
            owner_type = 'govt'
        elif bool(re.search(r"\bLimited\b", props['owners'], re.I)) \
                or bool(re.search(r"\bIncorporated\b", props['owners'], re.I)):
            owner_type = 'company'
        else:
            owner_type = 'private'
        # create our Transfer object
        transfer = Transfer(props['title_no'], location, action, week_start,
                            owners, owner_type)
        yield transfer
def project_new_grid(request):
    """Create a new gridded project from posted GeoJSON, or render the
    creation page when no zoom parameter was submitted."""
    if 'zoom' not in request.params:
        return dict(page_id='project_new_grid')

    user_id = authenticated_userid(request)
    user = DBSession.query(User).get(user_id)
    project = Project(u'Untitled project', user)
    DBSession.add(project)
    DBSession.flush()

    zoom = int(request.params['zoom'])
    polygons = parse_geojson(request.params['geometry'])
    from shapely.geometry import MultiPolygon
    area_geometry = shape.from_shape(
        MultiPolygon([polygon for polygon in polygons]), 4326)
    project.area = Area(area_geometry)
    project.auto_fill(zoom)

    _ = request.translate
    request.session.flash(
        _("Project #${project_id} created successfully",
          mapping={'project_id': project.id}),
        'success')
    return HTTPFound(
        location=route_path('project_edit', request, project=project.id))
def __update__(self, feature):
    """
    Called by the protocol on object update.

    Arguments:

    * ``feature`` The GeoJSON feature as received from the client.
    """
    for prop in class_mapper(self.__class__).iterate_properties:
        if not isinstance(prop, ColumnProperty):
            continue
        column = prop.columns[0]
        if isinstance(column.type, Geometry):
            # geometry columns: set from the feature's geometry, if any
            geom = feature.geometry
            if geom and not isinstance(geom, geojson.geometry.Default):
                shp = asShape(geom)
                setattr(self, prop.key,
                        from_shape(shp, srid=column.type.srid))
                self._shape = shp
        elif not column.primary_key and prop.key in feature.properties:
            # plain columns: copy matching feature properties
            setattr(self, prop.key, feature.properties[prop.key])
    if self.__add_properties__:
        # extra, non-column properties configured on the class
        for extra in self.__add_properties__:
            setattr(self, extra, feature.properties.get(extra))
def __init__(self, slug, title, geometry=None, description=None, blurb=None,
             help=None, instruction=None, active=None, difficulty=None,
             options=None):
    """Create a challenge. Defaults: world-wide geometry, inactive."""
    self.slug = slug
    self.title = title
    self.geometry = from_shape(
        world_polygon if geometry is None else geometry)
    self.description = description
    self.blurb = blurb
    self.help = help
    self.instruction = instruction
    self.active = False if active is None else active
    self.difficulty = difficulty
    self.options = options
def add_airspace(country_code, airspace_class, name, base, top, geom_str):
    # Parse one airspace polygon (WKT in geom_str) and queue it for
    # insertion; returns True when queued, False when skipped.
    # NOTE: Python 2 code (print statements).
    try:
        geom = loads(geom_str)
    except ReadingError:
        print name + "(" + airspace_class + ") is not a polygon (maybe not enough points?)"
        return False
    # orient polygon clockwise
    geom = polygon.orient(geom, sign=-1)
    if not airspace_class:
        print name + " has no airspace class"
        return False
    base = normalise_height(base, name)
    top = normalise_height(top, name)
    # skip airspace whose base is at or above flight level 200
    flightlevel_re = re.compile(r"^FL (\d+)$")
    match = flightlevel_re.match(base)
    if match and int(match.group(1)) >= 200:
        print name + " has it's base above FL 200 and is therefore disregarded"
        return False
    airspace = Airspace()
    airspace.country_code = country_code
    airspace.airspace_class = airspace_class
    airspace.name = name
    airspace.base = base
    airspace.top = top
    airspace.the_geom = from_shape(geom, srid=4326)
    db.session.add(airspace)
    return True
def build_data_set(self, feature, **kwargs):
    """Build one sample record from an OGR *feature*: a representative
    point of its first sub-geometry plus mapped date/class/user fields."""
    multipoint = feature.GetGeometryRef()
    first_point = multipoint.GetGeometryRef(0)
    representative = geom_from_wkt(
        first_point.ExportToWkt()).representative_point()
    location = shape.from_shape(representative, srid=4326)

    def resolve(field):
        # fixed value from the mapping wins over the feature attribute
        mapping = self.mappings[field]
        return mapping.get('value') or feature.GetField(mapping['key'])

    return {
        "start_date": resolve('start_date'),
        "end_date": resolve('end_date'),
        "location": location,
        "class_id": self.storager.samples_map_id[
            feature.GetField(self.class_name)],
        "user_id": self.user
    }
def add_nodes(request):
    """ Add nodes to a new child phase """
    session = DBSession()
    phase = get_object_or_404(Phase, request, ('phase_id', 'id'))
    child_phase = Phase(phase.scenario, phase)
    new_nodes = []
    for feature in request.json_body['features']:
        # assumes point geom
        coords = feature['geometry']['coordinates']
        geom = from_shape(
            shapely.geometry.Point(coords[0], coords[1]), srid=BASE_SRID)
        props = feature['properties']
        node_type = get_node_type(props['type'], session)
        new_nodes.append(Node(geom, props['weight'], node_type, child_phase))
    session.add_all(new_nodes)
    session.flush()
    child_phase.create_edges()
    return json_response(
        {'scenario_id': child_phase.scenario_id, 'phase_id': child_phase.id}
    )
def write_to_postgis(self):
    # Flush collected bus-stop delays to PostGIS (committing every 250
    # rows), then purge rows older than 12 hours.
    # NOTE: Python 2 code (print statement); under Python 2, d[1]/60 is
    # integer division, so the delay is stored in whole minutes.
    session = self.Session()
    i = 0
    for id in self.busStopArray:
        f = self.busStopArray[id]
        for d in f.delays:
            # d indices used below: 0=line, 1=delay seconds, 2=number,
            # 3=departure timestamp, 4=destination -- inferred from the
            # assignments; confirm against the producer of f.delays.
            delay = BusDelay()
            delay.id = f.id
            delay.name = f.name
            delay.line = d[0]
            delay.delay = d[1]/60
            delay.geom = from_shape(f.geom, srid=4326)
            delay.time = self.latestUpdateStartTime
            delay.number = d[2]
            delay.departure = datetime.datetime.fromtimestamp(d[3])
            delay.destination = d[4]
            session.add(delay)
            i += 1
            if i % 250 == 0:
                print "committing after %s stops" % i
                session.commit()
    session.commit()
    # drop delays older than 12 hours
    s = "delete from busdelays where CURRENT_TIMESTAMP - time > INTERVAL '12 hours'"
    c = self.engine.connect()
    c.execute(s)
    del c
    session.close()
def points_extent(request):
    """Return a GeoJSON FeatureCollection of all points intersecting the
    requested bounding box.

    Raises HTTPBadRequest when any extent coordinate is not numeric.
    """
    try:
        # parse the four extent coordinates from the route match
        xmin = float(request.matchdict['xmin'])
        ymin = float(request.matchdict['ymin'])
        xmax = float(request.matchdict['xmax'])
        ymax = float(request.matchdict['ymax'])
    except ValueError:
        raise HTTPBadRequest()

    # build the extent polygon from its four corners (counter-clockwise)
    corners = [(xmin, ymin), (xmax, ymin), (xmax, ymax), (xmin, ymax)]
    bbox_element = from_shape(Polygon(corners), srid=4326)

    matching_points = DBSession \
        .query(Point) \
        .filter(Point.geom.ST_Intersects(bbox_element)) \
        .all()

    return {
        'type': 'FeatureCollection',
        'features': [to_geojson(pt) for pt in matching_points]
    }
def add_airspace(self, country_code, airspace_class, name, base, top, geom_str):
    """Parse one airspace polygon and stage it for insertion.

    Returns True on success; returns False (with a console message) for
    unparsable geometries, entries without an airspace class, or
    airspaces whose base is at or above FL 200.
    """
    try:
        geom = loads(geom_str)
    except ReadingError:
        print name + "(" + airspace_class + ") is not a polygon (maybe not enough points?)"
        return False

    # orient polygon clockwise
    geom = polygon.orient(geom, sign=-1)

    if not airspace_class:
        print name + " has no airspace class"
        return False

    base = self.normalise_height(base, name)
    top = self.normalise_height(top, name)

    # skip airspaces starting at or above flight level 200
    flightlevel_re = re.compile(r'^FL (\d+)$')
    match = flightlevel_re.match(base)
    if match and int(match.group(1)) >= 200:
        print name + " has it's base above FL 200 and is therefore disregarded"
        return False

    airspace = Airspace()
    airspace.country_code = country_code
    airspace.airspace_class = airspace_class
    airspace.name = name
    airspace.base = base
    airspace.top = top
    airspace.the_geom = from_shape(geom, srid=4326)

    # staged only; caller is responsible for committing the session
    db.session.add(airspace)

    return True
def coords_to_point(point):
    """Convert a (lat, lng) tuple into a GeoAlchemy point (WKBElement),
    SRID 4326.  Note the lat/lng order is swapped to x=lng, y=lat."""
    lat, lng = point
    return from_shape(Point(lng, lat), srid=4326)
def update_flight_path(self):
    """Recompute this flight's path from its IGC file.

    Returns False when the computed path has fewer than two fixes;
    otherwise updates self.timestamps and self.locations and returns True.
    """
    from skylines.lib.xcsoar_ import flight_path
    from skylines.lib.datetime import from_seconds_of_day

    # Run the IGC file through the FlightPath utility
    path = flight_path(self.igc_file, qnh=self.qnh)
    if len(path) < 2:
        return False

    # Save the timestamps of the coordinates
    date_utc = self.igc_file.date_utc
    self.timestamps = \
        [from_seconds_of_day(date_utc, c.seconds_of_day) for c in path]

    # Convert the coordinate into a list of tuples
    coordinates = [(c.location['longitude'], c.location['latitude'])
                   for c in path]

    # Create a shapely LineString object from the coordinates
    linestring = LineString(coordinates)

    # Save the new path as WKB
    self.locations = from_shape(linestring, srid=4326)

    return True
def load_zupc_temp_table(shape_filename, zupc_obj=None): departements = { d.numero: d for d in administrative.Departement.query.all() } def find_departement(p): for i in range(0, len(p['insee'])): if p['insee'][:i] in departements: return departements[p['insee'][:i]] return None i = 0 for geom, properties in records(shape_filename): departement = find_departement(properties) if not departement: print "Unable to find departement for insee: {}".format(properties['insee']) continue z = zupc_obj() z.nom = properties['nom'] z.insee = properties['insee'] z.departement_id = departement.id z.shape = from_shape(MultiPolygon([shape(geom)]), srid=4326) z.active = False db.session.add(z) if (i%100) == 0: db.session.commit() i += 1 status = r"%10d zupc ajoutées" % (i) status = status + chr(8)*(len(status)+1) print status, db.session.commit() print "%10d zupc ajoutées" %i
def _create_square(x, y, zoom) -> geojson.MultiPolygon:
    """
    Function for creating a geojson.MultiPolygon square representing a single
    OSM tile grid square
    :param x: osm tile grid x
    :param y: osm tile grid y
    :param zoom: osm tile grid zoom level
    :return: geojson.MultiPolygon in EPSG:4326
    """
    # Maximum resolution
    MAXRESOLUTION = 156543.0339

    # X/Y axis limit (renamed from `max`, which shadowed the builtin)
    axis_limit = MAXRESOLUTION * 256 / 2

    # calculate extents of the tile in EPSG:3857
    step = axis_limit / (2**(zoom - 1))
    xmin = x * step - axis_limit
    ymin = y * step - axis_limit
    xmax = (x + 1) * step - axis_limit
    ymax = (y + 1) * step - axis_limit

    # make a shapely multipolygon
    multipolygon = MultiPolygon([
        Polygon([(xmin, ymin), (xmax, ymin), (xmax, ymax), (xmin, ymax)])
    ])

    # use the database to transform the geometry from 3857 to 4326
    transformed_geometry = ST_Transform(
        shape.from_shape(multipolygon, 3857), 4326)

    # use DB to get the geometry as geojson
    return geojson.loads(
        db.engine.execute(transformed_geometry.ST_AsGeoJSON()).scalar())
def _construct_row(self, obj, conn):
    """Build the output row dict for one OSM route relation.

    Returns None when no geometry could be built; raises RuntimeError for
    unexpected geometry types.
    """
    tags = TagStore(obj['tags'])
    outtags, difficulty = basic_tag_transform(obj['id'], tags, self.config)

    # we don't support hierarchy at the moment
    outtags['top'] = True

    # geometry
    geom = build_route_geometry(conn, obj['members'], self.ways, self.data)

    if geom is None:
        return None

    if geom.geom_type not in ('MultiLineString', 'LineString'):
        raise RuntimeError("Bad geometry %s for %d" % (geom.geom_type, obj['id']))

    # if the route is unsorted but linear, sort it
    if geom.geom_type == 'MultiLineString':
        fixed_geom = linemerge(geom)
        if fixed_geom.geom_type == 'LineString':
            geom = fixed_geom

    # reuse the SRID configured on the table's geometry column
    outtags['geom'] = from_shape(geom, srid=self.c.geom.type.srid)
    outtags['symbol'] = write_symbol(self.shield_fab, tags, difficulty,
                                     self.config.symbol_datadir)
    outtags['id'] = obj['id']

    return outtags
def put(self, args, shop_id, **_kwargs):
    """Replace a shop's name, address, position and tags.

    Returns a 400 error payload on integrity violations, otherwise the
    serialized shop.
    """
    shop = Shop.query.get_or_404(shop_id)
    shop.name = args['name']
    shop.address = args['address']
    # position stored as a WGS84 point (x=lng, y=lat)
    shop.position = from_shape(Point(args['lng'], args['lat']), srid=4326)
    # drop the old tags before inserting the new set
    for tag in shop.tags:
        db.session.delete(tag)
    # flush delete's to db to ensure delete stmts precede insert stmt and avoid integrity error
    try:
        db.session.flush()
    except IntegrityError:
        db.session.rollback()
        return custom_error('Address/Position/Name',
                            ['Same address, position and name with existing shop']), \
            ErrorCode.BAD_REQUEST
    shop.tags = [
        ShopTag(name=tag, shop=shop)
        for tag in unique_stripped(args['tags'])
    ]
    try:
        db.session.commit()
    except IntegrityError:
        # we should never get here
        db.session.rollback()
        return custom_error(
            'tags', ['Duplicate tags'
                     ]), ErrorCode.BAD_REQUEST
    return shop_schema.dump(shop).data
def update_flight_path(self):
    """Recompute this flight's path from its IGC file.

    Returns False when the computed path has fewer than two fixes;
    otherwise updates self.timestamps and self.locations and returns True.
    """
    from skylines.lib.xcsoar_ import flight_path
    from skylines.lib.datetime import from_seconds_of_day

    # Run the IGC file through the FlightPath utility
    path = flight_path(self.igc_file, qnh=self.qnh)
    if len(path) < 2:
        return False

    # Save the timestamps of the coordinates
    date_utc = self.igc_file.date_utc
    self.timestamps = [
        from_seconds_of_day(date_utc, c.seconds_of_day) for c in path
    ]

    # Convert the coordinate into a list of tuples
    coordinates = [(c.location["longitude"], c.location["latitude"]) for c in path]

    # Create a shapely LineString object from the coordinates
    linestring = LineString(coordinates)

    # Save the new path as WKB
    self.locations = from_shape(linestring, srid=4326)

    return True
def setUp(self):
    """Create and populate a PostGIS test table with a 7x7 grid of points."""
    settings_module = os.environ.get('CLUSTERIT_SETTINGS_MODULE', 'tests.settings')
    # take the first configured service (Python 2: dict.items() is a list)
    self.config_id = importlib.import_module(settings_module).SERVICES.items()[0][0]
    self.config = importlib.import_module(settings_module).SERVICES.items()[0][1]

    # one point feature per integer coordinate in [-3, 3] x [-3, 3]
    features = []
    for x in range(-3, 4):
        for y in range(-3, 4):
            features.append({
                'geometry': Point([x, y]),
                'properties': {
                    'ident': '%+d - %+d' % (x, y)
                }
            })

    engine = self._get_postgis()
    table = Table(self.config['table'], MetaData(engine),
                  Column('id', Integer, primary_key=True),
                  Column(self.config['geometryName'], Geometry('POINT', srid=4326)),
                  Column('ident', String))
    # recreate the table from scratch for each run
    table.drop(checkfirst=True)
    table.create()

    # Insert features
    connection = engine.connect()
    for feature in features:
        values = copy.copy(feature['properties'])
        values[self.config['geometryName']] = from_shape(feature['geometry'], srid=4326)
        connection.execute(table.insert().values(values))
def load_zupc(zupc_path):
    """Load ZUPC unions from a GeoJSON file.

    For each feature, finds an existing ZUPC (the parent) among the
    feature's property keys (insee codes), then creates/updates a ZUPC
    row per insee code sharing the feature's geometry and the parent's
    departement and name.
    """
    with open(zupc_path) as f:
        for feature in json.load(f)['features']:
            # NOTE(review): no srid passed here, unlike the other loaders
            # in this file which use srid=4326 — confirm intent
            wkb = shape.from_shape(geometry.shape(feature['geometry']))
            properties = feature['properties']
            # fix: initialize so an empty properties dict cannot cause a
            # NameError at the check below
            parent = None
            for p in properties:
                parent = ZUPC.query.filter_by(insee=p).first()
                if parent:
                    break
            if not parent:
                current_app.logger.error('Unable to get a insee code in : {}'.format(properties))
                return
            for insee in properties:
                zupc = ZUPC.query.filter_by(insee=insee).first()
                if not zupc:
                    zupc = ZUPC()
                    zupc.insee = insee
                    zupc.departement = parent.departement
                    zupc.nom = parent.nom
                    db.session.add(zupc)
                #This is the case in Paris and Lyon, but it's not important
                zupc.shape = wkb
                zupc.parent_id = parent.id
        db.session.commit()
    update_zupc()
def _proto_read(self, layer):
    """ Read features for the layer based on the self.request.

    Public layers are read unrestricted.  Otherwise features are limited
    to the union of the restriction areas granted to the user's role for
    this layer; a NULL area grants unrestricted access.
    """
    proto = self._get_protocol_for_layer(layer)
    if layer.public:
        return proto.read(self.request)

    user = self.request.user
    if user is None:
        raise HTTPForbidden()

    cls = proto.mapped_class
    geom_attr = proto.geom_attr

    # restriction areas attached to the user's role for this layer
    ras = DBSession.query(RestrictionArea.area, RestrictionArea.area.ST_SRID())
    ras = ras.join(RestrictionArea.roles)
    ras = ras.join(RestrictionArea.layers)
    ras = ras.filter(Role.id == user.role.id)
    ras = ras.filter(Layer.id == layer.id)
    collect_ra = []
    use_srid = -1
    for ra, srid in ras.all():
        if ra is None:
            # a NULL restriction area means unrestricted access
            return proto.read(self.request)
        else:
            use_srid = srid
            collect_ra.append(to_shape(ra))
    if len(collect_ra) == 0:  # pragma: no cover
        raise HTTPForbidden()

    filter1_ = create_filter(self.request, cls, geom_attr)
    # only features contained in the union of the restriction areas
    ra = cascaded_union(collect_ra)
    filter2_ = ga_func.ST_Contains(
        from_shape(ra, use_srid),
        getattr(cls, geom_attr)
    )
    filter_ = filter2_ if filter1_ is None else and_(filter1_, filter2_)

    return proto.read(self.request, filter=filter_)
def main(argv=sys.argv): if len(argv) != 2: usage(argv) config_uri = argv[1] setup_logging(config_uri) settings = get_appsettings(config_uri) engine = engine_from_config(settings, 'sqlalchemy.') DBSession.configure(bind=engine) translation_manager.options.update({ 'locales': settings['available_languages'].split(), 'get_locale_fallback': True }) configure_mappers() postgis_version = DBSession.execute(func.postgis_version()).scalar() if not postgis_version.startswith('2.'): # With PostGIS 1.x the AddGeometryColumn and DropGeometryColumn # management functions should be used. Area.__table__.c.geometry.type.management = True Task.__table__.c.geometry.type.management = True Base.metadata.drop_all(engine) Base.metadata.create_all(engine) with transaction.manager: geometry = '{"type":"MultiPolygon","coordinates":[[[[0.9439973,48.1124991],[0.9439617,48.1124316],[0.9436539,48.1123846],[0.9435646,48.1123915],[0.9435327,48.1124159],[0.9435101,48.1124985],[0.9434772,48.1125175],[0.9426452,48.1123642],[0.9425722,48.112333],[0.9422699,48.1120818],[0.9421995,48.1120605],[0.9421477,48.1120648],[0.9420632,48.112103],[0.941881,48.1122479],[0.9418038,48.1122679],[0.9417503,48.1122513],[0.9415619,48.1121153],[0.9414559,48.1120661],[0.9413928,48.1120672],[0.9409135,48.1121726],[0.9408424,48.1121538],[0.9407001,48.112081],[0.9406476,48.1120888],[0.940564,48.1121817],[0.9404357,48.1122414],[0.9403099,48.112273],[0.9399965,48.1122711],[0.9396713,48.1123254],[0.9393974,48.1122674],[0.9392938,48.1122742],[0.939251,48.1123041],[0.9391999,48.1123871],[0.9391291,48.1124258],[0.9390605,48.1124212],[0.9387723,48.1123216],[0.9387038,48.112312],[0.9385911,48.112324],[0.938539,48.1123602],[0.9384626,48.1124552],[0.938409,48.1124857],[0.9382915,48.1124816],[0.937814,48.1123474],[0.9373468,48.1121692],[0.9372416,48.1121531],[0.9370728,48.11222],[0.9368963,48.1122388],[0.9368195,48.1122207],[0.9366888,48.1120758],[0.9366305,48.1120462],[0.9365513,48.1120533],[0.9364563,48.1121898],[0.9364051,48.112215
],[0.9362435,48.1122447],[0.9360775,48.1122529],[0.935965,48.1122311],[0.9358367,48.1121436],[0.9356261,48.1121597],[0.9354644,48.1121544],[0.935367,48.1121832],[0.9353183,48.1122211],[0.9353043,48.1123266],[0.9352257,48.1123409],[0.9351053,48.1123109],[0.9349266,48.112308],[0.9349437,48.1122208],[0.935028,48.1121111],[0.9350097,48.1120778],[0.9349264,48.1120525],[0.9348114,48.1120507],[0.9345203,48.1121667],[0.934303,48.1121371],[0.9341118,48.1120827],[0.9336163,48.1118631],[0.9334011,48.1118632],[0.9333406,48.1118784],[0.9333018,48.1119087],[0.9332712,48.1120369],[0.9332095,48.1120514],[0.9326641,48.1119703],[0.9324749,48.1119783],[0.9323901,48.1120035],[0.9320996,48.1121894],[0.932001,48.1122266],[0.9318959,48.1122429],[0.9318205,48.1122366],[0.9317522,48.1121982],[0.9315106,48.111858],[0.9314835,48.111855],[0.9313209,48.111955],[0.9311756,48.1119605],[0.9310817,48.111943],[0.9308539,48.111838],[0.93077,48.1118461],[0.9306913,48.1120577],[0.9306603,48.1120802],[0.9304127,48.1120509],[0.9301916,48.1121665],[0.9301366,48.1121803],[0.9300427,48.1121754],[0.9299711,48.112132],[0.9296377,48.1117226],[0.929518,48.1116539],[0.9291236,48.1115423],[0.928688,48.1113557],[0.9284554,48.1112127],[0.928209,48.1111933],[0.9276319,48.1110024],[0.9272926,48.1109949],[0.92716,48.1109442],[0.9268904,48.1107475],[0.9267995,48.1106985],[0.926513,48.1105908],[0.9257587,48.110361],[0.9256844,48.1103175],[0.9252016,48.1099046],[0.9247036,48.1095281],[0.9238873,48.1089805],[0.9234698,48.1087463],[0.9230175,48.1085317],[0.9228205,48.1084023],[0.9226396,48.1082393],[0.9223989,48.1079389],[0.9221707,48.1076004],[0.9220664,48.1074937],[0.9218263,48.1073359],[0.9206929,48.1067142],[0.9202506,48.1064974],[0.9194249,48.1061589],[0.9189929,48.1059179],[0.9185714,48.1056273],[0.918382,48.1054632],[0.9181514,48.1051805],[0.9180088,48.105076],[0.9178621,48.1050116],[0.9177965,48.1049636],[0.917625,48.1047545],[0.9174401,48.1046548],[0.9171922,48.1045622],[0.9171528,48.1045598],[0.9170589,48.1046051
],[0.916151,48.1044458],[0.9157368,48.1043942],[0.9155837,48.104331],[0.9153533,48.1043506],[0.9152267,48.1043078],[0.9151469,48.1043302],[0.9144978,48.1042957],[0.9138818,48.1042867],[0.913612,48.1042512],[0.9135344,48.1042158],[0.91342,48.1041083],[0.9133722,48.1040059],[0.9133453,48.1038797],[0.9124999,48.1037314],[0.9120556,48.103698],[0.9119134,48.1036545],[0.9114243,48.1035828],[0.9110642,48.1034704],[0.9107811,48.1034509],[0.9106821,48.103419],[0.9104466,48.103264],[0.9103186,48.1031233],[0.9102487,48.1031311],[0.9101926,48.103167],[0.9101209,48.1032555],[0.9099347,48.1033954],[0.9097093,48.1035337],[0.9093202,48.1037211],[0.9091969,48.1037633],[0.9088825,48.1038087],[0.908232,48.1038471],[0.9079792,48.1039572],[0.9078613,48.1039921],[0.9073389,48.1040699],[0.9069709,48.1040314],[0.9067374,48.1040984],[0.9062589,48.1041359],[0.9059745,48.1041992],[0.9056679,48.1041682],[0.9050763,48.1042027],[0.904938,48.1041824],[0.9048179,48.1041305],[0.9045452,48.1039603],[0.9044561,48.103924],[0.904196,48.1038809],[0.9036573,48.103662],[0.9034452,48.1035998],[0.903079,48.1035192],[0.9028311,48.103446],[0.9018642,48.1033358],[0.9013742,48.1032336],[0.9005004,48.1031099],[0.9002539,48.1030173],[0.9000271,48.1029553],[0.8980542,48.1026568],[0.8970976,48.1025458],[0.896599,48.1024688],[0.895392,48.1023392],[0.8944552,48.1023009],[0.8943655,48.1022504],[0.893184,48.1021089],[0.892454,48.1020611],[0.8907345,48.102055],[0.8890399,48.1021236],[0.8871747,48.100757],[0.8867015,48.1002951],[0.8856048,48.099084],[0.8853025,48.0987087],[0.8852286,48.0986621],[0.8849884,48.0986016],[0.8850231,48.0983716],[0.8849404,48.0983012],[0.88507,48.0980405],[0.8852975,48.0976653],[0.8852704,48.0976029],[0.8851619,48.0975104],[0.8850349,48.0958233],[0.884925,48.09557],[0.8845941,48.0949965],[0.8844349,48.094641],[0.8839767,48.094183],[0.883877,48.0940455],[0.8838347,48.0939336],[0.8838474,48.0937614],[0.8838277,48.0937378],[0.8836345,48.0937005],[0.8835989,48.0936473],[0.8835399,48.0932616],[0.88
35453,48.0931402],[0.8836322,48.0929363],[0.8837115,48.0928221],[0.8839719,48.0925712],[0.8840114,48.0924936],[0.8841869,48.0918922],[0.884514,48.0909761],[0.8845052,48.0909363],[0.8841545,48.0909667],[0.8840605,48.0909899],[0.8840665,48.0909786],[0.88377,48.0910244],[0.8821938,48.0913622],[0.8822637,48.090884],[0.8822353,48.0904922],[0.8816999,48.0878551],[0.8816151,48.0876328],[0.8814922,48.0874268],[0.8814738,48.0874217],[0.8814855,48.0874104],[0.8814306,48.0872591],[0.8815099,48.0869485],[0.8815416,48.086688],[0.8813337,48.0861234],[0.8813029,48.086034],[0.8813103,48.0858493],[0.881293,48.0858045],[0.881064,48.0855132],[0.8809225,48.0852696],[0.8809149,48.0851904],[0.880876,48.0851082],[0.8807837,48.0850312],[0.8805608,48.0849696],[0.87945,48.0845884],[0.8794268,48.0845672],[0.879631,48.0841288],[0.8799079,48.083644],[0.880107,48.0831259],[0.880317,48.0828603],[0.8804452,48.0827907],[0.8808702,48.0826873],[0.8809531,48.0826021],[0.8810799,48.0825662],[0.881479,48.0823449],[0.8818056,48.0820532],[0.8818558,48.081984],[0.8819116,48.0817215],[0.8818362,48.0817037],[0.8819608,48.0813496],[0.8819926,48.0813395],[0.8823055,48.0813597],[0.8823133,48.0812496],[0.8824595,48.0807075],[0.8824582,48.0806392],[0.8823237,48.080407],[0.8821394,48.08017],[0.881799,48.0798468],[0.8816261,48.0796064],[0.8816053,48.0795361],[0.8816229,48.0793828],[0.8815777,48.0788658],[0.8819271,48.0726864],[0.8819837,48.0726738],[0.8827333,48.0724684],[0.8834216,48.0723433],[0.8838909,48.0722909],[0.8861918,48.0721536],[0.8870951,48.07213],[0.8885457,48.0721751],[0.8890476,48.0722146],[0.8897387,48.0722913],[0.8924092,48.0726618],[0.8929554,48.0726915],[0.8938723,48.0726924],[0.8952958,48.0725969],[0.895799,48.0725847],[0.8971856,48.0726643],[0.898136,48.0727578],[0.8990872,48.0728791],[0.899557,48.0729567],[0.9001325,48.0730757],[0.9029573,48.0737891],[0.9038139,48.0740308],[0.9070093,48.0750725],[0.9081086,48.0753728],[0.9099514,48.0758167],[0.910322,48.0759223],[0.9117924,48.0765358],[0.91266
49,48.0768328],[0.9132159,48.0769866],[0.9137789,48.0771191],[0.9143513,48.0772292],[0.9149079,48.0773127],[0.9168405,48.0772668],[0.9194871,48.0771411],[0.9199632,48.0770987],[0.9207016,48.0771334],[0.9227369,48.0770818],[0.9227977,48.0771217],[0.9237378,48.0774408],[0.9242799,48.077677],[0.9245315,48.0777642],[0.9259242,48.078126],[0.9259835,48.0781653],[0.9257486,48.0782691],[0.9250651,48.0786527],[0.9250171,48.0787207],[0.9250181,48.078768],[0.9250785,48.0788498],[0.9251769,48.0789144],[0.9253266,48.078969],[0.9253932,48.0790145],[0.9254937,48.0791402],[0.925567,48.0791978],[0.9257456,48.0791989],[0.9258597,48.0791272],[0.9259201,48.0790553],[0.9259676,48.0788869],[0.9260389,48.07885],[0.9262833,48.078847],[0.9263631,48.0788613],[0.9272144,48.0791422],[0.928243,48.0794004],[0.9287124,48.0795397],[0.9288762,48.0796129],[0.9291839,48.07988],[0.9292587,48.080876],[0.9290689,48.081457],[0.9286252,48.0830512],[0.9286038,48.0830777],[0.92844,48.0830836],[0.9281558,48.0834763],[0.9277933,48.083806],[0.9278103,48.0838341],[0.927992,48.0839684],[0.9281209,48.0840093],[0.9281962,48.0840779],[0.9286212,48.0846534],[0.9288801,48.0845032],[0.9290658,48.0843593],[0.9291169,48.0843561],[0.9291326,48.0843802],[0.9290639,48.0845228],[0.9290714,48.084559],[0.9291115,48.084589],[0.9293888,48.0846715],[0.9297144,48.0849806],[0.9299357,48.0850234],[0.9300327,48.0850594],[0.9300734,48.0851101],[0.9301458,48.0853341],[0.9300687,48.0855351],[0.9301256,48.085745],[0.9300608,48.0859372],[0.9301152,48.0860054],[0.9301412,48.0861604],[0.9301083,48.0863554],[0.9301356,48.0864634],[0.9305376,48.0870193],[0.9306298,48.0870276],[0.9311066,48.0868727],[0.9311625,48.0868716],[0.9313259,48.0869521],[0.9318028,48.0880182],[0.9318266,48.0881523],[0.9319901,48.0884926],[0.9320287,48.0885328],[0.9327689,48.0884882],[0.933996,48.0883107],[0.93406,48.0888072],[0.9358186,48.0885738],[0.9379588,48.088448],[0.9404128,48.0884739],[0.9404298,48.0884615],[0.9404271,48.087987],[0.9409544,48.0880107],[0.943622
5,48.0909035],[0.944013,48.0912746],[0.9445871,48.0917695],[0.9446888,48.0918275],[0.9448178,48.0918585],[0.9450145,48.0919347],[0.9452888,48.0920105],[0.9456031,48.0921922],[0.9458274,48.0925722],[0.9461027,48.0929715],[0.9467571,48.0937549],[0.9466013,48.0939368],[0.9457586,48.0946713],[0.9449309,48.0951669],[0.9442778,48.0957071],[0.9431922,48.0967234],[0.9430035,48.0968566],[0.9427329,48.0970077],[0.9428798,48.0970882],[0.9436925,48.0974573],[0.9442749,48.097623],[0.9449722,48.0979461],[0.9449351,48.0979618],[0.9446782,48.0981461],[0.9446701,48.0981728],[0.9447244,48.098264],[0.9447744,48.098652],[0.9448442,48.0989081],[0.9448821,48.0989785],[0.9450293,48.099123],[0.9452603,48.0993019],[0.9454104,48.0994448],[0.9454708,48.0996084],[0.9455327,48.099971],[0.9457118,48.1002634],[0.9457302,48.1003421],[0.9457144,48.1005718],[0.945601,48.1009101],[0.9455879,48.1010401],[0.9454882,48.1012024],[0.9454903,48.1017689],[0.9457282,48.1019203],[0.9459384,48.10198],[0.9459877,48.1020078],[0.9461158,48.1021672],[0.9461662,48.1022786],[0.9461757,48.102371],[0.9464741,48.1027264],[0.9465394,48.1029182],[0.9467153,48.103137],[0.9467236,48.1032315],[0.9466626,48.1033925],[0.9466604,48.1034558],[0.9468874,48.1037169],[0.9469397,48.1038022],[0.9469778,48.1041225],[0.9469578,48.104168],[0.9469827,48.1042705],[0.9469437,48.1044249],[0.9469577,48.1045391],[0.9469388,48.1046384],[0.9469778,48.1047147],[0.9470086,48.1047847],[0.9473819,48.1050776],[0.9474707,48.1051754],[0.9475253,48.1052809],[0.9475305,48.1054269],[0.9475663,48.1055737],[0.9475898,48.1056804],[0.9475196,48.1057741],[0.9473788,48.1058605],[0.9472747,48.106018],[0.9471141,48.1062086],[0.9471387,48.1063949],[0.94701,48.1067945],[0.9470153,48.1069376],[0.9469803,48.1069933],[0.946806,48.1070801],[0.9466969,48.1071462],[0.9466209,48.1072286],[0.9464116,48.1072491],[0.9462462,48.1072746],[0.9460057,48.107246],[0.9457974,48.1073321],[0.9456807,48.1074036],[0.9453011,48.1076196],[0.9449878,48.1078416],[0.9448832,48.1079474],[0
.9448628,48.1082604],[0.944825,48.1086698],[0.9448109,48.1089699],[0.9447343,48.1098883],[0.9447134,48.1101126],[0.9445625,48.1101423],[0.9444986,48.1102203],[0.9444422,48.1103438],[0.9445083,48.1106833],[0.9446679,48.1108109],[0.9446864,48.1108429],[0.9445882,48.1109482],[0.9445629,48.1110658],[0.9445243,48.1111079],[0.9444333,48.1111562],[0.9443598,48.1111432],[0.9442693,48.1112372],[0.9442649,48.1113383],[0.9442985,48.1114212],[0.9444328,48.1115197],[0.9444457,48.1115469],[0.9444051,48.1116867],[0.9444106,48.1118206],[0.9442092,48.1119209],[0.9441154,48.1120517],[0.9439546,48.112119],[0.9438787,48.1122846],[0.9438909,48.1123155],[0.9440504,48.1123422],[0.9440828,48.11236],[0.9439973,48.1124991]]]]}' # noqa geometry = geojson.loads(geometry, object_hook=geojson.GeoJSON.to_instance) geometry = shapely.geometry.asShape(geometry) geometry = shape.from_shape(geometry, 4326) area = Area( geometry ) DBSession.add(area) project = Project( 'Map all primary roads' ) project.area = area project.short_description = "Lorem ipsum dolor sit amet, consectetur adipisicing elit, sed do eiusmod tempor incididunt ut labore et dolore magna aliqua." # noqa project.description = "Lorem ipsum dolor sit amet, consectetur adipisicing elit, sed do eiusmod tempor incididunt ut labore et dolore magna aliqua. Ut enim ad minim veniam, quis nostrud exercitation ullamco laboris nisi ut aliquip ex ea commodo consequat. Duis aute irure dolor in reprehenderit in voluptate velit esse cillum dolore eu fugiat nulla pariatur. Excepteur sint occaecat cupidatat non proident, sunt in culpa qui officia deserunt mollit anim id est laborum." # noqa DBSession.add(project) with project.force_locale('fr'): project.name = "Cartographier les routes" project.auto_fill(14) license = License() license.name = 'NextView' license.description = "This data is licensed for use by the US Government (USG) under the NextView (NV) license and copyrighted by Digital Globe or GeoEye. 
The NV license allows the USG to share the imagery and Literal Imagery Derived Products (LIDP) with entities outside the USG when that entity is working directly with the USG, for the USG, or in a manner that is directly beneficial to the USG. The party receiving the data can only use the imagery or LIDP for the original purpose or only as otherwise agreed to by the USG. The party receiving the data cannot share the imagery or LIDP with a third party without express permission from the USG. At no time should this imagery or LIDP be used for other than USG-related purposes and must not be used for commercial gain. The copyright information should be maintained at all times. Your acceptance of these license terms is implied by your use." # noqa license.plain_text = "In other words, you may only use NextView imagery linked from this site for digitizing OpenStreetMap data for humanitarian purposes." # noqa DBSession.add(license) license = License() license.name = 'Astrium/UNOSAT' license.description = "UNOSAT allow any INTERNET USER to use the IMAGE to develop DERIVATIVE WORKS provided that the INTERNET USER includes the DERIVATIVE WORKS he/she created in the OpenStreetMap database under CC-BY-SA licence (http://creativecommons.org/licenses/by-sa/2.0/) and/or Open Database licence (ODbL: http://www.opendatacommons.org/licenses/odbl/), with the credit of the corresponding PRODUCT conspicuously displayed and written in full, in order to allow any OpenStreetMap database user to have access to and to use the DERIVATIVE WORKS. Except for the foregoing, the END USER and/or the INTERNET USER shall not be entitled to sell, distribute, assign, dispose of, lease, sublicense or transfer, directly or indirectly, any DERIVATIVE WORKS to any third party." # noqa license.plain_text = "Astrium GEO-Information Services and UNOSAT are allowing access to this imagery for creating information in OpenStreetMap. Other uses are not allowed." # noqa DBSession.add(license)
def check_geometry(r, feature, o):
    """Raise HTTPForbidden unless both the stored and the submitted
    geometry lie inside a writable restriction area for the user's role.

    NOTE(review): closure — `self`, `layer` and the session come from the
    enclosing scope.
    """
    # we need both the "original" and "new" geometry to be
    # within the restriction area
    geom_attr, srid = self._get_geom_col_info(layer)
    geom_attr = getattr(o, geom_attr)
    geom = feature.geometry
    allowed = DBSession.query(func.count(RestrictionArea.id))
    allowed = allowed.join(RestrictionArea.roles)
    allowed = allowed.join(RestrictionArea.layers)
    allowed = allowed.filter(RestrictionArea.readwrite.is_(True))
    allowed = allowed.filter(Role.id == self.request.user.role.id)
    allowed = allowed.filter(Layer.id == layer.id)
    # NULL area means the role may write anywhere
    allowed = allowed.filter(or_(
        RestrictionArea.area.is_(None),
        RestrictionArea.area.ST_Contains(geom_attr)
    ))
    spatial_elt = None
    if geom and not isinstance(geom, geojson.geometry.Default):
        shape = asShape(geom)
        spatial_elt = from_shape(shape, srid=srid)
        allowed = allowed.filter(or_(
            RestrictionArea.area.is_(None),
            RestrictionArea.area.ST_Contains(spatial_elt)
        ))
    if allowed.scalar() == 0:
        raise HTTPForbidden()

    # check is geometry is valid
    self._validate_geometry(spatial_elt)
def get_elevation(fixes):
    """Fill in terrain elevation (index 11) for a list of fixes.

    Samples at most ~1000 of the fix locations against the Elevation
    raster and linearly interpolates elevations for the fixes between
    samples.  Returns a copy of `fixes`; the input is not mutated.
    """
    # sample roughly every `shortener`-th fix, capped near 1000 samples
    shortener = int(max(1, len(fixes) / 1000))

    coordinates = [(fix[2]["longitude"], fix[2]["latitude"]) for fix in fixes]
    points = MultiPoint(coordinates[::shortener])
    locations = from_shape(points, srid=4326)
    location = locations.ST_DumpPoints()

    cte = db.session.query(
        location.label("location"), locations.ST_Envelope().label("locations")
    ).cte()

    # 1-based index of each dumped point within the multipoint
    location_id = literal_column("(location).path[1]")
    elevation = Elevation.rast.ST_Value(cte.c.location.geom)

    # Prepare main query
    q = (
        db.session.query(location_id.label("location_id"), elevation.label("elevation"))
        .filter(
            and_(
                cte.c.locations.intersects(Elevation.rast),
                cte.c.location.geom.intersects(Elevation.rast),
            )
        )
        .all()
    )

    fixes_copy = [list(fix) for fix in fixes]

    # No elevations found at all...
    if not len(q):
        return fixes_copy

    # skip leading samples that have no elevation data
    start_idx = 0
    while start_idx < len(q) - 1 and q[start_idx].elevation is None:
        start_idx += 1

    prev = q[start_idx]

    for i in _xrange(start_idx + 1, len(q)):
        if q[i].elevation is None:
            continue

        current = q[i]

        # linear interpolation between consecutive sampled elevations,
        # mapped back onto the original (unshortened) fix indices
        for j in range(
            (prev.location_id - 1) * shortener, (current.location_id - 1) * shortener
        ):
            elev = prev.elevation + (current.elevation - prev.elevation) / (
                (current.location_id - prev.location_id) * shortener
            ) * (j - (prev.location_id - 1) * shortener)
            fixes_copy[j][11] = elev

        prev = current

    # the final sampled fix keeps its measured elevation verbatim
    if len(q) and q[-1].elevation:
        fixes_copy[-1][11] = q[-1].elevation

    return fixes_copy
def check_geometry(_, feature, obj):
    """Raise HTTPForbidden unless both the stored and the submitted
    geometry lie inside a writable restriction area for the user's role.

    NOTE(review): closure — `self`, `layer` and `models` come from the
    enclosing scope.
    """
    # we need both the "original" and "new" geometry to be
    # within the restriction area
    geom_attr, srid = self._get_geom_col_info(layer)
    geom_attr = getattr(obj, geom_attr)
    geom = feature.geometry
    allowed = models.DBSession.query(func.count(RestrictionArea.id))
    allowed = allowed.join(RestrictionArea.roles)
    allowed = allowed.join(RestrictionArea.layers)
    allowed = allowed.filter(RestrictionArea.readwrite.is_(True))
    allowed = allowed.filter(Role.id == self.request.user.role.id)
    allowed = allowed.filter(Layer.id == layer.id)
    # NULL area means the role may write anywhere
    allowed = allowed.filter(or_(
        RestrictionArea.area.is_(None),
        RestrictionArea.area.ST_Contains(geom_attr)
    ))
    spatial_elt = None
    if geom and not isinstance(geom, geojson.geometry.Default):
        shape = asShape(geom)
        spatial_elt = from_shape(shape, srid=srid)
        allowed = allowed.filter(or_(
            RestrictionArea.area.is_(None),
            RestrictionArea.area.ST_Contains(spatial_elt)
        ))
    if allowed.scalar() == 0:
        raise HTTPForbidden()

    # check is geometry is valid
    if self._get_validation_setting(layer):
        self._validate_geometry(spatial_elt)
def update_zones():
    """Refresh the Zone table from the CDMX alcaldías open-data CSV export."""
    # Request data about zones
    url = 'https://datos.cdmx.gob.mx/api/records/1.0/download/?dataset=alcaldias'
    response = requests.get(url, allow_redirects=True)

    rows = csv.reader(response.content.decode('utf-8').splitlines(), delimiter=';')
    # Skips headers
    next(rows)

    # Remove old data
    Zone.query.delete()

    # Insert all records: column 0 holds the name, column 5 the GeoJSON shape
    for record in rows:
        parsed_shape = geojson.loads(record[5])
        zone_geom = from_shape(asShape(parsed_shape), srid=4326)
        db.session.add(Zone(
            name=record[0],
            shape=zone_geom,
        ))
    db.session.commit()
def test_insert(self): conn = self.conn # Issue two inserts using DBAPI's executemany() method. This tests # the Geometry type's bind_processor and bind_expression functions. conn.execute(Lake.__table__.insert(), [ {'geom': 'SRID=4326;LINESTRING(0 0,1 1)'}, {'geom': WKTElement('LINESTRING(0 0,2 2)', srid=4326)}, {'geom': from_shape(LineString([[0, 0], [3, 3]]), srid=4326)} ]) results = conn.execute(Lake.__table__.select()) rows = results.fetchall() row = rows[0] assert isinstance(row[1], WKBElement) wkt = session.execute(row[1].ST_AsText()).scalar() assert wkt == 'LINESTRING(0 0, 1 1)' srid = session.execute(row[1].ST_SRID()).scalar() assert srid == 4326 row = rows[1] assert isinstance(row[1], WKBElement) wkt = session.execute(row[1].ST_AsText()).scalar() assert wkt == 'LINESTRING(0 0, 2 2)' srid = session.execute(row[1].ST_SRID()).scalar() assert srid == 4326 row = rows[2] assert isinstance(row[1], WKBElement) wkt = session.execute(row[1].ST_AsText()).scalar() assert wkt == 'LINESTRING(0 0, 3 3)' srid = session.execute(row[1].ST_SRID()).scalar() assert srid == 4326
def load_geojson():
    """Download the boundaries archive and store each .geojson region as a
    Boundary row (everything normalised to MultiPolygon).

    Fixed: the inner redundant `.endswith('.geojson')` check (names are
    already filtered) and the loop variable `name` being clobbered by the
    region name read from the file.
    """
    result = urlopen('https://github.com/openregister/boundaries/archive/master.zip').read()
    stream = BytesIO(result)
    zipfile = ZipFile(stream, 'r')
    file_names = [n for n in zipfile.namelist() if n.endswith('.geojson')]
    for file_name in file_names:
        with zipfile.open(file_name, 'r') as f:
            file_contents = TextIOWrapper(f, encoding='utf-8', newline='')
            data = geojson.loads(file_contents.read())
            try:
                name = data['properties']['REGD14NM']
                code = data['properties']['REGD14CD']
                geometry = data['geometry']
                # hackery store everthing as multipolygon
                if geometry['type'] == 'Polygon':
                    geometry['coordinates'] = [geometry['coordinates']]
                    geometry['type'] = 'MultiPolygon'
                polygon = from_shape(asShape(geometry), srid=4326)
                boundary = Boundary(name=name, code=code, polygon=polygon)
                db.session.add(boundary)
                db.session.commit()
            except KeyError as e:
                # file without the expected region properties — skip it
                print("not something we were expecting really")
def _deserialize(self, value, attr, data, **kwargs):
    """Deserialize a GeoJSON-like mapping into a 2-D WKB element (SRID 4326).

    Raises ValidationError (chained from the original ValueError) for
    invalid geometries.
    """
    try:
        # drop any Z coordinates before encoding
        flattened = remove_third_dimension(asShape(value))
        return from_shape(flattened, srid=4326)
    except ValueError as error:
        raise ValidationError("Geometry error") from error
def point_update(request):
    """Update title, description and position of an existing point.

    Raises HTTPBadRequest for malformed input, HTTPNotFound for an
    unknown id.  Returns the updated point as GeoJSON.
    """
    # first try to load the point that should be updated
    try:
        # try to cast the given id to an integer
        # (renamed from `id`, which shadowed the builtin)
        point_id = int(request.matchdict['id'])
    except ValueError:
        raise HTTPBadRequest()

    try:
        # load the point with the given id
        point = DBSession.query(Point).filter(Point.id == point_id).one()
    except NoResultFound:
        raise HTTPNotFound()

    # get the new values
    title = request.POST.get('title', '')
    description = request.POST.get('description', '')

    try:
        # try to get the input parameters; POST.get returns None for a
        # missing parameter, so float() may raise TypeError as well —
        # catch it too instead of letting it surface as a 500
        lon = float(request.POST.get('lon'))
        lat = float(request.POST.get('lat'))
    except (TypeError, ValueError):
        raise HTTPBadRequest()

    # then set the new values on the point
    point.title = title
    point.description = description
    point.geom = from_shape(PointShape(lon, lat), srid=4326)

    # the point will automatically be flushed (updated in the database)
    return to_geojson(point)
def main(places):
    """Upsert Ptolemy places from a pandas DataFrame into the database.

    Expects rows with ptol_id, ptol_name, modern_name, ptol_lon/ptol_lat,
    modern_lon/modern_lat and disposition columns.
    """
    valid_dispositions = ('known', 'unknown', 'tentative')
    try:
        #connection = psycopg2.connect("dbname='ptolemy' user='******' host='localhost' password='******' port='5433'")
        session = models.create_session()
        print 'connected'
        #cursor = connection.cursor()
        #cursor.execute('''DELETE FROM places''')
        #query = '''INSERT INTO places (ptolemy_id, ptolemy_name, modern_name, ptolemy_point, modern_point, disposition) VALUES (%s, %s, %s, ST_GeogFromText(%s), ST_GeogFromText(%s), %s)'''
        for index, row in places.iterrows():
            print index
            try:
                # insert when the place doesn't exist yet, update otherwise
                place = session.query(models.Place).get(row.ptol_id)
                if place == None:
                    print 'inserting %s' % (row.ptol_id)
                    place = models.Place()
                    place.ptolemy_id = row.ptol_id
                else:
                    print 'updating %s' % (row.ptol_id)
                place.ptolemy_name = row.ptol_name
                if isinstance(row.modern_name, basestring):
                    place.modern_name = row.modern_name
                else:
                    place.modern_name = None
                # NOTE(review): no srid passed to from_shape here, unlike the
                # other loaders in this file — confirm the column carries one
                place.ptolemy_point = from_shape(Point(row.ptol_lon, row.ptol_lat))
                # NaN coordinates mean no known modern location
                if np.isnan(row.modern_lat) or np.isnan(row.modern_lon):
                    place.modern_point = None
                else:
                    place.modern_point = from_shape(Point(row.modern_lon, row.modern_lat))
                if row.disposition not in valid_dispositions:
                    place.disposition = None
                else:
                    place.disposition = row.disposition
                session.add(place)
                #cursor.execute(query, place_data)
            except Exception as e:
                print 'unable to insert %s: %s' % (row.ptol_id, e.message)
        #connection.commit()
        #cursor.close()
        #connection.close()
        session.commit()
        session.close()
    except Exception as e:
        print 'unable to connect: %s' % (e.message, )
def __update__(self, feature):
    """Refresh this record's geometry and text from a GeoJSON feature.

    Skips the geometry when it is absent or a Default placeholder.
    """
    geometry = feature.geometry
    if geometry is not None and not isinstance(geometry, Default):
        # renamed local from `shape` to avoid clashing with that name
        shp = asShape(feature.geometry)
        self.geom = from_shape(shp, srid=4326)
        # cache the shapely object on the WKB element
        self.geom.shape = shp
    self.text = feature.properties.get('text', None)