def test_create_edges(self):
    """
    Test whether demands are associated with the appropriate supply
    nodes based on proximity.
    """
    from next.model.models import Scenario, Node, Edge
    from next.views import create_edges

    sc1 = self.session.query(Scenario).filter(Scenario.name == "Test").first()
    params = {'id': sc1.id, 'phase_id': 2}
    request = testing.DummyRequest()
    request.matchdict = params
    response = create_edges(request)

    demand1 = self.session.query(Node).filter(func.ST_X(Node.point) == 1.0).first()
    demand2 = self.session.query(Node).filter(func.ST_X(Node.point) == -2.0).first()
    supply1 = self.session.query(Node).\
        filter(Edge.to_node_id == Node.id).\
        filter(Edge.from_node_id == demand1.id).first()
    supply2 = self.session.query(Node).\
        filter(Edge.to_node_id == Node.id).\
        filter(Edge.from_node_id == demand2.id).first()

    supply1_coords = to_shape(supply1.point).coords[0]
    supply2_coords = to_shape(supply2.point).coords[0]
    self.assertTrue(supply1_coords[0] == 0.0 and supply1_coords[1] == 0.0)
    self.assertTrue(supply2_coords[0] == -1.0 and supply2_coords[1] == -1.0)
def dto(self):
    return {
        'code': self.code,
        'name': self.name,
        'stationTogether1': self.station_together_1,
        'lat': to_shape(self.point).x,
        'long': to_shape(self.point).y
    }
def show_place(ptolemy_id):
    try:
        session = models.create_session(models.DB_URL)
        place = session.query(models.Place).get(ptolemy_id)
        place.ptolemy_coords = to_shape(place.ptolemy_point)
        if place.modern_point is not None:
            place.modern_coords = to_shape(place.modern_point)
        else:
            place.modern_coords = None
        return render_template('main/show_place.html', place=place)
    except Exception as e:
        current_app.logger.error(e.message, exc_info=e)
def find_geom(feature):
    """ Locates the feature's geometry. """
    if feature.geom_type == 'points':
        return to_shape(feature.point.the_geom)
    elif feature.geom_type == 'lines':
        return to_shape(feature.line.the_geom)
    elif feature.geom_type == 'multilinestrings':
        return to_shape(feature.multiline.the_geom)
    elif feature.geom_type == 'multipolygons':
        return to_shape(feature.multipolygon.the_geom)
    elif feature.geom_type == 'other_relations':
        return to_shape(feature.other_rel.the_geom)
def query_and_output(self):
    IntersectionsOrm = self.get_table_orm('geocode')
    session = Session(self.engine)
    fp = open(self.file_path, 'wb')
    csv_writer = file_utils.make_csv_writer(fp, self.csv_columns)
    intersections = session.query(IntersectionsOrm).filter(text('type = 2')).all()
    for i, a in enumerate(intersections):
        x = to_shape(a.geom).x
        y = to_shape(a.geom).y
        lon, lat = geo_utils.to_lon_lat(x, y)
        row = {'id': i, 'name': a.address, 'zipcode': a.zip_code,
               'lon': lon, 'lat': lat, 'layer_id': 'intersections'}
        csv_writer.writerow(row)
def _test_update(self, mapped_class):
    from geojson import Feature, Point
    from geoalchemy2.elements import WKBElement
    from geoalchemy2.shape import to_shape

    feature = Feature(
        id=1,
        properties={"text": "foo", "child": "foo", "children": ["foo", "foo"]},
        geometry=Point(coordinates=[53, -4]),
    )
    obj = mapped_class(feature)

    feature = Feature(
        id=2,
        properties={"text": "bar", "child": "bar", "children": ["bar", "bar"]},
        geometry=Point(coordinates=[55, -5]),
    )
    obj.__update__(feature)

    self.assertEqual(obj.id, 1)
    self.assertEqual(obj.text, "bar")
    self.assertEqual(obj.child, "bar")
    self.assertEqual(obj.children, ["bar", "bar"])
    self.assertTrue(isinstance(obj.geom, WKBElement))
    point = to_shape(obj.geom)
    self.assertEqual(point.x, 55)
    self.assertEqual(point.y, -5)
    self.assertEqual(obj.geom.srid, 3000)
def elevation(self, oid, segments=None, **params):
    if segments is not None and segments.isdigit():
        segments = int(segments)
        if segments > 500 or segments <= 0:
            segments = 500
    else:
        segments = 100

    r = cherrypy.request.app.config['DB']['map'].tables.routes.data
    gen = sa.select([sa.func.generate_series(0, segments).label('i')]).alias()
    field = sa.func.ST_LineInterpolatePoint(r.c.geom, gen.c.i / float(segments))
    field = sa.func.ST_Collect(field)
    sel = sa.select([field]).where(r.c.id == oid)\
            .where(r.c.geom.ST_GeometryType() == 'ST_LineString')

    res = cherrypy.request.db.execute(sel).first()

    if res is None or res[0] is None:
        raise cherrypy.NotFound()

    ret = OrderedDict()
    ret['id'] = oid
    compute_elevation(to_shape(res[0]), ret)

    return ret
def main():
    session = create_session()
    for place in session.query(Place).order_by(Place.ptolemy_name):
        # point = wkb.loads(place.ptolemy_point)
        point = to_shape(place.ptolemy_point)
        print place.ptolemy_id, place.ptolemy_name, point.x, point.y
    session.close()
def geometry_as_geojson(self):
    if self.geometry is not None:
        geom = json.dumps(
            shapely.geometry.mapping(to_shape(self.geometry))
        )
        return geom
    return False
def project(request):
    check_project_expiration()

    id = request.matchdict['project']
    project = DBSession.query(Project).get(id)

    if project is None:
        _ = request.translate
        request.session.flash(_("Sorry, this project doesn't exist"))
        return HTTPFound(location=route_path('home', request))

    project.locale = get_locale_name(request)

    filter = and_(TaskState.project_id == id,
                  TaskState.state != TaskState.state_removed,
                  TaskState.state != TaskState.state_ready)
    history = DBSession.query(TaskState) \
        .filter(filter) \
        .order_by(TaskState.date.desc()) \
        .limit(20).all()

    user_id = authenticated_userid(request)
    locked_task = None
    user = None
    if user_id:
        user = DBSession.query(User).get(user_id)
        locked_task = get_locked_task(project.id, user)

    features = []
    for area in project.priority_areas:
        features.append(Feature(geometry=shape.to_shape(area.geometry)))

    return dict(page_id='project',
                project=project,
                locked_task=locked_task,
                history=history,
                priority_areas=FeatureCollection(features),)
def test_to_shape_WKBElement_str():
    # POINT(1 2)
    e = WKBElement(str('0101000000000000000000f03f0000000000000040'))
    s = to_shape(e)
    assert isinstance(s, Point)
    assert s.x == 1
    assert s.y == 2
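# Hedged round-trip sketch (not taken from any of the snippets here): it only
# assumes geoalchemy2's from_shape/to_shape and shapely, and shows that a
# shapely Point survives the conversion to a WKBElement and back.
from geoalchemy2.shape import from_shape, to_shape
from shapely.geometry import Point

pt = Point(1, 2)
element = from_shape(pt, srid=4326)   # WKBElement carrying SRID 4326
assert element.srid == 4326
assert to_shape(element).equals(pt)   # round trip yields an equivalent Point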
def as_geofeature(self):
    geometry = to_shape(self.geom)
    feature = Feature(
        id=self.code_insee,
        geometry=geometry
    )
    return feature
def create(self, user_token, layer):
    from gbi_server.model import User
    from gbi_server.model import WMTS
    from gbi_server.extensions import db

    user = User.by_authproxy_token(user_token)
    if not user:
        raise InvalidUserToken()

    result = db.session.query(WMTS, ST_Transform(WMTS.view_coverage, 3857)).filter_by(name=layer).first()
    if result:
        wmts, view_coverage = result
        if wmts and wmts.is_public:
            return to_shape(view_coverage)

    if user.is_customer:
        couch_url = self.couchdb_url
        couchdb = CouchDBBox(couch_url, '%s_%s' % (SystemConfig.AREA_BOX_NAME, user.id))
        geom = couchdb.layer_extent(self.geometry_layer)
        return optimize_geometry(geom) if geom else None
    elif user.is_service_provider:
        couch_url = self.couchdb_url
        couchdb = CouchDBBox(couch_url, '%s_%s' % (SystemConfig.AREA_BOX_NAME, user.id))
        geom = couchdb.layer_extent()
        return optimize_geometry(geom) if geom else None
    elif user.is_admin or user.is_consultant:
        # permit access to everything
        return box(-20037508.3428, -20037508.3428, 20037508.3428, 20037508.3428)

    return None
def _load_sources(self):
    public_wmts = db.session.query(WMTS, ST_Transform(WMTS.view_coverage, 3857)).filter_by(is_public=True).group_by(WMTS).all()
    for wmts, view_coverage in public_wmts:
        self.sources['%s_source' % wmts.name] = {
            'type': 'tile',
            'url': wmts.url,
            'grid': 'GoogleMapsCompatible',
            'coverage': {
                'srs': 'EPSG:3857',
                'bbox': list(to_shape(view_coverage).bounds)
            }
        }
        self.caches['%s_cache' % wmts.name] = {
            'sources': ['%s_source' % wmts.name],
            'grids': ['GoogleMapsCompatible'],
            'disable_storage': True
        }
        self.layers.append({
            'name': '%s_layer' % wmts.name,
            'title': wmts.title,
            'sources': ['%s_cache' % wmts.name],
            'min_res': self.grid.resolution(wmts.view_level_start),
            # increase max_res to allow a bit of oversampling
            'max_res': self.grid.resolution(wmts.view_level_end) / 2,
        })
def _proto_read(self, layer):
    """ Read features for the layer based on the self.request. """
    proto = self._get_protocol_for_layer(layer)
    if layer.public:
        return proto.read(self.request)

    user = self.request.user
    if user is None:
        raise HTTPForbidden()

    cls = proto.mapped_class
    geom_attr = proto.geom_attr
    ras = DBSession.query(RestrictionArea.area, RestrictionArea.area.ST_SRID())
    ras = ras.join(RestrictionArea.roles)
    ras = ras.join(RestrictionArea.layers)
    ras = ras.filter(Role.id == user.role.id)
    ras = ras.filter(Layer.id == layer.id)

    collect_ra = []
    use_srid = -1
    for ra, srid in ras.all():
        if ra is None:
            return proto.read(self.request)
        else:
            use_srid = srid
            collect_ra.append(to_shape(ra))
    if len(collect_ra) == 0:  # pragma: no cover
        raise HTTPForbidden()

    filter1_ = create_filter(self.request, cls, geom_attr)
    ra = cascaded_union(collect_ra)
    filter2_ = ga_func.ST_Contains(
        from_shape(ra, use_srid),
        getattr(cls, geom_attr)
    )
    filter_ = filter2_ if filter1_ is None else and_(filter1_, filter2_)

    return proto.read(self.request, filter=filter_)
def create_details_response(self, res):
    if res is None:
        raise cherrypy.NotFound()

    loctags = TagStore.make_localized(res['tags'], cherrypy.request.locales)
    cfg = cherrypy.request.app.config

    ret = api.common.RouteDict(res)
    ret['type'] = res['type'] if res.has_key('type') else 'relation'
    ret['symbol_url'] = '%s/symbols/%s/%s.png' % (cfg['Global']['MEDIA_URL'],
                                                  cfg['Global']['BASENAME'],
                                                  str(res['symbol']))
    ret['mapped_length'] = int(res['length'])
    ret.add_if('official_length', loctags.get_length('distance', 'length', unit='m'))
    for tag in ('operator', 'note', 'description'):
        ret.add_if(tag, loctags.get(tag))
    ret.add_if('url', loctags.get_url())
    ret.add_if('wikipedia', loctags.get_wikipedia_tags())
    ret['bbox'] = to_shape(res['bbox']).bounds

    if hasattr(self, '_hierarchy_list'):
        for name, val in (('subroutes', True), ('superroutes', False)):
            ret.add_if(name, self._hierarchy_list(ret['id'], val))

    ret['tags'] = res['tags']

    return ret
def serializegeofn(self, geoCol, idCol, recursif=False, columns=()):
    """
    Return the object's data as a GeoJSON Feature.

    Parameters
    ----------
    geoCol: string
        Name of the geometry column
    idCol: string
        Name of the primary key column
    recursif: boolean
        Whether related objects (relationships) should also be serialized
    columns: list
        List of the columns to take into account
    """
    if not getattr(self, geoCol) is None:
        geometry = to_shape(getattr(self, geoCol))
    else:
        geometry = {"type": "Point", "coordinates": [0, 0]}

    feature = Feature(
        id=str(getattr(self, idCol)),
        geometry=geometry,
        properties=self.as_dict(recursif, columns),
    )
    return feature
def __read__(self):
    id = None
    geom = None
    properties = {}

    for p in class_mapper(self.__class__).iterate_properties:
        if isinstance(p, ColumnProperty):
            if len(p.columns) != 1:  # pragma: no cover
                raise NotImplementedError
            col = p.columns[0]
            val = getattr(self, p.key)
            if col.primary_key:
                id = val
            elif isinstance(col.type, Geometry) and \
                    col.name == self.geometry_column_to_return().name:
                if hasattr(self, '_shape'):
                    geom = self._shape
                elif val is not None:
                    if len(val.data) > 1000000:
                        raise HTTPBandwidthLimited(
                            'Feature ID %s: is too large' % self.id)
                    geom = to_shape(val)
            elif not col.foreign_keys and not isinstance(col.type, Geometry):
                properties[p.key] = val

    if self.__add_properties__:
        for k in self.__add_properties__:
            properties[k] = getattr(self, k)

    properties = self.insertLabel(properties)
    return geojson.Feature(id=id, geometry=geom, properties=properties)
def __read__(self):
    """ Called by :py:attr:`.__geo_interface__`. """
    id = None
    geom = None
    properties = {}

    for p in class_mapper(self.__class__).iterate_properties:
        if isinstance(p, ColumnProperty):
            if len(p.columns) != 1:  # pragma: no cover
                raise NotImplementedError
            col = p.columns[0]
            val = getattr(self, p.key)
            if col.primary_key:
                id = val
            elif isinstance(col.type, Geometry):
                if hasattr(self, "_shape"):
                    geom = self._shape
                elif val is not None:
                    geom = to_shape(val)
            elif not col.foreign_keys:
                properties[p.key] = val

    if self.__add_properties__:
        for k in self.__add_properties__:
            properties[k] = getattr(self, k)

    return geojson.Feature(id=id, geometry=geom, properties=properties)
def __read__(self):
    id = None
    geom = None
    properties = {}

    for p in class_mapper(self.__class__).iterate_properties:
        if isinstance(p, ColumnProperty):
            if len(p.columns) != 1:  # pragma: no cover
                raise NotImplementedError
            col = p.columns[0]
            val = getattr(self, p.key)
            if col.primary_key:
                id = val
            elif (isinstance(col.type, GeometryChsdi) and
                  col.name == self.geometry_column_to_return().name):
                if hasattr(self, '_shape'):
                    geom = self._shape
                elif val is not None:
                    if len(val.data) > 1000000:
                        raise HTTPBandwidthLimited(
                            'Feature ID %s: is too large' % self.id)
                    geom = to_shape(val)
            elif (not col.foreign_keys and
                  not isinstance(col.type, GeometryChsdi)):
                properties[p.key] = val

    properties = self.insert_label(properties)

    bbox = None
    try:
        bbox = geom.bounds
    except:
        pass

    return id, geom, properties, bbox
def __read__(self):
    id = None
    geom = None
    bbox = None
    properties = {}

    for p in class_mapper(self.__class__).iterate_properties:
        if isinstance(p, ColumnProperty):
            if len(p.columns) != 1:  # pragma: no cover
                raise NotImplementedError
            col = p.columns[0]
            val = getattr(self, p.key)
            if col.primary_key:
                id = val
            elif (isinstance(col.type, GeometryChsdi) and
                  col.name == self.geometry_column_to_return().name):
                if hasattr(self, '_shape') and \
                        len(self._shape) < MAX_FEATURE_GEOMETRY_SIZE:
                    geom = self._shape
                elif val is not None and \
                        len(val.data) < MAX_FEATURE_GEOMETRY_SIZE:
                    geom = to_shape(val)
                try:
                    bbox = geom.bounds
                except:
                    pass
            elif (not col.foreign_keys and
                  not isinstance(col.type, GeometryChsdi)):
                properties[p.key] = val

    properties = self.insert_label(properties)
    return id, geom, properties, bbox
def create_feature(cls, data, geom):
    """
    Create a feature (a record of the shapefile) for the three shapefiles
    by serializing an SQLAlchemy object.

    Parameters:
        data (dict): the SQLAlchemy model serialized as a dict
        geom (WKB): the geom as WKB

    Returns:
        void
    """
    try:
        geom_wkt = to_shape(geom)
        geom_geojson = mapping(geom_wkt)
        feature = {"geometry": geom_geojson, "properties": data}
        cls.write_a_feature(feature, geom_wkt)
    except AssertionError:
        cls.close_files()
        raise GeonatureApiError(
            "Cannot create a shapefile record without a geometry"
        )
    except Exception as e:
        cls.close_files()
        raise GeonatureApiError(e)
def test_to_shape_WKBElement():
    e = WKBElement(b'\x01\x01\x00\x00\x00\x00\x00\x00\x00\x00'
                   b'\x00\xf0?\x00\x00\x00\x00\x00\x00\x00@')
    s = to_shape(e)
    assert isinstance(s, Point)
    assert s.x == 1
    assert s.y == 2
def latlon(self):
    """
    Returns a shapely Point:
        gage.latlon().y for latitude
        gage.latlon().x for longitude
    """
    latlon_point = to_shape(self.point)
    return latlon_point
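# Hedged usage sketch for the latlon() accessor above: the session query and
# the Gage model name are assumptions, but any mapped class with a POINT
# geometry column and this property behaves the same way.
gage = session.query(Gage).first()       # hypothetical query for one gage
point = gage.latlon()                    # shapely Point
latitude, longitude = point.y, point.x   # y is latitude, x is longitude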
def endpoints(geometry, queryset):
    start = geometry.ST_StartPoint()
    end = geometry.ST_EndPoint()
    return (tuple(to_shape(i) for i in j)
            for j in queryset.with_entities(start, end).all())
def as_geofeature(self):
    geometry = to_shape(self.geom)
    feature = Feature(
        id=self.r,
        geometry=geometry,
        properties={
            c.name: getattr(self, c.name)
            for c in self.__table__.columns
            if c.name != 'geom'
        }
    )
    return feature
def _get_near_flights(flight, location, time, max_distance=1000):
    # calculate max_distance in degrees at the earth's sphere (approximate,
    # cutoff at +-85 deg)
    max_distance_deg = (max_distance / METERS_PER_DEGREE) / \
        math.cos(math.radians(min(abs(location.latitude), 85)))

    # the distance filter is geometric only, so max_distance must be given in
    # SRID units (which is degrees for WGS84). The filter will be more and more
    # inaccurate further to the poles. But it's a lot faster than the geographic
    # filter...
    result = Flight.query() \
        .options(undefer_group('path')) \
        .filter(Flight.id != flight.id) \
        .filter(Flight.takeoff_time <= time) \
        .filter(Flight.landing_time >= time) \
        .filter(func.ST_DWithin(Flight.locations,
                                location.to_wkt_element(),
                                max_distance_deg))

    result = _patch_query(result)

    flights = []
    for flight in result:
        # find point closest to given time
        closest = min(range(len(flight.timestamps)),
                      key=lambda x: abs((flight.timestamps[x] - time).total_seconds()))

        trace = to_shape(flight.locations).coords

        if closest == 0 or closest == len(trace) - 1:
            point = trace[closest]
        else:
            # interpolate flight trace between two fixes
            next_smaller = closest if flight.timestamps[closest] < time else closest - 1
            next_larger = closest if flight.timestamps[closest] > time else closest + 1
            dx = (time - flight.timestamps[next_smaller]).total_seconds() / \
                (flight.timestamps[next_larger] - flight.timestamps[next_smaller]).total_seconds()

            point_next = trace[closest]
            point_prev = trace[closest]
            point = [point_prev[0] + (point_next[0] - point_prev[0]) * dx,
                     point_prev[1] + (point_next[1] - point_prev[1]) * dx]

        point_distance = location.geographic_distance(
            Location(latitude=point[1], longitude=point[0]))

        if point_distance > max_distance:
            continue

        flights.append(flight)

        # limit to 5 flights
        if len(flights) == 5:
            break

    return flights
def retrieve_osm_data():
    data = osm.get('user/details').data
    app.logger.debug("getting user data from osm")
    if not data:
        # FIXME this requires handling
        return False

    userxml = data.find('user')
    osmid = userxml.attrib['id']

    # query for existing user
    if bool(models.User.query.filter(models.User.id == osmid).count()):
        app.logger.debug('user exists, getting from database')
        user = models.User.query.filter(models.User.id == osmid).first()
    else:
        app.logger.debug('user is new, create local account')
        user = models.User()
        user.id = osmid
        user.display_name = userxml.attrib['display_name']
        user.osm_account_created = userxml.attrib['account_created']
        homexml = userxml.find('home')
        if homexml is not None:
            user.home_location = WKTElement(
                'POINT(%s %s)' % (homexml.attrib['lon'], homexml.attrib['lat']))
        else:
            app.logger.debug('no home for this user')
        languages = userxml.find('languages')
        # FIXME parse languages and add to user.languages string field
        user.changeset_count = userxml.find('changesets').attrib['count']

        # get last changeset info
        changesetdata = osm.get('changesets?user=%s' % (user.id)).data
        try:
            lastchangeset = changesetdata.find('changeset')
            if 'min_lon' in lastchangeset.attrib:
                wktbbox = 'POLYGON((%s %s, %s %s, %s %s, %s %s, %s %s))' % (
                    lastchangeset.attrib['min_lon'], lastchangeset.attrib['min_lat'],
                    lastchangeset.attrib['min_lon'], lastchangeset.attrib['max_lat'],
                    lastchangeset.attrib['max_lon'], lastchangeset.attrib['max_lat'],
                    lastchangeset.attrib['max_lon'], lastchangeset.attrib['min_lat'],
                    lastchangeset.attrib['min_lon'], lastchangeset.attrib['min_lat'])
                app.logger.debug(wktbbox)
                user.last_changeset_bbox = WKTElement(wktbbox)
            user.last_changeset_date = lastchangeset.attrib['created_at']
            user.last_changeset_id = lastchangeset.attrib['id']
        except:
            app.logger.debug('could not get changeset data from OSM')

        db.session.add(user)
        db.session.commit()
        app.logger.debug('user created')

    # we need to convert the GeoAlchemy object to something picklable
    if user.home_location is not None:
        point = to_shape(user.home_location)
        session['home_location'] = [point.x, point.y] or None
    session['display_name'] = user.display_name
    session['osm_id'] = user.id
    session['difficulty'] = user.difficulty
def auto_fill(self, zoom):
    self.zoom = zoom
    geom_3857 = DBSession.execute(ST_Transform(self.area.geometry, 3857)).scalar()
    geom_3857 = shape.to_shape(geom_3857)

    tasks = []
    for i in get_tiles_in_geom(geom_3857, zoom):
        tasks.append(Task(i[0], i[1], zoom, i[2]))
    self.tasks = tasks
def to_feature(self):
    return Feature(
        geometry=shape.to_shape(self.geometry),
        id=self.id,
        properties={
            'state': self.cur_state.state if self.cur_state else 0,
            'locked': self.cur_lock and self.cur_lock.lock
        }
    )
def elevation(self, oid, segments=None, **params):
    if segments is not None and segments.isdigit():
        segments = int(segments)
        if segments > 500 or segments <= 0:
            segments = 500
    else:
        segments = 100

    ret = OrderedDict()
    ret['id'] = oid

    r = cherrypy.request.app.config['DB']['map'].tables.routes.data
    gen = sa.select([sa.func.generate_series(0, segments).label('i')]).alias()
    field = sa.func.ST_LineInterpolatePoint(r.c.geom, gen.c.i / float(segments))
    field = sa.func.ST_Collect(field)
    sel = sa.select([field]).where(r.c.id == oid)\
            .where(r.c.geom.ST_GeometryType() == 'ST_LineString')

    res = cherrypy.request.db.execute(sel).first()

    if res is not None and res[0] is not None:
        geom = to_shape(res[0])
        xcoord, ycoord = zip(*((p.x, p.y) for p in geom))
        geomlen = LineString(geom).length
        pos = [geomlen * i / float(segments) for i in range(segments)]
        compute_elevation(((xcoord, ycoord, pos), ), geom.bounds, ret)
        return ret

    # special treatment for multilinestrings
    sel = sa.select([r.c.geom,
                     sa.literal_column("""ST_Length2dSpheroid(ST_MakeLine(ARRAY[ST_Points(ST_Transform(geom,4326))]), 'SPHEROID[\"WGS 84\",6378137,298.257223563,AUTHORITY["EPSG",\"7030\"]]')"""),
                     r.c.geom.ST_NPoints()])\
        .where(r.c.id == oid)

    res = cherrypy.request.db.execute(sel).first()

    if res is not None and res[0] is not None:
        geom = to_shape(res[0])

        if res[2] > 10000:
            geom = geom.simplify(res[2] / 500, preserve_topology=False)
        elif res[2] > 4000:
            geom = geom.simplify(res[2] / 1000, preserve_topology=False)

        segments = []
        for seg in geom:
            p = seg.coords[0]
            xcoords = array('d', [p[0]])
            ycoords = array('d', [p[1]])
            pos = array('d')
            if segments:
                prev = segments[-1]
                pos.append(prev[2][-1] +
                           Point(prev[0][-1], prev[1][-1]).distance(Point(*p)))
            else:
                pos.append(0.0)
            for p in seg.coords[1:]:
                pos.append(pos[-1] +
                           Point(xcoords[-1], ycoords[-1]).distance(Point(*p)))
                xcoords.append(p[0])
                ycoords.append(p[1])
            segments.append((xcoords, ycoords, pos))

        compute_elevation(segments, geom.bounds, ret)
        ret['length'] = float(res[1])
        return ret

    raise cherrypy.NotFound()
def test_to_shape_WKTElement():
    e = WKTElement('POINT(1 2)')
    s = to_shape(e)
    assert isinstance(s, Point)
    assert s.x == 1
    assert s.y == 2
def create_gpx_response(self, oid, res):
    if res is None:
        raise cherrypy.NotFound()

    for l in cherrypy.request.locales:
        if l in res['intnames']:
            name = res['intnames'][l]
            break
    else:
        name = res['name']

    root = ET.Element('gpx', {
        'xmlns': "http://www.topografix.com/GPX/1/1",
        'creator': "waymarkedtrails.org",
        'version': "1.1",
        'xmlns:xsi': "http://www.w3.org/2001/XMLSchema-instance",
        'xsi:schemaLocation': "http://www.topografix.com/GPX/1/1 http://www.topografix.com/GPX/1/1/gpx.xsd"
    })

    # metadata
    meta = ET.SubElement(root, 'metadata')
    ET.SubElement(meta, 'name').text = name
    copy = ET.SubElement(meta, 'copyright', author='OpenStreetMap and Contributors')
    ET.SubElement(copy, 'license').text = 'http://www.openstreetmap.org/copyright'
    link = ET.SubElement(meta, 'link',
                         href=config.defaults.BASE_URL + '/#route?id=' + oid)
    ET.SubElement(link, 'text').text = 'Waymarked Trails'
    ET.SubElement(meta, 'time').text = datetime.utcnow().isoformat()

    # and the geometry
    trk = ET.SubElement(root, 'trk')
    geom = to_shape(res['geom'])
    if geom.geom_type == 'LineString':
        geom = (geom, )
    for line in geom:
        seg = ET.SubElement(trk, 'trkseg')
        for pt in line.coords:
            ET.SubElement(seg, 'trkpt', lat="%.7f" % pt[1], lon="%.7f" % pt[0])

    # borrowed from Django's slugify
    name = unicodedata.normalize('NFKC', name)
    name = re.sub('[^\w\s-]', '', name, flags=re.U).strip().lower()
    name = re.sub('[-\s]+', '-', name, flags=re.U)

    cherrypy.response.headers['Content-Type'] = 'application/gpx+xml'
    cherrypy.response.headers['Content-Disposition'] = 'attachment; filename=%s.gpx' % name

    return '<?xml version="1.0" encoding="UTF-8" standalone="no" ?>\n\n'.encode('utf-8') \
        + ET.tostring(root, encoding="UTF-8")
def alt(self):
    """Altitude [m]"""
    return to_shape(self.coordinates).coords[:][0][2]
def geo(self):
    data = mapping(to_shape(self._geo))
    return data
def process(value):
    if value:
        return to_shape(to_wkbelement(value))
    else:
        return None
def get_geojson_feature(wkb):
    """Return a GeoJSON feature built from a WKB geometry."""
    geometry = to_shape(wkb)
    feature = Feature(geometry=geometry, properties={})
    return feature
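# Hedged usage sketch for get_geojson_feature() above: the Municipality model
# and its geom column are assumed names; any geoalchemy2 geometry column works.
from geojson import dumps

row = session.query(Municipality).first()     # hypothetical model/query
feature = get_geojson_feature(row.geom)       # WKBElement -> geojson Feature
print(dumps(feature))                         # '{"type": "Feature", ...}'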
def as_dict(self):
    return {
        'id': self.id,
        'name': self.name,
        'shape': mapping(to_shape(self.shape))
    }
def bounds(self):
    if self.extent is None:
        return None

    return to_shape(self.extent).bounds
def location(self):
    if self.location_wkt is None:
        return None

    coords = to_shape(self.location_wkt)
    return Location(latitude=coords.y, longitude=coords.x)
def test_model():
    app, db, admin = setup()
    GeoModel = create_models(db)
    db.create_all()
    GeoModel.query.delete()
    db.session.commit()

    view = ModelView(GeoModel, db.session)
    admin.add_view(view)

    eq_(view.model, GeoModel)
    eq_(view._primary_key, 'id')

    # Verify form
    eq_(view._create_form_class.point.field_class, GeoJSONField)
    eq_(view._create_form_class.point.kwargs['geometry_type'], "POINT")
    eq_(view._create_form_class.line.field_class, GeoJSONField)
    eq_(view._create_form_class.line.kwargs['geometry_type'], "LINESTRING")
    eq_(view._create_form_class.polygon.field_class, GeoJSONField)
    eq_(view._create_form_class.polygon.kwargs['geometry_type'], "POLYGON")
    eq_(view._create_form_class.multi.field_class, GeoJSONField)
    eq_(view._create_form_class.multi.kwargs['geometry_type'], "MULTIPOINT")

    # Make some test clients
    client = app.test_client()

    rv = client.get('/admin/geomodel/')
    eq_(rv.status_code, 200)

    rv = client.get('/admin/geomodel/new/')
    eq_(rv.status_code, 200)

    rv = client.post(
        '/admin/geomodel/new/',
        data={
            "name": "test1",
            "point": '{"type": "Point", "coordinates": [125.8, 10.0]}',
            "line": '{"type": "LineString", "coordinates": [[50.2345, 94.2], [50.21, 94.87]]}',
            "polygon": ('{"type": "Polygon", "coordinates": [[[100.0, 0.0], [101.0, 0.0],'
                        ' [101.0, 1.0], [100.0, 1.0], [100.0, 0.0]]]}'),
            "multi": '{"type": "MultiPoint", "coordinates": [[100.0, 0.0], [101.0, 1.0]]}',
        })
    eq_(rv.status_code, 302)

    model = db.session.query(GeoModel).first()
    eq_(model.name, "test1")
    eq_(to_shape(model.point).geom_type, "Point")
    eq_(list(to_shape(model.point).coords), [(125.8, 10.0)])
    eq_(to_shape(model.line).geom_type, "LineString")
    eq_(list(to_shape(model.line).coords), [(50.2345, 94.2), (50.21, 94.87)])
    eq_(to_shape(model.polygon).geom_type, "Polygon")
    eq_(list(to_shape(model.polygon).exterior.coords),
        [(100.0, 0.0), (101.0, 0.0), (101.0, 1.0), (100.0, 1.0), (100.0, 0.0)])
    eq_(to_shape(model.multi).geom_type, "MultiPoint")
    eq_(len(to_shape(model.multi).geoms), 2)
    eq_(list(to_shape(model.multi).geoms[0].coords), [(100.0, 0.0)])
    eq_(list(to_shape(model.multi).geoms[1].coords), [(101.0, 1.0)])

    rv = client.get('/admin/geomodel/')
    eq_(rv.status_code, 200)
    html = rv.data.decode('utf-8')
    pattern = r'(.|\n)+({.*"type": ?"Point".*})</textarea>(.|\n)+'
    group = re.match(pattern, html).group(2)
    p = json.loads(group)
    eq_(p['coordinates'][0], 125.8)
    eq_(p['coordinates'][1], 10.0)

    url = '/admin/geomodel/edit/?id=%s' % model.id
    rv = client.get(url)
    eq_(rv.status_code, 200)
    data = rv.data.decode('utf-8')
    ok_(r'{"type":"MultiPoint","coordinates":[[100,0],[101,1]]}</textarea>' in data
        or r'{"type":"MultiPoint","coordinates":[[100,0],[101,1]]}' in data)

    # rv = client.post(url, data={
    #     "name": "edited",
    #     "point": '{"type": "Point", "coordinates": [99.9, 10.5]}',
    #     "line": '',  # set to NULL in the database
    # })
    # eq_(rv.status_code, 302)
    #
    # model = db.session.query(GeoModel).first()
    # eq_(model.name, "edited")
    # eq_(to_shape(model.point).geom_type, "Point")
    # eq_(list(to_shape(model.point).coords), [(99.9, 10.5)])
    # eq_(to_shape(model.line), None)
    # eq_(to_shape(model.polygon).geom_type, "Polygon")
    # eq_(list(to_shape(model.polygon).exterior.coords),
    #     [(100.0, 0.0), (101.0, 0.0), (101.0, 1.0), (100.0, 1.0), (100.0, 0.0)])
    # eq_(to_shape(model.multi).geom_type, "MultiPoint")
    # eq_(len(to_shape(model.multi).geoms), 2)
    # eq_(list(to_shape(model.multi).geoms[0].coords), [(100.0, 0.0)])
    # eq_(list(to_shape(model.multi).geoms[1].coords), [(101.0, 1.0)])

    url = '/admin/geomodel/delete/?id=%s' % model.id
    rv = client.post(url)
    eq_(rv.status_code, 302)
    eq_(db.session.query(GeoModel).count(), 0)
def convert_point_to_lat_lon(self, point):
    shape = to_shape(point)
    return {'lat': shape.y, 'lon': shape.x}
def _get_near_flights(flight, location, time, max_distance=1000):
    # calculate max_distance in degrees at the earth's sphere (approximate,
    # cutoff at +-85 deg)
    max_distance_deg = (max_distance / METERS_PER_DEGREE) / math.cos(
        math.radians(min(abs(location.latitude), 85))
    )

    # the distance filter is geometric only, so max_distance must be given in
    # SRID units (which is degrees for WGS84). The filter will be more and more
    # inaccurate further to the poles. But it's a lot faster than the geographic
    # filter...
    result = (
        Flight.query()
        .options(undefer_group("path"))
        .filter(Flight.id != flight.id)
        .filter(Flight.takeoff_time <= time)
        .filter(Flight.landing_time >= time)
        .filter(
            func.ST_DWithin(
                Flight.locations, location.to_wkt_element(), max_distance_deg
            )
        )
    )
    result = _patch_query(result)

    flights = []
    for flight in result:
        # find point closest to given time
        closest = min(
            range(len(flight.timestamps)),
            key=lambda x: abs((flight.timestamps[x] - time).total_seconds()),
        )

        trace = to_shape(flight.locations).coords

        if closest == 0 or closest == len(trace) - 1:
            point = trace[closest]
        else:
            # interpolate flight trace between two fixes
            next_smaller = closest if flight.timestamps[closest] < time else closest - 1
            next_larger = closest if flight.timestamps[closest] > time else closest + 1
            dx = (time - flight.timestamps[next_smaller]).total_seconds() / (
                flight.timestamps[next_larger] - flight.timestamps[next_smaller]
            ).total_seconds()

            point_next = trace[closest]
            point_prev = trace[closest]
            point = [
                point_prev[0] + (point_next[0] - point_prev[0]) * dx,
                point_prev[1] + (point_next[1] - point_prev[1]) * dx,
            ]

        point_distance = location.geographic_distance(
            Location(latitude=point[1], longitude=point[0])
        )

        if point_distance > max_distance:
            continue

        flights.append(flight)

        # limit to 5 flights
        if len(flights) == 5:
            break

    return flights
def point(self):
    return to_shape(self.coordinates)
from indalsig import db
from indalsig.db.dao import PostcodeDAO, WayDAO
from geopandas.geodataframe import GeoDataFrame, GeoSeries
from geoalchemy2.shape import to_shape
import matplotlib.pyplot as plt

session = db.Session()
postcode_dao = PostcodeDAO(session)
way_dao = WayDAO(session)

# Plot the streets with a GeoSeries:
# extract all the streets
ways = []
for way in way_dao.getAll():
    ways.append(to_shape(way.geom))

wgs = GeoSeries(ways)
base = wgs.plot(color="blue")

qfilter = {'ad_type': 'RENT', 'asset_type': 'GARAGE'}
prices = session.execute(
    'SELECT postcode, avg_price FROM inmosig_average_prices WHERE ad_type = :ad_type AND '
    'asset_type = :asset_type', qfilter)

gdf = GeoDataFrame(columns=['geometry', 'price', 'postcode'])
for price in prices.fetchall():
    postcode = postcode_dao.search_by_postcode(price[0])
    if postcode is not None:
        gdf = gdf.append(
            {
                'geometry': to_shape(postcode.geom),
                # remaining keys inferred from the GeoDataFrame declared above
                'price': price[1],
                'postcode': price[0],
            },
            ignore_index=True)
def bbox(self):
    return to_shape(self._bbox).bounds
def check_wkb(wkb, x, y):
    pt = shape.to_shape(wkb)
    assert round(pt.x, 5) == x
    assert round(pt.y, 5) == y
def as_geofeature(self, data, columns=None):
    if getattr(data, self.geometry_field) is not None:
        geometry = to_shape(getattr(data, self.geometry_field))
        return Feature(
            geometry=geometry,
            properties=self.as_dict(data, columns)
        )
def project_edit(request):
    id = request.matchdict['project']
    project = DBSession.query(Project).get(id)

    licenses = DBSession.query(License).all()

    if 'form.submitted' in request.params:

        for locale, translation in project.translations.iteritems():
            with project.force_locale(locale):
                for field in ['name', 'short_description', 'description',
                              'instructions', 'per_task_instructions']:
                    translated = '_'.join([field, locale])
                    if translated in request.params:
                        setattr(project, field, request.params[translated])
                DBSession.add(project)

        for p in ['changeset_comment', 'entities_to_map', 'imagery']:
            if p in request.params:
                setattr(project, p, request.params[p])

        if 'license_id' in request.params and \
                request.params['license_id'] != "":
            license_id = int(request.params['license_id'])
            license = DBSession.query(License).get(license_id)
            project.license = license

        if 'private' in request.params and \
                request.params['private'] == 'on':
            project.private = True
        else:
            project.private = False

        project.status = request.params['status']
        project.priority = request.params['priority']

        if request.params.get('due_date', '') != '':
            due_date = request.params.get('due_date')
            due_date = datetime.datetime.strptime(due_date, "%m/%d/%Y")
            project.due_date = due_date
        else:
            project.due_date = None

        if 'josm_preset' in request.params:
            josm_preset = request.params.get('josm_preset')
            if hasattr(josm_preset, 'value'):
                project.josm_preset = josm_preset.value.decode('UTF-8')

        # Remove the previously set priority areas
        for area in project.priority_areas:
            DBSession.delete(area)
        project.priority_areas[:] = []
        DBSession.flush()

        priority_areas = request.params.get('priority_areas', '')

        if priority_areas != '':
            geoms = parse_geojson(priority_areas)

            for geom in geoms:
                geom = 'SRID=4326;%s' % geom.wkt
                project.priority_areas.append(PriorityArea(geom))

        DBSession.add(project)
        return HTTPFound(location=route_path('project', request,
                                             project=project.id))

    translations = project.translations.items()

    features = []
    for area in project.priority_areas:
        features.append(Feature(geometry=shape.to_shape(area.geometry)))

    return dict(page_id='project_edit',
                project=project,
                licenses=licenses,
                translations=translations,
                priority_areas=FeatureCollection(features))
def lon(self):
    """Longitude"""
    return to_shape(self.coordinates).coords[:][0][0]
def to_shapely(row, name):
    return shape.to_shape(row[name]) if row[name] is not None else None
def as_dict(self):
    return {
        'id': self.id,
        'location': mapping(to_shape(self.location)),
        'last_updated': self.last_updated
    }
###############################################################################

print("Database connection established.")

# import white area including coastdat id
ww_isection = session.query(CosmoClmGrid.gid,
                            func.ST_Intersection(CosmoClmGrid.geom, GeoPotArea.geom)).\
    filter(func.ST_Intersects(GeoPotArea.geom, CosmoClmGrid.geom))
ww_isection = [(wid, loads(bytes(geom.data))) for wid, geom in ww_isection]

# import grid districts
gds = session.query(GridDistrict.subst_id,
                    func.ST_Transform(GridDistrict.geom, 4326))
gds = [(sid, to_shape(geom)) for sid, geom in gds]

# import weather data for Germany (roughly the bounding box of the cosmoclmgrid)
weadata = session.query(Spatial.gid, Timeseries.tsarray).\
    join(Located, Located.spatial_id == Spatial.gid).\
    join(Timeseries, Located.data_id == Timeseries.id).\
    join(Scheduled, Scheduled.data_id == Timeseries.id).\
    join(Year, Scheduled.time_id == Year.year).\
    join(Typified, Typified.data_id == Timeseries.id).\
    join(Datatype, Typified.type_id == Datatype.id).\
    filter(Year.year == 2011).\
    filter(and_(Spatial.gid / 1000 > 1115,
                Spatial.gid / 1000 < 1144,
                Spatial.gid % 1000 > 70,
                Spatial.gid % 1000 < 110)).\
    filter(Datatype.name == "WSS_10M")
weadata = weadata.all()
def start_geojson():
    pools = Carpool.query

    if request.args.get('ignore_prior') != 'false':
        pools = pools.filter(Carpool.leave_time >= datetime.datetime.utcnow())

    try:
        near_lat = request.args.get('near.lat', type=float)
        near_lon = request.args.get('near.lon', type=float)
    except ValueError:
        abort(400, "Invalid lat/lon format")

    try:
        near_radius = request.args.get('near.radius', type=int)
    except ValueError:
        abort(400, "Invalid radius format")

    if near_lat and near_lon:
        center = from_shape(Point(near_lon, near_lat), srid=4326)

        if near_radius:
            # We're going to say that radius is in meters.
            # The conversion factor here is based on a 40deg latitude
            # (roughly around Virginia)
            radius_degrees = near_radius / 111034.61
            pools = pools.filter(
                func.ST_Distance(Carpool.from_point, center) <= radius_degrees)

        pools = pools.order_by(func.ST_Distance(Carpool.from_point, center))

    riders = db.session.query(RideRequest.carpool_id,
                              func.count(RideRequest.id).label('pax')).\
        filter(RideRequest.status == 'approved').\
        group_by(RideRequest.carpool_id).\
        subquery('riders')

    pools = pools.filter(Carpool.from_point.isnot(None)).\
        outerjoin(riders, Carpool.id == riders.c.carpool_id).\
        filter(riders.c.pax.is_(None) | (riders.c.pax < Carpool.max_riders))

    features = []
    dt_format = current_app.config.get('DATE_FORMAT')

    # get the current user's confirmed carpools
    confirmed_carpools = []
    if not current_user.is_anonymous:
        rides = RideRequest.query.filter(RideRequest.status == 'approved').\
            filter(RideRequest.person_id == current_user.id)
        for ride in rides:
            confirmed_carpools.append(ride.carpool_id)

    for pool in pools:
        if (pool.from_point is None) or pool.destination.hidden:
            continue

        # show real location to driver and confirmed passenger
        geometry = mapping(to_shape(pool.from_point))
        if not current_user.is_anonymous and \
                (pool.driver_id == current_user.id or pool.id in confirmed_carpools):
            is_approximate_location = False
        else:
            is_approximate_location = True
            geometry = approximate_location(geometry)

        features.append({
            'type': 'Feature',
            'geometry': geometry,
            'id': url_for('carpool.details', uuid=pool.uuid, _external=True),
            'properties': {
                'from_place': escape(pool.from_place),
                'to_place': escape(pool.destination.name),
                'seats_available': pool.seats_available,
                'leave_time': pool.leave_time.isoformat(),
                'return_time': pool.return_time.isoformat(),
                'leave_time_human': pool.leave_time.strftime(dt_format),
                'return_time_human': pool.return_time.strftime(dt_format),
                'driver_gender': escape(pool.driver.gender),
                'is_approximate_location': is_approximate_location,
                'hidden': pool.destination.hidden
            },
        })

    feature_collection = {'type': 'FeatureCollection', 'features': features}

    return jsonify(feature_collection)
def fulltextsearch(self) -> FeatureCollection:
    lang = locale_negotiator(self.request)

    try:
        language = self.languages[lang]
    except KeyError:
        return HTTPInternalServerError(detail=f"{lang!s} not defined in languages")

    if "query" not in self.request.params:
        return HTTPBadRequest(detail="no query")
    terms = self.fts_normalizer(self.request.params.get("query"))

    maxlimit = self.settings.get("maxlimit", 200)

    try:
        limit = int(self.request.params.get("limit", self.settings.get("defaultlimit", 30)))
    except ValueError:
        return HTTPBadRequest(detail="limit value is incorrect")
    limit = min(limit, maxlimit)

    try:
        partitionlimit = int(self.request.params.get("partitionlimit", 0))
    except ValueError:
        return HTTPBadRequest(detail="partitionlimit value is incorrect")
    partitionlimit = min(partitionlimit, maxlimit)

    terms_array = [
        IGNORED_STARTUP_CHARS_RE.sub("", elem)
        for elem in IGNORED_CHARS_RE.sub(" ", terms).split(" ")
    ]
    terms_ts = "&".join(w + ":*" for w in terms_array if w != "")
    _filter = FullTextSearch.ts.op("@@")(func.to_tsquery(language, terms_ts))

    if self.request.user is None:
        _filter = and_(_filter, FullTextSearch.public.is_(True))
    else:
        _filter = and_(
            _filter,
            or_(
                FullTextSearch.public.is_(True),
                FullTextSearch.role_id.is_(None),
                FullTextSearch.role_id.in_([r.id for r in self.request.user.roles]),
            ),
        )

    if "interface" in self.request.params:
        _filter = and_(
            _filter,
            or_(
                FullTextSearch.interface_id.is_(None),
                FullTextSearch.interface_id == self._get_interface_id(
                    self.request.params["interface"]),
            ),
        )
    else:
        _filter = and_(_filter, FullTextSearch.interface_id.is_(None))

    _filter = and_(_filter, or_(FullTextSearch.lang.is_(None), FullTextSearch.lang == lang))

    rank_system = self.request.params.get("ranksystem")
    if rank_system == "ts_rank_cd":
        # The numbers used in ts_rank_cd() below indicate a normalization method.
        # Several normalization methods can be combined using |.
        # 2 divides the rank by the document length
        # 8 divides the rank by the number of unique words in document
        # By combining them, shorter results seem to be preferred over longer ones
        # with the same ratio of matching words. But this relies only on testing it
        # and on some assumptions about how it might be calculated
        # (the normalization is applied two times with the combination of 2 and 8,
        # so the effect on at least the one-word-results is therefore stronger).
        rank = func.ts_rank_cd(FullTextSearch.ts, func.to_tsquery(language, terms_ts), 2 | 8)
    else:
        # Use similarity ranking system from module pg_trgm.
        rank = func.similarity(FullTextSearch.label, terms)

    if partitionlimit:
        # Here we want to partition the search results based on
        # layer_name and limit each partition.
        row_number = (
            func.row_number()
            .over(partition_by=FullTextSearch.layer_name,
                  order_by=(desc(rank), FullTextSearch.label))
            .label("row_number")
        )
        sub_query = DBSession.query(FullTextSearch).add_columns(
            row_number).filter(_filter).subquery()
        query = DBSession.query(
            sub_query.c.id,
            sub_query.c.label,
            sub_query.c.params,
            sub_query.c.layer_name,
            sub_query.c.the_geom,
            sub_query.c.actions,
        )
        query = query.filter(sub_query.c.row_number <= partitionlimit)
    else:
        query = DBSession.query(FullTextSearch).filter(_filter)
        query = query.order_by(desc(rank))
        query = query.order_by(FullTextSearch.label)

    query = query.limit(limit)
    objects = query.all()

    features = []
    for o in objects:
        properties = {"label": o.label}
        if o.layer_name is not None:
            properties["layer_name"] = o.layer_name
        if o.params is not None:
            properties["params"] = o.params
        if o.actions is not None:
            properties["actions"] = o.actions
        if o.actions is None and o.layer_name is not None:
            properties["actions"] = [{"action": "add_layer", "data": o.layer_name}]

        if o.the_geom is not None:
            geom = to_shape(o.the_geom)
            feature = Feature(id=o.id, geometry=geom,
                              properties=properties, bbox=geom.bounds)
            features.append(feature)
        else:
            feature = Feature(id=o.id, properties=properties)
            features.append(feature)

    return FeatureCollection(features)
def shape(self):
    return to_shape(self.geom)
def __get__(self, obj, type=None):
    geom = obj.__getattribute__(self.column)
    return geom is not None and shape.to_shape(obj.geom) or None
def parse_boundary(cls, value: WKBElement) -> Any:
    if value:
        return geometry.mapping(to_shape(value))
def lat(self):
    """Latitude"""
    return to_shape(self.coordinates).coords[:][0][1]
def locations(self):
    return [
        Location(longitude=location[0], latitude=location[1])
        for location in to_shape(self._locations).coords
    ]