def map():
    """Render the map template with building footprints and panorama images.

    Reads ``bdg_gids`` and ``img_gids`` from the Flask session, builds two
    GeoJSON FeatureCollections (buildings with their RRVS status, images with
    their GPS position and azimuth) and passes both to ``map.html``.

    NOTE(review): shadows the builtin ``map`` — kept for route compatibility.
    """
    # Building footprints for the gids stored in the session.
    bdg_gids = flask.session['bdg_gids']
    rows = object_attribute.query.filter(
        db.and_(object_attribute.object_id.in_(bdg_gids),
                object_attribute.attribute_type_code == 'RRVS_STATUS')).all()
    bdgs = []
    for row in rows:
        geom = t_object.query.filter_by(gid=row.object_id).first().the_geom
        geometry = json.loads(db.session.scalar(func.ST_AsGeoJSON(geom)))
        bdgs.append(Feature(id=row.object_id, geometry=geometry,
                            properties={"gid": row.object_id,
                                        "rrvs_status": row.attribute_value}))
    bdgs_json = dumps(FeatureCollection(bdgs))

    # Image metadata and GPS positions for the gids stored in the session.
    img_gids = flask.session['img_gids']
    image_rows = pan_imgs.query.filter(pan_imgs.gid.in_(img_gids)).all()
    gps_ids = [row.gps for row in image_rows]
    # BUG FIX: the original paired image_rows[i] with gps_rows[i], but an
    # IN-query does not guarantee result order; index GPS rows by gid instead.
    gps_by_gid = {row.gid: row
                  for row in gps.query.filter(gps.gid.in_(gps_ids)).all()}
    img_gps = []
    for image in image_rows:
        gps_row = gps_by_gid[image.gps]
        geometry = json.loads(
            db.session.scalar(func.ST_AsGeoJSON(gps_row.the_geom)))
        img_gps.append(Feature(id=image.gid, geometry=geometry,
                               properties={"img_id": image.gid,
                                           "repository": image.repository,
                                           "filename": image.filename,
                                           "frame_id": image.frame_id,
                                           "azimuth": gps_row.azimuth}))
    gps_json = dumps(FeatureCollection(img_gps))

    return flask.render_template('map.html', bdgs=bdgs_json, gps=gps_json)
def get_dataset_field(tablename, field):
    """Return column data (or derived GeoJSON) for one field of a dataset table.

    Args:
        tablename: database table name; reflected and mapped on first access,
                   then cached via ``seen_classes`` / ``current_app.class_references``.
        field: a column name, the configured geometry column, or one of the
               pseudo-fields ``'geojson'`` / ``'topojson'`` (the latter is a stub).

    Returns:
        dict with ``'status'`` and ``'data'``; the shape of ``'data'`` depends
        on *field*.
    """
    response = {'status': 'success', 'data': {}}
    if tablename in seen_classes:
        cls = current_app.class_references[tablename]
    else:
        # First access: reflect the table and build a mapped class on the fly.
        db.metadata.reflect(bind=db.engine)
        seen_classes.add(tablename)
        cls = type(
            str(tablename), (
                GeoPoly,
                db.Model,
            ), {
                '__tablename__': tablename,
                '__table_args__': {
                    'extend_existing': True
                }
            })
        current_app.class_references[tablename] = cls
    if field == config.geom_column:
        # Raw geometry column: one GeoJSON string per row.
        vector = cls.query.with_entities(
            geofuncs.ST_AsGeoJSON(getattr(cls, field))).all()
        response['data'] = [v[0] for v in vector]
    elif field == 'geojson':
        #TODO: How can this be cleaner? Do I need 2 queries go get geojson?
        geoms = cls.query.with_entities(
            geofuncs.ST_AsGeoJSON(getattr(cls, config.geom_column))).all()
        features = [{
            'type': 'Feature',
            'geometry': ast.literal_eval(geom[0])
        } for geom in geoms]
        response['data']['geojson'] = {
            "type": "FeatureCollection",
            "features": features
        }
    elif field == 'topojson':
        #TODO: Add topojson support if the DB is postgresql
        pass
    else:
        # Plain attribute column: unwrap the single-column rows and convert
        # Decimals to floats so the payload is JSON-serializable.
        # BUG FIX: the original tested responsevector[0] unconditionally,
        # raising IndexError for empty tables; the element-wise conversion
        # below also handles mixed/NULL values safely.
        vector = cls.query.with_entities(getattr(cls, field)).all()
        response['data'] = [
            float(v[0]) if isinstance(v[0], decimal.Decimal) else v[0]
            for v in vector
        ]
    return response
def curbrampsv2():
    """Serve curb ramps as a GeoJSON FeatureCollection.

    Query params:
        all:  'true' returns every row.
        bbox: 'minx,miny,maxx,maxy' restricts results to that bounding box.
        (neither) returns only the first 10 rows.
    """
    table = models.Curbramps
    bbox = request.args.get('bbox')
    all_rows = request.args.get('all')
    geojson_geom = gfunc.ST_AsGeoJSON(table.geom, 7).label('geom')
    # The selected columns are identical in every branch — build the query once
    # (the original repeated the same db.session.query(...) three times).
    select = db.session.query(table.id, geojson_geom)
    if all_rows == 'true':
        result = select.all()
    elif bbox:
        bounds = [float(b) for b in bbox.split(',')]
        result = select.filter(sql_utils.in_bbox(table.geom, bounds)).all()
    else:
        result = select.limit(10).all()

    feature_collection = geojson.FeatureCollection([])
    for row in result:
        feature = geojson.Feature()
        feature['geometry'] = json.loads(row.geom)
        feature['properties'] = {'id': row.id}
        feature_collection['features'].append(feature)
    return jsonify(feature_collection)
def get_mapped_projects(user_id: int, preferred_locale: str) -> UserMappedProjectsDTO:
    """ Get all projects a user has mapped on """
    # Local imports — presumably to avoid a circular import at module load
    # time; TODO confirm.
    from backend.models.postgis.task import Task
    from backend.models.postgis.project import Project

    # IDs of every project the user has mapped: User.projects_mapped is an
    # array column, unnest() turns it into one row per project id.
    query = db.session.query(func.unnest(
        User.projects_mapped)).filter_by(id=user_id)

    # Per-project count of tasks this user validated, restricted to the
    # projects found above.
    query_validated = (db.session.query(
        Task.project_id.label("project_id"),
        func.count(Task.validated_by).label("validated"),
    ).filter(Task.project_id.in_(query)).filter_by(
        validated_by=user_id).group_by(Task.project_id,
                                       Task.validated_by).subquery())

    # Per-project count of tasks this user mapped.
    query_mapped = (db.session.query(
        Task.project_id.label("project_id"),
        func.count(Task.mapped_by).label("mapped"),
    ).filter(Task.project_id.in_(query)).filter_by(
        mapped_by=user_id).group_by(Task.project_id,
                                    Task.mapped_by).subquery())

    # FULL OUTER JOIN of the two counts so projects that were only mapped or
    # only validated still appear; coalesce fills the missing side with 0.
    query_union = (db.session.query(
        func.coalesce(query_validated.c.project_id,
                      query_mapped.c.project_id).label("project_id"),
        func.coalesce(query_validated.c.validated, 0).label("validated"),
        func.coalesce(query_mapped.c.mapped, 0).label("mapped"),
    ).join(
        query_mapped,
        query_validated.c.project_id == query_mapped.c.project_id,
        full=True,
    ).subquery())

    # Join against Project for status/locale/centroid; newest project first.
    results = (db.session.query(
        Project.id,
        Project.status,
        Project.default_locale,
        query_union.c.mapped,
        query_union.c.validated,
        functions.ST_AsGeoJSON(Project.centroid),
    ).filter(Project.id == query_union.c.project_id).order_by(
        desc(Project.id)).all())

    # Translate each result row into a MappedProject DTO.
    # Row layout: (id, status, default_locale, mapped, validated, centroid).
    mapped_projects_dto = UserMappedProjectsDTO()
    for row in results:
        mapped_project = MappedProject()
        mapped_project.project_id = row[0]
        mapped_project.status = ProjectStatus(row[1]).name
        mapped_project.tasks_mapped = row[3]
        mapped_project.tasks_validated = row[4]
        mapped_project.centroid = geojson.loads(row[5])

        # Project name localised to the caller's preferred locale (falls back
        # via the project's default locale, row[2]).
        project_info = ProjectInfo.get_dto_for_locale(
            row[0], preferred_locale, row[2])
        mapped_project.name = project_info.name

        mapped_projects_dto.mapped_projects.append(mapped_project)

    return mapped_projects_dto
def sidewalksv1():
    """Serve sidewalks (id, geometry, grade) as a GeoJSON FeatureCollection.

    Query params:
        bbox: 'minx,miny,maxx,maxy' — restrict results to that bounding box.
              Without it, only the first 10 rows are returned.
    """
    table = models.SidewalksData
    bbox = request.args.get('bbox')
    geojson_geom = gfunc.ST_AsGeoJSON(table.geom, 7).label('geom')
    # The selected columns are identical in both branches — build the query
    # once (the original repeated it).
    select = db.session.query(table.id, geojson_geom, table.grade)
    if bbox:
        bounds = [float(b) for b in bbox.split(',')]
        result = select.filter(sql_utils.in_bbox(table.geom, bounds)).all()
    else:
        result = select.limit(10).all()

    feature_collection = geojson.FeatureCollection([])
    for row in result:
        feature = geojson.Feature()
        feature['geometry'] = json.loads(row.geom)
        feature['properties'] = {
            'id': row.id,
            'grade': str(round(row.grade, 3))
        }
        feature_collection['features'].append(feature)
    return jsonify(feature_collection)
def converted_geom(self):
    """Return ``self.geom`` transformed to WGS84 (EPSG:4326) as a geojson object.

    The original round-tripped the database output through
    ``json.dumps(json.loads(...))`` before handing it to ``geojson.loads``;
    ``ST_AsGeoJSON`` already yields a valid JSON string, so parse it directly.
    """
    return geojson.loads(
        db.session.scalar(
            func.ST_AsGeoJSON(func.ST_Transform(self.geom, 4326))))
def get_coordinates(self):
    """Return the point as a ``{"lng": ..., "lat": ...}`` dict, or None when unset."""
    if self.coordinates is None:
        return None
    parsed = json.loads(
        db.session.scalar(func.ST_AsGeoJSON(self.coordinates)))
    # GeoJSON positions are [longitude, latitude].
    lng, lat = parsed['coordinates'][0], parsed['coordinates'][1]
    return {"lng": lng, "lat": lat}
def get_county_geojson(bwks=None):
    """Return one GeoJSON dict per county (bwk): the merged, WGS84-transformed
    district geometry plus candidate info and summed party results.

    Args:
        bwks: optional iterable of bwk codes. When given, only those counties
              are returned. (The original duplicated the entire query just to
              add this filter — it is now applied conditionally.)
    """
    query = db_session.query(
        MergedDistrictDiff.bwk,
        functions.ST_AsGeoJSON(
            functions.ST_Union(
                functions.ST_Transform(MergedDistrictDiff.geom,
                                       4326))).label("geom"),
        *sum_party_results(MergedDistrictDiff))
    if bwks:
        query = query.filter(MergedDistrictDiff.bwk.in_(bwks))
    query = query.group_by(MergedDistrictDiff.bwk)

    geojsons = []
    for bwk, geom, cdu, spd, gruene, die_linke, fdp, afd in query.all():
        candidates = db_session.query(Candidate).filter(Candidate.bwk == bwk)
        geojson = json.loads(geom)
        geojson['properties'] = {
            'bwk': bwk,
            'candidates': {
                candidate.party_key(): candidate.get_json()
                for candidate in candidates
            },
            'result': {
                'cdu': int(cdu),
                'spd': int(spd),
                'gruene': int(gruene),
                'die_linke': int(die_linke),
                'fdp': int(fdp),
                'afd': int(afd)
            }
        }
        geojsons.append(geojson)
    return geojsons
def get_district_geojson(district):
    """Return a list of GeoJSON dicts (WGS84 geometry + properties) for *district*."""
    rows = db_session.query(
        district,
        functions.ST_AsGeoJSON(functions.ST_Transform(district.geom, 4326)))
    features = []
    for entity, geom_json in rows.all():
        feature = json.loads(geom_json)
        feature["properties"] = entity.get_geojson_dict()
        features.append(feature)
    return features
def to_dict(self):
    """Serialize this boundary as a GeoJSON-Feature-shaped dict.

    NOTE(review): only single-ring polygons get a geometry — anything else
    leaves "geometry" as an empty dict, and the outer ring is emitted
    directly as "coordinates" rather than wrapped in a ring list; confirm
    downstream consumers expect exactly this shape.
    """
    parsed = json.loads(
        db.session.scalar(geofunc.ST_AsGeoJSON(self.polygon)))
    rings = parsed["coordinates"]
    boundary = {
        "type": "Feature",
        "properties": {"name": self.name, "code": self.code},
        "geometry": {},
    }
    if len(rings) == 1:
        boundary["geometry"]["type"] = "Polygon"
        boundary["geometry"]["coordinates"] = rings[0]
    return boundary
def to_json(self):
    """Serialize this array record to a dict; geo_location becomes parsed GeoJSON or None."""
    if self.geo_location is None:
        geo_location = None
    else:
        geo_location = json.loads(
            db.session.scalar(func.ST_AsGeoJSON(self.geo_location)))
    return {
        'id': self.id,
        'array_code': self.array_code,
        'description': self.description,
        'geo_location': geo_location,
        'array_name': self.array_name,
        'display_name': self.display_name
    }
def to_json(self):
    """Serialize this platform deployment to a dict; geo_location is parsed GeoJSON or None."""
    if self.geo_location is None:
        geo_location = None
    else:
        geo_location = json.loads(
            db.session.scalar(func.ST_AsGeoJSON(self.geo_location)))
    return {
        'id': self.id,
        'reference_designator': self.reference_designator,
        'array_id': self.array_id,
        'display_name': self.proper_display_name,
        'start_date': self.start_date,
        'end_date': self.end_date,
        'geo_location': geo_location
    }
def get_places():
    """Return every place (id, label, geometry as a GeoJSON string) as JSON."""
    session = app.session()
    rows = session.query(
        Place.id, Place.label,
        functions.ST_AsGeoJSON(Place.geometry).label('geometry'))
    output = [{'id': row.id, 'label': row.label, 'geometry': row.geometry}
              for row in rows]
    session.close()
    return jsonify({'places': output})
def geometry(obj):
    """Return ``[{'gid': ..., 'geometry': <GeoJSON string>}]`` for every row of
    the mapped class registered under *obj* in ``dictClassData``, or a JSON
    ``'error'`` payload for unknown keys.
    """
    dataObj = dictClassData.get(obj, None)
    if not dataObj:
        return jsonify('error')
    # Select only the geometries, transformed to WGS84 and encoded as GeoJSON.
    # (Reuses the already-fetched dataObj instead of re-indexing
    # dictClassData[obj] as the original did.)
    query = select([
        dataObj.gid.label('gid'),
        func.ST_AsGeoJSON(func.ST_Transform(dataObj.geom,
                                            4326)).label('geom')
    ]).where(dataObj.geom != None)
    dataQuery = db.session.execute(query).fetchall()
    geom_all = []
    for data in dataQuery:
        data = dict(data)
        geom_all.append({'gid': data['gid'], 'geometry': data['geom']})
    return jsonify(geom_all)
def get_dataset_geojson(request, table, page_number): """ Returns geojson created from the geospatial columns of a given page of a table """ # Get the range of database IDs included in the current page of data as well # as the total number of pages id_range, page_count = get_pagination_id_range(table, page_number) # Get a session session = m.get_session() t = getattr(m.Base.classes, table) # Get geospatial columns geo = m.GEOSPATIAL_COLUMNS geospatial_columns = session.query(geo.column).filter(geo.dataset_uuid == table).all() geo_column_objects = [] geo_column_names = [] # Create the geospatial object from the columns for col in geospatial_columns: geo_column_objects.append(geofunc.ST_AsGeoJSON(getattr(t, col[0]))) geo_column_names.append(col[0]) # build up geospatial select functions # Note: we're just grabbing the first geospatial column right now. it is explicitly labeled 'geometry' # a picker for geo columns might be desirable someday geojson = session.query(t, geo_column_objects[0].label('geometry')).filter( t.id > id_range[0], t.id <= id_range[1] ) # Get a DataFrame with the results of the query data = pd.read_sql(geojson.statement, geojson.session.bind) geo_column_names.append('geometry') # Build some properly formatted geojson to pass into leaflet geojson = [] for i, r in data.iterrows(): # Geometry and properties are both required for a 'Feature' object. geometry = r['geometry'] properties = r.drop(geo_column_names).to_dict() geojson.append({ 'type': 'Feature', 'properties': properties, 'geometry': json.loads(geometry), 'keys': sorted(properties.keys()) }) return JsonResponse(geojson, safe=False)
def get_geojson(self):
    """Return this record as a compact GeoJSON Feature string (CRS84)."""
    geometry = json.loads(
        db.session.scalar(geofunc.ST_AsGeoJSON(self.geom)))
    feature = {
        "type": "Feature",
        "id": self.code,
        "properties": self.get_properties(),
        "crs": {
            "type": "name",
            "properties": {
                "name": "urn:ogc:def:crs:OGC:1.3:CRS84"
            }
        },
        "geometry": geometry
    }
    # Space-free separators keep the serialized payload compact.
    return json.dumps(feature, separators=(',', ':'))
def get_place(id_):
    """Return one place by id as JSON, or a not-found message.

    BUG FIX: the original returned early on a missing place without calling
    session.close(), leaking the session; the finally block closes it on
    every path.
    """
    session = app.session()
    try:
        place = session.query(
            Place.id, Place.label,
            functions.ST_AsGeoJSON(
                Place.geometry).label('geometry')).filter_by(id=id_).first()
        if not place:
            return jsonify({'message': 'No user found.'})
        return jsonify({
            'id': place.id,
            'label': place.label,
            'geometry': place.geometry
        })
    finally:
        session.close()
def crossingsv2():
    """Serve street crossings as a GeoJSON FeatureCollection.

    Query params:
        all:  'true' returns every row.
        bbox: 'minx,miny,maxx,maxy' restricts results to that bounding box.
        (neither) returns only the first 10 rows.

    Coordinates are rounded to 7 decimal places (~1 cm).
    """
    table = models.Crossings
    bbox = request.args.get('bbox')
    all_rows = request.args.get('all')
    geojson_geom = gfunc.ST_AsGeoJSON(table.geom, 7).label('geom')
    # The selected columns are identical in every branch — build the query once
    # (the original repeated the same db.session.query(...) three times).
    select = db.session.query(table.id, geojson_geom, table.grade,
                              table.curbramps)
    if all_rows == 'true':
        result = select.all()
    elif bbox:
        bounds = [float(b) for b in bbox.split(',')]
        result = select.filter(sql_utils.in_bbox(table.geom, bounds)).all()
    else:
        result = select.limit(10).all()

    fc = geojson.FeatureCollection([])
    for row in result:
        feature = geojson.Feature()
        geometry = json.loads(row.geom)
        # Round every LineString vertex to 7 decimal places.
        geometry['coordinates'] = [[round(c[0], 7), round(c[1], 7)]
                                   for c in geometry['coordinates']]
        feature['geometry'] = geometry
        feature['properties'] = {'id': row.id,
                                 'grade': str(round(row.grade, 3)),
                                 'curbramps': row.curbramps}
        fc['features'].append(feature)
    return jsonify(fc)
def add_way_point(
    route_id: uuid.UUID,
    latitude: float,
    longitude: float,
    db_session: Session,
) -> None:
    """Append the given point to the route's way-point LINESTRING.

    NOTE(review): a route with no way points yet ends up with the new point
    twice — presumably to keep the LINESTRING valid (a one-point linestring
    is not); confirm this is intended.
    """
    route, way_points = (db_session.query(
        Route, functions.ST_AsGeoJSON(
            Route.way_points)).filter_by(id=route_id).first())
    # GeoJSON positions use longitude, latitude order.
    if way_points is None:
        coords = [[longitude, latitude]]
    else:
        coords = json.loads(way_points)["coordinates"]
    coords.append([longitude, latitude])
    wkt_points = ', '.join(f"{lon} {lat}" for lon, lat in coords)
    route.way_points = f"LINESTRING({wkt_points})"
def get_ndvi():
    """Return every NDVI polygon as a list of GeoJSON Features (WGS84)."""
    stmt = select([
        Ndvi.gid.label('gid'),
        func.ST_AsGeoJSON(func.ST_Transform(Ndvi.geom, 4326)).label('geom')
    ]).where(Ndvi.geom != None)
    features = []
    for record in db.session.execute(stmt).fetchall():
        record = dict(record)
        features.append({
            'type': 'Feature',
            'properties': {
                'gid': record['gid'],
            },
            'geometry': json.loads(record['geom'])
        })
    return jsonify(features)
def to_json(self):
    """Serialize this instrument deployment to a dict.

    geo_location is the parsed GeoJSON of self.geo_location (or None);
    start/end dates are converted with self._pytype when present.
    """
    geo_location = None
    if self.geo_location is not None:
        # BUG FIX: the original parsed the GeoJSON but discarded the result,
        # so 'geo_location' was always emitted as None.
        geo_location = json.loads(
            db.session.scalar(func.ST_AsGeoJSON(self.geo_location)))
    json_inst_deploy = {
        'id': self.id,
        'reference_designator': self.reference_designator,
        'platform_deployment_id': self.platform_deployment_id,
        'display_name': self.display_name,
        'depth': self.depth,
        'start_date': None,
        'end_date': None,
        'geo_location': geo_location
    }
    if self.start_date is not None:
        json_inst_deploy['start_date'] = self._pytype(self.start_date)
    if self.end_date is not None:
        json_inst_deploy['end_date'] = self._pytype(self.end_date)
    return json_inst_deploy
def get_query_geojson(request, table, columnName, queryString):
    """ Returns geojson created from the geospatial columns of query """
    t = getattr(m.Base.classes, table)
    # BUG FIX: the original used 'session' without ever defining it
    # (NameError unless a module-level session happened to exist); open one
    # here the same way get_dataset_geojson does.
    session = m.get_session()
    # Get geospatial columns
    geo = m.GEOSPATIAL_COLUMNS
    geospatial_columns = session.query(
        geo.column).filter(geo.dataset_uuid == table).all()
    geo_column_objects = []
    geo_column_names = []
    # Create the geospatial object from the columns
    for col in geospatial_columns:
        geo_column_objects.append(geofunc.ST_AsGeoJSON(getattr(t, col[0])))
        geo_column_names.append(col[0])
    # build up geospatial select functions
    # Note: we're just grabbing the first geospatial column right now. it is
    # explicitly labeled 'geometry'; a picker for geo columns might be
    # desirable someday.
    geojson = session.query(t, geo_column_objects[0].label('geometry')).filter(
        # Case-insensitive substring match on the requested column.
        getattr(t, columnName).ilike("%" + queryString + "%")
    )
    # Get a DataFrame with the results of the query
    data = pd.read_sql(geojson.statement, geojson.session.bind)
    geo_column_names.append('geometry')
    # Build some properly formatted geojson to pass into leaflet
    geojson = []
    for i, r in data.iterrows():
        # Geometry and properties are both required for a 'Feature' object.
        geometry = r['geometry']
        properties = r.drop(geo_column_names).to_dict()
        geojson.append({
            'type': 'Feature',
            'properties': properties,
            'geometry': json.loads(geometry),
            'keys': sorted(properties.keys())
        })
    return JsonResponse(geojson, safe=False)
class Geom(db.Model):
    """SQLAlchemy model for the "spatial" table: a WGS84 multipolygon feature
    with named attributes, serializable to a GeoJSON Feature via serialize()."""
    __tablename__ = "spatial"
    # Surrogate primary key.
    gid = db.Column(db.Integer, primary_key=True, autoincrement=True)
    name = db.Column(db.Text, nullable=False)
    habitat = db.Column(db.Integer, nullable=False)
    water = db.Column(db.Integer, nullable=False)
    species = db.Column(db.Integer, nullable=False)
    community = db.Column(db.Integer, nullable=False)
    ecosystem = db.Column(db.Integer, nullable=False)
    economy = db.Column(db.Integer, nullable=False)
    # NOTE(review): f2012/f2007/f2002 look like per-year figures — confirm
    # their meaning and units against the data source.
    f2012 = db.Column(db.Integer, nullable=False)
    f2007 = db.Column(db.Integer, nullable=False)
    f2002 = db.Column(db.Integer, nullable=False)
    scale = db.Column(db.Text, nullable=False)
    geom = db.Column(Geometry(geometry_type='MULTIPOLYGON', srid=4326))
    # GeoJSON rendering of geom, computed by the database on load.
    coords = db.column_property(func.ST_AsGeoJSON(geom))

    def serialize(self):
        """Return this row as a GeoJSON Feature dict."""
        obj = {}
        obj["geometry"] = json.loads(self.coords)
        # ST_AsGeoJSON already emits "type"; this re-asserts it explicitly.
        obj["geometry"]["type"] = "MultiPolygon"
        obj["properties"] = {
            'id': self.gid,
            'name': self.name,
            'habitat': self.habitat,
            'water': self.water,
            'species': self.species,
            'community': self.community,
            'ecosystem': self.ecosystem,
            'economy': self.economy,
            'f2012': self.f2012,
            'f2007': self.f2007,
            'f2002': self.f2002,
            'scale': self.scale
        }
        obj["type"] = "Feature"
        return obj
def show_source_geojson(slug):
    """Return one imagery source, looked up by slug, as a GeoJSON Feature (404 if absent)."""
    source = Source.query.filter_by(slug=slug).first_or_404()
    bbox_geometry = json.loads(
        db.session.scalar(geofunc.ST_AsGeoJSON(source.bbox, 6)))
    resolution = '{}{}'.format(source.resolution, source.resolution_unit)
    properties = {
        'name': source.name,
        'vintage': source.vintage,
        'resolution': resolution,
        'slug': source.slug,
        'url_template': source.url_template,
        'min_zoom': source.min_zoom,
        'max_zoom': source.max_zoom,
    }
    return jsonify({
        'type': "Feature",
        'id': source.slug,
        'properties': properties,
        'geometry': bbox_geometry,
    })
def to_geojson(cls, locs, props=None):
    """Build a GeoJSON FeatureCollection dict from location records.

    Args:
        locs: sequence of location objects exposing .point, .named_location,
              .elevation and .domain.
        props: optional mapping of extra property name -> per-location list,
               where props[key][i] belongs to locs[i].
               (BUG FIX: the original used the mutable default ``props={}``.)

    The stored points have lat/lng swapped (see the original author's note
    below), so the coordinates are flipped before being emitted.
    """
    if props is None:
        props = {}
    features = []
    for i, loc in enumerate(locs):
        # omg I stored the order of lat/long wrong? jfc
        # stupid fix until I reingest the location data
        geo = json.loads(
            db.session.scalar(geo_func.ST_AsGeoJSON(loc.point)))
        geo['coordinates'][0], geo['coordinates'][1] = (
            geo['coordinates'][1], geo['coordinates'][0])
        properties = {
            'name': loc.named_location,
            'elevation': loc.elevation,
            'domain': loc.domain,
        }
        for key, values in props.items():
            properties[key] = values[i]
        features.append({
            'type': 'Feature',
            'geometry': geo,
            'properties': properties,
        })
    return {'type': 'FeatureCollection', 'features': features}
def find_by_sgg_cd(cls, sgg_cd):
    """Return (emd_cd, emd_ko_nm, centroid-GeoJSON) rows whose emd_cd starts
    with *sgg_cd*, ordered by emd_cd ascending."""
    centroid_geojson = func.ST_AsGeoJSON(
        func.ST_Centroid(cls.geom)).label('geojson')
    query = db.session.query(cls.emd_cd, cls.emd_ko_nm, centroid_geojson)
    query = query.filter(cls.emd_cd.like('{0}%'.format(sgg_cd)))
    return query.order_by(cls.emd_cd.asc()).all()
def forecast(request_type):
    """Return per-state/district/station forecast summary statistics as a
    GeoJSON-style FeatureCollection.

    example queries:
    http://127.0.0.1:5000/forecast/district.json?datetime=2014121112&hours=6
    http://127.0.0.1:5000/forecast/state.json?datetime=2014121112&hours=3
    http://127.0.0.1:5000/forecast/station.json?datetime=2014121112&hours=9
    """
    # datetime format JJJJMMTTHH (Observation: JJJJMMMTT00)
    # BUG FIX: the default must be a string — the original passed the int
    # literal 0000000000, which makes strptime raise a TypeError.  A missing
    # parameter still fails to parse (year 0); consider abort(400) instead.
    request_datetime = datetime.strptime(
        request.args.get("datetime", "0000000000"), "%Y%m%d%H")
    # the number of hours into the future a forecast was made
    hours = request.args.get("hours", None)

    if request_type == 'state':
        model, geo = State, State.geometry
    elif request_type == 'district':
        model, geo = District, District.geometry
    elif request_type == 'station':
        model, geo = Station, Station.region
    else:
        abort(404)

    # BUG FIX: time.clock() was removed in Python 3.8; perf_counter() is the
    # documented replacement for wall-clock style timing.
    start_time = time.perf_counter()
    forecasts = db.session \
        .query(
            model.name,
            func.ST_AsGeoJSON(geo).label('geometry'),
            func1.ST_SummaryStats(func1.ST_CLIP(Forecast.rast, 1, geo, -999, True), 1, True).label('stats_tmp'),
            # func1.ST_SummaryStats(func1.ST_CLIP(Forecast.rast, 2, geo, -999, True), 1, True).label('stats_tmin'),
            # func1.ST_SummaryStats(func1.ST_CLIP(Forecast.rast, 3, geo, -999, True), 1, True).label('stats_tmax'),
            func1.ST_SummaryStats(func1.ST_CLIP(Forecast.rast, 4, geo, -999, True), 1, True).label('stats_pwat')
        ) \
        .filter(Forecast.rast.ST_Intersects(geo),
                Forecast.forecast_date == request_datetime,
                Forecast.forecast_hour == hours) \
        .all()
    print(time.perf_counter() - start_time, "seconds for the query")

    # abort on empty queries (usually due to dates or hours not covered)
    if len(forecasts) == 0:
        abort(404)

    # build the response (FeatureCollection)
    start_time = time.perf_counter()
    keys = ['count', 'sum', 'mean', 'stddev', 'min', 'max']
    features = []
    for f in forecasts:
        response_builder = {
            'name': f.name,
            'type': request_type,
            'geometry': f.geometry
        }
        # unpack summary stats — strings of the form
        # '(<count>,<sum>,<mean>,<stddev>,<min>,<max>)'.
        # PERF FIX: the original re-split the stats strings on every
        # iteration of the inner loop; parse each one once per feature.
        stats_tmp = f.stats_tmp.replace('(', '').replace(')', '').split(',')
        # stats_tmin = f.stats_tmin.replace('(', '').replace(')', '').split(',')
        # stats_tmax = f.stats_tmax.replace('(', '').replace(')', '').split(',')
        stats_pwat = f.stats_pwat.replace('(', '').replace(')', '').split(',')
        for i, key in enumerate(keys):
            response_builder['tmp_' + key] = stats_tmp[i]
            # response_builder['tmin_' + key] = stats_tmin[i]
            # response_builder['tmax_' + key] = stats_tmax[i]
            response_builder['pwat_' + key] = stats_pwat[i]
        features.append(to_feature(response_builder))

    # build the feature collection (typo 'feautre_collection' fixed)
    feature_collection = {
        "type": "FeatureCollection",
        "features": features
    }
    print(time.perf_counter() - start_time, "seconds for building the dict")

    # jsonify
    start_time = time.perf_counter()
    response = json.jsonify(feature_collection)
    print(time.perf_counter() - start_time, "seconds for jsonification")
    return response
def point2latlng(point):
    "Converts point to lat and lng"
    # GeoJSON stores [lng, lat]; reversing yields [lat, lng].
    parsed = json.loads(db.session.scalar(func.ST_AsGeoJSON(point)))
    latlng = parsed['coordinates']
    latlng.reverse()
    return latlng
def from_point_to_xy(pt):
    """Extract x and y coordinates from a point geometry."""
    # noinspection PyUnresolvedReferences
    geojson_str = db.session.scalar(func.ST_AsGeoJSON(pt.point))
    return json.loads(geojson_str)['coordinates']
def _handle_parameters(self): response = self.request.response # Make sure the _LOCATION_ cookie is correctly set: The old version GUI # version used to store the map center and the zoom level which is not # understood by new GUI (which stores the map extent as 4 coordinates) if '_LOCATION_' in self.request.cookies: c = urllib.parse.unquote(self.request.cookies['_LOCATION_']) if len(c.split('|')) == 3: response.delete_cookie('_LOCATION_') # Check if language (_LOCALE_) is set if self.request is not None: if '_LOCALE_' in self.request.params: response.set_cookie('_LOCALE_', self.request.params.get('_LOCALE_'), timedelta(days=90)) elif '_LOCALE_' in self.request.cookies: pass # Check if profile (_PROFILE_) is set if self.request is not None: # TODO if '_PROFILE_' in self.request.params: # Set the profile cookie profile_code = self.request.params.get('_PROFILE_') response.set_cookie('_PROFILE_', profile_code, timedelta(days=90)) # Update _LOCATION_ from cookies to profile geometry bbox # retrieved from database profile_db = DBSession.query(Profile). \ filter(Profile.code == profile_code). \ first() if profile_db is not None: # Calculate and transform bounding box # bbox = DBSession.scalar(geofunctions.ST_Envelope(profile_db.geometry)) bbox = DBSession.scalar( geofunctions.ST_Envelope( geofunctions.ST_Transform(profile_db.geometry, 900913))) gjson = geojson.loads( DBSession.scalar(geofunctions.ST_AsGeoJSON(bbox))) coords = gjson['coordinates'][0] p1 = coords[0] p2 = coords[2] l = '%s,%s' % (','.join([str(x) for x in p1]), ','.join( [str(x) for x in p2])) response.set_cookie('_LOCATION_', urllib.parse.quote(l), timedelta(days=90)) elif '_PROFILE_' in self.request.cookies: # Profile already set, leave it pass else: # If no profile is set, set the default profile response.set_cookie('_PROFILE_', get_default_profile(self.request), timedelta(days=90))