def geojson_bbox(bounds):
    """Return a GeoJSON FeatureCollection of all lines and points carrying a
    maxheight value that intersect the given bounding box.

    :param bounds: comma-separated "min_lon,min_lat,max_lon,max_lat" string
        in WGS84 (EPSG:4326).
    :return: result of ``construct_geojson`` over the combined line and
        point query results.
    :raises ValueError: if *bounds* does not contain exactly four numeric parts.
    """
    # Parse once and convert to float so BOTH queries bind numeric values.
    # (Previously the line-query envelope received raw strings while the
    # point-query envelope received floats — inconsistent and fragile.)
    min_x, min_y, max_x, max_y = (float(part) for part in bounds.split(','))
    envelope = func.ST_MakeEnvelope(min_x, min_y, max_x, max_y, 4326)

    line_result = get_base_line_query().filter(
        DbLine.maxheight.isnot(None)
    ).filter(
        func.ST_Intersects(
            envelope,
            # Lines are matched by their centroid, reprojected to WGS84.
            func.ST_Transform(func.ST_Centroid(DbLine.way), 4326),
        )
    ).all()

    point_result = get_base_point_query().filter(
        DbPoint.maxheight.isnot(None)
    ).filter(
        func.ST_Intersects(
            envelope,
            func.ST_Transform(DbPoint.way, 4326),
        )
    ).all()

    return construct_geojson(line_result + point_result)
def find_distance(self, geom_1, geom_2):
    """Return the distance between two geometries, measured after
    reprojecting both to Web Mercator (EPSG:3857)."""
    # Build the distance expression once, then fetch the scalar result.
    distance_expr = func.ST_Distance(
        func.ST_Transform(geom_1, 3857),
        func.ST_Transform(geom_2, 3857),
    )
    row = connection.execute(sqlalchemy.select([distance_expr])).fetchone()
    return row[0]
def find_distance_to_nearby_prop(geom, cat_id, buffer_size=2000):
    """Return distances (EPSG:3857 units) from *geom* to every property of
    category *cat_id* lying closer than *buffer_size*."""
    # Name the distance expression once; it is used both as the selected
    # column and inside the WHERE clause.
    distance_expr = func.ST_Distance(
        func.ST_Transform(geom, 3857),
        func.ST_Transform(table.model.c.geom, 3857),
    )
    query = sqlalchemy.select([distance_expr]).where(
        and_(
            distance_expr < buffer_size,
            table.model.c.category_id == cat_id,
        )
    )
    return connection.execute(query).fetchall()
def get_municipalities():
    """List all enabled municipalities
    ---
    tags:
      - Reférentiel géo
    definitions:
      area_name:
        type: string
        description: Municipality name
      area_code:
        type: string
        description: Municipality insee code
      geometry:
        type: geometry
    responses:
      200:
        description: A list of municipalities
    """
    try:
        # id_type == 101 selects municipality-type areas; geometries are
        # reprojected to WGS84 for GeoJSON output.
        query = db.session.query(
            LAreas.area_name,
            LAreas.area_code,
            func.ST_Transform(LAreas.geom, 4326).label('geom'),
        ).filter(LAreas.enable, LAreas.id_type == 101)

        features = []
        for row in query.all():
            feature = get_geojson_feature(row.geom)
            properties = feature['properties']
            properties['area_name'] = row.area_name
            properties['area_code'] = row.area_code
            features.append(feature)
        return FeatureCollection(features)
    except Exception as e:
        # Surface any query/serialization failure as a 400 with its message.
        return {'message': str(e)}, 400
def get_grs_schema(cls, grs_id, bbox: Tuple[float, float, float, float] = None, tiles=None):
    """Retrieve a Grid Schema definition with tiles associated."""
    schema: GridRefSys = GridRefSys.query().filter(GridRefSys.id == grs_id).first()
    if schema is None:
        return 'GRS {} not found.'.format(grs_id), 404

    geom_table = schema.geom_table
    srid_column = get_srid_column(geom_table.c, default_srid=4326)
    # Grid geometry reprojected to WGS84 — used both for bbox filtering
    # and for the serialized GeoJSON output.
    geom_wgs84 = func.ST_Transform(
        func.ST_SetSRID(geom_table.c.geom, srid_column), 4326)

    filters = []
    if bbox is not None:
        x_min, y_min, x_max, y_max = bbox
        filters.append(
            func.ST_Intersects(
                func.ST_MakeEnvelope(x_min, y_min, x_max, y_max, 4326),
                geom_wgs84))
    if tiles:
        filters.append(geom_table.c.tile.in_(tiles))

    rows = db.session.query(
        geom_table.c.tile,
        func.ST_AsGeoJSON(geom_wgs84, 6, 3)
            .cast(sqlalchemy.JSON)
            .label('geom_wgs84'),
    ).filter(*filters).all()

    dump_grs = Serializer.serialize(schema)
    dump_grs['tiles'] = [dict(id=row.tile, geom_wgs84=row.geom_wgs84) for row in rows]
    return dump_grs, 200
def get_bbox(tile_id: str, grs: GridRefSys) -> str:
    """Retrieve the bounding box representation as string.

    Queries the grid geometry table for *tile_id* and returns the WGS84
    bounding diagonal coordinates as a comma-separated string
    ("xmin,ymin,xmax,ymax").

    :param tile_id: tile identifier to look up.
    :param grs: grid reference system whose ``geom_table`` is queried.
    :return: comma-separated coordinate string.
    """
    geom_table = grs.geom_table
    bbox_result = db.session.query(
        geom_table.c.tile,
        func.ST_AsText(func.ST_BoundingDiagonal(func.ST_Transform(geom_table.c.geom, 4326)))
    ).filter(
        geom_table.c.tile == tile_id
    ).first()

    # ST_AsText yields e.g. "LINESTRING(xmin ymin,xmax ymax)"; extract the
    # text between the parentheses. BUG FIX: the original computed the
    # closing-paren index from bbox_result[0] (the tile id string) instead
    # of bbox_result[1] (the WKT text), corrupting the extracted bbox.
    wkt = bbox_result[1]
    bbox = wkt[wkt.find('(') + 1:wkt.find(')')]
    return bbox.replace(' ', ',')
def get_grs_schema(self, grs_id):
    """Retrieve a Grid Schema definition with tiles associated."""
    schema = GridRefSys.query().filter(GridRefSys.id == grs_id).first()
    if schema is None:
        return 'GRS {} not found.'.format(grs_id), 404

    geom_table = schema.geom_table
    # Tile geometries serialized as WGS84 GeoJSON (6 decimal places,
    # option flags 3) and cast to JSON for direct embedding in the payload.
    geojson_expr = func.ST_AsGeoJSON(
        func.ST_Transform(geom_table.c.geom, 4326), 6, 3)
    rows = db.session.query(
        geom_table.c.tile,
        geojson_expr.cast(sqlalchemy.JSON).label('geom_wgs84'),
    ).all()

    dump_grs = Serializer.serialize(schema)
    dump_grs['tiles'] = [dict(id=row.tile, geom_wgs84=row.geom_wgs84) for row in rows]
    return dump_grs, 200
def get_municipality(insee):
    """Get one enabled municipality by insee code
    ---
    tags:
      - Reférentiel géo
    parameters:
      - name: insee
        in: path
        type: string
        required: true
        default: none
    properties:
      area_name:
        type: string
        description: Municipality name
      area_code:
        type: string
        description: Municipality insee code
      geometry:
        type: geometry
    responses:
      200:
        description: A municipality
    """
    try:
        # id_type == 101 selects municipality-type areas; geometry is
        # reprojected to WGS84 and only the first match is kept.
        query = (
            db.session.query(
                LAreas.area_name,
                LAreas.area_code,
                func.ST_Transform(LAreas.geom, 4326).label("geom"),
            )
            .filter(
                LAreas.enable,
                LAreas.area_code == str(insee),
                LAreas.id_type == 101,
            )
            .limit(1)
        )
        rows = query.all()
        first = rows[0]
        feature = Feature(geometry=to_shape(first.geom))
        feature["properties"]["area_name"] = first.area_name
        feature["properties"]["area_code"] = first.area_code
        return feature
    except Exception as e:
        # An empty result raises IndexError above and is reported here too.
        return {"message": str(e)}, 400
def orchestrate(self):
    """Orchestrate datacube definition and prepare temporal resolutions.

    Loads the target data cube and its parameters, resolves the composition
    timeline, the tiles and bands involved, and fills ``self.mosaics`` with
    one entry per tile/period carrying the tile extents needed for
    composition. Mutates ``self.datacube``, ``self.properties``,
    ``self.tiles``, ``self.bands``, ``self.band_map`` and ``self.mosaics``.

    :raises RuntimeError: when no cube parameters are configured, or when a
        reused cube does not provide all required bands.
    """
    self.datacube = Collection.query().filter(
        Collection.name == self.params['datacube']).one()

    temporal_schema = self.datacube.temporal_composition_schema

    cube_parameters: CubeParameters = CubeParameters.query().filter(
        CubeParameters.collection_id == self.datacube.id).first()

    if cube_parameters is None:
        raise RuntimeError(
            f'No parameters configured for data cube "{self.datacube.id}"')

    # This step acts like first execution. When no stac_url defined in cube
    # parameters but it was given, save it.
    if self.properties.get('stac_url') and not cube_parameters.metadata_.get('stac_url'):
        logging.debug(
            f'No "stac_url"/"token" configured yet for cube parameters.'
            f'Using {self.properties["stac_url"]}')
        meta = cube_parameters.metadata_.copy()
        meta['stac_url'] = self.properties['stac_url']
        meta['token'] = self.properties.get('token')
        cube_parameters.metadata_ = meta
        cube_parameters.save(commit=True)

    # Validate parameters
    cube_parameters.validate()

    # Pass the cube parameters to the data cube functions arguments
    props = deepcopy(cube_parameters.metadata_)
    props.update(self.properties)
    self.properties = props

    dstart = self.params['start_date']
    dend = self.params['end_date']

    # Identity cubes use a single interval covering the whole period;
    # otherwise the timeline is mounted from the temporal schema.
    if self.datacube.composite_function.alias == 'IDT':
        timeline = [[dstart, dend]]
    else:
        if self.datacube.composite_function.alias == 'STK':
            warnings.warn(
                'The composite function STK is deprecated. '
                'Use LCF (Least Cloud Cover First) instead.',
                DeprecationWarning, stacklevel=2)
        timeline = Timeline(**temporal_schema,
                            start_date=dstart,
                            end_date=dend).mount()

    where = [Tile.grid_ref_sys_id == self.datacube.grid_ref_sys_id]
    if self.params.get('tiles'):
        where.append(Tile.name.in_(self.params['tiles']))

    self.tiles = db.session.query(Tile).filter(*where).all()

    # Bands come from the warped (source) cube; the band map is built from
    # this cube's own bands.
    self.bands = Band.query().filter(
        Band.collection_id == self.warped_datacube.id).all()

    bands = self.datacube_bands
    self.band_map = {
        b.name: dict(name=b.name,
                     data_type=b.data_type,
                     nodata=b.nodata,
                     min_value=b.min_value,
                     max_value=b.max_value)
        for b in bands
    }

    if self.properties.get('reuse_from'):
        warnings.warn(
            'The parameter `reuse_from` is deprecated and will be removed in next version. '
            'Use `reuse_data_cube` instead.')
        common_bands = _common_bands()
        collection_bands = [
            b.name for b in self.datacube.bands if b.name not in common_bands
        ]
        reused_collection_bands = [b.name for b in self.bands]

        # The input cube (STK/MED) must have all bands of reused. Otherwise raise Error.
        if not set(collection_bands).issubset(set(reused_collection_bands)):
            raise RuntimeError(
                f'Reused data cube {self.warped_datacube.name} must have all bands of {self.datacube.name}'
            )

        # Extra filter to only use bands of Input data cube.
        self.bands = [b for b in self.bands if b.name in collection_bands]

    if cube_parameters.reuse_cube:
        self.reused_datacube = cube_parameters.reuse_cube

    for tile in self.tiles:
        tile_name = tile.name

        grs: GridRefSys = tile.grs
        grid_geom: sqlalchemy.Table = grs.geom_table
        srid_column = get_srid_column(grid_geom.c)

        # TODO: Raise exception when using a native grid argument
        # Use bands resolution and match with SRID context (degree x degree) etc.
        # Per-tile extents: min/max corners, x/y spans, and the tile feature
        # reprojected to WGS84.
        tile_stats = db.session.query(
            (func.ST_XMin(grid_geom.c.geom)).label('min_x'),
            (func.ST_YMax(grid_geom.c.geom)).label('max_y'),
            (func.ST_XMax(grid_geom.c.geom) - func.ST_XMin(grid_geom.c.geom)).label('dist_x'),
            (func.ST_YMax(grid_geom.c.geom) - func.ST_YMin(grid_geom.c.geom)).label('dist_y'),
            (func.ST_Transform(
                func.ST_SetSRID(grid_geom.c.geom, srid_column), 4326)).label('feature')
        ).filter(grid_geom.c.tile == tile_name).first()

        self.mosaics[tile_name] = dict(periods=dict())

        for interval in timeline:
            startdate = interval[0]
            enddate = interval[1]

            # Skip intervals falling outside the requested date range.
            if dstart is not None and startdate < dstart:
                continue
            if dend is not None and enddate > dend:
                continue

            period = f'{startdate}_{enddate}'

            self.mosaics[tile_name]['periods'][period] = {}
            self.mosaics[tile_name]['periods'][period][
                'start'] = startdate.strftime('%Y-%m-%d')
            self.mosaics[tile_name]['periods'][period][
                'end'] = enddate.strftime('%Y-%m-%d')
            self.mosaics[tile_name]['periods'][period][
                'dist_x'] = tile_stats.dist_x
            self.mosaics[tile_name]['periods'][period][
                'dist_y'] = tile_stats.dist_y
            self.mosaics[tile_name]['periods'][period][
                'min_x'] = tile_stats.min_x
            self.mosaics[tile_name]['periods'][period][
                'max_y'] = tile_stats.max_y
            self.mosaics[tile_name]['periods'][period][
                'feature'] = tile_stats.feature

            if self.properties.get('shape', None):
                self.mosaics[tile_name]['periods'][period][
                    'shape'] = self.properties['shape']