def get_closest_node(lon, lat, session):
    """Return the id of the routing node (source or target) nearest to (lon, lat)."""
    # Closest way to the given point, ordered by distance on the geometry column.
    c_node = session.query(Ways).order_by(
        Ways.the_geom.ST_Distance(
            func.ST_SetSRID(func.ST_Point(lon, lat), 4326))).limit(1).all()
    # Distance from the point to the way's source vertex (x1, y1).
    dist_source = session.query(
        func.ST_Distance(
            func.ST_SetSRID(func.ST_Point(c_node[0].x1, c_node[0].y1), 4326),
            func.ST_SetSRID(func.ST_Point(lon, lat), 4326))).all()
    # Distance from the point to the way's target vertex (x2, y2).
    dist_target = session.query(
        func.ST_Distance(
            func.ST_SetSRID(func.ST_Point(c_node[0].x2, c_node[0].y2), 4326),
            func.ST_SetSRID(func.ST_Point(lon, lat), 4326))).all()
    # Each distance query returns a single row with a single value.
    if dist_source[0][0] < dist_target[0][0]:
        return c_node[0].source
    return c_node[0].target
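
# A minimal usage sketch, assuming a `Ways` model mapped over an
# osm2pgrouting-style table in a local PostGIS database; the DSN, the
# `Session` factory name and the coordinates below are hypothetical.
from sqlalchemy import create_engine
from sqlalchemy.orm import sessionmaker

engine = create_engine('postgresql://user:password@localhost/routing')
Session = sessionmaker(bind=engine)

with Session() as session:
    node_id = get_closest_node(-46.6333, -23.5505, session)
    print(f'Nearest routing node: {node_id}')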
def list_cube_items(self, cube_id: str, bbox: str = None, start: str = None,
                    end: str = None, tiles: str = None, page: int = 1,
                    per_page: int = 10):
    """Retrieve all items of a given data cube, with optional temporal, tile and spatial filters."""
    cube = self.get_cube_or_404(cube_id=cube_id)

    where = [Item.collection_id == cube.id, Tile.id == Item.tile_id]

    # Temporal filter
    if start:
        where.append(Item.start_date >= start)
    if end:
        where.append(Item.end_date <= end)

    # Tile (string) filter
    if tiles:
        tiles = tiles.split(',') if isinstance(tiles, str) else tiles
        where.append(Tile.name.in_(tiles))

    # Spatial filter
    if bbox:
        xmin, ymin, xmax, ymax = [
            float(coord) for coord in bbox.split(',')
        ]
        where.append(
            func.ST_Intersects(
                func.ST_SetSRID(Item.geom, 4326),
                func.ST_MakeEnvelope(xmin, ymin, xmax, ymax, 4326)))

    paginator = db.session.query(Item).filter(*where).order_by(
        Item.start_date.desc()).paginate(int(page), int(per_page),
                                         error_out=False)

    result = []
    for item in paginator.items:
        obj = Serializer.serialize(item)
        obj['geom'] = None
        obj['min_convex_hull'] = None
        obj['tile_id'] = item.tile.name
        if item.assets.get('thumbnail'):
            obj['quicklook'] = item.assets['thumbnail']['href']
        del obj['assets']
        result.append(obj)

    return dict(items=result,
                page=page,
                per_page=per_page,
                total_items=paginator.total,
                total_pages=paginator.pages), 200
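
# A hypothetical call, assuming this method lives on a controller whose
# instance is available as `controller`; the cube id, bounding box and
# tile names below are illustrative, not taken from this listing.
payload, status = controller.list_cube_items(
    cube_id='1',
    bbox='-54.0,-12.0,-53.0,-11.0',  # xmin, ymin, xmax, ymax in EPSG:4326
    start='2020-01-01',
    end='2020-12-31',
    tiles='011009,011010',
    page=1,
    per_page=20)
assert status == 200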
def random_point_from_district(d_name: str):
    """Return one random point (a MultiPoint of size 1) inside the named district."""
    district_cat_id = 163
    sql = sqlalchemy.select([
        func.ST_SetSRID(func.ST_GeneratePoints(table.model.c.geom, 1), 4326)
    ]).where(
        and_(table.model.c.name == d_name,
             table.model.c.category_id == district_cat_id))
    result = connection.execute(sql).fetchone()
    if result is not None:
        return result[0]
    return None
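
# A sketch of decoding the returned value, assuming GeoAlchemy2 is installed
# so the result comes back as a WKBElement; the district name is hypothetical.
from geoalchemy2.shape import to_shape

geom = random_point_from_district('Centro')
if geom is not None:
    # ST_GeneratePoints(geom, 1) yields a MultiPoint with a single member.
    point = to_shape(geom).geoms[0]
    print(point.x, point.y)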
def get_grs_schema(cls, grs_id,
                   bbox: Tuple[float, float, float, float] = None,
                   tiles=None):
    """Retrieve a Grid Schema definition with tiles associated."""
    schema: GridRefSys = GridRefSys.query().filter(
        GridRefSys.id == grs_id).first()
    if schema is None:
        return 'GRS {} not found.'.format(grs_id), 404

    geom_table = schema.geom_table
    srid_column = get_srid_column(geom_table.c, default_srid=4326)
    where = []

    if bbox is not None:
        x_min, y_min, x_max, y_max = bbox
        where.append(
            func.ST_Intersects(
                func.ST_MakeEnvelope(x_min, y_min, x_max, y_max, 4326),
                func.ST_Transform(
                    func.ST_SetSRID(geom_table.c.geom, srid_column), 4326)))

    if tiles:
        where.append(geom_table.c.tile.in_(tiles))

    tiles = db.session.query(
        geom_table.c.tile,
        func.ST_AsGeoJSON(
            func.ST_Transform(
                func.ST_SetSRID(geom_table.c.geom, srid_column), 4326),
            6, 3).cast(sqlalchemy.JSON).label('geom_wgs84')
    ).filter(*where).all()

    dump_grs = Serializer.serialize(schema)
    dump_grs['tiles'] = [
        dict(id=t.tile, geom_wgs84=t.geom_wgs84) for t in tiles
    ]

    return dump_grs, 200
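
# Hypothetical usage, assuming this classmethod is defined on a controller
# class, here called CubeController (name assumed); the grs_id, bbox and
# tile list are illustrative.
dump_grs, status = CubeController.get_grs_schema(
    grs_id=1,
    bbox=(-54.0, -12.0, -53.0, -11.0),
    tiles=['011009'])
if status == 200:
    for tile in dump_grs['tiles']:
        print(tile['id'])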
def orchestrate(self):
    """Orchestrate the data cube definition and prepare the temporal resolutions."""
    self.datacube = Collection.query().filter(
        Collection.name == self.params['datacube']).one()

    temporal_schema = self.datacube.temporal_composition_schema

    cube_parameters: CubeParameters = CubeParameters.query().filter(
        CubeParameters.collection_id == self.datacube.id).first()
    if cube_parameters is None:
        raise RuntimeError(
            f'No parameters configured for data cube "{self.datacube.id}"')

    # This step acts like a first execution: when no stac_url is defined in
    # the cube parameters but one was given, save it.
    if self.properties.get('stac_url') and \
            not cube_parameters.metadata_.get('stac_url'):
        logging.debug(
            f'No "stac_url"/"token" configured yet for cube parameters. '
            f'Using {self.properties["stac_url"]}')
        meta = cube_parameters.metadata_.copy()
        meta['stac_url'] = self.properties['stac_url']
        meta['token'] = self.properties.get('token')
        cube_parameters.metadata_ = meta
        cube_parameters.save(commit=True)

    # Validate parameters
    cube_parameters.validate()

    # Pass the cube parameters to the data cube function arguments
    props = deepcopy(cube_parameters.metadata_)
    props.update(self.properties)
    self.properties = props

    dstart = self.params['start_date']
    dend = self.params['end_date']

    if self.datacube.composite_function.alias == 'IDT':
        timeline = [[dstart, dend]]
    else:
        if self.datacube.composite_function.alias == 'STK':
            warnings.warn(
                'The composite function STK is deprecated. '
                'Use LCF (Least Cloud Cover First) instead.',
                DeprecationWarning, stacklevel=2)
        timeline = Timeline(**temporal_schema, start_date=dstart,
                            end_date=dend).mount()

    where = [Tile.grid_ref_sys_id == self.datacube.grid_ref_sys_id]
    if self.params.get('tiles'):
        where.append(Tile.name.in_(self.params['tiles']))

    self.tiles = db.session.query(Tile).filter(*where).all()
    self.bands = Band.query().filter(
        Band.collection_id == self.warped_datacube.id).all()

    bands = self.datacube_bands
    self.band_map = {
        b.name: dict(name=b.name,
                     data_type=b.data_type,
                     nodata=b.nodata,
                     min_value=b.min_value,
                     max_value=b.max_value)
        for b in bands
    }

    if self.properties.get('reuse_from'):
        warnings.warn(
            'The parameter `reuse_from` is deprecated and will be removed in '
            'the next version. Use `reuse_data_cube` instead.')
        common_bands = _common_bands()
        collection_bands = [
            b.name for b in self.datacube.bands if b.name not in common_bands
        ]
        reused_collection_bands = [b.name for b in self.bands]
        # The input cube (STK/MED) must have all bands of the reused cube;
        # otherwise, raise an error.
        if not set(collection_bands).issubset(set(reused_collection_bands)):
            raise RuntimeError(
                f'Reused data cube {self.warped_datacube.name} must have '
                f'all bands of {self.datacube.name}')
        # Extra filter to only use bands of the input data cube.
        self.bands = [b for b in self.bands if b.name in collection_bands]

    if cube_parameters.reuse_cube:
        self.reused_datacube = cube_parameters.reuse_cube

    for tile in self.tiles:
        tile_name = tile.name
        grs: GridRefSys = tile.grs
        grid_geom: sqlalchemy.Table = grs.geom_table
        srid_column = get_srid_column(grid_geom.c)
        # TODO: Raise an exception when using a native grid argument.
        # Use band resolutions and match with the SRID context
        # (degree x degree) etc.
        # Compute the tile extents and its WGS84 footprint in a single query.
        tile_stats = db.session.query(
            (func.ST_XMin(grid_geom.c.geom)).label('min_x'),
            (func.ST_YMax(grid_geom.c.geom)).label('max_y'),
            (func.ST_XMax(grid_geom.c.geom) -
             func.ST_XMin(grid_geom.c.geom)).label('dist_x'),
            (func.ST_YMax(grid_geom.c.geom) -
             func.ST_YMin(grid_geom.c.geom)).label('dist_y'),
            (func.ST_Transform(
                func.ST_SetSRID(grid_geom.c.geom, srid_column),
                4326)).label('feature')).filter(
                    grid_geom.c.tile == tile_name).first()

        self.mosaics[tile_name] = dict(periods=dict())

        for interval in timeline:
            startdate = interval[0]
            enddate = interval[1]

            # Skip periods that fall outside the requested date range.
            if dstart is not None and startdate < dstart:
                continue
            if dend is not None and enddate > dend:
                continue

            period = f'{startdate}_{enddate}'

            self.mosaics[tile_name]['periods'][period] = dict(
                start=startdate.strftime('%Y-%m-%d'),
                end=enddate.strftime('%Y-%m-%d'),
                dist_x=tile_stats.dist_x,
                dist_y=tile_stats.dist_y,
                min_x=tile_stats.min_x,
                max_y=tile_stats.max_y,
                feature=tile_stats.feature)

            if self.properties.get('shape', None):
                self.mosaics[tile_name]['periods'][period]['shape'] = \
                    self.properties['shape']
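
# Illustrative shape of the `self.mosaics` structure built above, for one
# tile and one monthly compositing period; the tile name and all values
# below are made up for illustration only.
example_mosaics = {
    '011009': {
        'periods': {
            '2020-01-01_2020-01-31': {
                'start': '2020-01-01',
                'end': '2020-01-31',
                'dist_x': 1.5,           # tile width, in grid CRS units
                'dist_y': 1.0,           # tile height, in grid CRS units
                'min_x': -54.0,          # left edge of the tile
                'max_y': -11.0,          # top edge of the tile
                'feature': '...',        # tile footprint reprojected to EPSG:4326
                'shape': [10980, 10980]  # only present when `shape` is given
            }
        }
    }
}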