def update_meta(metatable, table):
    """
    After ingest/update, update the metatable registry to reflect
    table information.

    :param metatable: MetaTable instance to update.
    :param table: Table instance to update from.
    :returns: None
    """
    metatable.update_date_added()
    # Observed time range comes straight from the ingested rows.
    metatable.obs_from, metatable.obs_to = postgres_session.query(
        func.min(table.c.point_date),
        func.max(table.c.point_date)
    ).first()
    # Bounding box: envelope of the union of all geometries, tagged as WGS84.
    metatable.bbox = postgres_session.query(
        func.ST_SetSRID(
            func.ST_Envelope(func.ST_Union(table.c.geom)),
            4326
        )
    ).first()[0]
    # Exclude internal bookkeeping columns from the registered column list.
    metatable.column_names = {
        c.name: str(c.type) for c in metatable.column_info()
        if c.name not in {'geom', 'point_date', 'hash'}
    }
    postgres_session.add(metatable)
    try:
        postgres_session.commit()
    except:
        # A failed commit leaves the session unusable; roll back so later
        # callers can keep using it, then surface the original error.
        postgres_session.rollback()
        raise
def read_geoidheights_raster(session):
    """Reads the entire geoidheight raster out of the db and saves it locally.

    postgis is very slow for random access to rasters right now."""
    name = 'geoidheight-cached.tif'
    dataset = gdal.Open(name, GA_ReadOnly)
    if dataset is None:
        # Cache miss: pull the full raster out of the database once and
        # write it to disk for subsequent runs.
        logging.info('building geoidheight raster from nyc3dcars')
        # pylint: disable-msg=E1101
        union = func.ST_Union(GeoidHeight.rast)
        # pylint: enable-msg=E1101
        gtiff = func.ST_AsGDALRaster(union, 'GTiff')
        raster, = session.query(gtiff).one()
        with open(name, 'wb') as raster_file:
            raster_file.write(raster)
        dataset = gdal.Open(name, GA_ReadOnly)
    else:
        logging.info('geoidheight raster loaded from cached file')
    return parse_dataset(dataset)
def update_meta(metatable, table):
    """
    After ingest/update, update the metatable registry to reflect
    table information.

    :param metatable: MetaTable instance to update.
    :param table: Table instance to update from.
    :returns: None
    """
    metatable.update_date_added()

    # Observed time span of the ingested rows.
    observed = session.query(
        func.min(table.c.point_date),
        func.max(table.c.point_date)
    ).first()
    metatable.obs_from, metatable.obs_to = observed

    # Bounding box: envelope around the union of all geometries, in WGS84.
    envelope = func.ST_Envelope(func.ST_Union(table.c.geom))
    metatable.bbox = session.query(
        func.ST_SetSRID(envelope, 4326)
    ).first()[0]

    session.add(metatable)
    try:
        session.commit()
    except:
        # Roll back so the session stays usable, then re-raise.
        session.rollback()
        raise
def compiled_footprint(session, catalogs):
    """Returns a Geoalchemy2 footprint polygon from the compiled footprint."""
    # Single catalog: no union needed, use its footprint directly.
    if len(catalogs) == 1:
        return catalogs[0].footprint
    footprints = [catalog.footprint for catalog in catalogs]
    row = session.query(func.ST_Union(*footprints)).one()
    return row[0]
def __build_geometry(self):
    """ Un-executed query to find geometry from component parts """
    merged = func.ST_Union(DatasetFeature.geometry)
    query = (
        select([func.ST_SetSrid(merged, srid.world)])
        .select_from(DatasetFeature.__table__.join(Attitude))
        .where(Attitude.member_of == self.id)
        .group_by(Attitude.member_of)
    )
    return query
def test_raster_union(self):
    """ Test we can retrieve coordinates of a point from the database """
    # Union every raster row and export the result as a single TIFF.
    union_tiff = func.ST_AsTiff(func.ST_Union(ImageData.raster, type_=Raster))
    rasters = self.session.query(union_tiff).all()
    assert len(rasters) == 1
def get_plots_bounding_box_as_json(self):
    """Return the GeoJSON envelope covering the user's plots, or None."""
    # Admins see every plot; other roles only their own.
    if self.role.name == Role._ADMIN_ROLE:
        plots = Plot().queryObject().all()
    else:
        plots = self.plots
    positions = [plot.geom for plot in plots]
    if not positions:
        return None
    envelope = func.ST_Envelope(func.ST_Union(array(positions)))
    return self.session.scalar(func.ST_AsGeoJson(envelope))
def test_raster_union2(self):
    """ Test we can retrieve coordinates of a point from the database """
    # Union the rasters of rows 1 and 2 into a single merged raster.
    union_expr = func.ST_Union(ImageData.raster, type_=Raster)
    merged = (
        self.session.query(union_expr)
        .filter(ImageData.id.in_([1, 2]))
        .all()
    )
    assert len(merged) == 1
def update_meta(metadata, table):
    """
    After ingest/update, update the metadata registry to reflect
    :param metadata:
    :param table:
    """
    metadata.update_date_added()

    # Min/max point_date across the ingested rows.
    span = session.query(
        func.min(table.c.point_date),
        func.max(table.c.point_date)
    ).first()
    metadata.obs_from, metadata.obs_to = span

    # Envelope of the union of all geometries, stamped as WGS84.
    bbox = session.query(
        func.ST_SetSRID(
            func.ST_Envelope(func.ST_Union(table.c.geom)), 4326
        )
    ).first()[0]
    metadata.bbox = bbox

    session.add(metadata)
    try:
        session.commit()
    except:
        # Keep the session usable after a failed commit, then re-raise.
        session.rollback()
        raise
def getGeometryTest(self, TestGeom):
    # NOTE: Python 2 code (print statements) — keep as-is until ported.
    # Fetch the first sampling feature and print its geometry as WKT,
    # then print the union of that geometry with the supplied WKT geometry.
    # TestGeom is expected to be a WKT string — TODO confirm with callers.
    Geom = self._session.query(Samplingfeature).first()
    print "Queried Geometry: ", self._session.query(Geom.FeatureGeometry.ST_AsText()).first()
    # ST_Union of the stored geometry and the test geometry, rendered as WKT.
    GeomText = self._session.query(func.ST_Union(Geom.FeatureGeometry, func.ST_GeomFromText(TestGeom)).ST_AsText()).first()
    print GeomText
def calculate_summary(self, product_name: str, time: Range) -> TimePeriodOverview:
    """
    Create a summary of the given product/time range.

    Aggregates dataset footprints, counts, sizes and per-day/per-region
    tallies for the matching datasets into a single TimePeriodOverview.
    """
    log = self.log.bind(product_name=product_name, time=time)
    log.debug("summary.query")

    begin_time, end_time, where_clause = self._where(product_name, time)

    # First pass: aggregate per SRID, since ST_Union requires all inputs
    # in one SRS. Each srid group is transformed to the target SRID.
    select_by_srid = (select((
        func.ST_SRID(DATASET_SPATIAL.c.footprint).label("srid"),
        func.count().label("dataset_count"),
        func.ST_Transform(
            func.ST_Union(DATASET_SPATIAL.c.footprint),
            self._target_srid(),
            type_=Geometry(),
        ).label("footprint_geometry"),
        func.sum(DATASET_SPATIAL.c.size_bytes).label("size_bytes"),
        func.max(DATASET_SPATIAL.c.creation_time).label(
            "newest_dataset_creation_time"),
    )).where(where_clause).group_by("srid").alias("srid_summaries"))

    # Union all srid groups into one summary.
    # ST_Buffer(geom, 0) is presumably here to repair invalid geometries
    # before the final union — TODO confirm.
    result = self._engine.execute(
        select((
            func.sum(
                select_by_srid.c.dataset_count).label("dataset_count"),
            func.array_agg(select_by_srid.c.srid).label("srids"),
            func.sum(select_by_srid.c.size_bytes).label("size_bytes"),
            func.ST_Union(
                func.ST_Buffer(select_by_srid.c.footprint_geometry, 0),
                type_=Geometry(srid=self._target_srid()),
            ).label("footprint_geometry"),
            func.max(select_by_srid.c.newest_dataset_creation_time).label(
                "newest_dataset_creation_time"),
            func.now().label("summary_gen_time"),
        )))

    rows = result.fetchall()
    log.debug("summary.query.done", srid_rows=len(rows))
    # The outer select has no GROUP BY, so it must yield exactly one row.
    assert len(rows) == 1
    row = dict(rows[0])

    # dataset_count is NULL when no datasets matched; normalise to 0.
    row["dataset_count"] = int(
        row["dataset_count"]) if row["dataset_count"] else 0
    if row["footprint_geometry"] is not None:
        row["footprint_crs"] = self._get_srid_name(
            row["footprint_geometry"].srid)
        # Convert the WKB element into a shapely geometry.
        row["footprint_geometry"] = geo_shape.to_shape(
            row["footprint_geometry"])
    else:
        row["footprint_crs"] = None
    # Map the aggregated srid list to human-readable CRS names.
    row["crses"] = None
    if row["srids"] is not None:
        row["crses"] = {self._get_srid_name(s) for s in row["srids"]}
    del row["srids"]

    # Convert from Python Decimal
    if row["size_bytes"] is not None:
        row["size_bytes"] = int(row["size_bytes"])

    has_data = row["dataset_count"] > 0

    log.debug("counter.calc")

    # Initialise all requested days as zero
    # NOTE(review): `closed="left"` is deprecated in newer pandas in favour
    # of `inclusive=` — confirm the pinned pandas version.
    day_counts = Counter({
        d.date(): 0
        for d in pd.date_range(begin_time, end_time, closed="left")
    })
    region_counts = Counter()
    if has_data:
        # Per-day dataset counts, bucketed in the configured time zone.
        day_counts.update(
            Counter({
                day.date(): count
                for day, count in self._engine.execute(
                    select([
                        func.date_trunc(
                            "day",
                            DATASET_SPATIAL.c.center_time.op(
                                "AT TIME ZONE")(self.grouping_time_zone),
                        ).label("day"),
                        func.count(),
                    ]).where(where_clause).group_by("day"))
            }))
        # Per-region dataset counts.
        region_counts = Counter({
            item: count
            for item, count in self._engine.execute(
                select([
                    DATASET_SPATIAL.c.region_code.label("region_code"),
                    func.count(),
                ]).where(where_clause).group_by("region_code"))
        })

    summary = TimePeriodOverview(
        **row,
        timeline_period="day",
        time_range=Range(begin_time, end_time),
        timeline_dataset_counts=day_counts,
        region_dataset_counts=region_counts,
        # TODO: filter invalid from the counts?
        footprint_count=row["dataset_count"] or 0,
    )

    log.debug(
        "summary.calc.done",
        dataset_count=summary.dataset_count,
        footprints_missing=summary.dataset_count - summary.footprint_count,
    )
    return summary
def find_intersected_area(self, search_area: str) -> Iterable[object]:
    """Query the union of intersections between *search_area* (WKT) and
    the polygons that intersect it."""
    search_geom = func.ST_GeomFromText(search_area, 4326)
    intersected = func.ST_Union(
        func.ST_Intersection(search_geom, Polygon.geom)
    ).label('intersected')
    return (
        self.session.query(intersected)
        .filter(Polygon.geom.ST_Intersects(search_geom))
    )
def upgrade():
    """Add the pre-2016 French regions ('REG_1970') to the ref_geo registry,
    building each region's geometry as the union of its departments."""
    conn = op.get_bind()
    metadata = sa.MetaData(bind=conn)
    # Register the new area type for pre-1970-reform regions.
    area_type = sa.Table('bib_areas_types', metadata, schema='ref_geo',
                         autoload_with=conn)
    conn.execute(area_type.insert().values(
        type_name='Ancienne régions',
        type_code='REG_1970',
        type_desc='Type anciennes régions',
        ref_name='Fusion départements IGN admin_express'))
    area = sa.Table('l_areas', metadata, schema='ref_geo', autoload_with=conn)
    # Prepared insert: the region geometry is a correlated SELECT that
    # unions the geometries of the listed department codes.
    insert = area.insert({
        'id_type': func.ref_geo.get_id_area_type('REG_1970'),
        'area_name': sa.bindparam('name'),
        'area_code': sa.bindparam('code'),
        'geom': sa.select([func.ST_Multi(func.ST_Union(area.c.geom))]) \
            .where(sa.and_(
                area.c.id_type == func.ref_geo.get_id_area_type('DEP'),
                area.c.area_code.in_(sa.bindparam('deps', expanding=True)))),
        'enable': False,
    })
    # Note: 'expanding' parameters can't be used with executemany(),
    # so each region is inserted with its own execute() call below.
    # Note: regions whose boundaries were not changed by the 2016 reform
    # are excluded from this list.
    for params in [
        {
            'name': 'Alsace',
            'code': 42,
            'deps': ['67', '68']
        },
        {
            'name': 'Aquitaine',
            'code': 72,
            'deps': ['24', '33', '40', '47', '64']
        },
        {
            'name': 'Auvergne',
            'code': 83,
            'deps': ['03', '15', '43', '63']
        },  # approximate: departments 03 and 43 are only partially in the region
        {
            'name': 'Bourgogne',
            'code': 26,
            'deps': ['21', '58', '71', '89']
        },
        {
            'name': 'Centre',
            'code': 24,
            'deps': ['18', '28', '36', '37', '41', '45']
        },
        {
            'name': 'Champagne-Ardenne',
            'code': 21,
            'deps': ['08', '10', '51', '52']
        },
        {
            'name': 'Franche-Comté',
            'code': 43,
            'deps': ['25', '39', '70', '90']
        },
        {
            'name': 'Languedoc-Roussillon',
            'code': 91,
            'deps': ['11', '30', '34', '48', '66']
        },
        {
            'name': 'Limousin',
            'code': 74,
            'deps': ['19', '23', '87']
        },
        {
            'name': 'Lorraine',
            'code': 41,
            'deps': ['54', '55', '57', '88']
        },
        {
            'name': 'Midi-Pyrénées',
            'code': 73,
            'deps': ['9', '12', '31', '32', '46', '65', '81', '82']
        },
        {
            'name': 'Nord-Pas-de-Calais',
            'code': 31,
            'deps': ['59', '62']
        },
        {
            'name': 'Basse-Normandie',
            'code': 25,
            'deps': ['14', '50', '61']
        },
        {
            'name': 'Haute-Normandie',
            'code': 23,
            'deps': ['27', '76']
        },
        {
            'name': 'Picardie',
            'code': 22,
            'deps': ['02', '60', '80']
        },
        {
            'name': 'Poitou-Charentes',
            'code': 54,
            'deps': ['16', '17', '79', '86']
        },
        {
            'name': 'Rhône-Alpes',
            'code': 82,
            'deps': ['01', '07', '26', '38', '42', '69', '73', '74']
        },
    ]:
        conn.execute(insert, params)