roi_rows = pgdb_helper.query(sql) #insert roi_geoms for each rap cell, all forecast hours for dt in forecast_times: rois = [] for roi_row in roi_rows: roi_name = str(uuid.uuid4()) datagranule_id = rap_granule_id values = (roi_name, rap_granule_id, dt, dt + timedelta(minutes=3), roi_row[0], roi_row[1], roi_row[2], roi_row[3], 1) rois.append(values) for t in range(-1, -5, -1): starttime = dt + t * timedelta(hours=1) endtime = starttime + timedelta(minutes=3) values = (roi_name, rap_granule_id, starttime, endtime, roi_row[0], roi_row[1], roi_row[2], roi_row[3], t) rois.append(values) sql = """ insert into forecast_roi_geoms (roi_name, rap_granule_id, starttime, endtime, geom, center, storm_poly, center_lat, center_lon, type) values (%s, %s, %s, %s, ST_GeomFromEWKT(%s), ST_GeomFromEWKT(%s), NULL, %s, %s, %s) """ pgdb_helper.insertMany(sql, rois) logger.info("Inserted %d forecast ROIs for %s, mask %s" % (len(rois), dt, config.mask_name)) pgdb_helper.submit("create index fcst_roi_geoms_center_indx on forecast_roi_geoms using gist(center)") pgdb_helper.submit("create index fcst_roi_geoms_geom_indx on forecast_roi_geoms using gist(geom)")
def write_to_pg_vector(self, provider_name, variable_name, granule_name, table_name,
                       srid, level, start_time, end_time=None, block_size=(100, 100),
                       overwrite=False, threshold=None, mask_name=None):
    """Vectorize the wrapped raster and load it into a new PostGIS table.

    Looks up the provider and variable rows, registers a ``DataGranule`` for
    this raster, creates ``table_name`` (id, datagranule_id, geom, value),
    bulk-inserts one row per vectorized shape, and finally builds a GIST
    index on ``geom``.

    :param provider_name: name of an existing row in the ``provider`` table
    :param variable_name: name of an existing row in the ``variable`` table
    :param granule_name:  name recorded on the new DataGranule
    :param table_name:    destination table to create and populate
    :param srid:          spatial reference id recorded on the granule and
                          used to tag every inserted geometry
    :param level:         vertical level recorded on the granule
    :param start_time:    granule start time
    :param end_time:      granule end time; ``None`` means open-ended and is
                          stored as ``datetime.max``
    :param block_size:    (rows, cols) block size passed to the vectorizer
    :param overwrite:     when True, drop any matching existing granule and
                          its data table before inserting the new one
    :param threshold:     accepted for interface compatibility; unused here
                          -- TODO confirm no caller relies on it
    :param mask_name:     accepted for interface compatibility; unused here
                          -- TODO confirm no caller relies on it
    """
    with SqaAccess(engine=self.engine) as orm_access:
        provider = orm_access.find('provider', {'name': provider_name})[0]
        variable = orm_access.find('variable', {'name': variable_name})[0]
        extent = self.raster.wkt_extent()

        # An omitted end time means the granule is open-ended.
        if end_time is None:
            end_time = datetime.max

        granule = DataGranule(provider=provider, variable=variable,
                              starttime=start_time, endtime=end_time,
                              extent=extent, level=level, name=granule_name,
                              srid=srid, table_name=table_name,
                              file_name=self.raster.dsname)

        if overwrite:
            # Find a granule matching on every identifying field and remove
            # both its metadata row and its data table before re-inserting.
            check_granule_result = orm_access.find(DataGranule, filterr={
                'provider_id': provider.id,
                'variable_id': variable.id,
                'level': level,
                'starttime': start_time,
                'endtime': end_time,
                'file_name': self.raster.dsname
            })
            if len(check_granule_result):
                check_granule = check_granule_result[0]
                self.config.logger.warn('found existing datagranule %d' % check_granule.id)
                # table_name comes from our own metadata row, not user input.
                sql = "drop table if exists %s;" % check_granule.table_name
                orm_access.session.execute(sql)
                orm_access.session.delete(check_granule)
                orm_access.session.commit()

        orm_access.insertOne(granule)

        pgdb_helper = PGDbHelper(conn_str=self.config.pgsql_conn_str(),
                                 echo=self.config.logsql)

        sql = """
            create table {table_name}
            (
                id serial not null,
                datagranule_id integer not null,
                geom geometry not null,
                value double precision,
                CONSTRAINT {table_name}_pkey PRIMARY KEY (id)
            )
            """.format(table_name=table_name)
        pgdb_helper.submit(sql)

        # Build the INSERT statement once and reuse it for every batch.
        # FIX: tag geometries with the caller-supplied srid; the original
        # hard-coded 4326 even though srid was a parameter and is recorded
        # on the granule.
        insert_sql = """
            insert into {table_name} (datagranule_id, geom, value)
            values (%s, st_geomfromtext(%s, {srid}), %s)
            """.format(table_name=table_name, srid=srid)

        # Stream shapes from the raster, flushing in batches to bound memory.
        values = []
        for shapes in self.raster.vector_generator(block_size=block_size):
            for shape in shapes:
                values.append((granule.id, shape[0].ExportToWkt(), shape[1]))
                if len(values) > 1000:
                    pgdb_helper.insertMany(insert_sql, values)
                    values = []

        # Flush the final partial batch.
        if len(values) > 0:
            pgdb_helper.insertMany(insert_sql, values)

        sql = """
            create index {table_name}_geom_idx on {table_name} using GIST(geom)
            """.format(table_name=table_name)
        pgdb_helper.submit(sql)