Ejemplo n.º 1
0
 def latlon2xy(self, lat, lon, srid_to):
     """Reproject a WGS84 lat/lon point into the spatial reference `srid_to`.

     Delegates the transform to PostGIS and parses the GeoJSON result,
     returning the transformed coordinates as a tuple (x, y).
     """
     helper = PGDbHelper(self.config.pgsql_conn_str())
     sql = """
         SELECT ST_AsGeoJSON(ST_Transform(ST_GeomFromText('POINT(%f %f)', %d), %d))
     """ % (lon, lat, SRID_WGS84, srid_to)
     rows = helper.query(sql)
     geojson_obj = json.loads(rows[0][0])
     return tuple(geojson_obj["coordinates"])
Ejemplo n.º 2
0
import gzip
import os
from datetime import datetime, timedelta
from ci.models.gdal_raster import GDALRaster
from ci.util.nc_file_helper import nc_get_1d_vars_as_list
from ci.ingest import config, base_ingestor, proj_helper
from ci.config import get_instance
from ci.db.pgdbhelper import PGDbHelper
from ci.util.common import TimeMe

# Candidate tile block sizes: square blocks from (20, 20) up to (980, 980)
# in 20-pixel steps.  A comprehension replaces the manual append loop.
block_sizes = [(i * 20, i * 20) for i in range(1, 50)]

# Module-level shared config and Postgres helper; SQL statements are echoed
# to the log when conf.logsql is enabled.
conf = get_instance()
pgdb_helper = PGDbHelper(conn_str=conf.pgsql_conn_str(), echo=conf.logsql)


def cb(x):
    """Binary reclassification callback: 0 for values below 35, 1 otherwise."""
    return 0 if x < 35 else 1


def process_mrms_file(mrms_file):
    """Ingest a single MRMS reflectivity file (body continues past this excerpt)."""

    # Granules produced from this file are tagged with the MRMS provider
    # and the REFL (reflectivity) variable.
    provider_name = "MRMS"
    variable_name = "REFL"

    # Split path/extension; presumably used below to detect gzipped input
    # (gzip is imported at module level) — TODO confirm against full source.
    ext_parts = os.path.splitext(mrms_file)
Ejemplo n.º 3
0
from datetime import timedelta, datetime
from ci.db.pgdbhelper import PGDbHelper
from ci.config import get_instance
from ci.util.proj_helper import ProjHelper
from ci.models.spatial_reference import SRID_RAP


__author__ = 'ujjwal'


# Single fixed forecast timestamp this script operates on.
forecast_time = datetime(year=2014, month=8, day=7, hour=20, minute=30, second=0)
forecast_times = [forecast_time]

#get accessor to the old db
config = get_instance()
pgdb_helper = PGDbHelper(conn_str=config.pgsql_conn_str(), echo=config.logsql)
proj_helper = ProjHelper(config=config)
logger = config.logger

# Region-of-interest radius and mask name come from the shared config.
radius = config.ci_roi_radius
mask_name = config.mask_name

#create table
# Drop and recreate forecast_roi_geoms so each run starts from a clean table
# (the CREATE TABLE statement continues beyond this excerpt).
pgdb_helper.submit("drop table if exists forecast_roi_geoms;")
pgdb_helper.submit(
    """
        create table forecast_roi_geoms
        (
            id serial not null,
            roi_name text not null,
            rap_granule_id int not null,
Ejemplo n.º 4
0
 def create_gist_index(self, table_name, index_name, column_name="rast"):
     """Build a GIST index named `index_name` on `table_name`.`column_name`.

     Thin delegation: opens a fresh PGDbHelper from the instance config and
     forwards the call.  Defaults to indexing the raster column "rast".
     """
     helper = PGDbHelper(conn_str=self.config.pgsql_conn_str())
     helper.create_gist_index(table_name, index_name, column_name)
Ejemplo n.º 5
0
    # Ingest each GTOPO30 elevation file as a static (non-time-varying) granule.
    for gtopo_file in files:
        granule_name = "GTOPO30Elev"
        srid = 4326
        band_num = 1
        block_size = (50, 50)
        # Elevation is static; a nominal 1979-01-01 start time is used with
        # datetime.max as the end time so the granule covers all query times.
        dtime = datetime(year=1979, month=1, day=1, hour=0, minute=0, second=0)
        level = 0

        ras = GDALRaster(gtopo_file, srid)
        ras.nodata_value = -9999
        # Clip ingestion to the configured bounding box in this SRID.
        bbox = proj_helper.get_bbox(srid)

        base_ingestor.ingest(ras=ras,
                             provider_name=provider_name,
                             variable_name=variable_name,
                             granule_name=granule_name,
                             table_name=granule_name,
                             srid=srid,
                             level=level,
                             block_size=block_size,
                             dynamic=False,
                             start_time=dtime,
                             end_time=datetime.max,
                             subset_bbox=bbox,
                             overwrite=True)

        #create slope and aspect rasters
        pgdb_helper = PGDbHelper(conn_str=config.pgsql_conn_str())
        pgdb_helper.insert_slope_and_aspect_rasters(granule_name,
                                                    overwrite=True)
Ejemplo n.º 6
0
            return response

    return '', 204


if __name__ == '__main__':

    # Load the default configuration; the commented lines show how to point
    # this server at an alternate YAML config file instead.
    config = get_instance()

    #config_file = os.path.join("/home/ujjwal/DPR_SM/python/dpr_sm/ingest/lis_config.yml")
    #config = get_instance(config_file=config_file)

    # Map existing DB tables onto SQLAlchemy models before serving.
    engine = create_engine(config.sqa_connection_string())

    mapper = Mapper(engine=engine)
    mapper.map_tables()

    # Make sure the spatial and granule-id indexes exist on rastertile so
    # tile queries stay fast; "ensure" calls are presumably idempotent.
    pgdb_access = PGDbHelper(config.pgsql_conn_str(), echo=config.logsql)
    pgdb_access.ensure_gist_index('rastertile', 'rastertile_rast_gist_idx',
                                  'rast')
    pgdb_access.ensure_datagranule_id_index("rastertile",
                                            "rastertile_datagranule_id_idx",
                                            "datagranule_id")

    #check and create tile cache dir if necessary
    tiles_dir = os.path.join(os.path.dirname(__file__), "tiles")
    if not os.path.exists(tiles_dir):
        os.mkdir(tiles_dir)

    # Start the web app (Flask-style) on port 5001.
    app.run(port=5001)
Ejemplo n.º 7
0
import os
from sqlalchemy import create_engine
from ci.db.pgdbhelper import PGDbHelper
from ci.db.sqa.mapper import Mapper
from ci.config import get_instance

__author__ = 'ujjwal'

# Shared config, DB helper (always echoing SQL here), and SQLAlchemy mapping.
config = get_instance()
pgdb_helper = PGDbHelper(conn_str=config.pgsql_conn_str(), echo=True)

engine = create_engine(config.sqa_connection_string())
mapper = Mapper(engine=engine)
mapper.map_tables()

# Sentinel written to the output for missing values.
missing_data = -999


def generate_output_file(fname):
    """Write forecast ROI statistics to `fname` (body continues past this excerpt).

    Any existing file at `fname` is deleted first so the output is rebuilt
    from scratch.
    """
    if os.path.exists(fname):
        os.remove(fname)

    with open(fname, 'w') as of:
        # Collect the distinct variable names present in forecast_roi_stats.
        sql = """
            select distinct(var.name)
            from forecast_roi_stats rs
            left join variable var on var.id=rs.variable_id
            order by var.name asc
            """
        rows = pgdb_helper.query(sql)
        var_names = []
Ejemplo n.º 8
0
from sqlalchemy import create_engine
from ci.db.sqa.access import SqaAccess
from ci.db.pgdbhelper import PGDbHelper
from ci.db.sqa.mapper import Mapper
from ci.db.sqa.models import DataGranule, Variable, Provider
from ci.ingest import config, logger

__author__ = 'ujjwal'

# SQLAlchemy engine plus a raw Postgres helper; map existing tables to models.
engine = create_engine(config.sqa_connection_string())
pgdb_helper = PGDbHelper(conn_str=config.pgsql_conn_str())

mapper = Mapper(engine=engine)
mapper.map_tables()


def get_granules(var_name, start_date, end_date):
    """Return RAP datagranule rows for `var_name` overlapping a time window.

    Each row is (id, starttime, endtime), ordered by starttime ascending.
    Granules match when their [starttime, endtime) interval OVERLAPS
    (start_date, end_date).
    """
    # Parameterized query instead of %-string interpolation: avoids SQL
    # injection and quoting bugs.  pgdb_helper.query accepts a values tuple
    # (see its other call sites in this codebase).
    sql = """
        select datagranule.id, datagranule.starttime, datagranule.endtime from datagranule
        join provider on provider.id = datagranule.provider_id
        join variable on variable.id = datagranule.variable_id
        where provider.name like 'RAP' and variable.name like %s
        and ((%s, %s) overlaps (datagranule.starttime, datagranule.endtime))
        order by datagranule.starttime asc
    """
    rows = pgdb_helper.query(sql, (var_name, start_date, end_date))
    return rows


if __name__ == '__main__':
Ejemplo n.º 9
0
from multiprocessing import Pool
from ci.db.pgdbhelper import PGDbHelper
from ci.config import get_instance
from ci.util.common import TimeMe

__author__ = 'ujjwal'

#get accessor to the old db
config = get_instance()
logger = config.logger

# Processing window comes from the shared config.
start_date = config.start_date
end_date = config.end_date
pgdb_helper = PGDbHelper(conn_str=config.pgsql_conn_str(), echo=config.logsql)

# Reprojected raster tiles are read from this table; `cache` memoizes
# per-run lookups (populated further down, beyond this excerpt).
RASTERTILE_TABLE = "rastertile_reproj"
cache = {}

#create table for roi stats
# Rebuild forecast_roi_stats from scratch each run (CREATE TABLE statement
# continues beyond this excerpt).
pgdb_helper.submit("""
        drop table if exists forecast_roi_stats;
    """)
pgdb_helper.submit("""
        create table forecast_roi_stats
        (
            id serial not null,
            roi_id int not null,
            roi_name text not null,
            count int not null,
            sum float not null,
            mean float not null,
Ejemplo n.º 10
0
    def write_to_pg_vector(self,
                           provider_name,
                           variable_name,
                           granule_name,
                           table_name,
                           srid,
                           level,
                           start_time,
                           end_time=None,
                           block_size=(100, 100),
                           overwrite=False,
                           threshold=None,
                           mask_name=None):
        """Vectorize self.raster block-by-block and store it in PostGIS.

        Registers a DataGranule row describing the raster, (re)creates
        `table_name` with (id, datagranule_id, geom, value) columns,
        bulk-inserts one geometry/value pair per vectorized shape in batches,
        and finally builds a GIST index on the geometry column.

        If `end_time` is None the granule is treated as open-ended
        (datetime.max).  With `overwrite=True`, a pre-existing granule with
        the same provider/variable/level/times/file is dropped (both its data
        table and its DataGranule row) before inserting the new one.

        NOTE(review): `threshold` and `mask_name` are accepted but never used
        in this body — confirm whether they should filter the vector output.
        """

        with SqaAccess(engine=self.engine) as orm_access:
            # Resolve provider/variable rows by name; assumes both exist.
            provider = orm_access.find('provider', {'name': provider_name})[0]
            variable = orm_access.find('variable', {'name': variable_name})[0]

            extent = self.raster.wkt_extent()
            if end_time is None:
                end_time = datetime.max

            granule = DataGranule(provider=provider,
                                  variable=variable,
                                  starttime=start_time,
                                  endtime=end_time,
                                  extent=extent,
                                  level=level,
                                  name=granule_name,
                                  srid=srid,
                                  table_name=table_name,
                                  file_name=self.raster.dsname)
            if overwrite:
                # Look for an existing granule that matches on every
                # identifying field; if found, drop its table and row.
                check_granule_result = orm_access.find(DataGranule,
                                                       filterr={
                                                           'provider_id':
                                                           provider.id,
                                                           'variable_id':
                                                           variable.id,
                                                           'level':
                                                           level,
                                                           'starttime':
                                                           start_time,
                                                           'endtime':
                                                           end_time,
                                                           'file_name':
                                                           self.raster.dsname
                                                       })

                if len(check_granule_result):
                    check_granule = check_granule_result[0]
                    self.config.logger.warn('found existing datagranule %d' %
                                            check_granule.id)

                    sql = "drop table if exists %s;" % check_granule.table_name
                    orm_access.session.execute(sql)

                    #orm_access.delete(DataGranule, id=check_granule.id)
                    orm_access.session.delete(check_granule)
                    orm_access.session.commit()

            orm_access.insertOne(granule)

            # A raw Postgres helper handles DDL and bulk inserts; the ORM
            # session is only used for the granule bookkeeping above.
            pgdb_helper = PGDbHelper(conn_str=self.config.pgsql_conn_str(),
                                     echo=self.config.logsql)
            sql = """
                create table {table_name}
                (
                    id serial not null,
                    datagranule_id integer not null,
                    geom geometry not null,
                    value double precision,
                    CONSTRAINT {table_name}_pkey PRIMARY KEY (id)
                )
                """.format(table_name=table_name)
            #orm_access.session.execute(sql)
            pgdb_helper.submit(sql)

            # Accumulate (granule_id, WKT geometry, value) triples and flush
            # in batches of ~1000 rows to bound memory use.
            values = []
            for shapes in self.raster.vector_generator(block_size=block_size):
                for shape in shapes:
                    values.append(
                        (granule.id, shape[0].ExportToWkt(), shape[1]))
                    if len(values) > 1000:
                        sql = """
                            insert into {table_name} (datagranule_id, geom, value) values (%s, st_geomfromtext(%s, 4326), %s)
                            """.format(table_name=table_name)
                        #orm_access.session.execute(sql, values)
                        pgdb_helper.insertMany(sql, values)
                        values = []

            # Flush any remaining partial batch.
            if len(values) > 0:
                sql = """
                        insert into {table_name} (datagranule_id, geom, value) values (%s, st_geomfromtext(%s, 4326), %s)
                    """.format(table_name=table_name)
                pgdb_helper.insertMany(sql, values)

        # Index is built after all inserts (and outside the ORM session)
        # so bulk loading is not slowed by incremental index maintenance.
        sql = """
            create index {table_name}_geom_idx on {table_name} using GIST(geom)
            """.format(table_name=table_name)
        pgdb_helper.submit(sql)
Ejemplo n.º 11
0
                         block_size=block_size,
                         dynamic=False,
                         start_time=t_start,
                         end_time=t_end,
                         subset_bbox=bbox,
                         overwrite=True)

    logger.info("Inserted %s" % granule_name)


# Scan CI events hour-by-hour over this fixed window.
start_time = datetime(year=2014, month=7, day=23, hour=13, minute=0, second=0)
end_time = datetime(year=2014, month=7, day=23, hour=18, minute=0, second=0)

dtime = start_time
tstep = timedelta(hours=1)
pgdb_helper = PGDbHelper(conn_str=config.pgsql_conn_str(), echo=True)

while dtime < end_time:
    # Fetch all ci_events whose start falls within the current one-hour bin
    # (parameterized query; values bound separately).
    sql = """
        select id, track_id, starttime, endtime, center_lat, center_lon, type from ci_events
        where starttime >= %s and starttime < %s order by starttime
    """
    values = (dtime, dtime + tstep)
    rows = pgdb_helper.query(sql, values)

    if len(rows):
        # Collect event center coordinates (columns 4/5 of the select list);
        # the loop body continues beyond this excerpt.
        lats = []
        lons = []
        for row in rows:
            lats.append(row[4])
            lons.append(row[5])
Ejemplo n.º 12
0
import ogr
from datetime import timedelta
from multiprocessing import Pool
from ci.db.pgdbhelper import PGDbHelper
from ci.db.adminpgdbhelper import AdminPGDbHelper
from ci.models.spatial_reference import SRID_RAP
from ci.models.spatial_reference import SRID_ALBERS
from ci.config import get_instance
from ci.util.proj_helper import ProjHelper
from ci.util.common import TimeMe

__author__ = 'ujjwal'

#get accessor to the old db
config = get_instance()
pgdb_helper = PGDbHelper(conn_str=config.pgsql_conn_str(), echo=config.logsql)
#pgdb_helper = PGDbHelper(conn_str=config.pgsql_conn_str(), echo=config.logsql)

proj_helper = ProjHelper(config=config)
logger = config.logger

#create table for roi geoms
# Drop both the reprojected and base ROI-geometry tables, then rebuild
# roi_geoms (the CREATE TABLE statement continues beyond this excerpt).
pgdb_helper.submit("""
    drop table if exists roi_geoms_reproj;
    drop table if exists roi_geoms;
    create table roi_geoms
    (
        id serial not null,
        roi_name text not null,
        mrms_granule_id int not null,
        starttime timestamp without time zone NOT NULL,
Ejemplo n.º 13
0
from ci.models.spatial_reference import SRID_RAP
from ci.db.pgdbhelper import PGDbHelper
from ci.config import get_instance

__author__ = 'ujjwal'

# Shared config, logger, and Postgres helper for the reprojection script below.
config = get_instance()
logger = config.logger
pgdb_helper = PGDbHelper(conn_str=config.pgsql_conn_str(), echo=config.logsql)

sql = """
    DROP TABLE IF EXISTS rastertile_reproj;
    CREATE TABLE rastertile_reproj
    (
      id serial NOT NULL,
      rast raster NOT NULL,
      datagranule_id integer NOT NULL,
      CONSTRAINT rastertile_reproj_pkey PRIMARY KEY (id),
      CONSTRAINT rastertile_reproj_datagranule_id_fkey FOREIGN KEY (datagranule_id)
          REFERENCES datagranule (id) MATCH SIMPLE
          ON UPDATE NO ACTION ON DELETE NO ACTION
    )
    WITH (
      OIDS=FALSE
    );
"""
pgdb_helper.submit(sql)

sql = """
    SELECT datagranule.id, datagranule.srid FROM datagranule
    JOIN variable on variable.id=datagranule.variable_id
Ejemplo n.º 14
0
from ci.config import get_instance
from ci.db.pgdbhelper import PGDbHelper
from ci.db.sqa.access import SqaAccess
from ci.db.sqa.mapper import Mapper
from ci.db.sqa.models import DataGranule, Variable, Provider, RasterTile
from sqlalchemy import create_engine


# Shared config, SQLAlchemy engine/mapping, and a raw Postgres helper.
config = get_instance()

logger = config.logger
engine = create_engine(config.sqa_connection_string())
mapper = Mapper(engine=engine)
mapper.map_tables()

pgdb_helper = PGDbHelper(conn_str=config.pgsql_conn_str())

# Ensure a 'REFL_reclass' variable exists, then walk every MRMS REFL granule
# (the per-granule loop body continues beyond this excerpt).
with SqaAccess(engine=engine) as sqa:
    refl_reclass_vars = sqa.find(Variable, filterr={'name': 'REFL_reclass'})
    refl_var = sqa.find(Variable, filterr={'name': 'REFL'})[0]

    # Create the reclassified-variable row on first run; reuse it afterwards.
    if len(refl_reclass_vars) == 0:
        refl_reclass_var = Variable(name='REFL_reclass', unit=None, description="Reclassified MRMS REFL")
        sqa.insertOne(refl_reclass_var)
    else:
        refl_reclass_var = refl_reclass_vars[0]

    mrms_provider = sqa.find(Provider, filterr={'name': 'MRMS'})[0]
    mrms_granules = sqa.find(DataGranule, filterr={'provider_id': mrms_provider.id, 'variable_id': refl_var.id})

    for g in mrms_granules: