コード例 #1
0
 def latlon2xy(self, lat, lon, srid_to):
     """Reproject a WGS84 lat/lon point into the spatial reference `srid_to`.

     Asks PostGIS to transform the point and decodes the GeoJSON answer,
     returning the transformed coordinates as a tuple.
     """
     query_text = """
         SELECT ST_AsGeoJSON(ST_Transform(ST_GeomFromText('POINT(%f %f)', %d), %d))
     """ % (lon, lat, SRID_WGS84, srid_to)
     helper = PGDbHelper(self.config.pgsql_conn_str())
     # Single-row, single-column result holding the GeoJSON text.
     geojson_point = json.loads(helper.query(query_text)[0][0])
     return tuple(geojson_point["coordinates"])
コード例 #2
0
ファイル: test_tile_size.py プロジェクト: ujjwaln/cageo
import gzip
import os
from datetime import datetime, timedelta
from ci.models.gdal_raster import GDALRaster
from ci.util.nc_file_helper import nc_get_1d_vars_as_list
from ci.ingest import config, base_ingestor, proj_helper
from ci.config import get_instance
from ci.db.pgdbhelper import PGDbHelper
from ci.util.common import TimeMe

# Candidate square tile sizes: (20, 20) through (980, 980) in 20-pixel steps.
block_sizes = [(n * 20, n * 20) for n in range(1, 50)]

# Shared script-level config and DB helper; echo=conf.logsql toggles SQL logging.
conf = get_instance()
pgdb_helper = PGDbHelper(conn_str=conf.pgsql_conn_str(), echo=conf.logsql)


def cb(x):
    """Threshold callback: 0 for values below 35, 1 otherwise."""
    return 0 if x < 35 else 1


def process_mrms_file(mrms_file):

    provider_name = "MRMS"
    variable_name = "REFL"

    ext_parts = os.path.splitext(mrms_file)
コード例 #3
0
ファイル: gtopo_ingestor.py プロジェクト: ujjwaln/cageo
    # Ingest each GTOPO30 elevation file as a static (non-temporal) granule,
    # then derive slope/aspect rasters from it. `files`, `provider_name` and
    # `variable_name` come from the enclosing scope (not visible here).
    for gtopo_file in files:
        granule_name = "GTOPO30Elev"
        srid = 4326  # WGS84 geographic coordinates
        band_num = 1  # NOTE(review): assigned but unused in this visible span
        block_size = (50, 50)  # tile size used when splitting the raster
        # Nominal timestamp for static data; paired with datetime.max below.
        dtime = datetime(year=1979, month=1, day=1, hour=0, minute=0, second=0)
        level = 0

        ras = GDALRaster(gtopo_file, srid)
        ras.nodata_value = -9999
        # Clip to the configured region bounding box for this SRID.
        bbox = proj_helper.get_bbox(srid)

        base_ingestor.ingest(ras=ras,
                             provider_name=provider_name,
                             variable_name=variable_name,
                             granule_name=granule_name,
                             table_name=granule_name,
                             srid=srid,
                             level=level,
                             block_size=block_size,
                             dynamic=False,
                             start_time=dtime,
                             end_time=datetime.max,
                             subset_bbox=bbox,
                             overwrite=True)

        # Create slope and aspect rasters derived from the elevation granule.
        pgdb_helper = PGDbHelper(conn_str=config.pgsql_conn_str())
        pgdb_helper.insert_slope_and_aspect_rasters(granule_name,
                                                    overwrite=True)
コード例 #4
0
 def create_gist_index(self, table_name, index_name, column_name="rast"):
     """Create a GiST index named `index_name` on `table_name`.`column_name`.

     Delegates to a PGDbHelper built from this object's configuration.
     """
     helper = PGDbHelper(conn_str=self.config.pgsql_conn_str())
     helper.create_gist_index(table_name, index_name, column_name)
コード例 #5
0
import os
from sqlalchemy import create_engine
from ci.db.pgdbhelper import PGDbHelper
from ci.db.sqa.mapper import Mapper
from ci.config import get_instance

__author__ = 'ujjwal'

# Script-level config and DB helper; echo=True logs every SQL statement.
config = get_instance()
pgdb_helper = PGDbHelper(conn_str=config.pgsql_conn_str(), echo=True)

# Reflect existing database tables into SQLAlchemy mappings.
engine = create_engine(config.sqa_connection_string())
mapper = Mapper(engine=engine)
mapper.map_tables()

# Sentinel value marking missing data in the generated output.
missing_data = -999


def generate_output_file(fname):
    if os.path.exists(fname):
        os.remove(fname)

    with open(fname, 'w') as of:
        sql = """
            select distinct(var.name)
            from forecast_roi_stats rs
            left join variable var on var.id=rs.variable_id
            order by var.name asc
            """
        rows = pgdb_helper.query(sql)
        var_names = []
コード例 #6
0
ファイル: weather_service.py プロジェクト: ujjwaln/cageo
            return response

    return '', 204


if __name__ == '__main__':

    # Load the default configuration (alternate config path left commented).
    config = get_instance()

    #config_file = os.path.join("/home/ujjwal/DPR_SM/python/dpr_sm/ingest/lis_config.yml")
    #config = get_instance(config_file=config_file)

    # Reflect existing DB tables into SQLAlchemy mappings.
    engine = create_engine(config.sqa_connection_string())

    mapper = Mapper(engine=engine)
    mapper.map_tables()

    # Make sure the spatial and granule-id indexes exist before serving.
    pgdb_access = PGDbHelper(config.pgsql_conn_str(), echo=config.logsql)
    pgdb_access.ensure_gist_index('rastertile', 'rastertile_rast_gist_idx',
                                  'rast')
    pgdb_access.ensure_datagranule_id_index("rastertile",
                                            "rastertile_datagranule_id_idx",
                                            "datagranule_id")

    #check and create tile cache dir if necessary
    tiles_dir = os.path.join(os.path.dirname(__file__), "tiles")
    if not os.path.exists(tiles_dir):
        os.mkdir(tiles_dir)

    # Start the web service on port 5001 (app is defined elsewhere in the file).
    app.run(port=5001)
コード例 #7
0
    def write_to_pg_vector(self,
                           provider_name,
                           variable_name,
                           granule_name,
                           table_name,
                           srid,
                           level,
                           start_time,
                           end_time=None,
                           block_size=(100, 100),
                           overwrite=False,
                           threshold=None,
                           mask_name=None):
        """Vectorize self.raster and store its shapes in a new PostGIS table.

        Registers a DataGranule catalog row for the raster, creates
        `table_name` (id, datagranule_id, geom, value), bulk-inserts one row
        per vectorized shape in batches of ~1000, then builds a GiST index
        on the geometry column. With overwrite=True, a pre-existing granule
        with the same identity has its data table dropped and its catalog
        row deleted first.

        NOTE(review): `threshold` and `mask_name` are accepted but never used
        in this body; confirm whether they are consumed elsewhere.
        """

        with SqaAccess(engine=self.engine) as orm_access:
            # Look up provider/variable rows by name; assumes both exist
            # (IndexError if either name is missing).
            provider = orm_access.find('provider', {'name': provider_name})[0]
            variable = orm_access.find('variable', {'name': variable_name})[0]

            extent = self.raster.wkt_extent()
            # Open-ended granules get the maximum representable end time.
            if end_time is None:
                end_time = datetime.max

            # Catalog record describing this raster granule.
            granule = DataGranule(provider=provider,
                                  variable=variable,
                                  starttime=start_time,
                                  endtime=end_time,
                                  extent=extent,
                                  level=level,
                                  name=granule_name,
                                  srid=srid,
                                  table_name=table_name,
                                  file_name=self.raster.dsname)
            if overwrite:
                # Find any existing granule with the same identity fields.
                check_granule_result = orm_access.find(DataGranule,
                                                       filterr={
                                                           'provider_id':
                                                           provider.id,
                                                           'variable_id':
                                                           variable.id,
                                                           'level':
                                                           level,
                                                           'starttime':
                                                           start_time,
                                                           'endtime':
                                                           end_time,
                                                           'file_name':
                                                           self.raster.dsname
                                                       })

                if len(check_granule_result):
                    check_granule = check_granule_result[0]
                    self.config.logger.warn('found existing datagranule %d' %
                                            check_granule.id)

                    # Drop the stale data table, then remove the catalog row.
                    sql = "drop table if exists %s;" % check_granule.table_name
                    orm_access.session.execute(sql)

                    #orm_access.delete(DataGranule, id=check_granule.id)
                    orm_access.session.delete(check_granule)
                    orm_access.session.commit()

            orm_access.insertOne(granule)

            pgdb_helper = PGDbHelper(conn_str=self.config.pgsql_conn_str(),
                                     echo=self.config.logsql)
            # NOTE(review): table_name is interpolated into DDL/DML below —
            # safe only if it never comes from untrusted input.
            sql = """
                create table {table_name}
                (
                    id serial not null,
                    datagranule_id integer not null,
                    geom geometry not null,
                    value double precision,
                    CONSTRAINT {table_name}_pkey PRIMARY KEY (id)
                )
                """.format(table_name=table_name)
            #orm_access.session.execute(sql)
            pgdb_helper.submit(sql)

            # Stream vectorized shapes, flushing inserts in ~1000-row batches
            # to bound memory use.
            values = []
            for shapes in self.raster.vector_generator(block_size=block_size):
                for shape in shapes:
                    values.append(
                        (granule.id, shape[0].ExportToWkt(), shape[1]))
                    if len(values) > 1000:
                        # NOTE(review): SRID is hard-coded to 4326 here even
                        # though the method takes `srid` — confirm intended.
                        sql = """
                            insert into {table_name} (datagranule_id, geom, value) values (%s, st_geomfromtext(%s, 4326), %s)
                            """.format(table_name=table_name)
                        #orm_access.session.execute(sql, values)
                        pgdb_helper.insertMany(sql, values)
                        values = []

            # Flush the final partial batch, if any.
            if len(values) > 0:
                sql = """
                        insert into {table_name} (datagranule_id, geom, value) values (%s, st_geomfromtext(%s, 4326), %s)
                    """.format(table_name=table_name)
                pgdb_helper.insertMany(sql, values)

        # Spatial index on the freshly populated geometry column.
        sql = """
            create index {table_name}_geom_idx on {table_name} using GIST(geom)
            """.format(table_name=table_name)
        pgdb_helper.submit(sql)