Example #1
def run():
    """Main RHEAS routine."""
    config_filename, dbname, db_update, verbose, logfile = parseArgs()
    if verbose:
        log_level = logging.DEBUG
    else:
        log_level = logging.INFO
    if logfile is None:
        logging.basicConfig(level=log_level,
                            format='%(levelname)s: %(message)s')
    else:
        logging.basicConfig(filename=logfile,
                            level=log_level,
                            format='%(levelname)s: %(message)s')
    log = logging.getLogger(__name__)
    if dbname is None:
        dbname = "rheas"
    dbio.connect(dbname)
    # check if database update is requested
    if db_update:
        log.info("Updating database!")
        update(dbname, config_filename)
    else:
        options = config.loadFromFile(config_filename)
        # check what simulations have been requested
        if "nowcast" in options:
            nowcast.execute(dbname, options)
        if "forecast" in options:
            forecast.execute(dbname, options)
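The examples above assume a parseArgs() helper returning the configuration file, database name, update flag, verbosity flag and log file. A minimal sketch of what such a parser could look like with argparse is shown below; the actual RHEAS parser may differ, so the option names are assumptions.

import argparse

def parseArgs():
    """Hypothetical command-line parser returning the tuple consumed by run()."""
    parser = argparse.ArgumentParser(description="RHEAS driver")
    parser.add_argument("config", help="path to configuration file")
    parser.add_argument("-d", "--dbname", default=None, help="PostGIS database name")
    parser.add_argument("-u", "--update", action="store_true", help="update database instead of running a simulation")
    parser.add_argument("-v", "--verbose", action="store_true", help="enable debug logging")
    parser.add_argument("-l", "--logfile", default=None, help="write log output to this file")
    args = parser.parse_args()
    return args.config, args.dbname, args.update, args.verbose, args.logfile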
Example #2
def run():
    """Main RHEAS routine."""
    config_filename, dbname, db_update, verbose, logfile = parseArgs()
    if verbose:
        log_level = logging.DEBUG
    else:
        log_level = logging.INFO
    if logfile is None:
        logging.basicConfig(level=log_level, format='%(levelname)s: %(message)s')
    else:
        logging.basicConfig(filename=logfile, level=log_level, format='%(levelname)s: %(message)s')
    log = logging.getLogger(__name__)
    if dbname is None:
        dbname = "rheas"
    dbio.connect(dbname)
    # check if database update is requested
    if db_update:
        log.info("Updating database!")
        update(dbname, config_filename)
    else:
        options = config.loadFromFile(config_filename)
        # check what simulations have been requested
        if "nowcast" in options:
            nowcast.execute(dbname, options)
        if "forecast" in options:
            forecast.execute(dbname, options)
Example #3
def calcSMDI(model):
    """Calculate Soil Moisture Deficit Index (Narasimhan & Srinivasan, 2005)."""
    db = dbio.connect(model.dbname)
    cur = db.cursor()
    sql = "select fdate,(ST_DumpValues(rast)).valarray from {0}.soil_moist where layer=2 order by fdate".format(model.name)
    cur.execute(sql)
    results = cur.fetchall()
    data = np.array([np.array(r[1]).ravel() for r in results])
    i = np.where(np.not_equal(data[0, :], None))[0]
    clim = pandas.DataFrame(data[:, i], index=np.array([r[0] for r in results], dtype='datetime64'), columns=range(len(i)))
    st = "{0}-{1}-{2}".format(model.startyear, model.startmonth, model.startday)
    et = "{0}-{1}-{2}".format(model.endyear, model.endmonth, model.endday)
    p = clim[st:et]
    smdi = np.zeros(p.shape)
    for j in clim.columns:
        MSW = clim[j].median()
        maxSW = clim[j].max()
        minSW = clim[j].min()
        SW = p[j].rolling('7D').median().values[7:]
        # wet anomalies scale by the wet range, dry anomalies by the dry range
        SD = (SW - MSW) / (maxSW - MSW) * 100.0
        SD[SD < 0.0] = (SW[SD < 0.0] - MSW) / (MSW - minSW) * 100.0
        smdi[:7, j] = SD[:7] / 50.0
        smdi[7:, j] = 0.5 * smdi[6:-1, j] + SD / 50.0
    cur.close()
    db.close()
    smdi = np.clip(smdi, -4.0, 4.0)
    return smdi
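As a standalone illustration of the deficit scaling used in calcSMDI, the short sketch below applies the piecewise formula of Narasimhan & Srinivasan (2005) to a synthetic soil water series; the variable names mirror those above but the numbers are made up.

import numpy as np

SW = np.array([22.0, 30.0, 18.0, 27.0, 35.0, 15.0])   # synthetic weekly soil water
MSW, maxSW, minSW = np.median(SW), SW.max(), SW.min()
# wet anomalies are scaled by the wet range, dry anomalies by the dry range
SD = np.where(SW > MSW,
              (SW - MSW) / (maxSW - MSW) * 100.0,
              (SW - MSW) / (MSW - minSW) * 100.0)
print(SD)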
Example #4
def calcDrySpells(model, droughtfun=np.mean, duration=14, recovduration=2):
    """Calculate maps of number of dry spells during simulation period."""
    # FIXME: Currently only uses precipitation to identify dry spells. Need to change it to also use soil moisture and runoff
    db = dbio.connect(model.dbname)
    cur = db.cursor()
    sql = "select fdate,(ST_DumpValues(rast)).valarray from {0}.rainf where fdate>=date'{1}-{2}-{3}' and fdate<=date'{4}-{5}-{6}' order by fdate".format(model.name, model.startyear, model.startmonth, model.startday, model.endyear, model.endmonth, model.endday)
    cur.execute(sql)
    results = cur.fetchall()
    data = np.array([np.array(r[1]).ravel() for r in results])
    i = np.where(np.not_equal(data[0, :], None))[0]
    p = pandas.DataFrame(data[:, i], index=np.array([r[0] for r in results], dtype='datetime64'), columns=range(len(i)))
    cur.close()
    db.close()
    ndroughts = np.zeros(p.values.shape)
    for pi in p.columns:
        drought_thresh = droughtfun(p[pi])
        days = 0
        for i in range(recovduration-1, len(p[pi])):
            if p.values[i, pi] <= drought_thresh:
                days += 1
            elif all(p.values[i-j, pi] > drought_thresh for j in range(recovduration)):
                days = 0
            else:
                days += 1
            if days == duration:
                ndroughts[i, pi] = 1
    return np.cumsum(ndroughts, axis=0)
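A simplified sketch of the dry-spell counting idea on a synthetic daily rainfall series; unlike calcDrySpells it resets the counter on any wet day rather than after recovduration consecutive wet days, while the threshold function and 14-day duration follow the defaults above.

import numpy as np

rain = np.random.default_rng(1).gamma(0.5, 4.0, 120)  # synthetic daily rainfall (mm)
thresh = np.mean(rain)
days = 0
nspells = 0
for r in rain:
    days = days + 1 if r <= thresh else 0
    if days == 14:  # a 14-day dry spell has just been completed
        nspells += 1
print(nspells)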
Example #5
def calcSPI(duration, model):
    """Calculate Standardized Precipitation Index for specified month
    *duration*."""
    log = logging.getLogger(__name__)
    startdate = date(model.startyear + model.skipyear, model.startmonth, model.startday)
    enddate = date(model.endyear, model.endmonth, model.endday)
    nt = (enddate - startdate).days + 1
    ndays = ((startdate + relativedelta(months=duration)) - startdate).days + 1
    # tablename = "precip."+model.precip
    if duration < 1 or ndays > nt:
        log.warning("Cannot calculate SPI with {0} months duration.".format(duration))
        spi = None
    else:
        db = dbio.connect(model.dbname)
        cur = db.cursor()
        sql = "select fdate,(ST_DumpValues(rast)).valarray from {0}.rainf where fdate>=date'{1}-{2}-{3}' and fdate<=date'{4}-{5}-{6}' order by fdate".format(model.name, model.startyear, model.startmonth, model.startday, model.endyear, model.endmonth, model.endday)
        cur.execute(sql)
        results = cur.fetchall()
        data = np.array([np.array(r[1]).ravel() for r in results])
        i = np.where(np.not_equal(data[0, :], None))[0]
        p = pandas.DataFrame(data[:, i], index=np.array([r[0] for r in results], dtype='datetime64'), columns=range(len(i)))
        pm = p.rolling(duration*30).mean()  # assume each month is 30 days
        g = [stats.gamma.fit(pm[j][duration*30:]) for j in pm.columns]
        cdf = np.array([stats.gamma.cdf(pm[j],*g[j]) for j in pm.columns]).T
        spi = np.zeros(cdf.shape)
        spi[duration*30:, :] = stats.norm.ppf(cdf[duration*30:, :])
        spi = _clipToValidRange(spi)
        cur.close()
        db.close()
    return spi
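The core of the SPI transform is fitting a gamma distribution to accumulated precipitation and mapping its CDF through the standard normal quantile function. A minimal sketch on synthetic data follows; the clipping bounds are an assumption standing in for _clipToValidRange.

import numpy as np
from scipy import stats

rng = np.random.default_rng(0)
pm = rng.gamma(2.0, 10.0, 300)        # synthetic accumulated precipitation
params = stats.gamma.fit(pm)          # fit a gamma distribution
cdf = stats.gamma.cdf(pm, *params)    # non-exceedance probability of each value
spi = stats.norm.ppf(cdf)             # map to standard normal quantiles
spi = np.clip(spi, -3.09, 3.09)       # assumed valid SPI range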
Example #6
def createDatabase(dbname):
    subprocess.call(["{0}/createdb".format(rpath.bins), dbname])
    db = dbio.connect(dbname)
    cur = db.cursor()
    cur.execute("create extension postgis; create extension postgis_topology;")
    cur.execute("create schema vic; create schema dssat; create schema crops;")
    db.commit()
    cur.execute(
        "create table vic.input (resolution double precision,snowbandfile text,vegparam text,veglib text,soilfile text,rootzones integer,basefile text)"
    )
    db.commit()
    cur.execute(
        "insert into vic.input values (0.25, 'vic/global_snowbands_0.25deg.txt', 'vic/global_lai_0.25deg.txt', 'vic/vic_veglib.txt', 'vic/global_soil_0.25deg.txt', 2, 'vic/dssat.inp.base')"
    )
    cur.execute(
        "create schema precip; create schema tmax; create schema tmin; create schema wind; create schema lai"
    )
    cur.execute(
        "create table dssat.cultivars (gid serial primary key, ensemble int, geom geometry, p1 numeric, p2 numeric, p5 numeric, g2 numeric, g3 numeric, phint numeric)"
    )
    db.commit()
    subprocess.call([
        "{0}/psql".format(rpath.bins), "-d", dbname, "-f",
        "{0}/tests/vic_soils.sql".format(rpath.data)
    ])
    cur.close()
    db.close()
Example #7
def ingestSoils(dbname="rheas"):
    """Ingest soil information from downloaded files."""
    filenames = glob.glob("SoilGrids-for-DSSAT-10km v1.0 (by country)/*.SOL")
    db = dbio.connect(dbname)
    cur = db.cursor()
    if dbio.tableExists(dbname, "dssat", "soils"):
        print("Overwriting existing DSSAT soils table in database!")
        cur.execute("drop table dssat.soils")
        db.commit()
    cur.execute(
        "create table dssat.soils (rid serial primary key, geom geometry(Point, 4326), props text)"
    )
    db.commit()
    for filename in filenames:
        try:
            profiles = parseSolFile(filename)
            for latlon in profiles:
                lat, lon = latlon
                sql = "insert into dssat.soils (geom, props) values (st_geomfromtext('POINT({0} {1})', 4326), '{2}')".format(
                    lon, lat, profiles[latlon])
                cur.execute(sql)
        except:
            print("Cannot process file {0}".format(filename))
    db.commit()
    cur.close()
    db.close()
Example #8
File: iri.py Project: nowucme/RHEAS
def ingest(dbname, filename, dt, lt, cname, stname):
    """Imports Geotif *filename* into database *db*."""
    db = dbio.connect(dbname)
    cur = db.cursor()
    schemaname, tablename = stname.split(".")
    cur.execute(
        "select * from information_schema.tables where table_schema='{0}' and table_name='{1}'"
        .format(schemaname, tablename))
    if not bool(cur.rowcount):
        cur.execute(
            "create table {0}.{1} (rid serial not null primary key, fdate date, tercile text, leadtime int, rast raster)"
            .format(schemaname, tablename))
        db.commit()
    cur.execute(
        "select * from {0} where fdate='{1}' and tercile = '{2}' and leadtime = {3}"
        .format(stname, dt.strftime("%Y-%m-%d"), cname, lt))
    if bool(cur.rowcount):
        cur.execute(
            "delete from {0} where fdate='{1}' and tercile = '{2}' and leadtime = {3}"
            .format(stname, dt.strftime("%Y-%m-%d"), cname, lt))
        db.commit()
    dbio.ingest(dbname, filename, dt, stname, False, False)
    sql = "update {0} set tercile = '{1}' where tercile is null".format(
        stname, cname)
    cur.execute(sql)
    sql = "update {0} set leadtime = '{1}' where leadtime is null".format(
        stname, lt)
    cur.execute(sql)
    db.commit()
    cur.close()
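A hedged usage example of the IRI ingest function; the GeoTIFF filename is hypothetical, and the target table follows the schema.table convention the function expects.

from datetime import date

# hypothetical GeoTIFF with the above-normal tercile probability at a 3-month lead
ingest("rheas", "iri_above_201709.tif", date(2017, 9, 1), 3, "above", "precip.iri")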
Example #9
File: iri.py Project: nowucme/RHEAS
def _getForcings(e, dbname, ptable, rtables, name, dt0, dt1):
    """Extract meteorological forcings for ensemble member."""
    db = dbio.connect(dbname)
    cur = db.cursor()
    data = {}
    for v in ['precip', 'tmax', 'tmin', 'wind']:
        temptable = ''.join(random.SystemRandom().choice(string.ascii_letters)
                            for _ in range(8))
        sql = "create table {7} as (with f as (select gid,st_worldtorastercoordx(rast,geom) as xf,st_worldtorastercoordy(rast,geom) as yf,rid as ftile from {6}.{0},{1}.basin where fdate=date'{2}-{3}-{4}' and st_intersects(rast,geom)) select c.gid,xf,yf,x,y,ftile as tile from f inner join precip.{5}_iri_xy as c on c.gid=f.gid)".format(
            rtables[v], name, dt0.year, dt0.month, dt0.day, ptable, v,
            temptable)
        cur.execute(sql)
        db.commit()
        cur.execute("create index {0}_r on {0}(tile)".format(temptable))
        db.commit()
        sql = "select gid,fdate,st_value(rast,xf,yf) from {6}.{0},{7} as xy inner join iri_years as i on xy.x=i.x and xy.y=i.y where ens={2} and rid=tile and fdate>=date(concat_ws('-',yr,'{3}-{4}')) and fdate<=(date(concat_ws('-',yr,'{3}-{4}'))+interval'{5} days') order by gid,fdate".format(
            rtables[v], ptable, e + 1, dt0.month, dt0.day, (dt1 - dt0).days, v,
            temptable)
        cur.execute(sql)
        data[v] = cur.fetchall()
        cur.execute("drop table {0}".format(temptable))
        db.commit()
    cur.close()
    db.close()
    return data
Example #10
 def testTable(self):
     db = dbio.connect(self.dbname)
     cur = db.cursor()
     cur.execute(
         "select * from information_schema.tables where table_name='chirps' and table_schema='precip'"
     )
     assert bool(cur.rowcount) is True
Example #11
def generate(options, models):
    """Generate meteorological forecast forcings from downscaled NMME data."""
    log = logging.getLogger(__name__)
    options['vic']['tmax'] = options['vic']['temperature']
    options['vic']['tmin'] = options['vic']['temperature']
    db = dbio.connect(models.dbname)
    cur = db.cursor()
    dt0 = datetime(models.startyear, models.startmonth, models.startday)
    dt1 = datetime(models.endyear, models.endmonth, models.endday)
    # check if forecast period exists in NMME data
    sql = "select count(distinct(fdate)) from precip.nmme where fdate>=date'{0}' and fdate<=date'{1}'".format(dt0.strftime("%Y-%m-%d"), dt1.strftime("%Y-%m-%d"))
    cur.execute(sql)
    ndata = cur.fetchone()[0]
    if ndata == (dt1 - dt0).days + 1:
        prec, tmax, tmin, wind = _getForcings(options, models, models.res)
        if tmax is None or tmin is None or wind is None:
            log.error("No data found to generate VIC forcings for NMME forecast. Exiting...")
            sys.exit()
        else:
            for e in range(len(models)):
                models[e].writeForcings(prec[e], tmax[e], tmin[e], wind)
    else:
        log.error("Not enough data found for requested forecast period! Exiting...")
        sys.exit()
    cur.close()
    db.close()
Example #12
File: nmme.py Project: nasa/RHEAS
def ingest(dbname, varname, filename, dt, ens):
    """Imports Geotif *filename* into database *dbname*."""
    schema = {'Precipitation': 'precip', 'Temperature': 'tmax'}
    db = dbio.connect(dbname)
    cur = db.cursor()
    cur.execute(
        "select * from information_schema.tables where table_schema='{0}' and table_name='nmme'".format(schema[varname]))
    if not bool(cur.rowcount):
        cur.execute("create table {0}.nmme (rid serial not null primary key, fdate date, ensemble int, rast raster)".format(
            schema[varname]))
        db.commit()
    cur.execute("select * from {0}.nmme where fdate='{1}' and ensemble = {2}".format(schema[varname], dt.strftime("%Y-%m-%d"), ens))
    if bool(cur.rowcount):
        cur.execute("delete from {0}.nmme where fdate='{1}' and ensemble = {2}".format(schema[varname], dt.strftime("%Y-%m-%d"), ens))
        db.commit()
    dbio.ingest(dbname, filename, dt, "{0}.nmme".format(schema[varname]), False, False)
    sql = "update {0}.nmme set ensemble = {1} where ensemble is null".format(schema[varname], ens)
    cur.execute(sql)
    db.commit()
    cur.execute("select * from raster_resampled where sname='{0}' and tname like 'nmme_%'".format(schema[varname]))
    tables = [r[1] for r in cur.fetchall()]
    for table in tables:
        cur.execute("select * from {0}.{1} where fdate='{2}' and ensemble = {3}".format(schema[varname], table, dt.strftime("%Y-%m-%d"), ens))
        if bool(cur.rowcount):
            cur.execute("delete from {0}.{1} where fdate='{2}' and ensemble = {3}".format(schema[varname], table, dt.strftime("%Y-%m-%d"), ens))
            db.commit()
    tilesize = (10, 10)
    dbio.createResampledTables(dbname, schema[varname], "nmme", dt, tilesize, False, "and ensemble={0}".format(ens))
    _setEnsemble(dbname, schema[varname], ens)
    cur.close()
    db.close()
Example #13
 def tearDown(self):
     db = dbio.connect(self.dbname)
     cur = db.cursor()
     cur.execute("drop schema {0} cascade".format(self.options['forecast']['name']))
     db.commit()
     cur.close()
     db.close()
Example #14
 def writeLAI(self, modelpath, gid, viclai=None, tablename="lai.modis"):
     """Writes LAI file for DSSAT."""
     fout = open("{0}/LAI.txt".format(modelpath), 'w')
     db = dbio.connect(self.dbname)
     cur = db.cursor()
     cur.execute("select * from information_schema.tables where table_name=%s and table_schema='lai'",
                 (tablename.split(".")[1],))
     if bool(cur.rowcount) and not self.lai == "vic":
         sql = "select fdate,avg((st_summarystats(st_clip(rast,geom))).mean) from {0},{1}.agareas where st_intersects(rast,geom) and fdate>=date '{2}-{3}-{4}' and fdate<=date '{5}-{6}-{7}' and gid={8} group by fdate".format(
             tablename, self.name, self.startyear, self.startmonth, self.startday, self.endyear, self.endmonth, self.endday, gid)
         cur.execute(sql)
         if bool(cur.rowcount):
             results = cur.fetchall()
             lai = {}
             for r in results:
                 if r[1] is None:
                     lai[r[0]] = -9999.0
                 else:
                     lai[r[0]] = r[1] / 10.0
         else:
             lai = {}
     else:
         lai = viclai
     enddate = date(self.endyear, 12, 31)
     startdate = date(self.startyear, 1, 1)
     for t in range((enddate - startdate).days + 1):
         dt = startdate + timedelta(t)
         if lai is not None and dt in lai:
             fout.write("{0:.1f}\n".format(lai[dt]))
         else:
             fout.write("-9999.0\n")
     fout.close()
     cur.close()
     db.close()
Example #15
def ingestTables(dbname):
    subprocess.call(["{0}/psql".format(rpath.bins), "-d", dbname, "-f", "{0}/tests/precip_chirps.sql".format(rpath.data)])
    subprocess.call(["{0}/psql".format(rpath.bins), "-d", dbname, "-f", "{0}/tests/precip_trmm.sql".format(rpath.data)])
    subprocess.call(["{0}/psql".format(rpath.bins), "-d", dbname, "-f", "{0}/tests/tmax_ncep.sql".format(rpath.data)])
    subprocess.call(["{0}/psql".format(rpath.bins), "-d", dbname, "-f", "{0}/tests/tmin_ncep.sql".format(rpath.data)])
    subprocess.call(["{0}/psql".format(rpath.bins), "-d", dbname, "-f", "{0}/tests/wind_ncep.sql".format(rpath.data)])
    subprocess.call(["{0}/psql".format(rpath.bins), "-d", dbname, "-f", "{0}/tests/precip_chirps_4.sql".format(rpath.data)])
    subprocess.call(["{0}/psql".format(rpath.bins), "-d", dbname, "-f", "{0}/tests/precip_trmm_4.sql".format(rpath.data)])
    subprocess.call(["{0}/psql".format(rpath.bins), "-d", dbname, "-f", "{0}/tests/tmax_ncep_4.sql".format(rpath.data)])
    subprocess.call(["{0}/psql".format(rpath.bins), "-d", dbname, "-f", "{0}/tests/tmin_ncep_4.sql".format(rpath.data)])
    subprocess.call(["{0}/psql".format(rpath.bins), "-d", dbname, "-f", "{0}/tests/wind_ncep_4.sql".format(rpath.data)])
    subprocess.call(["{0}/psql".format(rpath.bins), "-d", dbname, "-f", "{0}/tests/cropland.sql".format(rpath.data)])
    subprocess.call(["{0}/psql".format(rpath.bins), "-d", dbname, "-f", "{0}/tests/plantstart.sql".format(rpath.data)])
    subprocess.call(["{0}/psql".format(rpath.bins), "-d", dbname, "-f", "{0}/tests/dssat_soils.sql".format(rpath.data)])
    db = dbio.connect(dbname)
    cur = db.cursor()
    sql = """create or replace function resampled(_s text, _t text, out result double precision) as
    $func$
    begin
    execute format('select st_scalex(rast) from %s.%s limit 1',quote_ident(_s),quote_ident(_t)) into result;
    end
    $func$ language plpgsql;"""
    cur.execute(sql)
    cur.execute("create or replace view raster_resampled as (select r_table_schema as sname,r_table_name as tname,resampled(r_table_schema,r_table_name) as resolution from raster_columns)")
    cur.execute("create schema soilmoist")
    db.commit()
    subprocess.call(["{0}/psql".format(rpath.bins), "-d", dbname, "-f", "{0}/tests/soilmoist_smos.sql".format(rpath.data)])
Example #16
def _saveTimeSeriesFromShapefile(filepath, name, varname, startdate, enddate, dbname):
    """Extract geophysical variable *varname* from *dbname*, averaging for each date
    between *startdate* and *enddate* over polygons derived from *filepath*
    shapefile."""
    logging.basicConfig(level=logging.INFO, format='%(message)s')
    log = logging.getLogger(__name__)
    tablename = _importShapefile(filepath, dbname)
    db = dbio.connect(dbname)
    cur = db.cursor()
    cur.execute("select distinct(gid) from {0}".format(tablename))
    results = cur.fetchall()
    npolygons = len(results)
    sql = "select gid,fdate,(st_summarystats(rast)).mean as mean from {0}.{1},{2} where st_intersects(rast,geom)".format(name, varname, tablename)
    try:
        sdt = datetime.strptime(startdate, "%Y-%m-%d")
        edt = datetime.strptime(enddate, "%Y-%m-%d")
        sql += " and fdate>=date'{0}' and fdate<=date'{1} group by gid,fdate,rast order by gid,fdate".format(sdt.strftime("%Y-%m-%d"), edt.strftime("%Y-%m-%d"))
    except ValueError:
        sql += " group by gid,fdate,rast order by fdate,gid"
        log.warning("Start and/or end dates were invalid. Ignoring...")
    cur.execute(sql)
    results = cur.fetchall()
    csvfile = filepath.replace(".shp", ".csv")
    with open(csvfile, 'w') as fout:
        fout.write("date,{0}".format(",".join(["p{0}".format(i+1) for i in range(npolygons)])))
        for res in results:
            if res[0] == 1:
                fout.write("\n{0},{1:f}".format(res[1].strftime("%Y-%m-%d"), res[2]))
            else:
                fout.write(",{0:f}".format(res[2]))
    cur.close()
    db.close()
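For instance, a call such as the following (shapefile path, schema and variable are hypothetical) would write a CSV of polygon-averaged soil moisture next to the shapefile:

_saveTimeSeriesFromShapefile("data/basins.shp", "basin", "soil_moist",
                             "2010-01-01", "2010-12-31", "rheas")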
Example #17
 def _ESP(self, options):
     """Generate meteorological forcings using the Ensemble Streamflow Prediction method."""
     ndays = (date(self.endyear, self.endmonth, self.endday) -
              date(self.startyear, self.startmonth, self.startday)).days
     db = dbio.connect(self.models[0].dbname)
     cur = db.cursor()
     sql = "select distinct (date_part('year', fdate)) as year from precip.{0}".format(
         options['vic']['precip'])
     cur.execute(sql)
     years = [int(y[0]) for y in cur.fetchall()]
     random.shuffle(years)
     if self.startyear in years:
         years.remove(self.startyear)
     # will need to check whether enough days exist in sampled year before
     # removing
     years.remove(max(years))
     for e in range(self.nens):
         model = self.models[e]
         model.startyear = years[e]
         t = date(model.startyear, model.startmonth,
                  model.startday) + timedelta(ndays)
         model.endyear, model.endmonth, model.endday = t.year, t.month, t.day
         prec, tmax, tmin, wind = model.getForcings(options['vic'])
         model.writeForcings(prec, tmax, tmin, wind)
     cur.close()
     db.close()
Example #18
File: test.py Project: maduhu/RHEAS
 def tearDown(self):
     db = dbio.connect(self.dbname)
     cur = db.cursor()
     cur.execute("drop table dssat.cultivars")
     db.commit()
     cur.close()
     db.close()
Example #19
def mean(dbname, name):
    """Calculate ensemble average from raster."""
    log = logging.getLogger(__name__)
    schemaname, tablename = name.split(".")
    db = dbio.connect(dbname)
    cur = db.cursor()
    if _columnExists(cur, name, "ensemble"):
        cur.execute("select max(ensemble) from {0}".format(name))
        nens = cur.fetchone()[0]
        ssql = "select fdate,st_union(rast,'MEAN') as rast from {0} group by fdate".format(
            name)
        sql = "select * from information_schema.columns where table_schema='{0}' and table_name='{1}_mean'".format(
            schemaname, tablename)
        cur.execute(sql)
        if bool(cur.rowcount):
            cur.execute("drop table {0}_mean".format(name))
        sql = "create table {0}.{1}_mean as ({2})".format(
            schemaname, tablename, ssql)
        cur.execute(sql)
    else:
        log.warning(
            "Cannot calculate ensemble average maps, no ensemble exists.")
    db.commit()
    cur.close()
    db.close()
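A hedged usage example, assuming an ensemble-enabled output table named basin.soil_moist exists in the database:

mean("rheas", "basin.soil_moist")  # creates basin.soil_moist_mean with per-date ensemble means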
Example #20
def _resampleClimatology(dbname, ptable, name, dt0):
    """Resample finer scale climatology to IRI spatial resolution."""
    tilesize = 10
    res = 2.5
    db = dbio.connect(dbname)
    cur = db.cursor()
    cur.execute(
        "select * from pg_catalog.pg_class c inner join pg_catalog.pg_namespace n on c.relnamespace=n.oid where n.nspname='precip' and c.relname='{0}_iri'".format(ptable))
    if not bool(cur.rowcount):
        sql = "create table precip.{1}_iri as (with f as (select fdate,st_tile(st_rescale(rast,{0},'average'),{2},{2}) as rast from precip.{1}) select fdate,rast,dense_rank() over (order by st_upperleftx(rast),st_upperlefty(rast)) as rid from f)".format(
            res, ptable, tilesize)
        cur.execute(sql)
        cur.execute(
            "create index {0}_iri_r on precip.{0}_iri(rid)".format(ptable))
        cur.execute(
            "create index {0}_iri_t on precip.{0}_iri(fdate)".format(ptable))
        db.commit()
    _deleteTableIfExists(dbname, 'precip', "{0}_iri_xy".format(ptable))
    sql = "create table precip.{0}_iri_xy as (select gid,st_worldtorastercoordx(rast,geom) as x,st_worldtorastercoordy(rast,geom) as y,rid as tile from precip.{0}_iri,{1}.basin where fdate=date'{2}-{3}-{4}' and st_intersects(rast,geom))".format(
        ptable, name, dt0.year, dt0.month, dt0.day)
    cur.execute(sql)
    db.commit()
    cur.execute(
        "create index {0}_iri_xy_r on precip.{0}_iri_xy(tile)".format(ptable))
    db.commit()
    cur.close()
    db.close()
Example #21
File: nmme.py Project: nasa/RHEAS
def _getForcings(options, models, res):
    """Retrieve meteorological forcings for ensemble."""
    nens = len(models)
    db = dbio.connect(models.dbname)
    cur = db.cursor()
    rtables = dbio.getResampledTables(models.dbname, options, res)
    rsmp = rtables['precip'].split("_")[1]
    prec = [None] * nens
    tmax = [None] * nens
    tmin = [None] * nens
    temp = [None] * nens
    for e in range(nens):
        prec[e] = _queryDataset(models.dbname, "precip.nmme_{0}".format(rsmp), models.name, models.startyear, models.startmonth, models.startday, models.endyear, models.endmonth, models.endday, e+1)
        temp[e] = _queryDataset(models.dbname, "tmax.nmme_{0}".format(rsmp), models.name, models.startyear, models.startmonth, models.startday, models.endyear, models.endmonth, models.endday, e+1)
    sql = "select distinct(date_part('year',fdate)) from tmax.{0}".format(rtables['tmax'])
    cur.execute(sql)
    years = [r[0] for r in cur.fetchall()]
    years.remove(min(years))
    years.remove(max(years))
    if len(years) > 0:
        ndays = (datetime(models.endyear, models.endmonth, models.endday) - datetime(models.startyear, models.startmonth, models.startday)).days
        yr = int(np.random.choice(years))
        t0 = datetime(yr, models.startmonth, models.startday)
        t1 = t0 + timedelta(ndays)
        vtmax = _queryDataset(models.dbname, "tmax.{0}".format(rtables['tmax']), models.name, t0.year, t0.month, t0.day, t1.year, t1.month, t1.day)
        vtmin = _queryDataset(models.dbname, "tmin.{0}".format(rtables['tmin']), models.name, t0.year, t0.month, t0.day, t1.year, t1.month, t1.day)
        wind = _queryDataset(models.dbname, "wind.{0}".format(rtables['wind']), models.name, t0.year, t0.month, t0.day, t1.year, t1.month, t1.day)
        for e in range(nens):
            tmax[e] = [(vtmax[i][0], vtmax[i][1], temp[e][i][2] + 0.5 * (vtmax[i][2] - vtmin[i][2])) for i in range(len(vtmax))]
            tmin[e] = [(vtmin[i][0], vtmin[i][1], temp[e][i][2] - 0.5 * (vtmax[i][2] - vtmin[i][2])) for i in range(len(vtmin))]
    else:
        prec = tmax = tmin = wind = None
    return prec, tmax, tmin, wind
Example #22
 def __init__(self,
              dbname,
              name,
              resolution,
              startyear,
              startmonth,
              startday,
              endyear,
              endmonth,
              endday,
              nens,
              vicopts,
              shapefile=None,
              assimilate="Y"):
     self.path = tempfile.mkdtemp(dir=".")
     self.startyear = startyear
     self.startmonth = startmonth
     self.startday = startday
     self.endyear = endyear
     self.endmonth = endmonth
     self.endday = endday
     self.lat = []
     self.lon = []
     self.elev = []
     self.depths = []
     self.dbname = dbname
     self.name = name
     self.res = resolution
     self.nens = nens
     self.shapefile = shapefile
     self.assimilate = assimilate
     try:
         self.grid_decimal = - \
             (decimal.Decimal(self.res).as_tuple().exponent - 1)
     except:
         self.grid_decimal = - \
             (decimal.Decimal(str(self.res)).as_tuple().exponent - 1)
     db = dbio.connect(self.dbname)
     cur = db.cursor()
     if 'lai' in vicopts or ('save' in vicopts
                             and vicopts['save'].find("lai") >= 0):
         self.lai = "vic"
     else:
         self.lai = None
     if 'save to' in vicopts:
         self.datafrom = vicopts['save to']
     else:
         self.datafrom = "db"
     cur.execute(
         "select * from information_schema.tables where table_name='basin' and table_schema=%s",
         (name, ))
     if not bool(cur.rowcount):
         print "ERROR! No simulation named {0} exists in database. You might have to run VIC.".format(
             name)
         sys.exit()
     cur.execute('select basefile from vic.input where resolution=%f;' %
                 self.res)
     self.basefile = "{0}/{1}".format(rpath.data, cur.fetchone()[0])
     cur.close()
     db.close()
Example #23
File: nmme.py Project: nasa/RHEAS
def generate(options, models):
    """Generate meteorological forecast forcings from downscaled NMME data."""
    log = logging.getLogger(__name__)
    options['vic']['tmax'] = options['vic']['temperature']
    options['vic']['tmin'] = options['vic']['temperature']
    db = dbio.connect(models.dbname)
    cur = db.cursor()
    dt0 = datetime(models.startyear, models.startmonth, models.startday)
    dt1 = datetime(models.endyear, models.endmonth, models.endday)
    # check if forecast period exists in NMME data
    sql = "select count(distinct(fdate)) from precip.nmme where fdate>=date'{0}' and fdate<=date'{1}'".format(dt0.strftime("%Y-%m-%d"), dt1.strftime("%Y-%m-%d"))
    cur.execute(sql)
    ndata = cur.fetchone()[0]
    if ndata == (dt1 - dt0).days + 1:
        prec, tmax, tmin, wind = _getForcings(options, models, models.res)
        if tmax is None or tmin is None or wind is None:
            log.error("No data found to generate VIC forcings for NMME forecast. Exiting...")
            sys.exit()
        else:
            for e in range(len(models)):
                models[e].writeForcings(prec[e], tmax[e], tmin[e], wind)
    else:
        log.error("Not enough data found for requested forecast period! Exiting...")
        sys.exit()
    cur.close()
    db.close()
Example #24
 def _ESP(self, options):
     """Generate meteorological forcings using the Ensemble Streamflow Prediction method."""
     ndays = (date(self.endyear, self.endmonth, self.endday) -
              date(self.startyear, self.startmonth, self.startday)).days
     db = dbio.connect(self.models[0].dbname)
     cur = db.cursor()
     if self.startmonth < self.endmonth:
         sql = "select distinct (date_part('year', fdate)) as year from precip.{0} where date_part('month', fdate) >= {1} and date_part('month', fdate) <= {2}".format(options['vic']['precip'], self.startmonth, self.endmonth)
     else:
         sql = "select distinct (date_part('year', fdate)) as year from precip.{0} where date_part('month', fdate) >= {1} or date_part('month', fdate) <= {2}".format(options['vic']['precip'], self.startmonth, self.endmonth)
     cur.execute(sql)
     years = [int(y[0]) for y in cur.fetchall()]
     random.shuffle(years)
     while len(years) < self.nens:
         years += years
     for e in range(self.nens):
         model = self.models[e]
         model.startyear = years[e]
         t = date(model.startyear, model.startmonth,
                  model.startday) + timedelta(ndays)
         model.endyear, model.endmonth, model.endday = t.year, t.month, t.day
         prec, tmax, tmin, wind = model.getForcings(options['vic'])
         model.writeForcings(prec, tmax, tmin, wind)
     cur.close()
     db.close()
Example #25
def _getForcings(options, models, res):
    """Retrieve meteorological forcings for ensemble."""
    nens = len(models)
    db = dbio.connect(models.dbname)
    cur = db.cursor()
    rtables = dbio.getResampledTables(models.dbname, options, res)
    rsmp = rtables['precip'].split("_")[1]
    prec = [None] * nens
    tmax = [None] * nens
    tmin = [None] * nens
    temp = [None] * nens
    for e in range(nens):
        prec[e] = _queryDataset(models.dbname, "precip.nmme_{0}".format(rsmp), models.name, models.startyear, models.startmonth, models.startday, models.endyear, models.endmonth, models.endday, e+1)
        temp[e] = _queryDataset(models.dbname, "tmax.nmme_{0}".format(rsmp), models.name, models.startyear, models.startmonth, models.startday, models.endyear, models.endmonth, models.endday, e+1)
    sql = "select distinct(date_part('year',fdate)) from tmax.{0}".format(rtables['tmax'])
    cur.execute(sql)
    years = [r[0] for r in cur.fetchall()]
    years.remove(min(years))
    years.remove(max(years))
    if len(years) > 0:
        ndays = (datetime(models.endyear, models.endmonth, models.endday) - datetime(models.startyear, models.startmonth, models.startday)).days
        yr = int(np.random.choice(years))
        t0 = datetime(yr, models.startmonth, models.startday)
        t1 = t0 + timedelta(ndays)
        vtmax = _queryDataset(models.dbname, "tmax.{0}".format(rtables['tmax']), models.name, t0.year, t0.month, t0.day, t1.year, t1.month, t1.day)
        vtmin = _queryDataset(models.dbname, "tmin.{0}".format(rtables['tmin']), models.name, t0.year, t0.month, t0.day, t1.year, t1.month, t1.day)
        wind = _queryDataset(models.dbname, "wind.{0}".format(rtables['wind']), models.name, t0.year, t0.month, t0.day, t1.year, t1.month, t1.day)
        for e in range(nens):
            tmax[e] = [(vtmax[i][0], vtmax[i][1], temp[e][i][2] + 0.5 * (vtmax[i][2] - vtmin[i][2])) for i in range(len(vtmax))]
            tmin[e] = [(vtmin[i][0], vtmin[i][1], temp[e][i][2] - 0.5 * (vtmax[i][2] - vtmin[i][2])) for i in range(len(vtmin))]
    else:
        prec = tmax = tmin = wind = None
    return prec, tmax, tmin, wind
Example #26
 def writeLAI(self, modelpath, gid, viclai=None, tablename="lai.modis"):
     """Writes LAI file for DSSAT."""
     fout = open("{0}/LAI.txt".format(modelpath), 'w')
     db = dbio.connect(self.dbname)
     cur = db.cursor()
     cur.execute("select * from information_schema.tables where table_name=%s and table_schema='lai'",
                 (tablename.split(".")[1],))
     if bool(cur.rowcount) and not self.lai == "vic":
         sql = "select fdate,avg((st_summarystats(st_clip(rast,geom))).mean) from {0},{1}.agareas where st_intersects(rast,geom) and fdate>=date '{2}-{3}-{4}' and fdate<=date '{5}-{6}-{7}' and gid={8} group by fdate".format(
             tablename, self.name, self.startyear, self.startmonth, self.startday, self.endyear, self.endmonth, self.endday, gid)
         cur.execute(sql)
         if bool(cur.rowcount):
             results = cur.fetchall()
             lai = {}
             for r in results:
                 if r[1] is None:
                     lai[r[0]] = -9999.0
                 else:
                     lai[r[0]] = r[1] / 10.0
         else:
             lai = {}
     else:
         lai = viclai
     enddate = date(self.endyear, 12, 31)
     startdate = date(self.startyear, 1, 1)
     for t in range((enddate - startdate).days + 1):
         dt = startdate + timedelta(t)
          if lai is not None and dt in lai:
             fout.write("{0:.1f}\n".format(lai[dt]))
         else:
             fout.write("-9999.0\n")
     fout.close()
     cur.close()
     db.close()
Example #27
def _saveRasters(filepath, name, varname, startdate, enddate, dbname):
    """"Save geophysical variable from *dbname* database, between *startdate*
    and *enddate* dates into Geotif files inside *filepath* directory."""
    logging.basicConfig(level=logging.INFO, format='%(message)s')
    log = logging.getLogger(__name__)
    if dbio.tableExists(dbname, name, varname):
        db = dbio.connect(dbname)
        cur = db.cursor()
        sql = "select fdate,st_astiff(st_union(rast)) as tif from {0}.{1}".format(
            name, varname)
        try:
            sdt = datetime.strptime(startdate, "%Y-%m-%d")
            edt = datetime.strptime(enddate, "%Y-%m-%d")
            sql += " where fdate>=date'{0}' and fdate<=date'{1} group by fdate".format(
                sdt.strftime("%Y-%m-%d"), edt.strftime("%Y-%m-%d"))
        except ValueError:
            sql += " group by fdate"
            log.warning("Start and/or end dates were invalid. Ignoring...")
        cur.execute(sql)
        results = cur.fetchall()
        for res in results:
            with open(
                    "{0}/{1}_{2}.tif".format(filepath, varname,
                                             res[0].strftime("%Y%m%d")),
                    'wb') as fout:
                fout.write(res[1])
    else:
        log.error("Variable {0} does not exist in schema {1}.".format(
            varname, name))
Example #28
File: vic.py Project: nowucme/RHEAS
 def _stateToDb(self, statefilepath):
     """Add path to state file into database."""
     db = dbio.connect(self.dbname)
     cur = db.cursor()
     cur.execute(
         "select schema_name from information_schema.schemata where schema_name='{0}'"
         .format(self.name))
     if not bool(cur.rowcount):
         cur.execute("create schema {0}".format(self.name))
         db.commit()
     cur.execute(
         "select table_name from information_schema.tables where table_schema='{0}' and table_name='state'"
         .format(self.name))
     if not bool(cur.rowcount):
         sql = "create table {0}.state (filename text, fdate date)".format(
             self.name)
         cur.execute(sql)
         db.commit()
     statefile = "{0}/vic.state_{1:04d}{2:02d}{3:02d}".format(
         statefilepath, self.endyear, self.endmonth, self.endday)
     statedate = "{0}-{1}-{2}".format(self.endyear, self.endmonth,
                                      self.endday)
     cur.execute("select * from {0}.state where fdate=date '{1}'".format(
         self.name, statedate))
     if bool(cur.rowcount):
         sql = "update {0}.state set filename='{1}' where fdate=date '{2}'".format(
             self.name, statefile, statedate)
     else:
         sql = "insert into {0}.state values ('{1}', date '{2}')".format(
             self.name, statefile, statedate)
     cur.execute(sql)
     db.commit()
     cur.close()
     db.close()
Example #29
def ingest(dbname, varname, filename, dt, ens):
    """Imports Geotif *filename* into database *dbname*."""
    schema = {'Precipitation': 'precip', 'Temperature': 'tmax'}
    db = dbio.connect(dbname)
    cur = db.cursor()
    cur.execute(
        "select * from information_schema.tables where table_schema='{0}' and table_name='nmme'".format(schema[varname]))
    if not bool(cur.rowcount):
        cur.execute("create table {0}.nmme (rid serial not null primary key, fdate date, ensemble int, rast raster)".format(
            schema[varname]))
        db.commit()
    cur.execute("select * from {0}.nmme where fdate='{1}' and ensemble = {2}".format(schema[varname], dt.strftime("%Y-%m-%d"), ens))
    if bool(cur.rowcount):
        cur.execute("delete from {0}.nmme where fdate='{1}' and ensemble = {2}".format(schema[varname], dt.strftime("%Y-%m-%d"), ens))
        db.commit()
    dbio.ingest(dbname, filename, dt, "{0}.nmme".format(schema[varname]), False, False)
    sql = "update {0}.nmme set ensemble = {1} where ensemble is null".format(schema[varname], ens)
    cur.execute(sql)
    db.commit()
    cur.execute("select * from raster_resampled where sname='{0}' and tname like 'nmme_%'".format(schema[varname]))
    tables = [r[1] for r in cur.fetchall()]
    for table in tables:
        cur.execute("select * from {0}.{1} where fdate='{2}' and ensemble = {3}".format(schema[varname], table, dt.strftime("%Y-%m-%d"), ens))
        if bool(cur.rowcount):
            cur.execute("delete from {0}.{1} where fdate='{2}' and ensemble = {3}".format(schema[varname], table, dt.strftime("%Y-%m-%d"), ens))
            db.commit()
    tilesize = (10, 10)
    dbio.createResampledTables(dbname, schema[varname], "nmme", dt, tilesize, False, "and ensemble={0}".format(ens))
    _setEnsemble(dbname, schema[varname], ens)
    cur.close()
    db.close()
Example #30
 def _ESP(self, options):
     """Generate meteorological forcings using the Ensemble Streamflow Prediction method."""
     ndays = (date(self.endyear, self.endmonth, self.endday) -
              date(self.startyear, self.startmonth, self.startday)).days
     db = dbio.connect(self.models[0].dbname)
     cur = db.cursor()
     if self.startmonth < self.endmonth:
         sql = "select distinct (date_part('year', fdate)) as year from precip.{0} where date_part('month', fdate) >= {1} and date_part('month', fdate) <= {2}".format(options['vic']['precip'], self.startmonth, self.endmonth)
     else:
         sql = "select distinct (date_part('year', fdate)) as year from precip.{0} where date_part('month', fdate) >= {1} or date_part('month', fdate) <= {2}".format(options['vic']['precip'], self.startmonth, self.endmonth)
     cur.execute(sql)
     years = [int(y[0]) for y in cur.fetchall()]
     random.shuffle(years)
     while len(years) < self.nens:
         years += years
     for e in range(self.nens):
         model = self.models[e]
         model.startyear = years[e]
         t = date(model.startyear, model.startmonth,
                  model.startday) + timedelta(ndays)
         model.endyear, model.endmonth, model.endday = t.year, t.month, t.day
         prec, tmax, tmin, wind = model.getForcings(options['vic'])
         model.writeForcings(prec, tmax, tmin, wind)
     cur.close()
     db.close()
Example #31
 def _stateToDb(self, statefilepath):
     """Add path to state file into database."""
     db = dbio.connect(self.dbname)
     cur = db.cursor()
     cur.execute(
         "select schema_name from information_schema.schemata where schema_name='{0}'".format(self.name))
     if not bool(cur.rowcount):
         cur.execute("create schema {0}".format(self.name))
         db.commit()
     cur.execute(
         "select table_name from information_schema.tables where table_schema='{0}' and table_name='state'".format(self.name))
     if not bool(cur.rowcount):
         sql = "create table {0}.state (filename text, fdate date)".format(
             self.name)
         cur.execute(sql)
         db.commit()
     statefile = "{0}/vic.state_{1:04d}{2:02d}{3:02d}".format(
         statefilepath, self.endyear, self.endmonth, self.endday)
     statedate = "{0}-{1}-{2}".format(self.endyear,
                                      self.endmonth, self.endday)
     cur.execute(
         "select * from {0}.state where fdate=date '{1}'".format(self.name, statedate))
     if bool(cur.rowcount):
         sql = "update {0}.state set filename='{1}' where fdate=date '{2}'".format(
             self.name, statefile, statedate)
     else:
         sql = "insert into {0}.state values ('{1}', date '{2}')".format(
             self.name, statefile, statedate)
     cur.execute(sql)
     db.commit()
     cur.close()
     db.close()
Example #32
 def cultivar(self, ens, gid):
     """Retrieve Cultivar parameters for pixel and ensemble member."""
     db = dbio.connect(self.dbname)
     cur = db.cursor()
     if dbio.columnExists(self.dbname, "dssat", "cultivars", "name"):
         name_query = ",c.name"
     else:
         name_query = ""
     sql = "select p1,p2,p5,g2,g3,phint{3} from dssat.cultivars as c,{0}.agareas as a where crop='maize' and ensemble={1} and st_intersects(c.geom,a.geom) and a.gid={2}".format(
         self.name, ens + 1, gid, name_query)
     cur.execute(sql)
     if not bool(cur.rowcount):
         sql = "select p1,p2,p5,g2,g3,phint{3} from dssat.cultivars as c,{0}.agareas as a where crop='maize' and ensemble={1} and a.gid={2} order by st_centroid(c.geom) <-> st_centroid(a.geom)".format(
             self.name, ens + 1, gid, name_query)
         cur.execute(sql)
     if name_query:
         p1, p2, p5, g2, g3, phint, cname = cur.fetchone()
     else:
         p1, p2, p5, g2, g3, phint = cur.fetchone()
         cname = ""
     # FIXME: Should the name of the cultivar be reflected in the line below?
     cultivar = "990002 MEDIUM SEASON    IB0001  {0:.1f} {1:.3f} {2:.1f} {3:.1f}  {4:.2f} {5:.2f}".format(
         p1, p2, p5, g2, g3, phint)
     cur.close()
     db.close()
     self.cultivars[gid].append(cname)
     return cultivar
Example #33
 def planting(self, lat, lon, fromShapefile=False):
     """Retrieve planting dates for pixel."""
     if self.crop is None:
         self.crop = "maize"
     db = dbio.connect(self.dbname)
     cur = db.cursor()
     sql = "select st_value(rast,st_geomfromtext('POINT({0} {1})',4326)) as doy from crops.plantstart where type like '{2}' and st_intersects(rast,st_geomfromtext('POINT({0} {1})',4326)) order by doy".format(
         lon, lat, self.crop)
     cur.execute(sql)
     results = cur.fetchall()
     plantdates = [
         date(self.startyear, 1, 1) + timedelta(r[0] - 1) for r in results
         if r[0] is not None
     ]
     cur.close()
     db.close()
     startdt = date(self.startyear, self.startmonth, self.startday)
     planting = [
         p for p in plantdates if p >= startdt
         and p <= date(self.endyear, self.endmonth, self.endday)
     ]
     if not planting:
         # fall back to the most recent planting date before the simulation start
         earlier = [t for t in plantdates if t < startdt]
         if earlier:
             planting = [max(earlier)]
     return planting
Example #34
def calcSeverity(model, cid, varname="soil_moist"):
    """Calculate drought severity from *climatology* table stored in database."""
    log = logging.getLogger(__name__)
    outvars = model.getOutputStruct(model.model_path + "/global.txt")
    col = outvars[varname][1]
    if varname in ["soil_moist"]:
        p = np.loadtxt("{0}/{1}_{2:.{4}f}_{3:.{4}f}".format(model.model_path, outvars['runoff'][0], model.gid[cid][0], model.gid[cid][1], model.grid_decimal))[:, col:col+model.nlayers]
        p = pandas.Series(np.sum(p, axis=1), [datetime(model.startyear, model.startmonth, model.startday) + timedelta(t) for t in range(len(p))])
    else:
        p = np.loadtxt("{0}/{1}_{2:.{4}f}_{3:.{4}f}".format(model.model_path, outvars['runoff'][0], model.gid[cid][0], model.gid[cid][1], model.grid_decimal))[:, col]
        p = pandas.Series(p, [datetime(model.startyear, model.startmonth, model.startday) + timedelta(t) for t in range(len(p))])
    db = dbio.connect(model.dbname)
    cur = db.cursor()
    if dbio.tableExists(model.dbname, model.name, varname):
        if varname in ["soil_moist"]:
            lvar = ",layer"
        else:
            lvar = ""
        if dbio.columnExists(model.dbname, model.name, varname, "ensemble"):
            fsql = "with f as (select fdate{3},avg(st_value(rast,st_geomfromtext('POINT({0} {1})',4326))) as vals from {2}.{4} where st_intersects(rast,st_geomfromtext('POINT({0} {1})',4326)) group by fdate{3})".format(model.gid[cid][1], model.gid[cid][0], model.name, lvar, varname)
        else:
            fsql = "with f as (select fdate{3},st_value(rast,st_geomfromtext('POINT({0} {1})',4326)) as vals from {2}.{4} where st_intersects(rast,st_geomfromtext('POINT({0} {1})',4326)))".format(model.gid[cid][1], model.gid[cid][0], model.name, lvar, varname)
        sql = "{0} select fdate,sum(vals) from f group by fdate".format(fsql)
        cur.execute(sql)
        if bool(cur.rowcount):
            results = cur.fetchall()
            clim = pandas.Series([r[1] for r in results], [r[0] for r in results])
        else:
            clim = p
    else:
        log.warning("Climatology table does not exist. Severity calculation will be inaccurate!")
        clim = p
    s = 100.0 - np.array([stats.percentileofscore(clim, v) for v in p])
    return s
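A minimal sketch of the percentile-based severity used in the last line above, on synthetic data; scipy.stats.percentileofscore is the same call assumed by calcSeverity.

import numpy as np
from scipy import stats

clim = np.random.default_rng(2).gamma(3.0, 20.0, 365)  # synthetic climatology
obs = np.array([10.0, 55.0, 120.0])                     # synthetic current values
severity = 100.0 - np.array([stats.percentileofscore(clim, v) for v in obs])
print(severity)  # drier (lower) values map to severities closer to 100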
Example #35
def run():
    """Main RHEAS routine."""
    config_filename, dbname, db_update = parseArgs()
    if dbname is None:
        dbname = "rheas"
    dbio.connect(dbname)
    # check if database update is requested
    if db_update:
        print "Updating database!"
        update(dbname, config_filename)
    else:
        options = config.loadFromFile(config_filename)
        # check what simulations have been requested
        if "nowcast" in options:
            nowcast.execute(dbname, options)
        if "forecast" in options:
            forecast.execute(dbname, options)
Example #36
File: vic.py Project: nowucme/RHEAS
 def _dropIndexTable(self, sname):
     """Deletes index table."""
     db = dbio.connect(self.dbname)
     cur = db.cursor()
     cur.execute("drop table {0}_xy".format(sname))
     db.commit()
     cur.close()
     db.close()
Example #37
 def testDeterministicVIC(self):
     nowcast.execute(self.dbname, self.options)
     db = dbio.connect(self.dbname)
     cur = db.cursor()
     cur.execute("select * from information_schema.tables where table_name='runoff' and table_schema='basin'")
     assert bool(cur.rowcount) is True
     cur.close()
     db.close()
Example #38
def run():
    """Main RHEAS routine."""
    config_filename, dbname, db_update = parseArgs()
    if dbname is None:
        dbname = "rheas"
    dbio.connect(dbname)
    # check if database update is requested
    if db_update:
        print "Updating database!"
        update(dbname, config_filename)
    else:
        options = config.loadFromFile(config_filename)
        # check what simulations have been requested
        if "nowcast" in options:
            nowcast.execute(dbname, options)
        if "forecast" in options:
            forecast.execute(dbname, options)
Example #39
 def tearDown(self):
     """Clean up data generated after each unit test."""
     db = dbio.connect(self.dbname)
     cur = db.cursor()
     cur.execute("drop schema {0} cascade".format(self.options['forecast']['name']))
     db.commit()
     cur.close()
     db.close()
Example #40
 def _dropIndexTable(self, sname):
     """Deletes index table."""
     db = dbio.connect(self.dbname)
     cur = db.cursor()
     cur.execute("drop table {0}_xy".format(sname))
     db.commit()
     cur.close()
     db.close()
Example #41
 def tearDown(self):
     db = dbio.connect(self.dbname)
     cur = db.cursor()
     cur.execute("drop schema {0} cascade".format(
         self.options['forecast']['name']))
     db.commit()
     cur.close()
     db.close()
Example #42
 def __call__(self, t):
     db = dbio.connect(self.dbname)
     cur = db.cursor()
     var = self.rtable.split(".")[0]
     sql = "select gid,fdate,st_nearestvalue(rast,x,y) from {0},{1}_xy where rid=tile and tile={8} and fdate>=date'{2}-{3}-{4}' and fdate<=date'{5}-{6}-{7}' order by gid,fdate".format(
         self.rtable, var, self.startyear, self.startmonth, self.startday, self.endyear, self.endmonth, self.endday, t)
     cur.execute(sql)
     data = cur.fetchall()
     return data
Example #43
def addCultivar(dbname, shapefile, params, nens=40, crop="maize"):
    """Add cultivar parameters to the database *dbname* corresponding
    to the area defined in the *shapefile*. The *params* is a list of dictionaries,
    where the keys of each dictionary correspond to parameters, and each object in
    the list corresponds to a cultivar variant. The *nens* parameter is the size
    of the ensemble to be created."""
    temptable = ''.join(random.SystemRandom().choice(string.ascii_letters)
                        for _ in range(8))
    if os.path.exists(shapefile):
        subprocess.call(
            "{0}/shp2pgsql -d -s 4326 -g geom {1} {2} | {0}/psql -d {3}".
            format(rpath.bins, shapefile, temptable, dbname),
            shell=True)
        db = dbio.connect(dbname)
        cur = db.cursor()
        e = 0
        while e < nens:
            for c in range(len(params)):
                if crop == "maize" and all(
                        p in params[c]
                        for p in ['p1', 'p2', 'p5', 'g2', 'g3', 'phint']):
                    if e < nens:
                        sql = "insert into dssat.cultivars (geom) (select geom from {0})".format(
                            temptable)
                        cur.execute(sql)
                        sql = "update dssat.cultivars set crop='maize',ensemble={0},{1} where ensemble is null".format(
                            e + 1, ",".join([
                                "{0}={1}".format(k, params[c][k])
                                for k in params[c]
                            ]))
                        cur.execute(sql)
                        e += 1
                elif crop == "rice" and all(
                        p in params[c] for p in
                    ['p1', 'p2r', 'p5', 'p2o', 'g1', 'g2', 'g3', 'g4']):
                    if e < nens:
                        sql = "insert into dssat.cultivars (geom) (select geom from {0})".format(
                            temptable)
                        cur.execute(sql)
                        sql = "update dssat.cultivars set crop='rice',ensemble={0},{1} where ensemble is null".format(
                            e + 1, ",".join([
                                "{0}={1}".format(k, params[c][k])
                                for k in params[c]
                            ]))
                        cur.execute(sql)
                        e += 1
                else:
                    print("Missing parameters for {0} crop".format(crop))
                    params.pop(c)  # remove element with missing parameters
                    break
        cur.execute("drop table {0}".format(temptable))
        db.commit()
        cur.close()
        db.close()
    else:
        print("Shapefile {0} cannot be found. Not adding cultivars!".format(
            shapefile))
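A hedged usage example with a hypothetical shapefile and a two-variant list of maize cultivar parameters; the parameter values are illustrative only.

params = [
    {'p1': 200.0, 'p2': 0.5, 'p5': 800.0, 'g2': 700.0, 'g3': 8.5, 'phint': 45.0},
    {'p1': 240.0, 'p2': 0.6, 'p5': 850.0, 'g2': 750.0, 'g3': 9.0, 'phint': 49.0},
]
addCultivar("rheas", "data/maize_areas.shp", params, nens=40, crop="maize")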
Example #44
 def _getTiles(self, itable):
     """Get raster tile IDs for the domain."""
     db = dbio.connect(self.dbname)
     cur = db.cursor()
     cur.execute("select distinct(tile) from {0}".format(itable))
     tiles = [int(r[0]) for r in cur.fetchall()]
     cur.close()
     db.close()
     return tiles
Example #45
File: vic.py Project: nowucme/RHEAS
 def _getTiles(self, itable):
     """Get raster tile IDs for the domain."""
     db = dbio.connect(self.dbname)
     cur = db.cursor()
     cur.execute("select distinct(tile) from {0}".format(itable))
     tiles = [int(r[0]) for r in cur.fetchall()]
     cur.close()
     db.close()
     return tiles
Example #46
 def __call__(self, t):
     db = dbio.connect(self.dbname)
     cur = db.cursor()
     var = self.rtable.split(".")[0]
     sql = "select gid,fdate,st_nearestvalue(rast,x,y) from {0},{1}_xy where rid=tile and tile={8} and fdate>=date'{2}-{3}-{4}' and fdate<=date'{5}-{6}-{7}' order by gid,fdate".format(
         self.rtable, var, self.startyear, self.startmonth, self.startday, self.endyear, self.endmonth, self.endday, t)
     cur.execute(sql)
     data = cur.fetchall()
     return data
Example #47
 def __init__(self,
              path,
              dbname,
              resolution,
              startyear,
              startmonth,
              startday,
              endyear,
              endmonth,
              endday,
              name="",
              savestate="",
              nlayer=3):
     log = logging.getLogger(__name__)
     self.model_path = path
     self.nodata = -9999.
     if bool(name):
         self.name = name
     else:
         self.name = None
     self.startyear = startyear
     self.startmonth = startmonth
     self.startday = startday
     self.startdate = datetime(startyear, startmonth, startday)
     self.endyear = endyear
     self.endmonth = endmonth
     self.endday = endday
     self.enddate = datetime(endyear, endmonth, endday)
     self.nlayers = nlayer
     self.dbname = dbname
     db = dbio.connect(dbname)
     cur = db.cursor()
     cur.execute(
         "select resolution from vic.input order by abs(resolution - {0})".
         format(resolution))
     if not bool(cur.rowcount):
         log.error(
             "No appropriate VIC input files found in the database. Exiting!"
         )
         sys.exit()
     self.res = cur.fetchone()[0]
     cur.close()
     try:
         self.grid_decimal = - \
             (decimal.Decimal(self.res).as_tuple().exponent - 1)
     except:
         self.grid_decimal = - \
             (decimal.Decimal(str(self.res)).as_tuple().exponent - 1)
     self.lat = []
     self.lon = []
     self.gid = OrderedDict()
     self.lgid = OrderedDict()
     self.depths = OrderedDict()
     self.skipyear = 0
     self.elev = OrderedDict()
     self.statefile = ""
Example #48
File: test.py Project: maduhu/RHEAS
 def tearDown(self):
     db = dbio.connect(self.dbname)
     cur = db.cursor()
     for table in ["precip.chirps", "precip.trmm", "tmax.ncep", "tmin.ncep", "wind.ncep"]:
         cur.execute("drop table {0}".format(table))
         cur.execute("drop table {0}_4".format(table))
     cur.execute("drop schema soilmoist cascade")
     db.commit()
     cur.close()
     db.close()
Example #49
 def _getTileData(self, rtable, t):
     """Retrieve data from *rtable* for specific tile *t*."""
     db = dbio.connect(self.dbname)
     cur = db.cursor()
     var = rtable.split(".")[0]
     sql = "select gid,fdate,st_value(rast,x,y) from {0},{1}_xy where rid=tile and tile={8} and fdate>=date'{2}-{3}-{4}' and fdate<=date'{5}-{6}-{7}' order by gid,fdate".format(
         rtable, var, self.startyear, self.startmonth, self.startday, self.endyear, self.endmonth, self.endday, t)
     cur.execute(sql)
     data = cur.fetchall()
     return data
Example #50
 def yieldTable(self):
     """Create table for crop yield statistics and crop type."""
     super(Model, self).yieldTable()
     db = dbio.connect(self.dbname)
     cur = db.cursor()
     sql = "update {0}.yield set crop='rice' where crop is null".format(self.name)
     cur.execute(sql)
     db.commit()
     cur.close()
     db.close()
Example #51
 def writeSoilFile(self, shapefile):
     """Write soil parameter file for current simulation based on basin shapefile."""
     ds = ogr.Open(shapefile)
     lyr = ds.GetLayer()
     db = dbio.connect(self.dbname)
     cur = db.cursor()
     cur.execute(
         "select * from information_schema.tables where table_name='basin' and table_schema=%s", (self.name,))
     if not bool(cur.rowcount):
         temptable = ''.join(random.SystemRandom().choice(
             string.ascii_letters) for _ in range(8))
         cur.execute(
             "create table {0}(gid serial primary key, geom geometry)".format(temptable))
         for i in range(lyr.GetFeatureCount()):
             f = lyr.GetNextFeature()
             g = f.GetGeometryRef()
             cur.execute("insert into {0}(geom) values(st_geomfromtext('{1}',4326))".format(
                 temptable, g.ExportToWkt()))
         sql = "select updategeometrysrid('{0}','geom',4326)".format(
             temptable)
         db.commit()
         cur.execute(
             "create index {0}_s on {0} using gist(geom)".format(temptable))
         ds = None
         cur.execute(
             "select schema_name from information_schema.schemata where schema_name='{0}'".format(self.name))
         if not bool(cur.rowcount):
             cur.execute("create schema {0}".format(self.name))
         sql = "create table {0}.basin (gid integer, elev real, depths real[], geom geometry(Point, 4326), line text, constraint {0}_gidkey primary key(gid),  CONSTRAINT enforce_dims_geom CHECK (st_ndims(geom) = 2), CONSTRAINT enforce_geotype_geom CHECK (geometrytype(geom) = 'POINT'::text OR geom IS NULL))".format(self.name)
         cur.execute(sql)
         sql = "insert into {0}.basin (gid, elev, depths, geom, line) select v.id,v.elev,v.depths,v.geom,v.line from vic.soils as v,{1} as t where st_intersects(v.geom,t.geom) and resolution={2}".format(
             self.name, temptable, self.res)
         cur.execute(sql)
         cur.execute("drop table {0}".format(temptable))
         db.commit()
         cur.execute(
             "create index basin_s on {0}.basin using gist(geom)".format(self.name))
         db.commit()
     sql = "select line,gid,st_y(geom),st_x(geom),elev,depths from {0}.basin order by gid".format(
         self.name)
     cur.execute(sql)
     lines = cur.fetchall()
     with open(self.model_path + '/soil.txt', 'w') as fout:
         for line in lines:
             gid, lat, lon, elev, depths = line[1:]
             fout.write("{0}\n".format(line[0]))
             self.lat.append(lat)
             self.lon.append(lon)
             self.gid[gid] = (lat, lon)
             self.lgid[(lat, lon)] = gid
             self.depths[gid] = depths
             self.elev[gid] = elev
     cur.execute("alter table {0}.basin drop column line".format(self.name))
     cur.close()
     db.close()
Example #52
def _deleteTableIfExists(dbname, sname, tname):
    """Check if table exists and delete it."""
    db = dbio.connect(dbname)
    cur = db.cursor()
    cur.execute(
        "select * from information_schema.tables where table_schema='{0}' and table_name='{1}'".format(sname, tname))
    if bool(cur.rowcount):
        cur.execute("drop table {0}.{1}".format(sname, tname))
        db.commit()
    cur.close()
    db.close()
Example #53
 def _calcCroplandFract(self):
     """Calculate fraction of cropland for specific pixel."""
     db = dbio.connect(self.dbname)
     cur = db.cursor()
     sql = "select gid,avg((st_summarystats(st_clip(rast,geom))).mean) from dssat.cropland,{0}.agareas where st_intersects(rast,geom) group by gid order by gid".format(
         self.name)
     cur.execute(sql)
     fract = dict((r[0], r[1]) for r in cur.fetchall())
     cur.close()
     db.close()
     return fract
Example #54
 def paramFromDB(self):
     """Retrieve file parameters from database."""
     db = dbio.connect(self.dbname)
     cur = db.cursor()
     # cur = self.db.cursor()
     cur.execute(
         'select veglib,vegparam,snowbandfile from vic.input where resolution=%f;' % self.res)
     veglib, vegparam, snowbands = cur.fetchone()
     cur.close()
     db.close()
     return veglib, vegparam, snowbands