Example #1
def _saveRasters(filepath, name, varname, startdate, enddate, dbname):
    """"Save geophysical variable from *dbname* database, between *startdate*
    and *enddate* dates into Geotif files inside *filepath* directory."""
    logging.basicConfig(level=logging.INFO, format='%(message)s')
    log = logging.getLogger(__name__)
    if dbio.tableExists(dbname, name, varname):
        db = dbio.connect(dbname)
        cur = db.cursor()
        sql = "select fdate,st_astiff(st_union(rast)) as tif from {0}.{1}".format(
            name, varname)
        try:
            sdt = datetime.strptime(startdate, "%Y-%m-%d")
            edt = datetime.strptime(enddate, "%Y-%m-%d")
            sql += " where fdate>=date'{0}' and fdate<=date'{1} group by fdate".format(
                sdt.strftime("%Y-%m-%d"), edt.strftime("%Y-%m-%d"))
        except ValueError:
            sql += " group by fdate"
            log.warning("Start and/or end dates were invalid. Ignoring...")
        cur.execute(sql)
        results = cur.fetchall()
        for res in results:
            with open(
                    "{0}/{1}_{2}.tif".format(filepath, varname,
                                             res[0].strftime("%Y%m%d")),
                    'wb') as fout:
                fout.write(res[1])
    else:
        log.error("Variable {0} does not exist in schema {1}.".format(
            varname, name))
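A minimal usage sketch for the function above (the directory, schema, variable, date range and database name are hypothetical placeholders; dbio and the PostGIS tables are assumed to be already set up):

# Hypothetical call: export daily "rainf" rasters from the "basin" schema of the
# "rheas" database into /tmp/rasters, one GeoTIFF per date.
_saveRasters("/tmp/rasters", "basin", "rainf", "2010-01-01", "2010-01-31", "rheas")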
Example #2
def calcSeverity(model, cid, varname="soil_moist"):
    """Calculate drought severity from *climatology* table stored in database."""
    log = logging.getLogger(__name__)
    outvars = model.getOutputStruct(model.model_path + "/global.txt")
    col = outvars[varname][1]
    if varname in ["soil_moist"]:
        p = np.loadtxt("{0}/{1}_{2:.{4}f}_{3:.{4}f}".format(model.model_path, outvars['runoff'][0], model.gid[cid][0], model.gid[cid][1], model.grid_decimal))[:, col:col+model.nlayers]
        p = pandas.Series(np.sum(p, axis=1), [datetime(model.startyear, model.startmonth, model.startday) + timedelta(t) for t in range(len(p))])
    else:
        p = np.loadtxt("{0}/{1}_{2:.{4}f}_{3:.{4}f}".format(model.model_path, outvars['runoff'][0], model.gid[cid][0], model.gid[cid][1], model.grid_decimal))[:, col]
        p = pandas.Series(p, [datetime(model.startyear, model.startmonth, model.startday) + timedelta(t) for t in range(len(p))])
    db = dbio.connect(model.dbname)
    cur = db.cursor()
    if dbio.tableExists(model.dbname, model.name, varname):
        if varname in ["soil_moist"]:
            lvar = ",layer"
        else:
            lvar = ""
        if dbio.columnExists(model.dbname, model.name, varname, "ensemble"):
            fsql = "with f as (select fdate{3},avg(st_value(rast,st_geomfromtext('POINT({0} {1})',4326))) as vals from {2}.{4} where st_intersects(rast,st_geomfromtext('POINT({0} {1})',4326)) group by fdate{3})".format(model.gid[cid][1], model.gid[cid][0], model.name, lvar, varname)
        else:
            fsql = "with f as (select fdate{3},st_value(rast,st_geomfromtext('POINT({0} {1})',4326)) as vals from {2}.{4} where st_intersects(rast,st_geomfromtext('POINT({0} {1})',4326)))".format(model.gid[cid][1], model.gid[cid][0], model.name, lvar, varname)
        sql = "{0} select fdate,sum(vals) from f group by fdate".format(fsql)
        cur.execute(sql)
        if bool(cur.rowcount):
            results = cur.fetchall()
            clim = pandas.Series([r[1] for r in results], [r[0] for r in results])
        else:
            clim = p
    else:
        log.warning("Climatology table does not exist. Severity calculation will be inaccurate!")
        clim = p
    s = 100.0 - np.array([stats.percentileofscore(clim, v) for v in p])
    return s
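The returned severity is simply 100 minus the empirical percentile of each value within the climatology, so unusually dry conditions map to high severity. A toy illustration with made-up numbers (nothing model-specific, only numpy and scipy):

import numpy as np
from scipy import stats

clim = np.array([10.0, 20.0, 30.0, 40.0, 50.0])  # illustrative climatology values
value = 20.0                                      # current value to rank against it
severity = 100.0 - stats.percentileofscore(clim, value)  # 100 - 40 = 60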
Example #3
def ingestSoils(dbname="rheas"):
    """Ingest soil information from downloaded files."""
    filenames = glob.glob("SoilGrids-for-DSSAT-10km v1.0 (by country)/*.SOL")
    db = dbio.connect(dbname)
    cur = db.cursor()
    if dbio.tableExists(dbname, "dssat", "soils"):
        print("Overwriting existing DSSAT soils table in database!")
        cur.execute("drop table dssat.soils")
        db.commit()
    cur.execute(
        "create table dssat.soils (rid serial primary key, geom geometry(Point, 4326), props text)"
    )
    db.commit()
    for filename in filenames:
        try:
            profiles = parseSolFile(filename)
            for latlon in profiles:
                lat, lon = latlon
                sql = "insert into dssat.soils (geom, props) values (st_geomfromtext('POINT({0} {1})', 4326), '{2}')".format(
                    lon, lat, profiles[latlon])
                cur.execute(sql)
        except Exception:
            print("Cannot process file {0}".format(filename))
    db.commit()
    cur.close()
    db.close()
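Because the serialized profile text is interpolated directly into the SQL string, a profile containing an apostrophe would break the insert; a parameterized variant (a sketch, assuming cur is a psycopg2-style cursor that accepts %s placeholders) avoids the quoting issue:

# Sketch of a parameterized insert; lon, lat and profiles[latlon] as in the loop above.
sql = ("insert into dssat.soils (geom, props) "
       "values (st_geomfromtext(%s, 4326), %s)")
cur.execute(sql, ("POINT({0} {1})".format(lon, lat), profiles[latlon]))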
Example #4
File: drought.py  Project: denismeia/mypy
def calcSMDI(model, cid):
    """Calculate Soil Moisture Deficit Index (Narasimhan & Srinivasan, 2005)."""
    log = logging.getLogger(__name__)
    outvars = model.getOutputStruct(model.model_path + "/global.txt")
    col = outvars['soil_moist'][1]
    p = np.loadtxt("{0}/{1}_{2:.{4}f}_{3:.{4}f}".format(
        model.model_path, outvars['soil_moist'][0], model.gid[cid][0],
        model.gid[cid][1], model.grid_decimal))[:, col:col + model.nlayers]
    p = pandas.Series(np.sum(p, axis=1), [
        datetime(model.startyear, model.startmonth, model.startday) +
        timedelta(t) for t in range(len(p))
    ])
    db = dbio.connect(model.dbname)
    cur = db.cursor()
    if dbio.tableExists(model.dbname, model.name, "soil_moist"):
        if dbio.columnExists(model.dbname, model.name, "soil_moist",
                             "ensemble"):
            fsql = "with f as (select fdate,layer,avg(st_value(rast,st_geomfromtext('POINT({0} {1})',4326))) as sm from {2}.soil_moist where st_intersects(rast,st_geomfromtext('POINT({0} {1})',4326)) group by fdate,layer)".format(
                model.gid[cid][1], model.gid[cid][0], model.name)
        else:
            fsql = "with f as (select fdate,layer,st_value(rast,st_geomfromtext('POINT({0} {1})',4326)) as sm from {2}.soil_moist where st_intersects(rast,st_geomfromtext('POINT({0} {1})',4326)))".format(
                model.gid[cid][1], model.gid[cid][0], model.name)
        sql = "{0} select fdate,sum(sm) from f group by fdate".format(fsql)
        cur.execute(sql)
        if bool(cur.rowcount):
            results = cur.fetchall()
            clim = pandas.Series([r[1] for r in results],
                                 [r[0] for r in results])
        else:
            clim = p
    else:
        log.warning(
            "Climatology table does not exist. SMDI calculation will be inaccurate!"
        )
        clim = p
    smdi = np.zeros(len(p))
    MSW = clim.median()
    maxSW = clim.max()
    minSW = clim.min()
    for i in range(7, len(smdi)):
        SW = np.median(p[i - 7:i + 1])
        if SW <= MSW:
            # deficit relative to the climatological median, scaled by the dry range
            SD = (SW - MSW) / (MSW - minSW) * 100.0
        else:
            # surplus, scaled by the wet range
            SD = (SW - MSW) / (maxSW - MSW) * 100.0
        if i > 7:
            smdi[i] = 0.5 * smdi[i - 1] + SD / 50.0
        else:
            smdi[i] = SD / 50.0
    cur.close()
    db.close()
    return smdi
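To make the recursion concrete, here is a single toy step (the numbers are made up, not model output), following the deficit branch and the 0.5 * smdi[i-1] + SD / 50 update used above:

# Illustrative single step of the SMDI recursion (values are made up).
MSW, maxSW, minSW = 30.0, 50.0, 10.0        # climatological median, max and min
SW = 20.0                                   # current weekly median soil moisture
SD = (SW - MSW) / (MSW - minSW) * 100.0     # deficit branch: (20-30)/(30-10)*100 = -50
smdi_prev = -1.0                            # previous week's SMDI
smdi_now = 0.5 * smdi_prev + SD / 50.0      # 0.5*(-1.0) + (-50/50) = -1.5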
Example #5
File: drought.py  Project: nasa/RHEAS
def calcSMDI(model, cid):
    """Calculate Soil Moisture Deficit Index (Narasimhan & Srinivasan, 2005)."""
    log = logging.getLogger(__name__)
    outvars = model.getOutputStruct(model.model_path + "/global.txt")
    col = outvars['soil_moist'][1]
    p = np.loadtxt("{0}/{1}_{2:.{4}f}_{3:.{4}f}".format(model.model_path, outvars['soil_moist'][0], model.gid[cid][0], model.gid[cid][1], model.grid_decimal))[:, col:col+model.nlayers]
    p = pandas.Series(np.sum(p, axis=1), [datetime(model.startyear, model.startmonth, model.startday) + timedelta(t) for t in range(len(p))])
    db = dbio.connect(model.dbname)
    cur = db.cursor()
    if dbio.tableExists(model.dbname, model.name, "soil_moist"):
        if dbio.columnExists(model.dbname, model.name, "soil_moist", "ensemble"):
            fsql = "with f as (select fdate,layer,avg(st_value(rast,st_geomfromtext('POINT({0} {1})',4326))) as sm from {2}.soil_moist where st_intersects(rast,st_geomfromtext('POINT({0} {1})',4326)) group by fdate,layer)".format(model.gid[cid][1], model.gid[cid][0], model.name)
        else:
            fsql = "with f as (select fdate,layer,st_value(rast,st_geomfromtext('POINT({0} {1})',4326)) as sm from {2}.soil_moist where st_intersects(rast,st_geomfromtext('POINT({0} {1})',4326)))".format(model.gid[cid][1], model.gid[cid][0], model.name)
        sql = "{0} select fdate,sum(sm) from f group by fdate".format(fsql)
        cur.execute(sql)
        if bool(cur.rowcount):
            results = cur.fetchall()
            clim = pandas.Series([r[1] for r in results], [r[0] for r in results])
        else:
            clim = p
    else:
        log.warning("Climatology table does not exist. SMDI calculation will be inaccurate!")
        clim = p
    smdi = np.zeros(len(p))
    MSW = clim.median()
    maxSW = clim.max()
    minSW = clim.min()
    for i in range(7, len(smdi)):
        SW = np.median(p[i-7:i+1])
        if SW <= MSW:
            # deficit relative to the climatological median, scaled by the dry range
            SD = (SW - MSW) / (MSW - minSW) * 100.0
        else:
            # surplus, scaled by the wet range
            SD = (SW - MSW) / (maxSW - MSW) * 100.0
        if i > 7:
            smdi[i] = 0.5 * smdi[i-1] + SD / 50.0
        else:
            smdi[i] = SD / 50.0
    cur.close()
    db.close()
    return smdi
Example #6
def _saveRasters(filepath, name, varname, startdate, enddate, dbname):
    """"Save geophysical variable from *dbname* database, between *startdate*
    and *enddate* dates into Geotif files inside *filepath* directory."""
    logging.basicConfig(level=logging.INFO, format='%(message)s')
    log = logging.getLogger(__name__)
    if dbio.tableExists(dbname, name, varname):
        db = dbio.connect(dbname)
        cur = db.cursor()
        sql = "select fdate,st_astiff(st_union(rast)) as tif from {0}.{1}".format(name, varname)
        try:
            sdt = datetime.strptime(startdate, "%Y-%m-%d")
            edt = datetime.strptime(enddate, "%Y-%m-%d")
            sql += " where fdate>=date'{0}' and fdate<=date'{1} group by fdate".format(sdt.strftime("%Y-%m-%d"), edt.strftime("%Y-%m-%d"))
        except ValueError:
            sql += " group by fdate"
            log.warning("Start and/or end dates were invalid. Ignoring...")
        cur.execute(sql)
        results = cur.fetchall()
        for res in results:
            with open("{0}/{1}_{2}.tif".format(filepath, varname, res[0].strftime("%Y%m%d")), 'wb') as fout:
                fout.write(res[1])
    else:
        log.error("Variable {0} does not exist in schema {1}.".format(varname, name))
Example #7
def ingestSoils(dbname="rheas"):
    """Ingest soil information from downloaded files."""
    filenames = glob.glob("SoilGrids-for-DSSAT-10km v1.0 (by country)/*.SOL")
    db = dbio.connect(dbname)
    cur = db.cursor()
    if dbio.tableExists(dbname, "dssat", "soils"):
        print("Overwriting existing DSSAT soils table in database!")
        cur.execute("drop table dssat.soils")
        db.commit()
    cur.execute("create table dssat.soils (rid serial primary key, geom geometry(Point, 4326), props text)")
    db.commit()
    for filename in filenames:
        try:
            profiles = parseSolFile(filename)
            for latlon in profiles:
                lat, lon = latlon
                sql = "insert into dssat.soils (geom, props) values (st_geomfromtext('POINT({0} {1})', 4326), '{2}')".format(lon, lat, profiles[latlon])
                cur.execute(sql)
        except Exception:
            print("Cannot process file {0}".format(filename))
    db.commit()
    cur.close()
    db.close()
Example #8
File: vic.py  Project: muguangyuze/RHEAS
 def writeToDB(self, data, dates, tablename, initialize, ensemble=False, skipsave=0):
     """Writes output data into database."""
     db = dbio.connect(self.dbname)
     cur = db.cursor()
     if dbio.tableExists(self.dbname, self.name, tablename) and ensemble and not dbio.columnExists(self.dbname, self.name, tablename, "ensemble"):
         print("WARNING! Table {0} exists but does not contain ensemble information. Overwriting entire table!")
         cur.execute("drop table {0}.{1}".format(self.name, tablename))
         db.commit()
     if dbio.tableExists(self.dbname, self.name, tablename):
         if initialize:
             for dt in [self.startdate + timedelta(t) for t in range((self.enddate - self.startdate).days+1)]:
                 dbio.deleteRasters(self.dbname, "{0}.{1}".format(self.name, tablename), dt)
     else:
         sql = "create table {0}.{1} (id serial not null primary key, rid int not null, fdate date not null, rast raster)".format(
             self.name, tablename)
         cur.execute(sql)
         if data.shape[1] > 1:
             cur.execute("alter table {0}.{1} add column layer int".format(self.name, tablename))
         if ensemble:
             cur.execute("alter table {0}.{1} add column ensemble int".format(self.name, tablename))
         db.commit()
     startyear, startmonth, startday = self.startyear, self.startmonth, self.startday
     if skipsave > 0:
         ts = date(self.startyear, self.startmonth,
                   self.startday) + timedelta(skipsave)
         data = data[skipsave:]
         startyear, startmonth, startday = ts.year, ts.month, ts.day
     tiffiles = []
     for t in range(data.shape[0]):
         dt = date(startyear, startmonth, startday) + timedelta(t)
         for lyr in range(data.shape[1]):
             filename = "{0}/{1}_{2}{3:02d}{4:02d}_{5:02d}.tif".format(
                 self.model_path, tablename, dt.year, dt.month, dt.day, lyr + 1)
             self._writeRaster(data[t, lyr, :, :], filename)
             tiffiles.append(filename)
     ps1 = subprocess.Popen(["{0}/raster2pgsql".format(rpath.bins), "-s", "4326", "-F", "-d", "-t", "auto"] + tiffiles + ["temp"], stdout=subprocess.PIPE)
     ps2 = subprocess.Popen(["{0}/psql".format(rpath.bins), "-d", self.dbname], stdin=ps1.stdout, stdout=subprocess.PIPE)
     ps1.stdout.close()
     ps2.communicate()[0]
     ps1.wait()
     cur.execute("alter table temp add column fdate date")
     cur.execute("update temp set fdate = date (concat_ws('-',substring(filename from {0} for 4),substring(filename from {1} for 2),substring(filename from {2} for 2)))".format(
         len(tablename) + 2, len(tablename) + 6, len(tablename) + 8))
     if data.shape[1] > 1:
         cur.execute("alter table temp add column layer int")
         cur.execute("update temp set layer=(substring(filename from {0} for 2))::int".format(
             len(tablename) + 11))
     cur.execute("select count(*) from temp")
     n = int(cur.fetchone()[0])
      ntiles = n // data.shape[0]  # integer number of raster tiles per date
     if data.shape[1] > 1:
         cur.execute(
             "insert into {0}.{1} (rid,fdate,layer,rast) select ((rid+{2}) % {2})+1,fdate,layer,rast from temp".format(self.name, tablename, ntiles))
     else:
         cur.execute(
             "insert into {0}.{1} (rid,fdate,rast) select ((rid+{2}) % {2})+1,fdate,rast from temp".format(self.name, tablename, ntiles))
     if bool(ensemble):
         sql = "update {0}.{1} set ensemble = {2} where ensemble is null".format(
             self.name, tablename, int(ensemble))
         cur.execute(sql)
     cur.execute("drop index if exists {0}.{1}_dtidx".format(
         self.name, tablename))
     cur.execute("create index {1}_dtidx on {0}.{1}(fdate)".format(
         self.name, tablename))
     cur.execute("drop index if exists {0}.{1}_spidx".format(
         self.name, tablename))
     cur.execute("create index {1}_spidx on {0}.{1} using gist(st_convexhull(rast))".format(
         self.name, tablename))
     db.commit()
     cur.close()
     db.close()
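The date and layer columns of the temp table are recovered from the GeoTIFF filenames purely by character offsets relative to the table name; a small Python sketch of the same offsets (the filename pattern is taken from the snippet, the values are illustrative):

tablename = "soil_moist"
filename = "{0}_{1}{2:02d}{3:02d}_{4:02d}.tif".format(tablename, 2010, 3, 7, 1)
# filename == "soil_moist_20100307_01.tif"
year = filename[len(tablename) + 1:len(tablename) + 5]          # SQL: substring(... from len+2 for 4)
month = filename[len(tablename) + 5:len(tablename) + 7]         # SQL: substring(... from len+6 for 2)
day = filename[len(tablename) + 7:len(tablename) + 9]           # SQL: substring(... from len+8 for 2)
layer = int(filename[len(tablename) + 10:len(tablename) + 12])  # SQL: substring(... from len+11 for 2)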
Example #9
File: vic.py  Project: nowucme/RHEAS
 def writeToDB(self,
               data,
               dates,
               tablename,
               initialize,
               ensemble=False,
               skipsave=0):
     """Writes output data into database."""
     db = dbio.connect(self.dbname)
     cur = db.cursor()
     if dbio.tableExists(self.dbname, self.name,
                         tablename) and ensemble and not dbio.columnExists(
                             self.dbname, self.name, tablename, "ensemble"):
          print("WARNING! Table {0} exists but does not contain ensemble information. Overwriting entire table!".format(tablename))
         cur.execute("drop table {0}.{1}".format(self.name, tablename))
         db.commit()
     if dbio.tableExists(self.dbname, self.name, tablename):
         if initialize:
             for dt in [
                     self.startdate + timedelta(t)
                     for t in range((self.enddate - self.startdate).days +
                                    1)
             ]:
                 dbio.deleteRasters(self.dbname,
                                    "{0}.{1}".format(self.name,
                                                     tablename), dt)
     else:
         sql = "create table {0}.{1} (id serial not null primary key, rid int not null, fdate date not null, rast raster)".format(
             self.name, tablename)
         cur.execute(sql)
         if data.shape[1] > 1:
             cur.execute("alter table {0}.{1} add column layer int".format(
                 self.name, tablename))
         if ensemble:
             cur.execute(
                 "alter table {0}.{1} add column ensemble int".format(
                     self.name, tablename))
         db.commit()
     startyear, startmonth, startday = self.startyear, self.startmonth, self.startday
     if skipsave > 0:
         ts = date(self.startyear, self.startmonth,
                   self.startday) + timedelta(skipsave)
         data = data[skipsave:]
         startyear, startmonth, startday = ts.year, ts.month, ts.day
     tiffiles = []
     for t in range(data.shape[0]):
         dt = date(startyear, startmonth, startday) + timedelta(t)
         for lyr in range(data.shape[1]):
             filename = "{0}/{1}_{2}{3:02d}{4:02d}_{5:02d}.tif".format(
                 self.model_path, tablename, dt.year, dt.month, dt.day,
                 lyr + 1)
             self._writeRaster(data[t, lyr, :, :], filename)
             tiffiles.append(filename)
     ps1 = subprocess.Popen([
         "{0}/raster2pgsql".format(rpath.bins), "-s", "4326", "-F", "-d",
         "-t", "auto"
     ] + tiffiles + ["temp"],
                            stdout=subprocess.PIPE)
     ps2 = subprocess.Popen(
         ["{0}/psql".format(rpath.bins), "-d", self.dbname],
         stdin=ps1.stdout,
         stdout=subprocess.PIPE)
     ps1.stdout.close()
     ps2.communicate()[0]
     ps1.wait()
     cur.execute("alter table temp add column fdate date")
     cur.execute(
         "update temp set fdate = date (concat_ws('-',substring(filename from {0} for 4),substring(filename from {1} for 2),substring(filename from {2} for 2)))"
         .format(
             len(tablename) + 2,
             len(tablename) + 6,
             len(tablename) + 8))
     if data.shape[1] > 1:
         cur.execute("alter table temp add column layer int")
         cur.execute(
             "update temp set layer=(substring(filename from {0} for 2))::int"
             .format(len(tablename) + 11))
     cur.execute("select count(*) from temp")
     n = int(cur.fetchone()[0])
      ntiles = n // data.shape[0]  # integer number of raster tiles per date
     if data.shape[1] > 1:
         cur.execute(
             "insert into {0}.{1} (rid,fdate,layer,rast) select ((rid+{2}) % {2})+1,fdate,layer,rast from temp"
             .format(self.name, tablename, ntiles))
     else:
         cur.execute(
             "insert into {0}.{1} (rid,fdate,rast) select ((rid+{2}) % {2})+1,fdate,rast from temp"
             .format(self.name, tablename, ntiles))
     if bool(ensemble):
         sql = "update {0}.{1} set ensemble = {2} where ensemble is null".format(
             self.name, tablename, int(ensemble))
         cur.execute(sql)
     cur.execute("drop index if exists {0}.{1}_dtidx".format(
         self.name, tablename))
     cur.execute("create index {1}_dtidx on {0}.{1}(fdate)".format(
         self.name, tablename))
     cur.execute("drop index if exists {0}.{1}_spidx".format(
         self.name, tablename))
     cur.execute(
         "create index {1}_spidx on {0}.{1} using gist(st_convexhull(rast))"
         .format(self.name, tablename))
     db.commit()
     cur.close()
     db.close()