Example #1
File: gpm.py Project: E-LLP/RHEAS
def download(dbname, dts, bbox):
    """Downloads the PRISM data products for a set of
    dates *dt* and imports them into the PostGIS database *dbname*."""
    url = "jsimpson.pps.eosdis.nasa.gov"
    ftp = FTP(url)
    # FIXME: Change to RHEAS-specific password
    ftp.login('*****@*****.**', '*****@*****.**')
    ftp.cwd("data/imerg/gis")
    outpath = tempfile.mkdtemp()
    for dt in [dts[0] + timedelta(t) for t in range((dts[-1] - dts[0]).days+1)]:
        try:
            ftp.cwd("/data/imerg/gis/{0}/{1:02d}".format(dt.year, dt.month))
            filenames = [f for f in ftp.nlst() if re.match(r"3B.*{0}.*S000000.*1day\.tif.*".format(dt.strftime("%Y%m%d")), f) is not None]
            if len(filenames) > 0:
                fname = filenames[0]
                with open("{0}/{1}".format(outpath, fname), 'wb') as f:
                    ftp.retrbinary("RETR {0}".format(fname), f.write)
                with open("{0}/{1}".format(outpath, fname.replace("tif", "tfw")), 'wb') as f:
                    ftp.retrbinary("RETR {0}".format(fname.replace("tif", "tfw")), f.write)
                tfname = fname.replace("tif", "tfw")
                fname = datasets.uncompress(fname, outpath)
                datasets.uncompress(tfname, outpath)
                subprocess.call(["gdalwarp", "-t_srs", "epsg:4326", "{0}/{1}".format(outpath, fname), "{0}/prec.tif".format(outpath)])
                if bbox is not None:
                    subprocess.call(["gdal_translate", "-a_srs", "epsg:4326", "-projwin", "{0}".format(bbox[0]), "{0}".format(bbox[3]), "{0}".format(bbox[2]), "{0}".format(bbox[1]), "{0}/prec.tif".format(outpath), "{0}/prec1.tif".format(outpath)])
                else:
                    subprocess.call(["gdal_translate", "-a_srs", "epsg:4326", "{0}/prec.tif".format(outpath), "{0}/prec1.tif".format(outpath)])
                # multiply by 0.1 to get mm/hr and 24 to get mm/day
                cmd = " ".join(["gdal_calc.py", "-A", "{0}/prec1.tif".format(outpath), "--outfile={0}/prec2.tif".format(outpath), "--calc=\"0.1*A\""])
                subprocess.call(cmd, shell=True)
                dbio.ingest(dbname, "{0}/prec2.tif".format(outpath), dt, table, False)
        except:
            print("WARNING! No data were available to import into {0} for {1}.".format(table, dt.strftime("%Y-%m-%d")))
Example #2
def download(dbname, dts, bbox):
    """Downloads the PRISM data products for a set of
    dates *dt* and imports them into the PostGIS database *dbname*."""
    log = logging.getLogger(__name__)
    url = "jsimpson.pps.eosdis.nasa.gov"
    ftp = FTP(url)
    # FIXME: Change to RHEAS-specific password
    ftp.login('*****@*****.**', '*****@*****.**')
    ftp.cwd("data/imerg/gis")
    outpath = tempfile.mkdtemp()
    for dt in [dts[0] + timedelta(t) for t in range((dts[-1] - dts[0]).days+1)]:
        try:
            if dt.year < datetime.today().year:
                ftp.cwd("/data/imerg/gis/{0}/{1:02d}".format(dt.year, dt.month))
            else:
                ftp.cwd("/data/imerg/gis/{0:02d}".format(dt.month))
            filenames = [f for f in ftp.nlst() if re.match("3B.*{0}.*E235959.*1day.tif".format(dt.strftime("%Y%m%d")), f) is not None]
            if len(filenames) > 0:
                fname = filenames[0]
                with open("{0}/{1}".format(outpath, fname), 'wb') as f:
                    ftp.retrbinary("RETR {0}".format(fname), f.write)
                with open("{0}/{1}".format(outpath, fname.replace("tif", "tfw")), 'wb') as f:
                    ftp.retrbinary("RETR {0}".format(fname.replace("tif", "tfw")), f.write)
                tfname = fname.replace("tif", "tfw")
                fname = datasets.uncompress(fname, outpath)
                datasets.uncompress(tfname, outpath)
                proc = subprocess.Popen(["gdalwarp", "-srcnodata", "29999", "-dstnodata", "-9999", "-overwrite", "-t_srs", "epsg:4326", "-ot", "Float32", "{0}/{1}".format(outpath, fname), "{0}/prec.tif".format(outpath)], stdout=subprocess.PIPE, stderr=subprocess.STDOUT)
                out, err = proc.communicate()
                log.debug(out)
                if bbox is not None:
                    proc = subprocess.Popen(["gdal_translate", "-ot", "Float32", "-a_srs", "epsg:4326", "-projwin", "{0}".format(bbox[0]), "{0}".format(bbox[3]), "{0}".format(bbox[2]), "{0}".format(bbox[1]), "{0}/prec.tif".format(outpath), "{0}/prec1.tif".format(outpath)], stdout=subprocess.PIPE, stderr=subprocess.STDOUT)
                    out, err = proc.communicate()
                    log.debug(out)
                else:
                    proc = subprocess.Popen(["gdal_translate", "-a_srs", "epsg:4326", "{0}/prec.tif".format(outpath), "{0}/prec1.tif".format(outpath)], stdout=subprocess.PIPE, stderr=subprocess.STDOUT)
                    out, err = proc.communicate()
                    log.debug(out)
                # multiply by 0.1 to get mm/hr and 24 to get mm/day
                proc = subprocess.Popen(["gdal_calc.py", "--NoDataValue=-9999", "-A", "{0}/prec1.tif".format(outpath), "--outfile={0}/prec2.tif".format(outpath), "--calc=0.1*A"], stdout=subprocess.PIPE, stderr=subprocess.STDOUT)
                out, err = proc.communicate()
                log.debug(out)
                dbio.ingest(dbname, "{0}/prec2.tif".format(outpath), dt, table, True)
        except:
            log.warning("No data were available to import into {0} for {1}.".format(table, dt.strftime("%Y-%m-%d")))
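The Popen/communicate/log.debug sequence repeats for every GDAL call in this variant; a small helper along the following lines could factor it out. This is only a sketch, assuming the caller just needs the combined stdout/stderr logged and the exit code returned:

import logging
import subprocess

log = logging.getLogger(__name__)

def run_gdal(args):
    """Run a GDAL command, log its combined stdout/stderr, and return the exit code."""
    proc = subprocess.Popen(args, stdout=subprocess.PIPE, stderr=subprocess.STDOUT)
    out, _ = proc.communicate()
    log.debug(out)
    return proc.returncode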
Example #3
def wrapper(*args, **kwargs):
    outpath, filename, bbox, dt = fetch(*args, **kwargs)
    if filename is not None:
        # Uncompress the downloaded raster and open it with GDAL
        lfilename = datasets.uncompress(filename, outpath)
        f = gdal.Open("{0}/{1}".format(outpath, lfilename))
        xul, xres, _, yul, _, yres = f.GetGeoTransform()
        data = f.ReadAsArray()
        nr, nc = data.shape
        # Cell-center coordinate vectors derived from the geotransform
        lat = np.arange(yul + yres / 2.0, yul + yres * nr, yres)
        lon = np.arange(xul + xres / 2.0, xul + xres * nc, xres)
        # Clip the array and the coordinate vectors to the requested bounding box
        i1, i2, j1, j2 = datasets.spatialSubset(lat, lon, xres, bbox)
        data = data[i1:i2, j1:j2]
        lat = lat[i1:i2]
        lon = lon[j1:j2]
        shutil.rmtree(outpath)
    else:
        data = lat = lon = None
    return data, lat, lon, dt
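The latitude/longitude vectors above are built directly from the GDAL geotransform. A self-contained sketch of that step (the raster path is a placeholder), written with integer indices rather than a float-stepped np.arange, which sidesteps the occasional off-by-one length that floating-point steps can produce:

import numpy as np
from osgeo import gdal

f = gdal.Open("/tmp/example.tif")  # placeholder raster
xul, xres, _, yul, _, yres = f.GetGeoTransform()
nr, nc = f.RasterYSize, f.RasterXSize
# Cell-center coordinates: half a cell in from the upper-left corner.
lat = yul + yres * (np.arange(nr) + 0.5)
lon = xul + xres * (np.arange(nc) + 0.5)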
Example #4
def download(dbname, dts, bbox):
    """Downloads the PRISM data products for a set of
    dates *dt* and imports them into the PostGIS database *dbname*."""
    url = "jsimpson.pps.eosdis.nasa.gov"
    ftp = FTP(url)
    # FIXME: Change to RHEAS-specific password
    ftp.login('*****@*****.**', '*****@*****.**')
    ftp.cwd("data/imerg/gis")
    outpath = tempfile.mkdtemp()
    for dt in [
            dts[0] + timedelta(t) for t in range((dts[-1] - dts[0]).days + 1)
    ]:
        try:
            ftp.cwd("/data/imerg/gis/{0}/{1:02d}".format(dt.year, dt.month))
            filenames = [
                f for f in ftp.nlst() if re.match(
                    r"3B.*{0}.*S000000.*1day\.tif.*".format(
                        dt.strftime("%Y%m%d")), f) is not None
            ]
            if len(filenames) > 0:
                fname = filenames[0]
                with open("{0}/{1}".format(outpath, fname), 'wb') as f:
                    ftp.retrbinary("RETR {0}".format(fname), f.write)
                with open(
                        "{0}/{1}".format(outpath, fname.replace("tif", "tfw")),
                        'wb') as f:
                    ftp.retrbinary(
                        "RETR {0}".format(fname.replace("tif", "tfw")),
                        f.write)
                tfname = fname.replace("tif", "tfw")
                fname = datasets.uncompress(fname, outpath)
                datasets.uncompress(tfname, outpath)
                subprocess.call([
                    "gdalwarp", "-t_srs", "epsg:4326",
                    "{0}/{1}".format(outpath,
                                     fname), "{0}/prec.tif".format(outpath)
                ])
                if bbox is not None:
                    subprocess.call([
                        "gdal_translate", "-a_srs", "epsg:4326", "-projwin",
                        "{0}".format(bbox[0]), "{0}".format(bbox[3]),
                        "{0}".format(bbox[2]), "{0}".format(bbox[1]),
                        "{0}/prec.tif".format(outpath),
                        "{0}/prec1.tif".format(outpath)
                    ])
                else:
                    subprocess.call([
                        "gdal_translate", "-a_srs", "epsg:4326",
                        "{0}/prec.tif".format(outpath),
                        "{0}/prec1.tif".format(outpath)
                    ])
                # multiply by 0.1 to get mm/hr and 24 to get mm/day
                cmd = " ".join([
                    "gdal_calc.py", "-A", "{0}/prec1.tif".format(outpath),
                    "--outfile={0}/prec2.tif".format(outpath),
                    "--calc=\"0.1*A\""
                ])
                subprocess.call(cmd, shell=True)
                dbio.ingest(dbname, "{0}/prec2.tif".format(outpath), dt, table,
                            False)
        except:
            print(
                "WARNING! No data were available to import into {0} for {1}.".
                format(table, dt.strftime("%Y-%m-%d")))
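Unlike Example #2, this variant runs gdal_calc.py through the shell, so the --calc expression has to be wrapped in escaped quotes. Passing an argument list avoids the quoting entirely; a sketch with a placeholder working directory, assuming gdal_calc.py is on the PATH:

import subprocess

outpath = "/tmp/imerg"  # placeholder working directory
subprocess.call([
    "gdal_calc.py",
    "-A", "{0}/prec1.tif".format(outpath),
    "--outfile={0}/prec2.tif".format(outpath),
    "--calc=0.1*A",
])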