Example #1
def download(dbname, dts, bbox):
    """Downloads the MODIS evapotranspiration data product MOD16 for
    a set of dates *dts* and imports them into the PostGIS database *dbname*."""
    log = logging.getLogger(__name__)
    res = 0.01
    urlbase = "http://files.ntsg.umt.edu"
    tiles = modis.findTiles(bbox)
    if tiles is not None:
        for dt in [dts[0] + timedelta(dti) for dti in range((dts[-1] - dts[0]).days + 1)]:
            url = "{0}/data/NTSG_Products/MOD16/MOD16A2.105_MERRAGMAO/Y{1}".format(urlbase, dt.year)
            resp_year = requests.get(url)
            try:
                assert resp_year.status_code == 200
                days = [link for link in BeautifulSoup(resp_year.text, parse_only=SoupStrainer('a')) if isinstance(link, Tag) and link.text.find(dt.strftime("%j")) >= 0]
                assert len(days) > 0
                resp_day = requests.get("{0}{1}".format(urlbase, days[0].get('href')))
                assert resp_day.status_code == 200
                files = [link.get('href') for link in BeautifulSoup(resp_day.text, parse_only=SoupStrainer('a')) if isinstance(link, Tag) and link.text.find("hdf") > 0]
                files = [f for f in files if any(f.find("h{0:02d}v{1:02d}".format(t[1], t[0])) > 0 for t in tiles)]
                outpath = tempfile.mkdtemp()
                for fname in files:
                    resp_file = requests.get("{0}{1}".format(urlbase, fname)) 
                    filename = fname.split("/")[-1]
                    with open("{0}/{1}".format(outpath, filename), 'wb') as fout:
                        for chunk in resp_file:
                            fout.write(chunk)
                    proc = subprocess.Popen(["gdal_translate", "HDF4_EOS:EOS_GRID:{0}/{1}:MOD_Grid_MOD16A2:ET_1km".format(
                        outpath, filename), "{0}/{1}".format(outpath, filename).replace("hdf", "tif")], stdout=subprocess.PIPE, stderr=subprocess.STDOUT)
                    out, err = proc.communicate()
                    log.debug(out)
                tifs = glob.glob("{0}/*.tif".format(outpath))
                proc = subprocess.Popen(
                    ["gdal_merge.py", "-o", "{0}/et.tif".format(outpath)] + tifs, stdout=subprocess.PIPE, stderr=subprocess.STDOUT)
                out, err = proc.communicate()
                log.debug(out)
                proc = subprocess.Popen(["gdal_calc.py", "-A", "{0}/et.tif".format(outpath), "--outfile={0}/et1.tif".format(
                    outpath), "--NoDataValue=-9999", "--calc=(A<32701)*(0.1*A+9999)-9999"], stdout=subprocess.PIPE, stderr=subprocess.STDOUT)
                out, err = proc.communicate()
                log.debug(out)
                proc = subprocess.Popen(["gdalwarp", "-t_srs", "+proj=latlong +ellps=sphere", "-tr", str(
                    res), str(-res), "{0}/et1.tif".format(outpath), "{0}/et2.tif".format(outpath)], stdout=subprocess.PIPE, stderr=subprocess.STDOUT)
                out, err = proc.communicate()
                log.debug(out)
                if bbox is None:
                    pstr = []
                else:
                    pstr = ["-projwin", str(bbox[0]), str(bbox[3]), str(bbox[2]), str(bbox[1])]
                proc = subprocess.Popen(["gdal_translate"] + pstr + ["-a_srs", "epsg:4326", "{0}/et2.tif".format(outpath), "{0}/et3.tif".format(outpath)], stdout=subprocess.PIPE, stderr=subprocess.STDOUT)
                out, err = proc.communicate()
                log.debug(out)
                dbio.ingest(
                    dbname, "{0}/et3.tif".format(outpath), dt, table, False)
                shutil.rmtree(outpath)
            except:
                log.warning("MOD16 data not available for {0}. Skipping download!".format(
                    dt.strftime("%Y-%m-%d")))
Example #2
def download(dbname, dts, bbox):
    """Downloads the combined MODIS LAI data product MCD15 for
    a set of dates *dts* and imports them into the PostGIS database *dbname*."""
    log = logging.getLogger(__name__)
    res = 0.01
    burl = "http://e4ftl01.cr.usgs.gov/MOTA/MCD15A2.005"
    tiles = modis.findTiles(bbox)
    if tiles is not None:
        for dt in [dts[0] + timedelta(dti) for dti in range((dts[-1] - dts[0]).days + 1)]:
            outpath = tempfile.mkdtemp()
            url = "{0}/{1:04d}.{2:02d}.{3:02d}".format(
                burl, dt.year, dt.month, dt.day)
            req = requests.get(url, auth=(username, password))
            if req.status_code == 200:
                dom = html.fromstring(req.text)
                files = [link for link in dom.xpath('//a/@href')]
                if len(files) > 0:
                    filenames = [list(filter(lambda s: re.findall(r'MCD.*h{0:02d}v{1:02d}.*hdf$'.format(t[1], t[0]), s), files)) for t in tiles]
                    for filename in filenames:
                        if len(filename) > 0:
                            filename = filename[0]
                            proc = subprocess.Popen(["wget", "-L", "--load-cookies", ".cookiefile", "--save-cookies", ".cookiefile", "--user", username, "--password", password, "{0}/{1}".format(url, filename), "-O", "{0}/{1}".format(outpath, filename)], stdout=subprocess.PIPE, stderr=subprocess.STDOUT)
                            out, err = proc.communicate()
                            log.debug(out)
                            proc = subprocess.Popen(["gdal_translate", "HDF4_EOS:EOS_GRID:{0}/{1}:MOD_Grid_MOD15A2:Lai_1km".format(
                                outpath, filename), "{0}/{1}".format(outpath, filename).replace("hdf", "tif")], stdout=subprocess.PIPE, stderr=subprocess.STDOUT)
                            out, err = proc.communicate()
                            log.debug(out)
                    tifs = glob.glob("{0}/*.tif".format(outpath))
                    if len(tifs) > 0:
                        proc = subprocess.Popen(["gdal_merge.py", "-o", "{0}/lai.tif".format(outpath)] + tifs, stdout=subprocess.PIPE, stderr=subprocess.STDOUT)
                        out, err = proc.communicate()
                        log.debug(out)
                        proc = subprocess.Popen(["gdal_calc.py", "-A", "{0}/lai.tif".format(outpath), "--outfile={0}/lai1.tif".format(outpath), "--NoDataValue=-9999", "--calc=(A<101.0)*(0.1*A+9999.0)-9999.0"], stdout=subprocess.PIPE, stderr=subprocess.STDOUT)
                        out, err = proc.communicate()
                        log.debug(out)
                        proc = subprocess.Popen(["gdalwarp", "-t_srs", "+proj=latlong +ellps=sphere", "-tr", str(res), str(-res), "{0}/lai1.tif".format(outpath), "{0}/lai2.tif".format(outpath)], stdout=subprocess.PIPE, stderr=subprocess.STDOUT)
                        out, err = proc.communicate()
                        log.debug(out)
                        proc = subprocess.Popen(["gdal_translate", "-a_srs", "epsg:4326", "{0}/lai2.tif".format(outpath), "{0}/lai3.tif".format(outpath)], stdout=subprocess.PIPE, stderr=subprocess.STDOUT)
                        out, err = proc.communicate()
                        log.debug(out)
                        dbio.ingest(
                            dbname, "{0}/lai3.tif".format(outpath), dt, table, False)
                    shutil.rmtree(outpath)
            else:
                log.warning("MCD15 data not available for {0}. Skipping download!".format(
                    dt.strftime("%Y-%m-%d")))
Example #3
def download(dbname, dts, bbox):
    """Downloads the MODSCAG snow cover fraction data product for a set of
    dates *dts* and imports it into the PostGIS database *dbname*."""
    log = logging.getLogger(__name__)
    res = 0.01
    tiles = modis.findTiles(bbox)
    for dt in [dts[0] + timedelta(dti) for dti in range((dts[-1] - dts[0]).days + 1)]:
        temppath = tempfile.mkdtemp()
        url = "https://snow-data.jpl.nasa.gov/modscag-historic/{0}/{1}".format(dt.year, dt.strftime("%j"))
        r = requests.get(url, auth=HTTPDigestAuth(username, password))
        if r.status_code == 200:
            dom = lxml.html.fromstring(r.text)
            links = [link for link in dom.xpath('//a/@href') if link.find("snow_fraction.tif") > 0]
            for t in tiles:
                filenames = list(filter(lambda f: f.find("h{0:02d}v{1:02d}".format(t[1], t[0])) > 0, links))
                if len(filenames) > 0:
                    filename = filenames[0]
                    r = requests.get("{0}/{1}".format(url, filename), auth=HTTPDigestAuth(username, password))
                    with open("{0}/{1}".format(temppath, filename), 'wb') as fout:
                        fout.write(r.content)
            tifs = glob.glob("{0}/*.tif".format(temppath))
            if len(tifs) > 0:
                proc = subprocess.Popen(["gdal_merge.py", "-o", "{0}/snow.tif".format(temppath)] + tifs, stdout=subprocess.PIPE, stderr=subprocess.STDOUT)
                out, err = proc.communicate()
                log.debug(out)
                proc = subprocess.Popen(["gdal_calc.py", "-A", "{0}/snow.tif".format(temppath), "--outfile={0}/snow1.tif".format(
                    temppath), "--NoDataValue=-9999", "--calc=(A<101.0)*(A+9999.0)-9999.0"], stdout=subprocess.PIPE, stderr=subprocess.STDOUT)
                out, err = proc.communicate()
                log.debug(out)
                proc = subprocess.Popen(["gdalwarp", "-t_srs", "+proj=latlong +ellps=sphere", "-tr", str(
                    res), str(-res), "{0}/snow1.tif".format(temppath), "{0}/snow2.tif".format(temppath)], stdout=subprocess.PIPE, stderr=subprocess.STDOUT)
                out, err = proc.communicate()
                log.debug(out)
                if bbox is None:
                    pstr = []
                else:
                    pstr = ["-projwin", str(bbox[0]), str(bbox[3]), str(bbox[2]), str(bbox[1])]
                proc = subprocess.Popen(["gdal_translate", "-a_srs", "epsg:4326"] + pstr + ["{0}/snow2.tif".format(temppath), "{0}/snow3.tif".format(temppath)], stdout=subprocess.PIPE, stderr=subprocess.STDOUT)
                out, err = proc.communicate()
                log.debug(out)
                dbio.ingest(dbname, "{0}/snow3.tif".format(temppath), dt, table, False)
                shutil.rmtree(temppath)
            else:
                log.warning("MODSCAG data not available for {0}. Skipping download!".format(
                    dt.strftime("%Y-%m-%d")))
Example #4
def download(dbname, dts, bbox):
    """Downloads the Terra MODIS snow cover fraction data product MOD10 for
    a set of dates *dts* and imports them into the PostGIS database *dbname*."""
    log = logging.getLogger(__name__)
    res = 0.005
    url = "n5eil01u.ecs.nsidc.org"
    tiles = modis.findTiles(bbox)
    if tiles is not None:
        ftp = FTP(url)
        ftp.login()
        for dt in [dts[0] + timedelta(dti) for dti in range((dts[-1] - dts[0]).days + 1)]:
            try:
                ftp.cwd("SAN/MOST/MOD10A1.005/{0:04d}.{1:02d}.{2:02d}".format(dt.year, dt.month, dt.day))
                files = [f for f in ftp.nlst() if any(
                    f.find("h{0:02d}v{1:02d}".format(t[1], t[0])) > 0 for t in tiles)]
                files = filter(lambda s: s.endswith("hdf"), files)
                outpath = tempfile.mkdtemp()
                for fname in files:
                    with open("{0}/{1}".format(outpath, fname), 'wb') as f:
                        ftp.retrbinary("RETR {0}".format(fname), f.write)
                    proc = subprocess.Popen(["gdal_translate", "HDF4_EOS:EOS_GRID:{0}/{1}:MOD_Grid_Snow_500m:Fractional_Snow_Cover".format(outpath, fname), "{0}/{1}".format(outpath, fname).replace("hdf", "tif")], stdout=subprocess.PIPE, stderr=subprocess.STDOUT)
                    out, err = proc.communicate()
                    log.debug(out)
                tifs = glob.glob("{0}/*.tif".format(outpath))
                proc = subprocess.Popen(
                    ["gdal_merge.py", "-a_nodata", "-9999", "-o", "{0}/snow.tif".format(outpath)] + tifs, stdout=subprocess.PIPE, stderr=subprocess.STDOUT)
                out, err = proc.communicate()
                log.debug(out)
                proc = subprocess.Popen(["gdal_calc.py", "-A", "{0}/snow.tif".format(outpath), "--outfile={0}/snow1.tif".format(
                    outpath), "--NoDataValue=-9999", "--calc=(A<101.0)*(A+9999.0)-9999.0"], stdout=subprocess.PIPE, stderr=subprocess.STDOUT)
                out, err = proc.communicate()
                log.debug(out)
                proc = subprocess.Popen(["gdalwarp", "-t_srs", "+proj=latlong +ellps=sphere", "-tr", str(
                    res), str(-res), "{0}/snow1.tif".format(outpath), "{0}/snow2.tif".format(outpath)], stdout=subprocess.PIPE, stderr=subprocess.STDOUT)
                out, err = proc.communicate()
                log.debug(out)
                proc = subprocess.Popen(["gdal_translate", "-a_srs", "epsg:4326", "{0}/snow2.tif".format(outpath), "{0}/snow3.tif".format(outpath)], stdout=subprocess.PIPE, stderr=subprocess.STDOUT)
                out, err = proc.communicate()
                log.debug(out)
                dbio.ingest(
                    dbname, "{0}/snow3.tif".format(outpath), dt, table, False)
                shutil.rmtree(outpath)
            except:
                log.warning("MOD10 data not available for {0}. Skipping download!".format(
                    dt.strftime("%Y-%m-%d")))
Example #5
def download(dbname, dts, bbox):
    """Downloads the combined MODIS LAI data product MCD15 for
    a set of dates *dts* and imports them into the PostGIS database *dbname*."""
    log = logging.getLogger(__name__)
    res = 0.005
    burl = "http://e4ftl01.cr.usgs.gov/MOTA/MCD15A2H.006"
    tiles = modis.findTiles(bbox)
    if tiles is not None:
        for dt in [dts[0] + timedelta(dti) for dti in range((dts[-1] - dts[0]).days + 1)]:
            outpath = tempfile.mkdtemp()
            url = "{0}/{1:04d}.{2:02d}.{3:02d}".format(burl, dt.year, dt.month, dt.day)
            filenames = []
            for t in tiles:
                try:
                    tmppath, fname = earthdata.download(url, "MCD15A2H.A{0}.h{1:02d}v{2:02d}.006.*.hdf".format(dt.strftime("%Y%j"), t[1], t[0]))
                except ConnectionError:
                    fname = None
                if fname:
                    filenames.append("{0}/{1}".format(tmppath, fname))
            for filename in filenames:
                proc = subprocess.Popen(["gdal_translate", "HDF4_EOS:EOS_GRID:{0}:MOD_Grid_MOD15A2H:Lai_500m".format(
                                filename), "{0}/{1}".format(outpath, filename.split("/")[-1]).replace("hdf", "tif")], stdout=subprocess.PIPE, stderr=subprocess.STDOUT)
                out, err = proc.communicate()
                log.debug(out)
                shutil.rmtree("/".join(filename.split("/")[:-1]))
            tifs = glob.glob("{0}/*.tif".format(outpath))
            if len(tifs) > 0:
                proc = subprocess.Popen(["gdal_merge.py", "-o", "{0}/lai.tif".format(outpath)] + tifs, stdout=subprocess.PIPE, stderr=subprocess.STDOUT)
                out, err = proc.communicate()
                log.debug(out)
                proc = subprocess.Popen(["gdal_calc.py", "-A", "{0}/lai.tif".format(outpath), "--outfile={0}/lai1.tif".format(outpath), "--NoDataValue=-9999", "--calc=(A<101.0)*(0.1*A+9999.0)-9999.0"], stdout=subprocess.PIPE, stderr=subprocess.STDOUT)
                out, err = proc.communicate()
                log.debug(out)
                proc = subprocess.Popen(["gdalwarp", "-t_srs", "+proj=latlong +ellps=sphere", "-tr", str(res), str(-res), "{0}/lai1.tif".format(outpath), "{0}/lai2.tif".format(outpath)], stdout=subprocess.PIPE, stderr=subprocess.STDOUT)
                out, err = proc.communicate()
                log.debug(out)
                proc = subprocess.Popen(["gdal_translate", "-a_srs", "epsg:4326", "{0}/lai2.tif".format(outpath), "{0}/lai3.tif".format(outpath)], stdout=subprocess.PIPE, stderr=subprocess.STDOUT)
                out, err = proc.communicate()
                log.debug(out)
                dbio.ingest(dbname, "{0}/lai3.tif".format(outpath), dt, table, False)
            else:
                log.warning("MCD15 data not available for {0}. Skipping download!".format(dt.strftime("%Y-%m-%d")))
            shutil.rmtree(outpath)
Example #6
def download(dbname, dts, bbox):
    """Downloads the MODIS evapotranspiration data product MOD16 for
    a set of dates *dts* and imports them into the PostGIS database *dbname*."""
    res = 0.01
    url = "ftp.ntsg.umt.edu"
    tiles = modis.findTiles(bbox)
    if tiles is not None:
        ftp = FTP(url)
        ftp.login()
        for dt in [dts[0] + timedelta(dti) for dti in range((dts[-1] - dts[0]).days + 1)]:
            try:
                ftp.cwd(
                    "pub/MODIS/NTSG_Products/MOD16/MOD16A2.105_MERRAGMAO/Y{0}".format(dt.year))
                days = ftp.nlst()
                datadir = "D{0}".format(dt.strftime("%j"))
                if datadir in days:
                    ftp.cwd(datadir)
                    files = [f for f in ftp.nlst() if any(
                        f.find("h{0:02d}v{1:02d}".format(t[1], t[0])) > 0 for t in tiles)]
                    outpath = tempfile.mkdtemp()
                    for fname in files:
                        with open("{0}/{1}".format(outpath, fname), 'wb') as f:
                            ftp.retrbinary("RETR {0}".format(fname), f.write)
                        subprocess.call(["gdal_translate", "HDF4_EOS:EOS_GRID:{0}/{1}:MOD_Grid_MOD16A2:ET_1km".format(
                            outpath, fname), "{0}/{1}".format(outpath, fname).replace("hdf", "tif")])
                    tifs = glob.glob("{0}/*.tif".format(outpath))
                    subprocess.call(
                        ["gdal_merge.py", "-o", "{0}/et.tif".format(outpath)] + tifs)
                    cmd = " ".join(["gdal_calc.py", "-A", "{0}/et.tif".format(outpath), "--outfile={0}/et1.tif".format(
                        outpath), "--NoDataValue=-9999", "--calc=\"(A<32701)*(0.1*A+9999)-9999\""])
                    subprocess.call(cmd, shell=True)
                    cmd = " ".join(["gdalwarp", "-t_srs", "'+proj=latlong +ellps=sphere'", "-tr", str(
                        res), str(-res), "{0}/et1.tif".format(outpath), "{0}/et2.tif".format(outpath)])
                    subprocess.call(cmd, shell=True)
                    subprocess.call(["gdal_translate", "-a_srs", "epsg:4326",
                                     "{0}/et2.tif".format(outpath), "{0}/et3.tif".format(outpath)])
                    dbio.ingest(
                        dbname, "{0}/et3.tif".format(outpath), dt, table, False)
                    shutil.rmtree(outpath)
            except:
                print("MOD16 data not available for {0}. Skipping download!".format(
                    dt.strftime("%Y-%m-%d")))
Example #7
def download(dbname, dts, bbox):
    """Downloads the combined MODIS LAI data product MCD15 for
    a set of dates *dts* and imports them into the PostGIS database *dbname*."""
    res = 0.01
    burl = "http://e4ftl01.cr.usgs.gov/MOTA/MCD15A2.005"
    tiles = modis.findTiles(bbox)
    if tiles is not None:
        for dt in [dts[0] + timedelta(dti) for dti in range((dts[-1] - dts[0]).days + 1)]:
            outpath = tempfile.mkdtemp()
            url = "{0}/{1:04d}.{2:02d}.{3:02d}".format(
                burl, dt.year, dt.month, dt.day)
            connection = urllib.urlopen(url)
            dom = html.fromstring(connection.read())
            files = [link for link in dom.xpath('//a/@href')]
            if len(files) > 0:
                filenames = [filter(lambda s: re.findall(
                    r'MCD.*h{0:02d}v{1:02d}.*hdf$'.format(t[1], t[0]), s), files) for t in tiles]
                for filename in filenames:
                    if len(filename) > 0:
                        filename = filename[0]
                        urllib.urlretrieve(
                            "{0}/{1}".format(url, filename), "{0}/{1}".format(outpath, filename))
                        subprocess.call(["gdal_translate", "HDF4_EOS:EOS_GRID:{0}/{1}:MOD_Grid_MOD15A2:Lai_1km".format(
                            outpath, filename), "{0}/{1}".format(outpath, filename).replace("hdf", "tif")])
                tifs = glob.glob("{0}/*.tif".format(outpath))
                if len(tifs) > 0:
                    subprocess.call(
                        ["gdal_merge.py", "-o", "{0}/lai.tif".format(outpath)] + tifs)
                    cmd = " ".join(["gdal_calc.py", "-A", "{0}/lai.tif".format(outpath), "--outfile={0}/lai1.tif".format(
                        outpath), "--NoDataValue=-9999", "--calc=\"(A<101.0)*(0.1*A+9999.0)-9999.0\""])
                    subprocess.call(cmd, shell=True)
                    cmd = " ".join(["gdalwarp", "-t_srs", "'+proj=latlong +ellps=sphere'", "-tr", str(
                        res), str(-res), "{0}/lai1.tif".format(outpath), "{0}/lai2.tif".format(outpath)])
                    subprocess.call(cmd, shell=True)
                    subprocess.call(["gdal_translate", "-a_srs", "epsg:4326",
                                     "{0}/lai2.tif".format(outpath), "{0}/lai3.tif".format(outpath)])
                    dbio.ingest(
                        dbname, "{0}/lai3.tif".format(outpath), dt, table, False)
                shutil.rmtree(outpath)
            else:
                print("MCD15 data not available for {0}. Skipping download!".format(
                    dt.strftime("%Y-%m-%d")))
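A note on Example #7 (and Example #12 formatting aside, the same pattern reappears in Example #11 below): they use the Python 2 urllib API (urllib.urlopen, urllib.urlretrieve), which does not exist under those names in Python 3. The code is left as written, but a minimal sketch of the Python 3 substitution, reusing the url, filename, outpath, and html names already defined in the example above, would be:

# Python 3 equivalents of the Python 2 urllib calls (sketch only; url, filename,
# outpath, and html are the names already used in the example above).
from urllib.request import urlopen, urlretrieve

connection = urlopen(url)                        # replaces urllib.urlopen(url)
dom = html.fromstring(connection.read())
urlretrieve("{0}/{1}".format(url, filename),     # replaces urllib.urlretrieve(...)
            "{0}/{1}".format(outpath, filename))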
Example #8
def download(dbname, dts, bbox):
    """Downloads the MODIS evapotranspiration data product MOD16 for
    a set of dates *dts* and imports them into the PostGIS database *dbname*."""
    log = logging.getLogger(__name__)
    res = 0.01
    url = "ftp.ntsg.umt.edu"
    tiles = modis.findTiles(bbox)
    if tiles is not None:
        ftp = FTP(url)
        ftp.login()
        for dt in [
                dts[0] + timedelta(dti)
                for dti in range((dts[-1] - dts[0]).days + 1)
        ]:
            try:
                ftp.cwd(
                    "pub/MODIS/NTSG_Products/MOD16/MOD16A2.105_MERRAGMAO/Y{0}".
                    format(dt.year))
                days = ftp.nlst()
                datadir = "D{0}".format(dt.strftime("%j"))
                if datadir in days:
                    ftp.cwd(datadir)
                    files = [
                        f for f in ftp.nlst() if any(
                            f.find("h{0:02d}v{1:02d}".format(t[1], t[0])) > 0
                            for t in tiles)
                    ]
                    outpath = tempfile.mkdtemp()
                    for fname in files:
                        with open("{0}/{1}".format(outpath, fname), 'wb') as f:
                            ftp.retrbinary("RETR {0}".format(fname), f.write)
                        proc = subprocess.Popen([
                            "gdal_translate",
                            "HDF4_EOS:EOS_GRID:{0}/{1}:MOD_Grid_MOD16A2:ET_1km"
                            .format(outpath, fname), "{0}/{1}".format(
                                outpath, fname).replace("hdf", "tif")
                        ],
                                                stdout=subprocess.PIPE,
                                                stderr=subprocess.STDOUT)
                        out, err = proc.communicate()
                        log.debug(out)
                    tifs = glob.glob("{0}/*.tif".format(outpath))
                    proc = subprocess.Popen(
                        ["gdal_merge.py", "-o", "{0}/et.tif".format(outpath)] +
                        tifs,
                        stdout=subprocess.PIPE,
                        stderr=subprocess.STDOUT)
                    out, err = proc.communicate()
                    log.debug(out)
                    proc = subprocess.Popen([
                        "gdal_calc.py", "-A", "{0}/et.tif".format(outpath),
                        "--outfile={0}/et1.tif".format(outpath),
                        "--NoDataValue=-9999",
                        "--calc=(A<32701)*(0.1*A+9999)-9999"
                    ],
                                            stdout=subprocess.PIPE,
                                            stderr=subprocess.STDOUT)
                    out, err = proc.communicate()
                    log.debug(out)
                    proc = subprocess.Popen([
                        "gdalwarp", "-t_srs", "+proj=latlong +ellps=sphere",
                        "-tr",
                        str(res),
                        str(-res), "{0}/et1.tif".format(outpath),
                        "{0}/et2.tif".format(outpath)
                    ],
                                            stdout=subprocess.PIPE,
                                            stderr=subprocess.STDOUT)
                    out, err = proc.communicate()
                    log.debug(out)
                    if bbox is None:
                        pstr = []
                    else:
                        pstr = [
                            "-projwin",
                            str(bbox[0]),
                            str(bbox[3]),
                            str(bbox[2]),
                            str(bbox[1])
                        ]
                    proc = subprocess.Popen(["gdal_translate"] + pstr + [
                        "-a_srs", "epsg:4326", "{0}/et2.tif".format(outpath),
                        "{0}/et3.tif".format(outpath)
                    ],
                                            stdout=subprocess.PIPE,
                                            stderr=subprocess.STDOUT)
                    out, err = proc.communicate()
                    log.debug(out)
                    dbio.ingest(dbname, "{0}/et3.tif".format(outpath), dt,
                                table, False)
                    shutil.rmtree(outpath)
            except:
                log.warning(
                    "MOD16 data not available for {0}. Skipping download!".
                    format(dt.strftime("%Y-%m-%d")))
Example #9
def download(dbname, dts, bbox):
    """Downloads the MODIS evapotranspiration data product MOD16 for
    a set of dates *dts* and imports them into the PostGIS database *dbname*."""
    log = logging.getLogger(__name__)
    res = 0.01
    url = "ftp.ntsg.umt.edu"
    tiles = modis.findTiles(bbox)
    if tiles is not None:
        ftp = FTP(url)
        ftp.login()
        for dt in [dts[0] + timedelta(dti) for dti in range((dts[-1] - dts[0]).days + 1)]:
            try:
                ftp.cwd("pub/MODIS/NTSG_Products/MOD16/MOD16A2.105_MERRAGMAO/Y{0}".format(dt.year))
                days = ftp.nlst()
                datadir = "D{0}".format(dt.strftime("%j"))
                if datadir in days:
                    ftp.cwd(datadir)
                    files = [
                        f for f in ftp.nlst() if any(f.find("h{0:02d}v{1:02d}".format(t[1], t[0])) > 0 for t in tiles)
                    ]
                    outpath = tempfile.mkdtemp()
                    for fname in files:
                        with open("{0}/{1}".format(outpath, fname), "wb") as f:
                            ftp.retrbinary("RETR {0}".format(fname), f.write)
                        proc = subprocess.Popen(
                            [
                                "gdal_translate",
                                "HDF4_EOS:EOS_GRID:{0}/{1}:MOD_Grid_MOD16A2:ET_1km".format(outpath, fname),
                                "{0}/{1}".format(outpath, fname).replace("hdf", "tif"),
                            ],
                            stdout=subprocess.PIPE,
                            stderr=subprocess.STDOUT,
                        )
                        out, err = proc.communicate()
                        log.debug(out)
                    tifs = glob.glob("{0}/*.tif".format(outpath))
                    proc = subprocess.Popen(
                        ["gdal_merge.py", "-o", "{0}/et.tif".format(outpath)] + tifs,
                        stdout=subprocess.PIPE,
                        stderr=subprocess.STDOUT,
                    )
                    out, err = proc.communicate()
                    log.debug(out)
                    proc = subprocess.Popen(
                        [
                            "gdal_calc.py",
                            "-A",
                            "{0}/et.tif".format(outpath),
                            "--outfile={0}/et1.tif".format(outpath),
                            "--NoDataValue=-9999",
                            "--calc=(A<32701)*(0.1*A+9999)-9999",
                        ],
                        stdout=subprocess.PIPE,
                        stderr=subprocess.STDOUT,
                    )
                    out, err = proc.communicate()
                    log.debug(out)
                    proc = subprocess.Popen(
                        [
                            "gdalwarp",
                            "-t_srs",
                            "+proj=latlong +ellps=sphere",
                            "-tr",
                            str(res),
                            str(-res),
                            "{0}/et1.tif".format(outpath),
                            "{0}/et2.tif".format(outpath),
                        ],
                        stdout=subprocess.PIPE,
                        stderr=subprocess.STDOUT,
                    )
                    out, err = proc.communicate()
                    log.debug(out)
                    if bbox is None:
                        pstr = []
                    else:
                        pstr = ["-projwin", str(bbox[0]), str(bbox[3]), str(bbox[2]), str(bbox[1])]
                    proc = subprocess.Popen(
                        ["gdal_translate"]
                        + pstr
                        + ["-a_srs", "epsg:4326", "{0}/et2.tif".format(outpath), "{0}/et3.tif".format(outpath)],
                        stdout=subprocess.PIPE,
                        stderr=subprocess.STDOUT,
                    )
                    out, err = proc.communicate()
                    log.debug(out)
                    dbio.ingest(dbname, "{0}/et3.tif".format(outpath), dt, table, False)
                    shutil.rmtree(outpath)
            except:
                log.warning("MOD16 data not available for {0}. Skipping download!".format(dt.strftime("%Y-%m-%d")))
Example #10
def download(dbname, dts, bbox):
    """Downloads the MODSCAG snow cover fraction data product for a set of
    dates *dts* and imports it into the PostGIS database *dbname*."""
    log = logging.getLogger(__name__)
    res = 0.01
    tiles = modis.findTiles(bbox)
    for dt in [
            dts[0] + timedelta(dti)
            for dti in range((dts[-1] - dts[0]).days + 1)
    ]:
        temppath = tempfile.mkdtemp()
        url = "https://snow-data.jpl.nasa.gov/modscag-historic/{0}/{1}".format(
            dt.year, dt.strftime("%j"))
        r = requests.get(url, auth=HTTPDigestAuth(username, password))
        if r.status_code == 200:
            dom = lxml.html.fromstring(r.text)
            links = [
                link for link in dom.xpath('//a/@href')
                if link.find("snow_fraction.tif") > 0
            ]
            for t in tiles:
                filenames = list(
                    filter(
                        lambda f: f.find("h{0:02d}v{1:02d}".format(t[1], t[0])) > 0,
                        links))
                if len(filenames) > 0:
                    filename = filenames[0]
                    r = requests.get("{0}/{1}".format(url, filename),
                                     auth=HTTPDigestAuth(username, password))
                    with open("{0}/{1}".format(temppath, filename),
                              'wb') as fout:
                        fout.write(r.content)
            tifs = glob.glob("{0}/*.tif".format(temppath))
            if len(tifs) > 0:
                proc = subprocess.Popen(
                    ["gdal_merge.py", "-o", "{0}/snow.tif".format(temppath)] +
                    tifs,
                    stdout=subprocess.PIPE,
                    stderr=subprocess.STDOUT)
                out, err = proc.communicate()
                log.debug(out)
                proc = subprocess.Popen([
                    "gdal_calc.py", "-A", "{0}/snow.tif".format(temppath),
                    "--outfile={0}/snow1.tif".format(temppath),
                    "--NoDataValue=-9999",
                    "--calc=(A<101.0)*(A+9999.0)-9999.0"
                ],
                                        stdout=subprocess.PIPE,
                                        stderr=subprocess.STDOUT)
                out, err = proc.communicate()
                log.debug(out)
                proc = subprocess.Popen([
                    "gdalwarp", "-t_srs", "+proj=latlong +ellps=sphere",
                    "-tr",
                    str(res),
                    str(-res), "{0}/snow1.tif".format(temppath),
                    "{0}/snow2.tif".format(temppath)
                ],
                                        stdout=subprocess.PIPE,
                                        stderr=subprocess.STDOUT)
                out, err = proc.communicate()
                log.debug(out)
                if bbox is None:
                    pstr = []
                else:
                    pstr = [
                        "-projwin",
                        str(bbox[0]),
                        str(bbox[3]),
                        str(bbox[2]),
                        str(bbox[1])
                    ]
                proc = subprocess.Popen(
                    ["gdal_translate", "-a_srs", "epsg:4326"] + pstr + [
                        "{0}/snow2.tif".format(temppath),
                        "{0}/snow3.tif".format(temppath)
                    ],
                    stdout=subprocess.PIPE,
                    stderr=subprocess.STDOUT)
                out, err = proc.communicate()
                log.debug(out)
                dbio.ingest(dbname, "{0}/snow3.tif".format(temppath), dt,
                            table, False)
                shutil.rmtree(temppath)
            else:
                log.warning(
                    "MODSCAG data not available for {0}. Skipping download!".
                    format(dt.strftime("%Y-%m-%d")))
Example #11
def download(dbname, dts, bbox):
    """Downloads the combined MODIS LAI data product MCD15 for
    a set of dates *dts* and imports them into the PostGIS database *dbname*."""
    res = 0.01
    burl = "http://e4ftl01.cr.usgs.gov/MOTA/MCD15A2.005"
    tiles = modis.findTiles(bbox)
    if tiles is not None:
        for dt in [
                dts[0] + timedelta(dti)
                for dti in range((dts[-1] - dts[0]).days + 1)
        ]:
            outpath = tempfile.mkdtemp()
            url = "{0}/{1:04d}.{2:02d}.{3:02d}".format(burl, dt.year, dt.month,
                                                       dt.day)
            connection = urllib.urlopen(url)
            dom = html.fromstring(connection.read())
            files = [link for link in dom.xpath('//a/@href')]
            if len(files) > 0:
                filenames = [
                    filter(
                        lambda s: re.findall(
                            r'MCD.*h{0:02d}v{1:02d}.*hdf$'.format(t[1], t[0]),
                            s), files) for t in tiles
                ]
                for filename in filenames:
                    if len(filename) > 0:
                        filename = filename[0]
                        urllib.urlretrieve("{0}/{1}".format(url, filename),
                                           "{0}/{1}".format(outpath, filename))
                        subprocess.call([
                            "gdal_translate",
                            "HDF4_EOS:EOS_GRID:{0}/{1}:MOD_Grid_MOD15A2:Lai_1km"
                            .format(outpath, filename),
                            "{0}/{1}".format(outpath,
                                             filename).replace("hdf", "tif")
                        ])
                tifs = glob.glob("{0}/*.tif".format(outpath))
                if len(tifs) > 0:
                    subprocess.call(
                        ["gdal_merge.py", "-o", "{0}/lai.tif".format(outpath)
                         ] + tifs)
                    cmd = " ".join([
                        "gdal_calc.py", "-A", "{0}/lai.tif".format(outpath),
                        "--outfile={0}/lai1.tif".format(outpath),
                        "--NoDataValue=-9999",
                        "--calc=\"(A<101.0)*(0.1*A+9999.0)-9999.0\""
                    ])
                    subprocess.call(cmd, shell=True)
                    cmd = " ".join([
                        "gdalwarp", "-t_srs", "'+proj=latlong +ellps=sphere'",
                        "-tr",
                        str(res),
                        str(-res), "{0}/lai1.tif".format(outpath),
                        "{0}/lai2.tif".format(outpath)
                    ])
                    subprocess.call(cmd, shell=True)
                    subprocess.call([
                        "gdal_translate", "-a_srs", "epsg:4326",
                        "{0}/lai2.tif".format(outpath),
                        "{0}/lai3.tif".format(outpath)
                    ])
                    dbio.ingest(dbname, "{0}/lai3.tif".format(outpath), dt,
                                table, False)
                shutil.rmtree(outpath)
            else:
                print("MCD15 data not available for {0}. Skipping download!".
                      format(dt.strftime("%Y-%m-%d")))
Example #12
def download(dbname, dts, bbox):
    """Downloads the MODIS evapotranspiration data product MOD16 for
    a set of dates *dts* and imports them into the PostGIS database *dbname*."""
    log = logging.getLogger(__name__)
    res = 0.01
    urlbase = "http://files.ntsg.umt.edu"
    tiles = modis.findTiles(bbox)
    if tiles is not None:
        for dt in [
                dts[0] + timedelta(dti)
                for dti in range((dts[-1] - dts[0]).days + 1)
        ]:
            url = "{0}/data/NTSG_Products/MOD16/MOD16A2.105_MERRAGMAO/Y{1}".format(
                urlbase, dt.year)
            resp_year = requests.get(url)
            try:
                assert resp_year.status_code == 200
                days = [
                    link
                    for link in BeautifulSoup(resp_year.text,
                                              parse_only=SoupStrainer('a'))
                    if isinstance(link, Tag)
                    and link.text.find(dt.strftime("%j")) >= 0
                ]
                assert len(days) > 0
                resp_day = requests.get("{0}{1}".format(
                    urlbase, days[0].get('href')))
                assert resp_day.status_code == 200
                files = [
                    link.get('href')
                    for link in BeautifulSoup(resp_day.text,
                                              parse_only=SoupStrainer('a'))
                    if isinstance(link, Tag) and link.text.find("hdf") > 0
                ]
                files = [
                    f for f in files if any(
                        f.find("h{0:02d}v{1:02d}".format(t[1], t[0])) > 0
                        for t in tiles)
                ]
                outpath = tempfile.mkdtemp()
                for fname in files:
                    resp_file = requests.get("{0}{1}".format(urlbase, fname))
                    filename = fname.split("/")[-1]
                    with open("{0}/{1}".format(outpath, filename),
                              'wb') as fout:
                        for chunk in resp_file:
                            fout.write(chunk)
                    proc = subprocess.Popen([
                        "gdal_translate",
                        "HDF4_EOS:EOS_GRID:{0}/{1}:MOD_Grid_MOD16A2:ET_1km".
                        format(outpath, filename), "{0}/{1}".format(
                            outpath, filename).replace("hdf", "tif")
                    ],
                                            stdout=subprocess.PIPE,
                                            stderr=subprocess.STDOUT)
                    out, err = proc.communicate()
                    log.debug(out)
                tifs = glob.glob("{0}/*.tif".format(outpath))
                proc = subprocess.Popen(
                    ["gdal_merge.py", "-o", "{0}/et.tif".format(outpath)] +
                    tifs,
                    stdout=subprocess.PIPE,
                    stderr=subprocess.STDOUT)
                out, err = proc.communicate()
                log.debug(out)
                proc = subprocess.Popen([
                    "gdal_calc.py", "-A", "{0}/et.tif".format(outpath),
                    "--outfile={0}/et1.tif".format(outpath),
                    "--NoDataValue=-9999", "--calc=(A<32701)*(0.1*A+9999)-9999"
                ],
                                        stdout=subprocess.PIPE,
                                        stderr=subprocess.STDOUT)
                out, err = proc.communicate()
                log.debug(out)
                proc = subprocess.Popen([
                    "gdalwarp", "-t_srs", "+proj=latlong +ellps=sphere", "-tr",
                    str(res),
                    str(-res), "{0}/et1.tif".format(outpath),
                    "{0}/et2.tif".format(outpath)
                ],
                                        stdout=subprocess.PIPE,
                                        stderr=subprocess.STDOUT)
                out, err = proc.communicate()
                log.debug(out)
                if bbox is None:
                    pstr = []
                else:
                    pstr = [
                        "-projwin",
                        str(bbox[0]),
                        str(bbox[3]),
                        str(bbox[2]),
                        str(bbox[1])
                    ]
                proc = subprocess.Popen(["gdal_translate"] + pstr + [
                    "-a_srs", "epsg:4326", "{0}/et2.tif".format(outpath),
                    "{0}/et3.tif".format(outpath)
                ],
                                        stdout=subprocess.PIPE,
                                        stderr=subprocess.STDOUT)
                out, err = proc.communicate()
                log.debug(out)
                dbio.ingest(dbname, "{0}/et3.tif".format(outpath), dt, table,
                            False)
                shutil.rmtree(outpath)
            except:
                log.warning(
                    "MOD16 data not available for {0}. Skipping download!".
                    format(dt.strftime("%Y-%m-%d")))
Example #13
def download(dbname, dts, bbox):
    """Downloads the combined MODIS LAI data product MCD15 for
    a set of dates *dts* and imports them into the PostGIS database *dbname*."""
    log = logging.getLogger(__name__)
    res = 0.005
    burl = "http://e4ftl01.cr.usgs.gov/MOTA/MCD15A2H.006"
    tiles = modis.findTiles(bbox)
    if tiles is not None:
        for dt in [
                dts[0] + timedelta(dti)
                for dti in range((dts[-1] - dts[0]).days + 1)
        ]:
            outpath = tempfile.mkdtemp()
            url = "{0}/{1:04d}.{2:02d}.{3:02d}".format(burl, dt.year, dt.month,
                                                       dt.day)
            filenames = []
            for t in tiles:
                try:
                    tmppath, fname = earthdata.download(
                        url, "MCD15A2H.A{0}.h{1:02d}v{2:02d}.006.*.hdf".format(
                            dt.strftime("%Y%j"), t[1], t[0]))
                except ConnectionError:
                    fname = None
                if fname:
                    filenames.append("{0}/{1}".format(tmppath, fname))
            for filename in filenames:
                proc = subprocess.Popen([
                    "gdal_translate",
                    "HDF4_EOS:EOS_GRID:{0}:MOD_Grid_MOD15A2H:Lai_500m".format(
                        filename), "{0}/{1}".format(
                            outpath,
                            filename.split("/")[-1]).replace("hdf", "tif")
                ],
                                        stdout=subprocess.PIPE,
                                        stderr=subprocess.STDOUT)
                out, err = proc.communicate()
                log.debug(out)
                shutil.rmtree("/".join(filename.split("/")[:-1]))
            tifs = glob.glob("{0}/*.tif".format(outpath))
            if len(tifs) > 0:
                proc = subprocess.Popen(
                    ["gdal_merge.py", "-o", "{0}/lai.tif".format(outpath)] +
                    tifs,
                    stdout=subprocess.PIPE,
                    stderr=subprocess.STDOUT)
                out, err = proc.communicate()
                log.debug(out)
                proc = subprocess.Popen([
                    "gdal_calc.py", "-A", "{0}/lai.tif".format(outpath),
                    "--outfile={0}/lai1.tif".format(outpath),
                    "--NoDataValue=-9999",
                    "--calc=(A<101.0)*(0.1*A+9999.0)-9999.0"
                ],
                                        stdout=subprocess.PIPE,
                                        stderr=subprocess.STDOUT)
                out, err = proc.communicate()
                log.debug(out)
                proc = subprocess.Popen([
                    "gdalwarp", "-t_srs", "+proj=latlong +ellps=sphere", "-tr",
                    str(res),
                    str(-res), "{0}/lai1.tif".format(outpath),
                    "{0}/lai2.tif".format(outpath)
                ],
                                        stdout=subprocess.PIPE,
                                        stderr=subprocess.STDOUT)
                out, err = proc.communicate()
                log.debug(out)
                proc = subprocess.Popen([
                    "gdal_translate", "-a_srs", "epsg:4326",
                    "{0}/lai2.tif".format(outpath),
                    "{0}/lai3.tif".format(outpath)
                ],
                                        stdout=subprocess.PIPE,
                                        stderr=subprocess.STDOUT)
                out, err = proc.communicate()
                log.debug(out)
                dbio.ingest(dbname, "{0}/lai3.tif".format(outpath), dt, table,
                            False)
            else:
                log.warning(
                    "MCD15 data not available for {0}. Skipping download!".
                    format(dt.strftime("%Y-%m-%d")))
            shutil.rmtree(outpath)
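All of the examples above end with the same post-processing chain once the HDF tiles have been converted to GeoTIFFs: gdal_merge.py to mosaic the tiles, gdal_calc.py to rescale the values and set the nodata flag, gdalwarp to resample onto a geographic grid, gdal_translate to stamp EPSG:4326 (and optionally clip with -projwin), and finally dbio.ingest. The sketch below is one hypothetical way to factor that shared chain into a helper; the function name and signature are inventions for illustration, and dbio and table are still assumed to come from the surrounding module, as in the examples.

import glob
import logging
import subprocess

def _postprocess(outpath, prefix, calc_expr, res, bbox, dbname, dt):
    """Hypothetical helper mirroring the merge/calc/warp/translate/ingest steps above."""
    log = logging.getLogger(__name__)

    def run(args):
        # Run a GDAL command-line tool and log its combined output, as the examples do.
        proc = subprocess.Popen(args, stdout=subprocess.PIPE, stderr=subprocess.STDOUT)
        out, _ = proc.communicate()
        log.debug(out)

    tifs = glob.glob("{0}/*.tif".format(outpath))
    if not tifs:
        return False
    run(["gdal_merge.py", "-o", "{0}/{1}.tif".format(outpath, prefix)] + tifs)
    run(["gdal_calc.py", "-A", "{0}/{1}.tif".format(outpath, prefix),
         "--outfile={0}/{1}1.tif".format(outpath, prefix),
         "--NoDataValue=-9999", "--calc={0}".format(calc_expr)])
    run(["gdalwarp", "-t_srs", "+proj=latlong +ellps=sphere", "-tr", str(res), str(-res),
         "{0}/{1}1.tif".format(outpath, prefix), "{0}/{1}2.tif".format(outpath, prefix)])
    pstr = [] if bbox is None else ["-projwin", str(bbox[0]), str(bbox[3]), str(bbox[2]), str(bbox[1])]
    run(["gdal_translate", "-a_srs", "epsg:4326"] + pstr +
        ["{0}/{1}2.tif".format(outpath, prefix), "{0}/{1}3.tif".format(outpath, prefix)])
    # dbio and table are module-level names assumed from the examples above.
    dbio.ingest(dbname, "{0}/{1}3.tif".format(outpath, prefix), dt, table, False)
    return True

With a helper like this, each download() body would reduce to fetching the HDF tiles into outpath, extracting the relevant subdataset with gdal_translate, and then calling, for instance, _postprocess(outpath, "et", "(A<32701)*(0.1*A+9999)-9999", res, bbox, dbname, dt).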