Ejemplo n.º 1
0
#!/usr/bin/env python
'''
Python script to pull all MODIS files for LAI product for given years
for given tiles into destination_folder

'''

from uclgeog.get_modis_files import get_modis_files
from datetime import datetime

# Driver script: fetch the MOD10A1 product for every day of each
# requested year, for the listed tile.
years = [2017, 2016]
tiles = ['h09v05']
destination_folder = 'data'

for year in years:
    # day-of-year of 31 December == number of days in this year
    # (handles leap years automatically)
    ndoys = datetime(year, 12, 31).timetuple().tm_yday
    for doy in range(1, ndoys + 1):
        print(year, ndoys, doy, end=' ')
        filenames = get_modis_files(
            doy, year, tiles,
            base_url='https://n5eil01u.ecs.nsidc.org/MOST',
            version=6,
            product='MOD10A1')
        print(filenames)
Ejemplo n.º 2
0
def mosaic_and_clip(tiles=('h17v03',),
                    doy=1,
                    year=2020,
                    ofolder=None,
                    folder="data/",
                    grid=None,
                    layer="Lai_500m",
                    shpfile=None,
                    country_code=None,
                    product='MCD15A3H',
                    verbose=False,
                    nodata=255,
                    base_url='https://e4ftl01.cr.usgs.gov/MOTA',
                    frmat="MEM"):
    """
    Simple high-level function for downloading a MODIS dataset from
    Earthdata, mosaicking the requested tiles and optionally clipping
    it to a country boundary.

    tiles:        iterable of MODIS tiles to access
                  default: ('h17v03',)
    doy:          day of year for dataset. Note that some products are only
                  produced every 4 or 8 days, so requesting a dataset for a
                  day that doesn't exist will fail.
    year:         year of dataset. 1999 to now.
    ofolder:      folder for the warped output files
                  default: None (use `folder`)
    folder:       folder for storing datasets
                  default: data
    grid:         optional grid/sub-dataset selector, passed through to
                  create_gdal_friendly_names
                  default: None
    layer:        data layer. See product specification page for more details.
                  https://lpdaac.usgs.gov/products/mcd15a3hv006/
                  default: Lai_500m
    shpfile:      Shapefile to use for data masking
                  default: TM_WORLD_BORDERS-0.3.zip
    country_code: FIPS country code for any masking:
                  https://en.wikipedia.org/wiki/List_of_FIPS_country_codes
                  default: None (no masking)
    product:      product id. See product specification page for more details. e.g.
                  https://lpdaac.usgs.gov/products/mcd15a3hv006/
                  default: MCD15A3H
    verbose:      verbose flag
                  default: False
    nodata:       no data value
                  default: 255
    base_url:     base URL of datasets
                  default: https://e4ftl01.cr.usgs.gov/MOTA
    frmat:        output file format: MEM, VRT or GTiff
                  default MEM (data array)

    Returns the data array for frmat="MEM", the output filename for
    frmat="VRT"/"GTiff", or None if the warp failed.
    """
    # Copy so neither the caller's list nor the default is ever mutated.
    tiles = list(tiles)

    if ofolder is None:
        ofolder = folder
    folder_path = Path(folder)
    ofolder_path = Path(ofolder)
    # Make sure both folders exist before downloading / writing.
    folder_path.mkdir(parents=True, exist_ok=True)
    ofolder_path.mkdir(parents=True, exist_ok=True)

    if verbose:
        print('Testing for MODIS files on this computer ...')

    # Download the tiles if they are not already present locally.
    get_modis_files(doy, year, tiles, product=product, version=6,
                    destination_folder=folder, verbose=verbose,
                    base_url=base_url)

    # Find all files to mosaic together.
    hdf_files = find_mcdfiles(year, doy, tiles, folder, product=product)
    if verbose:
        print(f'files: {hdf_files}')

    # Create GDAL friendly-names...
    gdal_filenames = create_gdal_friendly_names(hdf_files,
                                                layer,
                                                grid=grid,
                                                product=product)
    if verbose:
        print(f'dataset: {gdal_filenames}')
        print(f'output format {frmat}')
        print(f'No data value: {nodata}')

    # Extra gdal.Warp keywords when clipping to a country boundary.
    # Collecting them once removes the duplicated country/no-country
    # branches of the original implementation.
    cutline_opts = {}
    if country_code is not None:
        if shpfile is None:
            # Fall back to the bundled world-borders shapefile.
            shpfile = get_world(data=folder).replace('.zip', '.shp')
        if verbose:
            print(
                f'{shpfile:s} shapefile used to mask FIPS code {country_code:s}'
            )
        cutline_opts = dict(cutlineDSName=shpfile,
                            cutlineWhere=f"FIPS='{country_code:s}'",
                            cropToCutline=True)

    if frmat == "MEM":
        g = gdal.Warp("", gdal_filenames, format="MEM", dstNodata=nodata,
                      **cutline_opts)
        if g:
            if verbose:
                print('returning data array')
            return g.ReadAsArray()
        print(
            f'failed to warp {str(gdal_filenames)} {year}, {doy}, {tiles}, {folder}'
        )
    elif frmat in ("VRT", "GTiff"):
        suffix = '' if country_code is None else f'_{country_code:s}'
        ext = '.vrt' if frmat == "VRT" else '.tif'
        geotiff_fname = ofolder_path / f"{layer:s}_{year:d}_{doy:03d}{suffix}{ext}"
        # BUG FIX: the original left `g` unbound (NameError) when gdal.Warp
        # raised before assignment inside its bare try/except.
        g = None
        try:
            g = gdal.Warp(geotiff_fname.as_posix(),
                          gdal_filenames,
                          format=frmat,
                          dstNodata=nodata,
                          **cutline_opts)
        except Exception as err:
            # Still best-effort (as in the original), but say why it failed.
            print(f'warp raised: {err}')
        if g:
            del g  # close the dataset so the output is flushed to disk
            ofile = geotiff_fname.as_posix()
            if verbose:
                print(f'returning data in {ofile}')
            return ofile
        print(
            f'failed to warp {str(gdal_filenames)} {year}, {doy}, {tiles}, {folder}'
        )
    else:
        print("Only MEM, VRT or GTiff formats supported!")
Ejemplo n.º 3
0
# NOTE(review): `ofile`, `download`, `fname`, `year` and `tiles` are
# presumably defined earlier in the notebook/script this fragment was
# extracted from -- confirm before running this standalone.
if ofile.exists():
    done = True

# try to download it from server
if download:
    done = procure_dataset(fname, verbose=True)

from uclgeog.get_modis_files import get_modis_files
'''
Get the MODIS LC files from the server
to store in data
'''
# Best-effort download of the MCD12Q1 product for day-of-year 1: any
# failure (e.g. the server being down) is reported, not raised.
try:
    url = 'https://e4ftl01.cr.usgs.gov//MODV6_Cmp_C/MOTA/'
    filename = get_modis_files(1,year,[tiles],base_url=url,\
                                               version=6,verbose=True,\
                                               destination_folder='data',\
                                               product='MCD12Q1')[0]
    print(filename)
except:
    print('server may be down')

from uclgeog.process_timeseries import mosaic_and_clip
'''
Extract and clip the dataset
'''
lc_data = mosaic_and_clip(tiles,
                          1,
                          year,
                          folder='data',
                          layer="LC_Type3",
                          shpfile='data/TM_WORLD_BORDERS-0.3.shp',
Ejemplo n.º 4
0
def _procure_modis_granule(dataset_name, modis_urls, destination_folder,
                           verbose=False):
    """Fallback: interpret `dataset_name` as a MODIS granule filename
    (PRODUCT.AYYYYDDD.TILE.VERSION...hdf|tif) and try each MODIS server
    in turn. Returns True on success, False otherwise (best effort)."""
    try:
        info = dataset_name.split('.')
        product = info[0]
        tile = info[2]
        version = int(info[3])
        year = int(info[1][1:5])
        doy = int(info[1][5:])
        dtype = info[-1]
    except (IndexError, ValueError):
        # Not a parseable MODIS filename: give up quietly.
        return False
    if dtype not in ('hdf', 'tif'):
        return False
    for url in modis_urls:
        try:
            get_modis_files(doy, year, [tile], base_url=url,
                            version=version,
                            destination_folder=destination_folder,
                            product=product)[0]
            # BUG FIX: the original set done=True but kept trying the
            # remaining servers; stop at the first success.
            return True
        except Exception as err:
            if verbose:
                print(f'{url}: {err}')
    return False


def procure_dataset(dataset_name, destination_folder="data", verbose=False,
                    locations=["/data/selene/ucfajlg/uclgeog_data/",
                               "/data/selene/ucfajlg/uclgeog_data/lai_data/",
                               "/archive/rsu_raid_0/plewis/public_html/uclgeog_data"][::-1],
                    modis_urls=['https://e4ftl01.cr.usgs.gov/MOTA',
                                'https://e4ftl01.cr.usgs.gov/MOLT',
                                'https://e4ftl01.cr.usgs.gov/MOLA',
                                'https://e4ftl01.cr.usgs.gov//MODV6_Cmp_C/MOTA/',
                                'https://e4ftl01.cr.usgs.gov/VIIRS',
                                'https://n5eil01u.ecs.nsidc.org/MOST/',
                                'https://n5eil01u.ecs.nsidc.org/MOSA/',
                                'https://n5eil01u.ecs.nsidc.org/VIIRS/'],
                    urls=["http://www2.geog.ucl.ac.uk/~ucfajlg/uclgeog_data/",
                          "http://www2.geog.ucl.ac.uk/~plewis/uclgeog_data/",
                          "http://www2.geog.ucl.ac.uk/~plewis/uclgeog_data/lai_files/"][::-1]):
    """Procure a Geog0111 dataset. This function will look for the dataset called
    `dataset_name`, and either provide symbolic links or download the relevant
    files to a local folder called by default `data`, or with a user-provided name.
    The other two options are to do with the location of the dataset within the UCL
    filesystem (`locations`) and the external URLs (list `urls`). It is assumed that
    in either case `dataset_name` is a valid folder under both `locations` and `urls`.
    Returns True if the dataset ends up available locally, False otherwise.

    NOTE: the default lists are never mutated, so the shared mutable
    defaults are safe here.
    """
    dest_path = Path(destination_folder)
    # parents=True also covers nested destination folders.
    dest_path.mkdir(parents=True, exist_ok=True)
    output_fname = dest_path.joinpath(dataset_name)
    if output_fname.exists():
        # Already in place: nothing to do.
        return True

    done = False
    fully_qualified_hostname = getfqdn()
    if fully_qualified_hostname.find("geog.ucl.ac.uk") >= 0:
        # On the UCL filesystem: symlink the shared copies instead of
        # downloading.
        if verbose:
            print("Running on UCL's Geography computers")
        for location in locations:
            if verbose:
                print(f'trying {location}')
            done = generate_symlinks(dataset_name,
                                     location,
                                     destination_folder=destination_folder,
                                     verbose=verbose)
            if done:
                break
    else:
        if verbose:
            print("Running outside UCL Geography. Will try to download data.\n",
                  dataset_name, "\nThis might take a while!")
        for url in list(urls):
            if verbose:
                print(f'trying {url}')
            done = download_data(dataset_name,
                                 url,
                                 verbose=verbose,
                                 destination_folder=destination_folder)
            if done:
                break
        if not done:
            # Maybe a raw MODIS granule: try the MODIS servers directly.
            done = _procure_modis_granule(dataset_name, modis_urls,
                                          destination_folder, verbose)
    return done
Ejemplo n.º 5
0
#!/usr/bin/env python
'''
Python script to pull all MODIS files for LAI product for given years
for given tiles into destination_folder

'''

from uclgeog.get_modis_files import get_modis_files
from datetime import datetime

# Driver script: fetch the MCD15A3H product for every 4th day of each
# requested year, for the four listed tiles.
years = [2017, 2016]
tiles = ['h17v03', 'h18v03', 'h17v04', 'h18v04']
destination_folder = 'data'

for year in years:
    # day-of-year of 31 December == number of days in this year
    # (handles leap years automatically)
    ndoys = datetime(year, 12, 31).timetuple().tm_yday
    for doy in range(1, ndoys + 1, 4):
        print(year, ndoys, doy, end=' ')
        filenames = get_modis_files(
            doy, year, tiles,
            base_url='https://e4ftl01.cr.usgs.gov/MOTA',
            version=6,
            product='MCD15A3H')
        print(filenames)