def download_filelist(ftp_texts, file_type=None, outdir=None):
    """
    Reads text file of download links, downloads them.

    This script reads a text file with urls such as those output from ECHO REVERB
    and outputs them to an output directory. It will retry failed links 20 times before
    giving up and outputting a warning to the user.

    :param ftp_texts:    array of txt files ordered from reverb containing ftp links
    :param file_type:    file extension of the desired files, leave blank or False to grab all types.
    :param outdir:       folder where files are to be placed after download

    :return list failed: list of files which failed to download after the end of the script.
    """

    failed = []

    # force inputs to take list format
    ftp_texts = core.enf_list(ftp_texts)
    if file_type is not None:
        file_type = core.enf_list(file_type)

    for ftptext in ftp_texts:
        # verify that the link file exists before trying to read it
        core.exists(ftptext)

        # default the output directory to the folder holding the link file
        if not outdir:
            outdir, _ = os.path.split(ftptext)

        # "with" guarantees the handle is closed even if a download raises
        with open(ftptext, 'r') as ftp:
            sites = ftp.readlines()

        print("Attempting to download {0} files!".format(len(sites)))
        print("Saving all files to {0}".format(outdir))

        # perform the first attempt
        failed = download_urls(sites, outdir, file_type)

        # retry up to 19 more times (20 attempts total, matching the message below).
        # BUGFIX: retries previously called download_urls(failed, file_type, outdir),
        # swapping the outdir/file_type arguments relative to the first attempt,
        # and range(1, 19) only allowed 18 retries.
        for i in range(1, 20):
            if len(failed) > 0:
                print("retry number {0} to grab {1} failed downloads!".format(
                    i, len(failed)))
                time.sleep(60)
                failed = download_urls(failed, outdir, file_type)

        # once all tries are complete, print a list of files which repeatedly failed
        if len(failed) > 0:
            print(
                'Files at the following URLs have failed 20 download attempts')
            print('Manually verify that these files exist on the server:')
            for i in failed:
                print(i)
        else:
            print('Finished with no errors!')

    return failed
Beispiel #2
0
def download_filelist(ftptexts, filetypes = False, outdir = False):

    """
    Reads text file of download links, downloads them.

     This script reads a text file with urls such as those output from ECHO REVERB
     and outputs them to an output directory. It will retry failed links 20 times before
     giving up and outputting a warning to the user.

     Inputs:
       ftptexts        array of txt files ordered from reverb containing ftp links
       filetypes       file extension of the desired files, leave blank or False to grab all
                       types.
       outdir          folder where files are to be placed after download

     Outputs:
       failed          list of files which failed to download after the end of the script.
    """

    # BUGFIX: guarantee a defined return value even when "ftptexts" is empty
    # (previously "failed" was only assigned inside the loop).
    failed = []

    # force inputs to take list format
    ftptexts = core.enf_list(ftptexts)
    if filetypes:
        filetypes = core.enf_list(filetypes)

    for ftptext in ftptexts:
        # verify that the link file exists
        core.exists(ftptext)

        # default the output directory to the folder holding the link file
        if not outdir:
            outdir, _ = os.path.split(ftptext)

        # "with" guarantees the handle is closed even if a download raises
        with open(ftptext, 'r') as ftp:
            sites = ftp.readlines()

        print("Attempting to download {0} files!".format(len(sites)))
        print("Saving all files to {0}".format(outdir))

        # perform the first attempt
        failed = download_urls(sites, outdir, filetypes)

        # retry up to 19 more times while there are still failed items.
        # BUGFIX: retries previously called download_urls(failed, filetypes, outdir),
        # swapping the outdir/filetypes arguments relative to the first attempt.
        for i in range(1, 20):
            if len(failed) > 0:
                print("retry number {0} to grab {1} failed downloads!".format(i, len(failed)))
                time.sleep(60)
                failed = download_urls(failed, outdir, filetypes)

        # once all tries are complete, print a list of files which repeatedly failed
        if len(failed) > 0:
            print('Files at the following URLs have failed 20 download attempts')
            print('Manually verify that these files exist on the server:')
            for i in failed:
                print(i)
        else:
            print('Finished with no errors!')

    return failed
def extract_TRMM_NetCDF(filelist, outdir):

    """
     Converts TRMM NetCDF files (as downloaded from GLOVIS) to GeoTIFFs.

     inputs:
       filelist    list of '.nc' files to convert to tiffs.
       outdir      directory to which tif files should be saved

    returns an output filelist of local filepaths of extracted data.
    """

    # point arcpy at the output folder and prepare the result list
    arcpy.env.workspace = outdir
    output_filelist = []

    # accept either a single filepath or a list of them
    for netcdf_file in core.enf_list(filelist):

        # stage the netcdf as a temporary raster layer, then persist it as a tif
        arcpy.MakeNetCDFRasterLayer_md(netcdf_file, "r", "longitude", "latitude",
                                       "r", "", "", "BY_VALUE")
        tif_path = core.create_outname(outdir, netcdf_file, "e", "tif")
        arcpy.CopyRaster_management("r", tif_path, "", "", "", "NONE", "NONE", "")

        output_filelist.append(tif_path)
        print("Converted netCDF file " + tif_path + " to Raster")

    return output_filelist
def extract_GCMO_NetCDF(netcdf_list, variable, outdir):
    """
    Extracts all time layers from a "Global Climate Model Output" NetCDF layer

    :param netcdf_list:     List of netcdfs from CORDEX climate distribution
    :param variable:        The climate variable of interest (tsmax, tsmin, etc)
    :param outdir:          Output directory to save files.

    :return output_filelist: returns list of files created by this function

    :raises Exception:      if "variable" is not present in a netcdf.
    """

    output_filelist = []

    # make sure the output directory exists before writing into it
    if not os.path.exists(outdir):
        os.makedirs(outdir)

    netcdf_list = core.enf_list(netcdf_list)

    for netcdf in netcdf_list:
        # get net cdf properties object
        props = arcpy.NetCDFFileProperties(netcdf)

        print("finding dimensions")
        dims = props.getDimensions()
        for dim in dims:
            # BUGFIX: this was a Python 2 print *statement*, inconsistent with
            # the print() calls used everywhere else in this module (and a
            # syntax error under Python 3).
            print("{0} {1}".format(dim, props.getDimensionSize(dim)))

        # make sure the variable is in this netcdf
        if variable:
            if not variable in props.getVariables():
                print("Valid variables for this file include {0}".format(props.getVariables()))
                raise Exception("Variable '{0}' is not in this netcdf!".format(variable))

        for dim in dims:
            if dim == "time":

                # set other dimensions
                x_dim = "lon"
                y_dim = "lat"
                band_dim = ""
                valueSelectionMethod = "BY_VALUE"

                size = props.getDimensionSize(dim)
                for i in range(size):

                    # sanitize the dimname for invalid characters
                    dimname = props.getDimensionValue(dim, i).replace(" 12:00:00 PM", "")
                    dimname = dimname.replace("/", "-").replace(" ", "_")

                    dim_value = [["time", props.getDimensionValue(dim, i)]]
                    print("extracting '{0}' from '{1}'".format(variable, dim_value))

                    outname = core.create_outname(outdir, netcdf, dimname, 'tif')
                    output_filelist.append(outname)

                    # stage the time slice as a temporary layer, then copy to tif
                    arcpy.MakeNetCDFRasterLayer_md(netcdf, variable, x_dim, y_dim, "temp",
                                                   band_dim, dim_value, valueSelectionMethod)
                    arcpy.CopyRaster_management("temp", outname, "", "", "", "NONE", "NONE", "")

    return output_filelist
def TRMM_NetCDF(filelist, outdir):
    """
    Converts TRMM NetCDF files (as downloaded from GLOVIS) to GeoTIFF rasters.

    :param filelist:            list of '.nc' files to convert to tifs.
    :param outdir:              directory to which tif files should be saved

    :return output_filelist:    list of local filepaths of extracted data.
    """

    # point arcpy at the output folder and prepare the result list
    arcpy.env.workspace = outdir
    converted = []

    # accept a single path as well as a list of paths
    for nc_path in core.enf_list(filelist):

        # the target tif shares the source name, minus the ".nc" extension
        tif_path = nc_path[:-3] + ".tif"

        # stage the netcdf as a temporary raster layer, then copy it out
        arcpy.MakeNetCDFRasterLayer_md(nc_path, "r", "longitude", "latitude",
                                       "r", "", "", "BY_VALUE")
        arcpy.CopyRaster_management("r", tif_path, "", "", "",
                                    "NONE", "NONE", "")
        converted.append(tif_path)
        print('Converted netCDF file ' + nc_path + ' to Raster')

    return converted
Beispiel #6
0
def toa_reflectance_8(band_nums, meta_path, outdir = False):

    """
    Converts Landsat 8 bands to Top-of-Atmosphere reflectance.

     To be performed on raw Landsat 8 level 1 data. See link below for details
     see here [http://landsat.usgs.gov/Landsat8_Using_Product.php]

     Inputs:
       band_nums   A list of desired band numbers such as [3,4,5]
       meta_path   The full filepath to the metadata file for those bands
       outdir      Output directory to save converted files. If left False it will save ouput
                       files in the same directory as input files.
    """

    outlist = []

    # bands carried by the OLI sensor; the conversion is only valid for these
    OLI_bands = ['1','2','3','4','5','6','7','8','9']

    # enforce the list of band numbers and grab metadata from the MTL file
    band_nums = map(str, core.enf_list(band_nums))
    meta_path = os.path.abspath(meta_path)
    meta = grab_meta(meta_path)

    for band_num in band_nums:

        # skip (with a message) any band the OLI sensor does not carry
        if band_num not in OLI_bands:
            print("Can only perform reflectance conversion on OLI sensor bands")
            print("Skipping band {0}".format(band_num))
            continue

        # locate the band tif next to the MTL file and pull its scaling terms
        band_path = meta_path.replace("MTL.txt", "B{0}.tif".format(band_num))
        Qcal = arcpy.Raster(band_path)
        Mp  = getattr(meta, "REFLECTANCE_MULT_BAND_{0}".format(band_num))  # multiplicative scaling factor
        Ap  = getattr(meta, "REFLECTANCE_ADD_BAND_{0}".format(band_num))   # additive rescaling factor
        SEA = getattr(meta, "SUN_ELEVATION") * (math.pi / 180)             # sun elevation angle theta_se

        # null out the zero-valued black background so it doesn't skew values
        null_raster = arcpy.sa.SetNull(Qcal, Qcal, "VALUE = 0")

        # calculate top-of-atmosphere reflectance
        TOA_ref = (((null_raster * Mp) + Ap) / (math.sin(SEA)))

        # save under an automated name in outdir if given, else beside the inputs
        if outdir:
            outdir = os.path.abspath(outdir)
            outname = core.create_outname(outdir, band_path, "TOA_Ref", "tif")
        else:
            parent = os.path.split(meta_path)[0]
            outname = core.create_outname(parent, band_path, "TOA_Ref", "tif")

        TOA_ref.save(outname)
        outlist.append(outname)
        print("Saved output at {0}".format(outname))

    return outlist
def TRMM_NetCDF(filelist, outdir):
    """
    Converts NetCDFs to tiffs; designed to work with TRMM data downloaded
    from GLOVIS.

    :param filelist:            list of '.nc' files to convert to tifs.
    :param outdir:              directory to which tif files should be saved

    :return output_filelist:    list of local filepaths of extracted data.
    """

    arcpy.env.workspace = outdir
    output_filelist = []

    # normalize a lone filepath into a one-element list, then convert each file
    for source_nc in core.enf_list(filelist):

        # derive the output name by swapping ".nc" for ".tif"
        target_tif = source_nc[:-3] + ".tif"

        # build a temporary raster layer from the netcdf and persist it
        arcpy.MakeNetCDFRasterLayer_md(source_nc, "r", "longitude", "latitude", "r", "", "", "BY_VALUE")
        arcpy.CopyRaster_management("r", target_tif, "", "", "", "NONE", "NONE", "")

        output_filelist.append(target_tif)
        print('Converted netCDF file ' + source_nc + ' to Raster')

    return output_filelist
Beispiel #8
0
def cloud_mask_8(band_nums, BQA_path, outdir=False):
    """
    Removal of cloud-covered pixels in raw Landsat 8 bands using the BQA file included.

    To be performed on raw Landsat 8 level 1 data.

    Inputs:
      band_nums   A list of desired band numbers such as [3 4 5]
      BQA_path    The full filepath to the BQA file for the Landsat 8 dataset
      outdir      Output directory to save cloudless band tifs and the cloud mask
    """

    # enforce the input band numbers as a list of strings
    band_nums = map(str, core.enf_list(band_nums))

    # reclassify BQA values: cloud ranges -> 0, clear ranges -> 1, fill (1) -> NoData
    cloud_remap = RemapRange([[50000, 65000, 0], [28670, 32000, 0], [2, 28669, 1],
                              [32001, 49999, 1], [1, 1, "NoData"]])
    binary_mask = Reclassify(BQA_path, "Value", cloud_remap)

    # name and save the binary cloud mask tiff file
    mask_basename = BQA_path.replace("_BQA", "")
    mask_path = core.create_outname(outdir, mask_basename, "Mask", "tif")
    binary_mask.save(mask_path)

    # erase cloud pixels from each requested band and save each as a new tiff
    for band_num in band_nums:
        band_path = BQA_path.replace("BQA.tif", "B{0}.tif".format(band_num))
        cloudless_path = core.create_outname(outdir, band_path, "NoClds", "tif")
        cloudless = Con(binary_mask, band_path, "", "VALUE = 1")
        cloudless.save(cloudless_path)

    return
Beispiel #9
0
def fetch_Landsat8(path_row_pairs,
                   start_dto,
                   end_dto,
                   outdir,
                   max_cloud_cover=100,
                   bands=None):
    """
    This function downloads all landsat 8 tiles for the input path_row_pairs and
    within the bounds of the start_dto and the end_dto, and saves them to the output directory.
    It uses the amazon web service at
    [https://aws.amazon.com/public-data-sets/landsat/]

    :param path_row_pairs:  tupled integer values of path,row coordinates of tile. may be a list of several tuples. example: [(1,1),(1,2)]
    :param start_dto:       python datetime object of start date of range
    :param end_dto:         python datetime object of end date of range
    :param outdir:          the folder to save the output landsat files in
    :param max_cloud_cover: maximum percent cloud cover that is acceptable to download the file.
    :param bands:           list of bands to download, passed through to fetch_Landsat8_tile.

    :return output_filelist: A list of tile names downloaded by this function.
    """

    # fetch an updated scene list with custom function.
    scene_list = fetch_Landsat8_scene_list()

    path_row_pairs = core.enf_list(path_row_pairs)
    output_tilenames = []

    for path_row_pair in path_row_pairs:
        # format input strings
        path, row = path_row_pair
        path_str = str(path).zfill(3)
        row_str = str(row).zfill(3)

        # the tile id is invariant for this path/row pair, so build it once
        # (hoisted out of the scene loop below)
        pathrow_id = "LC8{0}{1}".format(path_str, row_str)

        # loop through the scene list; download any scene for this path/row
        # whose date is within range and cloud cover is acceptable.
        # BUGFIX: the loop variable was previously named "row", silently
        # clobbering the row number unpacked from path_row_pair above.
        for scene in scene_list:
            tilename = scene[0]
            datestring = scene[1].split(".")[
                0]  # removes fractional seconds from datestring
            date = datetime.datetime.strptime(datestring, "%Y-%m-%d %H:%M:%S")
            cloud_cover = float(scene[2])

            if cloud_cover < max_cloud_cover:
                if pathrow_id in scene[0]:
                    if start_dto <= date <= end_dto:
                        amazon_url = scene[-1]
                        fetch_Landsat8_tile(amazon_url, tilename, outdir,
                                            bands)
                        output_tilenames.append(os.path.join(outdir, tilename))

    print("Finished retrieving landsat 8 data!")
    return output_tilenames
Beispiel #10
0
def fetch_Landsat8_tile(amazon_url, tilename, outdir, bands=None):
    """
    This function makes use of the amazon web service hosted Landsat 8 OLI data.
    It receives an amazon web url for a single landsat tile, and downloads the desired files

    :param amazon_url:  url to amazons page hosting these landsat tiles
    :param tilename:    landsat tile name
    :param outdir:      output directory to place landsat data
    :param bands:       list of bands to download when not all are desired, options include
                        any of [1,2,3,4,5,6,7,8,9,10,11,"QA"]. The MTL file is ALWAYS downloaded.

    :return tilepath:   returns a filepath to the new landsat tile folder with .TIFs in it
    """

    if bands is None:
        bands = map(str, [1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, "QA"])
    else:
        bands = map(str, (core.enf_list(bands)))

    # fetch the scene's html index page listing the downloadable files.
    # BUGFIX: the connection was previously never closed.
    connection = urllib.urlopen(amazon_url)
    try:
        page = connection.read().split("\n")
    finally:
        connection.close()

    print("Downloading landsat tile {0}".format(tilename))

    for line in page:
        if "<li><a href=" in line:

            # pull filename from html code
            filename = line.split('"')[1]

            # pull out band information
            band_id = filename.replace(tilename + "_",
                                       "").split(".")[0].replace("B", "")
            good_band = band_id in bands
            mtl_file = "MTL" in band_id

            # download desired files.
            if good_band or mtl_file:
                link = amazon_url.replace("index.html", filename)
                savename = os.path.join(outdir, tilename, filename)

                # retry once on failure if filepath doesn't already exist.
                # BUGFIX: previously a bare "except:" which also swallowed
                # KeyboardInterrupt and SystemExit.
                if not os.path.isfile(savename):
                    try:
                        download_url(link, savename)
                    except Exception:
                        download_url(link, savename)
                    print("\tDownloaded {0}".format(filename))
                else:
                    print("\t Found {0}".format(filename))

    return os.path.join(outdir, tilename)
Beispiel #11
0
def _extract_HDF_datatype(hdf,
                          layer_indexs,
                          outdir=None,
                          datatype=None,
                          force_custom=False,
                          nodata_value=None):
    """
    This function wraps "_extract_HDF_layer_data" and "_gdal_dataset_to_tif"
    It only works for datatypes listed in the datatype_library.csv

    :param hdf:             a single hdf filepath
    :param layer_indexs:    list of int index values of layers to extract
    :param outdir:          filepath to output directory to place tifs. If left
                            as "None" output geotiffs will be placed right next to
                            input HDF.
    :param datatype:        a dnppy.convert.datatype object created from an
                            entry in the datatype_library.csv. Required despite
                            the "None" default.
    :param force_custom:    if True, this will force the data to take on the
                            projection and geotransform attributes from
                            the datatype object, even if valid projection
                            and geotransform info can be pulled from the gdal
                            dataset. Should almost never be True.
    :param nodata_value:    the value to set to Nodata

    :return:                list of filepaths to output files
    """

    # ROBUSTNESS: the body below dereferences datatype.projectionTXT and
    # datatype.geotransform, so the "None" default would otherwise surface
    # as an opaque AttributeError - fail fast with a clear message instead.
    if datatype is None:
        raise Exception("A 'datatype' object from the datatype_library is required!")

    output_filelist = []

    # default to placing outputs right next to the input HDF
    if outdir is None:
        outdir = os.path.dirname(hdf)

    data = _extract_HDF_layer_data(hdf, layer_indexs)
    layer_indexs = core.enf_list(layer_indexs)
    for layer_index in layer_indexs:

        dataset = data[layer_index]
        outpath = core.create_outname(outdir, hdf, str(layer_index), "tif")

        print("creating dataset at {0}".format(outpath))

        _gdal_dataset_to_tif(dataset,
                             outpath,
                             cust_projection=datatype.projectionTXT,
                             cust_geotransform=datatype.geotransform,
                             force_custom=force_custom,
                             nodata_value=nodata_value)

        output_filelist.append(outpath)

    return output_filelist
Beispiel #12
0
def fetch_Landsat8_tile(amazon_url, tilename, outdir, bands = None):
    """
    This function makes use of the amazon web service hosted Landsat 8 OLI data.
    It receives an amazon web url for a single landsat tile, and downloads the desired files

    :param amazon_url:  url to amazons page hosting these landsat tiles
    :param tilename:    landsat tile name
    :param outdir:      output directory to place landsat data
    :param bands:       list of bands to download when not all are desired, options include
                        any of [1,2,3,4,5,6,7,8,9,10,11,"QA"]. The MTL file is ALWAYS downloaded.

    :return tilepath:   returns a filepath to the new landsat tile folder with .TIFs in it
    """

    if bands is None:
        bands = map(str, [1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, "QA"])
    else:
        bands = map(str, (core.enf_list(bands)))

    # fetch the scene's html index page listing the downloadable files.
    # BUGFIX: the connection was previously never closed.
    connection = urllib.urlopen(amazon_url)
    try:
        page = connection.read().split("\n")
    finally:
        connection.close()

    print("Downloading landsat tile {0}".format(tilename))

    for line in page:
        if "<li><a href=" in line:

            # pull filename from html code
            filename = line.split('"')[1]

            # pull out band information
            band_id   = filename.replace(tilename + "_","").split(".")[0].replace("B","")
            good_band = band_id in bands
            mtl_file  = "MTL" in band_id

            # download desired files.
            if good_band or mtl_file:
                link     = amazon_url.replace("index.html",filename)
                savename = os.path.join(outdir, tilename, filename)

                # retry once on failure if filepath doesn't already exist.
                # BUGFIX: previously bare "except:" clauses which also
                # swallowed KeyboardInterrupt and SystemExit.
                if not os.path.isfile(savename):
                    try:
                        download_url(link, savename)
                    except Exception:
                        download_url(link, savename)
                    print("\tDownloaded {0}".format(filename))
                else:
                    print("\t Found {0}".format(filename))

    return os.path.join(outdir, tilename)
Beispiel #13
0
def atsat_bright_temp_8(band_nums, meta_path, outdir = False):

    """
    Converts Landsat 8 TIRS bands to at satellite brightnes temperature in Kelvins

     To be performed on raw Landsat 8 level 1 data. See link below for details
     see here http://landsat.usgs.gov/Landsat8_Using_Product.php

     Inputs:
       band_nums   A list of desired band numbers, which should be [10,11]
       meta_path   The full filepath to the metadata file for those bands
       outdir      Output directory to save converted files. If left False it will save ouput
                   files in the same directory as input files.
    """

    # enforce the band numbers as a list of strings and load the MTL metadata
    band_nums = core.enf_list(band_nums)
    band_nums = map(str, band_nums)
    meta = grab_meta(meta_path)

    for band_num in band_nums:
        # only the TIRS bands (10 and 11) carry thermal data
        if band_num in ["10","11"]:
            band_path = meta_path.replace("MTL.txt","B{0}.tif".format(band_num))
            Qcal = arcpy.Raster(band_path)

            # requires first converting to radiance
            Ml   = getattr(meta,"RADIANCE_MULT_BAND_" + band_num) # multiplicative scaling factor
            Al   = getattr(meta,"RADIANCE_ADD_BAND_" + band_num)  # additive rescaling factor

            TOA_rad = (Qcal * Ml) + Al

            # now convert to at-satellite brightness temperature
            K1   = getattr(meta,"K1_CONSTANT_BAND_" + band_num)  # thermal conversion constant 1
            K2   = getattr(meta,"K2_CONSTANT_BAND_" + band_num)  # thermal conversion constant 2

            Bright_Temp = K2/(arcpy.sa.Ln((K1/TOA_rad) + 1))

            # keep a copy of the metadata alongside the converted raster
            metaname = core.create_outname(outdir, meta_path, "Bright-Temp")
            shutil.copyfile(meta_path,metaname)

            outname = core.create_outname(outdir, band_path, "Bright-Temp")
            Bright_Temp.save(outname)
            print("Saved output at {0}".format(outname))
            del TOA_rad

        else:
            print("Can only perform brightness temperature on TIRS sensor bands!")
            # BUGFIX: previously printed "outname", which is undefined until a
            # TIRS band has been processed (NameError on the first skipped
            # band) and prints the wrong value afterwards.
            print("Skipping band  {0}".format(band_num))
    return
Beispiel #14
0
def fetch_Landsat8(path_row_pairs, start_dto, end_dto, outdir,
                   max_cloud_cover = 100, bands = None):
    """
    This function downloads all landsat 8 tiles for the input path_row_pairs and
    within the bounds of the start_dto and the end_dto, and saves them to the output directory.
    It uses the amazon web service at
    [https://aws.amazon.com/public-data-sets/landsat/]

    :param path_row_pairs:  tupled integer values of path,row coordinates of tile. may be a list of several tuples. example: [(1,1),(1,2)]
    :param start_dto:       python datetime object of start date of range
    :param end_dto:         python datetime object of end date of range
    :param outdir:          the folder to save the output landsat files in
    :param max_cloud_cover: maximum percent cloud cover that is acceptable to download the file.
    :param bands:           list of bands to download, passed through to fetch_Landsat8_tile.

    :return output_filelist: A list of tile names downloaded by this function.
    """

    # fetch an updated scene list with custom function.
    scene_list = fetch_Landsat8_scene_list()

    path_row_pairs = core.enf_list(path_row_pairs)
    output_tilenames = []

    for path_row_pair in path_row_pairs:
        # format input strings
        path, row = path_row_pair
        path_str  = str(path).zfill(3)
        row_str   = str(row).zfill(3)

        # the tile id is invariant for this path/row pair, so build it once
        # (hoisted out of the scene loop below)
        pathrow_id = "LC8{0}{1}".format(path_str, row_str)

        # loop through the scene list; download any scene for this path/row
        # whose date is within range and cloud cover is acceptable.
        # BUGFIX: the loop variable was previously named "row", silently
        # clobbering the row number unpacked from path_row_pair above.
        for scene in scene_list:
            tilename    = scene[0]
            datestring  = scene[1].split(".")[0] # removes fractional seconds from datestring
            date        = datetime.datetime.strptime(datestring, "%Y-%m-%d %H:%M:%S")
            cloud_cover = float(scene[2])

            if cloud_cover < max_cloud_cover:
                if pathrow_id in scene[0]:
                    if start_dto <=  date  <= end_dto:
                        amazon_url = scene[-1]
                        fetch_Landsat8_tile(amazon_url, tilename, outdir, bands)
                        output_tilenames.append(os.path.join(outdir, tilename))

    print("Finished retrieving landsat 8 data!")
    return output_tilenames
def _extract_HDF_datatype(hdf, layer_indexs, outdir = None, datatype = None,
                             force_custom = False, nodata_value = None):
    """
    This function wraps "_extract_HDF_layer_data" and "_gdal_dataset_to_tif"
    It only works for datatypes listed in the datatype_library.csv

    :param hdf:             a single hdf filepath
    :param layer_indexs:    list of int index values of layers to extract
    :param outdir:          filepath to output directory to place tifs. If left
                            as "None" output geotiffs will be placed right next to
                            input HDF.
    :param datatype:        a dnppy.convert.datatype object created from an
                            entry in the datatype_library.csv. Required despite
                            the "None" default.
    :param force_custom:    if True, this will force the data to take on the
                            projection and geotransform attributes from
                            the datatype object, even if valid projection
                            and geotransform info can be pulled from the gdal
                            dataset. Should almost never be True.
    :param nodata_value:    the value to set to Nodata

    :return:                list of filepaths to output files
    """

    # ROBUSTNESS: the body below dereferences datatype.projectionTXT and
    # datatype.geotransform, so the "None" default would otherwise surface
    # as an opaque AttributeError - fail fast with a clear message instead.
    if datatype is None:
        raise Exception("A 'datatype' object from the datatype_library is required!")

    output_filelist = []

    # default to placing outputs right next to the input HDF
    if outdir is None:
        outdir = os.path.dirname(hdf)

    data = _extract_HDF_layer_data(hdf, layer_indexs)
    layer_indexs = core.enf_list(layer_indexs)
    for layer_index in layer_indexs:

        dataset = data[layer_index]
        outpath = core.create_outname(outdir, hdf, str(layer_index), "tif")

        print("creating dataset at {0}".format(outpath))

        _gdal_dataset_to_tif(dataset, outpath,
                            cust_projection = datatype.projectionTXT,
                            cust_geotransform = datatype.geotransform,
                            force_custom = force_custom,
                            nodata_value = nodata_value)

        output_filelist.append(outpath)

    return output_filelist
Beispiel #16
0
def fetch_Landsat8_tile(amazon_url, tilename, outdir, bands=None):
    """
    Downloads files for a single Landsat 8 OLI tile from the amazon web
    service hosted archive, given the tile's amazon url.

    defaults to downlod all bands, but users can call
        bands = [1,2,3,4,5,6,7,8,9,10,11,"QA"] to control which files are downloaded.
        The MTL file is ALWAYS downloaded.
    """

    # normalize the requested band list into strings
    if bands is None:
        bands = map(str, [1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, "QA"])
    else:
        bands = map(str, (core.enf_list(bands)))

    # fetch the scene's html index page and split it into lines
    html_lines = urllib.urlopen(amazon_url).read().split("\n")

    print("Downloading landsat tile {0}".format(tilename))

    for html_line in html_lines:
        # only anchor list items describe downloadable files
        if "<li><a href=" not in html_line:
            continue

        # the linked filename sits between the first pair of double quotes
        filename = html_line.split('"')[1]

        # strip tile prefix and extension to isolate the band identifier
        band_id = filename.replace(tilename + "_", "").split(".")[0].replace("B", "")

        # grab every requested band plus the metadata file
        if band_id in bands or "MTL" in band_id:
            link = amazon_url.replace("index.html", filename)
            download_url(link, os.path.join(outdir, tilename, filename))
            print("\tDownloaded {0}".format(filename))
    return
Beispiel #17
0
def toa_reflectance_8(band_nums, meta_path, outdir = False):

    """
    Converts Landsat 8 bands to Top of atmosphere reflectance.

     To be performed on raw Landsat 8 level 1 data. See link below for details
     see here [http://landsat.usgs.gov/Landsat8_Using_Product.php]

     Inputs:
       band_nums   A list of desired band numbers such as [3,4,5]
       meta_path   The full filepath to the metadata file for those bands
       outdir      Output directory to save converted files. If left False it will save ouput
                   files in the same directory as input files.
    """

    # bands carried by the OLI sensor; the conversion is only valid for these
    OLI_bands = ['1','2','3','4','5','6','7','8','9']

    # enforce band numbers as strings and load the MTL metadata
    band_nums = map(str, core.enf_list(band_nums))
    meta = grab_meta(meta_path)

    for band_num in band_nums:

        # skip (with a message) any band the OLI sensor does not carry
        if band_num not in OLI_bands:
            print("Can only perform reflectance conversion on OLI sensor bands!")
            print("Skipping band {0}".format(band_num))
            continue

        # locate the band tif next to the MTL file and pull its scaling terms
        tif_path = meta_path.replace("MTL.txt", "B{0}.tif".format(band_num))
        Qcal = arcpy.Raster(tif_path)
        Mp  = getattr(meta, "REFLECTANCE_MULT_BAND_" + band_num)  # multiplicative scaling factor
        Ap  = getattr(meta, "REFLECTANCE_ADD_BAND_" + band_num)   # additive rescaling factor
        SEA = getattr(meta, "SUN_ELEVATION") * (math.pi / 180)    # sun elevation angle theta_se

        # calculate top-of-atmosphere reflectance
        TOA_ref = (((Qcal * Mp) + Ap) / (math.sin(SEA)))

        # keep a copy of the metadata alongside the converted raster
        meta_copy = core.create_outname(outdir, meta_path, "TOA-Ref", "txt")
        shutil.copyfile(meta_path, meta_copy)

        out_tif = core.create_outname(outdir, tif_path, "TOA-Ref", "tif")
        TOA_ref.save(out_tif)
        print("Saved output at {0}".format(out_tif))
    return
Beispiel #18
0
def fetch_Landsat8_tile(amazon_url, tilename, outdir, bands = None):
    """
    This function makes use of the amazon web service hosted Landsat 8 OLI data.
    It recieves an amazon web url for a single landsat tile, and downloads the desired files

    :param amazon_url:  url of the tile's "index.html" page on the AWS landsat bucket
    :param tilename:    landsat scene id, used to parse band ids out of the filenames
    :param outdir:      directory in which a <tilename> subfolder of files is saved
    :param bands:       defaults to download all bands, but users can pass
                        bands = [1,2,3,4,5,6,7,8,9,10,11,"QA"] to control which files
                        are downloaded. The MTL file is ALWAYS downloaded.
    """

    # BUGFIX: materialize the band ids as a real list of strings. Under Python 3,
    # `map()` returns a one-shot iterator that would be exhausted by the first
    # `band_id in bands` membership test, silently skipping every later band.
    if bands is None:
        bands = [str(b) for b in [1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, "QA"]]
    else:
        bands = [str(b) for b in core.enf_list(bands)]

    # read the html index page listing this tile's files
    connection = urllib.urlopen(amazon_url)
    page       = connection.read().split("\n")

    print("Downloading landsat tile {0}".format(tilename))

    for line in page:
        if "<li><a href=" in line:

            # pull filename out of the anchor tag's href attribute
            filename = line.split('"')[1]

            # strip the tile prefix and extension to isolate the band identifier
            band_id   = filename.replace(tilename + "_","").split(".")[0].replace("B","")
            good_band = band_id in bands
            mtl_file  = "MTL" in band_id

            # download desired files (the MTL metadata file is always fetched).
            if good_band or mtl_file:
                link     = amazon_url.replace("index.html", filename)
                savename = os.path.join(outdir, tilename, filename)
                download_url(link, savename)
                print("\tDownloaded {0}".format(filename))
    return
Beispiel #19
0
def toa_radiance_8(band_nums, meta_path, outdir=False):
    """
    Top of Atmosphere radiance (in Watts/(square meter * steradians * micrometers)) conversion for landsat 8 data

    To be performed on raw Landsat 8 level 1 data. For details see
    http://landsat.usgs.gov/Landsat8_Using_Product.php

    Inputs:
    band_nums   A list of desired band numbers such as [3 4 5]
    meta_path   The full filepath to the metadata (MTL) file for those bands
    outdir      Output directory to save converted files.
    """

    meta = grab_meta(meta_path)

    for band in map(str, core.enf_list(band_nums)):

        tif_path = meta_path.replace("MTL.txt", "B{0}.tif".format(band))
        dn_raster = arcpy.Raster(tif_path)

        # radiometric rescaling coefficients from the MTL metadata
        gain = getattr(meta, "RADIANCE_MULT_BAND_" + band)  # multiplicative scaling factor
        bias = getattr(meta, "RADIANCE_ADD_BAND_" + band)   # additive rescaling factor

        # linear DN -> radiance rescale
        toa_radiance = (dn_raster * gain) + bias

        # keep a copy of the metadata alongside the converted band
        meta_copy = core.create_outname(outdir, meta_path, "TOA-Rad", "txt")
        shutil.copyfile(meta_path, meta_copy)

        out_tif = core.create_outname(outdir, tif_path, "TOA-Rad", "tif")
        toa_radiance.save(out_tif)

        print("Saved toa_radiance at {0}".format(out_tif))

    return
Beispiel #20
0
def TRMM_NetCDF(filelist, outdir):
    """
    Function converts NetCDFs to tiffs. Designed to work with TRMM data.

    :param filelist: list of '.nc' files to conver to tifs.
    :param outdir:   directory to which tif files should be saved
    """

    # all raster layers are created within the output workspace
    arcpy.env.workspace = outdir

    for nc_file in core.enf_list(filelist):

        # build an in-memory raster layer from the netCDF, then persist it as a tif
        # (the ".nc" extension is swapped for ".tif" in the output name)
        arcpy.MakeNetCDFRasterLayer_md(nc_file, "r", "longitude", "latitude", "r", "", "", "BY_VALUE")
        arcpy.CopyRaster_management("r", nc_file[:-3] + ".tif", "", "", "", "NONE", "NONE", "")
        print('Converted netCDF file ' + nc_file + ' to Raster')

    return
Beispiel #21
0
def toa_radiance_8(band_nums, meta_path, outdir = False):
    """
    Top of Atmosphere radiance (in Watts/(square meter * steradians * micrometers)) conversion for landsat 8 data

    To be performed on raw Landsat 8 level 1 data. For details see
    http://landsat.usgs.gov/Landsat8_Using_Product.php

    Inputs:
    band_nums   A list of desired band numbers such as [3 4 5]
    meta_path   The full filepath to the metadata (MTL) file for those bands
    outdir      Output directory to save converted files.
    """

    meta = grab_meta(meta_path)
    band_strings = [str(b) for b in core.enf_list(band_nums)]

    for band_str in band_strings:

        band_tif = meta_path.replace("MTL.txt", "B{0}.tif".format(band_str))
        qcal = arcpy.Raster(band_tif)

        gain = getattr(meta, "RADIANCE_MULT_BAND_" + band_str)  # multiplicative scaling factor
        bias = getattr(meta, "RADIANCE_ADD_BAND_" + band_str)   # additive rescaling factor

        # linear rescale of quantized DNs to at-sensor radiance
        toa_rad = qcal * gain + bias

        # copy the MTL metadata next to the converted band
        copied_meta = core.create_outname(outdir, meta_path, "TOA-Rad", "txt")
        shutil.copyfile(meta_path, copied_meta)

        out_path = core.create_outname(outdir, band_tif, "TOA-Rad", "tif")
        toa_rad.save(out_path)

        print("Saved toa_radiance at {0}".format(out_path))

    return
Beispiel #22
0
def fetch_SRTM(lat_lon_pairs, product, outdir = None, mosaic = None):
    """
    downloads data from the Shuttle Radar Topography Mission (SRTM)
    [http://e4ftl01.cr.usgs.gov/SRTM/]

    This data can be used to create DEMS of a variety of resolutions.

    :param lat_lon_pairs: tupled integer values of lat,lon combinations.
                          may be a list of tuples. (N positive, E positive)
    :param product:       short name of product you want. See link below
                          https://lpdaac.usgs.gov/products/measures_products_table
    :param outdir:        local directory to save downloaded files. Files are
                          saved in the current directory if left as None.
    :param mosaic:        Set to TRUE to mosaic all downloaded DEM tiles.

    :return tif_list:     a list of all successfully downloaded tif filepaths
                          for further manipulation

    Example:
        lat_lons = [(37,-76), (37,-77)]    # Two tiles
        prod = SRTMGL3                     #3 arc second DEM product)

        download.fetch_SRTM(lat_lons, prod)

    NOTE: arcmap will open the output hgt files ONLY if they are not renamed.
    turns out arcmap does some funky things when interpreting these files.
    """

    # build empty return list
    tif_list = []

    # sanitize input list
    lat_lon_pairs = core.enf_list(lat_lon_pairs)

    # determine product version.
    # BUGFIX: string equality must use "==", not "is"; the old identity
    # comparison against a literal only worked by CPython interning accident.
    if product == "SRTMGL30":
        print("Download of product SRTMGL30 is supported, but arcmap does not support this filetype")
        format_string = "{2}{3}{0}{1}.{4}.dem.zip"
        version = "002"
    else:
        format_string = "{0}{1}{2}{3}.{4}.hgt.zip"
        version = "003"

    host = "http://e4ftl01.cr.usgs.gov/SRTM"
    subhost = "{0}/{1}.{2}/2000.02.11/".format(host, product, version)

    print("Connecting to host at {0}".format(subhost))

    for lat, lon in lat_lon_pairs:

        # set North-south, East-West convention.
        if lat >= 0:
            NS = "N"
        else:
            NS = "S"

        if lon >= 0:
            EW = "E"
        else:
            EW = "W"

        if product == "SRTMGL30":

            # SRTM30 tiles are published on a coarse grid; snap the requested
            # coordinate to the nearest covering tile corner.
            if abs(lon) <= 20:
                lon = 20
            elif abs(lon) <= 60:
                lon = 60
            elif abs(lon) <= 100:
                lon = 100
            else:
                lon = 140

            if abs(lat) <= 10:
                lat = 10
            elif abs(lat) <= 40:
                lat = 40
            else:
                lat = 90

            # SRTM30 filenames use lowercase hemisphere letters
            NS = NS.lower()
            EW = EW.lower()

        # build up the filename and file link
        filename = format_string.format(NS, str(abs(lat)).zfill(2),
                                        EW, str(abs(lon)).zfill(3),
                                        product)

        filelink = "{0}/{1}".format(subhost, filename)

        # decide where to put the file, then download it
        if outdir is not None:
            outpath = os.path.join(outdir, filename)
        else:
            outpath = filename

        print("Downloading and extracting  {0}".format(filename))
        download_url(filelink, outpath)

        # unzip the file and reassemble descriptive name
        # (the with-statement closes the archive; the old explicit z.close() was redundant)
        itemname = "{0}{1}{2}{3}.hgt".format(NS, str(abs(lat)).zfill(2),
                                             EW, str(abs(lon)).zfill(3))
        with zipfile.ZipFile(outpath, "r") as z:
            # extracting with outdir=None places the file in the current directory
            z.extract(itemname, outdir)

        # clean up the zip and add the extracted file to the output list.
        # BUGFIX: os.path.join(None, itemname) raised when outdir was left as None.
        os.remove(outpath)
        if outdir is not None:
            tif_list.append(os.path.join(outdir, itemname))
        else:
            tif_list.append(itemname)

    if mosaic is True:

        arcpy.MosaicToNewRaster_management(tif_list, outdir, "SRTM_mosaic.tif",
                                       number_of_bands = 1, pixel_type = "32_BIT_SIGNED")

    print("Finished download and extraction of SRTM data")

    return tif_list
Beispiel #23
0
def extract_from_hdf(file_list, layer_list, layer_names = False, outdir = None):
    """
    Extracts tifs from MODIS HDF files, ensures proper projection.

    :param file_list:    either a list of '.hdf' files from which data should be extracted,
                         or a directory containing '.hdf'  files.
    :param layer_list:   list of layer numbers to pull out as individual tifs should be integers
                         such as [0,4] for the 0th and 4th layer respectively.
    :param layer_names:  list of layer names to put more descriptive file suffixes to each layer
    :param outdir:       directory to which tif files should be saved
                         if outdir is left as 'None', files are saved in the same directory as
                         the input file was found.

    :return output_filelist: returns a list of all files created by this function
    """

    if outdir is not None:
        if not os.path.exists(outdir):
            os.makedirs(outdir)

    # enforce lists for iteration purposes and sanitize inputs.
    # BUGFIX: the old code removed entries from file_list while iterating over
    # it, which skips the element following each removal; build a filtered
    # list instead so every sidecar (.xml/.ovr) and non-hdf file is dropped.
    file_list = [f for f in core.enf_filelist(file_list)
                 if '.hdf' in f and '.xml' not in f and '.ovr' not in f]

    layer_list  = core.enf_list(layer_list)
    layer_names = core.enf_list(layer_names)

    # ignore user input layer_names if they are invalid, but print warnings.
    # BUGFIX: instantiating Warning(...) displayed nothing; print the message.
    if layer_names and not len(layer_names) == len(layer_list):
        print('Warning: layer_names must be the same length as layer_list!')
        print('Warning: Omitting user defined layer_names!')
        layer_names = False

    output_filelist = []

    # iterate through every file in the input file_list
    for infile in file_list:

        # pull the filename and path apart
        path, name = os.path.split(infile)
        arcpy.env.workspace = path

        for i, layer in enumerate(layer_list):

            # specify the layer names: user-supplied suffix, or zero-padded layer index
            if layer_names:
                layername = layer_names[i]
            else:
                layername = str(layer).zfill(3)

            # use the input output directory if the user input one, otherwise build one
            if outdir:
                outname = os.path.join(outdir, "{0}_{1}.tif".format(name[:-4], layername))
            else:
                outname = os.path.join(path, "{0}_{1}.tif".format(name[:-4], layername))

            # perform the extracting and projection definition
            try:
                arcpy.ExtractSubDataset_management(infile, outname, str(layer))
                define_projection(outname)
                output_filelist.append(outname)

                print("Extracted {0}".format(os.path.basename(outname)))
            # narrowed from a bare except: so KeyboardInterrupt/SystemExit propagate
            except Exception:
                print("Failed to extract {0} from {1}".format(os.path.basename(outname),
                                                               os.path.basename(infile)))

    return output_filelist
Beispiel #24
0
def toa_radiance_8(band_nums, meta_path, outdir=None):
    """
    Top of Atmosphere radiance (in Watts/(square meter x steradians x micrometers))
    conversion for landsat 8 data. To be performed on raw Landsat 8
    level 1 data. See link below for details:
    see here http://landsat.usgs.gov/Landsat8_Using_Product.php

    :param band_nums:   A list of desired band numbers such as [3, 4, 5]
    :param meta_path:   The full filepath to the metadata file for those bands
    :param outdir:      Output directory to save converted files.

    :return output_filelist:    List of filepaths created by this function.
    """

    meta_path = os.path.abspath(meta_path)
    output_filelist = []

    #enforce list of band numbers and grab the metadata from the MTL file
    band_nums = core.enf_list(band_nums)
    band_nums = map(str, band_nums)
    meta = landsat_metadata(meta_path)

    OLI_bands = ['1', '2', '3', '4', '5', '6', '7', '8', '9']

    #loop through each band
    for band_num in band_nums:
        if band_num in OLI_bands:

            #create the band name
            band_path = meta_path.replace("MTL.txt",
                                          "B{0}.tif".format(band_num))
            Qcal = arcpy.Raster(band_path)

            # mask DN == 0 (fill) pixels so they remain NoData in the output
            null_raster = arcpy.sa.SetNull(Qcal, Qcal, "VALUE = 0")

            #scrape the attribute data
            Ml = getattr(meta, "RADIANCE_MULT_BAND_{0}".format(
                band_num))  # multiplicative scaling factor
            Al = getattr(meta, "RADIANCE_ADD_BAND_{0}".format(
                band_num))  # additive rescaling factor

            #calculate Top-of-Atmosphere radiance
            TOA_rad = (null_raster * Ml) + Al
            del null_raster

            # create the output name and save the TOA radiance tiff.
            # BUGFIX: the old code split on "\\" or "//" and left `name`
            # undefined for ordinary posix paths using "/"; basename handles
            # the platform separator correctly in all cases.
            name = os.path.basename(meta_path)

            rad_name = name.replace("_MTL.txt", "_B{0}".format(band_num))

            if outdir is not None:
                outdir = os.path.abspath(outdir)
                outname = core.create_outname(outdir, rad_name, "TOA_Rad",
                                              "tif")
            else:
                folder = os.path.split(meta_path)[0]
                outname = core.create_outname(folder, rad_name, "TOA_Rad",
                                              "tif")

            TOA_rad.save(outname)
            output_filelist.append(outname)
            print("Saved toa_radiance at {0}".format(outname))

        #if listed band is not a OLI sensor band, skip it and print message
        else:
            print(
                "Can only perform reflectance conversion on OLI sensor bands")
            print("Skipping band {0}".format(band_num))

    return output_filelist
Beispiel #25
0
def download_urls(url_list, outdir, file_types=None):
    """
    Downloads a list of files. Retries failed downloads

    This script downloads a list of files and places it in the output directory. It was
    built to be nested within "Download_filelist" to allow loops to continuously retry
    failed files until they are successful or a retry limit is reached.

    :param url_list:   array of urls, probably as read from a text file
    :param outdir:     folder where files are to be placed after download
    :param file_types: list of file types to download. Useful for excluding extraneous
                       metadata by only downloading 'hdf' or 'tif' for example. Please note
                       that often times, you actually NEED the metadata.

    :return failed:    list of files which failed download
    """

    failed = []
    url_list = core.enf_list(url_list)

    # creates output folder at desired path if it doesn't already exist
    if not os.path.exists(outdir):
        os.makedirs(outdir)

    # establish a wait time that will increase when downloads fail. This helps to reduce
    # the frequency of REVERB server rejections for requesting too many downloads
    wait = 0

    for site in url_list:
        download = False
        url = site.rstrip()

        # the file name is the last segment of the url
        name = url.split("/")[-1]

        # Determine whether or not to download the file based on filetype.
        if file_types is not None:
            for filetype in file_types:
                # compare against the last 4 characters so ".tif" matches "tif", etc.
                if filetype in name[-4:]:
                    download = True
        else:
            download = True

        # attempt download of the file, or skip it.
        if download:

            try:
                # wait for the wait time before attempting writing a file
                time.sleep(wait)
                download_url(url, os.path.join(outdir, name))
                print("{0} is downloaded {1}".format(name, wait))

                # a success resets the wait time to zero
                # (equivalent to, but clearer than, the old "wait -= wait")
                wait = 0

            # add to the fail count if the download is unsuccessful and wait longer next time.
            # BUGFIX: narrowed from a bare "except:" which also swallowed
            # KeyboardInterrupt and made the retry loop impossible to interrupt.
            except Exception:
                print("{0} will be retried! {1}".format(name, wait))
                wait += 5
                failed.append(url)

    print("Finished downloading urls!")
    return failed
Beispiel #26
0
def surface_reflectance_dos_457(band_nums, meta_path, outdir):
    """
    Converts Landsat 4, 5, and 7 band DNs to surface reflectance using dark object subtraction.

     To be performed on raw Landsat 4,5, and 7 level 1 data. See link below for details
     see here [http://landsat.usgs.gov/Landsat8_Using_Product.php]

     Inputs:
       band_nums   A list of desired band numbers such as [3,4,5]
       meta_path   The full filepath to the metadata file for those bands
       outdir      Output directory to save converted files. If left False it will save ouput
                   files in the same directory as input files.
    """
    band_nums = core.enf_list(band_nums)
    band_nums = map(str, band_nums)
    TM_ETM_bands = ['1', '2', '3', '4', '5', '7', '8']

    #landsat.toa_reflectance_457(band_nums, meta_path, outdir)

    OutList = []

    f = open(meta_path)
    MText = f.read()

    metadata = landsat.grab_meta(meta_path)

    # sentinel objects marking which metadata format was detected; these must be
    # compared by IDENTITY below, since two empty lists are always equal by value.
    oldMeta = []
    newMeta = []

    #the presence of a PRODUCT_CREATION_TIME category is used to identify old metadata
    #if this is not present, the meta data is considered new.
    #Band6length refers to the length of the Band 6 name string. In the new metadata this string is longer
    if "PRODUCT_CREATION_TIME" in MText:
        Meta = oldMeta
        Band6length = 2
    else:
        Meta = newMeta
        Band6length = 8

    #The tilename is located using the newMeta/oldMeta indixes and the date of capture is recorded
    # BUGFIX: these comparisons used "==", but [] == [] is always True, so the
    # oldMeta branches were unreachable; identity ("is") selects the right branch.
    if Meta is newMeta:
        TileName = getattr(metadata, "LANDSAT_SCENE_ID")
        year = TileName[9:13]
        jday = TileName[13:16]
        date = getattr(metadata, "DATE_ACQUIRED")
    elif Meta is oldMeta:
        TileName = getattr(metadata, "BAND1_FILE_NAME")
        year = TileName[13:17]
        jday = TileName[17:20]
        date = getattr(metadata, "ACQUISITION_DATE")

    #the spacecraft from which the imagery was capture is identified
    #this info determines the solar exoatmospheric irradiance (ESun) for each band
    spacecraft = getattr(metadata, "SPACECRAFT_ID")

    if "7" in spacecraft:
        ESun = (1969.0, 1840.0, 1551.0, 1044.0, 255.700, 0., 82.07, 1368.00)
    elif "5" in spacecraft:
        ESun = (1957.0, 1826.0, 1554.0, 1036.0, 215.0, 0., 80.67)
    elif "4" in spacecraft:
        ESun = (1957.0, 1825.0, 1557.0, 1033.0, 214.9, 0., 80.72)
    else:
        arcpy.AddError("This tool only works for Landsat 4, 5, or 7")
        raise arcpy.ExecuteError()

    #determing if year is leap year and setting the Days in year accordingly
    # NOTE(review): simple mod-4 rule; wrong for century years like 2100, but
    # adequate for the Landsat 4/5/7 era.
    if float(year) % 4 == 0: DIY = 366.
    else: DIY = 365.

    #using the date to determing the distance from the sun
    theta = 2 * math.pi * float(jday) / DIY

    # squared Earth-Sun distance correction factor (Fourier series approximation)
    dSun2 = (1.00011 + 0.034221 * math.cos(theta) +
             0.001280 * math.sin(theta) + 0.000719 * math.cos(2 * theta) +
             0.000077 * math.sin(2 * theta))

    # solar zenith angle in degrees
    SZA = 90. - float(getattr(metadata, "SUN_ELEVATION"))

    for band_num in band_nums:
        if band_num in TM_ETM_bands:
            pathname = meta_path.replace("MTL.txt",
                                         "B{0}.tif".format(band_num))
            Oraster = arcpy.Raster(pathname)

            # mask DN == 0 (fill) pixels so they stay NoData in the output
            null_raster = arcpy.sa.SetNull(Oraster, Oraster, "VALUE = 0")

            #using the oldMeta/newMeta indixes to pull the min/max for radiance/Digital numbers
            if Meta is newMeta:
                LMax = getattr(metadata, "RADIANCE_MAXIMUM_BAND_" + band_num)
                LMin = getattr(metadata, "RADIANCE_MINIMUM_BAND_" + band_num)
                QCalMax = getattr(metadata,
                                  "QUANTIZE_CAL_MAX_BAND_" + band_num)
                QCalMin = getattr(metadata,
                                  "QUANTIZE_CAL_MIN_BAND_" + band_num)
            elif Meta is oldMeta:
                LMax = getattr(metadata, "LMAX_BAND" + band_num)
                LMin = getattr(metadata, "LMIN_BAND" + band_num)
                QCalMax = getattr(metadata, "QCALMAX_BAND" + band_num)
                QCalMin = getattr(metadata, "QCALMIN_BAND" + band_num)

            # linear DN -> at-sensor radiance rescale
            Radraster = (((LMax - LMin) / (QCalMax - QCalMin)) *
                         (null_raster - QCalMin)) + LMin
            Oraster = 0
            del null_raster

            # radiance -> top-of-atmosphere reflectance
            Refraster = (math.pi * Radraster * dSun2) / (
                ESun[int(band_num[0]) - 1] * math.cos(SZA * (math.pi / 180)))

            #Oraster = arcpy.Raster(raster_null)
            dark_object = arcpy.GetRasterProperties_management(
                Refraster, "MINIMUM")
            do_str = str(dark_object)
            do_flt = float(do_str)

            #Calculate the minimum value in each band and perform dark object subtraction
            Surfrefraster = Refraster - do_flt

            BandPath = "{0}\\{1}_B{2}_SurfRef.tif".format(
                outdir, TileName, band_num)
            Surfrefraster.save(BandPath)
            OutList.append(arcpy.Raster(BandPath))

            del Refraster, Radraster, Surfrefraster

            arcpy.AddMessage(
                "Surface Reflectance Calculated for Band {0}".format(band_num))
            print(
                "Surface Reflectance Calculated for Band {0}".format(band_num))

    f.close()
    return OutList
Beispiel #27
0
def toa_radiance_457(band_nums, meta_path, outdir = None):
    """
    Top of Atmosphere radiance (in Watts/(square meter x steradians x micrometers))
    conversion for Landsat 4, 5, and 7 data. To be performed on raw
    Landsat 4, 5, or 7 level 1 data.

    :param band_nums:   A list of desired band numbers such as [3, 4, 5]
    :param meta_path:   The full filepath to the metadata file for those bands
    :param outdir:      Output directory to save converted files.

    :return output_filelist:    List of filepaths created by this function.
    """

    output_filelist = []
    meta_path = os.path.abspath(meta_path)

    band_nums = core.enf_list(band_nums)
    band_nums = map(str, band_nums)

    #metadata format was changed August 29, 2012. This tool can process either the new or old format
    f = open(meta_path)
    MText = f.read()

    metadata = grab_meta(meta_path)

    #the presence of a PRODUCT_CREATION_TIME category is used to identify old metadata
    #if this is not present, the meta data is considered new.
    #Band6length refers to the length of the Band 6 name string. In the new metadata this string is longer
    if "PRODUCT_CREATION_TIME" in MText:
        Meta = "oldMeta"
        Band6length = 2
    else:
        Meta = "newMeta"
        Band6length = 8

    #The tilename is located using the newMeta/oldMeta indixes and the date of capture is recorded
    # (the year and julian day are sliced directly out of the scene/file name)
    if Meta == "newMeta":
        TileName    = getattr(metadata, "LANDSAT_SCENE_ID")
        year        = TileName[9:13]
        jday        = TileName[13:16]
        date        = getattr(metadata, "DATE_ACQUIRED")
        
    elif Meta == "oldMeta":
        TileName    = getattr(metadata, "BAND1_FILE_NAME")
        year        = TileName[13:17]
        jday        = TileName[17:20]
        date        = getattr(metadata, "ACQUISITION_DATE")

    #the spacecraft from which the imagery was capture is identified
    #this info determines the solar exoatmospheric irradiance (ESun) for each band
    spacecraft = getattr(metadata, "SPACECRAFT_ID")

    if "7" in spacecraft:
        ESun = (1969.0, 1840.0, 1551.0, 1044.0, 255.700, 0., 82.07, 1368.00)
        TM_ETM_bands = ['1','2','3','4','5','7','8']
        
    elif "5" in spacecraft:
        ESun = (1957.0, 1826.0, 1554.0, 1036.0, 215.0, 0. ,80.67)
        TM_ETM_bands = ['1','2','3','4','5','7']
        
    elif "4" in spacecraft:
        ESun = (1957.0, 1825.0, 1557.0, 1033.0, 214.9, 0. ,80.72)
        TM_ETM_bands = ['1','2','3','4','5','7']
        
    else:
        arcpy.AddError("This tool only works for Landsat 4, 5, or 7")
        raise arcpy.ExecuteError()

    #Calculating values for each band
    for band_num in band_nums:
        if band_num in TM_ETM_bands:

            print("Processing Band {0}".format(band_num))
            pathname = meta_path.replace("MTL.txt", "B{0}.tif".format(band_num))
            Oraster = arcpy.Raster(pathname)

            # mask DN == 0 (fill) pixels so they stay NoData in the output
            null_raster = arcpy.sa.SetNull(Oraster, Oraster, "VALUE = 0")

            #using the oldMeta/newMeta indixes to pull the min/max for radiance/Digital numbers
            if Meta == "newMeta":
                LMax    = getattr(metadata, "RADIANCE_MAXIMUM_BAND_{0}".format(band_num))
                LMin    = getattr(metadata, "RADIANCE_MINIMUM_BAND_{0}".format(band_num))  
                QCalMax = getattr(metadata, "QUANTIZE_CAL_MAX_BAND_{0}".format(band_num))
                QCalMin = getattr(metadata, "QUANTIZE_CAL_MIN_BAND_{0}".format(band_num))
                
            elif Meta == "oldMeta":
                LMax    = getattr(metadata, "LMAX_BAND{0}".format(band_num))
                LMin    = getattr(metadata, "LMIN_BAND{0}".format(band_num))  
                QCalMax = getattr(metadata, "QCALMAX_BAND{0}".format(band_num))
                QCalMin = getattr(metadata, "QCALMIN_BAND{0}".format(band_num))

            # linear DN -> at-sensor spectral radiance rescale
            Radraster = (((LMax - LMin)/(QCalMax-QCalMin)) * (null_raster - QCalMin)) + LMin
            Oraster = 0
            del null_raster

            band_rad = "{0}_B{1}".format(TileName, band_num)

            #create the output name and save the TOA radiance tiff
            if outdir is not None:
                outdir = os.path.abspath(outdir)
                outname = core.create_outname(outdir, band_rad, "TOA_Rad", "tif")
            else:
                folder = os.path.split(meta_path)[0]
                outname = core.create_outname(folder, band_rad, "TOA_Rad", "tif")
                
            Radraster.save(outname)
            output_filelist.append(outname)
            
            del Radraster

            print("toa radiance saved for Band {0}".format(band_num))

        #if listed band is not a TM/ETM+ sensor band, skip it and print message
        else:
            print("Can only perform reflectance conversion on TM/ETM+ sensor bands")
            print("Skipping band {0}".format(band_num))
         
    f.close()
    return output_filelist
def fetch_Landsat_WELD(product, tiles, years, outdir):
    """
    Fetch WELD data from the server at [http://e4ftl01.cr.usgs.gov/WELD].
    Weld data is corrected and processed Landsat 5 and 7 data that is distributed in the
    MODIS sinusoidal projection and grid format. Read more about WELD data.
    https://landsat.usgs.gov/WELD.php
    http://globalmonitoring.sdstate.edu/projects/weldglobal/

    :param product:     WELD product to download such as 'USWK','USMO','USYR'
    :param tiles:       list of tiles to grab such as ['h11v12','h11v11']
    :param years:       list of years to grab such as range(2001,2014)
    :param outdir:      output directory to save downloaded files

    :return output_filelist: A list of full filepaths to files fetched be this function
    """

    output_filelist = []

    # check formats
    global dates
    tiles = core.enf_list(tiles)
    years = [str(year) for year in core.enf_list(years)]

    # create output directories
    for tile in tiles:
        if not os.path.exists(os.path.join(outdir, tile)):
            os.makedirs(os.path.join(outdir, tile))

    print('Connecting to servers!')

    # Map the contents of the directory.
    # BUGFIX: the old code swallowed connection errors and then crashed later
    # with a NameError on the undefined "dates"; bail out with an empty list.
    site = 'https://e4ftl01.cr.usgs.gov/WELD/WELD' + product + '.001'
    try:
        dates = list_http_e4ftl01(site)
    except Exception:
        print('Could not connect to site! check inputs!')
        return output_filelist

    # find just the folders within the desired year range.
    good_dates = []
    for date in dates:
        try:
            y, m, d = date.split(".")
            if y in years:
                good_dates.append(date)
        except Exception:
            # listing entries that are not dotted yyyy.mm.dd dates are ignored
            pass

    print("Found {0} days within year range".format(len(good_dates)))

    # for all folders within the desired date range,  map the subfolder contents.
    for good_date in good_dates:

        files = list_http_e4ftl01(site + '/' + good_date)

        for afile in files:
            # only list files with desired tilenames and not preview jpgs
            if not '.jpg' in afile:
                for tile in tiles:
                    if tile in afile:

                        # assemble the address
                        address = '/'.join([site, good_date, afile])
                        print("Downloading {0}".format(address))

                        #download the file.
                        outname = os.path.join(outdir, tile, afile)
                        output_filelist.append(outname)
                        download_url(address, outname)

    # BUGFIX: the docstring promised this list, but the old code returned None.
    return output_filelist
Beispiel #29
0
def degree_days_accum(rasterlist, critical_values = None, outdir = None):
    """
    Accumulates degree days in a time series rasterlist

    This function is the logical successor to calc.degree_days. Input a list of rasters
    containing daily data to be accumulated. Output raster for a given day will be the sum
    total of the input raster for that day and all preceding days. The last output raster in
    a years worth of data (image 356) would be the sum of all 365 images. The 25th output
    raster would be a sum of the first 25 days.
    Critical value rasters will also be created. Useful for example: we wish to know on what day
    of our 365 day sequence every pixel hits a value of 100. Input 100 as a critical value
    and that output raster will be generated.

    :param rasterlist:          list of files, or directory containing rasters to accumulate
    :param critical_values:     Values at which the user wishes to know WHEN the total accumulation
                                value reaches this point. For every critical value, an output
                                raster will be created. This raster contains integer values denoting
                                the index number of the file at which the value was reached.
                                This input must be a list of ints or floats, not strings.
    :param outdir:              Desired output directory for all output files.

    :return output_filelist:    a list of all files created by this function.
    """

    output_filelist = []
    rasterlist = enf_rastlist(rasterlist)

    # BUGFIX: the previous version crashed with a TypeError when critical_values
    # was left as the default None ("0 in None", "len(None)"). Treat None as
    # "no critical rasters requested".
    if critical_values is None:
        critical_values = []
    else:
        critical_values = core.enf_list(critical_values)

    # critical values of zero are problematic (every zero-initialized pixel would
    # satisfy them immediately), so replace zero with a small positive value.
    if 0 in critical_values:
        critical_values.remove(0)
        critical_values.append(0.000001)

    if outdir is not None and not os.path.exists(outdir):
        os.makedirs(outdir)

    for i, rast in enumerate(rasterlist):

        image, meta = to_numpy(rast, "float32")
        xs, ys = image.shape

        # initialize the accumulation grids from the first raster's dimensions
        if i == 0:
            Sum  = numpy.zeros((xs, ys))
            Crit = numpy.zeros((len(critical_values), xs, ys))

        if image.shape == Sum.shape:

            # only bother to proceed if at least one pixel is non-negative
            if numpy.max(image) >= 0:
                for x in range(xs):
                    for y in range(ys):

                        if image[x, y] >= 0:
                            Sum[x, y] = Sum[x, y] + image[x, y]

                        # record the index of the first file at which each
                        # critical accumulation value is reached
                        for z, critical_value in enumerate(critical_values):
                            if Sum[x, y] >= critical_value and Crit[z, x, y] == 0:
                                Crit[z, x, y] = i
        else:
            print("Encountered an image of incorrect size! Skipping it!")

        Sum     = Sum.astype('float32')
        outname = core.create_outname(outdir, rast, "Accum")
        from_numpy(Sum, meta, outname)
        output_filelist.append(outname)

        del image

    # output critical accumulation rasters, reusing metadata from the last raster
    # in the loop above. Guarded so an empty rasterlist or no critical values
    # cannot reference undefined names.
    if critical_values and output_filelist:
        Crit = Crit.astype('int16')
        crit_meta = meta
        crit_meta.NoData_Value = 0
        head, tail = os.path.split(outname)        # place these in the last raster output location
        for z, critical_value in enumerate(critical_values):
            outname = os.path.join(head, "Crit_Accum_Index_Val-{0}.tif".format(str(critical_value)))
            print("Saving {0}".format(outname))
            from_numpy(Crit[z, :, :], crit_meta, outname)

    return output_filelist
Beispiel #30
0
def grab_info(filepath, data_type = False, CustGroupings = None):

    """
    Extracts in-filename metadata from common NASA data products

     This function simply extracts relevant sorting information from a MODIS or Landsat
     filepath of any type or product and returns object properties relevant to that data.
     it will be expanded to include additional data products in the future.

     Inputs:
           filepath        Full or partial filepath to any modis product tile
           data_type       Manually tell the software what the data is.
           CustGroupings   User defined sorting by julian days of specified bin widths.
                           input of 5 for example will group January 1,2,3,4,5 in the first bin
                           and january 6,7,8,9,10 in the second bin, etc.

     Outputs:
           info            an object containing the attributes (product, year, day, tile)
                           retrieve these values by calling "info.product", "info.year" etc.

     Attributes by data type:
           All             type,year,j_day,month,day,season,CustGroupings,suffix

           MODIS           product,tile
           Landsat         sensor,satellite,WRSpath,WRSrow,groundstationID,Version,band

     Attribute descriptions:
           type            NASA data type, for example 'MODIS' and 'Landsat'
           year            four digit year the data was taken
           j_day           julian day 1 to 365 or 366 for leap years
           month           three character month abbreviation
           day             day of the month
           season          'Winter','Spring','Summer', or 'Autumn'
           CustGroupings   bin number of data according to custom group value. sorted by
                           julian day
           suffix          Any additional trailing information in the filename. used to find
                           details about special

           product         usually a level 3 data product from sensor such as MOD11A1
           tile            MODIS sinusoidal tile h##v## format
           sensor          Landsat sensor
           satellite       usually 5,7, or 8 for the landsat satellite
           WRSpath         Landsat path designator
           WRSrow          Landsat row designator
           groundstationID ground station which received the data download from the satellite
           Version         Version of landsat data product
           band            band of landsat data product, usually 1 through 10 or 11.
    """

    # pull the filename and path apart
    path, name = os.path.split(filepath)

    # create an info object class instance
    class info_object(object):pass
    info = info_object()

    # figure out what kind of data these files are.
    if not data_type:
        data_type = identify(name)

    if data_type == 'MODIS':
        params  =['product', 'year', 'j_day', 'tile', 'type', 'version', 'tag', 'suffix']
        n       = name.split('.')
        end     = n[4]
        string  =[n[0], name[9:13] ,name[13:16], n[2], 'MODIS', n[3], end[:13], end[13:]]

    elif data_type =='Landsat':
        params  =['sensor', 'satellite', 'WRSpath', 'WRSrow', 'year', 'j_day',
                        'groundstationID', 'Version', 'band', 'type', 'suffix']
        n       = name.split('.')[0]
        string  =[n[1], n[2], n[3:6], n[6:9], n[9:13], n[13:16], n[16:19],
                n[19:21], n[23:].split('_')[0], 'Landsat', '_'.join(n[23:].split('_')[1:])]

    elif data_type == 'WELD_CONUS' or data_type == 'WELD_AK':
        params  = ['coverage', 'period', 'year', 'tile', 'start_day', 'end_day', 'type']
        n       = name.split('.')
        string  =[n[0], n[1], n[2] ,n[3], n[4][4:6], n[4][8:11], 'WELD']
        # take everything after the first underscore as a suffix if one exists.
        if '_' in name:
            params.append('suffix')
            string.append('_'.join(name.split('_')[1:]))

    elif data_type == 'ASTER':
        params  = ['product','N','W','type','period']
        n       = name.split('_')
        string  = [n[0],n[1][1:3],n[1][5:9],n[-1].split('.')[0],'none']

    # BUGFIX: the three branches below used Python 2 print statements, which are
    # syntax errors under Python 3 (the rest of this file uses print functions).
    elif data_type == 'TRMM':
        print('{Grab_Data_Info} no support for TRMM data yet! you could add it!')
        return False

    elif data_type == 'AMSR_E':
        print('{Grab_Data_Info} no support for AMSR_E data yet! you could add it!')
        return False

    elif data_type == 'AIRS':
        print('{Grab_Data_Info} no support for AIRS data yet! you could add it!')
        return False

    # if data doesnt look like anything!
    else:
        print('Data type for file [{0}] could not be identified as any supported type'.format(name))
        print('improve this function by adding info for this datatype!')
        return False

    # Create attributes and assign parameter names and values
    for i in range(len(params)):
        setattr(info, params[i], string[i])

    # ................................................................................
    # perform additional data gathering only if data has no info.period attribute. Images with
    # this attribute represent data that is produced from many dates, not just one day.

    if not hasattr(info, 'period'):

    # fill in date format values and custom grouping and season information based on julian day
    # many files are named according to julian day. we want the date info for these files.
        try:
            tempinfo    = datetime.datetime(int(info.year),1,1)+datetime.timedelta(int(int(info.j_day)-1))
            info.month  = tempinfo.strftime('%b')
            info.day    = tempinfo.day

        # some files are named according to date. we want the julian day info for these files
        except:
            fmt         = '%Y.%m.%d'
            tempinfo    = datetime.datetime.strptime('.'.join([info.year,info.month,info.day]),fmt)
            info.j_day  = tempinfo.strftime('%j')

    # fill in the seasons by checking the value of julian day
        if int(info.j_day) <=78 or int(info.j_day) >=355:
            info.season='Winter'

        elif int(info.j_day) <=171:
            info.season='Spring'

        elif int(info.j_day)<=265:
            info.season='Summer'

        elif int(info.j_day)<=354:
            info.season='Autumn'

    # bin by julian day if integer group width was input
    if CustGroupings is not None:

        CustGroupings = core.enf_list(CustGroupings)
        for grouping in CustGroupings:
            if isinstance(grouping, int):
                groupname = 'custom' + str(grouping)
                # BUGFIX: use floor division so bin numbers stay integers under
                # Python 3 (true division would yield floats like 3.2).
                setattr(info, groupname, 1 + (int(info.j_day) - 1) // grouping)
            else:
                print('invalid custom grouping entered!')
                print('CustGrouping must be one or more integers in a list')

    # make sure the filepath input actually leads to a real file, then give user the info
    if core.exists(filepath):
        return info
    else:
        return False
Beispiel #31
0
def toa_radiance_457(band_nums, meta_path, outdir = False):
    """
    Top of Atmosphere radiance (in Watts/(square meter * steradians * micrometers)) conversion
    for Landsat 4, 5, and 7 data. To be performed on raw Landsat 4, 5, or 7 level 1 data.

     Inputs:
       band_nums   A list of desired band numbers such as [3 4 5]
       meta_path   The full filepath to the metadata file for those bands
       outdir      Output directory to save converted files.

     Returns a list of arcpy Raster objects for the saved radiance rasters.
    """

    OutList = []
    band_nums = core.enf_list(band_nums)
    band_nums = list(map(str, band_nums))    # list() so iteration is safe under Python 3
    TM_ETM_bands = ['1','2','3','4','5','7','8']

    # metadata format was changed August 29, 2012. This tool can process either the new or
    # old format. Use a context manager so the file is closed promptly instead of being
    # held open for the entire conversion run.
    with open(meta_path) as f:
        MText = f.read()

    metadata = grab_meta(meta_path)

    # the presence of a PRODUCT_CREATION_TIME category identifies old metadata.
    # BUGFIX: the previous version assigned two empty lists (oldMeta/newMeta) and
    # compared them with '==', which is always True, so the old-format branches
    # below were unreachable.
    old_format = "PRODUCT_CREATION_TIME" in MText

    # The tilename is located using the format-appropriate key and the date of capture recorded
    if old_format:
        TileName    = getattr(metadata, "BAND1_FILE_NAME")
        year        = TileName[13:17]
        jday        = TileName[17:20]
        date        = getattr(metadata, "ACQUISITION_DATE")
    else:
        TileName    = getattr(metadata, "LANDSAT_SCENE_ID")
        year        = TileName[9:13]
        jday        = TileName[13:16]
        date        = getattr(metadata, "DATE_ACQUIRED")

    # the spacecraft from which the imagery was captured is identified;
    # this info determines the solar exoatmospheric irradiance (ESun) for each band
    spacecraft = getattr(metadata, "SPACECRAFT_ID")

    if   "7" in spacecraft:
        ESun = (1969.0, 1840.0, 1551.0, 1044.0, 255.700, 0., 82.07, 1368.00)

    elif "5" in spacecraft:
        ESun = (1957.0, 1826.0, 1554.0, 1036.0, 215.0, 0. ,80.67)

    elif "4" in spacecraft:
        ESun = (1957.0, 1825.0, 1557.0, 1033.0, 214.9, 0. ,80.72)

    else:
        arcpy.AddError("This tool only works for Landsat 4, 5, or 7")
        raise arcpy.ExecuteError()

    # Calculating values for each band
    for band_num in band_nums:

        # BUGFIX: in the previous version everything below the Raster load sat
        # OUTSIDE the band check, so unsupported bands were processed using a
        # stale (or undefined, on the first pass) 'Oraster'. Skip them instead.
        if band_num not in TM_ETM_bands:
            print("Can only perform radiance conversion on TM/ETM+ bands")
            print("Skipping band {0}".format(band_num))
            continue

        print("Processing Band {0}".format(band_num))
        pathname = meta_path.replace("MTL.txt", "B{0}.tif".format(band_num))
        Oraster = arcpy.Raster(pathname)

        # pull the min/max for radiance/digital numbers with format-appropriate keys
        if old_format:
            LMax    = getattr(metadata, "LMAX_BAND" + band_num)
            LMin    = getattr(metadata, "LMIN_BAND" + band_num)
            QCalMax = getattr(metadata, "QCALMAX_BAND" + band_num)
            QCalMin = getattr(metadata, "QCALMIN_BAND" + band_num)
        else:
            LMax    = getattr(metadata, "RADIANCE_MAXIMUM_BAND_" + band_num)
            LMin    = getattr(metadata, "RADIANCE_MINIMUM_BAND_" + band_num)
            QCalMax = getattr(metadata, "QUANTIZE_CAL_MAX_BAND_" + band_num)
            QCalMin = getattr(metadata, "QUANTIZE_CAL_MIN_BAND_" + band_num)

        # linear rescale from digital numbers to at-sensor radiance
        Radraster = (((LMax - LMin)/(QCalMax-QCalMin)) * (Oraster - QCalMin)) + LMin
        Oraster = 0

        BandPath = "{0}\\{1}_B{2}_TOA-Rad.tif".format(outdir,TileName,band_num)
        Radraster.save(BandPath)
        OutList.append(arcpy.Raster(BandPath))

        del Radraster

        arcpy.AddMessage("toa radiance saved for Band {0}".format(band_num))
        print("toa radiance saved for Band {0}".format(band_num))

    return OutList
Beispiel #32
0
def toa_radiance_457(band_nums, meta_path, outdir=False):
    """
    Top of Atmosphere radiance (in Watts/(square meter * steradians * micrometers)) conversion
    for Landsat 4, 5, and 7 data. To be performed on raw Landsat 4, 5, or 7 level 1 data.

     Inputs:
       band_nums   A list of desired band numbers such as [3 4 5]
       meta_path   The full filepath to the metadata file for those bands
       outdir      Output directory to save converted files.

     Returns a list of arcpy Raster objects for the saved radiance rasters.
    """

    OutList = []
    band_nums = core.enf_list(band_nums)
    band_nums = list(map(str, band_nums))  # list() so iteration is safe under Python 3
    TM_ETM_bands = ['1', '2', '3', '4', '5', '7', '8']

    # metadata format was changed August 29, 2012. This tool can process either the new
    # or old format. Use a context manager so the file handle is closed promptly.
    with open(meta_path) as f:
        MText = f.read()

    metadata = grab_meta(meta_path)

    # the presence of a PRODUCT_CREATION_TIME category identifies old metadata.
    # BUGFIX: the previous version assigned two empty lists (oldMeta/newMeta) and
    # compared them with '==', which is always True, so the old-format branches
    # below were unreachable.
    old_format = "PRODUCT_CREATION_TIME" in MText

    # The tilename is located using the format-appropriate key and the capture date recorded
    if old_format:
        TileName = getattr(metadata, "BAND1_FILE_NAME")
        year = TileName[13:17]
        jday = TileName[17:20]
        date = getattr(metadata, "ACQUISITION_DATE")
    else:
        TileName = getattr(metadata, "LANDSAT_SCENE_ID")
        year = TileName[9:13]
        jday = TileName[13:16]
        date = getattr(metadata, "DATE_ACQUIRED")

    # the spacecraft from which the imagery was captured is identified;
    # this info determines the solar exoatmospheric irradiance (ESun) for each band
    spacecraft = getattr(metadata, "SPACECRAFT_ID")

    if "7" in spacecraft:
        ESun = (1969.0, 1840.0, 1551.0, 1044.0, 255.700, 0., 82.07, 1368.00)

    elif "5" in spacecraft:
        ESun = (1957.0, 1826.0, 1554.0, 1036.0, 215.0, 0., 80.67)

    elif "4" in spacecraft:
        ESun = (1957.0, 1825.0, 1557.0, 1033.0, 214.9, 0., 80.72)

    else:
        arcpy.AddError("This tool only works for Landsat 4, 5, or 7")
        raise arcpy.ExecuteError()

    # Calculating values for each band
    for band_num in band_nums:

        # BUGFIX: in the previous version everything below the Raster load sat
        # OUTSIDE the band check, so unsupported bands were processed using a
        # stale (or undefined, on the first pass) 'Oraster'. Skip them instead.
        if band_num not in TM_ETM_bands:
            print("Can only perform radiance conversion on TM/ETM+ bands")
            print("Skipping band {0}".format(band_num))
            continue

        print("Processing Band {0}".format(band_num))
        pathname = meta_path.replace("MTL.txt", "B{0}.tif".format(band_num))
        Oraster = arcpy.Raster(pathname)

        # pull the min/max for radiance/digital numbers with format-appropriate keys
        if old_format:
            LMax = getattr(metadata, "LMAX_BAND" + band_num)
            LMin = getattr(metadata, "LMIN_BAND" + band_num)
            QCalMax = getattr(metadata, "QCALMAX_BAND" + band_num)
            QCalMin = getattr(metadata, "QCALMIN_BAND" + band_num)
        else:
            LMax = getattr(metadata, "RADIANCE_MAXIMUM_BAND_" + band_num)
            LMin = getattr(metadata, "RADIANCE_MINIMUM_BAND_" + band_num)
            QCalMax = getattr(metadata, "QUANTIZE_CAL_MAX_BAND_" + band_num)
            QCalMin = getattr(metadata, "QUANTIZE_CAL_MIN_BAND_" + band_num)

        # linear rescale from digital numbers to at-sensor radiance
        Radraster = (((LMax - LMin) / (QCalMax - QCalMin)) *
                     (Oraster - QCalMin)) + LMin
        Oraster = 0

        BandPath = "{0}\\{1}_B{2}_TOA-Rad.tif".format(outdir, TileName,
                                                      band_num)
        Radraster.save(BandPath)
        OutList.append(arcpy.Raster(BandPath))

        del Radraster

        arcpy.AddMessage("toa radiance saved for Band {0}".format(band_num))
        print("toa radiance saved for Band {0}".format(band_num))

    return OutList
Beispiel #33
0
def grab_info(filepath, data_type = False, CustGroupings = None):

    """
    Extracts in-filename metadata from common NASA data products

     This function simply extracts relevant sorting information from a MODIS or Landsat
     filepath of any type or product and returns object properties relevant to that data.
     it will be expanded to include additional data products in the future.

     Inputs:
           filepath        Full or partial filepath to any modis product tile
           data_type       Manually tell the software what the data is.
           CustGroupings   User defined sorting by julian days of specified bin widths.
                           input of 5 for example will group January 1,2,3,4,5 in the first bin
                           and january 6,7,8,9,10 in the second bin, etc.

     Outputs:
           info            an object containing the attributes (product, year, day, tile)
                           retrieve these values by calling "info.product", "info.year" etc.

     Attributes by data type:
           All             type,year,j_day,month,day,season,CustGroupings,suffix

           MODIS           product,tile
           Landsat         sensor,satellite,WRSpath,WRSrow,groundstationID,Version,band

     Attribute descriptions:
           type            NASA data type, for example 'MODIS' and 'Landsat'
           year            four digit year the data was taken
           j_day           julian day 1 to 365 or 366 for leap years
           month           three character month abbreviation
           day             day of the month
           season          'Winter','Spring','Summer', or 'Autumn'
           CustGroupings   bin number of data according to custom group value. sorted by
                           julian day
           suffix          Any additional trailing information in the filename. used to find
                           details about special

           product         usually a level 3 data product from sensor such as MOD11A1
           tile            MODIS sinusoidal tile h##v## format
           sensor          Landsat sensor
           satellite       usually 5,7, or 8 for the landsat satellite
           WRSpath         Landsat path designator
           WRSrow          Landsat row designator
           groundstationID ground station which received the data download from the satellite
           Version         Version of landsat data product
           band            band of landsat data product, usually 1 through 10 or 11.
    """

    # pull the filename and path apart
    path, name = os.path.split(filepath)

    # create an info object class instance
    class info_object(object):pass
    info = info_object()

    # figure out what kind of data these files are.
    if not data_type:
        data_type = identify(name)

    if data_type == 'MODIS':
        params  =['product', 'year', 'j_day', 'tile', 'type', 'version', 'tag', 'suffix']
        n       = name.split('.')
        end     = n[4]
        string  =[n[0], name[9:13] ,name[13:16], n[2], 'MODIS', n[3], end[:13], end[13:]]

    elif data_type =='Landsat':
        params  =['sensor', 'satellite', 'WRSpath', 'WRSrow', 'year', 'j_day',
                        'groundstationID', 'Version', 'band', 'type', 'suffix']
        n       = name.split('.')[0]
        string  =[n[1], n[2], n[3:6], n[6:9], n[9:13], n[13:16], n[16:19],
                n[19:21], n[23:].split('_')[0], 'Landsat', '_'.join(n[23:].split('_')[1:])]

    elif data_type == 'WELD_CONUS' or data_type == 'WELD_AK':
        params  = ['coverage', 'period', 'year', 'tile', 'start_day', 'end_day', 'type']
        n       = name.split('.')
        string  =[n[0], n[1], n[2] ,n[3], n[4][4:6], n[4][8:11], 'WELD']
        # take everything after the first underscore as a suffix if one exists.
        if '_' in name:
            params.append('suffix')
            string.append('_'.join(name.split('_')[1:]))

    elif data_type == 'ASTER':
        params  = ['product','N','W','type','period']
        n       = name.split('_')
        string  = [n[0],n[1][1:3],n[1][5:9],n[-1].split('.')[0],'none']

    # BUGFIX: the three branches below used Python 2 print statements, which are
    # syntax errors under Python 3 (the rest of this file uses print functions).
    elif data_type == 'TRMM':
        print('{Grab_Data_Info} no support for TRMM data yet! you could add it!')
        return False

    elif data_type == 'AMSR_E':
        print('{Grab_Data_Info} no support for AMSR_E data yet! you could add it!')
        return False

    elif data_type == 'AIRS':
        print('{Grab_Data_Info} no support for AIRS data yet! you could add it!')
        return False

    # if data doesnt look like anything!
    else:
        print('Data type for file [{0}] could not be identified as any supported type'.format(name))
        print('improve this function by adding info for this datatype!')
        return False

    # Create attributes and assign parameter names and values
    for i in range(len(params)):
        setattr(info, params[i], string[i])

    # ................................................................................
    # perform additional data gathering only if data has no info.period attribute. Images with
    # this attribute represent data that is produced from many dates, not just one day.

    if not hasattr(info, 'period'):

    # fill in date format values and custom grouping and season information based on julian day
    # many files are named according to julian day. we want the date info for these files.
        try:
            tempinfo    = datetime.datetime(int(info.year),1,1)+datetime.timedelta(int(int(info.j_day)-1))
            info.month  = tempinfo.strftime('%b')
            info.day    = tempinfo.day

        # some files are named according to date. we want the julian day info for these files
        except:
            fmt         = '%Y.%m.%d'
            tempinfo    = datetime.datetime.strptime('.'.join([info.year,info.month,info.day]),fmt)
            info.j_day  = tempinfo.strftime('%j')

    # fill in the seasons by checking the value of julian day
        if int(info.j_day) <=78 or int(info.j_day) >=355:
            info.season='Winter'

        elif int(info.j_day) <=171:
            info.season='Spring'

        elif int(info.j_day)<=265:
            info.season='Summer'

        elif int(info.j_day)<=354:
            info.season='Autumn'

    # bin by julian day if integer group width was input
    if CustGroupings is not None:

        CustGroupings = core.enf_list(CustGroupings)
        for grouping in CustGroupings:
            if isinstance(grouping, int):
                groupname = 'custom' + str(grouping)
                # BUGFIX: use floor division so bin numbers stay integers under
                # Python 3 (true division would yield floats like 3.2).
                setattr(info, groupname, 1 + (int(info.j_day) - 1) // grouping)
            else:
                print('invalid custom grouping entered!')
                print('CustGrouping must be one or more integers in a list')

    # make sure the filepath input actually leads to a real file, then give user the info
    if core.exists(filepath):
        return info
    else:
        return False
Beispiel #34
0
def toa_reflectance_8(band_nums, meta_path, outdir=False):
    """
    Converts Landsat 8 bands to Top-of-Atmosphere reflectance.

     To be performed on raw Landsat 8 level 1 data. See link below for details
     see here [http://landsat.usgs.gov/Landsat8_Using_Product.php]

     Inputs:
       band_nums   A list of desired band numbers such as [3,4,5]
       meta_path   The full filepath to the metadata file for those bands
       outdir      Output directory to save converted files. If left False it will save ouput
                       files in the same directory as input files.
    """

    outlist = []

    # sanitize inputs and load the metadata from the MTL file
    band_nums = core.enf_list(band_nums)
    band_nums = map(str, band_nums)
    oli_bands = ['1', '2', '3', '4', '5', '6', '7', '8', '9']
    meta_path = os.path.abspath(meta_path)
    meta = grab_meta(meta_path)

    # convert each requested band, provided it belongs to the OLI sensor
    for band in band_nums:

        # non-OLI bands cannot be converted; report and move on
        if band not in oli_bands:
            print(
                "Can only perform reflectance conversion on OLI sensor bands")
            print("Skipping band {0}".format(band))
            continue

        # locate this band's raster and pull its rescaling attributes from the MTL
        band_path = meta_path.replace("MTL.txt", "B{0}.tif".format(band))
        Qcal = arcpy.Raster(band_path)
        Mp = getattr(meta, "REFLECTANCE_MULT_BAND_{0}".format(band))  # multiplicative scaling factor
        Ap = getattr(meta, "REFLECTANCE_ADD_BAND_{0}".format(band))   # additive rescaling factor
        SEA = getattr(meta, "SUN_ELEVATION") * (math.pi / 180)        # sun elevation angle theta_se

        # null out the zero-valued black background so it doesn't skew values
        null_raster = arcpy.sa.SetNull(Qcal, Qcal, "VALUE = 0")

        # compute top-of-atmosphere reflectance
        TOA_ref = (((null_raster * Mp) + Ap) / (math.sin(SEA)))

        # save next to the inputs, or in outdir when one was supplied
        if outdir:
            outdir = os.path.abspath(outdir)
            outname = core.create_outname(outdir, band_path, "TOA_Ref", "tif")
        else:
            parent = os.path.split(meta_path)[0]
            outname = core.create_outname(parent, band_path, "TOA_Ref", "tif")

        TOA_ref.save(outname)
        outlist.append(outname)
        print("Saved output at {0}".format(outname))

    return outlist
Beispiel #35
0
def extract_from_hdf(filelist, layerlist, layernames=False, outdir=None):
    """
    Extracts tifs from MODIS extract_HDF_layer files, ensures proper projection.

     inputs:
       filelist    list of '.hdf' files from which data should be extracted (or a directory)
       layerlist   list of layer numbers to pull out as individual tifs should be integers
                   such as [0,4] for the 0th and 4th layer respectively.
       layernames  list of layer names to put more descriptive file suffixes to each layer
       outdir      directory to which tif files should be saved
                   if outdir is left as 'False', files are saved in the same directory as
                   the input file was found.

     returns a list of input files for which at least one layer extraction failed.
    """

    if outdir is not None:
        if not os.path.exists(outdir):
            os.makedirs(outdir)

    # enforce lists for iteration purposes and sanitize inputs.
    # BUGFIX: the previous version removed items from 'filelist' while iterating
    # over it, which silently skips the element following each removal. Build a
    # filtered list instead.
    filelist = core.enf_filelist(filelist)
    filelist = [f for f in filelist if '.hdf' in f and '.xml' not in f]

    layerlist = core.enf_list(layerlist)
    layernames = core.enf_list(layernames)

    # ignore user input layernames if they are invalid, but print warnings.
    # BUGFIX: the previous version constructed Warning(...) exception objects
    # without raising or printing them, so the user never saw the message.
    if layernames and not len(layernames) == len(layerlist):
        print('Layernames must be the same length as layerlist!')
        print('Ommiting user defined layernames!')
        layernames = False

    # create empty list to add failed file names into
    failed = []

    # iterate through every file in the input filelist
    for infile in filelist:

        # pull the filename and path apart
        path, name = os.path.split(infile)
        arcpy.env.workspace = path

        for i, layer in enumerate(layerlist):

            # specify the layer names.
            if layernames:
                layername = layernames[i]
            else:
                layername = str(layer).zfill(3)

            # use the input output directory if the user input one, otherwise build one
            if outdir:
                outname = os.path.join(
                    outdir, "{0}_{1}.tif".format(name[:-4], layername))
            else:
                outname = os.path.join(
                    path, "{0}_{1}.tif".format(name[:-4], layername))

            # perform the extracting and projection definition
            try:
                # extract the subdataset
                arcpy.ExtractSubDataset_management(infile, outname, str(layer))

                # define the projection as the MODIS Sinusoidal
                define_projection(outname)

                print("Extracted {0}".format(os.path.basename(outname)))

            except Exception:
                print("Failed to extract {0}  from {1}".format(
                    os.path.basename(outname), os.path.basename(infile)))
                # BUGFIX: this append sat OUTSIDE the except block, so every
                # input file was marked failed once per layer even on success.
                failed.append(infile)

    print("Finished extracting all hdfs! \n")
    return failed
Beispiel #36
0
def toa_radiance_8(band_nums, meta_path, outdir = None):
    """
    Top of Atmosphere radiance (in Watts/(square meter x steradians x micrometers))
    conversion for landsat 8 data. To be performed on raw Landsat 8
    level 1 data. See link below for details:
    see here http://landsat.usgs.gov/Landsat8_Using_Product.php

    :param band_nums:   A list of desired band numbers such as [3, 4, 5]
    :param meta_path:   The full filepath to the metadata file for those bands
    :param outdir:      Output directory to save converted files.

    :return output_filelist:    List of filepaths created by this function.
    """

    meta_path = os.path.abspath(meta_path)
    output_filelist = []

    # enforce list of band numbers (as strings) and grab the metadata from the MTL file
    band_nums = core.enf_list(band_nums)
    band_nums = list(map(str, band_nums))
    meta = grab_meta(meta_path)

    OLI_bands = ['1', '2', '3', '4', '5', '6', '7', '8', '9']

    # loop through each band
    for band_num in band_nums:
        if band_num in OLI_bands:

            # create the band name
            band_path = meta_path.replace("MTL.txt", "B{0}.tif".format(band_num))
            Qcal = arcpy.Raster(band_path)

            # mask out zero-valued (fill) pixels so they remain NoData in the output
            null_raster = arcpy.sa.SetNull(Qcal, Qcal, "VALUE = 0")

            # scrape the attribute data
            Ml = getattr(meta, "RADIANCE_MULT_BAND_{0}".format(band_num))  # multiplicative scaling factor
            Al = getattr(meta, "RADIANCE_ADD_BAND_{0}".format(band_num))   # additive rescaling factor

            # calculate Top-of-Atmosphere radiance
            TOA_rad = (null_raster * Ml) + Al
            del null_raster

            # create the output name and save the TOA radiance tiff.
            # NOTE: os.path.basename handles any path separator; the previous
            # manual split on "\\" or "//" left `name` undefined (NameError)
            # for ordinary forward-slash paths.
            name = os.path.basename(meta_path)
            rad_name = name.replace("_MTL.txt", "_B{0}".format(band_num))

            if outdir is not None:
                outdir = os.path.abspath(outdir)
                outname = core.create_outname(outdir, rad_name, "TOA_Rad", "tif")
            else:
                folder = os.path.split(meta_path)[0]
                outname = core.create_outname(folder, rad_name, "TOA_Rad", "tif")

            TOA_rad.save(outname)
            output_filelist.append(outname)
            print("Saved toa_radiance at {0}".format(outname))

        # if listed band is not a OLI sensor band, skip it and print message
        else:
            print("Can only perform reflectance conversion on OLI sensor bands")
            print("Skipping band {0}".format(band_num))

    return output_filelist
Beispiel #37
0
def download_urls(url_list, outdir, filetypes = False):

    """
    Downloads a list of files. Retries failed downloads

     This script downloads a list of files and places it in the output directory. It was
     built to be nested within "Download_filelist" to allow loops to continuously retry
     failed files until they are successful or a retry limit is reached.

     Inputs:
       url_list        array of urls, probably as read from a text file
       filetypes       list of filetypes to download. Useful for excluding extraneous
                       metadata by only downloding 'hdf' or 'tif' for example. Please note
                       that often times, you actually NEED the metadata.
       outdir          folder where files are to be placed after download

     Output:
       failed          list of files which failed download
    """

    failed   = []
    url_list = core.enf_list(url_list)

    # creates output folder at desired path if it doesn't already exist
    if not os.path.exists(outdir):
        os.makedirs(outdir)

    # establish a wait time that will increase when downloads fail. This helps to reduce
    # the frequency of REVERB server rejections for requesting too many downloads
    wait = 0

    for site in url_list:
        url  = site.rstrip()
        name = url.rsplit("/", 1)[-1]   # the filename is the last segment of the url

        # Determine whether or not to download the file based on filetype.
        if filetypes:
            download = any(filetype in name[-4:] for filetype in filetypes)
        else:
            download = True

        # attempt download of the file, or skip it.
        if download:

            try:
                # wait for the wait time before attempting writing a file
                time.sleep(wait)
                download_url(url, os.path.join(outdir, name))
                print("{0} is downloaded {1}".format(name, wait))

                # reset the wait time when downloads succeed
                # (the original `wait -= wait` was an obscure way to zero it)
                if wait >= 1:
                    wait = 0

            # add to the fail count if the download is unsuccessful and wait longer next time.
            # Narrowed from a bare `except:` so KeyboardInterrupt/SystemExit still propagate.
            except Exception:
                print("{0} will be retried! {1}".format(name, wait))
                wait += 5
                failed.append(url)


    print("Finished downloading urls!")
    return failed
Beispiel #38
0
def extract_HDF_layers(filelist, layerlist, layernames = None, outdir = None):

    """
     Function extracts tifs from HDFs.
     Use "Extract_MODIS_HDF" in the modis module for better
     handling of MODIS data with sinusoidal projections.

     inputs:
       filelist    list of '.hdf' files from which data should be extracted
       layerlist   list of layer numbers to pull out as individual tifs should be integers
                   such as [0,4] for the 0th and 4th layer respectively.
       layernames  list of layer names to put more descriptive names to each layer
       outdir      directory to which tif files should be saved
                   if outdir is left as 'False', files are saved in the same directory as
                   the input file was found.
    """


    # Set up initial arcpy modules, workspace, and parameters, and sanitize inputs.
    arcpy.env.overwriteOutput = True

    # enforce lists for iteration purposes. Only wrap layernames when the user
    # actually supplied one; enf_list(None) would otherwise produce a bogus list.
    filelist = core.enf_filelist(filelist)
    layerlist = core.enf_list(layerlist)
    if layernames is not None:
        layernames = core.enf_list(layernames)

    # ignore user input layernames if they are invalid, but print warnings.
    # (previously this set layernames = False, which the later
    # `layernames is not None` check treated as valid and crashed on False[i])
    if layernames and not len(layernames) == len(layerlist):
        print('layernames must be the same length as layerlist!')
        print('ommiting user defined layernames!')
        layernames = None

    # create empty list to add filenames into
    produced_files = []

    # iterate through every file in the input filelist
    for infile in filelist:
        # pull the filename and path apart
        path, name = os.path.split(infile)
        arcpy.env.workspace = path

        for i, layer in enumerate(layerlist):

            # specify the layer names.
            if layernames:
                layername = layernames[i]
            else:
                layername = str(layer).zfill(3)

            # use the input output directory if the user input one, otherwise fall
            # back to the input file's own directory. A local variable is used so
            # the fallback chosen for one file does not leak into later files
            # (the original reassigned `outdir` itself inside the loop).
            if outdir is not None:
                if not os.path.exists(outdir):
                    os.makedirs(outdir)
                dest = outdir
            else:
                dest = os.path.dirname(infile)

            outname = core.create_outname(dest, infile, layername, ext = "tif")

            # perform the extracting and projection definition
            try:
                # extract the subdataset
                arcpy.ExtractSubDataset_management(infile, outname, str(layer))

                print('Extracted ' + outname)
                produced_files.append(outname)

            except Exception:
                print('Failed to extract '+ outname + ' from ' + infile)

    return produced_files
Beispiel #39
0
def toa_reflectance_457(band_nums, meta_path, outdir=None):
    """
    This function is used to convert Landsat 4, 5, or 7 pixel values from
    digital numbers to Top-of-Atmosphere Reflectance. To be performed on raw
    Landsat 4, 5, or 7 data.

    :param band_nums:   A list of desired band numbers such as [3,4,5]
    :param meta_path:   The full filepath to the metadata file for those bands
    :param outdir:      Output directory to save converted files. If left False it will save ouput
                        files in the same directory as input files.

    :return output_filelist:    List of files created by this function
    """

    output_filelist = []

    band_nums = core.enf_list(band_nums)
    band_nums = list(map(str, band_nums))

    meta_path = os.path.abspath(meta_path)

    # metadata format was changed August 29, 2012. This tool can process either the new or old format.
    # A context manager is used so the file handle is released even if an
    # exception (e.g. the ExecuteError below) is raised before the end of the
    # function; the original left the file open on that path.
    with open(meta_path) as f:
        MText = f.read()

    metadata = landsat_metadata(meta_path)

    # the presence of a PRODUCT_CREATION_TIME category is used to identify old metadata
    # if this is not present, the meta data is considered new.
    # Band6length refers to the length of the Band 6 name string. In the new metadata this string is longer
    if "PRODUCT_CREATION_TIME" in MText:
        Meta = "oldMeta"
        Band6length = 2
    else:
        Meta = "newMeta"
        Band6length = 8

    # The tilename is located using the newMeta/oldMeta indixes and the date of capture is recorded
    if Meta == "newMeta":
        TileName = getattr(metadata, "LANDSAT_SCENE_ID")
        year = TileName[9:13]
        jday = TileName[13:16]
        date = getattr(metadata, "DATE_ACQUIRED")
    elif Meta == "oldMeta":
        TileName = getattr(metadata, "BAND1_FILE_NAME")
        year = TileName[13:17]
        jday = TileName[17:20]
        date = getattr(metadata, "ACQUISITION_DATE")

    # the spacecraft from which the imagery was capture is identified
    # this info determines the solar exoatmospheric irradiance (ESun) for each band
    spacecraft = getattr(metadata, "SPACECRAFT_ID")

    if "7" in spacecraft:
        ESun = (1969.0, 1840.0, 1551.0, 1044.0, 255.700, 0., 82.07, 1368.00)
        TM_ETM_bands = ['1', '2', '3', '4', '5', '7', '8']
    elif "5" in spacecraft:
        ESun = (1957.0, 1826.0, 1554.0, 1036.0, 215.0, 0., 80.67)
        TM_ETM_bands = ['1', '2', '3', '4', '5', '7']
    elif "4" in spacecraft:
        ESun = (1957.0, 1825.0, 1557.0, 1033.0, 214.9, 0., 80.72)
        TM_ETM_bands = ['1', '2', '3', '4', '5', '7']
    else:
        arcpy.AddError("This tool only works for Landsat 4, 5, or 7")
        raise arcpy.ExecuteError()

    # determing if year is leap year and setting the Days in year accordingly
    if float(year) % 4 == 0: DIY = 366.
    else: DIY = 365.

    # using the date to determining the distance from the sun
    theta = 2 * math.pi * float(jday) / DIY

    dSun2 = (1.00011 + 0.034221 * math.cos(theta) +
             0.001280 * math.sin(theta) + 0.000719 * math.cos(2 * theta) +
             0.000077 * math.sin(2 * theta))

    # solar zenith angle in degrees (complement of the sun elevation)
    SZA = 90. - float(getattr(metadata, "SUN_ELEVATION"))

    # Calculating values for each band
    for band_num in band_nums:
        if band_num in TM_ETM_bands:

            print("Processing Band {0}".format(band_num))
            pathname = meta_path.replace("MTL.txt",
                                         "B{0}.tif".format(band_num))
            Oraster = arcpy.Raster(pathname)

            # mask out zero-valued (fill) pixels so they stay NoData in the output
            null_raster = arcpy.sa.SetNull(Oraster, Oraster, "VALUE = 0")

            # using the oldMeta/newMeta indices to pull the min/max for radiance/Digital numbers
            if Meta == "newMeta":
                LMax = getattr(metadata,
                               "RADIANCE_MAXIMUM_BAND_{0}".format(band_num))
                LMin = getattr(metadata,
                               "RADIANCE_MINIMUM_BAND_{0}".format(band_num))
                QCalMax = getattr(metadata,
                                  "QUANTIZE_CAL_MAX_BAND_{0}".format(band_num))
                QCalMin = getattr(metadata,
                                  "QUANTIZE_CAL_MIN_BAND_{0}".format(band_num))
            elif Meta == "oldMeta":
                LMax = getattr(metadata, "LMAX_BAND{0}".format(band_num))
                LMin = getattr(metadata, "LMIN_BAND{0}".format(band_num))
                QCalMax = getattr(metadata, "QCALMAX_BAND{0}".format(band_num))
                QCalMin = getattr(metadata, "QCALMIN_BAND{0}".format(band_num))

            # digital numbers -> at-sensor spectral radiance
            Radraster = (((LMax - LMin) / (QCalMax - QCalMin)) *
                         (null_raster - QCalMin)) + LMin
            Oraster = 0
            del null_raster

            # at-sensor radiance -> top-of-atmosphere reflectance
            Refraster = (math.pi * Radraster * dSun2) / (
                ESun[int(band_num[0]) - 1] * math.cos(SZA * (math.pi / 180)))

            # construc output names for each band based on whether outdir is set (default is False)
            if outdir is not None:
                outdir = os.path.abspath(outdir)
                BandPath = core.create_outname(outdir, pathname, "TOA_Ref",
                                               "tif")
            else:
                folder = os.path.split(meta_path)[0]
                BandPath = core.create_outname(folder, pathname, "TOA_Ref",
                                               "tif")

            Refraster.save(BandPath)
            output_filelist.append(BandPath)

            del Refraster, Radraster
            print("Reflectance Calculated for Band {0}".format(band_num))

        # if listed band is not a TM/ETM+ sensor band, skip it and print message
        else:
            print(
                "Can only perform reflectance conversion on TM/ETM+ sensor bands"
            )
            print("Skipping band {0}".format(band_num))

    return output_filelist
Beispiel #40
0
def fetch_MODIS(product,
                version,
                tiles,
                outdir,
                start_dto,
                end_dto,
                force_overwrite=False):
    """
    Fetch MODIS Land products from one of two servers. If this function
    runs and downloads 0 files, check that your inputs are consistent
    with the naming convention at the appropriate server address.

       http://e4ftl01.cr.usgs.gov
       ftp://n5eil01u.ecs.nsidc.org

    :param product:         MODIS product to download such as 'MOD10A1' or 'MYD11A1'
    :param version:         version number, usually '004' or '041' or '005'
    :param tiles:           list of tiles to grab such as ['h11v12','h11v11']
                            NOTE: for some MODIS products, the h and v are omitted.

    :param outdir :         output directory to save downloaded files
    :param start_dto:       datetime object, the starting date of the range of data to download
    :param end_dto:         datetime object, the ending date of the range of data to download
    :param force_overwrite: will re-download files even if they already exist

    :return out_filepaths:  a list of filepaths to all files created by this function
    """

    out_filepaths = []

    # check formats
    tiles = core.enf_list(tiles)

    # create output directories
    if not os.path.exists(outdir):
        os.makedirs(outdir)

    print("Connecting to servers!")

    # obtain the web address, protocol information, and subdirectory where
    # this type of MODIS data can be found.
    site, isftp, Dir = _find_modis_product(product, version)

    if Dir:
        print("Connected to {0}/{1}".format(site, Dir))
    else:
        print("Connected to {0}".format(site))

    # Depending on the type of connection (ftp vs http) populate the file list
    try:
        if isftp:
            dates, _ = list_ftp(site, False, False, Dir)
        else:
            dates = list_http_e4ftl01(site)
    except Exception:
        raise ValueError("Could not connect to {0}/{1}".format(site, Dir))

    # refine contents down to just addresses of valid year and j_day
    good_dates = []
    for date in dates:
        try:
            date_dto = datetime.strptime(date, "%Y.%m.%d")
            if start_dto <= date_dto <= end_dto:
                good_dates.append(date)

        except ValueError:
            # folder name was not a date; ignore it
            print("skipping non date folder name {0}".format(date))

    print('Found {0} days within range'.format(len(good_dates)))

    # for all folders within the desired date range,  map the subfolder contents.
    for good_date in good_dates:

        if isftp:
            files, _ = list_ftp(site, False, False, Dir + '/' + good_date)

        else:
            files = list_http_e4ftl01(site + '/' + good_date)

        for afile in files:

            # only list files with desired tile names and not preview jpgs
            if not '.jpg' in afile:
                for tile in tiles:
                    if tile in afile:

                        # assemble the address
                        if isftp:
                            address = '/'.join(
                                ['ftp://' + site, Dir, good_date, afile])
                        else:
                            address = '/'.join([site, good_date, afile])

                        # download the file. BUGFIX: the original condition was
                        # `not isfile and not force_overwrite`, which meant
                        # force_overwrite=True could never download anything.
                        outname = os.path.join(outdir, afile)
                        out_filepaths.append(outname)
                        if force_overwrite or not os.path.isfile(outname):
                            download_url(address, outname)
                            print('Downloaded {0}'.format(address))

    print("Finished retrieving MODIS - {0} data!".format(product))
    print("Downloaded {0} files".format(len(out_filepaths)))

    return out_filepaths
Beispiel #41
0
def fetch_MODIS(product, version, tiles, outdir, start_dto, end_dto,
                                                force_overwrite = False):
    """
    Fetch MODIS Land products from one of two servers. If this function
    runs and downloads 0 files, check that your inputs are consistent
    with the naming convention at the appropriate server address.

       http://e4ftl01.cr.usgs.gov
       ftp://n5eil01u.ecs.nsidc.org

    :param product:         MODIS product to download such as 'MOD10A1' or 'MYD11A1'
    :param version:         version number, usually '004' or '041' or '005'
    :param tiles:           list of tiles to grab such as ['h11v12','h11v11']
                            NOTE: for some MODIS products, the h and v are omitted.

    :param outdir :         output directory to save downloaded files
    :param start_dto:       datetime object, the starting date of the range of data to download
    :param end_dto:         datetime object, the ending date of the range of data to download
    :param force_overwrite: will re-download files even if they already exist

    :return out_filepaths:  a list of filepaths to all files created by this function
    """

    out_filepaths = []

    # check formats
    tiles = core.enf_list(tiles)

    # create output directories
    if not os.path.exists(outdir):
        os.makedirs(outdir)

    print("Connecting to servers!")

    # obtain the web address, protocol information, and subdirectory where
    # this type of MODIS data can be found.
    site, isftp, Dir = _find_modis_product(product, version)

    if Dir:
        print("Connected to {0}/{1}".format(site, Dir))
    else:
        print("Connected to {0}".format(site))

    # Depending on the type of connection (ftp vs http) populate the file list
    try:
        if isftp:
            dates,_ = list_ftp(site, False, False, Dir)
        else:
            dates   = list_http_e4ftl01(site)
    except Exception:
        raise ValueError("Could not connect to {0}/{1}".format(site,Dir))

    # refine contents down to just addresses of valid year and j_day
    good_dates = []
    for date in dates:
        try:
            date_dto = datetime.strptime(date, "%Y.%m.%d")
            if start_dto <= date_dto <= end_dto:
                good_dates.append(date)

        except ValueError:
            # folder name was not a date; ignore it
            print("skipping non date folder name {0}".format(date))


    print('Found {0} days within range'.format(len(good_dates)))

    # for all folders within the desired date range,  map the subfolder contents.
    for good_date in good_dates:

        if isftp:
            files,_ = list_ftp(site, False, False, Dir + '/' + good_date)

        else:
            files   = list_http_e4ftl01(site + '/' + good_date)

        for afile in files:

            # only list files with desired tile names and not preview jpgs
            if not '.jpg' in afile:
                for tile in tiles:
                    if tile in afile:

                        # assemble the address
                        if isftp:
                            address='/'.join(['ftp://'+site, Dir, good_date, afile])
                        else:
                            address='/'.join([site, good_date, afile])

                        # download the file. BUGFIX: the original condition was
                        # `not isfile and not force_overwrite`, which meant
                        # force_overwrite=True could never download anything.
                        outname = os.path.join(outdir, afile)
                        out_filepaths.append(outname)
                        if force_overwrite or not os.path.isfile(outname):
                            download_url(address, outname)
                            print('Downloaded {0}'.format(address))

    print("Finished retrieving MODIS - {0} data!".format(product))
    print("Downloaded {0} files".format(len(out_filepaths)))

    return out_filepaths
Beispiel #42
0
def Landsat_WELD(product, tiles, years, outdir):

    """
     Fetch WELD data from the server at [http://e4ftl01.cr.usgs.gov/WELD]

     Weld data is corrected and processed Landsat 5 and 7 data that is distributed in the
     MODIS sinusoidal projection and grid format. Read more about WELD data.
       https://landsat.usgs.gov/WELD.php
       http://globalmonitoring.sdstate.edu/projects/weldglobal/

     Inputs:
       product     WELD product to download such as 'USWK','USMO','USYR'
       tiles       list of tiles to grab such as ['h11v12','h11v11']
       years       list of years to grab such as range(2001,2014)
       outdir      output directory to save downloaded files
    """

    # check formats
    tiles = core.enf_list(tiles)
    years = core.enf_list(years)
    years = [str(year) for year in years]

    # create output directories
    for tile in tiles:
        if not os.path.exists(os.path.join(outdir, tile)):
            os.makedirs(os.path.join(outdir, tile))

    # NOTE: converted from Python 2 print statements to print() calls for
    # consistency with the rest of this module.
    print('{Fetch_Landsat_WELD} Connecting to servers!')

    # Map the contents of the directory
    site = 'http://e4ftl01.cr.usgs.gov/WELD/WELD' + product + '.001'
    try:
        dates = list_http(site)
    except Exception:
        # The original swallowed this error and fell through to an undefined
        # `global dates`, which raised a NameError anyway. Fail loudly instead.
        print('{Fetch_Landsat_WELD} Could not connect to site! check inputs!')
        raise

    # find just the folders within the desired year range.
    good_dates = []
    for date in dates:
        try:
            y, m, d = date.split(".")
            if y in years:
                good_dates.append(date)
        except ValueError:
            # folder name was not a y.m.d date; ignore it
            pass

    print('Found ' + str(len(good_dates)) + ' days within year range')

    # for all folders within the desired date range,  map the subfolder contents.
    for good_date in good_dates:

        files = list_http(site + '/' + good_date)

        for afile in files:
            # only list files with desired tilenames and not preview jpgs
            if not '.jpg' in afile:
                for tile in tiles:
                    if tile in afile:

                        # assemble the address
                        address = '/'.join([site, good_date, afile])
                        print('{Fetch_Landsat_WELD} Downloading ' + address)

                        # download the file.
                        outname = os.path.join(outdir, tile, afile)
                        download_url(address, outname)
    return
Beispiel #43
0
def degree_days_accum(rasterlist, critical_values=None, outdir=None):
    """
    Accumulates degree days in a time series rasterlist

    This function is the logical successor to calc.degree_days. Input a list of rasters
    containing daily data to be accumulated. Output raster for a given day will be the sum
    total of the input raster for that day and all preceding days. The last output raster in
    a years worth of data (image 356) would be the sum of all 365 images. The 25th output
    raster would be a sum of the first 25 days.
    Critical value rasters will also be created. Usefull for example: we wish to know on what day
    of our 365 day sequence every pixel hits a value of 100. Input 100 as a critical value
    and that output raster will be generated.

    :param rasterlist:          list of files, or directory containing rasters to accumulate
    :param critical_values:     Values at which the user wishes to know WHEN the total accumulation
                                value reaches this point. For every critical value, an output
                                raster will be created. This raster contains integer values denoting
                                the index number of the file at which the value was reached.
                                This input must be a list of ints or floats, not strings.
    :param outdir:              Desired output directory for all output files.

    :return output_filelist:    a list of all files created by this function.
    """

    output_filelist = []
    rasterlist = enf_rastlist(rasterlist)

    # normalize critical_values to a (possibly empty) list. The original ran
    # `0 in critical_values` and `len(critical_values)` even when the argument
    # was None, raising a TypeError.
    if critical_values:
        critical_values = core.enf_list(critical_values)

        # critical values of zero are problematic (zero marks "not yet reached"
        # in the Crit array), so replace 0 with a small positive value.
        if 0 in critical_values:
            critical_values.remove(0)
            critical_values.append(0.000001)
    else:
        critical_values = []

    if outdir is not None and not os.path.exists(outdir):
        os.makedirs(outdir)

    for i, rast in enumerate(rasterlist):

        image, meta = to_numpy(rast, "float32")
        xs, ys = image.shape

        # size the accumulators from the first raster's dimensions
        if i == 0:
            Sum = numpy.zeros((xs, ys))
            Crit = numpy.zeros((len(critical_values), xs, ys))

        if image.shape == Sum.shape:

            # only bother to proceed if at least one pixel is non-negative
            if numpy.max(image) >= 0:
                for x in range(xs):
                    for y in range(ys):

                        if image[x, y] >= 0:
                            Sum[x, y] = Sum[x, y] + image[x, y]

                        # record the file index at which each critical value
                        # is first reached (empty list -> loop is a no-op)
                        for z, critical_value in enumerate(critical_values):
                            if Sum[x, y] >= critical_value and Crit[z, x, y] == 0:
                                Crit[z, x, y] = i
        else:
            print("Encountered an image of incorrect size! Skipping it!")

        Sum = Sum.astype('float32')
        outname = core.create_outname(outdir, rast, "Accum")
        from_numpy(Sum, meta, outname)
        output_filelist.append(outname)

        del image

    # output critical accumulation rasters using some data from the last raster in previous loop
    if critical_values:
        Crit = Crit.astype('int16')
        crit_meta = meta
        crit_meta.NoData_Value = 0
        head, tail = os.path.split(
            outname)  # place these in the last raster output location
        for z, critical_value in enumerate(critical_values):
            outname = os.path.join(
                head, "Crit_Accum_Index_Val-{0}.tif".format(str(critical_value)))
            print("Saving {0}".format(outname))
            from_numpy(Crit[z, :, :], crit_meta, outname)

    return output_filelist
Beispiel #44
0
def toa_radiance_457(band_nums, meta_path, outdir=None):
    """
    Top of Atmosphere radiance (in Watts/(square meter x steradians x micrometers))
    conversion for Landsat 4, 5, and 7 data. To be performed on raw
    Landsat 4, 5, or 7 level 1 data.

    :param band_nums:   A list of desired band numbers such as [3, 4, 5]
    :param meta_path:   The full filepath to the metadata file for those bands
    :param outdir:      Output directory to save converted files.

    :return output_filelist:    List of filepaths created by this function.
    """

    output_filelist = []
    meta_path = os.path.abspath(meta_path)

    band_nums = core.enf_list(band_nums)
    band_nums = list(map(str, band_nums))

    # metadata format was changed August 29, 2012. This tool can process either the new or old format.
    # A context manager is used so the file handle is released even if an
    # exception (e.g. the ExecuteError below) is raised before the end of the
    # function; the original left the file open on that path.
    with open(meta_path) as f:
        MText = f.read()

    metadata = landsat_metadata(meta_path)

    #the presence of a PRODUCT_CREATION_TIME category is used to identify old metadata
    #if this is not present, the meta data is considered new.
    #Band6length refers to the length of the Band 6 name string. In the new metadata this string is longer
    if "PRODUCT_CREATION_TIME" in MText:
        Meta = "oldMeta"
        Band6length = 2
    else:
        Meta = "newMeta"
        Band6length = 8

    #The tilename is located using the newMeta/oldMeta indixes and the date of capture is recorded
    if Meta == "newMeta":
        TileName = getattr(metadata, "LANDSAT_SCENE_ID")
        year = TileName[9:13]
        jday = TileName[13:16]
        date = getattr(metadata, "DATE_ACQUIRED")

    elif Meta == "oldMeta":
        TileName = getattr(metadata, "BAND1_FILE_NAME")
        year = TileName[13:17]
        jday = TileName[17:20]
        date = getattr(metadata, "ACQUISITION_DATE")

    #the spacecraft from which the imagery was capture is identified
    #this info determines the solar exoatmospheric irradiance (ESun) for each band
    spacecraft = getattr(metadata, "SPACECRAFT_ID")

    if "7" in spacecraft:
        ESun = (1969.0, 1840.0, 1551.0, 1044.0, 255.700, 0., 82.07, 1368.00)
        TM_ETM_bands = ['1', '2', '3', '4', '5', '7', '8']

    elif "5" in spacecraft:
        ESun = (1957.0, 1826.0, 1554.0, 1036.0, 215.0, 0., 80.67)
        TM_ETM_bands = ['1', '2', '3', '4', '5', '7']

    elif "4" in spacecraft:
        ESun = (1957.0, 1825.0, 1557.0, 1033.0, 214.9, 0., 80.72)
        TM_ETM_bands = ['1', '2', '3', '4', '5', '7']

    else:
        arcpy.AddError("This tool only works for Landsat 4, 5, or 7")
        raise arcpy.ExecuteError()

    #Calculating values for each band
    for band_num in band_nums:
        if band_num in TM_ETM_bands:

            print("Processing Band {0}".format(band_num))
            pathname = meta_path.replace("MTL.txt",
                                         "B{0}.tif".format(band_num))
            Oraster = arcpy.Raster(pathname)

            # mask out zero-valued (fill) pixels so they stay NoData in the output
            null_raster = arcpy.sa.SetNull(Oraster, Oraster, "VALUE = 0")

            #using the oldMeta/newMeta indixes to pull the min/max for radiance/Digital numbers
            if Meta == "newMeta":
                LMax = getattr(metadata,
                               "RADIANCE_MAXIMUM_BAND_{0}".format(band_num))
                LMin = getattr(metadata,
                               "RADIANCE_MINIMUM_BAND_{0}".format(band_num))
                QCalMax = getattr(metadata,
                                  "QUANTIZE_CAL_MAX_BAND_{0}".format(band_num))
                QCalMin = getattr(metadata,
                                  "QUANTIZE_CAL_MIN_BAND_{0}".format(band_num))

            elif Meta == "oldMeta":
                LMax = getattr(metadata, "LMAX_BAND{0}".format(band_num))
                LMin = getattr(metadata, "LMIN_BAND{0}".format(band_num))
                QCalMax = getattr(metadata, "QCALMAX_BAND{0}".format(band_num))
                QCalMin = getattr(metadata, "QCALMIN_BAND{0}".format(band_num))

            # digital numbers -> at-sensor spectral radiance
            Radraster = (((LMax - LMin) / (QCalMax - QCalMin)) *
                         (null_raster - QCalMin)) + LMin
            Oraster = 0
            del null_raster

            band_rad = "{0}_B{1}".format(TileName, band_num)

            #create the output name and save the TOA radiance tiff
            if outdir is not None:
                outdir = os.path.abspath(outdir)
                outname = core.create_outname(outdir, band_rad, "TOA_Rad",
                                              "tif")
            else:
                folder = os.path.split(meta_path)[0]
                outname = core.create_outname(folder, band_rad, "TOA_Rad",
                                              "tif")

            Radraster.save(outname)
            output_filelist.append(outname)

            del Radraster

            print("toa radiance saved for Band {0}".format(band_num))

        #if listed band is not a TM/ETM+ sensor band, skip it and print message
        else:
            print(
                "Can only perform reflectance conversion on TM/ETM+ sensor bands"
            )
            print("Skipping band {0}".format(band_num))

    return output_filelist
Beispiel #45
0
def GCMO_NetCDF(netcdf_list, variable, outdir):
    """
    Extracts all time layers from a "Global Climate Model Output" NetCDF layer

    Inputs:
        netcdf_list     list of netcdfs from CORDEX climate distribution
        variable        the climate variable of interest (tsmax, tsmin, etc)
        outdir          output directory to save files.
    """

    if not os.path.exists(outdir):
        os.makedirs(outdir)

    netcdf_list = core.enf_list(netcdf_list)

    for netcdf in netcdf_list:
        # get net cdf properties object
        props = arcpy.NetCDFFileProperties(netcdf)

        print("finding dimensions")
        dims = props.getDimensions()
        for dim in dims:
            # BUGFIX: this was a Python 2 print *statement* ("print dim, ..."),
            # which is a syntax error under Python 3 and inconsistent with the
            # print-function style used everywhere else in this module.
            print("{0} {1}".format(dim, props.getDimensionSize(dim)))

        # make sure the variable is in this netcdf
        if variable:
            if variable not in props.getVariables():
                print("Valid variables for this file include {0}".format(
                    props.getVariables()))
                raise Exception(
                    "Variable '{0}' is not in this netcdf!".format(variable))

        for dim in dims:
            if dim == "time":

                # set other dimensions
                x_dim = "lon"
                y_dim = "lat"
                band_dim = ""
                valueSelectionMethod = "BY_VALUE"

                size = props.getDimensionSize(dim)
                for i in range(size):

                    # sanitize the dimname for invalid characters
                    dimname = props.getDimensionValue(dim, i).replace(
                        " 12:00:00 PM", "")
                    dimname = dimname.replace("/", "-").replace(" ", "_")

                    dim_value = [["time", props.getDimensionValue(dim, i)]]
                    print("extracting '{0}' from '{1}'".format(
                        variable, dim_value))

                    outname = core.create_outname(outdir, netcdf, dimname,
                                                  'tif')

                    arcpy.MakeNetCDFRasterLayer_md(netcdf, variable, x_dim,
                                                   y_dim, "temp", band_dim,
                                                   dim_value,
                                                   valueSelectionMethod)
                    arcpy.CopyRaster_management("temp", outname, "", "", "",
                                                "NONE", "NONE", "")

    return
Beispiel #46
0
def toa_reflectance_457(band_nums, meta_path, outdir = False):
    """
    This function is used to convert Landsat 4, 5, or 7 pixel values from
    digital numbers to Radiance, Reflectance, or Temperature (if using Band 6)

    Inputs:
       band_nums   A list of desired band numbers such as [3,4,5]
       meta_path   The full filepath to the metadata file for those bands
       outdir      Output directory to save converted files.

    Returns a list of arcpy.Raster objects for the saved reflectance tifs.
    """

    OutList = []

    band_nums = core.enf_list(band_nums)
    band_nums = map(str, band_nums)
    TM_ETM_bands = ['1','2','3','4','5','7','8']

    # metadata format was changed August 29, 2012. This tool can process either
    # the new or old format. Read the raw text with a context manager so the
    # file handle is closed even if a later step raises.
    with open(meta_path) as f:
        MText = f.read()

    metadata = grab_meta(meta_path)

    # the presence of a PRODUCT_CREATION_TIME category identifies old metadata;
    # if this is not present, the metadata is considered new.
    # Band6length refers to the length of the Band 6 name string, which is
    # longer in the new metadata.
    # BUGFIX: the old code used two empty lists (oldMeta = [], newMeta = []) as
    # sentinels and compared them with "==". Since [] == [] is always True, the
    # "old metadata" branches below were unreachable. Distinct string sentinels
    # compare correctly.
    if "PRODUCT_CREATION_TIME" in MText:
        Meta = "oldMeta"
        Band6length = 2
    else:
        Meta = "newMeta"
        Band6length = 8

    # The tilename is located using the newMeta/oldMeta indexes and the date of capture is recorded
    if Meta == "newMeta":
        TileName = getattr(metadata, "LANDSAT_SCENE_ID")
        year = TileName[9:13]
        jday = TileName[13:16]
        date = getattr(metadata, "DATE_ACQUIRED")
    elif Meta == "oldMeta":
        TileName = getattr(metadata, "BAND1_FILE_NAME")
        year = TileName[13:17]
        jday = TileName[17:20]
        date = getattr(metadata, "ACQUISITION_DATE")

    # the spacecraft from which the imagery was captured is identified;
    # this info determines the solar exoatmospheric irradiance (ESun) for each band
    spacecraft = getattr(metadata, "SPACECRAFT_ID")

    if "7" in spacecraft:
        ESun = (1969.0, 1840.0, 1551.0, 1044.0, 255.700, 0., 82.07, 1368.00)
    elif "5" in spacecraft:
        ESun = (1957.0, 1826.0, 1554.0, 1036.0, 215.0, 0., 80.67)
    elif "4" in spacecraft:
        ESun = (1957.0, 1825.0, 1557.0, 1033.0, 214.9, 0., 80.72)
    else:
        arcpy.AddError("This tool only works for Landsat 4, 5, or 7")
        raise arcpy.ExecuteError()

    # determine if year is a leap year and set the days-in-year accordingly
    if float(year) % 4 == 0:
        DIY = 366.
    else:
        DIY = 365.

    # use the julian day to determine the earth-sun distance correction
    theta = 2 * math.pi * float(jday) / DIY

    dSun2 = (1.00011 + 0.034221 * math.cos(theta) + 0.001280 * math.sin(theta) +
             0.000719 * math.cos(2 * theta) + 0.000077 * math.sin(2 * theta))

    # solar zenith angle in degrees
    SZA = 90. - float(getattr(metadata, "SUN_ELEVATION"))

    # Calculating values for each band
    for band_num in band_nums:
        if band_num in TM_ETM_bands:

            print("Processing Band {0}".format(band_num))
            pathname = meta_path.replace("MTL.txt", "B{0}.tif".format(band_num))
            Oraster = arcpy.Raster(pathname)

            # pull the min/max for radiance / digital numbers from the metadata
            if Meta == "newMeta":
                LMax    = getattr(metadata, "RADIANCE_MAXIMUM_BAND_" + band_num)
                LMin    = getattr(metadata, "RADIANCE_MINIMUM_BAND_" + band_num)
                QCalMax = getattr(metadata, "QUANTIZE_CAL_MAX_BAND_" + band_num)
                QCalMin = getattr(metadata, "QUANTIZE_CAL_MIN_BAND_" + band_num)
            elif Meta == "oldMeta":
                LMax    = getattr(metadata, "LMAX_BAND" + band_num)
                LMin    = getattr(metadata, "LMIN_BAND" + band_num)
                QCalMax = getattr(metadata, "QCALMAX_BAND" + band_num)
                QCalMin = getattr(metadata, "QCALMIN_BAND" + band_num)

            # digital numbers -> TOA radiance (linear rescale)
            Radraster = (((LMax - LMin)/(QCalMax-QCalMin)) * (Oraster - QCalMin)) + LMin
            Oraster = 0

            # TOA radiance -> TOA reflectance
            Refraster = (math.pi * Radraster * dSun2) / (ESun[int(band_num[0])-1] * math.cos(SZA*(math.pi/180)))
            BandPath = "{0}\\{1}_B{2}_TOA-Ref.tif".format(outdir, TileName, band_num)

            Refraster.save(BandPath)
            OutList.append(arcpy.Raster(BandPath))

            del Refraster, Radraster

            arcpy.AddMessage("Reflectance Calculated for Band {0}".format(band_num))
            print("Reflectance Calculated for Band {0}".format(band_num))

    return OutList
Beispiel #47
0
def toa_reflectance_457(band_nums, meta_path, outdir = None):
    """
    This function is used to convert Landsat 4, 5, or 7 pixel values from
    digital numbers to Top-of-Atmosphere Reflectance. To be performed on raw
    Landsat 4, 5, or 7 data.

    :param band_nums:   A list of desired band numbers such as [3,4,5]
    :param meta_path:   The full filepath to the metadata file for those bands
    :param outdir:      Output directory to save converted files. If left None it will save
                        output files in the same directory as input files.

    :return output_filelist:    List of files created by this function
    """

    output_filelist = []

    band_nums = core.enf_list(band_nums)
    band_nums = map(str, band_nums)

    # metadata format was changed August 29, 2012. This tool can process either
    # the new or old format. Use a context manager so the file handle is closed
    # even if a later step raises (the old code held it open to the very end).
    with open(meta_path) as f:
        MText = f.read()

    meta_path = os.path.abspath(meta_path)
    metadata = grab_meta(meta_path)

    # the presence of a PRODUCT_CREATION_TIME category is used to identify old metadata
    # if this is not present, the meta data is considered new.
    # Band6length refers to the length of the Band 6 name string. In the new metadata this string is longer
    if "PRODUCT_CREATION_TIME" in MText:
        Meta = "oldMeta"
        Band6length = 2
    else:
        Meta = "newMeta"
        Band6length = 8

    # The tilename is located using the newMeta/oldMeta indexes and the date of capture is recorded
    if Meta == "newMeta":
        TileName = getattr(metadata, "LANDSAT_SCENE_ID")
        year = TileName[9:13]
        jday = TileName[13:16]
        date = getattr(metadata, "DATE_ACQUIRED")
    elif Meta == "oldMeta":
        TileName = getattr(metadata, "BAND1_FILE_NAME")
        year = TileName[13:17]
        jday = TileName[17:20]
        date = getattr(metadata, "ACQUISITION_DATE")

    # the spacecraft from which the imagery was captured is identified;
    # this info determines the solar exoatmospheric irradiance (ESun) for each band
    spacecraft = getattr(metadata, "SPACECRAFT_ID")

    if "7" in spacecraft:
        ESun = (1969.0, 1840.0, 1551.0, 1044.0, 255.700, 0., 82.07, 1368.00)
        TM_ETM_bands = ['1','2','3','4','5','7','8']
    elif "5" in spacecraft:
        ESun = (1957.0, 1826.0, 1554.0, 1036.0, 215.0, 0., 80.67)
        TM_ETM_bands = ['1','2','3','4','5','7']
    elif "4" in spacecraft:
        ESun = (1957.0, 1825.0, 1557.0, 1033.0, 214.9, 0., 80.72)
    else:
        arcpy.AddError("This tool only works for Landsat 4, 5, or 7")
        raise arcpy.ExecuteError()

    # determine if year is a leap year and set the days-in-year accordingly
    if float(year) % 4 == 0:
        DIY = 366.
    else:
        DIY = 365.

    # using the date to determine the distance from the sun
    theta = 2 * math.pi * float(jday) / DIY

    dSun2 = (1.00011 + 0.034221 * math.cos(theta) + 0.001280 * math.sin(theta) +
             0.000719 * math.cos(2 * theta) + 0.000077 * math.sin(2 * theta))

    # solar zenith angle in degrees
    SZA = 90. - float(getattr(metadata, "SUN_ELEVATION"))

    # normalize the output directory once, outside the loop (was re-run per band)
    if outdir is not None:
        outdir = os.path.abspath(outdir)

    # Calculating values for each band
    for band_num in band_nums:
        if band_num in TM_ETM_bands:

            print("Processing Band {0}".format(band_num))
            pathname = meta_path.replace("MTL.txt", "B{0}.tif".format(band_num))
            Oraster = arcpy.Raster(pathname)

            # mask out fill pixels (DN == 0) before conversion
            null_raster = arcpy.sa.SetNull(Oraster, Oraster, "VALUE = 0")

            # using the oldMeta/newMeta indices to pull the min/max for radiance/Digital numbers
            if Meta == "newMeta":
                LMax    = getattr(metadata, "RADIANCE_MAXIMUM_BAND_{0}".format(band_num))
                LMin    = getattr(metadata, "RADIANCE_MINIMUM_BAND_{0}".format(band_num))
                QCalMax = getattr(metadata, "QUANTIZE_CAL_MAX_BAND_{0}".format(band_num))
                QCalMin = getattr(metadata, "QUANTIZE_CAL_MIN_BAND_{0}".format(band_num))
            elif Meta == "oldMeta":
                LMax    = getattr(metadata, "LMAX_BAND{0}".format(band_num))
                LMin    = getattr(metadata, "LMIN_BAND{0}".format(band_num))
                QCalMax = getattr(metadata, "QCALMAX_BAND{0}".format(band_num))
                QCalMin = getattr(metadata, "QCALMIN_BAND{0}".format(band_num))

            # digital numbers -> TOA radiance (linear rescale)
            Radraster = (((LMax - LMin)/(QCalMax-QCalMin)) * (null_raster - QCalMin)) + LMin
            Oraster = 0
            del null_raster

            # TOA radiance -> TOA reflectance (NOT band-6 temperature; the old
            # comment here was copied from a brightness-temperature routine)
            Refraster = (math.pi * Radraster * dSun2) / (ESun[int(band_num[0])-1] * math.cos(SZA*(math.pi/180)))

            # construct output names for each band based on whether outdir is set
            if outdir is not None:
                BandPath = core.create_outname(outdir, pathname, "TOA_Ref", "tif")
            else:
                folder = os.path.split(meta_path)[0]
                BandPath = core.create_outname(folder, pathname, "TOA_Ref", "tif")

            Refraster.save(BandPath)
            output_filelist.append(BandPath)

            del Refraster, Radraster
            print("Reflectance Calculated for Band {0}".format(band_num))

        # if listed band is not a TM/ETM+ sensor band, skip it and print message
        else:
            print("Can only perform reflectance conversion on TM/ETM+ sensor bands")
            print("Skipping band {0}".format(band_num))

    return output_filelist
Beispiel #48
0
def fetch_SRTM(lat_lon_pairs, product, outdir = None, mosaic = None):
    """
    downloads data from the Shuttle Radar Topography Mission (SRTM)
    [http://e4ftl01.cr.usgs.gov/SRTM/]

    This data can be used to create DEMS of a variety of resolutions.

    Inputs:
        lat_lon_pairs   tupled integer values of lat,lon combinations.
                        may be a list of tuples. (N positive, E positive)
        product         short name of product you want. See link below
                        https://lpdaac.usgs.gov/products/measures_products_table
        outdir          local directory to save downloaded files. If left None,
                        files are saved in the current working directory.
        mosaic          Set to TRUE to mosaic all downloaded DEM tiles.

    Returns:
        tif_list        a list of all successfully downloaded tif filepaths
                        for further manipulation

    Example:
        lat_lons = [(37,-76), (37,-77)]    # Two tiles
        prod = "SRTMGL3"                   #3 arc second DEM product)

        download.fetch_SRTM(lat_lons, prod)

    NOTE: arcmap will open the output hgt files ONLY if they are not renamed.
    turns out arcmap does some funky things when interpreting these files.
    """

    # build empty return list
    tif_list = []

    # sanitize input list
    lat_lon_pairs = core.enf_list(lat_lon_pairs)

    # determine product version
    # BUGFIX: this previously used "product is 'SRTMGL30'", an identity check
    # against a string literal that is not guaranteed to be True for equal
    # strings; string comparison must use "==".
    if product == "SRTMGL30":
        print("Download of product SRTMGL30 is supported, but arcmap does not support this filetype")
        format_string = "{2}{3}{0}{1}.{4}.dem.zip"
        version = "002"

    else:
        format_string = "{0}{1}{2}{3}.{4}.hgt.zip"
        version = "003"

    host = "http://e4ftl01.cr.usgs.gov/SRTM"
    subhost = "{0}/{1}.{2}/2000.02.11/".format(host, product, version)

    print("Connecting to host at {0}".format(subhost))

    # BUGFIX: when outdir is None (the documented default), the old code passed
    # None to os.path.join, raising a TypeError. Fall back to the current
    # working directory, which matches where the bare-filename zips landed.
    dest_dir = outdir if outdir is not None else os.getcwd()

    for lat_lon_pair in lat_lon_pairs:
        lat, lon = lat_lon_pair

        # set North-south, East-West convention.
        if lat >= 0:
            NS = "N"
        else:
            NS = "S"

        if lon >= 0:
            EW = "E"
        else:
            EW = "W"

        # SRTMGL30 tiles cover large fixed regions; snap coords to tile corners
        if product == "SRTMGL30":

            if abs(lon) <= 20:
                lon = 20
            elif abs(lon) <= 60:
                lon = 60
            elif abs(lon) <= 100:
                lon = 100
            else:
                lon = 140

            if abs(lat) <= 10:
                lat = 10
            elif abs(lat) <= 40:
                lat = 40
            else:
                lat = 90

            NS = NS.lower()
            EW = EW.lower()

        # build up the filename and file link
        filename = format_string.format(NS, str(abs(lat)).zfill(2),
                                        EW, str(abs(lon)).zfill(3),
                                        product)

        filelink = "{0}/{1}".format(subhost, filename)

        # decide where to put the file, then download it
        if outdir is not None:
            outpath = os.path.join(outdir, filename)
        else:
            outpath = filename

        print("Downloading and extracting  {0}".format(filename))
        download_url(filelink, outpath)

        # unzip the file and reassemble descriptive name
        with zipfile.ZipFile(outpath, "r") as z:
            itemname = "{0}{1}{2}{3}.hgt".format(NS, str(abs(lat)).zfill(2),
                                                 EW, str(abs(lon)).zfill(3))
            z.extract(itemname, dest_dir)

        # clean up and add this file to output list
        os.remove(outpath)
        tif_list.append(os.path.join(dest_dir, itemname))

    if mosaic is True:
        arcpy.MosaicToNewRaster_management(tif_list, dest_dir, "SRTM_mosaic.tif",
                                           number_of_bands = 1, pixel_type = "32_BIT_SIGNED")

    print("Finished download and extraction of SRTM data")

    return tif_list
Beispiel #49
0
def fetch_MODIS(product, version, tiles, outdir, years, j_days = False,
                                                force_overwrite = False):
    """
    Fetch MODIS Land products from one of two servers.

       http://e4ftl01.cr.usgs.gov
       ftp://n5eil01u.ecs.nsidc.org

    Inputs:
        product         MODIS product to download such as 'MOD10A1' or 'MYD11A1'
        version         version number, usually '004' or '041' or '005'
        tiles           list of tiles to grab such as ['h11v12','h11v11']
        outdir          output directory to save downloaded files
        years           list of years to grab such as range(2001,2014)
        j_days          list of days to grab such as range(31,60).
                        Defaults to all days in year
        force_overwrite will re-download files even if they already exist
    """

    def Find_MODIS_Product(product, version):
        """
        Subfunction to determine server properties for MODIS data product.
        returns http/ftp handles

        the two current servers where aqua/terra MODIS data can be downloaded are
            site1='http://e4ftl01.cr.usgs.gov'
            site2='n5eil01u.ecs.nsidc.org'

        Inputs:
           product     modis product such as 'MOD10A1'
           version     modis version, usually '005', '004', or '041'

        Outputs:
           site        server address where data can be found
           isftp       True when the ftp server must be used
           Dir         subdirectory of server to further search for files of input product.
        """

        sat_designation = product[0:3]
        prod_ID = product[3:]

        site1 = 'http://e4ftl01.cr.usgs.gov/'
        site2 = 'n5eil01u.ecs.nsidc.org'

        isftp = False
        Dir   = False

        # snow products (prod_ID containing '10') live on the ftp server
        if '10' in prod_ID:
            isftp = True
            site  = site2

        if sat_designation == 'MOD':        # Terra
            if isftp:
                Dir = 'MOST/' + product + '.' + version
            else:
                site = site1 + 'MOLT/' + product + '.' + version

        elif sat_designation == 'MYD':      # Aqua
            if isftp:
                Dir = 'DP1/MOSA/' + product + '.' + version
            else:
                site = site1 + 'MOLA/' + product + '.' + version

        elif sat_designation == 'MCD':      # combined
            site = site1 + 'MOTA/' + product + '.' + version

        else:
            print('No such MODIS product is available for download with this script!')
            site = "None"

        return site, isftp, Dir

    # check formats
    tiles = core.enf_list(tiles)
    years = core.enf_list(years)
    years = [str(year) for year in years]

    if isinstance(j_days, list):
        js = [str(j_day).zfill(3) for j_day in j_days]
    elif isinstance(j_days, int) and j_days != False:
        js = [str(j_days)]
    else:
        js = [str(x).zfill(3) for x in range(367)]

    # do a quick input tile check for 6 characters.
    for tile in tiles:
        if not len(tile) == 6:
            print("Warning! your tiles appear to be invalid!")
            print("Warning! make sure they are in format 'h##v##")

    # create output directories
    if not os.path.exists(outdir):
        os.makedirs(outdir)

    print("Connecting to servers!")

    # obtain the web address, protocol information, and subdirectory where
    # this type of MODIS data can be found.
    site, isftp, Dir = Find_MODIS_Product(product, version)

    if Dir:
        print("Connected to {0}/{1}".format(site, Dir))
    else:
        print("Connected to {0}".format(site))

    # Depending on the type of connection (ftp vs http) populate the file list
    try:
        if isftp:
            dates, _ = list_ftp(site, False, False, Dir)
        else:
            dates = list_http(site)
    except Exception:
        raise ValueError("Could not connect to {0}/{1}".format(site, Dir))

    # refine contents down to just addresses of valid year and j_day
    good_dates = []
    for date in dates:

        try:
            dto   = datetime.strptime(date, "%Y.%m.%d")
            j_day = dto.strftime("%j")
            year  = dto.strftime("%Y")

            if year in years:
                good_dates.append(date)

                if j_days:
                    if j_day not in js:
                        good_dates.remove(date)
        except ValueError:
            print("skipping non date folder name {0}".format(date))

    print('Found {0} days within range'.format(len(good_dates)))

    # for all folders within the desired date range, map the subfolder contents.
    for good_date in good_dates:

        if isftp:
            files, _ = list_ftp(site, False, False, Dir + '/' + good_date)
        else:
            files = list_http(site + '/' + good_date)

        for afile in files:

            # only list files with desired tile names and not preview jpgs
            if not '.jpg' in afile:
                for tile in tiles:
                    if tile in afile:

                        # assemble the address
                        if isftp:
                            address = '/'.join(['ftp://' + site, Dir, good_date, afile])
                        else:
                            address = '/'.join([site, good_date, afile])

                        # download the file.
                        # BUGFIX: the old condition was
                        #   "not os.path.isfile(outname) and not force_overwrite"
                        # which made force_overwrite=True *prevent* every
                        # download. Overwrite mode must force the download;
                        # otherwise download only missing files.
                        outname = os.path.join(outdir, afile)
                        if force_overwrite or not os.path.isfile(outname):
                            download_url(address, outname)
                            print('Downloaded {0}'.format(address))
                        else:
                            print('Already exists, skipping {0}'.format(address))

    print('Finished retrieving MODIS - {0} data!'.format(product))
    return
Beispiel #50
0
def extract_from_hdf(filelist, layerlist, layernames = False, outdir = None):
    """
    Extracts tifs from MODIS extract_HDF_layer files, ensures proper projection.

     inputs:
       filelist    list of '.hdf' files from which data should be extracted (or a directory)
       layerlist   list of layer numbers to pull out as individual tifs should be integers
                   such as [0,4] for the 0th and 4th layer respectively.
       layernames  list of layer names to put more descriptive file suffixes to each layer
       outdir      directory to which tif files should be saved
                   if outdir is left as None, files are saved in the same directory as
                   the input file was found.

     returns a list of input files for which at least one layer failed to extract.
    """

    if outdir is not None:
        if not os.path.exists(outdir):
            os.makedirs(outdir)

    # enforce lists for iteration purposes and sanitize inputs
    filelist = core.enf_filelist(filelist)

    # BUGFIX: the old code removed entries from "filelist" while iterating over
    # it, which silently skips the element immediately after each removal.
    # Build a filtered copy instead.
    filelist = [fname for fname in filelist
                if '.hdf' in fname and '.xml' not in fname]

    layerlist  = core.enf_list(layerlist)
    layernames = core.enf_list(layernames)

    # ignore user input layernames if they are invalid, but print warnings
    # NOTE(review): Warning(...) only constructs an exception object, it does
    # not raise or print anything — preserved as-is, but likely intended to be
    # a print or a raise. Confirm with the module author.
    if layernames and not len(layernames) == len(layerlist):
        Warning('Layernames must be the same length as layerlist!')
        Warning('Ommiting user defined layernames!')
        layernames = False

    # create empty list to add failed file names into
    failed = []

    # iterate through every file in the input filelist
    for infile in filelist:

        # pull the filename and path apart
        path, name = os.path.split(infile)
        arcpy.env.workspace = path

        for i, layer in enumerate(layerlist):

            # specify the layer names.
            if layernames:
                layername = layernames[i]
            else:
                layername = str(layer).zfill(3)

            # use the input output directory if the user input one, otherwise build one
            if outdir:
                outname = os.path.join(outdir, "{0}_{1}.tif".format(name[:-4], layername))
            else:
                outname = os.path.join(path, "{0}_{1}.tif".format(name[:-4], layername))

            # perform the extracting and projection definition
            try:
                # extract the subdataset
                arcpy.ExtractSubDataset_management(infile, outname, str(layer))

                # define the projection as the MODIS Sinusoidal
                define_projection(outname)

                print("Extracted {0}".format(os.path.basename(outname)))

            except Exception:
                print("Failed to extract {0}  from {1}".format(os.path.basename(outname),
                                                               os.path.basename(infile)))
                # BUGFIX: this append was previously indented OUTSIDE the
                # except block, so every input file was recorded as "failed"
                # regardless of success. It belongs in the failure path only.
                failed.append(infile)

    print("Finished extracting all hdfs! \n")
    return failed
Beispiel #51
0
def surface_reflectance_dos_457(band_nums, meta_path, outdir):
    """
    Converts Landsat 4, 5, and 7 band DNs to surface reflectance using dark object subtraction.

     To be performed on raw Landsat 4,5, and 7 level 1 data. See link below for details
     see here [http://landsat.usgs.gov/Landsat8_Using_Product.php]

     Inputs:
       band_nums   A list of desired band numbers such as [3,4,5]
       meta_path   The full filepath to the metadata file for those bands
       outdir      Output directory to save converted files.

    Returns a list of arcpy.Raster objects for the saved surface-reflectance tifs.
    """
    band_nums = core.enf_list(band_nums)
    band_nums = map(str, band_nums)
    TM_ETM_bands = ['1','2','3','4','5','7','8']

    OutList = []

    # read the raw metadata text with a context manager so the file handle is
    # closed even if a later step raises
    with open(meta_path) as f:
        MText = f.read()

    metadata = landsat.grab_meta(meta_path)

    # the presence of a PRODUCT_CREATION_TIME category identifies old metadata;
    # if this is not present, the metadata is considered new.
    # Band6length refers to the length of the Band 6 name string, which is
    # longer in the new metadata.
    # BUGFIX: the old code used two empty lists (oldMeta = [], newMeta = []) as
    # sentinels and compared them with "==". Since [] == [] is always True, the
    # "old metadata" branches below were unreachable. Distinct string sentinels
    # compare correctly.
    if "PRODUCT_CREATION_TIME" in MText:
        Meta = "oldMeta"
        Band6length = 2
    else:
        Meta = "newMeta"
        Band6length = 8

    # The tilename is located using the newMeta/oldMeta indexes and the date of capture is recorded
    if Meta == "newMeta":
        TileName = getattr(metadata, "LANDSAT_SCENE_ID")
        year = TileName[9:13]
        jday = TileName[13:16]
        date = getattr(metadata, "DATE_ACQUIRED")
    elif Meta == "oldMeta":
        TileName = getattr(metadata, "BAND1_FILE_NAME")
        year = TileName[13:17]
        jday = TileName[17:20]
        date = getattr(metadata, "ACQUISITION_DATE")

    # the spacecraft from which the imagery was captured is identified;
    # this info determines the solar exoatmospheric irradiance (ESun) for each band
    spacecraft = getattr(metadata, "SPACECRAFT_ID")

    if "7" in spacecraft:
        ESun = (1969.0, 1840.0, 1551.0, 1044.0, 255.700, 0., 82.07, 1368.00)
    elif "5" in spacecraft:
        ESun = (1957.0, 1826.0, 1554.0, 1036.0, 215.0, 0., 80.67)
    elif "4" in spacecraft:
        ESun = (1957.0, 1825.0, 1557.0, 1033.0, 214.9, 0., 80.72)
    else:
        arcpy.AddError("This tool only works for Landsat 4, 5, or 7")
        raise arcpy.ExecuteError()

    # determine if year is a leap year and set the days-in-year accordingly
    if float(year) % 4 == 0:
        DIY = 366.
    else:
        DIY = 365.

    # use the julian day to determine the earth-sun distance correction
    theta = 2 * math.pi * float(jday) / DIY

    dSun2 = (1.00011 + 0.034221 * math.cos(theta) + 0.001280 * math.sin(theta) +
             0.000719 * math.cos(2 * theta) + 0.000077 * math.sin(2 * theta))

    # solar zenith angle in degrees
    SZA = 90. - float(getattr(metadata, "SUN_ELEVATION"))

    for band_num in band_nums:
        if band_num in TM_ETM_bands:
            pathname = meta_path.replace("MTL.txt", "B{0}.tif".format(band_num))
            Oraster = arcpy.Raster(pathname)

            # mask out fill pixels (DN == 0) before conversion
            null_raster = arcpy.sa.SetNull(Oraster, Oraster, "VALUE = 0")

            # pull the min/max for radiance / digital numbers from the metadata
            if Meta == "newMeta":
                LMax    = getattr(metadata, "RADIANCE_MAXIMUM_BAND_" + band_num)
                LMin    = getattr(metadata, "RADIANCE_MINIMUM_BAND_" + band_num)
                QCalMax = getattr(metadata, "QUANTIZE_CAL_MAX_BAND_" + band_num)
                QCalMin = getattr(metadata, "QUANTIZE_CAL_MIN_BAND_" + band_num)
            elif Meta == "oldMeta":
                LMax    = getattr(metadata, "LMAX_BAND" + band_num)
                LMin    = getattr(metadata, "LMIN_BAND" + band_num)
                QCalMax = getattr(metadata, "QCALMAX_BAND" + band_num)
                QCalMin = getattr(metadata, "QCALMIN_BAND" + band_num)

            # digital numbers -> TOA radiance (linear rescale)
            Radraster = (((LMax - LMin)/(QCalMax-QCalMin)) * (null_raster - QCalMin)) + LMin
            Oraster = 0
            del null_raster

            # TOA radiance -> TOA reflectance
            Refraster = (math.pi * Radraster * dSun2) / (ESun[int(band_num[0]) - 1] * math.cos(SZA * (math.pi/180)))

            # find the scene minimum (the "dark object") for this band
            dark_object = arcpy.GetRasterProperties_management(Refraster, "MINIMUM")
            do_str = str(dark_object)
            do_flt = float(do_str)

            # perform dark object subtraction to estimate surface reflectance
            Surfrefraster = Refraster - do_flt

            BandPath = "{0}\\{1}_B{2}_SurfRef.tif".format(outdir, TileName, band_num)
            Surfrefraster.save(BandPath)
            OutList.append(arcpy.Raster(BandPath))

            del Refraster, Radraster, Surfrefraster

            arcpy.AddMessage("Surface Reflectance Calculated for Band {0}".format(band_num))
            print("Surface Reflectance Calculated for Band {0}".format(band_num))

    return OutList