def cloud_mask_8(band_nums, BQA_path, outdir=False):
    """
    Removal of cloud-covered pixels in raw Landsat 8 bands using the BQA file included.

    To be performed on raw Landsat 8 level 1 data.

    Inputs:
      band_nums     A list of desired band numbers such as [3, 4, 5]
      BQA_path      The full filepath to the BQA file for the Landsat 8 dataset
      outdir        Output directory to save cloudless band tifs and the cloud mask
    """

    # enforce the input band numbers as a list of strings
    band_nums = core.enf_list(band_nums)
    band_nums = map(str, band_nums)

    # define the range of values in the BQA file to be reclassified as cloud (0) or not cloud (1)
    outReclass = Reclassify(BQA_path, "Value",
                            RemapRange([[50000, 65000, 0], [28670, 32000, 0], [2, 28669, 1],
                                        [32001, 49999, 1], [1, 1, "NoData"]]))

    # set the name and save the binary cloud mask tiff file
    Mask_name = BQA_path.replace("_BQA", "")
    CloudMask_path = core.create_outname(outdir, Mask_name, "Mask", "tif")
    outReclass.save(CloudMask_path)

    # for each band listed in band_nums, apply the Con tool to erase cloud pixels and save each band as a new tiff
    for band_num in band_nums:
        band_path = BQA_path.replace("BQA.tif", "B{0}.tif".format(band_num))
        outname = core.create_outname(outdir, band_path, "NoClds", "tif")
        outCon = Con(outReclass, band_path, "", "VALUE = 1")
        outCon.save(outname)

    return
def atsat_bright_temp_8(meta_path, outdir=False):
    """
    Converts Landsat 8 TIRS bands to at-satellite brightness temperature in Kelvins

    To be performed on raw Landsat 8 level 1 data. See the link below for details:
    http://landsat.usgs.gov/Landsat8_Using_Product.php

    :param meta_path:   The full filepath to the metadata file for the TIRS bands (10 and 11)
    :param outdir:      Output directory to save converted files. If left False it will save
                        output files in the same directory as input files.

    :return output_filelist: A list of all files created by this function
    """

    # enforce the list of band numbers and grab metadata from the MTL file
    band_nums = ["10", "11"]
    meta_path = os.path.abspath(meta_path)
    meta = grab_meta(meta_path)

    output_filelist = []

    # cycle through each band in the list for calculation, ensuring each is in the list of TIRS bands
    for band_num in band_nums:

        # scrape data from the given file path and attributes in the MTL file
        band_path = meta_path.replace("MTL.txt", "B{0}.tif".format(band_num))
        Qcal = arcpy.Raster(band_path)

        # get rid of the zero values that show as the black background to avoid skewing values
        null_raster = arcpy.sa.SetNull(Qcal, Qcal, "VALUE = 0")

        # requires first converting to radiance
        Ml = getattr(meta, "RADIANCE_MULT_BAND_{0}".format(band_num))   # multiplicative scaling factor
        Al = getattr(meta, "RADIANCE_ADD_BAND_{0}".format(band_num))    # additive rescaling factor

        TOA_rad = (null_raster * Ml) + Al

        # now convert to at-satellite brightness temperature
        K1 = getattr(meta, "K1_CONSTANT_BAND_{0}".format(band_num))     # thermal conversion constant 1
        K2 = getattr(meta, "K2_CONSTANT_BAND_{0}".format(band_num))     # thermal conversion constant 2

        # calculate brightness temperature at the satellite
        Bright_Temp = K2 / (arcpy.sa.Ln((K1 / TOA_rad) + 1))

        # save the data to the automated name if outdir is given or in the parent folder if not
        if outdir:
            outdir = os.path.abspath(outdir)
            outname = core.create_outname(outdir, band_path, "ASBTemp", "tif")
        else:
            folder = os.path.split(meta_path)[0]
            outname = core.create_outname(folder, band_path, "ASBTemp", "tif")

        Bright_Temp.save(outname)
        output_filelist.append(outname)
        print("Saved output at {0}".format(outname))
        del TOA_rad, null_raster

    return output_filelist
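# A minimal usage sketch for atsat_bright_temp_8 (hypothetical file paths), assuming this
# function is importable from dnppy's landsat module and arcpy with Spatial Analyst is licensed:
#
#   from dnppy import landsat
#   tirs_tifs = landsat.atsat_bright_temp_8("C:/data/LC80410362014232LGN00_MTL.txt",
#                                           outdir="C:/data/bright_temp")
#   # returns filepaths to brightness temperature tiffs for bands 10 and 11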
def ndvi_457(Band4, Band3, outdir=False):
    """
    Calculates a normalized difference vegetation index on Landsat 4/5/7 TM/ETM+ data.

    To be performed on raw or processed Landsat 4/5/7 TM/ETM+ data, preferably TOA or
    Surface Reflectance.

    Inputs:
      Band4         The full filepath to the band 4 tiff file, the TM/ETM+ NIR band
      Band3         The full filepath to the band 3 tiff file, the TM/ETM+ Visible Red band
      outdir        Output directory to save NDVI tifs
    """

    # set the input bands to float
    Red = arcpy.sa.Float(Band3)
    NIR = arcpy.sa.Float(Band4)

    # calculate the NDVI
    L457_NDVI = (NIR - Red) / (NIR + Red)

    # create the output name and save the NDVI tiff
    name = Band3.split("\\")[-1]
    ndvi_name = name.replace("_B3", "")

    if outdir:
        outname = core.create_outname(outdir, ndvi_name, "NDVI", "tif")
    else:
        folder = os.path.split(Band4)[0]
        outname = core.create_outname(folder, ndvi_name, "NDVI", "tif")

    L457_NDVI.save(outname)

    print("saved ndvi_457 at {0}".format(outname))
    return outname
def make_cloud_mask_8(BQA_path, outdir=False):
    """
    Creates a cloud mask tiff file from the Landsat 8 Quality Assessment Band (BQA) file.
    Requires only the BQA tiff file included in the dataset.

    Inputs:
      BQA_path      The full filepath to the BQA file for the raw Landsat 8 dataset
      outdir        Output directory to save cloudless band tifs and the cloud mask
    """

    # define the range of values in the BQA file to be reclassified as cloud (0) or not cloud (1)
    remap = arcpy.sa.RemapRange([[50000, 65000, 0], [28670, 32000, 0], [2, 28669, 1],
                                 [32001, 49999, 1], [1, 1, "NoData"]])
    outReclass = arcpy.sa.Reclassify(BQA_path, "Value", remap)

    # set the name and save the binary cloud mask tiff file
    BQA = os.path.abspath(BQA_path)
    name = os.path.split(BQA)[1]
    name_ext = os.path.splitext(name)[0]
    TileName = name_ext.replace("_BQA", "")

    # create an output name and save the mask tiff
    if outdir:
        outdir = os.path.abspath(outdir)
        CloudMask_path = core.create_outname(outdir, TileName, "Mask", "tif")
    else:
        folder = os.path.split(BQA)[0]
        CloudMask_path = core.create_outname(folder, TileName, "Mask", "tif")

    outReclass.save(CloudMask_path)

    return CloudMask_path
def toa_reflectance_8(band_nums, meta_path, outdir=False):
    """
    Converts Landsat 8 bands to Top-of-Atmosphere reflectance.

    To be performed on raw Landsat 8 level 1 data. See the link below for details:
    http://landsat.usgs.gov/Landsat8_Using_Product.php

    Inputs:
      band_nums     A list of desired band numbers such as [3,4,5]
      meta_path     The full filepath to the metadata file for those bands
      outdir        Output directory to save converted files. If left False it will save
                    output files in the same directory as input files.
    """

    outlist = []

    # enforce the list of band numbers and grab metadata from the MTL file
    band_nums = core.enf_list(band_nums)
    band_nums = map(str, band_nums)
    OLI_bands = ['1', '2', '3', '4', '5', '6', '7', '8', '9']

    meta_path = os.path.abspath(meta_path)
    meta = grab_meta(meta_path)

    # cycle through each band in the list for calculation, ensuring each is in the list of OLI bands
    for band_num in band_nums:
        if band_num in OLI_bands:

            # scrape data from the given file path and attributes in the MTL file
            band_path = meta_path.replace("MTL.txt", "B{0}.tif".format(band_num))
            Qcal = arcpy.Raster(band_path)

            Mp = getattr(meta, "REFLECTANCE_MULT_BAND_{0}".format(band_num))   # multiplicative scaling factor
            Ap = getattr(meta, "REFLECTANCE_ADD_BAND_{0}".format(band_num))    # additive rescaling factor
            SEA = getattr(meta, "SUN_ELEVATION") * (math.pi / 180)             # sun elevation angle theta_se

            # get rid of the zero values that show as the black background to avoid skewing values
            null_raster = arcpy.sa.SetNull(Qcal, Qcal, "VALUE = 0")

            # calculate top-of-atmosphere reflectance
            TOA_ref = (((null_raster * Mp) + Ap) / (math.sin(SEA)))

            # save the data to the automated name if outdir is given or in the parent folder if not
            if outdir:
                outdir = os.path.abspath(outdir)
                outname = core.create_outname(outdir, band_path, "TOA_Ref", "tif")
            else:
                folder = os.path.split(meta_path)[0]
                outname = core.create_outname(folder, band_path, "TOA_Ref", "tif")

            TOA_ref.save(outname)
            outlist.append(outname)
            print("Saved output at {0}".format(outname))

        # if listed band is not an OLI sensor band, skip it and print message
        else:
            print("Can only perform reflectance conversion on OLI sensor bands")
            print("Skipping band {0}".format(band_num))

    return outlist
def make_cloud_mask_8(BQA_path, outdir=None):
    """
    Creates a cloud mask tiff file from the Landsat 8 Quality Assessment Band (BQA) file.
    Requires only the BQA tiff file included in the dataset.

    :param BQA_path:    The full filepath to the BQA file for the raw Landsat 8 dataset
    :param outdir:      Output directory to save cloudless band tifs and the cloud mask

    :return cloud_mask_path: Filepath to newly created cloud mask
    """

    # define the range of values in the BQA file to be reclassified as cloud (0) or not cloud (1)
    remap = arcpy.sa.RemapRange([[50000, 65000, 0], [28670, 32000, 0], [2, 28669, 1],
                                 [32001, 49999, 1], [1, 1, "NoData"]])
    outReclass = arcpy.sa.Reclassify(BQA_path, "Value", remap)

    # set the name and save the binary cloud mask tiff file
    BQA = os.path.abspath(BQA_path)
    name = os.path.split(BQA)[1]
    name_ext = os.path.splitext(name)[0]
    TileName = name_ext.replace("_BQA", "")

    # create an output name and save the mask tiff
    if outdir is not None:
        outdir = os.path.abspath(outdir)
        cloud_mask_path = core.create_outname(outdir, TileName, "Mask", "tif")
    else:
        folder = os.path.dirname(BQA)
        cloud_mask_path = core.create_outname(folder, TileName, "Mask", "tif")

    outReclass.save(cloud_mask_path)

    return cloud_mask_path
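# A minimal usage sketch (hypothetical file paths) showing how make_cloud_mask_8 pairs with
# apply_cloud_mask below, assuming both are importable from dnppy's landsat module:
#
#   from dnppy import landsat
#   mask = landsat.make_cloud_mask_8("C:/data/LC80410362014232LGN00_BQA.tif",
#                                    outdir="C:/data/masks")
#   landsat.apply_cloud_mask(mask, "C:/data", outdir="C:/data/no_clouds")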
def apply_cloud_mask(mask_path, folder, outdir=False):
    """
    Removal of cloud-covered pixels in Landsat 4, 5, 7, or 8 bands using the mask created with
    landsat.make_cloud_mask_8 or landsat.make_cloud_mask_457.

    Inputs:
      folder        The folder containing the raw or processed band tiffs to remove clouds from
      mask_path     The full filepath to the mask file created by make_cloud_mask_8 or make_cloud_mask_457
      outdir        Output directory to save cloudless band tiffs.
                    If left False the output tiffs will be saved in "folder"
    """

    noclds_list = []

    # enforce the input band numbers as a list of strings
    mpath = os.path.abspath(mask_path)
    mask_split = os.path.split(mpath)[1]
    name = os.path.splitext(mask_split)[0]
    tilename = name.replace("_Mask", "")
    folder = os.path.abspath(folder)

    # loop through each file in folder
    inlist = []
    outlist = []

    for band in os.listdir(folder):
        band_name = "{0}_B".format(tilename)

        # for each band (number 1-9) tif whose id matches the mask's, create an output name
        # and append to the in and output lists
        if (band_name in band) and (band[-4:] == ".tif" or band[-4:] == ".TIF") and \
                ("NoClds" not in band) and ("BQA" not in band):
            name = band.replace(".tif", "")
            if outdir:
                outname = core.create_outname(outdir, name, "NoClds", "tif")
            else:
                outname = core.create_outname(folder, name, "NoClds", "tif")
            inlist.append("{0}\\{1}".format(folder, band))
            outlist.append(outname)

    # loop through the input list and apply the con to each file,
    # saving to the corresponding path in the output list
    y = 0
    for file in inlist:
        outcon = arcpy.sa.Con(mask_path, file, "", "VALUE = 1")
        outcon.save(outlist[y])
        noclds_list.append(outlist[y])
        y = y + 1
        if y > (len(inlist) - 1):
            break

    return noclds_list
def atsat_bright_temp_8(band_nums, meta_path, outdir=False):
    """
    Converts Landsat 8 TIRS bands to at-satellite brightness temperature in Kelvins

    To be performed on raw Landsat 8 level 1 data. See the link below for details:
    http://landsat.usgs.gov/Landsat8_Using_Product.php

    Inputs:
      band_nums     A list of desired band numbers, which should be [10,11]
      meta_path     The full filepath to the metadata file for those bands
      outdir        Output directory to save converted files. If left False it will save
                    output files in the same directory as input files.
    """

    band_nums = core.enf_list(band_nums)
    band_nums = map(str, band_nums)
    meta = grab_meta(meta_path)

    for band_num in band_nums:
        if band_num in ["10", "11"]:
            band_path = meta_path.replace("MTL.txt", "B{0}.tif".format(band_num))
            Qcal = arcpy.Raster(band_path)

            # requires first converting to radiance
            Ml = getattr(meta, "RADIANCE_MULT_BAND_" + band_num)    # multiplicative scaling factor
            Al = getattr(meta, "RADIANCE_ADD_BAND_" + band_num)     # additive rescaling factor

            TOA_rad = (Qcal * Ml) + Al

            # now convert to at-satellite brightness temperature
            K1 = getattr(meta, "K1_CONSTANT_BAND_" + band_num)      # thermal conversion constant 1
            K2 = getattr(meta, "K2_CONSTANT_BAND_" + band_num)      # thermal conversion constant 2

            Bright_Temp = K2 / (arcpy.sa.Ln((K1 / TOA_rad) + 1))

            metaname = core.create_outname(outdir, meta_path, "Bright-Temp")
            shutil.copyfile(meta_path, metaname)

            outname = core.create_outname(outdir, band_path, "Bright-Temp")
            Bright_Temp.save(outname)

            print("Saved output at {0}".format(outname))
            del TOA_rad

        else:
            print("Can only perform brightness temperature on TIRS sensor bands!")
            print("Skipping band {0}".format(band_num))

    return
def extract_GCMO_NetCDF(netcdf_list, variable, outdir):
    """
    Extracts all time layers from a "Global Climate Model Output" NetCDF layer

    :param netcdf_list:     List of netcdfs from CORDEX climate distribution
    :param variable:        The climate variable of interest (tsmax, tsmin, etc)
    :param outdir:          Output directory to save files.

    :return output_filelist: returns list of files created by this function
    """

    output_filelist = []

    if not os.path.exists(outdir):
        os.makedirs(outdir)

    netcdf_list = core.enf_list(netcdf_list)

    for netcdf in netcdf_list:
        # get netcdf properties object
        props = arcpy.NetCDFFileProperties(netcdf)

        print("finding dimensions")
        dims = props.getDimensions()
        for dim in dims:
            print("{0} {1}".format(dim, props.getDimensionSize(dim)))

        # make sure the variable is in this netcdf
        if variable:
            if not variable in props.getVariables():
                print("Valid variables for this file include {0}".format(props.getVariables()))
                raise Exception("Variable '{0}' is not in this netcdf!".format(variable))

        for dim in dims:
            if dim == "time":

                # set other dimensions
                x_dim = "lon"
                y_dim = "lat"
                band_dim = ""
                valueSelectionMethod = "BY_VALUE"

                size = props.getDimensionSize(dim)
                for i in range(size):

                    # sanitize the dimname for invalid characters
                    dimname = props.getDimensionValue(dim, i).replace(" 12:00:00 PM", "")
                    dimname = dimname.replace("/", "-").replace(" ", "_")

                    dim_value = [["time", props.getDimensionValue(dim, i)]]
                    print("extracting '{0}' from '{1}'".format(variable, dim_value))

                    outname = core.create_outname(outdir, netcdf, dimname, 'tif')
                    output_filelist.append(outname)

                    arcpy.MakeNetCDFRasterLayer_md(netcdf, variable, x_dim, y_dim, "temp",
                                                   band_dim, dim_value, valueSelectionMethod)
                    arcpy.CopyRaster_management("temp", outname, "", "", "", "NONE", "NONE", "")

    return output_filelist
def extract_TRMM_NetCDF(filelist, outdir):
    """
    Function converts NetCDFs to tiffs. Designed to work with TRMM data
    downloaded from GLOVIS

    inputs:
      filelist      list of '.nc' files to convert to tiffs.
      outdir        directory to which tif files should be saved

    returns an output filelist of local filepaths of extracted data.
    """

    # set up initial parameters
    arcpy.env.workspace = outdir
    filelist = core.enf_list(filelist)
    output_filelist = []

    # convert every file in the list "filelist"
    for infile in filelist:

        # use arcpy module to make raster layer from netcdf
        arcpy.MakeNetCDFRasterLayer_md(infile, "r", "longitude", "latitude", "r", "", "", "BY_VALUE")
        outname = core.create_outname(outdir, infile, "e", "tif")
        arcpy.CopyRaster_management("r", outname, "", "", "", "NONE", "NONE", "")
        output_filelist.append(outname)

        print('Converted netCDF file ' + outname + ' to Raster')

    return output_filelist
def apply_linear_correction(rasterlist, factor, offset, suffix='lc', outdir=None, floor=-999999):
    """
    Applies a linear correction to a raster dataset.

    New offset rasters are saved in the output directory with a suffix of "lc"
    unless one is specified. This may be used to apply any kind of linear relationship
    that can be described with "mx + b", such as conversion between K, C, and F.
    Also useful when ground truthing satellite data and discovering linear errors.
    All outputs are 32 bit floating point values.

    :param rasterlist:  list of rasters, a single raster, or a directory full of tiffs to
                        have a linear correction applied to them.
    :param factor:      every pixel in the raster will be MULTIPLIED by this value.
    :param offset:      this offset value will be ADDED to every pixel in the raster.
    :param suffix:      output files will take the same name as input files with this string
                        appended to the end. So input "FILE.tif" outputs "FILE_suffix.tif"
    :param outdir:      directory to save output rasters. "None" will save output images
                        in the same folder as the input images.
    :param floor:       Used to manage NoData. All values less than floor are set to floor,
                        then floor is set to the new NoData value. Defaults to -999,999

    :return output_filelist: filepaths to output files created by this function

    Example Usage:
        To convert MODIS land surface temperature from digital number to Kelvin, simply
        multiply by the scale factor of 0.02 listed at
        [https://lpdaac.usgs.gov/products/modis_products_table/myd11a1].
        Now that it is in Kelvin, converting to Celsius can be done by adding -273.15.
        So, use this function with::

            factor = 0.02
            offset = -273.15

        and one may convert MODIS land surface temperature digital numbers directly to Celsius!
    """

    output_filelist = []

    if outdir is not None and not os.path.isdir(outdir):
        os.makedirs(outdir)

    rasterlist = enf_rastlist(rasterlist)

    for raster in rasterlist:
        print("applying a linear correction to " + raster)
        image, metadata = to_numpy(raster, "float32")
        new_NoData = floor

        output = image * factor + offset
        low_value_indices = output < new_NoData
        output[low_value_indices] = new_NoData

        outname = core.create_outname(outdir, raster, suffix)
        from_numpy(output, metadata, outname, new_NoData)
        output_filelist.append(outname)

    print("Finished! \n ")
    return output_filelist
def ndvi_457(B4, B3, outdir=False):
    """
    Calculates a normalized difference vegetation index on Landsat 4/5/7 TM/ETM+ data.

    To be performed on raw or processed Landsat 4/5/7 TM/ETM+ data.

    Inputs:
      B4            The full filepath to the band 4 tiff file, the TM/ETM+ NIR band
      B3            The full filepath to the band 3 tiff file, the TM/ETM+ Visible Red band
      outdir        Output directory to save NDVI tifs
    """

    Red = Float(B3)
    NIR = Float(B4)

    L457_NDVI = (NIR - Red) / (NIR + Red)

    band_path = B3.replace("_B3", "")
    outname = core.create_outname(outdir, band_path, "NDVI", "tif")
    L457_NDVI.save(outname)

    print("saved ndvi_457 at {0}".format(outname))
    return
def clip_to_shape(rasterlist, shapefile, outdir=False):
    """
    Simple batch clipping script to clip rasters to shapefiles.

    :param rasterlist:  single file, list of files, or directory for which to clip rasters
    :param shapefile:   shapefile to which rasters will be clipped
    :param outdir:      desired output directory. If no output directory is specified, the
                        new files will simply have '_c' added as a suffix.

    :return output_filelist: list of files created by this function.
    """

    rasterlist = enf_rastlist(rasterlist)
    output_filelist = []

    # ensure output directory exists
    if outdir and not os.path.exists(outdir):
        os.makedirs(outdir)

    for raster in rasterlist:

        # create output filename with "c" suffix
        outname = core.create_outname(outdir, raster, 'c')

        # perform double clip, first using Clip_management (preserves NoData values),
        # then using the arcpy.sa module which can actually do clipping geometry,
        # unlike the management tool.
        arcpy.Clip_management(raster, "#", outname, shapefile, "ClippingGeometry")
        out = ExtractByMask(outname, shapefile)
        out.save(outname)
        output_filelist.append(outname)
        print("Clipped and saved: {0}".format(outname))

    return output_filelist
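# A minimal usage sketch for clip_to_shape (hypothetical paths), assuming it is importable
# from dnppy's raster module and that arcpy with the Spatial Analyst extension is available:
#
#   from dnppy import raster
#   clipped = raster.clip_to_shape("C:/data/tiffs",               # folder of input rasters
#                                  "C:/data/study_area.shp",      # clipping shapefile
#                                  outdir="C:/data/clipped")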
def clip_to_shape(rasterlist, shapefile, outdir=False):
    """
    Simple batch clipping script to clip rasters to shapefiles.

    Inputs:
      rasterlist    single file, list of files, or directory for which to clip rasters
      shapefile     shapefile to which rasters will be clipped
      outdir        desired output directory. If no output directory is specified, the new
                    files will simply have '_c' added as a suffix.
    """

    rasterlist = enf_rastlist(rasterlist)

    # ensure output directory exists
    if outdir and not os.path.exists(outdir):
        os.makedirs(outdir)

    for raster in rasterlist:

        # create output filename with "c" suffix
        outname = core.create_outname(outdir, raster, 'c')

        # perform double clip, first using Clip_management (preserves NoData values),
        # then using the arcpy.sa module which can actually do clipping geometry,
        # unlike the management tool.
        arcpy.Clip_management(raster, "#", outname, shapefile, "ClippingGeometry")
        out = ExtractByMask(outname, shapefile)
        out.save(outname)
        print("Clipped and saved: {0}".format(outname))

    return
def extract_GPM_precip(gpmfilepath):
    """
    Subfunction to extract only the calibrated precipitation estimate layer
    from GPM IMERG HDF5 files.
    """

    outdir = os.path.dirname(gpmfilepath)
    outname = core.create_outname(outdir, gpmfilepath, "precip", "tif")
    arcpy.ExtractSubDataset_management(gpmfilepath, outname, "5")

    return outname
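# A minimal usage sketch for extract_GPM_precip (hypothetical filename), assuming arcpy is
# available and the input IMERG HDF5 keeps calibrated precipitation at subdataset index "5"
# as the function above expects:
#
#   precip_tif = extract_GPM_precip("C:/data/3B-HHR.MS.MRG.3IMERG.example.HDF5")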
def toa_reflectance_8(band_nums, meta_path, outdir=False):
    """
    Converts Landsat 8 bands to Top-of-Atmosphere reflectance.

    To be performed on raw Landsat 8 level 1 data. See the link below for details:
    http://landsat.usgs.gov/Landsat8_Using_Product.php

    Inputs:
      band_nums     A list of desired band numbers such as [3,4,5]
      meta_path     The full filepath to the metadata file for those bands
      outdir        Output directory to save converted files. If left False it will save
                    output files in the same directory as input files.
    """

    band_nums = core.enf_list(band_nums)
    band_nums = map(str, band_nums)
    OLI_bands = ['1', '2', '3', '4', '5', '6', '7', '8', '9']

    meta = grab_meta(meta_path)

    for band_num in band_nums:
        if band_num in OLI_bands:
            band_path = meta_path.replace("MTL.txt", "B{0}.tif".format(band_num))
            Qcal = arcpy.Raster(band_path)

            Mp = getattr(meta, "REFLECTANCE_MULT_BAND_" + band_num)     # multiplicative scaling factor
            Ap = getattr(meta, "REFLECTANCE_ADD_BAND_" + band_num)      # additive rescaling factor
            SEA = getattr(meta, "SUN_ELEVATION") * (math.pi / 180)      # sun elevation angle theta_se

            TOA_ref = (((Qcal * Mp) + Ap) / (math.sin(SEA)))

            metaname = core.create_outname(outdir, meta_path, "TOA-Ref", "txt")
            shutil.copyfile(meta_path, metaname)

            outname = core.create_outname(outdir, band_path, "TOA-Ref", "tif")
            TOA_ref.save(outname)
            print("Saved output at {0}".format(outname))

        else:
            print("Can only perform reflectance conversion on OLI sensor bands!")
            print("Skipping band {0}".format(band_num))

    return
def _extract_HDF_datatype(hdf, layer_indexs, outdir=None, datatype=None,
                          force_custom=False, nodata_value=None):
    """
    This function wraps "_extract_HDF_layer_data" and "_gdal_dataset_to_tif".
    It only works for datatypes listed in the datatype_library.csv

    :param hdf:             a single hdf filepath
    :param layer_indexs:    list of int index values of layers to extract
    :param outdir:          filepath to output directory to place tifs. If left as "None"
                            output geotiffs will be placed right next to input HDF.
    :param datatype:        a dnppy.convert.datatype object created from an entry in the
                            datatype_library.csv
    :param force_custom:    if True, this will force the data to take on the projection and
                            geotransform attributes from the datatype object, even if valid
                            projection and geotransform info can be pulled from the gdal
                            dataset. Should almost never be True.
    :param nodata_value:    the value to set to Nodata

    :return: list of filepaths to output files
    """

    output_filelist = []

    if outdir is None:
        outdir = os.path.dirname(hdf)

    data = _extract_HDF_layer_data(hdf, layer_indexs)
    layer_indexs = core.enf_list(layer_indexs)

    for layer_index in layer_indexs:
        dataset = data[layer_index]
        outpath = core.create_outname(outdir, hdf, str(layer_index), "tif")

        print("creating dataset at {0}".format(outpath))

        _gdal_dataset_to_tif(dataset, outpath,
                             cust_projection=datatype.projectionTXT,
                             cust_geotransform=datatype.geotransform,
                             force_custom=force_custom,
                             nodata_value=nodata_value)

        output_filelist.append(outpath)

    return output_filelist
def toa_radiance_8(band_nums, meta_path, outdir=False):
    """
    Top of Atmosphere radiance (in Watts/(square meter * steradians * micrometers))
    conversion for Landsat 8 data

    To be performed on raw Landsat 8 level 1 data. See the link below for details:
    http://landsat.usgs.gov/Landsat8_Using_Product.php

    Inputs:
      band_nums     A list of desired band numbers such as [3, 4, 5]
      meta_path     The full filepath to the metadata file for those bands
      outdir        Output directory to save converted files.
    """

    band_nums = core.enf_list(band_nums)
    band_nums = map(str, band_nums)
    meta = grab_meta(meta_path)

    for band_num in band_nums:
        band_path = meta_path.replace("MTL.txt", "B{0}.tif".format(band_num))
        Qcal = arcpy.Raster(band_path)

        Ml = getattr(meta, "RADIANCE_MULT_BAND_" + band_num)    # multiplicative scaling factor
        Al = getattr(meta, "RADIANCE_ADD_BAND_" + band_num)     # additive rescaling factor

        TOA_rad = (Qcal * Ml) + Al

        metaname = core.create_outname(outdir, meta_path, "TOA-Rad", "txt")
        shutil.copyfile(meta_path, metaname)

        outname = core.create_outname(outdir, band_path, "TOA-Rad", "tif")
        TOA_rad.save(outname)

        print("Saved toa_radiance at {0}".format(outname))

    return
def ndvi_457(Band4, Band3, outdir=False):
    """
    Calculates a normalized difference vegetation index on Landsat 4/5/7 TM/ETM+ data.

    To be performed on raw or processed Landsat 4/5/7 TM/ETM+ data, preferably TOA or
    Surface Reflectance.

    Inputs:
      Band4         The full filepath to the band 4 tiff file, the TM/ETM+ NIR band
      Band3         The full filepath to the band 3 tiff file, the TM/ETM+ Visible Red band
      outdir        Output directory to save NDVI tifs
    """

    Band3 = os.path.abspath(Band3)
    Band4 = os.path.abspath(Band4)

    # set the input bands to float
    Red = arcpy.sa.Float(Band3)
    NIR = arcpy.sa.Float(Band4)

    # calculate the NDVI
    L457_NDVI = (NIR - Red) / (NIR + Red)

    # create the output name and save the NDVI tiff
    name = os.path.split(Band3)[1]
    ndvi_name = name.replace("_B3", "")

    if outdir:
        outdir = os.path.abspath(outdir)
        outname = core.create_outname(outdir, ndvi_name, "NDVI", "tif")
    else:
        folder = os.path.split(Band3)[0]
        outname = core.create_outname(folder, ndvi_name, "NDVI", "tif")

    L457_NDVI.save(outname)

    print("saved ndvi_457 at {0}".format(outname))
    return outname
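# A minimal usage sketch for ndvi_457 (hypothetical file paths), assuming it is importable
# from dnppy's landsat module and that the bands were first converted to TOA reflectance:
#
#   from dnppy import landsat
#   ndvi_tif = landsat.ndvi_457("C:/data/LT50410362011240PAC01_B4_TOA_Ref.tif",
#                               "C:/data/LT50410362011240PAC01_B3_TOA_Ref.tif",
#                               outdir="C:/data/ndvi")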
def ndvi_8(Band5, Band4, outdir=False):
    """
    Calculates a normalized difference vegetation index on Landsat 8 OLI data.

    To be performed on raw or processed Landsat 8 OLI data, preferably TOA or
    Surface Reflectance.

    Inputs:
      Band5         The full filepath to the band 5 tiff file, the OLI NIR band
      Band4         The full filepath to the band 4 tiff file, the OLI Visible Red band
      outdir        Output directory to save NDVI tifs
    """

    Band4 = os.path.abspath(Band4)
    Band5 = os.path.abspath(Band5)

    # set the input bands to float
    Red = arcpy.sa.Float(Band4)
    NIR = arcpy.sa.Float(Band5)

    # calculate the NDVI
    L8_NDVI = (NIR - Red) / (NIR + Red)

    # create the output name and save the NDVI tiff
    name = os.path.split(Band4)[1]
    ndvi_name = name.replace("_B4", "")

    if outdir:
        outdir = os.path.abspath(outdir)
        outname = core.create_outname(outdir, ndvi_name, "NDVI", "tif")
    else:
        folder = os.path.split(Band4)[0]
        outname = core.create_outname(folder, ndvi_name, "NDVI", "tif")

    L8_NDVI.save(outname)

    print("saved ndvi_8 at {0}".format(outname))
    return outname
def extract_MPE_NetCDF(netcdf_list, layer_indexs, outdir, area):
    """
    Extracts MPE data from its native NetCDF format.

    :param netcdf_list:     list of netcdf files or directory with netcdfs
    :param layer_indexs:    list of integer layer indices
    :param outdir:          directory to place outputs
    :param area:            presently only supports "CONUS"

    :return: A list of all files created as output
    """

    netcdf_list = core.enf_filelist(netcdf_list)
    output_filelist = []

    # load the MPE datatype from the library
    dtype = datatype_library()["MPE_HRAP_{0}".format(area)]

    # for every netcdf file in the input list
    for netcdf in netcdf_list:

        data = _extract_NetCDF_layer_data(netcdf, layer_indexs)

        for layer_index in layer_indexs:

            dataset = data[layer_index]
            outpath = core.create_outname(outdir, netcdf, str(layer_index), "tif")

            print("creating dataset at {0}".format(outpath))

            _gdal_dataset_to_tif(dataset, outpath,
                                 cust_projection=dtype.projectionTXT,
                                 cust_geotransform=dtype.geotransform,
                                 force_custom=False,
                                 nodata_value=-1)

            output_filelist.append(outpath)

    return output_filelist
def ndvi_8(B5, B4, outdir=False):
    """
    Calculates a normalized difference vegetation index on Landsat 8 OLI data.

    To be performed on raw or processed Landsat 8 OLI data.

    Inputs:
      B5            The full filepath to the band 5 tiff file, the OLI NIR band
      B4            The full filepath to the band 4 tiff file, the OLI Visible Red band
      outdir        Output directory to save NDVI tifs
    """

    Red = Float(B4)
    NIR = Float(B5)

    L8_NDVI = (NIR - Red) / (NIR + Red)

    band_path = B4.replace("_B4", "")
    outname = core.create_outname(outdir, band_path, "NDVI", "tif")
    L8_NDVI.save(outname)

    print("saved ndvi_8 at {0}".format(outname))
    return
def surface_temp_8(band4_toa, meta_path, path_rad, nbt, sky_rad, outdir=False, L=0.5):
    """
    Calculates surface temperature from Landsat 8 OLI and TIRS data. Requires band 4 and 5
    Top-of-Atmosphere Reflectance tiffs and the unprocessed band 10 and 11 tiffs.

    Note: if the default values of 0, 1, and 0 are used for the Path Radiance, Narrowband
    Transmissivity, and Sky Radiance constants, atmospheric conditions will not be accounted
    for and the surface values may be off. Values are attainable using MODTRAN.

    :param band4_toa:   Filepath to the Band 4 Top-of-Atmosphere Reflectance tiff.
                        use landsat.toa_reflectance_8
    :param meta_path:   Filepath to the metadata file (ending in _MTL.txt)
    :param path_rad:    Path Radiance constant (default 0)
    :param nbt:         Narrowband Transmissivity constant (default 1)
    :param sky_rad:     Sky Radiance constant (default 0)
    :param outdir:      Path to the desired output folder. If left False the output tiff
                        will be placed in band4_toa's folder
    :param L:           Soil brightness correction factor, between 0 and 1. Used to calculate
                        the Soil Adjusted Vegetation Index. Default L = 0.5 works well in
                        most situations. When L = 0, SAVI = NDVI.

    :return surface_temp_8: Full filepath of tif created by this function
    """

    band4_toa = os.path.abspath(band4_toa)
    meta_path = os.path.abspath(meta_path)

    # grab metadata from the MTL file and set the pathnames for Band 5 TOA Reflectance
    # and the raw Band 10 and 11 tiffs
    meta = landsat_metadata(meta_path)
    band5_toa = band4_toa.replace("_B4_", "_B5_")
    band10 = meta_path.replace("_MTL.txt", "_B10.tif")
    band11 = band10.replace("_B10.tif", "_B11.tif")

    # Soil Adjusted Vegetation Index
    red = arcpy.sa.Float(band4_toa)
    nir = arcpy.sa.Float(band5_toa)

    savi = ((1 + L) * (nir - red)) / (L + (nir + red))

    # Leaf Area Index
    # assigns LAI for 0.1 <= SAVI <= 0.687
    lai_1 = ((arcpy.sa.Ln((0.69 - savi) / 0.59)) / (-0.91))
    # assigns LAI for SAVI >= 0.687
    lai_2 = arcpy.sa.Con(savi, lai_1, 6, "VALUE < 0.687")
    # assigns LAI for SAVI <= 0.1
    lai = arcpy.sa.Con(savi, lai_2, 0, "VALUE >= 0.1")

    # Narrow Band Emissivity
    remap = 0.97 + (0.0033 * lai)
    nbe = arcpy.sa.Con(lai, remap, 0.98, "VALUE <= 3")

    # get the radiance mult/add constants for bands 10 and 11
    Ml_10 = getattr(meta, "RADIANCE_MULT_BAND_10")
    Al_10 = getattr(meta, "RADIANCE_ADD_BAND_10")
    Ml_11 = getattr(meta, "RADIANCE_MULT_BAND_11")
    Al_11 = getattr(meta, "RADIANCE_ADD_BAND_11")

    # set values in the TIRS band tiffs to null
    null_10 = arcpy.sa.SetNull(band10, band10, "VALUE <= 1")
    null_11 = arcpy.sa.SetNull(band11, band11, "VALUE <= 1")

    # Initial Thermal Radiances
    itr_10 = (null_10 * Ml_10) + Al_10
    itr_11 = (null_11 * Ml_11) + Al_11

    # Corrected Thermal Radiances
    ctr_10 = ((itr_10 - path_rad) / nbt) - ((1 - nbe) * sky_rad)
    ctr_11 = ((itr_11 - path_rad) / nbt) - ((1 - nbe) * sky_rad)

    # get the K1 and K2 constants for bands 10 and 11
    K1_10 = getattr(meta, "K1_CONSTANT_BAND_10")
    K2_10 = getattr(meta, "K2_CONSTANT_BAND_10")
    K1_11 = getattr(meta, "K1_CONSTANT_BAND_11")
    K2_11 = getattr(meta, "K2_CONSTANT_BAND_11")

    # calculate surface temperature based on bands 10 and 11 and average them for final output
    st_10 = (K2_10 / (arcpy.sa.Ln(((nbe * K1_10) / ctr_10) + 1)))
    st_11 = (K2_11 / (arcpy.sa.Ln(((nbe * K1_11) / ctr_11) + 1)))

    st = (st_10 + st_11) / 2

    # create output name and save the Surface Temperature tiff
    tilename = getattr(meta, "LANDSAT_SCENE_ID")

    if outdir:
        outdir = os.path.abspath(outdir)
        surface_temp_8 = core.create_outname(outdir, tilename, "Surf_Temp", "tif")
    else:
        folder = os.path.split(band4_toa)[0]
        surface_temp_8 = core.create_outname(folder, tilename, "Surf_Temp", "tif")

    st.save(surface_temp_8)

    return surface_temp_8
def surface_temp_457(band3_toa, meta_path, path_rad, nbt, sky_rad, outdir=False, L=0.5):
    """
    Calculates surface temperature from Landsat 4/5 TM or 7 ETM+ data. Requires band 3 and 4
    Top-of-Atmosphere Reflectance tiffs and the unprocessed band 6 (or 6_VCID_1 for Landsat 7)
    tiff.

    Note: if the default values of 0, 1, and 0 are used for the Path Radiance, Narrowband
    Transmissivity, and Sky Radiance constants, atmospheric conditions will not be accounted
    for and the surface values may be off. Values are attainable using MODTRAN.

    :param band3_toa:   Filepath to the Band 3 Top-of-Atmosphere Reflectance tiff.
                        use landsat.toa_reflectance_457
    :param meta_path:   Filepath to the metadata file (ending in _MTL.txt)
    :param path_rad:    Path Radiance constant (default 0)
    :param nbt:         Narrowband Transmissivity constant (default 1)
    :param sky_rad:     Sky Radiance constant (default 0)
    :param outdir:      Path to the desired output folder. If left False the output tiff
                        will be placed in band3_toa's folder
    :param L:           Soil brightness correction factor, between 0 and 1. Used to calculate
                        the Soil Adjusted Vegetation Index. Default L = 0.5 works well in
                        most situations. When L = 0, SAVI = NDVI.

    :return surface_temp_457: Full filepath of tif created by this function
    """

    band3_toa = os.path.abspath(band3_toa)
    meta_path = os.path.abspath(meta_path)

    # set the pathname for band 4
    band4_toa = band3_toa.replace("_B3_", "_B4_")

    # grab metadata from the MTL file and identify the spacecraft ID
    meta = landsat_metadata(meta_path)
    spacecraft = getattr(meta, "SPACECRAFT_ID")

    # set the band 6 number, K1 and K2 thermal constants, and band 6 pathname based on spacecraft ID
    if "4" in spacecraft or "5" in spacecraft:
        band_num = "6"
        K1 = 607.76
        K2 = 1260.56
        band6 = meta_path.replace("_MTL.txt", "_B6.tif")
    elif "7" in spacecraft:
        band_num = "6_VCID_1"
        K1 = 666.09
        K2 = 1282.71
        band6 = meta_path.replace("_MTL.txt", "_B6_VCID_1.tif")
    else:
        print("Enter the MTL file corresponding to a Landsat 4, 5, or 7 dataset")
        raise Exception("This function only works for Landsat 4, 5, or 7 data")

    # open the metadata text file and read to set the scene's tilename
    f = open(meta_path)
    MText = f.read()
    f.close()

    if "PRODUCT_CREATION_TIME" in MText:
        tilename = getattr(meta, "BAND1_FILE_NAME")
    else:
        tilename = getattr(meta, "LANDSAT_SCENE_ID")

    # Soil Adjusted Vegetation Index
    red = arcpy.sa.Float(band3_toa)
    nir = arcpy.sa.Float(band4_toa)

    savi = ((1 + L) * (nir - red)) / (L + (nir + red))

    # Leaf Area Index
    # assigns LAI for 0.1 <= SAVI <= 0.687
    lai_1 = ((arcpy.sa.Ln((0.69 - savi) / 0.59)) / (-0.91))
    # assigns LAI for SAVI >= 0.687
    lai_2 = arcpy.sa.Con(savi, lai_1, 6, "VALUE < 0.687")
    # assigns LAI for SAVI <= 0.1
    lai = arcpy.sa.Con(savi, lai_2, 0, "VALUE >= 0.1")

    # Narrow Band Emissivity
    remap = 0.97 + (0.0033 * lai)
    nbe = arcpy.sa.Con(lai, remap, 0.98, "VALUE <= 3")

    # get the radiance mult/add constants for band 6
    Ml = getattr(meta, "RADIANCE_MULT_BAND_{0}".format(band_num))
    Al = getattr(meta, "RADIANCE_ADD_BAND_{0}".format(band_num))

    # set values in the thermal band tiff to null
    null = arcpy.sa.SetNull(band6, band6, "VALUE <= 1")

    # Initial Thermal Radiance
    itr = (null * Ml) + Al

    # Corrected Thermal Radiance
    ctr = ((itr - path_rad) / nbt) - ((1 - nbe) * sky_rad)

    # calculate surface temperature
    st = (K2 / (arcpy.sa.Ln(((nbe * K1) / ctr) + 1)))

    # create output name and save the surface temperature tiff
    if outdir:
        outdir = os.path.abspath(outdir)
        surface_temp_457 = core.create_outname(outdir, tilename, "Surf_Temp", "tif")
    else:
        folder = os.path.split(band3_toa)[0]
        surface_temp_457 = core.create_outname(folder, tilename, "Surf_Temp", "tif")

    st.save(surface_temp_457)

    return surface_temp_457
def toa_reflectance_457(band_nums, meta_path, outdir=False):
    """
    This function is used to convert Landsat 4, 5, or 7 pixel values from digital numbers
    to Top-of-Atmosphere Reflectance.

    To be performed on raw Landsat 4, 5, or 7 data.

    Inputs:
      band_nums     A list of desired band numbers such as [3,4,5]
      meta_path     The full filepath to the metadata file for those bands
      outdir        Output directory to save converted files. If left False it will save
                    output files in the same directory as input files.
    """

    outlist = []

    band_nums = core.enf_list(band_nums)
    band_nums = map(str, band_nums)

    # metadata format was changed August 29, 2012. This tool can process either the new or old format
    f = open(meta_path)
    MText = f.read()

    meta_path = os.path.abspath(meta_path)
    metadata = grab_meta(meta_path)

    # the presence of a PRODUCT_CREATION_TIME category is used to identify old metadata
    # if this is not present, the metadata is considered new.
    # Band6length refers to the length of the Band 6 name string. In the new metadata this string is longer
    if "PRODUCT_CREATION_TIME" in MText:
        Meta = "oldMeta"
        Band6length = 2
    else:
        Meta = "newMeta"
        Band6length = 8

    # the tilename is located using the newMeta/oldMeta indices and the date of capture is recorded
    if Meta == "newMeta":
        TileName = getattr(metadata, "LANDSAT_SCENE_ID")
        year = TileName[9:13]
        jday = TileName[13:16]
        date = getattr(metadata, "DATE_ACQUIRED")
    elif Meta == "oldMeta":
        TileName = getattr(metadata, "BAND1_FILE_NAME")
        year = TileName[13:17]
        jday = TileName[17:20]
        date = getattr(metadata, "ACQUISITION_DATE")

    # the spacecraft from which the imagery was captured is identified
    # this info determines the solar exoatmospheric irradiance (ESun) for each band
    spacecraft = getattr(metadata, "SPACECRAFT_ID")

    if "7" in spacecraft:
        ESun = (1969.0, 1840.0, 1551.0, 1044.0, 255.700, 0., 82.07, 1368.00)
        TM_ETM_bands = ['1', '2', '3', '4', '5', '7', '8']
    elif "5" in spacecraft:
        ESun = (1957.0, 1826.0, 1554.0, 1036.0, 215.0, 0., 80.67)
        TM_ETM_bands = ['1', '2', '3', '4', '5', '7']
    elif "4" in spacecraft:
        ESun = (1957.0, 1825.0, 1557.0, 1033.0, 214.9, 0., 80.72)
        TM_ETM_bands = ['1', '2', '3', '4', '5', '7']
    else:
        arcpy.AddError("This tool only works for Landsat 4, 5, or 7")
        raise arcpy.ExecuteError()

    # determining if the year is a leap year and setting the days in year accordingly
    if float(year) % 4 == 0:
        DIY = 366.
    else:
        DIY = 365.

    # using the date to determine the earth-sun distance
    theta = 2 * math.pi * float(jday) / DIY
    dSun2 = (1.00011 + 0.034221 * math.cos(theta) + 0.001280 * math.sin(theta) +
             0.000719 * math.cos(2 * theta) + 0.000077 * math.sin(2 * theta))

    SZA = 90. - float(getattr(metadata, "SUN_ELEVATION"))

    # calculating values for each band
    for band_num in band_nums:
        if band_num in TM_ETM_bands:
            print("Processing Band {0}".format(band_num))

            pathname = meta_path.replace("MTL.txt", "B{0}.tif".format(band_num))
            Oraster = arcpy.Raster(pathname)
            null_raster = arcpy.sa.SetNull(Oraster, Oraster, "VALUE = 0")

            # using the oldMeta/newMeta indices to pull the min/max for radiance/Digital numbers
            if Meta == "newMeta":
                LMax = getattr(metadata, "RADIANCE_MAXIMUM_BAND_{0}".format(band_num))
                LMin = getattr(metadata, "RADIANCE_MINIMUM_BAND_{0}".format(band_num))
                QCalMax = getattr(metadata, "QUANTIZE_CAL_MAX_BAND_{0}".format(band_num))
                QCalMin = getattr(metadata, "QUANTIZE_CAL_MIN_BAND_{0}".format(band_num))
            elif Meta == "oldMeta":
                LMax = getattr(metadata, "LMAX_BAND{0}".format(band_num))
                LMin = getattr(metadata, "LMIN_BAND{0}".format(band_num))
                QCalMax = getattr(metadata, "QCALMAX_BAND{0}".format(band_num))
                QCalMin = getattr(metadata, "QCALMIN_BAND{0}".format(band_num))

            Radraster = (((LMax - LMin) / (QCalMax - QCalMin)) * (null_raster - QCalMin)) + LMin
            Oraster = 0
            del null_raster

            # calculating top-of-atmosphere reflectance from radiance
            Refraster = (math.pi * Radraster * dSun2) / (
                ESun[int(band_num[0]) - 1] * math.cos(SZA * (math.pi / 180)))

            # construct output names for each band based on whether outdir is set (default is False)
            if outdir:
                outdir = os.path.abspath(outdir)
                BandPath = core.create_outname(outdir, pathname, "TOA_Ref", "tif")
            else:
                folder = os.path.split(meta_path)[0]
                BandPath = core.create_outname(folder, pathname, "TOA_Ref", "tif")

            Refraster.save(BandPath)
            outlist.append(BandPath)

            del Refraster, Radraster

            print("Reflectance Calculated for Band {0}".format(band_num))

        # if listed band is not a TM/ETM+ sensor band, skip it and print message
        else:
            print("Can only perform reflectance conversion on TM/ETM+ sensor bands")
            print("Skipping band {0}".format(band_num))

    f.close()
    return outlist
def many_stats(rasterlist, outdir, outname, saves=['AVG', 'NUM', 'STD', 'SUM'],
               low_thresh=None, high_thresh=None, numtype='float32'):
    """
    Take statistics across many input rasters

    This function is used to take statistics on large groups of rasters with identical
    spatial extents. Similar to Rolling_Raster_Stats

    Inputs:
      rasterlist    list of raster filepaths for which to take statistics
      outdir        Directory where output should be stored.
      saves         which statistics to save in a raster. Defaults to
                    ['AVG', 'NUM', 'STD', 'SUM'].
      low_thresh    values below low_thresh are assumed erroneous and set to NoData
      high_thresh   values above high_thresh are assumed erroneous and set to NoData.
      numtype       type of numerical value. defaults to 32bit float.
    """

    if not os.path.isdir(outdir):
        os.makedirs(outdir)

    rasterlist = enf_rastlist(rasterlist)

    # build the empty numpy array based on size of first raster
    temp_rast, metadata = to_numpy(rasterlist[0])
    xs, ys = temp_rast.shape
    zs = len(rasterlist)
    rast_3d = numpy.zeros((xs, ys, zs))

    metadata.NoData_Value = numpy.nan

    # open up the initial figure
    rastfig = raster_fig(temp_rast)

    # populate the 3d matrix with values from all rasters
    for i, raster in enumerate(rasterlist):

        # print a status and open a figure
        print('working on file {0}'.format(os.path.basename(raster)))
        new_rast, new_meta = to_numpy(raster, numtype)
        new_rast = new_rast.data

        if not new_rast.shape == (xs, ys):
            print(new_rast.shape)

        # set rasters to have 'nan' NoData_Value
        if new_meta.NoData_Value != metadata.NoData_Value:
            new_rast[new_rast == new_meta.NoData_Value] = metadata.NoData_Value

        # set values outside thresholds to nodata values
        if low_thresh is not None:
            new_rast[new_rast < low_thresh] = metadata.NoData_Value
        if high_thresh is not None:
            new_rast[new_rast > high_thresh] = metadata.NoData_Value

        new_rast = numpy.ma.masked_array(new_rast, numpy.isnan(new_rast))

        # display a figure
        rastfig.update_fig(new_rast)

        rast_3d[:, :, i] = new_rast

    # build up our statistics by masking nan values and performing matrix operations
    rastfig.close_fig()
    rast_3d_masked = numpy.ma.masked_array(rast_3d, numpy.isnan(rast_3d))

    if "AVG" in saves:
        avg_rast = numpy.mean(rast_3d_masked, axis=2)
        avg_rast = numpy.array(avg_rast)
        rastfig = raster_fig(avg_rast, title="Average")

        avg_name = core.create_outname(outdir, outname, 'AVG', 'tif')
        print("Saving AVERAGE output raster as {0}".format(avg_name))
        from_numpy(avg_rast, metadata, avg_name)
        rastfig.close_fig()
        del avg_rast

    if "STD" in saves:
        std_rast = numpy.std(rast_3d_masked, axis=2)
        std_rast = numpy.array(std_rast)
        rastfig = raster_fig(std_rast, title="Standard Deviation")

        std_name = core.create_outname(outdir, outname, 'STD', 'tif')
        print("Saving STANDARD DEVIATION output raster as {0}".format(std_name))
        from_numpy(std_rast, metadata, std_name)
        rastfig.close_fig()
        del std_rast

    if "NUM" in saves:
        num_rast = (numpy.zeros((xs, ys)) + zs) - numpy.sum(rast_3d_masked.mask, axis=2)
        num_rast = numpy.array(num_rast)
        rastfig = raster_fig(num_rast, title="Good pixel count (NUM)")
        rastfig.close_fig()

        num_name = core.create_outname(outdir, outname, 'NUM', 'tif')
        print("Saving NUMBER output raster as {0}".format(num_name))
        from_numpy(num_rast, metadata, num_name)
        rastfig.close_fig()
        del num_rast

    if "SUM" in saves:
        sum_rast = numpy.sum(rast_3d_masked, axis=2)
        sum_rast = numpy.array(sum_rast)
        rastfig = raster_fig(sum_rast, title="Sum (SUM)")
        rastfig.close_fig()

        sum_name = core.create_outname(outdir, outname, 'SUM', 'tif')
        print("Saving SUM output raster as {0}".format(sum_name))
        from_numpy(sum_rast, metadata, sum_name)
        rastfig.close_fig()
        del sum_rast

    rastfig.close_fig()
    return
def GCMO_NetCDF(netcdf_list, variable, outdir):
    """
    Extracts all time layers from a "Global Climate Model Output" NetCDF layer

    Inputs:
      netcdf_list   list of netcdfs from CORDEX climate distribution
      variable      the climate variable of interest (tsmax, tsmin, etc)
      outdir        output directory to save files.
    """

    if not os.path.exists(outdir):
        os.makedirs(outdir)

    netcdf_list = core.enf_list(netcdf_list)

    for netcdf in netcdf_list:
        # get netcdf properties object
        props = arcpy.NetCDFFileProperties(netcdf)

        print("finding dimensions")
        dims = props.getDimensions()
        for dim in dims:
            print("{0} {1}".format(dim, props.getDimensionSize(dim)))

        # make sure the variable is in this netcdf
        if variable:
            if not variable in props.getVariables():
                print("Valid variables for this file include {0}".format(props.getVariables()))
                raise Exception("Variable '{0}' is not in this netcdf!".format(variable))

        for dim in dims:
            if dim == "time":

                # set other dimensions
                x_dim = "lon"
                y_dim = "lat"
                band_dim = ""
                valueSelectionMethod = "BY_VALUE"

                size = props.getDimensionSize(dim)
                for i in range(size):

                    # sanitize the dimname for invalid characters
                    dimname = props.getDimensionValue(dim, i).replace(" 12:00:00 PM", "")
                    dimname = dimname.replace("/", "-").replace(" ", "_")

                    dim_value = [["time", props.getDimensionValue(dim, i)]]
                    print("extracting '{0}' from '{1}'".format(variable, dim_value))

                    outname = core.create_outname(outdir, netcdf, dimname, 'tif')

                    arcpy.MakeNetCDFRasterLayer_md(netcdf, variable, x_dim, y_dim, "temp",
                                                   band_dim, dim_value, valueSelectionMethod)
                    arcpy.CopyRaster_management("temp", outname, "", "", "", "NONE", "NONE", "")

    return
def toa_radiance_457(band_nums, meta_path, outdir=None):
    """
    Top of Atmosphere radiance (in Watts/(square meter x steradians x micrometers))
    conversion for Landsat 4, 5, and 7 data.

    To be performed on raw Landsat 4, 5, or 7 level 1 data.

    :param band_nums:   A list of desired band numbers such as [3, 4, 5]
    :param meta_path:   The full filepath to the metadata file for those bands
    :param outdir:      Output directory to save converted files.

    :return output_filelist: List of filepaths created by this function.
    """

    output_filelist = []
    meta_path = os.path.abspath(meta_path)

    band_nums = core.enf_list(band_nums)
    band_nums = map(str, band_nums)

    # metadata format was changed August 29, 2012. This tool can process either the new or old format
    f = open(meta_path)
    MText = f.read()

    metadata = landsat_metadata(meta_path)

    # the presence of a PRODUCT_CREATION_TIME category is used to identify old metadata
    # if this is not present, the metadata is considered new.
    # Band6length refers to the length of the Band 6 name string. In the new metadata this string is longer
    if "PRODUCT_CREATION_TIME" in MText:
        Meta = "oldMeta"
        Band6length = 2
    else:
        Meta = "newMeta"
        Band6length = 8

    # the tilename is located using the newMeta/oldMeta indices and the date of capture is recorded
    if Meta == "newMeta":
        TileName = getattr(metadata, "LANDSAT_SCENE_ID")
        year = TileName[9:13]
        jday = TileName[13:16]
        date = getattr(metadata, "DATE_ACQUIRED")
    elif Meta == "oldMeta":
        TileName = getattr(metadata, "BAND1_FILE_NAME")
        year = TileName[13:17]
        jday = TileName[17:20]
        date = getattr(metadata, "ACQUISITION_DATE")

    # the spacecraft from which the imagery was captured is identified
    # this info determines the solar exoatmospheric irradiance (ESun) for each band
    spacecraft = getattr(metadata, "SPACECRAFT_ID")

    if "7" in spacecraft:
        ESun = (1969.0, 1840.0, 1551.0, 1044.0, 255.700, 0., 82.07, 1368.00)
        TM_ETM_bands = ['1', '2', '3', '4', '5', '7', '8']
    elif "5" in spacecraft:
        ESun = (1957.0, 1826.0, 1554.0, 1036.0, 215.0, 0., 80.67)
        TM_ETM_bands = ['1', '2', '3', '4', '5', '7']
    elif "4" in spacecraft:
        ESun = (1957.0, 1825.0, 1557.0, 1033.0, 214.9, 0., 80.72)
        TM_ETM_bands = ['1', '2', '3', '4', '5', '7']
    else:
        arcpy.AddError("This tool only works for Landsat 4, 5, or 7")
        raise arcpy.ExecuteError()

    # calculating values for each band
    for band_num in band_nums:
        if band_num in TM_ETM_bands:
            print("Processing Band {0}".format(band_num))

            pathname = meta_path.replace("MTL.txt", "B{0}.tif".format(band_num))
            Oraster = arcpy.Raster(pathname)
            null_raster = arcpy.sa.SetNull(Oraster, Oraster, "VALUE = 0")

            # using the oldMeta/newMeta indices to pull the min/max for radiance/Digital numbers
            if Meta == "newMeta":
                LMax = getattr(metadata, "RADIANCE_MAXIMUM_BAND_{0}".format(band_num))
                LMin = getattr(metadata, "RADIANCE_MINIMUM_BAND_{0}".format(band_num))
                QCalMax = getattr(metadata, "QUANTIZE_CAL_MAX_BAND_{0}".format(band_num))
                QCalMin = getattr(metadata, "QUANTIZE_CAL_MIN_BAND_{0}".format(band_num))
            elif Meta == "oldMeta":
                LMax = getattr(metadata, "LMAX_BAND{0}".format(band_num))
                LMin = getattr(metadata, "LMIN_BAND{0}".format(band_num))
                QCalMax = getattr(metadata, "QCALMAX_BAND{0}".format(band_num))
                QCalMin = getattr(metadata, "QCALMIN_BAND{0}".format(band_num))

            Radraster = (((LMax - LMin) / (QCalMax - QCalMin)) * (null_raster - QCalMin)) + LMin
            Oraster = 0
            del null_raster

            band_rad = "{0}_B{1}".format(TileName, band_num)

            # create the output name and save the TOA radiance tiff
            if outdir is not None:
                outdir = os.path.abspath(outdir)
                outname = core.create_outname(outdir, band_rad, "TOA_Rad", "tif")
            else:
                folder = os.path.split(meta_path)[0]
                outname = core.create_outname(folder, band_rad, "TOA_Rad", "tif")

            Radraster.save(outname)
            output_filelist.append(outname)

            del Radraster

            print("toa radiance saved for Band {0}".format(band_num))

        # if listed band is not a TM/ETM+ sensor band, skip it and print message
        else:
            print("Can only perform radiance conversion on TM/ETM+ sensor bands")
            print("Skipping band {0}".format(band_num))

    f.close()
    return output_filelist
def gap_fill_temporal(rasterlist, outdir=None, continuous=True, NoData_Value=None,
                      numpy_datatype="float32"):
    """
    This function is designed to input a time sequence of rasters with partial voids and
    output a copy of each input image with every pixel equal to the last good value taken.
    This function will step forward in time through each raster and fill voids from the
    values of previous rasters. The resulting output image will contain all the data that
    was in the original image, with the voids filled with older data. A second output image
    will be generated where the pixel values are equal to the age of each pixel in the image.
    So if a void was filled with data that's 5 days old, the "age" raster will have a value
    of "5" at that location.

    :param rasterlist:      A list of filepaths for rasters with which to fill gaps. THESE
                            IMAGES MUST BE ORDERED FROM OLDEST TO NEWEST (ascending time).
    :param outdir:          the path to the desired output folder, if left "None", outputs
                            will be saved right next to respective inputs.
    :param continuous:      if "True" an output raster will be generated for every single
                            input raster, which can be used to fill gaps in an entire time
                            series. So, for example, output raster 2 will have all the good
                            points in input raster 2, with gaps filled with data from raster 1.
                            Output raster 3 will then be gap filled with output raster 2, which
                            might contain some fill values from raster 1, and so forth. If
                            "False" an output raster will only be generated for the LAST raster
                            in the input rasterlist.
    :param numpy_datatype:  the numpy datatype of the output raster. usually "float32"

    :return output_filelist:  returns a list of filepaths to new files created by this function.
    """

    # enforce the list of rasters to ensure it's sanitized
    rasterlist = enf_rastlist(rasterlist)

    # create an empty list to store output filepaths in
    output_filelist = []

    # grab the first raster, then start stepping through the list
    old_rast, old_meta = to_numpy(rasterlist[0])
    rastfig = raster_fig(old_rast)

    for i, araster in enumerate(rasterlist[1:]):

        new_rast, new_meta = to_numpy(araster)

        # combine new and old data and mask matrices
        outrast = new_rast
        outrast.data[new_rast.mask] = old_rast.data[new_rast.mask]
        outrast.mask[new_rast.mask] = old_rast.mask[new_rast.mask]

        # only save output if continuous is true or this is the last raster in the series
        if continuous is True or i == (len(rasterlist[1:]) - 1):

            # create output name and save it
            if outdir is None:
                this_outdir = os.path.dirname(araster)
            else:
                this_outdir = outdir

            # update the figure
            rastfig.update_fig(outrast)

            outpath = core.create_outname(this_outdir, araster, "gft", "tif")
            print("Filled gaps in {0}".format(os.path.basename(araster)))

            outrast = outrast.astype(numpy_datatype)
            from_numpy(outrast, new_meta, outpath, NoData_Value)
            output_filelist.append(outpath)

        # prepare for next time step by setting current to old
        old_rast = new_rast

    return output_filelist
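# Example usage for gap_fill_temporal (a minimal sketch with hypothetical filepaths;
# rasters must be listed oldest to newest, as the docstring above requires):
#
#   series = [r"C:\data\ndvi_day1.tif", r"C:\data\ndvi_day2.tif", r"C:\data\ndvi_day3.tif"]
#   filled = gap_fill_temporal(series, outdir=r"C:\data\filled", continuous=True)
#   # filled[i] corresponds to series[i + 1], gap-filled with the most recent prior data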
def toa_reflectance_8(band_nums, meta_path, outdir=False):
    """
    Converts Landsat 8 bands to Top-of-Atmosphere reflectance. To be performed
    on raw Landsat 8 level 1 data. See link below for details
    see here [http://landsat.usgs.gov/Landsat8_Using_Product.php]

    Inputs:
      band_nums   A list of desired band numbers such as [3,4,5]
      meta_path   The full filepath to the metadata file for those bands
      outdir      Output directory to save converted files. If left False it will save
                  output files in the same directory as input files.
    """

    outlist = []

    # enforce the list of band numbers and grab metadata from the MTL file
    band_nums = core.enf_list(band_nums)
    band_nums = map(str, band_nums)
    OLI_bands = ['1', '2', '3', '4', '5', '6', '7', '8', '9']

    meta_path = os.path.abspath(meta_path)
    meta = grab_meta(meta_path)

    # cycle through each band in the list for calculation, ensuring each is in the list of OLI bands
    for band_num in band_nums:
        if band_num in OLI_bands:

            # scrape data from the given file path and attributes in the MTL file
            band_path = meta_path.replace("MTL.txt", "B{0}.tif".format(band_num))
            Qcal = arcpy.Raster(band_path)

            Mp = getattr(meta, "REFLECTANCE_MULT_BAND_{0}".format(band_num))  # multiplicative scaling factor
            Ap = getattr(meta, "REFLECTANCE_ADD_BAND_{0}".format(band_num))   # additive rescaling factor
            SEA = getattr(meta, "SUN_ELEVATION") * (math.pi / 180)            # sun elevation angle theta_se

            # get rid of the zero values that show as the black background to avoid skewing values
            null_raster = arcpy.sa.SetNull(Qcal, Qcal, "VALUE = 0")

            # calculate top-of-atmosphere reflectance
            TOA_ref = (((null_raster * Mp) + Ap) / (math.sin(SEA)))

            # save the data to the automated name if outdir is given or in the parent folder if not
            if outdir:
                outdir = os.path.abspath(outdir)
                outname = core.create_outname(outdir, band_path, "TOA_Ref", "tif")
            else:
                folder = os.path.split(meta_path)[0]
                outname = core.create_outname(folder, band_path, "TOA_Ref", "tif")

            TOA_ref.save(outname)
            outlist.append(outname)
            print("Saved output at {0}".format(outname))

        # if listed band is not an OLI sensor band, skip it and print message
        else:
            print("Can only perform reflectance conversion on OLI sensor bands")
            print("Skipping band {0}".format(band_num))

    return outlist
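# Example usage for toa_reflectance_8 (a minimal sketch; the scene ID and paths are
# hypothetical and assume the Landsat 8 band tifs sit next to their MTL file):
#
#   mtl8 = r"C:\landsat8\LC80410362014232LGN00_MTL.txt"
#   ref_tifs = toa_reflectance_8([3, 4, 5], mtl8, outdir=r"C:\landsat8\toa_ref")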
def spatially_match(snap_raster, rasterlist, outdir, NoData_Value=False, resamp_type=False):
    """
    Prepares input rasters for further numerical processing

    This function simply ensures all rasters in "rasterlist" are identically projected
    and have the same cell size, then calls the raster.clip_and_snap function to ensure
    that the cells are perfectly coincident and that the total spatial extents of the
    images are identical, even when NoData values are considered. This is useful because
    it allows the two images to be passed on for numerical processing as nothing more
    than matrices of values, and the user can be sure that any index in any matrix is
    exactly coincident with the same index in any other matrix.

    This is especially important to use when comparing different datasets from different
    sources outside arcmap, for example MODIS and Landsat data with an ASTER DEM.

    inputs:
      snap_raster   raster to which all other images will be snapped
      rasterlist    list of rasters, a single raster, or a directory full of tiffs which
                    will be clipped to the extent of "snap_raster" and aligned such that
                    the cells are perfectly coincident.
      outdir        the output directory to save newly created spatially matched tifs.
      resamp_type   The resampling type to use if images are not identical cell sizes.
                    "NEAREST", "BILINEAR", and "CUBIC" are the most common.

    this function automatically invokes
      clip_and_snap
      project_resample
    """

    # sanitize inputs and create output directories
    tempdir = os.path.join(outdir, 'temp')

    if not os.path.isdir(outdir):
        os.makedirs(outdir)
    if not os.path.isdir(tempdir):
        os.makedirs(tempdir)

    rasterlist = enf_rastlist(rasterlist)
    core.exists(snap_raster)
    usetemp = False

    # set the snap raster environment in arcmap.
    arcpy.env.snapRaster = snap_raster

    print('Loading snap raster {0}'.format(snap_raster))
    _, snap_meta = to_numpy(snap_raster)
    print('Bounds of rectangle to define boundaries: [{0}]'.format(snap_meta.rectangle))

    # for every raster in the raster list, snap rasters and clip.
    for rastname in rasterlist:

        _, meta = to_numpy(rastname)
        head, tail = os.path.split(rastname)

        if snap_meta.projection.projectionName != meta.projection.projectionName:
            print('Projection discrepancy found. Reprojecting...')
            project_resample(rastname, snap_raster, tempdir, resamp_type)
            tempname = core.create_outname(tempdir, tail, "p")
            usetemp = True

        # define an output name and run the clip_and_snap function on formatted tifs
        outname = core.create_outname(outdir, rastname, "sm")

        # if a temporary file was created in previous steps, use that one for clip and snap
        if usetemp:
            clip_and_snap(snap_raster, tempname, outname, NoData_Value)
        else:
            clip_and_snap(snap_raster, rastname, outname, NoData_Value)

        print('Finished matching raster {0}'.format(rastname))

    return
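# Example usage for spatially_match (a minimal sketch with hypothetical inputs; here
# "dem.tif" acts as the snap raster and each listed raster is clipped and aligned to it):
#
#   spatially_match(r"C:\data\dem.tif",
#                   [r"C:\data\modis_lst.tif", r"C:\data\landsat_ndvi.tif"],
#                   outdir=r"C:\data\matched",
#                   resamp_type="BILINEAR")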
def project_resample(filelist, reference_file, outdir=False, resampling_type=None, cell_size=None):
    """
    Wrapper for multiple arcpy projecting functions. Projects to reference file

    Inputs a filelist and a reference file, then projects all rasters or feature classes
    in the filelist to match the projection of the reference file. Writes new files with
    a "_p" appended to the end of the input filenames. This also will perform resampling.

    Inputs:
      filelist          list of files to be projected
      outdir            optional desired output directory. If none is specified, output
                        files will be named with '_p' as a suffix.
      reference_file    Either a file with the desired projection, or a .prj file.
      resampling_type   exactly as the input for arcmap's ProjectRaster_management function
      cell_size         exactly as the input for arcmap's ProjectRaster_management function

    Output:
      output_filelist   list of spatial reference objects for further checking.
    """

    output_filelist = []

    # sanitize inputs
    core.exists(reference_file)

    rasterlist = enf_rastlist(filelist)
    featurelist = core.enf_featlist(filelist)
    cleanlist = rasterlist + featurelist

    # ensure output directory exists
    if not os.path.exists(outdir):
        os.makedirs(outdir)

    # grab data about the spatial reference of the reference file (prj or otherwise)
    if reference_file[-3:] == 'prj':
        Spatial_Reference = arcpy.SpatialReference(reference_file)
    else:
        Spatial_Reference = arcpy.Describe(reference_file).spatialReference

    # determine cell size
    if cell_size is None:
        cx = arcpy.GetRasterProperties_management(reference_file, "CELLSIZEX").getOutput(0)
        cy = arcpy.GetRasterProperties_management(reference_file, "CELLSIZEY").getOutput(0)
        cell_size = "{0} {1}".format(cx, cy)

    # determine whether coordinate system is projected or geographic and print info
    if Spatial_Reference.type == 'Projected':
        print('Found {0} projected coord system'.format(Spatial_Reference.PCSName))
    else:
        print('Found {0} geographic coord system'.format(Spatial_Reference.GCSName))

    for filename in cleanlist:

        # create the output filename
        outname = core.create_outname(outdir, filename, 'p')
        output_filelist.append(Spatial_Reference)

        # use ProjectRaster_management for raster files
        if is_rast(filename):
            arcpy.ProjectRaster_management(filename, outname, Spatial_Reference,
                                           resampling_type, cell_size)
            print('Wrote projected and resampled file to {0}'.format(outname))

        # otherwise, use Project_management for featureclasses and featurelayers
        else:
            arcpy.Project_management(filename, outname, Spatial_Reference)
            print('Wrote projected file to {0}'.format(outname))

    print("finished projecting!")
    return output_filelist
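# Example usage for project_resample (a minimal sketch; filepaths are hypothetical and
# the reference file supplies the target projection and, if cell_size is omitted, the
# cell size):
#
#   project_resample([r"C:\data\modis_lst.tif", r"C:\data\watersheds.shp"],
#                    reference_file=r"C:\data\landsat_scene.tif",
#                    outdir=r"C:\data\projected",
#                    resampling_type="NEAREST")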
def many_stats(rasterlist, outdir, outname, saves=['AVG', 'NUM', 'STD', 'SUM'],
               low_thresh=None, high_thresh=None, numtype='float32'):
    """
    Take statistics across many input rasters

    this function is used to take statistics on large groups of rasters with identical
    spatial extents. Similar to Rolling_Raster_Stats

    Inputs:
      rasterlist    list of raster filepaths for which to take statistics
      outdir        Directory where output should be stored.
      outname       base name used when constructing output raster filenames.
      saves         which statistics to save as rasters. Defaults to all four,
                    ['AVG', 'NUM', 'STD', 'SUM'].
      low_thresh    values below low_thresh are assumed erroneous and set to NoData
      high_thresh   values above high_thresh are assumed erroneous and set to NoData.
      numtype       type of numerical value. defaults to 32bit float.
    """

    if not os.path.isdir(outdir):
        os.makedirs(outdir)

    rasterlist = enf_rastlist(rasterlist)

    # build the empty numpy array based on size of first raster
    temp_rast, metadata = to_numpy(rasterlist[0])
    xs, ys = temp_rast.shape
    zs = len(rasterlist)
    rast_3d = numpy.zeros((xs, ys, zs))

    metadata.NoData_Value = numpy.nan

    # open up the initial figure
    rastfig = raster_fig(temp_rast)

    # populate the 3d matrix with values from all rasters
    for i, raster in enumerate(rasterlist):

        # print a status and open a figure
        print('working on file {0}'.format(os.path.basename(raster)))
        new_rast, new_meta = to_numpy(raster, numtype)
        new_rast = new_rast.data

        if not new_rast.shape == (xs, ys):
            print(new_rast.shape)

        # set rasters to have 'nan' NoData_Value
        if new_meta.NoData_Value != metadata.NoData_Value:
            new_rast[new_rast == new_meta.NoData_Value] = metadata.NoData_Value

        # set values outside thresholds to nodata values
        if low_thresh is not None:
            new_rast[new_rast < low_thresh] = metadata.NoData_Value
        if high_thresh is not None:
            new_rast[new_rast > high_thresh] = metadata.NoData_Value

        new_rast = numpy.ma.masked_array(new_rast, numpy.isnan(new_rast))

        # display a figure
        rastfig.update_fig(new_rast)

        rast_3d[:, :, i] = new_rast

    # build up our statistics by masking nan values and performing matrix operations
    rastfig.close_fig()
    rast_3d_masked = numpy.ma.masked_array(rast_3d, numpy.isnan(rast_3d))

    if "AVG" in saves:
        avg_rast = numpy.mean(rast_3d_masked, axis=2)
        avg_rast = numpy.array(avg_rast)
        rastfig = raster_fig(avg_rast, title="Average")

        avg_name = core.create_outname(outdir, outname, 'AVG', 'tif')
        print("Saving AVERAGE output raster as {0}".format(avg_name))
        from_numpy(avg_rast, metadata, avg_name)

        rastfig.close_fig()
        del avg_rast

    if "STD" in saves:
        std_rast = numpy.std(rast_3d_masked, axis=2)
        std_rast = numpy.array(std_rast)
        rastfig = raster_fig(std_rast, title="Standard Deviation")

        std_name = core.create_outname(outdir, outname, 'STD', 'tif')
        print("Saving STANDARD DEVIATION output raster as {0}".format(std_name))
        from_numpy(std_rast, metadata, std_name)

        rastfig.close_fig()
        del std_rast

    if "NUM" in saves:
        num_rast = (numpy.zeros((xs, ys)) + zs) - numpy.sum(rast_3d_masked.mask, axis=2)
        num_rast = numpy.array(num_rast)
        rastfig = raster_fig(num_rast, title="Good pixel count (NUM)")
        rastfig.close_fig()

        num_name = core.create_outname(outdir, outname, 'NUM', 'tif')
        print("Saving NUMBER output raster as {0}".format(num_name))
        from_numpy(num_rast, metadata, num_name)

        rastfig.close_fig()
        del num_rast

    if "SUM" in saves:
        sum_rast = numpy.sum(rast_3d_masked, axis=2)
        sum_rast = numpy.array(sum_rast)
        rastfig = raster_fig(sum_rast, title="Sum")
        rastfig.close_fig()

        sum_name = core.create_outname(outdir, outname, 'SUM', 'tif')
        print("Saving SUM output raster as {0}".format(sum_name))
        from_numpy(sum_rast, metadata, sum_name)

        rastfig.close_fig()
        del sum_rast

    rastfig.close_fig()
    return
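# Example usage for many_stats (a minimal sketch with hypothetical inputs; all rasters
# must share the same extent and cell grid, e.g. outputs of spatially_match):
#
#   monthly = [r"C:\data\matched\lst_{0:02d}.tif".format(m) for m in range(1, 13)]
#   many_stats(monthly, outdir=r"C:\data\stats", outname="lst_2014",
#              saves=["AVG", "STD", "NUM"], low_thresh=0)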
def toa_radiance_8(band_nums, meta_path, outdir=None):
    """
    Top of Atmosphere radiance (in Watts/(square meter x steradians x micrometers))
    conversion for Landsat 8 data. To be performed on raw Landsat 8 level 1 data.
    See link below for details:
    see here http://landsat.usgs.gov/Landsat8_Using_Product.php

    :param band_nums:   A list of desired band numbers such as [3, 4, 5]
    :param meta_path:   The full filepath to the metadata file for those bands
    :param outdir:      Output directory to save converted files.

    :return output_filelist:  List of filepaths created by this function.
    """

    meta_path = os.path.abspath(meta_path)
    output_filelist = []

    # enforce list of band numbers and grab the metadata from the MTL file
    band_nums = core.enf_list(band_nums)
    band_nums = map(str, band_nums)
    meta = landsat_metadata(meta_path)

    OLI_bands = ['1', '2', '3', '4', '5', '6', '7', '8', '9']

    # loop through each band
    for band_num in band_nums:
        if band_num in OLI_bands:

            # create the band name
            band_path = meta_path.replace("MTL.txt", "B{0}.tif".format(band_num))
            Qcal = arcpy.Raster(band_path)
            null_raster = arcpy.sa.SetNull(Qcal, Qcal, "VALUE = 0")

            # scrape the attribute data
            Ml = getattr(meta, "RADIANCE_MULT_BAND_{0}".format(band_num))  # multiplicative scaling factor
            Al = getattr(meta, "RADIANCE_ADD_BAND_{0}".format(band_num))   # additive rescaling factor

            # calculate Top-of-Atmosphere radiance
            TOA_rad = (null_raster * Ml) + Al
            del null_raster

            # create the output name and save the TOA radiance tiff
            name = os.path.basename(meta_path)
            rad_name = name.replace("_MTL.txt", "_B{0}".format(band_num))

            if outdir is not None:
                outdir = os.path.abspath(outdir)
                outname = core.create_outname(outdir, rad_name, "TOA_Rad", "tif")
            else:
                folder = os.path.split(meta_path)[0]
                outname = core.create_outname(folder, rad_name, "TOA_Rad", "tif")

            TOA_rad.save(outname)
            output_filelist.append(outname)
            print("Saved toa_radiance at {0}".format(outname))

        # if listed band is not an OLI sensor band, skip it and print message
        else:
            print("Can only perform radiance conversion on OLI sensor bands")
            print("Skipping band {0}".format(band_num))

    return output_filelist
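# Example usage for toa_radiance_8 (a minimal sketch; the MTL path and output folder
# are hypothetical, and only OLI bands 1 through 9 are accepted):
#
#   mtl8 = r"C:\landsat8\LC80410362014232LGN00_MTL.txt"
#   rad8_tifs = toa_radiance_8([2, 3, 4], mtl8, outdir=r"C:\landsat8\toa_rad")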
def gap_fill_interpolate(in_rasterpath, out_rasterpath, model=None,
                         max_cell_dist=None, min_points=None):
    """
    Fills gaps in raster data by spatial kriging interpolation. This should only be used
    to fill small gaps in continuous datasets (like a DEM), and in instances where it
    makes sense. This function creates a feature class layer of points where pixels are
    not NoData, then performs a "kriging" interpolation on the point data to rebuild a
    uniform grid with a value at every location, thus filling gaps.

    WARNING: This script is processing intensive and may take a while to run even for
    modestly sized datasets.

    :param in_rasterpath:   input filepath to raster to fill gaps
    :param out_rasterpath:  filepath to store output gap filled raster in
    :param model:           type of kriging model to run, options include "SPHERICAL",
                            "CIRCULAR", "EXPONENTIAL", "GAUSSIAN", and "LINEAR"
    :param max_cell_dist:   The maximum number of cells to interpolate between; data gaps
                            which do not have at least "min_points" points within this
                            distance will not be filled.
    :param min_points:      Minimum number of surrounding points to use in determining
                            value at missing cell.

    :return out_rasterpath: Returns path to file created by this function
    """

    # check inputs
    if not is_rast(in_rasterpath):
        raise Exception("input raster path {0} is invalid!".format(in_rasterpath))

    if max_cell_dist is None:
        max_cell_dist = 10

    if min_points is None:
        min_points = 4

    if model is None:
        model = "SPHERICAL"

    # set environments
    arcpy.env.overwriteOutput = True
    arcpy.env.snapRaster = in_rasterpath
    arcpy.CheckOutExtension("Spatial")

    # make a point shapefile version of input raster
    print("Creating point grid from input raster")
    head, tail = os.path.split(in_rasterpath)
    shp_path = core.create_outname(head, tail, "shp", "shp")
    dbf_path = shp_path.replace(".shp", ".dbf")
    field = "GRID_CODE"
    arcpy.RasterToPoint_conversion(in_rasterpath, shp_path, "VALUE")

    # find the bad rows whose GRID_CODE is 1, these should be NoData
    print("Finding points with NoData entries")
    bad_row_FIDs = []

    rows = arcpy.UpdateCursor(dbf_path)
    for row in rows:
        grid_code = getattr(row, field)
        if grid_code == 1:
            bad_row_FIDs.append(row.FID)
    del rows

    # go back through the list and perform the deletions
    numbad = len(bad_row_FIDs)
    print("Deleting {0} points with NoData values".format(numbad))

    rows = arcpy.UpdateCursor(dbf_path)
    for i, row in enumerate(rows):
        if row.FID in bad_row_FIDs:
            rows.deleteRow(row)

    # set up the parameters for kriging
    print("Setting up for kriging")
    _, meta = to_numpy(in_rasterpath)

    cell_size = meta.cellHeight                          # from input raster
    lagSize = None
    majorRange = None
    partialSill = None
    nugget = None
    distance = float(cell_size) * float(max_cell_dist)   # fn input

    kmodel = arcpy.sa.KrigingModelOrdinary(model,
                                           lagSize=lagSize,
                                           majorRange=majorRange,
                                           partialSill=partialSill,
                                           nugget=nugget)

    kradius = arcpy.sa.RadiusFixed(distance=distance,
                                   minNumberOfPoints=min_points)

    # execute kriging
    print("Performing interpolation by kriging, this may take a while!")
    outkriging = arcpy.sa.Kriging(shp_path, field, kmodel,
                                  cell_size=cell_size,
                                  search_radius=kradius)
    outkriging.save(out_rasterpath)
    return out_rasterpath
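# Example usage for gap_fill_interpolate (a minimal sketch with hypothetical paths;
# suitable only for small gaps in continuous data such as a DEM, per the warning above):
#
#   gap_fill_interpolate(r"C:\data\dem_with_voids.tif",
#                        r"C:\data\dem_filled.tif",
#                        model="SPHERICAL", max_cell_dist=10, min_points=4)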
def make_cloud_mask_457(B2_TOA_Ref, outdir=None, Filter5Thresh=2.0, Filter6Thresh=2.0):
    """
    Creates a binary mask raster for removal of cloud-covered pixels in raw Landsat
    4, 5, and 7 bands.

    To be performed on Landsat 4, 5, or 7 data. Must be processed first with
    landsat.toa_reflectance_457 for bands 2, 3, 4, and 5 and landsat.atsat_bright_temp_457
    for band 6.

    Note that for this function to run properly, bands 2, 3, 4, 5, and 6 must each be in
    the same folder and have the correct naming convention output by the
    landsat.toa_reflectance_457 and landsat.atsat_bright_temp_457 functions
    (e.g. LT50410362011240PAC01_B2_TOA_Ref.tif, LT50410362011240PAC01_B6_Temp.tif).

    :param B2_TOA_Ref:      The full filepath to the band 2 top-of-atmosphere reflectance tiff file
    :param outdir:          Output directory for the cloud mask and TOA band tiffs
    :param Filter5Thresh:   Optional threshold value for Filter #5, default set at 2
    :param Filter6Thresh:   Optional threshold value for Filter #6, default set at 2

    :return cloud_mask_path: an arcpy Raster object referencing the newly created cloud mask
    """

    # discern if Landsat 4/5 or 7 for band 6 and designate rasters for bands 2, 3, 4, 5, and 6
    if "LT4" in B2_TOA_Ref or "LT5" in B2_TOA_Ref:
        band_6 = "6"
    elif "LE7" in B2_TOA_Ref:
        band_6 = "6_VCID_1"
    else:
        band_6 = None

    B2_path = os.path.abspath(B2_TOA_Ref)
    Band2 = arcpy.Raster(B2_path)

    band_path3 = B2_path.replace("B2_TOA_Ref.tif", "B3_TOA_Ref.tif")
    band_path4 = B2_path.replace("B2_TOA_Ref.tif", "B4_TOA_Ref.tif")
    band_path5 = B2_path.replace("B2_TOA_Ref.tif", "B5_TOA_Ref.tif")
    band_path6 = B2_path.replace("B2_TOA_Ref.tif", "B{0}_ASBTemp.tif".format(band_6))

    Band3 = arcpy.Raster(band_path3)
    Band4 = arcpy.Raster(band_path4)
    Band5 = arcpy.Raster(band_path5)
    Band6 = arcpy.Raster(band_path6)

    del band_path3, band_path4, band_path5, band_path6

    name = os.path.split(B2_path)[1]
    if outdir is None:
        outdir = os.path.split(B2_path)[0]

    # Establishing location of gaps in data. 0 = Gap, 1 = Data
    # This will be used multiple times in later steps
    print("Creating Gap Mask")
    GapMask = ((Band2 > 0) * (Band3 > 0) * (Band4 > 0) * (Band5 > 0) * (Band6 > 0))
    GapMask.save(os.path.join(outdir, "GapMask.tif"))

    print("First pass underway")

    # Filter 1 - Brightness Threshold--------------------------------------------
    Cloudmask = Band3 > .08

    # Filter 2 - Normalized Snow Difference Index--------------------------------
    NDSI = (Band2 - Band5) / (Band2 + Band5)
    Snow = (NDSI > .6) * Cloudmask
    Cloudmask *= (NDSI < .6)

    # Filter 3 - Temperature Threshold-------------------------------------------
    Cloudmask *= (Band6 < 300)

    # Filter 4 - Band 5/6 Composite----------------------------------------------
    Cloudmask *= (((1 - Band5) * Band6) < 225)
    Amb = (((1 - Band5) * Band6) > 225)

    # Filter 5 - Band 4/3 Ratio (eliminates vegetation)--------------------------
    # bright cloud tops are sometimes cut out by this filter; raising this
    # threshold (default 2.0) makes the algorithm more aggressive
    Cloudmask *= ((Band4 / Band3) < Filter5Thresh)
    Amb *= ((Band4 / Band3) > Filter5Thresh)

    # Filter 6 - Band 4/2 Ratio (eliminates vegetation)--------------------------
    # bright cloud tops are sometimes cut out by this filter; raising this
    # threshold (default 2.0) makes the algorithm more aggressive
    Cloudmask *= ((Band4 / Band2) < Filter6Thresh)
    Amb *= ((Band4 / Band2) > Filter6Thresh)

    # Filter 7 - Band 4/5 Ratio (eliminates desert features)---------------------
    # DesertIndex recorded
    DesertIndMask = ((Band4 / Band5) > 1.0)
    Cloudmask *= DesertIndMask
    Amb *= ((Band4 / Band5) < 1.0)

    # Filter 8 - Band 5/6 Composite (separates warm and cold clouds)-------------
    WarmCloud = (((1 - Band5) * Band6) > 210) * Cloudmask
    ColdCloud = (((1 - Band5) * Band6) < 210) * Cloudmask

    # Calculating percentage of the scene that is classified as Desert
    DesertGap = (DesertIndMask + 1) * GapMask
    try:
        arcpy.CalculateStatistics_management(DesertGap, ignore_values="0")
        DesertIndex = DesertGap.mean - 1
    except Exception:
        DesertGap.save(os.path.join(outdir, "Desert.tif"))
        arcpy.CalculateStatistics_management(DesertGap, ignore_values="0")
        DesertIndex = DesertGap.mean - 1
        os.remove(os.path.join(outdir, "Desert.tif"))
    del DesertIndMask, DesertGap, NDSI

    # Calculating percentage of the scene that is classified as Cold Cloud
    ColdCloudGap = (ColdCloud + 1) * GapMask
    try:
        arcpy.CalculateStatistics_management(ColdCloudGap, ignore_values="0")
        ColdCloudMean = ColdCloudGap.mean - 1
        del ColdCloudGap
    except Exception:
        ColdCloudGap.save(os.path.join(outdir, "ColdCloud.tif"))
        arcpy.CalculateStatistics_management(ColdCloudGap, ignore_values="0")
        ColdCloudMean = ColdCloudGap.mean - 1
        os.remove(os.path.join(outdir, "ColdCloud.tif"))
        del ColdCloudGap

    del Band2, Band3, Band4, Band5

    # Calculating percentage of the scene that is classified as Snow
    SnowGap = (Snow + 1) * GapMask
    try:
        arcpy.CalculateStatistics_management(SnowGap, ignore_values="0")
        SnowPerc = SnowGap.mean - 1
        del SnowGap
    except Exception:
        SnowGap.save(os.path.join(outdir, "Snow.tif"))
        arcpy.CalculateStatistics_management(SnowGap, ignore_values="0")
        SnowPerc = SnowGap.mean - 1
        os.remove(os.path.join(outdir, "Snow.tif"))
        del SnowGap
    del Snow
    del GapMask

    # Determining whether or not snow is present and adjusting the Cloudmask
    # accordingly. If snow is present the Warm Clouds are reclassified as ambiguous
    if SnowPerc > .01:
        SnowPresent = True
        Cloudmask = ColdCloud
        Amb = Amb + WarmCloud
    else:
        SnowPresent = False
    del ColdCloud, WarmCloud, SnowPerc

    # Collecting statistics for cloud pixel temperature values.
    # These will be used in later conditionals
    Tempclouds = Cloudmask * Band6
    Tempclouds.save(os.path.join(outdir, "TempClouds.tif"))
    del Tempclouds

    # Converting TempClouds to a text file and writing its non-zero/NAN values to a list
    outtxt = os.path.join(outdir, "tempclouds.txt")
    arcpy.RasterToASCII_conversion(os.path.join(outdir, "TempClouds.tif"), outtxt)

    f = open(outtxt)
    alist = []
    lines = f.readlines()[6:]
    for line in lines:
        for x in line.split(' '):
            try:
                x = float(x)
                if x > 0:
                    alist.append(x)
            except ValueError:
                pass
    f.close()

    TempMin = min(alist)
    TempMax = max(alist)
    TempMean = numpy.mean(alist)
    TempStd = numpy.std(alist)
    TempSkew = stats.skew(alist)
    Temp98perc = numpy.percentile(alist, 98.75)
    Temp97perc = numpy.percentile(alist, 97.50)
    Temp82perc = numpy.percentile(alist, 82.50)
    del alist

    # delete all intermediary files in the output directory
    for afile in os.listdir(outdir):
        if "GapMask" in afile or "TempClouds" in afile or "tempclouds" in afile:
            os.remove(os.path.join(outdir, afile))

    # Pass 2 is run if the following conditionals are met
    if ColdCloudMean > .004 and DesertIndex > .5 and TempMean < 295:
        # Pass 2
        arcpy.AddMessage("Second Pass underway")

        # Adjusting temperature thresholds based on skew
        if TempSkew > 0:
            if TempSkew > 1:
                shift = TempStd
            else:
                shift = TempStd * TempSkew
        else:
            shift = 0
        Temp97perc += shift
        Temp82perc += shift
        if Temp97perc > Temp98perc:
            Temp82perc = Temp82perc - (Temp97perc - Temp98perc)
            Temp97perc = Temp98perc

        warmAmbmask = ((Band6 * Amb) < Temp97perc)
        warmAmbmask = warmAmbmask * ((Amb * Band6) > Temp82perc)

        coldAmbmask = (Band6 * Amb) < Temp82perc
        coldAmbmask = coldAmbmask * ((Amb * Band6) > 0)

        warmAmb = warmAmbmask * Band6
        coldAmb = coldAmbmask * Band6

        ThermEffect1 = warmAmbmask.mean
        ThermEffect2 = coldAmbmask.mean

        arcpy.CalculateStatistics_management(warmAmb, ignore_values="0")
        arcpy.CalculateStatistics_management(coldAmb, ignore_values="0")

        if ThermEffect1 < .4 and warmAmb.mean < 295 and SnowPresent is False:
            Cloudmask = Cloudmask + warmAmbmask + coldAmbmask
            arcpy.AddMessage("Upper Threshold Used")
        elif ThermEffect2 < .4 and coldAmb.mean < 295:
            Cloudmask += coldAmbmask
            arcpy.AddMessage("Lower Threshold Used")

    # switch legend to 1 = good data, 0 = cloud pixel
    remap = arcpy.sa.RemapValue([[1, 0], [0, 1], ["NODATA", 1]])
    Cloud_Mask = arcpy.sa.Reclassify(Cloudmask, "Value", remap)

    # create output name
    mask_path = name.replace("_B2_TOA_Ref.tif", "")

    if outdir:
        outdir = os.path.abspath(outdir)
        outname = core.create_outname(outdir, mask_path, "Mask", "tif")
    else:
        folder = B2_TOA_Ref.replace(name, "")
        outname = core.create_outname(folder, mask_path, "Mask", "tif")

    print("Cloud mask saved at {0}".format(outname))
    Cloud_Mask.save(outname)

    cloud_mask_path = arcpy.Raster(outname)
    del name, mask_path, Cloud_Mask, remap

    return cloud_mask_path
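# Example usage for make_cloud_mask_457 (a minimal sketch; the band 2 TOA reflectance
# path is hypothetical but follows the naming convention produced by
# landsat.toa_reflectance_457, and the matching band 3-6 tifs must sit in the same folder):
#
#   b2 = r"C:\landsat\LT50410362011240PAC01_B2_TOA_Ref.tif"
#   cloud_mask = make_cloud_mask_457(b2, outdir=r"C:\landsat\masks")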
def atsat_bright_temp_457(meta_path, outdir=None):
    """
    Converts band 6 from Landsat 4 and 5, or bands 6 VCID 1 and 2 from Landsat 7,
    to at-satellite brightness temperature in Kelvins.
    To be performed on raw Landsat 4, 5, or 7 level 1 data.

    :param meta_path:   The full filepath to the metadata file, labeled '_MTL.txt', which must
                        be in the same folder as band 6 or bands 6_VCID_1 and 6_VCID_2
    :param outdir:      Output directory to save converted files. If left None, output files
                        are saved in the same directory as the input files.

    :return output_filelist:    A list of all files created by this function
    """

    output_filelist = []
    meta_path = os.path.abspath(meta_path)
    metadata = landsat_metadata(meta_path)
    spacecraft = getattr(metadata, "SPACECRAFT_ID")

    if "4" in spacecraft or "5" in spacecraft:
        band_nums = ["6"]
    elif "7" in spacecraft:
        band_nums = ["6_VCID_1", "6_VCID_2"]
    else:
        raise ValueError("Enter the MTL file corresponding to a Landsat 4, 5, or 7 dataset")

    # the metadata format was changed August 29, 2012. This tool can process either the new
    # or old format. The presence of a PRODUCT_CREATION_TIME category is used to identify
    # old metadata; if it is not present, the metadata is considered new.
    with open(meta_path) as f:
        MText = f.read()

    if "PRODUCT_CREATION_TIME" in MText:
        Meta = "oldMeta"
    else:
        Meta = "newMeta"

    # the tile name is located using the newMeta/oldMeta indices and the date of capture is recorded
    if Meta == "newMeta":
        TileName = getattr(metadata, "LANDSAT_SCENE_ID")
        year = TileName[9:13]
        jday = TileName[13:16]
        date = getattr(metadata, "DATE_ACQUIRED")
    elif Meta == "oldMeta":
        TileName = getattr(metadata, "BAND1_FILE_NAME")
        year = TileName[13:17]
        jday = TileName[17:20]
        date = getattr(metadata, "ACQUISITION_DATE")

    # calculate brightness temperature for each thermal band
    for band_num in band_nums:
        print("Processing Band {0}".format(band_num))

        pathname = meta_path.replace("MTL.txt", "B{0}.tif".format(band_num))
        Oraster = arcpy.Raster(pathname)

        # get rid of the zero values that show as the black background to avoid skewing values
        null_raster = arcpy.sa.SetNull(Oraster, Oraster, "VALUE = 0")

        # use the oldMeta/newMeta indices to pull the min/max radiance and digital number values
        if Meta == "newMeta":
            LMax    = getattr(metadata, "RADIANCE_MAXIMUM_BAND_{0}".format(band_num))
            LMin    = getattr(metadata, "RADIANCE_MINIMUM_BAND_{0}".format(band_num))
            QCalMax = getattr(metadata, "QUANTIZE_CAL_MAX_BAND_{0}".format(band_num))
            QCalMin = getattr(metadata, "QUANTIZE_CAL_MIN_BAND_{0}".format(band_num))
        elif Meta == "oldMeta":
            LMax    = getattr(metadata, "LMAX_BAND{0}".format(band_num))
            LMin    = getattr(metadata, "LMIN_BAND{0}".format(band_num))
            QCalMax = getattr(metadata, "QCALMAX_BAND{0}".format(band_num))
            QCalMin = getattr(metadata, "QCALMIN_BAND{0}".format(band_num))

        # convert digital numbers to radiance
        Radraster = (((LMax - LMin) / (QCalMax - QCalMin)) * (null_raster - QCalMin)) + LMin
        Oraster = 0

        # convert radiance to at-satellite brightness temperature with sensor-specific constants
        if "4" in spacecraft or "5" in spacecraft:
            Refraster = 1260.56 / (arcpy.sa.Ln((607.76 / Radraster) + 1.0))
        if "7" in spacecraft:
            Refraster = 1282.71 / (arcpy.sa.Ln((666.09 / Radraster) + 1.0))

        band_temp = "{0}_B{1}".format(TileName, band_num)

        # save the data to the automated name in outdir if given, or in the parent folder if not
        if outdir:
            outdir = os.path.abspath(outdir)
            BandPath = core.create_outname(outdir, band_temp, "ASBTemp", "tif")
        else:
            folder = os.path.split(meta_path)[0]
            BandPath = core.create_outname(folder, band_temp, "ASBTemp", "tif")

        Refraster.save(BandPath)
        output_filelist.append(BandPath)

        del Refraster, Radraster, null_raster

        print("Temperature Calculated for Band {0}".format(band_num))

    return output_filelist
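# Illustrative sketch (not part of the source module). The brightness temperature used
# above follows T = K2 / ln((K1 / L) + 1), with K1/K2 hard coded per sensor
# (607.76/1260.56 for Landsat 4/5 TM, 666.09/1282.71 for Landsat 7 ETM+). A scalar check
# of that relationship for an assumed radiance L in W/(m^2 * sr * um):
import math

def _brightness_temp_tm(radiance):
    """brightness temperature in Kelvin for a single Landsat 4/5 band 6 radiance value"""
    return 1260.56 / math.log((607.76 / radiance) + 1.0)

# e.g. _brightness_temp_tm(10.0) is roughly 306 K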
def toa_radiance_457(band_nums, meta_path, outdir=None):
    """
    Top of Atmosphere radiance (in Watts/(square meter x steradians x micrometers))
    conversion for Landsat 4, 5, and 7 data. To be performed on raw Landsat 4, 5, or 7
    level 1 data.

    :param band_nums:   A list of desired band numbers such as [3, 4, 5]
    :param meta_path:   The full filepath to the metadata file for those bands
    :param outdir:      Output directory to save converted files. If left None, output
                        files are saved in the same directory as the input files.

    :return output_filelist:    List of filepaths created by this function.
    """

    output_filelist = []
    meta_path = os.path.abspath(meta_path)
    band_nums = core.enf_list(band_nums)
    band_nums = list(map(str, band_nums))

    # the metadata format was changed August 29, 2012. This tool can process either the new
    # or old format. The presence of a PRODUCT_CREATION_TIME category is used to identify
    # old metadata; if it is not present, the metadata is considered new.
    with open(meta_path) as f:
        MText = f.read()
    metadata = grab_meta(meta_path)

    if "PRODUCT_CREATION_TIME" in MText:
        Meta = "oldMeta"
    else:
        Meta = "newMeta"

    # the tile name is located using the newMeta/oldMeta indices and the date of capture is recorded
    if Meta == "newMeta":
        TileName = getattr(metadata, "LANDSAT_SCENE_ID")
        year = TileName[9:13]
        jday = TileName[13:16]
        date = getattr(metadata, "DATE_ACQUIRED")
    elif Meta == "oldMeta":
        TileName = getattr(metadata, "BAND1_FILE_NAME")
        year = TileName[13:17]
        jday = TileName[17:20]
        date = getattr(metadata, "ACQUISITION_DATE")

    # the spacecraft from which the imagery was captured is identified
    # this info determines the solar exoatmospheric irradiance (ESun) for each band
    spacecraft = getattr(metadata, "SPACECRAFT_ID")

    if "7" in spacecraft:
        ESun = (1969.0, 1840.0, 1551.0, 1044.0, 255.700, 0., 82.07, 1368.00)
        TM_ETM_bands = ['1', '2', '3', '4', '5', '7', '8']
    elif "5" in spacecraft:
        ESun = (1957.0, 1826.0, 1554.0, 1036.0, 215.0, 0., 80.67)
        TM_ETM_bands = ['1', '2', '3', '4', '5', '7']
    elif "4" in spacecraft:
        ESun = (1957.0, 1825.0, 1557.0, 1033.0, 214.9, 0., 80.72)
        TM_ETM_bands = ['1', '2', '3', '4', '5', '7']
    else:
        arcpy.AddError("This tool only works for Landsat 4, 5, or 7")
        raise arcpy.ExecuteError()

    # calculate values for each band
    for band_num in band_nums:
        if band_num in TM_ETM_bands:

            print("Processing Band {0}".format(band_num))
            pathname = meta_path.replace("MTL.txt", "B{0}.tif".format(band_num))
            Oraster = arcpy.Raster(pathname)

            # get rid of the zero values that show as the black background to avoid skewing values
            null_raster = arcpy.sa.SetNull(Oraster, Oraster, "VALUE = 0")

            # use the oldMeta/newMeta indices to pull the min/max radiance and digital number values
            if Meta == "newMeta":
                LMax    = getattr(metadata, "RADIANCE_MAXIMUM_BAND_{0}".format(band_num))
                LMin    = getattr(metadata, "RADIANCE_MINIMUM_BAND_{0}".format(band_num))
                QCalMax = getattr(metadata, "QUANTIZE_CAL_MAX_BAND_{0}".format(band_num))
                QCalMin = getattr(metadata, "QUANTIZE_CAL_MIN_BAND_{0}".format(band_num))
            elif Meta == "oldMeta":
                LMax    = getattr(metadata, "LMAX_BAND{0}".format(band_num))
                LMin    = getattr(metadata, "LMIN_BAND{0}".format(band_num))
                QCalMax = getattr(metadata, "QCALMAX_BAND{0}".format(band_num))
                QCalMin = getattr(metadata, "QCALMIN_BAND{0}".format(band_num))

            # convert digital numbers to Top-of-Atmosphere radiance
            Radraster = (((LMax - LMin) / (QCalMax - QCalMin)) * (null_raster - QCalMin)) + LMin
            Oraster = 0
            del null_raster

            band_rad = "{0}_B{1}".format(TileName, band_num)

            # create the output name and save the TOA radiance tiff
            if outdir is not None:
                outdir = os.path.abspath(outdir)
                outname = core.create_outname(outdir, band_rad, "TOA_Rad", "tif")
            else:
                folder = os.path.split(meta_path)[0]
                outname = core.create_outname(folder, band_rad, "TOA_Rad", "tif")

            Radraster.save(outname)
            output_filelist.append(outname)

            del Radraster
            print("toa radiance saved for Band {0}".format(band_num))

        # if the listed band is not a TM/ETM+ sensor band, skip it and print a message
        else:
            print("Can only perform radiance conversion on TM/ETM+ sensor bands")
            print("Skipping band {0}".format(band_num))

    return output_filelist
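# Illustrative sketch (not part of the source module). The DN-to-radiance rescaling above
# is the standard linear gain/bias form
#     L = ((LMax - LMin) / (QCalMax - QCalMin)) * (Qcal - QCalMin) + LMin.
# A scalar version; the example numbers in the comment below are placeholders for
# illustration only, not values taken from a real MTL file.
def _dn_to_radiance(qcal, lmax, lmin, qcalmax, qcalmin):
    """convert a single digital number to TOA radiance in W/(m^2 * sr * um)"""
    return ((lmax - lmin) / (qcalmax - qcalmin)) * (qcal - qcalmin) + lmin

# e.g. _dn_to_radiance(128, lmax=193.0, lmin=-1.52, qcalmax=255, qcalmin=1) for an 8-bit band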
def extract_HDF_layers(filelist, layerlist, layernames=None, outdir=None):
    """
    Extracts tifs from HDF files. Use "Extract_MODIS_HDF" in the modis module for better
    handling of MODIS data with sinusoidal projections.

    inputs:
      filelist      list of '.hdf' files from which data should be extracted
      layerlist     list of layer numbers to pull out as individual tifs; should be
                    integers such as [0, 4] for the 0th and 4th layers respectively.
      layernames    list of layer names to give more descriptive names to each layer
      outdir        directory to which tif files should be saved.
                    if outdir is left as None, files are saved in the same directory
                    as the input file.
    """

    # set up initial arcpy modules, workspace, and parameters, and sanitize inputs
    arcpy.env.overwriteOutput = True

    # enforce lists for iteration purposes
    filelist = core.enf_filelist(filelist)
    layerlist = core.enf_list(layerlist)

    # ignore user input layernames if they are invalid, but print warnings
    if layernames is not None:
        layernames = core.enf_list(layernames)
        if not len(layernames) == len(layerlist):
            print('layernames must be the same length as layerlist!')
            print('omitting user defined layernames!')
            layernames = None

    # create an empty list to collect output filenames
    produced_files = []

    # iterate through every file in the input filelist
    for infile in filelist:

        # pull the filename and path apart
        path, name = os.path.split(infile)
        arcpy.env.workspace = path

        for i in range(len(layerlist)):
            layer = layerlist[i]

            # specify the layer names
            if layernames is not None:
                layername = layernames[i]
            else:
                layername = str(layer).zfill(3)

            # use the output directory if the user gave one, otherwise save next to the input file
            if outdir is not None:
                if not os.path.exists(outdir):
                    os.makedirs(outdir)
                outname = core.create_outname(outdir, infile, layername, ext="tif")
            else:
                outname = core.create_outname(os.path.dirname(infile), infile, layername, ext="tif")

            # perform the extraction and projection definition
            try:
                # extract the subdataset
                arcpy.ExtractSubDataset_management(infile, outname, str(layer))
                print('Extracted ' + outname)
                produced_files.append(outname)
            except:
                print('Failed to extract ' + outname + ' from ' + infile)

    return produced_files
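# Hypothetical usage sketch for extract_HDF_layers (not part of the source module):
# extracts layers 0 and 4 from a pair of HDF files with descriptive names. All paths and
# layer names below are placeholders; ExtractSubDataset_management requires an ArcGIS
# install, as the function above assumes.
def _example_extract_HDF_layers():
    hdfs = [r"C:\data\granule_1.hdf", r"C:\data\granule_2.hdf"]   # placeholder paths
    tifs = extract_HDF_layers(hdfs,
                              layerlist=[0, 4],
                              layernames=["NDVI", "QC"],          # hypothetical layer names
                              outdir=r"C:\data\extracted")        # placeholder output directory
    print(tifs)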
def degree_days_accum(rasterlist, critical_values=None, outdir=None):
    """
    Accumulates degree days in a time series rasterlist

    This function is the logical successor to calc.degree_days. Input a list of rasters
    containing daily data to be accumulated. The output raster for a given day will be the
    sum of the input raster for that day and all preceding days. The last output raster in
    a year's worth of data (image 365) would be the sum of all 365 images, and the 25th
    output raster would be the sum of the first 25 days.

    Critical value rasters will also be created. Useful, for example, when we wish to know
    on what day of a 365 day sequence every pixel reaches a value of 100: input 100 as a
    critical value and that output raster will be generated.

    :param rasterlist:          list of files, or directory containing rasters to accumulate
    :param critical_values:     Values at which the user wishes to know WHEN the total
                                accumulation reaches this point. For every critical value,
                                an output raster will be created. This raster contains
                                integer values denoting the index number of the file at
                                which the value was reached. This input must be a list of
                                ints or floats, not strings.
    :param outdir:              Desired output directory for all output files.

    :return output_filelist:    a list of all files created by this function.
    """

    output_filelist = []
    rasterlist = enf_rastlist(rasterlist)

    if critical_values is not None:
        critical_values = core.enf_list(critical_values)

        # critical values of zero are problematic, so replace them with a small value
        if 0 in critical_values:
            critical_values.remove(0)
            critical_values.append(0.000001)

    if outdir is not None and not os.path.exists(outdir):
        os.makedirs(outdir)

    for i, rast in enumerate(rasterlist):

        image, meta = to_numpy(rast, "float32")
        xs, ys = image.shape

        # initialize the accumulation and critical value arrays from the first raster
        if i == 0:
            Sum = numpy.zeros((xs, ys))
            if critical_values is not None:
                Crit = numpy.zeros((len(critical_values), xs, ys))

        if image.shape == Sum.shape:

            # only bother to proceed if at least one pixel is positive
            if numpy.max(image) >= 0:
                for x in range(xs):
                    for y in range(ys):

                        if image[x, y] >= 0:
                            Sum[x, y] = Sum[x, y] + image[x, y]

                        if critical_values is not None:
                            for z, critical_value in enumerate(critical_values):
                                if Sum[x, y] >= critical_value and Crit[z, x, y] == 0:
                                    Crit[z, x, y] = i
        else:
            print("Encountered an image of incorrect size! Skipping it!")

        # save the running accumulation raster for this day
        Sum = Sum.astype('float32')
        if outdir is not None:
            outname = core.create_outname(outdir, rast, "Accum")
        else:
            outname = core.create_outname(os.path.dirname(rast), rast, "Accum")
        from_numpy(Sum, meta, outname)
        output_filelist.append(outname)

        del image

    # output critical accumulation rasters using metadata from the last raster in the previous loop
    if critical_values is not None:
        Crit = Crit.astype('int16')
        crit_meta = meta
        crit_meta.NoData_Value = 0

        # place these in the last raster output location
        head, tail = os.path.split(outname)

        for z, critical_value in enumerate(critical_values):
            outname = os.path.join(head, "Crit_Accum_Index_Val-{0}.tif".format(str(critical_value)))
            print("Saving {0}".format(outname))
            from_numpy(Crit[z, :, :], crit_meta, outname)

    return output_filelist
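# Hypothetical usage sketch for degree_days_accum (not part of the source module):
# accumulates a folder of daily degree-day rasters and asks for the day index at which
# each pixel first reaches 100 and 500 accumulated degree days. The directories below are
# placeholders.
def _example_degree_days_accum():
    daily_dir = r"C:\data\degree_days\daily"                      # placeholder directory of daily rasters
    outputs = degree_days_accum(daily_dir,
                                critical_values=[100, 500],
                                outdir=r"C:\data\degree_days\accumulated")
    # the "Crit_Accum_Index_Val-*" rasters store the index (i) of the first raster at
    # which each pixel met or exceeded the corresponding critical value
    print(outputs)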