def discover(cls, path):
    """Find products under ``path`` and return one Data object per date.

    Walks the directory tree, keeps files whose '_'-separated basename has 3
    or 4 fields and whose date field parses with the repository date format,
    then groups the survivors by date and builds a ``cls`` instance for each
    group.

    :param path: directory tree to search.
    :return: list of ``cls`` instances, one per distinct date (possibly empty).
    """
    files = []
    datedir = cls.Asset.Repository._datedir
    for root, dirs, filenames in os.walk(path):
        for filename in filenames:
            f = os.path.join(root, filename)
            # print() form is valid in both Python 2 and 3 (was a py2-only
            # `print f` statement, inconsistent with the VerboseOut variant)
            print(f)
            parts = basename(f).split('_')
            if len(parts) == 3 or len(parts) == 4:
                # the date field is always third-from-last
                try:
                    datetime.strptime(parts[len(parts) - 3], datedir)
                    files.append(f)
                except ValueError:
                    # not a product filename: date field didn't parse.
                    # (was a bare `except:`, which also swallowed
                    # KeyboardInterrupt/SystemExit)
                    pass
    datas = []
    if len(files) == 0:
        return datas
    # Group by date: index of the date field within the basename
    sind = len(basename(files[0]).split('_')) - 3
    func = lambda x: datetime.strptime(basename(x).split('_')[sind], datedir).date()
    for date, fnames in groupby(sorted(files), func):
        dat = cls(path=path)
        dat.ParseAndAddFiles(list(fnames))
        datas.append(dat)
    return datas
def discover(cls, path):
    """Find products under ``path`` and return one Data object per date.

    Walks the directory tree, keeps files whose '_'-separated basename has 3
    or 4 fields and whose date field parses with the repository date format,
    then groups the survivors by date and builds a ``cls`` instance for each
    group.

    :param path: directory tree to search.
    :return: list of ``cls`` instances, one per distinct date (possibly empty).
    """
    files = []
    datedir = cls.Asset.Repository._datedir
    for root, dirs, filenames in os.walk(path):
        for filename in filenames:
            f = os.path.join(root, filename)
            VerboseOut(f, 2)
            parts = basename(f).split('_')
            if len(parts) == 3 or len(parts) == 4:
                # the date field is always third-from-last
                try:
                    datetime.strptime(parts[len(parts) - 3], datedir)
                    files.append(f)
                except ValueError:
                    # not a product filename: date field didn't parse.
                    # (was a bare `except:`, which also swallowed
                    # KeyboardInterrupt/SystemExit)
                    pass
    datas = []
    if len(files) == 0:
        return datas
    # Group by date: index of the date field within the basename
    sind = len(basename(files[0]).split('_')) - 3
    func = lambda x: datetime.strptime(basename(x).split('_')[sind], datedir).date()
    for date, fnames in groupby(sorted(files), func):
        dat = cls(path=path)
        dat.ParseAndAddFiles(list(fnames))
        datas.append(dat)
    return datas
def rectify_product(full_fn):
    """Sync one product file on disk into the inventory database.

    Parses tile/date/sensor/product out of the '_'-delimited basename,
    then upserts a row for it.  Files that don't match the naming scheme
    are reported via _match_failure_report and skipped.

    NOTE(review): relies on names from the enclosing scope — presumably
    closures: data_class, driver, mpo (a Django-style manager), counts,
    touched_rows, _match_failure_report — confirm against the outer function.
    """
    bfn_parts = basename(full_fn).split('_')
    if not len(bfn_parts) == 4:
        _match_failure_report(
            full_fn,
            "Failure to parse: Wrong number of '_'-delimited substrings.")
        return
    # extract metadata about the file
    (tile, date_str, sensor, product) = bfn_parts
    date_pattern = data_class.Asset.Repository._datedir
    try:
        date = datetime.datetime.strptime(date_str, date_pattern).date()
    except Exception:
        verbose_out(traceback.format_exc(), 4, sys.stderr)
        msg = "Failure to parse date: '{}' didn't adhere to pattern '{}'."
        _match_failure_report(full_fn, msg.format(date_str, date_pattern))
        return
    # upsert the row; note `product` is rebound from the parsed product
    # string to the ORM object here
    (product, created) = mpo.update_or_create(product=product, sensor=sensor,
                                              tile=tile, date=date,
                                              driver=driver, name=full_fn)
    # NOTE(review): update_or_create normally persists already, so this
    # save() looks redundant — confirm before removing
    product.save()
    # TODO can subtract this item from starting_keys each time and possibly save some memory and time
    touched_rows.add(product.pk)
    if created:
        counts['add'] += 1
        verbose_out("Product added to database: " + full_fn, 5)
    else:
        counts['update'] += 1
        verbose_out("Product found in database: " + full_fn, 5)
def ParseAndAddFiles(self, filenames=None):
    """Parse product filenames and register them on this Data object.

    Accepts basenames of the form [prefix_]DATE_SENSOR_PRODUCT (3 or 4
    '_'-separated fields).  Files that don't parse, or whose date differs
    from the date already set on this object, are logged and skipped.

    :param filenames: files to add; when None, self.find_files() is used.
    """
    if filenames is None:
        filenames = self.find_files()
    datedir = self.Repository._datedir
    for fname in filenames:
        fields = basename(fname).split('_')
        if not (3 <= len(fields) <= 4):
            # Skip this file
            VerboseOut('Unrecognizable file: %s' % fname, 3)
            continue
        # a 4-field name carries one extra leading field before the date
        shift = len(fields) - 3
        try:
            parsed = datetime.strptime(fields[shift], datedir).date()
            if self.date is None:
                # First time through
                self.date = parsed
            elif parsed != self.date:
                raise Exception('Mismatched dates: %s' % ' '.join(filenames))
            self.AddFile(fields[shift + 1], fields[shift + 2], fname)
        except Exception:
            # This was just a bad file
            VerboseOut('Unrecognizable file: %s' % fname, 3)
            continue
def __init__(self, filename):
    """Inspect a single daymet file and extract its metadata.

    Expects a basename of the form <prefix>_<asset>_<tile>_<YYYYDDD...>.
    """
    super(daymetAsset, self).__init__(filename)
    fields = basename(filename).split('_')
    self.sensor = 'daymet'
    self.asset, self.tile = fields[1], fields[2]
    # date field is year + day-of-year
    self.date = datetime.datetime.strptime(fields[3], '%Y%j').date()
    self.products[self.asset] = filename
def __init__(self, filename):
    """Inspect a single MERRA file and extract its metadata.

    e.g., basename fields: ['MERRA', 'TS', 'h06v05', '2010001']
    """
    super(merraAsset, self).__init__(filename)
    fields = basename(filename).split('_')
    self.sensor = 'merra'
    self.asset, self.tile = fields[1], fields[2]
    # date field is year + day-of-year
    self.date = datetime.datetime.strptime(fields[3], '%Y%j').date()
    self.products[self.asset] = filename
def stack(self, suffix='stack', **kwargs):
    """Stack all products of each date into a single image file.

    Output is written to the project directory, named after the first
    product's basename (up to its last '_') plus the given suffix.
    """
    for date in self.inv.dates:
        prods = self.inv.products(date)
        filenames = [self.inv[date].filenames[p] for p in prods]
        img = gippy.GeoImage(filenames)
        first = basename(filenames[0])
        # drop the trailing product field from the basename
        stem = first[:first.rfind('_')]
        fout = os.path.join(self.inv.projdir, '%s_%s' % (stem, suffix))
        imgout = img.Process(fout)
        imgout.CopyMeta(img)
def __init__(self, filename):
    """Inspect a single MERRA file and extract its metadata.

    Basename is '.'-delimited; the version lives in the first field and
    the asset code in the second.
    """
    super(merraAsset, self).__init__(filename)
    self.sensor = 'merra'
    self.tile = 'h01v01'
    dot_fields = basename(filename).split('.')
    self._version = int(dot_fields[0].split('_')[1])
    self.asset = dot_fields[1].split('_')[2].upper()
    if self.asset == "ASM":
        # assign date of static data sets
        self.date = self._assets[self.asset]['startdate']
    else:
        self.date = datetime.datetime.strptime(dot_fields[2], '%Y%m%d').date()
def process(self, products=None, overwrite=False, **kwargs):
    """ Make sure all products have been processed """
    # Let the base class filter down to the products that still need work
    products = super(LandsatData, self).process(products, overwrite, **kwargs)
    if len(products) == 0:
        return
    # NOTE(review): `start` is unused in this visible span — presumably the
    # method continues with per-product timing; confirm against full file
    start = datetime.now()
    # Add the sensor for this date to the basename
    self.basename = self.basename + '_' + self.sensor_set[0]
    # Read the assets
    try:
        img = self._readraw()
    except Exception, e:  # Python 2 except syntax
        VerboseOut(traceback.format_exc(), 5)
        # NOTE(review): assets is keyed by '' here (later versions use 'DN');
        # presumably the empty string was this version's sole asset type —
        # verify before changing
        raise Exception('Error reading %s: %s' % (basename(self.assets[''].filename), e))
def process(self, products=None, overwrite=False, **kwargs):
    """ Make sure all products have been processed """
    # Let the base class filter down to the products that still need work
    products = super(landsatData, self).process(products, overwrite, **kwargs)
    if len(products) == 0:
        return
    # NOTE(review): `start` is unused in this visible span — presumably the
    # method continues with per-product timing; confirm against full file
    start = datetime.now()
    # Add the sensor for this date to the basename
    self.basename = self.basename + '_' + self.sensor_set[0]
    # Read the assets
    try:
        img = self._readraw()
    except Exception, e:  # Python 2 except syntax
        VerboseOut(traceback.format_exc(), 5)
        # NOTE(review): assets is keyed by '' here (sibling code uses 'DN');
        # verify which asset key this version of the driver registers
        raise Exception('Error reading %s: %s' % (basename(self.assets[''].filename), e))
def main():
    """CLI entry point: mask project products with a static file mask
    (--filemask) and/or sibling mask products (--pmask)."""
    title = Colors.BOLD + 'GIPS Project Masking (v%s)' % __version__ + Colors.OFF
    parser0 = GIPSParser(datasources=False, description=title)
    parser0.add_default_parser()
    parser0.add_projdir_parser()
    group = parser0.add_argument_group('masking options')
    group.add_argument('--filemask', help='Mask all files with this static mask', default=None)
    group.add_argument('--pmask', help='Mask files with this corresponding product', nargs='*', default=[])
    h = 'Write mask to original image instead of creating new image'
    group.add_argument('--original', help=h, default=False, action='store_true')
    h = 'Overwrite existing files when creating new'
    group.add_argument('--overwrite', help=h, default=False, action='store_true')
    h = 'Suffix to apply to masked file (not compatible with --original)'
    group.add_argument('--suffix', help=h, default='-masked')
    #parser0.add_argument('-i', '--invert', help='Invert mask (0->1, 1->0)', default=False, action='store_true')
    #parser0.add_argument('--value', help='Mask == val', default=1)
    args = parser0.parse_args()
    # TODO - check that at least 1 of filemask or pmask is supplied
    try:
        print title
        for projdir in args.projdir:
            if args.filemask is not None:
                mask_file = gippy.GeoImage(args.filemask)
            inv = ProjectInventory(projdir, args.products)
            for date in inv.dates:
                VerboseOut('Masking files from %s' % date)
                available_masks = inv[date].masks(args.pmask)
                for p in inv.products(date):
                    # don't mask any masks
                    if p in available_masks:
                        continue
                    meta = ''
                    update = True if args.original else False
                    img = inv[date].open(p, update=update)
                    if args.filemask is not None:
                        img.AddMask(mask_file[0])
                        meta = basename(args.filemask) + ' '
                    for mask in available_masks:
                        img.AddMask(inv[date].open(mask)[0])
                        meta = meta + basename(inv[date][mask]) + ' '
                    # meta doubles as "did we add any mask?" flag
                    if meta != '':
                        if args.original:
                            VerboseOut(' %s' % (img.Basename()), 2)
                            img.Process()
                            img.SetMeta('MASKS', meta)
                        else:
                            fout = os.path.splitext(img.Filename())[0] + args.suffix + '.tif'
                            if not os.path.exists(fout) or args.overwrite:
                                VerboseOut(' %s -> %s' % (img.Basename(), basename(fout)), 2)
                                imgout = img.Process(fout)
                                imgout.SetMeta('MASKS', meta)
                                imgout = None
                    # release GeoImage references so files are flushed/closed
                    # NOTE(review): nesting of the two lines below was
                    # reconstructed from collapsed source — verify placement
                    img = None
            mask_file = None
    except Exception, e:  # Python 2 except syntax
        import traceback
        VerboseOut(traceback.format_exc(), 4)
        print 'Masking error: %s' % e
class landsatData(Data):
    """Landsat driver: product definitions and per-date processing."""
    name = 'Landsat'
    version = '0.9.0'
    Asset = landsatAsset

    _prodpattern = '*.tif'
    # Group products belong to ('Standard' if not specified)
    _productgroups = {
        'Index': ['bi', 'evi', 'lswi', 'msavi2', 'ndsi', 'ndvi', 'ndwi', 'satvi'],
        'Tillage': ['ndti', 'crc', 'sti', 'isti'],
        'LC8SR': ['ndvi8sr']
    }
    __toastring = 'toa: use top of the atmosphere reflectance'
    # Registry of producible products: required asset, help text, optional
    # CLI arguments, and whether the product is computed from TOA values
    _products = {
        #'Standard':
        'rad': {
            'assets': ['DN'],
            'description': 'Surface-leaving radiance',
            'arguments': [__toastring]
        },
        'ref': {
            'assets': ['DN'],
            'description': 'Surface reflectance',
            'arguments': [__toastring]
        },
        'temp': {
            'assets': ['DN'],
            'description': 'Brightness (apparent) temperature',
            'toa': True
        },
        'acca': {
            'assets': ['DN'],
            'description': 'Automated Cloud Cover Assessment',
            'arguments': [
                'X: erosion kernel diameter in pixels (default: 5)',
                'Y: dilation kernel diameter in pixels (default: 10)',
                'Z: cloud height in meters (default: 4000)'
            ],
            'nargs': '*',
            'toa': True
        },
        'fmask': {
            'assets': ['DN'],
            'description': 'Fmask cloud cover',
            'nargs': '*',
            'toa': True
        },
        'tcap': {
            'assets': ['DN'],
            'description': 'Tassled cap transformation',
            'toa': True
        },
        'dn': {
            'assets': ['DN'],
            'description': 'Raw digital numbers',
            'toa': True
        },
        'volref': {
            'assets': ['DN'],
            'description': 'Volumetric water reflectance - valid for water only',
            'arguments': [__toastring]
        },
        'wtemp': {
            'assets': ['DN'],
            'description': 'Water temperature (atmospherically correct) - valid for water only',
            # It's not really TOA, but the product code will take care of atm correction itself
            'toa': True
        },
        'bqa': {
            'assets': ['DN'],
            'description': 'LC8 band quality',
            'toa': True
        },
        'bqashadow': {
            'assets': ['DN'],
            'description': 'LC8 QA + Shadow Smear',
            'arguments': [
                'X: erosion kernel diameter in pixels (default: 5)',
                'Y: dilation kernel diameter in pixels (default: 10)',
                'Z: cloud height in meters (default: 4000)'
            ],
            'nargs': '*',
            'toa': True
        },
        #'Indices': {
        'bi': {
            'assets': ['DN'],
            'description': 'Brightness Index',
            'arguments': [__toastring]
        },
        'evi': {
            'assets': ['DN'],
            'description': 'Enhanced Vegetation Index',
            'arguments': [__toastring]
        },
        'lswi': {
            'assets': ['DN'],
            'description': 'Land Surface Water Index',
            'arguments': [__toastring]
        },
        'msavi2': {
            'assets': ['DN'],
            'description': 'Modified Soil-Adjusted Vegetation Index (revised)',
            'arguments': [__toastring]
        },
        'ndsi': {
            'assets': ['DN'],
            'description': 'Normalized Difference Snow Index',
            'arguments': [__toastring]
        },
        'ndvi': {
            'assets': ['DN'],
            'description': 'Normalized Difference Vegetation Index',
            'arguments': [__toastring]
        },
        'ndwi': {
            'assets': ['DN'],
            'description': 'Normalized Difference Water Index',
            'arguments': [__toastring]
        },
        'satvi': {
            'assets': ['DN'],
            'description': 'Soil-Adjusted Total Vegetation Index',
            'arguments': [__toastring]
        },
        #'Tillage Indices': {
        'ndti': {
            'assets': ['DN'],
            'description': 'Normalized Difference Tillage Index',
            'arguments': [__toastring]
        },
        'crc': {
            'assets': ['DN'],
            'description': 'Crop Residue Cover',
            'arguments': [__toastring]
        },
        'sti': {
            'assets': ['DN'],
            'description': 'Standard Tillage Index',
            'arguments': [__toastring]
        },
        'isti': {
            'assets': ['DN'],
            'description': 'Inverse Standard Tillage Index',
            'arguments': [__toastring]
        },
        # NEW!!!
        'ndvi8sr': {
            'assets': ['SR'],
            'description': 'Normalized Difference Vegetation from LC8SR',
        },
        'landmask': {
            'assets': ['SR'],
            'description': 'Land mask from LC8SR',
        },
    }

    def process(self, products=None, overwrite=False, **kwargs):
        """ Make sure all products have been processed """
        products = super(landsatData, self).process(products, overwrite, **kwargs)
        if len(products) == 0:
            return
        start = datetime.now()

        # All requested products must derive from a single asset type
        assets = set()
        for key, val in products.requested.items():
            assets.update(self._products[val[0]]['assets'])
        if len(assets) != 1:
            raise Exception(
                'This driver does not support creation of products from different Assets at the same time'
            )
        asset = list(assets)[0]

        # TODO: De-hack this
        # Better approach, but needs some thought, is to loop over assets
        # Ian, you are right. I just don't have enough time to do it.
        if asset == 'SR':
            # Surface-reflectance tarball: map member names to /vsitar paths
            datafiles = self.assets['SR'].datafiles()
            imgpaths = dict()
            for datafile in datafiles:
                key = datafile.partition('_')[2].split('.')[0]
                path = os.path.join('/vsitar/' + self.assets['SR'].filename, datafile)
                imgpaths[key] = path
            # print imgpaths
            bname = os.path.join(self.path, self.basename)
            for key, val in products.requested.items():
                if val[0] == "ndvi8sr":
                    sensor = 'LC8SR'
                    fname = '%s_%s_%s' % (bname, sensor, key)
                    img = gippy.GeoImage(
                        [imgpaths['sr_band4'], imgpaths['sr_band5']])
                    missing = float(img[0].NoDataValue())
                    red = img[0].Read().astype('float32')
                    nir = img[1].Read().astype('float32')
                    # valid where both bands have data and NDVI denominator is nonzero
                    wvalid = numpy.where((red != missing) & (nir != missing)
                                         & (red + nir != 0.0))
                    red[wvalid] *= 1.E-4
                    nir[wvalid] *= 1.E-4
                    # TODO: change this so that these pixels become missing
                    red[(red != missing) & (red < 0.0)] = 0.0
                    red[red > 1.0] = 1.0
                    nir[(nir != missing) & (nir < 0.0)] = 0.0
                    nir[nir > 1.0] = 1.0
                    ndvi = missing + numpy.zeros_like(red)
                    ndvi[wvalid] = (nir[wvalid] - red[wvalid]) / (nir[wvalid] + red[wvalid])
                    # set_trace()
                    VerboseOut("writing " + fname, 2)
                    imgout = gippy.GeoImage(fname, img, gippy.GDT_Float32, 1)
                    imgout.SetNoData(-9999.)
                    imgout.SetOffset(0.0)
                    imgout.SetGain(1.0)
                    imgout.SetBandName('NDVI', 1)
                    imgout[0].Write(ndvi)
                if val[0] == "landmask":
                    sensor = 'LC8SR'
                    fname = '%s_%s_%s' % (bname, sensor, key)
                    img = gippy.GeoImage(
                        [imgpaths['cfmask'], imgpaths['cfmask_conf']])
                    cfmask = img[0].Read()
                    # array([ 0, 1, 2, 3, 4, 255], dtype=uint8)
                    # 0 means clear! but I want 1 to mean clear
                    cfmask[cfmask > 0] = 2
                    cfmask[cfmask == 0] = 1
                    cfmask[cfmask == 2] = 0
                    VerboseOut("writing " + fname, 2)
                    imgout = gippy.GeoImage(fname, img, gippy.GDT_Byte, 1)
                    imgout.SetBandName('Land mask', 1)
                    imgout[0].Write(cfmask)
        elif asset == 'DN':
            # This block contains everything that existed in the first generation Landsat driver
            # Add the sensor for this date to the basename
            self.basename = self.basename + '_' + self.sensors[asset]
            # Read the assets
            try:
                img = self._readraw()
            except Exception, e:  # Python 2 except syntax
                VerboseOut(traceback.format_exc(), 5)
                raise Exception('Error reading %s: %s' %
                                (basename(self.assets['DN'].filename), e))

            meta = self.assets['DN'].meta
            visbands = self.assets['DN'].visbands
            lwbands = self.assets['DN'].lwbands
            md = self.meta_dict()

            # running atmosphere if any products require it
            toa = True
            for val in products.requested.values():
                toa = toa and (self._products[val[0]].get('toa', False) or 'toa' in val)
            if not toa:
                start = datetime.now()
                if not settings().REPOS[self.Repository.name.lower()]['6S']:
                    raise Exception(
                        '6S is required for atmospheric correction')
                try:
                    wvlens = [(meta[b]['wvlen1'], meta[b]['wvlen2']) for b in visbands]
                    geo = self.metadata['geometry']
                    atm6s = SIXS(visbands, wvlens, geo, self.metadata['datetime'],
                                 sensor=self.sensor_set[0])
                    md["AOD Source"] = str(atm6s.aod[0])
                    md["AOD Value"] = str(atm6s.aod[1])
                except Exception, e:
                    VerboseOut(traceback.format_exc(), 4)
                    raise Exception(
                        'Problem running 6S atmospheric model: %s' % e)

            # Break down by group
            groups = products.groups()

            # create non-atmospherically corrected apparent reflectance and temperature image
            reflimg = gippy.GeoImage(img)
            theta = numpy.pi * self.metadata['geometry']['solarzenith'] / 180.0
            # Earth-Sun distance correction as a function of day-of-year
            sundist = (1.0 - 0.016728 * numpy.cos(numpy.pi * 0.9856 * (float(self.day) - 4.0) / 180.0))
            for col in self.assets['DN'].visbands:
                reflimg[col] = img[col] * (1.0 / (
                    (meta[col]['E'] * numpy.cos(theta)) / (numpy.pi * sundist * sundist)))
            for col in self.assets['DN'].lwbands:
                # inverse Planck relation via calibration constants K1/K2, in Celsius
                reflimg[col] = (((img[col].pow(-1)) * meta[col]['K1'] + 1).log().pow(-1)) * meta[col]['K2'] - 273.15

            # This is landsat, so always just one sensor for a given date
            sensor = self.sensors['DN']

            # Process standard products
            for key, val in groups['Standard'].items():
                start = datetime.now()
                # TODO - update if no atmos desired for others
                toa = self._products[val[0]].get('toa', False) or 'toa' in val
                # Create product
                try:
                    fname = os.path.join(self.path, self.basename + '_' + key)
                    if val[0] == 'acca':
                        s_azim = self.metadata['geometry']['solarazimuth']
                        s_elev = 90 - self.metadata['geometry']['solarzenith']
                        # NOTE(review): bare except silently falls back to
                        # defaults on any bad argument
                        try:
                            erosion = int(val[1]) if len(val) > 1 else 5
                            dilation = int(val[2]) if len(val) > 2 else 10
                            cloudheight = int(val[3]) if len(val) > 3 else 4000
                        except:
                            erosion = 5
                            dilation = 10
                            cloudheight = 4000
                        resset = set([(reflimg[band].Resolution().x(),
                                       reflimg[band].Resolution().y())
                                      for band in (self.assets['DN'].visbands +
                                                   self.assets['DN'].lwbands)])
                        if len(resset) > 1:
                            raise Exception(
                                'ACCA requires all bands to have the same '
                                'spatial resolution. Found:\n\t' + str(resset))
                        imgout = ACCA(reflimg, fname, s_elev, s_azim, erosion,
                                      dilation, cloudheight)
                    elif val[0] == 'fmask':
                        try:
                            tolerance = int(val[1]) if len(val) > 1 else 3
                            dilation = int(val[2]) if len(val) > 2 else 5
                        except:
                            tolerance = 3
                            dilation = 5
                        imgout = Fmask(reflimg, fname, tolerance, dilation)
                    elif val[0] == 'rad':
                        imgout = gippy.GeoImage(fname, img, gippy.GDT_Int16, len(visbands))
                        for i in range(0, imgout.NumBands()):
                            imgout.SetBandName(visbands[i], i + 1)
                        imgout.SetNoData(-32768)
                        imgout.SetGain(0.1)
                        if toa:
                            for col in visbands:
                                img[col].Process(imgout[col])
                        else:
                            # invert the 6S linear model: (L - Lpath) / transmittance
                            for col in visbands:
                                ((img[col] - atm6s.results[col][1]) / atm6s.results[col][0]).Process(imgout[col])
                        # Mask out any pixel for which any band is nodata
                        #imgout.ApplyMask(img.DataMask())
                    elif val[0] == 'ref':
                        imgout = gippy.GeoImage(fname, img, gippy.GDT_Int16, len(visbands))
                        for i in range(0, imgout.NumBands()):
                            imgout.SetBandName(visbands[i], i + 1)
                        imgout.SetNoData(-32768)
                        imgout.SetGain(0.0001)
                        if toa:
                            for c in visbands:
                                reflimg[c].Process(imgout[c])
                        else:
                            for c in visbands:
                                (((img[c] - atm6s.results[c][1]) / atm6s.results[c][0])
                                 * (1.0 / atm6s.results[c][2])).Process(imgout[c])
                        # Mask out any pixel for which any band is nodata
                        #imgout.ApplyMask(img.DataMask())
                    elif val[0] == 'tcap':
                        tmpimg = gippy.GeoImage(reflimg)
                        tmpimg.PruneBands(
                            ['BLUE', 'GREEN', 'RED', 'NIR', 'SWIR1', 'SWIR2'])
                        arr = numpy.array(self.Asset._sensors[
                            self.sensor_set[0]]['tcap']).astype('float32')
                        imgout = LinearTransform(tmpimg, fname, arr)
                        outbands = [
                            'Brightness', 'Greenness', 'Wetness', 'TCT4', 'TCT5', 'TCT6'
                        ]
                        for i in range(0, imgout.NumBands()):
                            imgout.SetBandName(outbands[i], i + 1)
                    elif val[0] == 'temp':
                        imgout = gippy.GeoImage(fname, img, gippy.GDT_Int16, len(lwbands))
                        for i in range(0, imgout.NumBands()):
                            imgout.SetBandName(lwbands[i], i + 1)
                        imgout.SetNoData(-32768)
                        imgout.SetGain(0.1)
                        [reflimg[col].Process(imgout[col]) for col in lwbands]
                    elif val[0] == 'dn':
                        rawimg = self._readraw()
                        rawimg.SetGain(1.0)
                        rawimg.SetOffset(0.0)
                        imgout = rawimg.Process(fname)
                        rawimg = None
                    elif val[0] == 'volref':
                        bands = deepcopy(visbands)
                        bands.remove("SWIR1")
                        imgout = gippy.GeoImage(fname, reflimg, gippy.GDT_Int16, len(bands))
                        [
                            imgout.SetBandName(band, i + 1)
                            for i, band in enumerate(bands)
                        ]
                        imgout.SetNoData(-32768)
                        imgout.SetGain(0.0001)
                        r = 0.54  # Water-air reflection
                        p = 0.03  # Internal Fresnel reflectance
                        pp = 0.54  # Water-air Fresnel reflectance
                        n = 1.34  # Refractive index of water
                        Q = 1.0  # Downwelled irradiance / upwelled radiance
                        A = ((1 - p) * (1 - pp)) / (n * n)
                        srband = reflimg['SWIR1'].Read()
                        nodatainds = srband == reflimg['SWIR1'].NoDataValue()
                        for band in bands:
                            bimg = reflimg[band].Read()
                            diffimg = bimg - srband
                            diffimg = diffimg / (A + r * Q * diffimg)
                            diffimg[bimg == reflimg[band].NoDataValue()] = imgout[band].NoDataValue()
                            diffimg[nodatainds] = imgout[band].NoDataValue()
                            imgout[band].Write(diffimg)
                    elif val[0] == 'wtemp':
                        imgout = gippy.GeoImage(fname, img, gippy.GDT_Int16, len(lwbands))
                        [
                            imgout.SetBandName(lwbands[i], i + 1)
                            for i in range(0, imgout.NumBands())
                        ]
                        imgout.SetNoData(-32768)
                        imgout.SetGain(0.1)
                        tmpimg = gippy.GeoImage(img)
                        for col in lwbands:
                            band = tmpimg[col]
                            m = meta[col]
                            lat = self.metadata['geometry']['lat']
                            lon = self.metadata['geometry']['lon']
                            dt = self.metadata['datetime']
                            atmos = MODTRAN(m['bandnum'], m['wvlen1'], m['wvlen2'],
                                            dt, lat, lon, True)
                            e = 0.95  # assumed water emissivity
                            band = (tmpimg[col] - (atmos.output[1] + (1 - e) * atmos.output[2])) / (
                                atmos.output[0] * e)
                            band = (
                                ((band.pow(-1)) * meta[col]['K1'] + 1).log().pow(-1)) * meta[col]['K2'] - 273.15
                            band.Process(imgout[col])
                    elif val[0] == 'bqa':
                        if 'LC8' not in self.sensor_set:
                            continue
                        imgout = gippy.GeoImage(fname, img, gippy.GDT_Int16, 7)
                        qaimg = self._readqa()
                        qadata = qaimg.Read()
                        notfilled = ~binmask(qadata, 1)
                        notdropped = ~binmask(qadata, 2)
                        notterrain = ~binmask(qadata, 3)
                        notcirrus = ~binmask(qadata, 14) & binmask(qadata, 13)
                        notcloud = ~binmask(qadata, 16) & binmask(qadata, 15)
                        allgood = notfilled * notdropped * notterrain * notcirrus * notcloud
                        imgout[0].Write(allgood.astype('int16'))
                        imgout[1].Write(notfilled.astype('int16'))
                        imgout[2].Write(notdropped.astype('int16'))
                        imgout[3].Write(notterrain.astype('int16'))
                        # FIXME: `notsnow` is never assigned anywhere in this
                        # method — this line raises NameError at runtime.
                        # Sibling masks suggest it should be computed from the
                        # QA snow/ice confidence bits (e.g.
                        # ~binmask(qadata, 12) & binmask(qadata, 11)) and
                        # likely folded into `allgood` — confirm against the
                        # LC8 pre-collection QA bit designations.
                        imgout[4].Write(notsnow.astype('int16'))
                        imgout[5].Write(notcirrus.astype('int16'))
                        imgout[6].Write(notcloud.astype('int16'))
                    elif val[0] == 'bqashadow':
                        if 'LC8' not in self.sensor_set:
                            continue
                        imgout = gippy.GeoImage(fname, img, gippy.GDT_UInt16, 1)
                        imgout[0].SetNoData(0)
                        qaimg = self._readqa()
                        qadata = qaimg.Read()
                        fill = binmask(qadata, 1)
                        dropped = binmask(qadata, 2)
                        terrain = binmask(qadata, 3)
                        cirrus = binmask(qadata, 14)
                        othercloud = binmask(qadata, 16)
                        # cloud mask value 1 = cloud/cirrus, 2+ = bad data
                        cloud = (cirrus + othercloud) + 2 * (fill + dropped + terrain)
                        abfn = fname + '-intermediate'
                        abimg = gippy.GeoImage(abfn, img, gippy.GDT_UInt16, 1)
                        abimg[0].SetNoData(2)
                        abimg[0].Write(cloud.astype(numpy.uint16))
                        abimg.Process()
                        abimg = None
                        abimg = gippy.GeoImage(abfn + '.tif')
                        s_azim = self.metadata['geometry']['solarazimuth']
                        s_elev = 90 - self.metadata['geometry']['solarzenith']
                        try:
                            erosion = int(val[1]) if len(val) > 1 else 5
                            dilation = int(val[2]) if len(val) > 2 else 10
                            cloudheight = int(val[3]) if len(val) > 3 else 4000
                        except:
                            erosion = 5
                            dilation = 10
                            cloudheight = 4000
                        imgout = AddShadowMask(abimg, imgout, 0, s_elev, s_azim,
                                               erosion, dilation, cloudheight,
                                               {'notes': 'dev-version'})
                        imgout.Process()
                        abimg = None
                        os.remove(abfn + '.tif')

                    fname = imgout.Filename()
                    imgout.SetMeta(md)
                    imgout = None
                    self.AddFile(sensor, key, fname)
                    VerboseOut(
                        ' -> %s: processed in %s' %
                        (os.path.basename(fname), datetime.now() - start), 1)
                except Exception, e:
                    VerboseOut(
                        'Error creating product %s for %s: %s' %
                        (key, basename(self.assets['DN'].filename), e), 2)
                    VerboseOut(traceback.format_exc(), 3)
def process(self, products=None, overwrite=False, **kwargs):
    """ Make sure all products have been processed """
    products = super(landsatData, self).process(products, overwrite, **kwargs)
    if len(products) == 0:
        return
    start = datetime.now()

    # All requested products must derive from a single asset type
    assets = set()
    for key, val in products.requested.items():
        assets.update(self._products[val[0]]['assets'])
    if len(assets) != 1:
        raise Exception('This driver does not support creation of products from different Assets at the same time')
    asset = list(assets)[0]

    # TODO: De-hack this
    # Better approach, but needs some thought, is to loop over assets
    # Ian, you are right. I just don't have enough time to do it.
    if asset == 'SR':
        # Surface-reflectance tarball: map member names to /vsitar paths
        datafiles = self.assets['SR'].datafiles()
        imgpaths = dict()
        for datafile in datafiles:
            key = datafile.partition('_')[2].split('.')[0]
            path = os.path.join('/vsitar/' + self.assets['SR'].filename, datafile)
            imgpaths[key] = path
        # print imgpaths
        bname = os.path.join(self.path, self.basename)
        for key, val in products.requested.items():
            if val[0] == "ndvi8sr":
                sensor = 'LC8SR'
                fname = '%s_%s_%s' % (bname, sensor, key)
                img = gippy.GeoImage([imgpaths['sr_band4'], imgpaths['sr_band5']])
                missing = float(img[0].NoDataValue())
                red = img[0].Read().astype('float32')
                nir = img[1].Read().astype('float32')
                # valid where both bands have data and NDVI denominator is nonzero
                wvalid = numpy.where((red != missing) & (nir != missing) & (red + nir != 0.0))
                red[wvalid] *= 1.E-4
                nir[wvalid] *= 1.E-4
                # TODO: change this so that these pixels become missing
                red[(red != missing) & (red < 0.0)] = 0.0
                red[red > 1.0] = 1.0
                nir[(nir != missing) & (nir < 0.0)] = 0.0
                nir[nir > 1.0] = 1.0
                ndvi = missing + numpy.zeros_like(red)
                ndvi[wvalid] = (nir[wvalid] - red[wvalid])/(nir[wvalid] + red[wvalid])
                # set_trace()
                VerboseOut("writing " + fname, 2)
                imgout = gippy.GeoImage(fname, img, gippy.GDT_Float32, 1)
                imgout.SetNoData(-9999.)
                imgout.SetOffset(0.0)
                imgout.SetGain(1.0)
                imgout.SetBandName('NDVI', 1)
                imgout[0].Write(ndvi)
            if val[0] == "landmask":
                sensor = 'LC8SR'
                fname = '%s_%s_%s' % (bname, sensor, key)
                img = gippy.GeoImage([imgpaths['cfmask'], imgpaths['cfmask_conf']])
                cfmask = img[0].Read()
                # array([ 0, 1, 2, 3, 4, 255], dtype=uint8)
                # 0 means clear! but I want 1 to mean clear
                cfmask[cfmask > 0] = 2
                cfmask[cfmask == 0] = 1
                cfmask[cfmask == 2] = 0
                VerboseOut("writing " + fname, 2)
                imgout = gippy.GeoImage(fname, img, gippy.GDT_Byte, 1)
                imgout.SetBandName('Land mask', 1)
                imgout[0].Write(cfmask)
    elif asset == 'DN':
        # This block contains everything that existed in the first generation Landsat driver
        # Add the sensor for this date to the basename
        self.basename = self.basename + '_' + self.sensors[asset]
        # Read the assets
        try:
            img = self._readraw()
        except Exception, e:  # Python 2 except syntax
            VerboseOut(traceback.format_exc(), 5)
            raise Exception('Error reading %s: %s' % (basename(self.assets['DN'].filename), e))

        meta = self.assets['DN'].meta
        visbands = self.assets['DN'].visbands
        lwbands = self.assets['DN'].lwbands
        md = self.meta_dict()

        # running atmosphere if any products require it
        toa = True
        for val in products.requested.values():
            toa = toa and (self._products[val[0]].get('toa', False) or 'toa' in val)
        if not toa:
            start = datetime.now()
            if not settings().REPOS[self.Repository.name.lower()]['6S']:
                raise Exception('6S is required for atmospheric correction')
            try:
                wvlens = [(meta[b]['wvlen1'], meta[b]['wvlen2']) for b in visbands]
                geo = self.metadata['geometry']
                atm6s = SIXS(visbands, wvlens, geo, self.metadata['datetime'], sensor=self.sensor_set[0])
                md["AOD Source"] = str(atm6s.aod[0])
                md["AOD Value"] = str(atm6s.aod[1])
            except Exception, e:
                VerboseOut(traceback.format_exc(), 4)
                raise Exception('Problem running 6S atmospheric model: %s' % e)
m = meta[col] lat = self.metadata['geometry']['lat'] lon = self.metadata['geometry']['lon'] dt = self.metadata['datetime'] atmos = MODTRAN(m['bandnum'], m['wvlen1'], m['wvlen2'], dt, lat, lon, True) e = 0.95 band = (tmpimg[col] - (atmos.output[1] + (1 - e) * atmos.output[2])) / (atmos.output[0] * e) band = (((band.pow(-1)) * meta[col]['K1'] + 1).log().pow(-1)) * meta[col]['K2'] - 273.15 band.Process(imgout[col]) fname = imgout.Filename() imgout.SetMeta(md) imgout = None self.AddFile(sensor, key, fname) VerboseOut(' -> %s: processed in %s' % (os.path.basename(fname), datetime.now() - start), 1) except Exception, e: VerboseOut('Error creating product %s for %s: %s' % (key, basename(self.assets[''].filename), e), 2) VerboseOut(traceback.format_exc(), 3) # Process Indices indices0 = dict(groups['Index'], **groups['Tillage']) if len(indices0) > 0: start = datetime.now() indices = {} indices_toa = {} for key, val in indices0.items(): if 'toa' in val: indices_toa[key] = val else: indices[key] = val # Run TOA if len(indices_toa) > 0:
def main():
    """CLI entry point: mask project products with a static file mask
    (--filemask) and/or sibling mask products (--pmask)."""
    title = Colors.BOLD + 'GIPS Project Masking (v%s)' % __version__ + Colors.OFF
    parser0 = GIPSParser(datasources=False, description=title)
    parser0.add_default_parser()
    parser0.add_projdir_parser()
    group = parser0.add_argument_group('masking options')
    group.add_argument('--filemask', help='Mask all files with this static mask', default=None)
    group.add_argument('--pmask', help='Mask files with this corresponding product', nargs='*', default=[])
    h = 'Write mask to original image instead of creating new image'
    group.add_argument('--original', help=h, default=False, action='store_true')
    h = 'Overwrite existing files when creating new'
    group.add_argument('--overwrite', help=h, default=False, action='store_true')
    h = 'Suffix to apply to masked file (not compatible with --original)'
    group.add_argument('--suffix', help=h, default='-masked')
    #parser0.add_argument('-i', '--invert', help='Invert mask (0->1, 1->0)', default=False, action='store_true')
    #parser0.add_argument('--value', help='Mask == val', default=1)
    args = parser0.parse_args()
    # TODO - check that at least 1 of filemask or pmask is supplied
    try:
        VerboseOut(title)
        for projdir in args.projdir:
            if args.filemask is not None:
                mask_file = gippy.GeoImage(args.filemask)
            inv = ProjectInventory(projdir, args.products)
            for date in inv.dates:
                VerboseOut('Masking files from %s' % date)
                # with no explicit masks requested, use every available mask
                if args.filemask is None and args.pmask == []:
                    available_masks = inv[date].masks()
                else:
                    available_masks = inv[date].masks(args.pmask)
                for p in inv.products(date):
                    # don't mask any masks
                    if p in available_masks:
                        continue
                    meta = ''
                    update = True if args.original else False
                    img = inv[date].open(p, update=update)
                    if args.filemask is not None:
                        img.AddMask(mask_file[0])
                        meta = basename(args.filemask) + ' '
                    for mask in available_masks:
                        img.AddMask(inv[date].open(mask)[0])
                        meta = meta + basename(inv[date][mask]) + ' '
                    # meta doubles as "did we add any mask?" flag
                    if meta != '':
                        if args.original:
                            VerboseOut(' %s' % (img.Basename()), 2)
                            img.Process()
                            img.SetMeta('MASKS', meta)
                        else:
                            fout = os.path.splitext(img.Filename())[0] + args.suffix + '.tif'
                            if not os.path.exists(fout) or args.overwrite:
                                VerboseOut(' %s -> %s' % (img.Basename(), basename(fout)), 2)
                                imgout = img.Process(fout)
                                imgout.SetMeta('MASKS', meta)
                                imgout = None
                    # release GeoImage references so files are flushed/closed
                    # NOTE(review): nesting of the two lines below was
                    # reconstructed from collapsed source — verify placement
                    img = None
            mask_file = None
    except Exception, e:  # Python 2 except syntax
        import traceback
        VerboseOut(traceback.format_exc(), 4)
        print 'Masking error: %s' % e
def process(self, products=None, overwrite=False, **kwargs):
    """ Make sure all products have been processed """
    products = super(landsatData, self).process(products, overwrite, **kwargs)
    if len(products) == 0:
        return
    start = datetime.now()

    # All requested products must derive from a single asset type
    assets = set()
    for key, val in products.requested.items():
        assets.update(self._products[val[0]]['assets'])
    if len(assets) != 1:
        raise Exception(
            'This driver does not support creation of products from different Assets at the same time'
        )
    asset = list(assets)[0]

    # TODO: De-hack this
    # Better approach, but needs some thought, is to loop over assets
    # Ian, you are right. I just don't have enough time to do it.
    if asset == 'SR':
        # Surface-reflectance tarball: map member names to /vsitar paths
        datafiles = self.assets['SR'].datafiles()
        imgpaths = dict()
        for datafile in datafiles:
            key = datafile.partition('_')[2].split('.')[0]
            path = os.path.join('/vsitar/' + self.assets['SR'].filename, datafile)
            imgpaths[key] = path
        # print imgpaths
        bname = os.path.join(self.path, self.basename)
        for key, val in products.requested.items():
            if val[0] == "ndvi8sr":
                sensor = 'LC8SR'
                fname = '%s_%s_%s' % (bname, sensor, key)
                img = gippy.GeoImage(
                    [imgpaths['sr_band4'], imgpaths['sr_band5']])
                missing = float(img[0].NoDataValue())
                red = img[0].Read().astype('float32')
                nir = img[1].Read().astype('float32')
                # valid where both bands have data and NDVI denominator is nonzero
                wvalid = numpy.where((red != missing) & (nir != missing)
                                     & (red + nir != 0.0))
                red[wvalid] *= 1.E-4
                nir[wvalid] *= 1.E-4
                # TODO: change this so that these pixels become missing
                red[(red != missing) & (red < 0.0)] = 0.0
                red[red > 1.0] = 1.0
                nir[(nir != missing) & (nir < 0.0)] = 0.0
                nir[nir > 1.0] = 1.0
                ndvi = missing + numpy.zeros_like(red)
                ndvi[wvalid] = (nir[wvalid] - red[wvalid]) / (nir[wvalid] + red[wvalid])
                # set_trace()
                VerboseOut("writing " + fname, 2)
                imgout = gippy.GeoImage(fname, img, gippy.GDT_Float32, 1)
                imgout.SetNoData(-9999.)
                imgout.SetOffset(0.0)
                imgout.SetGain(1.0)
                imgout.SetBandName('NDVI', 1)
                imgout[0].Write(ndvi)
            if val[0] == "landmask":
                sensor = 'LC8SR'
                fname = '%s_%s_%s' % (bname, sensor, key)
                img = gippy.GeoImage(
                    [imgpaths['cfmask'], imgpaths['cfmask_conf']])
                cfmask = img[0].Read()
                # array([ 0, 1, 2, 3, 4, 255], dtype=uint8)
                # 0 means clear! but I want 1 to mean clear
                cfmask[cfmask > 0] = 2
                cfmask[cfmask == 0] = 1
                cfmask[cfmask == 2] = 0
                VerboseOut("writing " + fname, 2)
                imgout = gippy.GeoImage(fname, img, gippy.GDT_Byte, 1)
                imgout.SetBandName('Land mask', 1)
                imgout[0].Write(cfmask)
    elif asset == 'DN':
        # This block contains everything that existed in the first generation Landsat driver
        # Add the sensor for this date to the basename
        self.basename = self.basename + '_' + self.sensors[asset]
        # Read the assets
        try:
            img = self._readraw()
        except Exception, e:  # Python 2 except syntax
            VerboseOut(traceback.format_exc(), 5)
            raise Exception('Error reading %s: %s' %
                            (basename(self.assets['DN'].filename), e))

        meta = self.assets['DN'].meta
        visbands = self.assets['DN'].visbands
        lwbands = self.assets['DN'].lwbands
        md = self.meta_dict()

        # running atmosphere if any products require it
        toa = True
        for val in products.requested.values():
            toa = toa and (self._products[val[0]].get('toa', False) or 'toa' in val)
        if not toa:
            start = datetime.now()
            if not settings().REPOS[self.Repository.name.lower()]['6S']:
                raise Exception(
                    '6S is required for atmospheric correction')
            try:
                wvlens = [(meta[b]['wvlen1'], meta[b]['wvlen2']) for b in visbands]
                geo = self.metadata['geometry']
                atm6s = SIXS(visbands, wvlens, geo, self.metadata['datetime'],
                             sensor=self.sensor_set[0])
                md["AOD Source"] = str(atm6s.aod[0])
                md["AOD Value"] = str(atm6s.aod[1])
            except Exception, e:
                VerboseOut(traceback.format_exc(), 4)
                raise Exception(
                    'Problem running 6S atmospheric model: %s' % e)
def main():
    """CLI entry point for GIPS project masking.

    Parses masking options, then for each project directory applies a static
    file mask (--filemask) and/or per-date product masks (--pmask) to every
    non-mask product in the project inventory.  Masked output either
    overwrites the original image (--original) or is written alongside it
    with a suffix (--suffix).  Side effects only; returns None.
    """
    title = Colors.BOLD + 'GIPS Project Masking (v%s)' % __version__ + Colors.OFF
    parser = GIPSParser(datasources=False, description=title)
    parser.add_projdir_parser()
    group = parser.add_argument_group('masking options')
    group.add_argument('--filemask', help='Mask all files with this static mask', default=None)
    group.add_argument('--pmask', help='Mask files with this corresponding product', nargs='*', default=[])
    group.add_argument('--invert', help='Invert the masks from corresponding products', nargs='*', default=[])
    h = 'Write mask to original image instead of creating new image'
    group.add_argument('--original', help=h, default=False, action='store_true')
    h = 'Overwrite existing files when creating new'
    group.add_argument('--overwrite', help=h, default=False, action='store_true')
    h = 'Suffix to apply to masked file (not compatible with --original)'
    group.add_argument('--suffix', help=h, default='-masked')
    args = parser.parse_args()

    # TODO - check that at least 1 of filemask or pmask is supplied
    utils.gips_script_setup(None, args.stop_on_error)

    with utils.error_handler('Masking error'):
        VerboseOut(title)
        for projdir in args.projdir:
            if args.filemask is not None:
                mask_file = gippy.GeoImage(args.filemask)
            inv = ProjectInventory(projdir, args.products)
            for date in inv.dates:
                VerboseOut('Masking files from %s' % date)
                # With no explicit masks requested, fall back to every mask
                # product available for this date.
                if args.filemask is None and not args.pmask:
                    available_masks = inv[date].masks()
                else:
                    available_masks = inv[date].masks(args.pmask)
                for p in inv.products(date):
                    # don't mask any masks
                    if p in available_masks:
                        continue
                    # Collect mask names for the MASKS metadata tag; build as
                    # a list to avoid quadratic string concatenation.
                    meta_parts = []
                    # open for update only when masking in place
                    img = inv[date].open(p, update=args.original)
                    if args.filemask is not None:
                        img.AddMask(mask_file[0])
                        meta_parts.append(basename(args.filemask))
                    for mask in available_masks:
                        mask_img = inv[date].open(mask)[0]
                        prefix = ''
                        if mask in args.invert:
                            # Invert the mask: NaN nodata so BXOR flips only
                            # valid pixels (0 <-> 1).
                            mask_img.SetNoData(utils.np.nan)
                            mask_img = mask_img.BXOR(1)
                            prefix = 'inverted-'
                        img.AddMask(mask_img)
                        meta_parts.append(prefix + basename(inv[date][mask]))
                    # trailing space preserved for compatibility with the
                    # original metadata format
                    meta = ' '.join(meta_parts) + ' ' if meta_parts else ''
                    if meta:
                        if args.original:
                            VerboseOut(' %s' % (img.Basename()), 2)
                            img.Process()
                            img.SetMeta('MASKS', meta)
                        else:
                            fout = os.path.splitext(
                                img.Filename())[0] + args.suffix + '.tif'
                            if not os.path.exists(fout) or args.overwrite:
                                VerboseOut(
                                    ' %s -> %s' % (img.Basename(), basename(fout)), 2)
                                imgout = img.Process(fout)
                                imgout.SetMeta('MASKS', meta)
                                # release handle so the file is flushed/closed
                                imgout = None
                    img = None
            mask_file = None
    utils.gips_exit()
(1 - e) * atmos.output[2])) / ( atmos.output[0] * e) band = (((band.pow(-1)) * meta[col]['K1'] + 1).log().pow(-1)) * meta[col]['K2'] - 273.15 band.Process(imgout[col]) fname = imgout.Filename() imgout.SetMeta(md) imgout = None self.AddFile(sensor, key, fname) VerboseOut( ' -> %s: processed in %s' % (os.path.basename(fname), datetime.now() - start), 1) except Exception, e: VerboseOut( 'Error creating product %s for %s: %s' % (key, basename(self.assets[''].filename), e), 2) VerboseOut(traceback.format_exc(), 3) # Process Indices indices0 = dict(groups['Index'], **groups['Tillage']) if len(indices0) > 0: start = datetime.now() indices = {} indices_toa = {} for key, val in indices0.items(): if 'toa' in val: indices_toa[key] = val else: indices[key] = val # Run TOA if len(indices_toa) > 0: