def process(self, scene_path, aois, out_path=None, distance=100, ext='*_reflectance.tif'):
    """ Extract buffered AoI sub-images from exported reflectance geotiffs """

    aoi_paths = []

    # get exported geotiff list
    files = getFileList(scene_path, ext)
    for f in files:

        # open image
        ds = gdal.Open(f)
        if ds is not None:

            # get aoi to image transform
            extent = self.getExtent(ds)
            coord_tx = self.getCoordinateTransform(ds)

            # for each aoi
            for aoi in aois:

                # get buffered bounding box coordinates
                bbox = self.getBoundingBox(aoi['bbox'], coord_tx['aoi_image'], distance=distance)
                if self.overlapsScene(extent, bbox):

                    # create aoi sub-path
                    aoi_path = os.path.join(scene_path, aoi['name'] + '/')
                    if not os.path.exists(aoi_path):
                        os.makedirs(aoi_path, 0o755)

                    # generate aoi sub-image aligned with bbox
                    aoi_pathname = os.path.join(aoi_path, os.path.basename(f))
                    print('Creating AoI image: {}'.format(aoi_pathname))

                    # reproject bbox to local utm - setup warp options
                    bbox = self.getBoundingBox(aoi['bbox'], coord_tx['aoi_local'], distance=distance)
                    options = '-t_srs EPSG:{} -tr 15 -15 -te {} {} {} {}'.format(
                        self._epsg, bbox['ulx'], bbox['lry'], bbox['lrx'], bbox['uly'])

                    gdal.Warp(aoi_pathname, ds, options=options)

                    # record aoi image location
                    aoi_paths.append(aoi_path)

    return list(set(aoi_paths))
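# --------------------------------------------------------------------------
# NOTE: illustrative sketch only - getFileList() is used throughout this
# section but not defined here. A minimal recursive implementation compatible
# with the glob-style patterns used above ('*_reflectance.tif', 'S2*.zip',
# '*_B*_*m.tif') might look like the following; the actual helper may instead
# use substring matching (e.g. for 'manifest.safe' or '_TCI.jp2').
# --------------------------------------------------------------------------
import fnmatch
import os

def getFileList(path, match):
    """ Recursively collect pathnames under path matching glob pattern """

    file_list = []
    for root, dirs, files in os.walk(path):
        for f in fnmatch.filter(files, match):
            file_list.append(os.path.join(root, f))

    return file_list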
def process(self, scenes, bounds, out_path):
    """ Download Sentinel-2 scene files intersecting bounds from GCP public bucket """

    # get client connection to s2 bucket
    client = storage.Client()
    bucket = client.get_bucket("gcp-public-data-sentinel-2")

    for idx, row in scenes.iterrows():

        # next iteration if scene footprint does not intersect requested bounds
        if row['east_lon'] < bounds[0] or row['west_lon'] > bounds[2] or \
                row['south_lat'] > bounds[1] or row['north_lat'] < bounds[3]:
            continue

        # evaluate number of successfully downloaded scenes
        files = getFileList(out_path, 'manifest.safe')
        if len(files) > self._max_scenes:

            # maximum scenes downloaded
            print('maximum scenes {} downloaded: {}'.format(self._max_scenes, out_path))
            break

        # construct scene path
        dt = datetime.strptime(row['sensing_time'], '%Y-%m-%dT%H:%M:%S.%fZ')
        scene_path = os.path.join(out_path, dt.strftime('%Y%m%d_%H%M%S'), row['mgrs_tile'])

        # get filelist in bucket prefix subfolder
        prefix = row['base_url'][len('gs://gcp-public-data-sentinel-2/'):]
        blobs = bucket.list_blobs(prefix=prefix, delimiter=None)
        for blob in blobs:

            # apply match to bucket files
            d = self.blob2dict(blob)
            if any(f in d['name'] for f in self._prefixes):

                if not os.path.exists(scene_path):
                    os.makedirs(scene_path)

                print('downloading {} -> {}'.format(d['name'], scene_path))

                # download bucket file to local directory
                pathname = os.path.join(scene_path, os.path.basename(d['name']))
                if not os.path.exists(pathname):
                    with open(pathname, 'w+b') as z:
                        blob.download_to_file(z)

    return
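# --------------------------------------------------------------------------
# NOTE: illustrative sketch only - blob2dict() is not shown in this section.
# The download loop above only reads d['name'], so a minimal version could
# simply expose the blob name (and perhaps its size); the real method may
# record additional metadata from the google.cloud.storage blob.
# --------------------------------------------------------------------------
def blob2dict(self, blob):
    """ Convert google.cloud.storage blob into plain dictionary """

    return {'name': blob.name, 'size': blob.size}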
def getSceneList(args):
    """ Get list of Sentinel-2 scene zip files to process """

    # assume single scene - else collect list
    scenes = [args.scene]
    if args.batch:
        scenes = getFileList(args.scene, 'S2*.zip')

    return scenes
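# --------------------------------------------------------------------------
# NOTE: illustrative usage sketch - an argparse configuration consistent with
# the attributes read by getSceneList() (scene, batch). The argument names and
# help text here are assumptions; the actual command line interface may define
# additional options.
# --------------------------------------------------------------------------
import argparse

def parseArguments():
    """ Parse command line arguments expected by getSceneList() """

    parser = argparse.ArgumentParser(description='sentinel-2 scene processing')
    parser.add_argument('scene', help='scene zip pathname, or root directory in batch mode')
    parser.add_argument('--batch', action='store_true', help='process all S2*.zip files under scene path')

    return parser.parse_args()

# example: scenes = getSceneList(parseArguments())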
def getMosaicDataset(self, path, search):
    """ Load band geotiffs matching search pattern into mosaic dataset dictionary """

    # return package
    dataset = {'srs': None, 'channels': []}

    # load band data into list
    files = sorted(getFileList(path, search))
    for f in files:

        # get band index
        match = re.search('B[0-9]{2}', f)
        if match:

            # load geotiff
            ds = gdal.Open(f)
            if ds is not None:

                # get srs attributes
                if dataset['srs'] is None:
                    dataset['srs'] = {'geo': ds.GetGeoTransform(), 'prj': ds.GetProjection()}

                # create dictionary entry
                dataset['channels'].append({
                    'index': int(''.join(filter(str.isdigit, match.group(0)))),
                    'data': ds.GetRasterBand(1).ReadAsArray(),
                })

    return dataset
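# --------------------------------------------------------------------------
# NOTE: illustrative usage sketch - one way to consume the dictionary returned
# by getMosaicDataset(): stack the per-band arrays into a single
# (bands, rows, cols) numpy array ordered by band index. stackChannels() is a
# hypothetical helper and assumes all channels share the same dimensions.
# --------------------------------------------------------------------------
import numpy as np

def stackChannels(dataset):
    """ Stack mosaic dataset channels into band-ordered numpy array """

    channels = sorted(dataset['channels'], key=lambda c: c['index'])
    return np.stack([c['data'] for c in channels], axis=0)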
def process(self, scene_path, aois, out_path=None, distance=100):
    """ Extract buffered AoI sub-images from Sentinel-2 band geotiffs """

    aoi_paths = []

    # get exported geotiff list
    files = getFileList(scene_path, '*_B*_*m.tif')
    for f in files:

        # open image
        ds = gdal.Open(f)
        if ds is not None:

            # get aoi to image transform
            extent = self.getExtent(ds)
            coord_tx = self.getCoordinateTransform(ds)

            # for each aoi
            for aoi in aois:

                # get buffered bounding box coordinates
                bbox = self.getBoundingBox(aoi['bbox'], coord_tx['aoi_image'], distance=distance)
                if self.overlapsScene(extent, bbox):

                    # create aoi sub-path
                    aoi_path = os.path.join(scene_path, aoi['name'] + '/')
                    if not os.path.exists(aoi_path):
                        os.makedirs(aoi_path, 0o755)

                    # generate aoi sub-image aligned with bbox
                    aoi_pathname = os.path.join(aoi_path, os.path.basename(f))
                    print('Creating AoI image: {}'.format(aoi_pathname))

                    # reproject bbox to local utm and fix pixel resolution
                    bbox = self.getBoundingBox(aoi['bbox'], coord_tx['aoi_local'], distance=distance)
                    res_option = self.getResolution(os.path.basename(f))

                    # reproject bbox to local utm - setup warp options
                    options = '-t_srs EPSG:{} -tr {} -te {} {} {} {}'.format(
                        self._epsg, res_option, bbox['ulx'], bbox['lry'], bbox['lrx'], bbox['uly'])

                    gdal.Warp(aoi_pathname, ds, options=options)

                    # resample 20m resolution sub-image to 10m
                    if '20m' in aoi_pathname:

                        print('Creating resampled AoI image: {}'.format(aoi_pathname.replace('20m', '10m')))

                        # rerun gdalwarp at 10m resolution
                        options = '-t_srs EPSG:{} -tr 10 -10 -te {} {} {} {}'.format(
                            self._epsg, bbox['ulx'], bbox['lry'], bbox['lrx'], bbox['uly'])

                        gdal.Warp(aoi_pathname.replace('20m', '10m'), ds, options=options)

                    # record aoi image location
                    aoi_paths.append(aoi_path)

    return list(set(aoi_paths))
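# --------------------------------------------------------------------------
# NOTE: illustrative sketch only - getResolution() is not shown here. Given
# the '-tr {}' placeholder in the warp options above and the '-tr 15 -15' /
# '-tr 10 -10' style used elsewhere, it presumably returns an 'xres yres'
# string derived from the resolution token in Sentinel-2 band filenames
# (e.g. '..._B04_10m.tif' -> '10 -10'). The actual implementation may differ.
# --------------------------------------------------------------------------
import re

def getResolution(self, filename):
    """ Derive gdalwarp target resolution string from band filename """

    match = re.search('_([0-9]{2})m', filename)
    res = int(match.group(1)) if match else 10

    return '{} -{}'.format(res, res)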
def getImageChip(self, path, centroid, out_pathname, size=512, overwrite=False):
    """ Create fixed-size true colour image chip centred on location """

    # file not created or overwrite requested
    if not os.path.exists(out_pathname) or overwrite:

        # get scenes downloaded for datetime
        images = self.clipScenes(getFileList(path, '_TCI.jp2'), centroid, size)
        if len(images) > 0:

            # get scene srs
            ds = gdal.Open(images[0])
            coord_tx = self.getCoordinateTransform(ds)

            # transform latlon centroid coordinates to image srs
            x, y, z = coord_tx.TransformPoint(centroid[0], centroid[1])
            x = round(x)
            y = round(y)

            # compute chip bounding box in image srs
            distance = (size / 2) * self._resolution
            x0 = x - distance
            y0 = y - distance
            x1 = x + distance
            y1 = y + distance
            print(x0, y0, x1, y1)

            # combine images into in-memory mosaic clipped to chip bounds
            # (keyword arguments avoid whitespace-splitting the WKT srs string)
            warp_ds = gdal.Warp('',
                                images,
                                format='MEM',
                                dstSRS=ds.GetProjection(),
                                outputBounds=[x0, y0, x1, y1],
                                xRes=self._resolution,
                                yRes=self._resolution,
                                srcNodata=0)

            # validate goodness - percentage of non-black (unmasked) pixels
            goodness = (np.count_nonzero(warp_ds.GetRasterBand(1).ReadAsArray()) / (size * size)) * 100.0
            if goodness > 95.0:

                # write in-memory image to jpeg file
                if not os.path.exists(os.path.dirname(out_pathname)):
                    os.makedirs(os.path.dirname(out_pathname))

                gdal.Translate(out_pathname, warp_ds, options='-of JPEG')
                print('Generated chip: {} {}'.format(out_pathname, goodness))

            else:
                # failed qc check
                print('Chip failed QC: {} {}'.format(out_pathname, goodness))

            # housekeeping
            warp_ds = None
            ds = None

    return
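# --------------------------------------------------------------------------
# NOTE: illustrative usage sketch - generating a 512x512 true colour chip
# centred on a location. The instance name, path layout and centroid values
# below are hypothetical; the centroid ordering follows the TransformPoint()
# call above (centroid[0], centroid[1]).
# --------------------------------------------------------------------------
# chipper = ...  # instance of the class defining getImageChip()
# chipper.getImageChip('/data/test/ALB0003/20170820_093607',
#                      (42.5, 19.9),
#                      '/data/chips/ALB0003_20170820.jpg',
#                      size=512,
#                      overwrite=False)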