def create_cog(image_locations, scene, same_path=False):
    """Assemble a Cloud-Optimized GeoTIFF for a scene and upload it.

    Args:
        image_locations (List[(uri, filename)]): Used to fetch source
            imagery for the scene for processing
        scene (Scene): Scene to create COG from
        same_path (boolean): Output to the same path that it was
            downloaded from

    Returns:
        Scene: The mutated scene. Must call update() on it to be
        reflected on the API

    Raises:
        Exception: Any exceptions here are unrecoverable.
    """
    with get_tempdir() as workdir:
        local_tifs = [os.path.join(workdir, name)
                      for _, name in image_locations]
        cog.fetch_imagery(image_locations, workdir)
        warped = cog.warp_tifs(local_tifs, workdir)
        merged = cog.merge_tifs(warped, workdir)
        cog.add_overviews(merged)
        cog_path = cog.convert_to_cog(merged, workdir)
        if not same_path:
            result = upload_tif(cog_path, scene)
        else:
            # Re-upload over the scene's original user-uploads location;
            # the quoted form mirrors how the owner appears in URLs.
            tif_name = '{}_COG.tif'.format(scene.id)
            result = upload_tif(
                cog_path, scene,
                os.path.join('user-uploads', scene.owner, tif_name),
                os.path.join('user-uploads',
                             urllib.quote_plus(scene.owner), tif_name))
        os.remove(cog_path)
        return result
def generate_scenes(self):
    """Create a Scene and associated Image for each GeoTiff in self.s3_path

    Each file is downloaded from S3 into a per-iteration temp directory,
    converted to a Cloud-Optimized GeoTIFF, uploaded, and wrapped in a
    Scene whose single Image points at the uploaded COG.

    Returns:
        Generator of Scenes
    """
    s3 = boto3.resource('s3')
    for infile in self.files:
        # NOTE(review): an earlier comment here described try/finally
        # temp-file cleanup; the body now relies on the get_tempdir()
        # context manager below instead.
        bucket_name, key = s3_bucket_and_key_from_url(infile)
        filename = os.path.basename(key)
        logger.info('Downloading %s => %s', infile, filename)
        bucket = s3.Bucket(bucket_name)
        with get_tempdir() as tempdir:
            tmp_fname = os.path.join(tempdir, filename)
            bucket.download_file(key, tmp_fname)
            cog.add_overviews(tmp_fname)
            cog_path = cog.convert_to_cog(tmp_fname, tempdir)
            # Scene name is the filename without its extension.
            scene = self.create_geotiff_scene(tmp_fname, os.path.splitext(filename)[0])
            # upload_tifs returns the uploaded location(s) for the COG.
            scene.ingestLocation = upload_tifs([cog_path], self.owner, scene.id)[0]
            # The stored ingestLocation is URL-encoded; unquote it for the
            # image record.
            images = [self.create_geotiff_image(
                tmp_fname, urllib.unquote(scene.ingestLocation), scene, cog_path
            )]
            scene.thumbnails = []
            scene.images = images
            # Yield inside the with-block: the temp directory stays alive
            # while the consumer processes this scene.
            yield scene
def generate_scenes(self):
    """Create a Scene and associated Image for each GeoTiff in self.s3_path

    Returns:
        Generator of Scenes
    """
    s3 = boto3.resource('s3')
    for source_url in self.files:
        bucket_name, key = s3_bucket_and_key_from_url(source_url)
        base_name = os.path.basename(key)
        logger.info('Downloading %s => %s', source_url, base_name)
        source_bucket = s3.Bucket(bucket_name)
        # Stage everything in a scratch directory scoped to this file.
        with get_tempdir() as staging_dir:
            local_path = os.path.join(staging_dir, base_name)
            source_bucket.download_file(key, local_path)
            cog.add_overviews(local_path)
            cog_path = cog.convert_to_cog(local_path, staging_dir)
            scene_name = os.path.splitext(base_name)[0]
            scene = self.create_geotiff_scene(local_path, scene_name)
            uploaded_locations = upload_tifs([cog_path], self.owner, scene.id)
            scene.ingestLocation = uploaded_locations[0]
            # ingestLocation is URL-encoded; decode it for the image record.
            scene.thumbnails = []
            scene.images = [self.create_geotiff_image(
                local_path, urllib.unquote(scene.ingestLocation), scene,
                cog_path)]
            yield scene
def generate_scenes(self):
    """Create a Scene for every HDF URL attached to this upload.

    Returns:
        List of Scenes, one per entry in self.hdf_urls
    """
    results = []
    for url in self.hdf_urls:
        # Each scene is built inside its own scratch directory, which is
        # cleaned up as soon as the scene object exists.
        with get_tempdir() as scratch:
            results.append(
                create_scene(url, scratch, self.owner, self.datasource))
    return results
def create_cog(image_locations, scene):
    """Build a COG from the scene's source imagery, upload it, and
    persist the updated scene via its update() call.

    Args:
        image_locations (List[(uri, filename)]): source imagery to fetch
        scene (Scene): scene to attach the generated COG to
    """
    with get_tempdir() as workdir:
        targets = [os.path.join(workdir, name)
                   for _, name in image_locations]
        cog.fetch_imagery(image_locations, workdir)
        warped = cog.warp_tifs(targets, workdir)
        merged = cog.merge_tifs(warped, workdir)
        cog.add_overviews(merged)
        final_cog = cog.convert_to_cog(merged, workdir)
        # upload_tif returns the mutated scene; update() persists it.
        upload_tif(final_cog, scene).update()
def generate_scenes(self):
    """Create a Scene for each Landsat ID in this upload's file list.

    Returns:
        List of Scenes
    """
    # Sensor code (from the Landsat ID metadata) -> processing config.
    sensor_configs = {
        'M': MultiSpectralScannerConfig,
        'T': ThematicMapperConfig,
        'E': EnhancedThematicMapperConfig
    }
    results = []
    for landsat_id in self.upload.files:
        metadata = io.base_metadata_for_landsat_id(landsat_id)
        band_config = sensor_configs[metadata['sensor_id']]
        with io.get_tempdir() as scratch:
            results.append(
                create_scene(self.upload.owner, scratch, landsat_id,
                             band_config, self.upload.datasource))
    return results
def generate_scenes(self):
    """Copy each Planet asset to S3 and yield a Scene for it.

    Yields:
        Scene: one scene per distinct id in self.planet_ids (duplicates
        are collapsed via set()).
    """
    # NOTE(review): an earlier comment described toggling the scene's
    # ingest status based on project association, but no such logic exists
    # in this body — presumably create_planet_scene handles it; confirm.
    planet_key = self.client.auth.value  # loop-invariant; fetch once
    for planet_id in set(self.planet_ids):
        logger.info('Preparing to copy planet asset to s3: %s', planet_id)
        with get_tempdir() as prefix:
            # Second element (local tif path) was never used; discard it.
            planet_feature, _ = self.copy_asset_to_s3(prefix, planet_id)
            planet_scene = create_planet_scene(
                planet_feature,
                self.datasource,
                planet_key,
                self.visibility,
                self.tags,
                self.owner
            )
            # Yield inside the with-block so the staging directory lives
            # until the consumer resumes us.
            yield planet_scene
def generate_scenes(self):
    """Yield a Scene for every unique Planet asset id on this upload.

    Each asset is staged in a temporary directory, copied into S3, and
    wrapped in a Scene via create_planet_scene.
    """
    for asset_id in set(self.planet_ids):
        logger.info('Preparing to copy planet asset to s3: %s', asset_id)
        with get_tempdir() as staging:
            feature, local_tif = self.copy_asset_to_s3(staging, asset_id)
            api_key = self.client.auth.value
            yield create_planet_scene(feature, self.datasource, api_key,
                                      self.visibility, self.tags,
                                      self.owner)
def reprocess_landsat_h(scene_id):
    """Re-run COG processing for a Landsat scene and overwrite its upload.

    Args:
        scene_id: id of the Scene whose imagery should be regenerated
    """
    logger.info('Fetching scene to reprocess with correct band order: %s',
                scene_id)
    scene = Scene.from_id(scene_id)
    # Destination filename is the last segment of the current ingest
    # location, so the reprocessed file replaces the existing object.
    destination = scene.ingestLocation.split('/')[-1]
    landsat_id = scene.name
    # Sensor code (from the Landsat ID metadata) -> processing config.
    sensor_to_config = {
        'M': MultiSpectralScannerConfig,
        'T': ThematicMapperConfig,
        'E': EnhancedThematicMapperConfig
    }
    metadata = io.base_metadata_for_landsat_id(landsat_id)
    band_config = sensor_to_config[metadata['sensor_id']]
    gcs_prefix = io.gcs_path_for_landsat_id(landsat_id)
    with io.get_tempdir() as workdir:
        local_path, _ = process_to_cog(workdir, gcs_prefix, landsat_id,
                                       band_config)
        upload_file(scene.owner, local_path, destination)
def create_cog(image_locations, scene, same_path=False):
    """Build a COG for the scene, upload it, and persist the scene.

    Args:
        image_locations (List[(uri, filename)]): source imagery to fetch
        scene (Scene): scene to attach the COG to
        same_path (boolean): when True, upload back to the scene's
            original user-uploads path instead of a new location
    """
    with get_tempdir() as staging:
        local_paths = [os.path.join(staging, fname)
                       for _, fname in image_locations]
        cog.fetch_imagery(image_locations, staging)
        merged = cog.merge_tifs(cog.warp_tifs(local_paths, staging),
                                staging)
        cog.add_overviews(merged)
        cog_path = cog.convert_to_cog(merged, staging)
        if same_path:
            tif_name = '{}_COG.tif'.format(scene.id)
            raw_dst = os.path.join('user-uploads', scene.owner, tif_name)
            quoted_dst = os.path.join(
                'user-uploads', urllib.quote_plus(scene.owner), tif_name)
            updated = upload_tif(cog_path, scene, raw_dst, quoted_dst)
        else:
            updated = upload_tif(cog_path, scene)
        updated.update()
        os.remove(cog_path)
def create_cog(image_locations, scene, same_path=False):
    """Create a Cloud-Optimized GeoTIFF from the scene's imagery, upload
    it, and call update() on the resulting scene to persist it.

    Args:
        image_locations (List[(uri, filename)]): source imagery to fetch
        scene (Scene): scene the COG belongs to
        same_path (boolean): upload to the same path the imagery was
            downloaded from when True
    """
    with get_tempdir() as work:
        sources = [os.path.join(work, name) for _, name in image_locations]
        cog.fetch_imagery(image_locations, work)
        warped = cog.warp_tifs(sources, work)
        merged = cog.merge_tifs(warped, work)
        cog.add_overviews(merged)
        final_path = cog.convert_to_cog(merged, work)
        upload_args = [final_path, scene]
        if same_path:
            # Re-upload over the scene's original user-uploads location;
            # the quoted variant mirrors how the owner appears in URLs.
            upload_args.append(os.path.join(
                'user-uploads', scene.owner,
                '{}_COG.tif'.format(scene.id)))
            upload_args.append(os.path.join(
                'user-uploads', urllib.quote_plus(scene.owner),
                '{}_COG.tif'.format(scene.id)))
        refreshed = upload_tif(*upload_args)
        refreshed.update()
        os.remove(final_path)