def create_thumbnails(scene_id, landsat_id):
    """Creates thumbnails based on scene_id

    Args:
        scene_id (str): id of the scene (e.g. LC81351172016273LGN00)
        landsat_id (str): Landsat id used to build the thumbnail URLs

    Returns:
        List[Thumbnail]
    """
    path = get_landsat_url(landsat_id)
    small_url = path + '_'.join([landsat_id, 'thumb', 'small']) + '.jpg'
    large_url = path + '_'.join([landsat_id, 'thumb', 'large']) + '.jpg'

    return [
        Thumbnail(organization, 228, 233, 'SMALL', small_url, sceneId=scene_id),
        Thumbnail(organization, 1143, 1168, 'LARGE', large_url, sceneId=scene_id)
    ]
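# Illustrative check (assumption, not from the source): get_landsat_url is
# presumed to return a prefix ending in '/', so the small thumbnail key built
# above becomes '<prefix><landsat_id>_thumb_small.jpg'. The filename part can
# be verified on its own:
_example_landsat_id = 'LC81351172016273LGN00'  # example id from the docstring
assert '_'.join([_example_landsat_id, 'thumb', 'small']) + '.jpg' == \
    'LC81351172016273LGN00_thumb_small.jpg'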
def get_planet_thumbnail(organization_id, thumbnail_uri, planet_key, scene_id):
    """Download planet thumbnail, push to S3, create RF thumbnail

    Args:
        organization_id (str): organization requiring thumbnail
        thumbnail_uri (str): source thumbnail
        planet_key (str): planet API key for authentication
        scene_id (str): scene to attach thumbnail to

    Returns:
        Thumbnail
    """
    _, local_filepath = tempfile.mkstemp()
    params = {'api_key': planet_key}
    logger.info('Downloading thumbnail for Planet scene: %s', thumbnail_uri)
    response = requests.get(thumbnail_uri, params=params, stream=True)
    with open(local_filepath, 'wb') as filehandle:
        for chunk in response.iter_content(1024):
            filehandle.write(chunk)

    thumbnail_key = '{}.png'.format(scene_id)
    thumbnail_uri = '/thumbnails/{}.png'.format(scene_id)
    logger.info('Uploading thumbnails to S3: %s', thumbnail_key)
    s3_bucket_name = os.getenv('THUMBNAIL_BUCKET')
    s3_bucket = boto3.resource('s3').Bucket(s3_bucket_name)
    s3_bucket.upload_file(local_filepath, thumbnail_key,
                          {'ContentType': 'image/png'})
    delete_file(local_filepath)
    return Thumbnail(
        organization_id, 256, 256, 'SMALL', thumbnail_uri, sceneId=scene_id
    )
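# Hedged sketch of the delete_file helper used above; its real implementation
# is not included in this snippet, so this is an assumption: best-effort
# removal of the temporary file, ignoring the case where it is already gone.
import errno
import os


def delete_file(path):
    """Remove a file, ignoring a missing-file error (assumed behavior)."""
    try:
        os.remove(path)
    except OSError as exc:
        if exc.errno != errno.ENOENT:
            raise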
def create_thumbnails(scene_id, tile_path):
    """Creates thumbnail based on tile_path

    Args:
        scene_id (str): id of the scene the thumbnail belongs to
        tile_path (str): path to tile directory (e.g. tiles/54/M/XB/2016/9/25/0)

    Returns:
        List[Thumbnail]
    """
    key_path = '{tile_path}/preview.jpg'.format(tile_path=tile_path)
    thumbnail_url = base_http_path.format(key_path=key_path)
    if not s3_obj_exists(thumbnail_url):
        return []
    else:
        return [
            Thumbnail(organization, 343, 343, 'SQUARE', thumbnail_url,
                      sceneId=scene_id)
        ]
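# Hedged sketch of s3_obj_exists, which is referenced above but not defined in
# this snippet. Assumption: the preview object is publicly readable over HTTP,
# so a HEAD request is enough to test for its existence.
import requests


def s3_obj_exists(url):
    """Return True if a HEAD request to the URL succeeds (assumed check)."""
    response = requests.head(url)
    return response.status_code == 200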
def create_thumbnails(tif_path, scene_id, organization_id):
    """Creates thumbnails based on a geotiff path and scene

    Args:
        tif_path (str): path of local tiff file
        scene_id (str): uuid of the scene
        organization_id (str): organization that owns the thumbnails

    Returns:
        List[Thumbnail]
    """
    r_tif_path = os.path.join(
        os.path.dirname(tif_path),
        'resampled_{}'.format(os.path.basename(tif_path))
    )
    rp_tif_path = os.path.join(
        os.path.dirname(tif_path),
        'reprojected_{}'.format(os.path.basename(tif_path))
    )

    dim = get_geotiff_dimensions(tif_path)
    max_dim = float(max(get_geotiff_dimensions(tif_path)))

    size_large = 1024
    size_small = 256

    scale_large = size_large / max_dim
    scale_small = size_small / max_dim

    # Cast to int so GDAL receives whole pixel sizes
    dim_large = (int(dim[0] * scale_large), int(dim[1] * scale_large))
    dim_small = (int(dim[0] * scale_small), int(dim[1] * scale_small))

    # Create paths for each size thumb
    path_large = '{}-LARGE.png'.format(scene_id)
    path_small = '{}-SMALL.png'.format(scene_id)

    # Create urls for each size Thumbnail
    url_large = '/thumbnails/{}'.format(path_large)
    url_small = '/thumbnails/{}'.format(path_small)

    try_to_remove_files([r_tif_path, rp_tif_path, path_large, path_small])

    if os.path.isfile(tif_path):
        try:
            # Resample tif
            subprocess.check_call([
                'gdalwarp', tif_path, r_tif_path,
                '-ts', str(dim_large[0]), str(dim_large[1]),
                '-q'
            ])
            # Reproject tif
            subprocess.check_call([
                'gdalwarp', r_tif_path, rp_tif_path,
                '-t_srs', 'EPSG:3857',
                '-q'
            ])
            # Create a temporary env object
            mod_env = os.environ.copy()
            # Add variable to avoid sidecar files
            mod_env['GDAL_PAM_ENABLED'] = 'NO'
            # Convert tif to pngs (large)
            subprocess.check_call([
                'gdal_translate', rp_tif_path, path_large,
                '-b', '1', '-b', '2', '-b', '3',
                '-outsize', str(dim_large[0]), str(dim_large[1]),
                '-of', 'PNG',
                '-q'
            ], env=mod_env)
            # Convert tif to pngs (small)
            subprocess.check_call([
                'gdal_translate', rp_tif_path, path_small,
                '-b', '1', '-b', '2', '-b', '3',
                '-outsize', str(dim_small[0]), str(dim_small[1]),
                '-of', 'PNG',
                '-q',
            ], env=mod_env)
        except:
            # If any subprocess calls fail, we need to clean up before exiting
            try_to_remove_files([r_tif_path, rp_tif_path, path_large, path_small])
            raise

    if os.path.isfile(path_large) and os.path.isfile(path_small):
        s3_bucket_name = os.getenv('THUMBNAIL_BUCKET')
        s3_bucket = boto3.resource('s3').Bucket(s3_bucket_name)
        s3_bucket.upload_file(path_large, path_large, {'ContentType': 'image/png'})
        s3_bucket.upload_file(path_small, path_small, {'ContentType': 'image/png'})
        try_to_remove_files([r_tif_path, rp_tif_path, path_large, path_small])
    else:
        return

    # Return List[Thumbnail]
    return [
        Thumbnail(
            organization_id,
            dim_small[0],
            dim_small[1],
            'SMALL',
            url_small,
            sceneId=scene_id
        ),
        Thumbnail(
            organization_id,
            dim_large[0],
            dim_large[1],
            'LARGE',
            url_large,
            sceneId=scene_id
        )
    ]
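# Hedged sketch of get_geotiff_dimensions, which the geotiff variants rely on
# but which is not part of this snippet. Assumption: it returns a
# (width, height) tuple in pixels, read here with rasterio (rasterio is
# already used by the later variants).
import rasterio


def get_geotiff_dimensions(tif_path):
    """Return (width, height) of a GeoTIFF in pixels (assumed contract)."""
    with rasterio.open(tif_path) as src:
        return (src.width, src.height)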
def create_thumbnails(tif_path, scene_id, organization_id):
    """Creates thumbnails based on a geotiff path and scene

    Args:
        tif_path (str): path of local tiff file
        scene_id (str): uuid of the scene
        organization_id (str): organization that owns the thumbnails

    Returns:
        List[Thumbnail]
    """
    r_tif_path = os.path.join(
        os.path.dirname(tif_path),
        'resampled_{}'.format(os.path.basename(tif_path)))
    rp_tif_path = os.path.join(
        os.path.dirname(tif_path),
        'reprojected_{}'.format(os.path.basename(tif_path)))

    dim = get_geotiff_dimensions(tif_path)
    max_dim = float(max(get_geotiff_dimensions(tif_path)))

    size_large = 1024
    size_small = 256

    scale_large = size_large / max_dim
    scale_small = size_small / max_dim

    dim_large = (int(dim[0] * scale_large), int(dim[1] * scale_large))
    dim_small = (int(dim[0] * scale_small), int(dim[1] * scale_small))

    tempdir = os.path.dirname(tif_path)

    # Create paths for each size thumb
    path_large = os.path.join(tempdir, '{}-LARGE.png'.format(scene_id))
    path_small = os.path.join(tempdir, '{}-SMALL.png'.format(scene_id))
    # Color Corrected
    path_cc_large = os.path.join(tempdir, '{}-CC-LARGE.png'.format(scene_id))
    path_cc_small = os.path.join(tempdir, '{}-CC-SMALL.png'.format(scene_id))

    # Create urls for each size Thumbnail
    url_large = '/thumbnails/{}'.format(os.path.basename(path_cc_large))
    url_small = '/thumbnails/{}'.format(os.path.basename(path_cc_small))

    try_to_remove_files([
        r_tif_path, rp_tif_path, path_large, path_small, path_cc_large,
        path_cc_small
    ])

    if os.path.isfile(tif_path):
        try:
            logger.info('Performing thumbnail resampling')
            subprocess.check_call([
                'gdal_translate', tif_path, r_tif_path,
                '-outsize', str(dim_large[0]), str(dim_large[1]),
            ])
            logger.info('Performing thumbnail warping')
            subprocess.check_call([
                'gdalwarp', r_tif_path, rp_tif_path,
                '-t_srs', 'EPSG:3857',
                '-q'
            ])
            # Create a temporary env object
            mod_env = os.environ.copy()
            # Add variable to avoid sidecar files
            mod_env['GDAL_PAM_ENABLED'] = 'NO'

            with rasterio.open(rp_tif_path) as src:
                num_bands = src.count

            if num_bands >= 3:
                bands = ['-b', '1', '-b', '2', '-b', '3']
            else:
                bands = ['-b', '1']

            logger.info('Creating thumbnail files')
            large_thumbnail_gdal_command = [
                'gdal_translate', rp_tif_path, path_large,
                '-outsize', str(dim_large[0]), str(dim_large[1]),
                '-of', 'PNG',
                '-q'
            ] + bands
            small_thumbnail_gdal_command = [
                'gdal_translate', rp_tif_path, path_small,
                '-outsize', str(dim_small[0]), str(dim_small[1]),
                '-of', 'PNG',
                '-q'
            ] + bands

            # Convert tif to pngs (large)
            subprocess.check_call(large_thumbnail_gdal_command, env=mod_env)
            # Convert tif to pngs (small)
            subprocess.check_call(small_thumbnail_gdal_command, env=mod_env)

            # Do basic histogram normalization to improve thumbnails
            subprocess.check_call(
                ['convert', path_small, '-normalize', path_cc_small])
            # Do basic histogram normalization to improve thumbnails
            subprocess.check_call(
                ['convert', path_large, '-normalize', path_cc_large])
        except:
            # If any subprocess calls fail, we need to clean up before exiting
            try_to_remove_files(
                [r_tif_path, rp_tif_path, path_large, path_small])
            raise

    if os.path.isfile(path_large) and os.path.isfile(path_small):
        logger.info('Uploading thumbnails to S3')
        s3_bucket_name = os.getenv('THUMBNAIL_BUCKET')
        s3_bucket = boto3.resource('s3').Bucket(s3_bucket_name)
        s3_bucket.upload_file(path_cc_large, os.path.basename(path_cc_large),
                              {'ContentType': 'image/png'})
        s3_bucket.upload_file(path_cc_small, os.path.basename(path_cc_small),
                              {'ContentType': 'image/png'})
        try_to_remove_files([r_tif_path, rp_tif_path, path_large, path_small])
    else:
        return

    # Return List[Thumbnail]
    return [
        Thumbnail(organization_id,
                  dim_small[0],
                  dim_small[1],
                  'SMALL',
                  url_small,
                  sceneId=scene_id),
        Thumbnail(organization_id,
                  dim_large[0],
                  dim_large[1],
                  'LARGE',
                  url_large,
                  sceneId=scene_id)
    ]
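# Hedged sketch of try_to_remove_files, used by every geotiff variant for
# cleanup but not defined in this snippet. Assumption: best-effort removal
# that silently skips paths which do not exist.
import os


def try_to_remove_files(paths):
    """Attempt to remove each path, ignoring missing files (assumed behavior)."""
    for path in paths:
        try:
            os.remove(path)
        except OSError:
            pass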
def create_thumbnails(tif_path, scene_id):
    """Creates thumbnails based on a geotiff path and scene

    Args:
        tif_path (str): path of local tiff file
        scene_id (str): uuid of the scene

    Returns:
        List[Thumbnail]
    """
    r_tif_path = os.path.join(
        os.path.dirname(tif_path), "resampled_{}".format(os.path.basename(tif_path))
    )
    rp_tif_path = os.path.join(
        os.path.dirname(tif_path), "reprojected_{}".format(os.path.basename(tif_path))
    )

    dim = get_geotiff_dimensions(tif_path)
    max_dim = float(max(get_geotiff_dimensions(tif_path)))

    size_large = 1024
    size_small = 256

    scale_large = size_large / max_dim
    scale_small = size_small / max_dim

    dim_large = (int(dim[0] * scale_large), int(dim[1] * scale_large))
    dim_small = (int(dim[0] * scale_small), int(dim[1] * scale_small))

    tempdir = os.path.dirname(tif_path)

    # Create paths for each size thumb
    path_large = os.path.join(tempdir, "{}-LARGE.png".format(scene_id))
    path_small = os.path.join(tempdir, "{}-SMALL.png".format(scene_id))
    # Color Corrected
    path_cc_large = os.path.join(tempdir, "{}-CC-LARGE.png".format(scene_id))
    path_cc_small = os.path.join(tempdir, "{}-CC-SMALL.png".format(scene_id))

    # Create urls for each size Thumbnail
    url_large = "/thumbnails/{}".format(os.path.basename(path_cc_large))
    url_small = "/thumbnails/{}".format(os.path.basename(path_cc_small))

    try_to_remove_files(
        [r_tif_path, rp_tif_path, path_large, path_small, path_cc_large, path_cc_small]
    )

    if os.path.isfile(tif_path):
        try:
            logger.info("Performing thumbnail resampling")
            subprocess.check_call(
                [
                    "gdal_translate",
                    tif_path,
                    r_tif_path,
                    "-outsize",
                    str(dim_large[0]),
                    str(dim_large[1]),
                ]
            )
            logger.info("Performing thumbnail warping")
            subprocess.check_call(
                ["gdalwarp", r_tif_path, rp_tif_path, "-t_srs", "EPSG:3857", "-q"]
            )
            # Create a temporary env object
            mod_env = os.environ.copy()
            # Add variable to avoid sidecar files
            mod_env["GDAL_PAM_ENABLED"] = "NO"

            with rasterio.open(rp_tif_path) as src:
                num_bands = src.count

            if num_bands >= 3:
                bands = ["-b", "1", "-b", "2", "-b", "3"]
            else:
                bands = ["-b", "1"]

            logger.info("Creating thumbnail files")
            large_thumbnail_gdal_command = [
                "gdal_translate",
                rp_tif_path,
                path_large,
                "-outsize",
                str(dim_large[0]),
                str(dim_large[1]),
                "-of",
                "PNG",
                "-q",
            ] + bands
            small_thumbnail_gdal_command = [
                "gdal_translate",
                rp_tif_path,
                path_small,
                "-outsize",
                str(dim_small[0]),
                str(dim_small[1]),
                "-of",
                "PNG",
                "-q",
            ] + bands

            # Convert tif to pngs (large)
            subprocess.check_call(large_thumbnail_gdal_command, env=mod_env)
            # Convert tif to pngs (small)
            subprocess.check_call(small_thumbnail_gdal_command, env=mod_env)

            # Do basic histogram normalization to improve thumbnails
            subprocess.check_call(["convert", path_small, "-normalize", path_cc_small])
            # Do basic histogram normalization to improve thumbnails
            subprocess.check_call(["convert", path_large, "-normalize", path_cc_large])
        except:
            # If any subprocess calls fail, we need to clean up before exiting
            try_to_remove_files([r_tif_path, rp_tif_path, path_large, path_small])
            raise

    if os.path.isfile(path_large) and os.path.isfile(path_small):
        logger.info("Uploading thumbnails to S3")
        s3_bucket_name = os.getenv("THUMBNAIL_BUCKET")
        s3_bucket = boto3.resource("s3").Bucket(s3_bucket_name)
        s3_bucket.upload_file(
            path_cc_large,
            os.path.basename(path_cc_large),
            {"ContentType": "image/png"},
        )
        s3_bucket.upload_file(
            path_cc_small,
            os.path.basename(path_cc_small),
            {"ContentType": "image/png"},
        )
        try_to_remove_files([r_tif_path, rp_tif_path, path_large, path_small])
    else:
        return

    # Return List[Thumbnail]
    return [
        Thumbnail(dim_small[0], dim_small[1], "SMALL", url_small, sceneId=scene_id),
        Thumbnail(dim_large[0], dim_large[1], "LARGE", url_large, sceneId=scene_id),
    ]