def stage(self, fileobj, creds=None):
    """Stage the user's file on S3.

    If creds are not provided, temporary credentials will be generated
    via ``self._get_credentials()``.

    Parameters
    ----------
    fileobj:
        A file-like object exposing ``.read``.
    creds: dict, optional
        Staging credentials; expected to carry ``accessKeyId``,
        ``secretAccessKey``, ``sessionToken``, ``bucket``, ``key``
        and ``url`` entries.

    Returns the URL to the staged resource.
    """
    if not hasattr(fileobj, 'read'):
        raise InvalidFileError(
            "Object `{0}` has no .read method, "
            "a file-like object is required".format(fileobj))

    if not creds:
        res = self._get_credentials()
        creds = res.json()

    session = boto3_session(
        aws_access_key_id=creds['accessKeyId'],
        aws_secret_access_key=creds['secretAccessKey'],
        aws_session_token=creds['sessionToken'],
        region_name="us-east-1")
    s3 = session.resource('s3')

    # Real on-disk files go through Bucket.upload_file(); anything else
    # is streamed with Object.put().
    # Progress reporting for the CLI is a TODO.
    if hasattr(fileobj, 'name') and os.path.exists(fileobj.name):
        s3.Bucket(creds['bucket']).upload_file(fileobj.name, creds['key'])
    else:
        # Drop the previously-unused `res =` binding; put()'s response
        # was never inspected.
        s3.Object(creds['bucket'], creds['key']).put(Body=fileobj)

    return creds['url']
def stage(self, fileobj, creds=None):
    """Stage the user's file on S3.

    If creds are not provided, temporary credentials are fetched via
    ``self._get_credentials()``.

    Returns the URL to the staged resource.
    """
    if not hasattr(fileobj, "read"):
        raise InvalidFileError(
            "Object `{0}` has no .read method, "
            "a file-like object is required".format(fileobj))

    if not creds:
        res = self._get_credentials()
        creds = res.json()

    # BUG FIX: the original used a Python 2 `print` *statement*, which is
    # a SyntaxError on Python 3. The parenthesized call form below is
    # valid on both Python 2 and 3 for a single argument.
    print(" -- Uploading package to Amazon S3 staging bucket")

    session = boto3_session(
        aws_access_key_id=creds["accessKeyId"],
        aws_secret_access_key=creds["secretAccessKey"],
        aws_session_token=creds["sessionToken"],
        region_name="us-east-1")
    s3 = session.resource("s3")

    # Real files go via upload_file(); other file-likes are streamed
    # with put(). The put() response was never used, so it is not bound.
    if hasattr(fileobj, "name") and os.path.exists(fileobj.name):
        s3.Bucket(creds["bucket"]).upload_file(fileobj.name, creds["key"])
    else:
        s3.Object(creds["bucket"], creds["key"]).put(Body=fileobj)

    return creds["url"]
def stage(self, fileobj, creds=None, callback=None):
    """Upload a user's file-like object to the S3 staging bucket.

    When *creds* is omitted, temporary credentials are requested from
    the service. *callback* is forwarded to boto3's managed transfer
    for progress reporting.

    Returns the URL to the staged resource.
    """
    if not hasattr(fileobj, 'read'):
        raise InvalidFileError(
            "Object `{0}` has no .read method, "
            "a file-like object is required".format(fileobj))

    if not creds:
        creds = self._get_credentials().json()

    # Build the S3 resource straight off the scoped session.
    s3 = boto3_session(
        aws_access_key_id=creds['accessKeyId'],
        aws_secret_access_key=creds['secretAccessKey'],
        aws_session_token=creds['sessionToken'],
        region_name="us-east-1",
    ).resource('s3')

    staging_bucket = s3.Bucket(creds['bucket'])
    staging_bucket.upload_fileobj(fileobj, creds['key'], Callback=callback)

    return creds['url']
def stage(self, filepath, creds=None):
    """Upload the file at *filepath* to the S3 staging bucket.

    Temporary credentials are fetched from the service when *creds* is
    not supplied.

    Returns the URL to the staged resource.
    """
    if not creds:
        creds = self._get_credentials().json()

    session = boto3_session(
        aws_access_key_id=creds['accessKeyId'],
        aws_secret_access_key=creds['secretAccessKey'],
        aws_session_token=creds['sessionToken'],
        region_name="us-east-1")

    # Stream the file from disk directly into the staging object.
    with open(filepath, 'rb') as data:
        session.resource('s3').Object(
            creds['bucket'], creds['key']).put(Body=data)

    return creds['url']
from boto3.session import Session as boto3_session from lambda_proxy.proxy import API from rasterio.session import AWSSession from rio_tiler.profiles import img_profiles from rio_tiler.utils import geotiff_options, render from rio_tiler_mosaic.methods import defaults from rio_tiler_mosaic.mosaic import mosaic_tiler from usgs_topo_tiler import tile as usgs_tiler from cogeo_mosaic import version as mosaic_version from cogeo_mosaic.backends import MosaicBackend from cogeo_mosaic.backends.utils import get_hash from cogeo_mosaic.mosaic import MosaicJSON from usgs_topo_mosaic.utils import _aws_head_object, _get_layer_names session = boto3_session() s3_client = session.client("s3") aws_session = AWSSession(session=session) PIXSEL_METHODS = { "first": defaults.FirstMethod, "highest": defaults.HighestMethod, "lowest": defaults.LowestMethod, "mean": defaults.MeanMethod, "median": defaults.MedianMethod, "stdev": defaults.StdevMethod, } app = API(name="cogeo-mosaic-tiler") params = dict(payload_compression_method="gzip", binary_b64encode=True) if os.environ.get("CORS"):
def _aws_get_data(key: str, bucket: str) -> bytes:
    """Fetch an object's full contents from S3.

    Parameters
    ----------
    key: object key within the bucket.
    bucket: S3 bucket name.

    Returns
    -------
    The object body as bytes.
    """
    session = boto3_session()
    s3 = session.client("s3")
    response = s3.get_object(Bucket=bucket, Key=key)
    # FIX: .read() consumes the streaming body and returns `bytes`, so
    # the previous `BinaryIO` return annotation was incorrect.
    return response["Body"].read()
def _aws_put_data(key: str, bucket: str, body: BinaryIO, options: "Optional[Dict]" = None) -> str:
    """Upload *body* to ``s3://bucket/key``.

    Parameters
    ----------
    key: destination object key.
    bucket: destination S3 bucket name.
    body: file-like object (or bytes) to upload.
    options: extra keyword arguments forwarded to ``put_object``
        (e.g. ``ACL``, ``ContentType``). Defaults to no extras.

    Returns
    -------
    The object key, unchanged.
    """
    # BUG FIX: the original used a mutable default argument
    # (`options: Dict = {}`), which is shared across all calls; use a
    # None sentinel instead. The annotation is a string so no extra
    # typing import is required at runtime.
    if options is None:
        options = {}
    session = boto3_session()
    s3 = session.client("s3")
    s3.put_object(Bucket=bucket, Key=key, Body=body, **options)
    return key
import rasterio from boto3.session import Session as boto3_session from lambda_proxy.proxy import API from rasterio.session import AWSSession from rio_tiler.colormap import cmap from rio_tiler.io import COGReader from rio_tiler.profiles import img_profiles from rio_tiler.utils import geotiff_options, render from . import utils from .common import drivers, mimetype from .ogc import wmts_template app = API(name="cogeo-tiler") aws_session = AWSSession(session=boto3_session()) class TilerError(Exception): """Base exception class.""" route_params = dict( cors=True, payload_compression_method="gzip", binary_b64encode=True, ) @app.get("/bounds", tag=["metadata"], **route_params) def _bounds(url: str) -> Tuple[str, str, str]: """Handle /bounds requests.""" with rasterio.Env(aws_session):
def _aws_get_data(key: str, bucket: str, client=None) -> bytes:
    """Fetch an object's full contents from S3.

    Parameters
    ----------
    key: object key within the bucket.
    bucket: S3 bucket name.
    client: an existing S3 client to reuse; when None, a fresh
        session/client pair is created for this call.

    Returns
    -------
    The object body as bytes.
    """
    # FIX: the original annotation `client: boto3_session.client` named
    # the unbound Session.client *method*, not a client type — dropped.
    # Also use an explicit None sentinel rather than truthiness, and
    # return `bytes` (what .read() yields) instead of `BinaryIO`.
    if client is None:
        session = boto3_session()
        client = session.client("s3")
    response = client.get_object(Bucket=bucket, Key=key)
    return response["Body"].read()