def setUp(self):
    """Create unsigned Cognito clients and attach botocore stubbers."""
    def _stubbed_client(service):
        # Unsigned requests let the Stubber intercept calls without
        # requiring real AWS credentials.
        client = botocore.session.get_session().create_client(
            service,
            region_name="ap-southeast-2",
            config=BotoConfig(signature_version=UNSIGNED),
        )
        return client, Stubber(client)

    self.cognito_client, self.cognito_stub = _stubbed_client("cognito-identity")
    self.aws_srp_client, self.aws_srp_stubber = _stubbed_client("cognito-idp")
def bucket(self):
    """Return the S3 bucket for this profile, creating it lazily once."""
    if self.__bucket is not None:
        return self.__bucket
    # First access: resolve the bucket name from the profile and cache
    # a sigv4-signed bucket handle for subsequent calls.
    s3 = self.session.resource(
        's3', config=BotoConfig(signature_version='s3v4'))
    self.__bucket = s3.Bucket(self.profile['bucket'])
    return self.__bucket
def init(conf, bucket=None, path=None):
    """Init

    Initialises the module so that it can be used

    Args:
        conf: The configuration parameters, see boto3.resource for more info
        bucket: Optional bucket (needed if not passed in init)
        path: Optional path to prepend to all keys

    Returns:
        None
    """
    # Pull in the module variables
    global _c, _r, _s3

    # Record the module configuration
    _s3 = {
        "conf": BotoConfig(**conf),
        "bucket": bucket,
        "path": path,
    }

    # Module-wide S3 resource handle built from the stored config
    _r = boto3.resource('s3', config=_s3['conf'])

    # Low-level client.  NOTE(review): created WITHOUT the config above —
    # presumably intentional, but verify against callers.
    _c = botocore.session.get_session().create_client('s3')
def run(args):
    """Invoke the configured AWS Lambda function and print its response.

    Args:
        args: Parsed CLI namespace with ``payload``, ``function_name``,
            ``prefix`` and ``env`` attributes.  A ``None`` payload is read
            from stdin instead.

    Returns:
        None; the decoded payload (or raw response) is printed to stdout.
    """
    payload = args.payload
    if payload is None:
        print("reading payload from stdin")
        payload = sys.stdin.read()

    # Build the fully-qualified function name: <prefix>_<name>_<env>
    function_name = args.function_name
    if args.prefix:
        function_name = "{}_{}".format(args.prefix, function_name)
    if args.env:
        function_name = "{}_{}".format(function_name, args.env)

    # Generous read timeout: the Lambda may run for up to five minutes.
    client = boto3.client(
        'lambda',
        region_name='us-east-1',
        config=BotoConfig(
            connect_timeout=10,
            read_timeout=300)
    )
    response = client.invoke(
        FunctionName=function_name,
        Payload=payload.encode('utf-8')
    )
    if response['StatusCode'] == 200:
        try:
            payload = json.loads(response['Payload'].read())
            print(json.dumps(payload, indent=4))
        except Exception:
            # Fix: was a bare ``except:``, which also swallowed
            # KeyboardInterrupt/SystemExit.  Best-effort pretty printing —
            # fall back to the raw response on any decode failure.
            print(response)
    else:
        print(response)
def get_s3_resource(self):
    """Build an S3 resource bound to the configured endpoint and signature."""
    session = self.get_s3_session()
    s3_config = BotoConfig(
        signature_version=self.signature,
        s3={'addressing_style': self.addressing_style},
    )
    return session.resource('s3',
                            endpoint_url=self.host_name,
                            config=s3_config)
def _get_config(self, config):
    """Assemble boto client keyword arguments from the vendor settings."""
    client_config = BotoConfig(
        signature_version=self.signature_version,
        region_name=self.region_name,
    )
    return {
        "aws_access_key_id": config.VENDOR.API_KEY,
        "aws_secret_access_key": config.VENDOR.API_SECRET,
        "config": client_config,
    }
def get_s3_client(self):
    """Build a low-level S3 client for the configured endpoint and region."""
    s3_config = BotoConfig(
        signature_version=self.signature,
        s3={'addressing_style': self.addressing_style},
    )
    session = self.get_s3_session()
    return session.client('s3',
                          endpoint_url=self.host_name,
                          config=s3_config,
                          region_name=self.region)
def get_project_bucket(self):
    """Return the MinIO bucket that backs the current project."""
    self._check_project()
    credentials = self.get_credentials()
    # sigv4 signing against the MinIO endpoint.  TODO: Region?
    storage = boto3.resource(
        "s3",
        endpoint_url=self.minio_url,
        aws_access_key_id=credentials["aws_access_key_id"],
        aws_secret_access_key=credentials["aws_secret_access_key"],
        config=BotoConfig(signature_version="s3v4"),
    )
    return storage.Bucket(self.project_name)  # pylint: disable=E1101
def main(args=None):
    """CLI entry point: parse arguments and synchronously invoke the Lambda.

    Args:
        args: Optional argument list (defaults to ``sys.argv[1:]``).

    Returns:
        None; the decoded payload (or raw response) is printed to stdout.
    """
    parser = argparse.ArgumentParser("execute python lambda functions")
    parser.add_argument('function_name', type=str,
                        help='the base name of the function')
    parser.add_argument('--payload', type=str,
                        help='the payload function', default=None)
    parser.add_argument('--prefix', type=str,
                        help='the prefix for the function',
                        default=config.application)
    parser.add_argument('--env', type=str,
                        help='the environment this function will run in',
                        default=config.environment)
    args = parser.parse_args(args)

    payload = args.payload
    if payload is None:
        print("reading payload from stdin")
        payload = sys.stdin.read()

    # Build the fully-qualified function name: <prefix>_<name>_<env>
    function_name = args.function_name
    if args.prefix:
        function_name = "{}_{}".format(args.prefix, function_name)
    if args.env:
        function_name = "{}_{}".format(function_name, args.env)

    # Generous read timeout: the Lambda may run for up to five minutes.
    client = boto3.client('lambda', region_name='us-east-1',
                          config=BotoConfig(connect_timeout=10,
                                            read_timeout=300))
    response = client.invoke(FunctionName=function_name,
                             Payload=payload.encode('utf-8'))
    if response['StatusCode'] == 200:
        try:
            payload = json.loads(response['Payload'].read())
            print(json.dumps(payload, indent=4))
        except Exception:
            # Fix: was a bare ``except:``, which also swallowed
            # KeyboardInterrupt/SystemExit.  Fall back to the raw response.
            print(response)
    else:
        print(response)
def sts_from_cognito_identity_pool(self, token, cognito_client=None, **kwargs):
    """Exchange a Cognito ID token for temporary identity-pool credentials.

    Args:
        token: The Cognito ID token (JWT) for the authenticated user.
        cognito_client: Optional pre-built "cognito-identity" client (e.g.
            a stubbed one in tests); an unsigned client is created if omitted.
        **kwargs: Accepted for interface compatibility; ignored.

    Returns:
        The ``get_credentials_for_identity`` response dict containing the
        temporary credentials.

    Raises:
        InvalidCredentialsException: On an unexpected client error, or when
            ``self.max_retries`` attempts are exhausted.
    """
    if not cognito_client:
        cognito_client = boto3.client(
            "cognito-identity",
            region_name=self.aws_region,
            config=BotoConfig(signature_version=UNSIGNED),
        )
    # Hoisted loop invariant: the Logins provider key is the same for both
    # calls and every retry.
    login_provider = (
        f"cognito-idp.{self.aws_region}.amazonaws.com/{self.user_pool}"
    )
    for _ in range(self.max_retries):
        try:
            # Fix: local was previously named ``id``, shadowing the builtin.
            identity = cognito_client.get_id(
                IdentityPoolId=self.identity_pool,
                Logins={login_provider: token},
            )
            id_creds = cognito_client.get_credentials_for_identity(
                IdentityId=identity["IdentityId"],
                Logins={login_provider: token},
            )
            break
        except ClientError as e:
            # AWS eventual consistency, attempt to retry up to 3 times
            if "Couldn't verify signed token" in str(e):
                continue
            else:
                raise InvalidCredentialsException(
                    f"Unexpected Client Error. Error details: {e}")
    else:
        # Loop completed without breaking: every attempt failed.
        raise InvalidCredentialsException(
            "Retries Exceeded: Unexpected Client Error")
    return id_creds
def id_token_from_cognito(self, username=None, password=None, srp_client=None, **kwargs):
    """Authenticate against the Cognito user pool via SRP and return the ID token."""
    if not srp_client:
        # No client supplied: build an unsigned user-pool client.
        srp_client = boto3.client(
            "cognito-idp",
            region_name=self.aws_region,
            config=BotoConfig(signature_version=UNSIGNED),
        )
    srp_auth = AWSSRP(
        username=username,
        password=password,
        pool_id=self.user_pool,
        client_id=self.client_id,
        client=srp_client,
    )
    try:
        tokens = srp_auth.authenticate_user()
    except ClientError as e:
        # Map the well-known Cognito failures to friendlier guidance.
        error_code = e.response["Error"]["Code"]
        if error_code == "NotAuthorizedException":
            raise InvalidCredentialsException(
                message=str(e),
                detail="Please check your Secret Key is correct")
        if error_code == "UserNotFoundException":
            raise InvalidCredentialsException(
                message=str(e),
                detail=
                "Please check your Access Key, that you have created your Api Token and that you are using the right STAX REGION",
            )
        raise InvalidCredentialsException(
            f"Unexpected Client Error. Error details: {e}")
    return tokens["AuthenticationResult"]["IdToken"]
def init(profile, conf, bucket=None, path=None):
    """Init

    Initialises the module so that it can be used

    Args:
        profile (str): The name of the profile to use to connect
        conf (dict): The configuration parameters, see boto3.resource for more info
        bucket (str): Optional bucket (needed if not passed in init)
        path (str): Optional path to prepend to all keys

    Returns:
        None
    """
    # Pull in the module variables
    global _c, _r, _s3

    # Remember everything needed to talk to S3 later
    _s3 = dict(
        profile=profile,
        conf=BotoConfig(**conf),
        bucket=bucket,
        path=path,
    )

    # Every handle comes from a session tied to the named profile
    session = boto3.Session(profile_name=profile)

    # High-level resource, built from the caller-supplied config
    _r = session.resource('s3', config=_s3['conf'])

    # Low-level client: force path-style addressing and sigv4 signing
    _c = session.client('s3', config=boto3.session.Config(
        s3={'addressing_style': 'path'}, signature_version='s3v4'))
import boto3
from botocore.client import Config as BotoConfig
from concurrent.futures import ThreadPoolExecutor
import itertools
import time
from datetime import datetime

# Shared CloudWatch Logs client; generous read timeout for slow queries.
client = boto3.client('logs',
                      region_name='us-east-1',
                      config=BotoConfig(connect_timeout=10, read_timeout=300))


def merge_lists(lists):
    """Flatten an iterable of lists into a single list."""
    return list(itertools.chain.from_iterable(lists))


def nowms():
    """Return the current time as epoch milliseconds."""
    # Fix: ``time`` was used here but never imported.
    return int(time.time() * 1000)


def get_streams(log_group, from_ms, to_ms, quiet=False):
    """Describe log streams whose name prefix covers [from_ms, to_ms].

    The prefix is the longest common *prefix* of the two UTC ``YYYY/MM/DD``
    dates, so a range within one day yields the full date, a range within
    one month yields ``YYYY/MM``, etc.
    """
    def ts2date(ts):
        return datetime.utcfromtimestamp(ts / 1000).strftime("%Y/%m/%d")

    fds = ts2date(from_ms)
    tds = ts2date(to_ms)
    minlen = min(len(fds), len(tds))
    # Fix: the previous code joined EVERY position where the dates agreed,
    # not just the leading run — e.g. "2020/01/05" vs "2021/01/05" produced
    # "202/01/05", which is not a valid stream-name prefix.  Stop at the
    # first mismatch instead.
    prefix_len = minlen
    for i in range(minlen):
        if fds[i] != tds[i]:
            prefix_len = i
            break
    common = fds[:prefix_len]
    if not quiet:
        print("getting streams with prefix {}".format(common))
    response = client.describe_log_streams(logGroupName=log_group,
                                           logStreamNamePrefix=common)
    # NOTE(review): ``response`` is never returned — this function looks
    # truncated; confirm whether pagination/return logic was lost.
from flask import current_app, jsonify, request, session from flask_login import login_required, current_user import boto3 from botocore.exceptions import ParamValidationError from botocore.client import Config as BotoConfig from app import db from app.upload import bp from app.models import Question, Video, UserAgentSchema, Submission from config import Config s3_client = boto3.client('s3', region_name=Config.S3['S3_REGION'], config=BotoConfig(signature_version='s3v4')) @bp.route('/url') @login_required def get_signed_url(): # http://127.0.0.1:5000/upload/url?prefix=bar/baz&key=test.csv&content_type=text/csv prefix = request.args.get('prefix', default='', type=str) key = request.args.get('key', default=None, type=str) content_type = request.args.get('content_type', default=None, type=str) try: url = s3_client.generate_presigned_url( ClientMethod='put_object', Params={ 'Bucket': current_app.config["S3"]["S3_BUCKET"], 'Key': "{}/{}".format(prefix, key) if prefix else "{}".format(key),
def main():
    """Entry point for the package-storage server.

    Parses CLI/env configuration, wires up the database and S3 clients,
    registers the package providers, and runs the cherrypy server until a
    termination signal arrives.
    """
    import argparse
    import signal

    parser = argparse.ArgumentParser(description="package storage database")
    parser.add_argument('-p', '--port', default=8080, type=int, help="http port to listen on")
    parser.add_argument('-d', '--database', help="mysql+pymysql:// connection string",
                        default=os.environ.get("DATABASE_URL"))
    parser.add_argument('-s', '--s3', help="http:// or https:// connection string",
                        default=os.environ.get("S3_URL"))
    parser.add_argument('--debug', action="store_true", help="enable development options")
    args = parser.parse_args()

    logging.basicConfig(level=logging.INFO if args.debug else logging.WARNING,
                        format="%(asctime)-15s %(levelname)-8s %(filename)s:%(lineno)d %(message)s")

    # Both connection strings are mandatory (flag or environment variable).
    if not args.database:
        parser.error("--database or DATABASE_URL required")
    if not args.s3:
        parser.error("--s3 or S3_URL required")

    # set up database client
    dbcon = sqlalchemy.create_engine(args.database, echo=args.debug, encoding="utf8")
    SAEnginePlugin(cherrypy.engine, dbcon).subscribe()
    cherrypy.tools.db = SATool()

    # set up s3 client — credentials and endpoint are parsed out of the
    # single S3 URL (scheme://user:pass@host:port/bucket)
    s3url = urlparse(args.s3)
    s3args = {"config": BotoConfig(signature_version='s3v4')}
    endpoint_url = f"{s3url.scheme}://{s3url.hostname}"
    if s3url.port:
        endpoint_url += f":{s3url.port}"
    s3args["endpoint_url"] = endpoint_url
    if s3url.username and s3url.password:
        s3args["aws_access_key_id"] = s3url.username
        s3args["aws_secret_access_key"] = s3url.password
    s3 = boto3.client('s3', **s3args)
    bucket = s3url.path[1:]  # strip the leading "/" from the URL path

    # ensure bucket exists
    if bucket not in [b['Name'] for b in s3.list_buckets()['Buckets']]:
        print("Creating bucket")
        s3.create_bucket(Bucket=bucket)

    # set up providers — each shares the same db engine, s3 client and bucket
    providers = {"apt": AptProvider(dbcon, s3, bucket),
                 "pypi": PypiProvider(dbcon, s3, bucket),
                 "tar": TarProvider(dbcon, s3, bucket)}

    # set up main web screen
    web = AppWeb(providers)
    cherrypy.tree.mount(web, '/', {'/': {'tools.trailing_slash.on': False,
                                         'tools.db.on': True}})

    # Server-wide settings: unlimited body size and hour-long timeouts
    # (large package uploads), autoreload only in debug mode.
    cherrypy.config.update({
        'tools.sessions.on': False,
        'request.show_tracebacks': True,
        'server.socket_port': args.port,
        'server.thread_pool': 5,
        'server.socket_host': '0.0.0.0',
        'server.show_tracebacks': True,
        'log.screen': False,
        'engine.autoreload.on': args.debug,
        'server.max_request_body_size': 0,
        'server.socket_timeout': 3600,
        'response.timeout': 3600
    })

    def signal_handler(signum, stack):
        # Shut the engine down cleanly on SIGINT/SIGTERM.
        logging.warning('Got sig {}, exiting...'.format(signum))
        cherrypy.engine.exit()

    signal.signal(signal.SIGINT, signal_handler)
    signal.signal(signal.SIGTERM, signal_handler)

    try:
        cherrypy.engine.start()
        cherrypy.engine.block()
    finally:
        cherrypy.engine.exit()