def get_s3(url, account_acessor=None):
    """Get a file from S3 storage.

    Args:
        url (str): url of the file.
        account_acessor (callable): callable returning a dictionary with s3
            credentials ('access' and 'secret' at least).

    Example:
        get_s3('s3://example.com/file1.csv',
               lambda url: {'access': '<access>', 'secret': '<secret>'})

    Returns:
        S3FS instance (file-like)
    """
    # The monkey patch fixes a bug: https://github.com/boto/boto/issues/2836
    import botocore.session
    session = botocore.session.get_session()

    _old_match_hostname = ssl.match_hostname

    # FIXME. This issue is possibly better handled with
    # https://pypi.python.org/pypi/backports.ssl_match_hostname
    def _new_match_hostname(cert, hostname):
        if hostname.endswith('.s3.amazonaws.com'):
            # BUG FIX: str has no find_first(); str.find already returns the
            # index of the first occurrence.
            pos = hostname.find('.s3.amazonaws.com')
            # Collapse dots in the bucket part so the wildcard cert matches.
            hostname = hostname[:pos].replace('.', '') + hostname[pos:]
        return _old_match_hostname(cert, hostname)

    ssl.match_hostname = _new_match_hostname

    pd = parse_url_to_dict(url)

    # Fetch once: get_credentials() may refresh, so reading access_key and
    # secret_key from two separate calls can return a mismatched pair.
    # NOTE(review): credentials come from the botocore session even though
    # the error below refers to the account_accessor callable — confirm
    # which source is intended.
    credentials = session.get_credentials()
    aws_access_key = credentials.access_key
    aws_secret_key = credentials.secret_key

    missing_credentials = []
    if not aws_access_key:
        missing_credentials.append('access')
    if not aws_secret_key:
        missing_credentials.append('secret')

    if missing_credentials:
        raise MissingCredentials(
            'dict returned by account_accessor callable for {} must contain not empty {} key(s)'
            .format(pd['netloc'], ', '.join(missing_credentials)),
            location=pd['netloc'],
            required_credentials=['access', 'secret'],
        )

    s3 = AltValidationS3FS(
        bucket=pd['netloc'],
        # prefix=pd['path'],
        aws_access_key=aws_access_key,
        aws_secret_key=aws_secret_key
    )

    # ssl.match_hostname = _old_match_hostname
    return s3
def main():
    """Export the profile's AWS credentials into the child-process
    environment, then run `command` via os.system and exit with its status."""
    profile, command = parse_args()

    session = botocore.session.Session(profile=profile)
    configure_cache(session)
    config = session.get_scoped_config()
    creds = session.get_credentials()

    # Unset variables for sanity sake. Note: os.unsetenv()/os.putenv() affect
    # only child processes (the os.system() call below); os.environ in this
    # process is left untouched.
    os.unsetenv('AWS_ACCESS_KEY_ID')
    os.unsetenv('AWS_SECRET_ACCESS_KEY')
    os.unsetenv('AWS_SESSION_TOKEN')
    os.unsetenv('AWS_DEFAULT_PROFILE')
    os.unsetenv('AWS_PROFILE')

    region = config.get('region', None)
    if region:
        os.putenv('AWS_DEFAULT_REGION', region)
        os.putenv('AWS_REGION', region)

    os.putenv('AWS_ACCESS_KEY_ID', creds.access_key)
    os.putenv('AWS_SECRET_ACCESS_KEY', creds.secret_key)
    if creds.token:
        # Some tools still expect the legacy AWS_SECURITY_TOKEN name.
        if os.getenv('AWS_TOKEN_TYPE') == 'security':
            os.putenv('AWS_SECURITY_TOKEN', creds.token)
        else:
            os.putenv('AWS_SESSION_TOKEN', creds.token)

    # FIX: removed the unused local `my_env = os.environ.copy()` — it was
    # never passed to the child process.
    command_status = os.system(command)
    exit(os.WEXITSTATUS(command_status))
def upload_input_file():
    """Render the S3 browser-upload form with a signed POST policy document."""
    auth.require(fail_redirect='/')
    redirect_url = str(request.url) + "/job"
    encrypt = "AES256"
    # Policy expires one day from now.
    expires = datetime.timedelta(days=1) + datetime.datetime.today()
    # define S3 policy document
    policy = {
        "expiration": expires.strftime('%Y-%m-%dT%H:%M:%S.000Z'),
        "conditions": [
            {"bucket": "gas-inputs"},
            {"acl": "private"},
            {"x-amz-server-side-encryption": encrypt},
            ["starts-with", "$key", "songty/"],
            ["starts-with", "$success_action_redirect", redirect_url],
        ]
    }
    s3_key = str(uuid.uuid4())
    # https://docs.python.org/2/library/base64.html
    # BUG FIX: b64encode operates on bytes — encode the policy string
    # *before* base64-encoding it. The old code base64-encoded first and then
    # .encode()d the result, which raises TypeError on Python 3 (and the
    # trailing encode was a no-op on Python 2).
    Policy_Code = base64.b64encode(str(policy).encode('utf8'))
    session = botocore.session.get_session()
    # One get_credentials() call: credentials are refreshable, so separate
    # calls can return a mismatched access/secret pair.
    credentials = session.get_credentials()
    access_key = credentials.access_key
    secret_key = credentials.secret_key
    # https://docs.python.org/2/library/hmac.html
    my_hmac = hmac.new(secret_key.encode(), Policy_Code, hashlib.sha1)
    digest = my_hmac.digest()
    signature = base64.b64encode(digest)
    return template(request.app.config['mpcs.env.templates'] + 'upload', auth=auth,
                    acl="private", encryption=encrypt, policy=Policy_Code,
                    aws_access_key_id=access_key, signature=signature,
                    redirect_url=redirect_url, s3_key_name="songty/" + s3_key)
def _get_presigned_url(self, cluster_name, role_arn):
    """Build a presigned URL for the auth service, signed with the
    credentials of the (possibly assumed-role) session.

    :param cluster_name: name of the target cluster, sent as a header.
    :param role_arn: role to assume when building the signing session.
    :return: the presigned URL string.
    """
    session = self._session_handler.get_session(self._region_name, role_arn)
    # Fall back to the session's configured region when none was given.
    if self._region_name is None:
        self._region_name = session.get_config_variable('region')
    # Resolve the service endpoint from botocore's bundled endpoint data.
    loader = botocore.loaders.create_loader()
    data = loader.load_data("endpoints")
    endpoint_resolver = botocore.regions.EndpointResolver(data)
    endpoint = endpoint_resolver.construct_endpoint(
        AUTH_SERVICE, self._region_name)
    signer = RequestSigner(ServiceId(AUTH_SERVICE), self._region_name, AUTH_SERVICE, AUTH_SIGNING_VERSION, session.get_credentials(), session.get_component('event_emitter'))
    action_params = 'Action=' + AUTH_COMMAND + '&Version=' + AUTH_API_VERSION
    # The cluster name travels in a signed header, so it is tamper-evident.
    params = {
        'method': 'GET',
        'url': 'https://' + endpoint["hostname"] + '/?' + action_params,
        'body': {},
        'headers': {
            CLUSTER_NAME_HEADER: cluster_name
        },
        'context': {}
    }
    # Sign against the endpoint's credential-scope region (may differ from
    # the request region for partition-wide endpoints).
    url = signer.generate_presigned_url(
        params, region_name=endpoint["credentialScope"]["region"], operation_name='', expires_in=URL_TIMEOUT)
    return url
def main():
    """Export the profile's AWS credentials into the child-process
    environment, run `command`, and exit with its return code."""
    profile, command, cache = parse_args()

    session = botocore.session.Session(profile=profile)
    if cache == 'true':
        configure_cache(session)
    config = session.get_scoped_config()
    creds = session.get_credentials()

    # Unset variables for sanity sake (affects child processes only;
    # os.unsetenv does not touch os.environ).
    os.unsetenv('AWS_ACCESS_KEY_ID')
    os.unsetenv('AWS_SECRET_ACCESS_KEY')
    os.unsetenv('AWS_SESSION_TOKEN')

    region = config.get('region', None)
    if region:
        os.putenv('AWS_DEFAULT_REGION', region)
        os.putenv('AWS_REGION', region)

    os.putenv('AWS_ACCESS_KEY_ID', creds.access_key)
    os.putenv('AWS_SECRET_ACCESS_KEY', creds.secret_key)
    if creds.token:
        # Some tools still expect the legacy AWS_SECURITY_TOKEN name.
        if os.getenv('AWS_TOKEN_TYPE') == 'security':
            os.putenv('AWS_SECURITY_TOKEN', creds.token)
        else:
            os.putenv('AWS_SESSION_TOKEN', creds.token)

    returncode = subprocess.call(command, stdin=sys.stdin, stdout=sys.stdout, stderr=sys.stderr)
    # BUG FIX: was `exit(sys.exit(returncode))` — sys.exit() raises
    # SystemExit itself, so wrapping it in exit() was dead code.
    sys.exit(returncode)
def __init__(self, region, access_key=None, secret_key=None, session_token=None, session_expires=None):
    """Store AWS credentials, resolving them via botocore when not given.

    Explicit access/secret keys take precedence; otherwise botocore's
    default credential chain is consulted (when available).

    Raises:
        EnvironmentError: if no credentials are supplied and none can be
            discovered, or botocore is not installed.
    """
    self.session_token = None
    self.session_expires = None
    self.region = region
    if access_key is not None and secret_key is not None:
        # Caller-supplied credentials win over anything botocore could find.
        self.access_key = access_key
        self.secret_key = secret_key
        if session_token:
            self.session_token = session_token
            self.session_expires = session_expires
    elif HAS_BOTO:
        # hijack botocore's method. probably fragile!
        session = botocore.session.Session()
        creds = session.get_credentials()
        if creds is not None:
            # NOTE(review): self.credentials is only assigned on this branch;
            # the explicit-credentials path above never sets it — confirm
            # callers do not rely on the attribute always existing.
            self.credentials = creds
            self.access_key = creds.access_key
            self.secret_key = creds.secret_key
            self.session_token = creds.token
        else:
            raise EnvironmentError("could not find AWS creds anywhere!")
    else:
        raise EnvironmentError(
            "could not find AWS creds (don't have boto3, so didn't look anywhere fancy)"
        )
def assume_role(session, role_arn, profile=None, duration=3600, session_name=None, serial_number=None):
    """Return a new botocore Session whose credentials are obtained by
    assuming ``role_arn`` with the credentials of ``session``.

    :param profile: profile for the new session, if any.
    :param duration: role session duration in seconds.
    :param session_name: optional RoleSessionName for the STS call.
    :param serial_number: optional MFA device serial number.
    """
    # Drop unset optional STS arguments before handing them to the fetcher.
    sts_args = filter_none_values({
        "DurationSeconds": duration,
        "RoleSessionName": session_name,
        "SerialNumber": serial_number,
    })
    # The fetcher performs the actual AssumeRole call, caching results on
    # disk so repeated invocations skip STS.
    fetcher = botocore.credentials.AssumeRoleCredentialFetcher(
        session.create_client,
        session.get_credentials(),
        role_arn,
        extra_args=sts_args,
        cache=botocore.credentials.JSONFileCache(),
    )
    provider = AssumeRoleProvider(fetcher)
    resolver = botocore.credentials.CredentialResolver([provider])
    # The returned session resolves credentials exclusively via the fetcher.
    role_session = botocore.session.Session(profile=profile)
    role_session.register_component("credential_provider", resolver)
    return role_session
def __init__(self, s3_staging_dir=None, access_key=None, secret_key=None, region_name=None, schema_name='default', profile_name=None, credential_file=None, jvm_path=None, jvm_options=None, converter=None, formatter=None, driver_path=None, **driver_kwargs):
    """Open a JDBC connection to Athena through JPype.

    Credentials come either from an explicit ``credential_file`` (left for
    the Java driver to resolve) or from a botocore session (env vars,
    profile, instance role, ...). Extra ``driver_kwargs`` are forwarded to
    the JDBC driver properties.
    """
    # The staging dir may come from the environment when not passed in.
    if s3_staging_dir:
        self.s3_staging_dir = s3_staging_dir
    else:
        self.s3_staging_dir = os.getenv(self._ENV_S3_STAGING_DIR, None)
    assert self.s3_staging_dir, 'Required argument `s3_staging_dir` not found.'
    assert schema_name, 'Required argument `schema_name` not found.'
    self.schema_name = schema_name
    if credential_file:
        # Credential handling is deferred to the JDBC driver via the file,
        # so no keys are resolved on the Python side.
        self.access_key = None
        self.secret_key = None
        self.token = None
        self.credential_file = credential_file
        assert self.credential_file, 'Required argument `credential_file` not found.'
        self.region_name = region_name
        assert self.region_name, 'Required argument `region_name` not found.'
    else:
        import botocore.session
        session = botocore.session.get_session()
        # Explicit arguments override the session's defaults.
        if access_key and secret_key:
            session.set_credentials(access_key, secret_key)
        if profile_name:
            session.set_config_variable('profile', profile_name)
        if region_name:
            session.set_config_variable('region', region_name)
        credentials = session.get_credentials()
        self.access_key = credentials.access_key
        assert self.access_key, 'Required argument `access_key` not found.'
        self.secret_key = credentials.secret_key
        assert self.secret_key, 'Required argument `secret_key` not found.'
        self.token = credentials.token
        self.credential_file = None
        self.region_name = session.get_config_variable('region')
        assert self.region_name, 'Required argument `region_name` not found.'
    # Start the JVM, register the driver class, and open the connection.
    self._start_jvm(jvm_path, jvm_options, driver_path)
    props = self._build_driver_args(**driver_kwargs)
    jpype.JClass(ATHENA_DRIVER_CLASS_NAME)
    self._jdbc_conn = jpype.java.sql.DriverManager.getConnection(
        ATHENA_CONNECTION_STRING.format(region=self.region_name, schema=schema_name), props)
    # Pluggable type conversion / parameter formatting with sane defaults.
    self._converter = converter if converter else JDBCTypeConverter()
    self._formatter = formatter if formatter else ParameterFormatter()
def __init__(self, aws_host, aws_service, aws_access_key=None, aws_secret_access_key=None, aws_region=None, headers=None):
    """
    Example usage for talking to an AWS Elasticsearch Service:

    If an access key, secret access key, or the region is not provided
    they will be determined using the same method as the aws cli

    AWSRequestsAuth(aws_host='search-service-foobar.us-east-1.es.amazonaws.com',
                    aws_service='es',
                    aws_access_key='YOURKEY',
                    aws_secret_access_key='YOURSECRET',
                    aws_region='us-east-1')
    """
    self.aws_access_key = aws_access_key
    self.aws_secret_access_key = aws_secret_access_key
    self.aws_host = aws_host
    self.aws_region = aws_region
    self.service = aws_service
    self.headers = headers if headers else {}

    if not (aws_access_key and aws_secret_access_key):
        # Attempt to get instance role creds
        metadata_exception = TypeError("AWS credentials not provided, and they cannot be retreived from configuration")
        try:
            import botocore.session
        except ImportError:
            raise metadata_exception
        session = botocore.session.Session()
        security_creds = session.get_credentials()
        if not security_creds:
            raise metadata_exception
        self.aws_access_key = security_creds.access_key
        self.aws_secret_access_key = security_creds.secret_key
        # Temporary credentials require the session token header.
        if security_creds.token:
            self.headers['X-Amz-Security-Token'] = security_creds.token

    if not aws_region:
        # BUG FIX: this branch previously imported `boto.session` and called
        # Session().get_config_variable('region') — boto has no `session`
        # module and no get_config_variable(); that API belongs to botocore
        # (as used above). Use botocore for region resolution as well.
        try:
            import botocore.session
        except ImportError:
            raise TypeError("Unable to determine region")
        session = botocore.session.Session()
        self.aws_region = session.get_config_variable('region')
    else:
        self.aws_region = aws_region
def get_environment():
    """Return env-var dicts for AWS credentials plus the Elsevier keys."""
    # Get AWS credentials
    # http://stackoverflow.com/questions/36287720/boto3-get-credentials-dynamically
    session = botocore.session.get_session()
    # FIX: read both keys from a single get_credentials() call — credentials
    # are refreshable, so two separate calls can return a mismatched
    # access/secret pair (see https://stackoverflow.com/a/36291428).
    credentials = session.get_credentials()
    access_key = credentials.access_key
    secret_key = credentials.secret_key
    # Get the Elsevier keys from the Elsevier client
    environment_vars = [
        {'name': ec.api_key_env_name,
         'value': ec.elsevier_keys.get('X-ELS-APIKey')},
        {'name': ec.inst_key_env_name,
         'value': ec.elsevier_keys.get('X-ELS-Insttoken')},
        {'name': 'AWS_ACCESS_KEY_ID',
         'value': access_key},
        {'name': 'AWS_SECRET_ACCESS_KEY',
         'value': secret_key}]
    return environment_vars
def load_aws_config(access_key, secret_key, security_token, credentials_path, profile):
    # type: (str, str, str, str, str) -> Tuple[str, str, str]
    """
    Load aws credential configuration, by parsing credential file, then try
    to fall back to botocore, by checking (access_key,secret_key) are not
    (None,None)
    """
    if access_key is None or secret_key is None:
        try:
            exists = os.path.exists(credentials_path)
            __log('Credentials file \'{0}\' exists \'{1}\''.format(credentials_path, exists))
            config = configparser.ConfigParser()
            config.read(credentials_path)
            # Single-pass "loop": each missing value is filled from the
            # profile section, and any `else: break` ends the chain early.
            # NOTE(review): if access_key was supplied by the caller but
            # secret_key was not, the first `break` fires before secret_key
            # and the session token are read from the file — confirm this
            # all-or-nothing behavior is intended.
            while True:
                if access_key is None and config.has_option(profile, "aws_access_key_id"):
                    access_key = config.get(profile, "aws_access_key_id")
                else:
                    break
                if secret_key is None and config.has_option(profile, "aws_secret_access_key"):
                    secret_key = config.get(profile, "aws_secret_access_key")
                else:
                    break
                if security_token is None and config.has_option(profile, "aws_session_token"):
                    security_token = config.get(profile, "aws_session_token")
                break
        except configparser.NoSectionError as exception:
            __log('AWS profile \'{0}\' not found'.format(exception.args))
            raise exception
        except configparser.NoOptionError as exception:
            __log('AWS profile \'{0}\' is missing \'{1}\''.format(profile, exception.args))
            raise exception
        except ValueError as exception:
            __log(exception)
            raise exception
    # try to load instance credentials using botocore
    if access_key is None or secret_key is None:
        try:
            __log("loading botocore package")
            import botocore
        except ImportError:
            __log("botocore package could not be loaded")
            botocore = None
        if botocore:
            import botocore.session
            session = botocore.session.get_session()
            cred = session.get_credentials()
            access_key, secret_key, security_token = cred.access_key, cred.secret_key, cred.token
    return access_key, secret_key, security_token
def _get_credentials():
    """Resolve fresh AWS credentials and region, keyed by env-var name."""
    log.write("Refreshing credentials.")
    session = botocore.session.Session()
    credentials = session.get_credentials()
    return {
        "AWS_ACCESS_KEY_ID": credentials.access_key,
        "AWS_SECRET_ACCESS_KEY": credentials.secret_key,
        "AWS_SESSION_TOKEN": credentials.token,
        # The region is resolved by letting botocore construct a client and
        # reading the region it bound to.
        "AWS_DEFAULT_REGION": session.create_client("s3").meta.region_name
    }
def get_aws_auth(self):
    '''Use pip package to get IAM creds.

    Builds an AWSRequestsAuth signer for the API Gateway endpoint named by
    the TARGET_API_AWS_AUTH environment variable, using botocore's default
    credential chain and configured region.
    '''
    session = botocore.session.get_session()
    aws_credentials = session.get_credentials()
    # "execute-api" is the signing service name for API Gateway.
    auth = AWSRequestsAuth(aws_access_key=aws_credentials.access_key,
                           aws_secret_access_key=aws_credentials.secret_key,
                           aws_host=environ['TARGET_API_AWS_AUTH'],
                           aws_region=session.get_config_variable('region'),
                           aws_service="execute-api")
    return auth
def get_creds_with_retry(session, max_tries=10, sleep=0.1):
    """Poll ``session.get_credentials()`` until it yields a non-None value.

    Sleeps between attempts with exponential back-off (doubling, capped at
    10 seconds). Returns the credentials, or None after ``max_tries``
    unsuccessful attempts.
    """
    attempt = 0
    while attempt < max_tries:
        if attempt:
            # Back off before every retry after the first attempt.
            time.sleep(sleep)
            sleep = min(sleep * 2, 10)
        creds = session.get_credentials()
        if creds is not None:
            return creds
        attempt += 1
    return None
def test_default_profile_is_obeyed(self):
    """A fresh session should resolve credentials from the profile named by
    BOTO_DEFAULT_PROFILE rather than from 'default'."""
    self.environ['BOTO_DEFAULT_PROFILE'] = 'personal'
    session = botocore.session.get_session()
    credentials = session.get_credentials()
    # These values belong to the 'personal' profile fixture.
    self.assertEqual(credentials.access_key, 'fie')
    self.assertEqual(credentials.secret_key, 'baz')
    self.assertEqual(credentials.token, 'fiebaz')
    self.assertEqual(credentials.method, 'config')
    # Both profiles from the fixture remain visible.
    self.assertEqual(len(session.available_profiles), 2)
    self.assertIn('default', session.available_profiles)
    self.assertIn('personal', session.available_profiles)
def test_default_profile_is_obeyed(self):
    """A fresh session should resolve credentials from the profile named by
    BOTO_DEFAULT_PROFILE rather than from 'default'."""
    self.environ["BOTO_DEFAULT_PROFILE"] = "personal"
    session = botocore.session.get_session()
    credentials = session.get_credentials()
    # These values belong to the 'personal' profile fixture.
    self.assertEqual(credentials.access_key, "fie")
    self.assertEqual(credentials.secret_key, "baz")
    self.assertEqual(credentials.token, "fiebaz")
    self.assertEqual(credentials.method, "config")
    # Both profiles from the fixture remain visible.
    self.assertEqual(len(session.available_profiles), 2)
    self.assertIn("default", session.available_profiles)
    self.assertIn("personal", session.available_profiles)
def test_config(self):
    """Credentials should come from AWS_CONFIG_FILE when environment
    credentials are absent, and switching session.profile should switch
    the resolved credentials."""
    # Make sure environment credentials can't shadow the config file.
    if 'AWS_ACCESS_KEY_ID' in os.environ:
        del os.environ['AWS_ACCESS_KEY_ID']
    if 'AWS_SECRET_ACCESS_KEY' in os.environ:
        del os.environ['AWS_SECRET_ACCESS_KEY']
    os.environ['BOTO_CONFIG'] = ''
    config_path = os.path.join(os.path.dirname(__file__), 'aws_config')
    os.environ['AWS_CONFIG_FILE'] = config_path
    session = botocore.session.get_session()
    credentials = session.get_credentials()
    assert credentials.access_key == 'foo'
    assert credentials.secret_key == 'bar'
    assert credentials.method == 'config'
    assert session.available_profiles == ['default', 'personal']
    # Switching profile re-resolves credentials from the same file.
    session.profile = 'personal'
    credentials = session.get_credentials()
    assert credentials.access_key == 'fie'
    assert credentials.secret_key == 'baz'
    assert credentials.method == 'config'
    assert session.available_profiles == ['default', 'personal']
def get_credentials(self, region_name: Optional[str] = None) -> ReadOnlyCredentials:
    """
    Get the underlying `botocore.Credentials` object.

    This contains the following authentication attributes: access_key,
    secret_key and token.

    :param region_name: optional region used to resolve the session.
    :return: a frozen (immutable) snapshot of the session's credentials.
    """
    session, _ = self._get_credentials(region_name=region_name)
    # Credentials are refreshable, so accessing your access key and
    # secret key separately can lead to a race condition.
    # See https://stackoverflow.com/a/36291428/8283373
    return session.get_credentials().get_frozen_credentials()
def test_envvar(self):
    """Environment-variable credentials should win when the config file
    contains no credentials."""
    # Point at a config file that deliberately has no credentials.
    config_path = os.path.join(os.path.dirname(__file__), 'aws_config_nocreds')
    os.environ['AWS_CONFIG_FILE'] = config_path
    os.environ['BOTO_CONFIG'] = ''
    os.environ['AWS_ACCESS_KEY_ID'] = 'foo'
    os.environ['AWS_SECRET_ACCESS_KEY'] = 'bar'
    session = botocore.session.get_session()
    credentials = session.get_credentials()
    assert credentials.access_key == 'foo'
    assert credentials.secret_key == 'bar'
    # 'env' marks the environment-variable provider as the source.
    assert credentials.method == 'env'
def test_credentials_file(self):
    """AWS_CREDENTIAL_FILE should be honored once all other credential
    sources are scrubbed from the environment."""
    config_path = os.path.join(os.path.dirname(__file__), 'aws_credentials')
    # Remove every competing credential source first.
    for var in ('AWS_ACCESS_KEY_ID', 'AWS_SECRET_ACCESS_KEY', 'BOTO_CONFIG', 'AWS_CONFIG_FILE'):
        os.environ.pop(var, None)
    os.environ['AWS_CREDENTIAL_FILE'] = config_path
    session = botocore.session.get_session()
    credentials = session.get_credentials()
    assert credentials.access_key == 'foo'
    assert credentials.secret_key == 'bar'
    assert credentials.method == 'credentials-file'
def test_iam_role(self):
    """Credentials should fall back to the instance-metadata (IAM role)
    provider when env vars and config files are all absent."""
    # Scrub every other credential source.
    if 'AWS_ACCESS_KEY_ID' in os.environ:
        del os.environ['AWS_ACCESS_KEY_ID']
    if 'AWS_SECRET_ACCESS_KEY' in os.environ:
        del os.environ['AWS_SECRET_ACCESS_KEY']
    if 'AWS_CONFIG_FILE' in os.environ:
        del os.environ['AWS_CONFIG_FILE']
    os.environ['BOTO_CONFIG'] = ''
    session = botocore.session.get_session()
    # NOTE(review): `metadata` is not defined in this function, and current
    # botocore's get_credentials() takes no such argument — presumably a
    # module-level fixture against an older API; verify before reuse.
    credentials = session.get_credentials(metadata=metadata)
    assert credentials.access_key == 'foo'
    assert credentials.secret_key == 'bar'
    assert credentials.method == 'iam-role'
def get_environment():
    """Return env-var dicts for AWS credentials and the Elsevier API keys,
    dropping entries whose name or value is empty."""
    # Get AWS credentials
    # http://stackoverflow.com/questions/36287720/boto3-get-credentials-dynamically
    session = botocore.session.get_session()
    # FIX: read both keys from a single get_credentials() call — credentials
    # are refreshable, so two separate calls can return a mismatched
    # access/secret pair (see https://stackoverflow.com/a/36291428).
    credentials = session.get_credentials()
    access_key = credentials.access_key
    secret_key = credentials.secret_key
    environment_vars = [{
        'name': 'AWS_ACCESS_KEY_ID',
        'value': access_key
    }, {
        'name': 'AWS_SECRET_ACCESS_KEY',
        'value': secret_key
    }]
    # Get the Elsevier keys from the Elsevier client
    environment_vars += get_elsevier_api_keys()
    # Only include values that are not empty.
    return [
        var_dict for var_dict in environment_vars
        if var_dict['value'] and var_dict['name']
    ]
def upload_file_to_s3():
    '''
    Render the S3 browser-upload form with a signed POST policy.

    Help from:
    https://forums.aws.amazon.com/thread.jspa?messageID=314467
    http://stackoverflow.com/questions/36287720/boto3-get-credentials-dynamically
    http://docs.aws.amazon.com/AmazonS3/latest/dev/UsingHTTPPOST.html and nexts
    '''
    # Define S3 policy document
    delta = timedelta(days=10)  # 10 days of expiration
    today = datetime.today()
    exp = (today + delta).isoformat()
    # The service rejects microsecond timestamps such as
    # '2016-04-28T23:31:36.798044', so a hardcoded date is used instead.
    policy_doc = {
        "expiration": "2017-12-01T12:00:00.000Z",
        "conditions": [
            {"acl": "public-read"},
            {"bucket": "gas-inputs"},
            ["starts-with", "$key", "slarrain/"],
            {"success_action_redirect": "https://s3.amazonaws.com/gas-inputs/slarrain/upload_succesful.html"}
        ]
    }

    # Encode and sign policy document
    session = botocore.session.get_session()
    # FIX: one get_credentials() call — separate calls can race with a
    # credential refresh and return a mismatched key pair.
    credentials = session.get_credentials()
    aws_access_key = credentials.access_key
    aws_secret_key = credentials.secret_key

    policy_doc = "".join(str(policy_doc).split())  # Remove all whitespace

    # FIX: b64encode and hmac.new require bytes — encode explicitly instead
    # of relying on Python 2 str coercion (the old str() calls break on
    # Python 3 and would render b'...' into the form).
    policy_encoded = base64.b64encode(policy_doc.encode('utf-8'))
    signature = base64.b64encode(hmac.new(aws_secret_key.encode('utf-8'), policy_encoded, sha).digest())
    # This was helpful: http://stackoverflow.com/questions/20849805/python-hmac-typeerror-character-mapping-must-return-integer-none-or-unicode

    # Render the upload form
    return template("upload.tpl", bucket_name="gas-inputs",
                    aws_key=str(aws_access_key), aws_username="******",
                    policy_encod=policy_encoded.decode('ascii'),
                    signat=signature.decode('ascii'))
def test_boto_config(self):
    """Legacy BOTO_CONFIG files should be used when env vars and the AWS
    config file are absent."""
    # Scrub the higher-priority credential sources.
    if 'AWS_ACCESS_KEY_ID' in os.environ:
        del os.environ['AWS_ACCESS_KEY_ID']
    if 'AWS_SECRET_ACCESS_KEY' in os.environ:
        del os.environ['AWS_SECRET_ACCESS_KEY']
    if 'AWS_CONFIG_FILE' in os.environ:
        del os.environ['AWS_CONFIG_FILE']
    config_path = os.path.join(os.path.dirname(__file__), 'boto_config')
    os.environ['BOTO_CONFIG'] = config_path
    session = botocore.session.get_session()
    credentials = session.get_credentials()
    assert credentials.access_key == 'foo'
    assert credentials.secret_key == 'bar'
    # 'boto' marks the legacy boto-config provider as the source.
    assert credentials.method == 'boto'
def http_download(location):
    """Download `location` over HTTP(S); for s3.amazonaws.com URLs, sign the
    request with locally-discoverable AWS credentials when available.

    Raises RuntimeError on any non-200 response.
    """
    cred = None
    if location.startswith('https://s3-external-1.amazonaws.com/') or location.startswith('https://s3.amazonaws.com/'):
        s3_base = urlparse(location).hostname
        # if we can find credentials, attach them
        session = botocore.session.get_session()
        cred = [getattr(session.get_credentials(), attr) for attr in ['access_key', 'secret_key']]
        # NOTE(review): if only one of the two values is non-empty, S3Auth is
        # still constructed with both (one falsy); and when both are empty,
        # `cred` stays a two-element list and is passed as auth= below —
        # confirm requests tolerates that.
        if lfilter(None, cred):  # remove any empty values
            cred = S3Auth(*cred, service_url=s3_base)
    resp = requests.get(location, auth=cred)
    if resp.status_code != 200:
        raise RuntimeError("failed to download xml from %r, got response code: %s\n%s" % (location, resp.status_code, resp.content))
    resp.encoding = 'utf-8'
    return resp.text
def run_docker(m):
    """Re-run the metasync command in docker.

    Forwards the current AWS credentials into the container environment,
    strips the docker flags from the argument list (so the container does
    not recurse), streams container output, and exits with the container's
    exit code.
    """
    import botocore.session
    from subprocess import Popen, PIPE, STDOUT

    session = botocore.session.get_session()
    # FIX: fetch credentials once — get_credentials() can refresh, so two
    # separate calls may yield a mismatched access/secret pair.
    credentials = session.get_credentials()

    args = [
        'docker', 'run', '--rm', '-t', '-i',
        '-eAWS_ACCESS_KEY_ID={}'.format(credentials.access_key),
        '-eAWS_SECRET_ACCESS_KEY={}'.format(credentials.secret_key),
        'civicknowledge/metatab', 'metasync'
    ]

    # Remove the docker flags so the inner invocation runs natively.
    for a in ('-D', '--docker'):
        try:
            m.raw_args.remove(a)
        except ValueError:
            pass

    args.extend(m.raw_args[1:])

    if m.args.verbose:
        prt("Running Docker Command: ", ' '.join(args))
    else:
        prt("Running In Docker")

    process = Popen(args, stdout=PIPE, stderr=STDOUT)

    # Stream container output line by line as it arrives.
    with process.stdout:
        for line in iter(process.stdout.readline, b''):
            prt(line.decode('ascii'), end='')

    exitcode = process.wait()  # 0 means success
    exit(exitcode)
def get_creds_with_retry(session, max_tries=10, sleep=0.1):
    """
    Attempt to obtain credentials up to `max_tries` times with back off

    :param session: botocore session, see get_boto_session
    :param max_tries: number of attempts before failing and returning None
    """
    delay = sleep
    for attempt in range(max_tries):
        # Sleep before every attempt except the first, doubling the delay
        # each time up to a 10-second ceiling.
        if attempt > 0:
            time.sleep(delay)
            delay = min(delay * 2, 10)
        creds = session.get_credentials()
        if creds is not None:
            return creds
    return None
def enumerate(self, values):
    """Yield matching resources for the parsed ARN components in `values`.

    `values` unpacks as (scheme, provider, service_name, region, account);
    the resource pattern itself comes from self.pattern.
    """
    _, provider, service_name, region, account = values
    LOG.debug('enumerate, account=%s', account)
    profile = self._arn.account.map_account_to_profile(account)
    LOG.debug('enumerate, profile=%s', profile)
    session = botocore.session.get_session()
    session.profile = profile
    LOG.debug('enumerate, access_key=%s', session.get_credentials().access_key)
    service = session.get_service(service_name)
    endpoint = Endpoint(service, region, account)
    resource_type, resource_id = self._split_resource(self.pattern)
    LOG.debug('resource_type=%s, resource_id=%s', resource_type, resource_id)
    # NOTE(review): this loop variable shadows the resource_type unpacked
    # above from self.pattern — only resource_id from the pattern is used
    # past this point.
    for resource_type in self.matches:
        kwargs = {}
        resource_path = '.'.join([provider, service_name, resource_type])
        resource_cls = skew.resources.find_resource_class(resource_path)
        do_client_side_filtering = False
        if resource_id and resource_id != '*':
            # If we are looking for a specific resource and the
            # API provides a way to filter on a specific resource
            # id then let's insert the right parameter to do the filtering.
            # If the API does not support that, we will have to filter
            # after we get all of the results.
            filter_name = resource_cls.Meta.filter_name
            if filter_name:
                if resource_cls.Meta.filter_type == 'list':
                    kwargs[filter_name] = [resource_id]
                else:
                    kwargs[filter_name] = resource_id
            else:
                do_client_side_filtering = True
        enum_op, path = resource_cls.Meta.enum_spec
        data = endpoint.call(enum_op, query=path, **kwargs)
        LOG.debug(data)
        for d in data:
            if do_client_side_filtering:
                # If the API does not support filtering, the resource
                # class should provide a filter method that will
                # return True if the returned data matches the
                # resource ID we are looking for.
                if not resource_cls.filter(resource_id, d):
                    continue
            resource = resource_cls(endpoint, d, self._arn.query)
            yield resource
def test_config(self):
    """Profile switching via BOTO_DEFAULT_PROFILE: the pre-built session
    resolves 'default', while a fresh session picks up 'personal'."""
    # The existing fixture session resolves the 'default' profile.
    credentials = self.session.get_credentials()
    assert credentials.access_key == 'foo'
    assert credentials.secret_key == 'bar'
    assert credentials.method == 'config'
    assert len(self.session.available_profiles) == 2
    assert 'default' in self.session.available_profiles
    assert 'personal' in self.session.available_profiles
    # A session created after setting the env var uses 'personal'.
    os.environ['BOTO_DEFAULT_PROFILE'] = 'personal'
    session = botocore.session.get_session()
    credentials = session.get_credentials()
    assert credentials.access_key == 'fie'
    assert credentials.secret_key == 'baz'
    assert credentials.token == 'fiebaz'
    assert credentials.method == 'config'
    assert len(session.available_profiles) == 2
    assert 'default' in session.available_profiles
    assert 'personal' in session.available_profiles
def get_creds_with_retry(session: Session, max_tries: int = 10, sleep: float = 0.1) -> Optional[Credentials]:
    """
    Attempt to obtain credentials up to `max_tries` times with back off

    :param session: botocore session, see mk_boto_session
    :param max_tries: number of attempts before failing and returning None
    :param sleep: number of seconds to sleep after first failure (doubles on every consecutive failure)
    :return: the credentials object, or None when every attempt came back empty
    """
    for i in range(max_tries):
        if i > 0:
            # Exponential back-off between retries, capped at 10 seconds.
            time.sleep(sleep)
            sleep = min(sleep * 2, 10)
        creds = session.get_credentials()
        if creds is not None:
            return creds
    return None
def s3_get_object_request_maker(region_name=None, credentials=None, ssl=True):
    """Return a builder of SigV4-signed urllib Requests for S3 GetObject.

    :param region_name: AWS region; auto-detected when None.
    :param credentials: frozen botocore credentials; resolved from the
        default session when None.
    :param ssl: use https when True, plain http otherwise.
    """
    from botocore.session import get_session
    from botocore.auth import S3SigV4Auth
    from botocore.awsrequest import AWSRequest
    from urllib.request import Request

    session = get_session()

    if region_name is None:
        region_name = auto_find_region()

    if credentials is None:
        # Freeze so the access/secret pair cannot change mid-signing.
        credentials = session.get_credentials().get_frozen_credentials()

    protocol = 'https' if ssl else 'http'
    auth = S3SigV4Auth(credentials, 's3', region_name)

    def build_request(bucket=None, key=None, url=None, Range=None):
        # Build and sign a GET request for bucket/key, or for a full url.
        if key is None and url is None:
            if bucket is None:
                raise ValueError('Have to supply bucket,key or url')
            # assume bucket is url
            url = bucket

        if url is not None:
            bucket, key = s3_url_parse(url)

        # (start, stop) pairs become inclusive HTTP byte ranges.
        if isinstance(Range, (tuple, list)):
            Range = 'bytes={}-{}'.format(Range[0], Range[1] - 1)

        headers = {}
        if Range is not None:
            headers['Range'] = Range

        req = AWSRequest(method='GET', url='{}://s3.{}.amazonaws.com/{}/{}'.format(protocol, region_name, bucket, key), headers=headers)
        auth.add_auth(req)

        # Hand back a stdlib Request carrying the signed headers.
        return Request(req.url, headers=dict(**req.headers), method='GET')

    return build_request
def http_download(location):
    """Download `location` over HTTP(S); for s3.amazonaws.com URLs, sign the
    request with locally-discoverable AWS credentials when available.

    Raises RuntimeError on any non-200 response.
    """
    cred = None
    if location.startswith(
            'https://s3-external-1.amazonaws.com/') or location.startswith(
            'https://s3.amazonaws.com/'):
        s3_base = urlparse(location).hostname
        # if we can find credentials, attach them
        session = botocore.session.get_session()
        cred = [
            getattr(session.get_credentials(), attr)
            for attr in ['access_key', 'secret_key']
        ]
        # NOTE(review): when both values are empty, `cred` stays a plain
        # list and is passed as auth= below — confirm requests tolerates it.
        if lfilter(None, cred):  # remove any empty values
            cred = S3Auth(*cred, service_url=s3_base)
    resp = requests.get(location, auth=cred)
    if resp.status_code != 200:
        raise RuntimeError(
            "failed to download xml from %r, got response code: %s\n%s" % (location, resp.status_code, resp.content))
    resp.encoding = 'utf-8'
    return resp.text
def _get_presigned_url(self, cluster_name, role_arn):
    """Build a presigned URL for the auth service, signed with the
    credentials of the (possibly assumed-role) session.

    :param cluster_name: target cluster name, sent as a signed header.
    :param role_arn: role to assume when building the signing session.
    :return: the presigned URL string.
    """
    session = self._session_handler.get_session(
        self._region_name, role_arn
    )
    # Fall back to the session's configured region when none was given.
    if self._region_name is None:
        self._region_name = session.get_config_variable('region')
    # Resolve the service endpoint from botocore's bundled endpoint data.
    loader = botocore.loaders.create_loader()
    data = loader.load_data("endpoints")
    endpoint_resolver = botocore.regions.EndpointResolver(data)
    endpoint = endpoint_resolver.construct_endpoint(
        AUTH_SERVICE, self._region_name
    )
    signer = RequestSigner(
        ServiceId(AUTH_SERVICE), self._region_name, AUTH_SERVICE, AUTH_SIGNING_VERSION, session.get_credentials(), session.get_component('event_emitter')
    )
    action_params = 'Action=' + AUTH_COMMAND + '&Version=' + AUTH_API_VERSION
    params = {
        'method': 'GET',
        'url': 'https://' + endpoint["hostname"] + '/?' + action_params,
        'body': {},
        'headers': {CLUSTER_NAME_HEADER: cluster_name},
        'context': {}
    }
    # Sign against the endpoint's credential-scope region (may differ from
    # the request region for partition-wide endpoints).
    url = signer.generate_presigned_url(
        params, region_name=endpoint["credentialScope"]["region"], operation_name='', expires_in=URL_TIMEOUT
    )
    return url
def assume_role(session, role_arn, duration=3600, session_name=None, serial_number=None):
    """Return a new botocore Session whose credentials are obtained by
    assuming `role_arn` with the credentials of `session`.

    :param duration: role session duration in seconds.
    :param session_name: optional RoleSessionName for the STS call.
    :param serial_number: optional MFA device serial number.
    """
    # The fetcher performs the AssumeRole call, caching the result on disk
    # so repeated invocations skip STS. Unset optional args are dropped.
    fetcher = botocore.credentials.AssumeRoleCredentialFetcher(
        session.create_client,
        session.get_credentials(),
        role_arn,
        extra_args=filter_none_values({
            'DurationSeconds': duration,
            'RoleSessionName': session_name,
            'SerialNumber': serial_number
        }),
        cache=botocore.credentials.JSONFileCache()
    )
    # The new session resolves credentials exclusively through the fetcher.
    role_session = botocore.session.Session()
    role_session.register_component(
        'credential_provider',
        botocore.credentials.CredentialResolver([AssumeRoleProvider(fetcher)])
    )
    return role_session
def get_aws_credentials(profile_name, duration=None):
    """Get AWS credentials for a given profile, with MFA handled up front.

    A low-level botocore session (with a JSON file cache) is used instead of
    a plain boto3 session so that: (1) any MFA prompt happens first, before
    the app does other work; (2) cached MFA/assume-role credentials are
    reused across runs; (3) the resulting environment variables can be
    handed to AWS-aware subprocesses (e.g. the SAM CLI) as well as used to
    construct new Botocore/Boto3 sessions.

    Args:
        profile_name (str): An AWS profile name, as defined in the user's
            .aws credentials file; falsy means the default chain.
        duration (int): Session duration in seconds; the duration configured
            in the role policy takes precedence. Defaults to 3600.

    Returns:
        (dict): AWS_ACCESS_KEY_ID, AWS_SECRET_ACCESS_KEY,
            AWS_SESSION_TOKEN and AWS_ACCOUNT_ID values.
    """
    if not duration:
        duration = 3600
    # Construct low level botocore session with cache, which allows MFA session reuse
    if profile_name:
        session = botocore.session.Session(profile=profile_name)
        mfa_serial = session.full_config['profiles'][profile_name].get(
            'mfa_serial')
    else:
        session = botocore.session.Session()
        mfa_serial = None
    session.get_component('credential_provider').get_provider('assume-role').cache = \
        botocore.credentials.JSONFileCache()
    # this mfa_serial code is _only_ here to deal with boto profiles that _do not_ assume a role
    # which is required because this PR is still open: https://github.com/boto/botocore/pull/1399
    # otherwise MFA is nicely handled automatically by boto. Sadly, these credentials are not cached
    if mfa_serial and not session.full_config['profiles'][profile_name].get(
            'role_arn'):
        sts = session.create_client('sts')
        mfa_code = input("Enter MFA code for {}: ".format(mfa_serial))
        response = sts.get_session_token(DurationSeconds=duration, SerialNumber=mfa_serial, TokenCode=mfa_code)
        credentials = response['Credentials']
        identity = sts.get_caller_identity()
        env_vars = {
            'AWS_ACCESS_KEY_ID': credentials['AccessKeyId'],
            'AWS_SECRET_ACCESS_KEY': credentials['SecretAccessKey'],
            'AWS_SESSION_TOKEN': credentials['SessionToken'],
            'AWS_ACCOUNT_ID': identity.get('Account')
        }
        print('.')
    else:
        # Role-assuming (or MFA-less) profiles: botocore resolves and caches
        # the credentials itself.
        credentials = session.get_credentials()
        sts = session.create_client('sts')
        identity = sts.get_caller_identity()
        env_vars = {
            'AWS_ACCESS_KEY_ID': credentials.access_key,
            'AWS_SECRET_ACCESS_KEY': credentials.secret_key,
            'AWS_SESSION_TOKEN': credentials.token,
            'AWS_ACCOUNT_ID': identity.get('Account')
        }
    return env_vars
# Share the assume-role credential cache with the AWS CLI (~/.aws/cli/cache)
# when this botocore version ships JSONFileCache natively.
if hasattr(botocore.credentials, 'JSONFileCache'):
    cli_cache = os.path.join(os.path.expanduser('~'), '.aws/cli/cache')
    try:
        session.get_component('credential_provider').get_provider('assume-role').cache = botocore.credentials.JSONFileCache(cli_cache)
    except botocore.exceptions.ProfileNotFound as e:
        sys.exit(e)
else:
    # workaround old awscli without https://github.com/boto/botocore/pull/1157
    from awscli.customizations.assumerole import inject_assume_role_provider_cache
    from awscli.customizations.scalarparse import add_scalar_parsers
    inject_assume_role_provider_cache(session)
    add_scalar_parsers(session)

try:
    credentials = session.get_credentials()
    if credentials is None:
        sys.exit('Unable to locate AWS credentials.')
    # Snapshot the (possibly refreshable) credentials into an immutable set.
    frozen_credentials = credentials.get_frozen_credentials()
except KeyboardInterrupt as e:
    # An MFA prompt may be interrupted by the user; exit cleanly.
    sys.exit(e)
except botocore.exceptions.ParamValidationError as e:
    sys.exit(e)
except botocore.exceptions.ProfileNotFound as e:
    sys.exit(e)
except botocore.exceptions.ClientError as e:
    sys.exit(e)

# NOTE(review): looks like a credential_process-style Version-1 payload;
# the chunk is truncated here in this view — confirm against the full file.
data = {
    'Version': 1,
    'AccessKeyId': frozen_credentials.access_key,
def from_url(remote_url):
    """Parse CodeCommit repository information out of a git remote url.

    Accepted url shapes (git strips the leading 'codecommit::' of the
    second form before handing it to us):

      * codecommit://<profile>@<repository>
      * codecommit::<region>://<profile>@<repository>

    The '<profile>@' part is optional; the session's default profile is
    used when it is absent.

    :param str remote_url: git remote url to parse
    :returns: **Context** with our CodeCommit repository information
    :raises:
      * **FormatError** if the url is malformed
      * **ProfileNotFound** if the url references a profile that doesn't exist
      * **RegionNotFound** if the url references a region that doesn't exist
    """
    parsed = urlparse(remote_url)
    emitter = botocore.hooks.HierarchicalEmitter()
    profile = 'default'
    repository = parsed.netloc
    region = parsed.scheme

    # Both a scheme and a netloc are mandatory.
    if not (parsed.scheme and parsed.netloc):
        raise FormatError("'%s' is a malformed url" % remote_url)

    if '@' in parsed.netloc:
        profile, repository = parsed.netloc.split('@', 1)
        session = botocore.session.Session(profile=profile, event_hooks=emitter)
        known_profiles = session.available_profiles
        if profile not in known_profiles:
            raise ProfileNotFound('Profile %s not found, available profiles are: %s' % (profile, ', '.join(known_profiles)))
    else:
        session = botocore.session.Session(event_hooks=emitter)

    # When the aws cli is installed, let its plugins participate in
    # authentication; silently skip this step otherwise.
    try:
        import awscli.plugin
        awscli.plugin.load_plugins(
            session.full_config.get('plugins', {}),
            event_hooks=emitter,
            include_builtins=False,
        )
        session.emit_first_non_none_response('session-initialized', session=session)
    except ImportError:
        pass

    # The region came from the url unless the plain 'codecommit' scheme was
    # used, in which case it must come from the profile configuration.
    if parsed.scheme == 'codecommit':
        region = session.get_config_variable('region')
        if not region:
            raise RegionNotFound("Profile %s doesn't have a region available. Please set it." % profile)

    credentials = session.get_credentials()
    if not credentials:
        raise CredentialsNotFound("Profile %s doesn't have credentials available." % profile)

    return Context(session, repository, 'v1', region, credentials)
def __init__(self, aws_host, aws_service, aws_access_key=None,
             aws_secret_access_key=None, aws_region=None, headers=None):
    """Auth helper that SigV4-signs requests for an AWS endpoint.

    If an access key, secret access key, or the region is not provided
    they will be determined using the same method as the aws cli.

    Example usage for talking to an AWS Elasticsearch Service:

        AWSRequestsAuth(aws_host='search-service-foobar.us-east-1.es.amazonaws.com',
                        aws_service='es',
                        aws_access_key='YOURKEY',
                        aws_secret_access_key='YOURSECRET',
                        aws_region='us-east-1')

    :param aws_host: hostname of the AWS endpoint to sign for
    :param aws_service: AWS service name (e.g. 'es')
    :param aws_access_key: optional explicit access key
    :param aws_secret_access_key: optional explicit secret key
    :param aws_region: optional explicit region
    :param headers: optional extra headers dict
    :raises TypeError: when credentials or the region cannot be determined
    """
    self.aws_access_key = aws_access_key
    self.aws_secret_access_key = aws_secret_access_key
    self.aws_host = aws_host
    self.aws_region = aws_region
    self.service = aws_service
    self.headers = headers if headers else {}

    if not (aws_access_key and aws_secret_access_key):
        # Attempt to resolve credentials the same way the AWS CLI does.
        metadata_exception = TypeError(
            "AWS credentials not provided, and they cannot be retrieved from configuration"
        )
        try:
            import botocore.session
        except ImportError:
            raise metadata_exception
        session = botocore.session.Session()
        security_creds = session.get_credentials()
        if not security_creds:
            raise metadata_exception
        self.aws_access_key = security_creds.access_key
        self.aws_secret_access_key = security_creds.secret_key
        # Temporary (role/STS) credentials require the session token header.
        if security_creds.token:
            self.headers['X-Amz-Security-Token'] = security_creds.token

    if not aws_region:
        # BUGFIX: the region lookup previously imported `boto.session`,
        # which has no Session.get_config_variable; botocore is the library
        # that provides it (and is the one used for credentials above).
        try:
            import botocore.session
        except ImportError:
            raise TypeError("Unable to determine region")
        session = botocore.session.Session()
        self.aws_region = session.get_config_variable('region')
    else:
        self.aws_region = aws_region
parsed_url = urlparse(url) # Split the host into peices and pull the service and region from it. # If no region is in the url, then us-east-1 is assumed. host_parts = parsed_url.netloc.split('.') if len(host_parts) == 4: service = host_parts[0] region = host_parts[1] else: service = host_parts[0] region = 'us-east-1' # Pull user's AWS credentials from botocore session session = botocore.session.get_session() access_key = session.get_credentials().access_key secret_key = session.get_credentials().secret_key # Create timestamp for headers and date format for credential string t = datetime.datetime.utcnow() amzdate = t.strftime("%Y%m%dT%H%M%SZ") datestamp = t.strftime("%Y%m%d") # CREATE THE CANONICAL QUERY STRING # Get the query parameters query_params = parse_qsl(parsed_url.query) # Get the Action value from the query (if it exists); this can be used in the X-Amz-Target if 'Action' in query_params: param_dict = dict(query_params) action = param_dict['Action']
# NOTE(review): this module uses Python 2 only constructs (urllib2, urlparse,
# print statements) — it will not run under Python 3 as written.
import os.path
import json
import subprocess
import botocore.session
import boto3
import botocore
import hmac
import hashlib
import base64
import pytz
import urllib2
import urlparse

# Get the access key and secret key for the current session
session = botocore.session.get_session()
ACCESS_KEY = session.get_credentials().access_key
SECRET_KEY = session.get_credentials().secret_key

# Connect to SQS and get the message queue
sqs_conn = boto3.resource('sqs')
my_queue = sqs_conn.get_queue_by_name(QueueName='zhuoyuzhu_glacier')

# DynamoDB reference
dynamodb = boto3.resource('dynamodb')
ann_table = dynamodb.Table('zhuoyuzhu_annotations')


def restore(job_id, arch_id):
    # Start restoring process (definition truncated in this view)
    print 'Restore'
import subprocess
import boto3
import botocore
import pytz
import botocore.session
from boto3.dynamodb.conditions import Key
from mpcs_utils import log, auth
from bottle import route, request, response, redirect, template, static_file, run, post, get
from datetime import datetime, timedelta
from boto3 import client
from boto3.session import Session
from boto3.dynamodb.conditions import Key, Attr

# Use the boto session object only to get AWS credentials
session = botocore.session.get_session()
aws_access_key_id = str(session.get_credentials().access_key)
aws_secret_access_key = str(session.get_credentials().secret_key)
aws_session_token = str(session.get_credentials().token)

# Create a reference of dynamoDB
# NOTE(review): `request.app.config` is read at module level here, outside a
# bottle route — presumably valid in this app's startup order; confirm.
region_name = request.app.config['mpcs.aws.app_region']
dynamodb = boto3.resource('dynamodb', region_name = region_name)
ann_table = dynamodb.Table(request.app.config['mpcs.aws.dynamodb.annotations_table'])

# Define s3 policy property
bucket_name = request.app.config['mpcs.aws.s3.inputs_bucket']
encryption = request.app.config['mpcs.aws.s3.encryption']
acl = request.app.config['mpcs.aws.s3.acl']
result_bucket = request.app.config['mpcs.aws.s3.results_bucket']

# Job Request Topic
def main():
    """CLI entry point: parse curl-like arguments, resolve AWS credentials,
    sign and send the request, and print the response."""
    # note EC2 ignores Accept header and responds in xml
    default_headers = ['Accept: application/xml',
                       'Content-Type: application/json']

    parser = configargparse.ArgumentParser(
        description='Curl AWS request signing',
        formatter_class=configargparse.ArgumentDefaultsHelpFormatter)
    parser.add_argument('-v', '--verbose', action='store_true',
                        help='verbose flag', default=False)
    parser.add_argument('-i', '--include', action='store_true',
                        help='include headers in the output', default=False)
    parser.add_argument('-X', '--request',
                        help='Specify request command to use', default='GET')
    parser.add_argument('-d', '--data', help='HTTP POST data', default='')
    parser.add_argument('-H', '--header', help='HTTP header', action='append')
    parser.add_argument(
        '-k', '--insecure', action='store_false',
        help='This option allows awscurl to proceed and operate even for server '
             'connections otherwise considered insecure')
    parser.add_argument(
        '--data-binary', action='store_true',
        help='Process HTTP POST data exactly as specified with '
             'no extra processing whatsoever.', default=False)
    parser.add_argument('--region', help='AWS region', default='us-east-1',
                        env_var='AWS_DEFAULT_REGION')
    parser.add_argument('--profile', help='AWS profile', default='default',
                        env_var='AWS_PROFILE')
    parser.add_argument('--service', help='AWS service', default='execute-api')
    parser.add_argument('--access_key', env_var='AWS_ACCESS_KEY_ID')
    parser.add_argument('--secret_key', env_var='AWS_SECRET_ACCESS_KEY')
    # AWS_SECURITY_TOKEN is deprecated, but kept for backward compatibility
    # https://github.com/boto/botocore/blob/c76553d3158b083d818f88c898d8f6d7918478fd/botocore/credentials.py#L260-262
    parser.add_argument('--security_token', env_var='AWS_SECURITY_TOKEN')
    parser.add_argument('--session_token', env_var='AWS_SESSION_TOKEN')
    parser.add_argument('uri')

    args = parser.parse_args()

    global is_verbose
    is_verbose = args.verbose
    if args.verbose:
        __log(vars(parser.parse_args()))

    # An '@filename' data argument means "read the body from this file",
    # mirroring curl's behaviour.
    data = args.data
    if data is not None and data.startswith("@"):
        filename = data[1:]
        with open(filename, "r") as f:
            data = f.read()

    if args.header is None:
        args.header = default_headers

    # Fold the deprecated security token into the session token.
    if args.security_token is not None:
        args.session_token = args.security_token
        # NOTE(review): the flattened source places `del args.security_token`
        # here, yet `args.security_token` is read again below both as a
        # load_aws_config argument and as an assignment target — on the
        # token-provided path that read would raise AttributeError. Confirm
        # the intended statement order against the original file.
        del args.security_token

    # 'Name: value' strings -> {name: value} dict.
    headers = {k: v for (k, v) in map(lambda s: s.split(": "), args.header)}

    credentials_path = os.path.expanduser("~") + "/.aws/credentials"
    args.access_key, args.secret_key, args.security_token = load_aws_config(
        args.access_key, args.secret_key, args.security_token,
        credentials_path, args.profile)

    # Last resort: let botocore resolve credentials (env, config, instance
    # role, ...) when nothing explicit was found.
    if args.access_key is None:
        try:
            import botocore.session
            session = botocore.session.get_session()
            cred = session.get_credentials()
            if cred:
                if not cred.refresh_needed():
                    args.access_key, args.secret_key, args.security_token = cred.access_key, cred.secret_key, cred.token
                else:
                    # Accessing get_credentials() again after a refresh was
                    # signalled; botocore refreshes lazily.
                    cred = session.get_credentials()
                    args.access_key, args.secret_key, args.security_token = cred.access_key, cred.secret_key, cred.token
        except ImportError:
            __log("couldn't find botocore package")

    r = make_request(args.request, args.service, args.region, args.uri,
                     headers, data, args.access_key, args.secret_key,
                     args.session_token, args.data_binary, args.insecure)

    if args.include:
        print(r.headers, end='\n\n')
    print(r.text)

    # Exit non-zero (via the raised exception) on HTTP error statuses.
    r.raise_for_status()

    return 0
def check_aws_credentials():
    """Sanity-check that botocore can resolve AWS credentials.

    Touches the access and secret key attributes of the resolved
    credentials; when no credentials can be resolved get_credentials()
    returns None and the attribute access raises AttributeError, which is
    the failure signal callers observe.
    """
    session = botocore.session.get_session()
    for attr in ('access_key', 'secret_key'):
        getattr(session.get_credentials(), attr)
def from_url(remote_url):
    """
    Parses repository information from a git url, filling in additional
    attributes we need from our AWS profile.

    Our remote helper accepts two distinct types of urls...

    * codecommit://<profile>@<repository>
    * codecommit::<region>://<profile>@<repository>

    If provided the former we get the whole url, but if the later git will
    truncate the proceeding 'codecommit::' prefix for us.

    The '<profile>@' url is optional, using the aws sessions present profile
    if not provided.

    :param str remote_url: git remote url to parse

    :returns: **Context** with our CodeCommit repository information

    :raises:
      * **FormatError** if the url is malformed
      * **ProfileNotFound** if the url references a profile that doesn't exist
      * **RegionNotFound** if the url references a region that doesn't exist
      * **RegionNotAvailable** if the url references a region that is not available
    """
    url = urlparse(remote_url)
    event_handler = botocore.hooks.HierarchicalEmitter()
    profile = 'default'
    repository = url.netloc

    # Both a scheme and a netloc are required for either accepted url form.
    if not url.scheme or not url.netloc:
        raise FormatError('The following URL is malformed: {}. A URL must be in one of the two following formats: codecommit://<profile>@<repository> or codecommit::<region>://<profile>@<repository>'.format(remote_url))

    if '@' in url.netloc:
        profile, repository = url.netloc.split('@', 1)
        session = botocore.session.Session(profile=profile, event_hooks=event_handler)
        if profile not in session.available_profiles:
            raise ProfileNotFound('The following profile was not found: {}. Available profiles are: {}. Either use one of the available profiles, or create an AWS CLI profile to use and then try again. For more information, see Configure an AWS CLI Profile in the AWS CLI User Guide.'.format(profile, ', '.join(session.available_profiles)))
    else:
        session = botocore.session.Session(event_hooks=event_handler)

    # Cache assume-role credentials on disk so repeated git operations
    # don't re-prompt / re-assume.
    session.get_component('credential_provider').get_provider('assume-role').cache = JSONFileCache()

    try:
        # when the aws cli is available support plugin authentication
        import awscli.plugin

        awscli.plugin.load_plugins(
            session.full_config.get('plugins', {}),
            event_hooks=event_handler,
            include_builtins=False,
        )

        session.emit_first_non_none_response('session-initialized', session=session)
    except ImportError:
        pass

    # Every region in which CodeCommit is offered, across all partitions.
    available_regions = [region for partition in session.get_available_partitions() for region in session.get_available_regions('codecommit', partition)]

    if url.scheme == 'codecommit':
        # Plain scheme: the region must come from the profile configuration.
        region = session.get_config_variable('region')

        if not region:
            raise RegionNotFound('The following profile does not have an AWS Region: {}. You must set an AWS Region for this profile. For more information, see Configure An AWS CLI Profile in the AWS CLI User Guide.'.format(profile))

        if region not in available_regions:
            raise RegionNotAvailable('The following AWS Region is not available for use with AWS CodeCommit: {}. For more information about CodeCommit\'s availability in AWS Regions, see the AWS CodeCommit User Guide. If an AWS Region is listed as supported but you receive this error, try updating your version of the AWS CLI or the AWS SDKs.'.format(region))

    elif re.match(r"^[a-z]{2}-\w*.*-\d{1}", url.scheme):
        # Region-shaped scheme (e.g. us-east-1): validate it is supported.
        if url.scheme in available_regions:
            region = url.scheme
        else:
            raise RegionNotAvailable('The following AWS Region is not available for use with AWS CodeCommit: {}. For more information about CodeCommit\'s availability in AWS Regions, see the AWS CodeCommit User Guide. If an AWS Region is listed as supported but you receive this error, try updating your version of the AWS CLI or the AWS SDKs.'.format(url.scheme))

    else:
        raise FormatError('The following URL is malformed: {}. A URL must be in one of the two following formats: codecommit://<profile>@<repository> or codecommit::<region>://<profile>@<repository>'.format(remote_url))

    credentials = session.get_credentials()

    if not credentials:
        raise CredentialsNotFound('The following profile does not have credentials configured: {}. You must configure the access key and secret key for the profile. For more information, see Configure an AWS CLI Profile in the AWS CLI User Guide.'.format(profile))

    return Context(session, repository, 'v1', region, credentials)
import datetime

from requests_aws4auth import AWS4Auth

# First positional argument selects the snapshot command to run.
command = sys.argv[1]

# Runtime configuration, overridable through environment variables
# (S3_BUCKET is mandatory and raises KeyError when absent).
esHost = os.getenv('ELASTICSEARCH_HOST', 'localhost:9200')
bucketName = os.environ['S3_BUCKET']
roleArn = os.getenv('ROLE_ARN', '')
indices = os.getenv('INDICES', '*')
region = os.getenv('REGION', 'us-west-1')
host = 'https://' + esHost + '/'
service = 'es'

try:
    session = botocore.session.get_session()
    credentials = session.get_credentials()
# BUGFIX: was a bare `except:`, which also swallowed SystemExit and
# KeyboardInterrupt; narrowed so only real errors take this path.
except Exception:
    print("Unable to get AWS credentials")
    print(sys.exc_info()[0])
    exit(1)

try:
    # credentials is None when botocore found nothing, making the attribute
    # access below raise AttributeError — handled explicitly here.
    awsAuth = AWS4Auth(credentials.access_key, credentials.secret_key,
                       region, service, session_token=credentials.token)
except AttributeError as err:
    print("Unable to auth with AWS credentials")
    print(str(err), file=sys.stderr)
    exit(1)
def upload_input_file(): log.info(request.url) # Check that user is authenticated auth.require(fail_redirect='/login?redirect_url=' + request.url) # Use the boto session object only to get AWS credentials session = botocore.session.get_session() aws_access_key_id = str(session.get_credentials().access_key) aws_secret_access_key = str(session.get_credentials().secret_key) aws_session_token = str(session.get_credentials().token) # Define policy conditions bucket_name = request.app.config['mpcs.aws.s3.inputs_bucket'] encryption = request.app.config['mpcs.aws.s3.encryption'] acl = request.app.config['mpcs.aws.s3.acl'] # Generate unique ID to be used as S3 key (name) key_name = request.app.config['mpcs.aws.s3.key_prefix'] + str(uuid.uuid4()) # Redirect to a route that will call the annotator redirect_url = str(request.url) + "/job" # Define the S3 policy doc to allow upload via form POST # The only required elements are "expiration", and "conditions" # must include "bucket", "key" and "acl"; other elements optional # NOTE: We also must inlcude "x-amz-security-token" since we're # using temporary credentials via instance roles policy_document = str({ "expiration": (datetime.datetime.utcnow() + datetime.timedelta(hours=24)).strftime("%Y-%m-%dT%H:%M:%SZ"), "conditions": [{ "bucket": bucket_name }, ["starts-with", "$key", key_name], ["starts-with", "$success_action_redirect", redirect_url], { "x-amz-server-side-encryption": encryption }, { "x-amz-security-token": aws_session_token }, { "acl": acl }] }) # Encode the policy document - ensure no whitespace before encoding policy = base64.b64encode( policy_document.translate(None, string.whitespace)) # Sign the policy document using the AWS secret key signature = base64.b64encode( hmac.new(aws_secret_access_key, policy, hashlib.sha1).digest()) # Render the upload form # Must pass template variables for _all_ the policy elements # (in addition to the AWS access key and signed policy from above) return 
template(request.app.config['mpcs.env.templates'] + 'upload', auth=auth, bucket_name=bucket_name, s3_key_name=key_name, aws_access_key_id=aws_access_key_id, aws_session_token=aws_session_token, redirect_url=redirect_url, encryption=encryption, acl=acl, policy=policy, signature=signature)