def get_frozen_credentials(self):
    cfg = getpath(self.vault.cfg, self.cfgpath)
    return ReadOnlyCredentials(
        cfg['awsAccessKeyId'],
        cfg['awsSecretAccessKey'],
        cfg.get('awsSessionToken', None),
    )
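# For reference: ReadOnlyCredentials is botocore's immutable credentials
# namedtuple (fields: access_key, secret_key, token). A minimal sketch with
# placeholder values, not tied to any particular vault config:
from botocore.credentials import ReadOnlyCredentials

creds = ReadOnlyCredentials('fake-access-key', 'fake-secret-key', None)
assert creds.access_key == 'fake-access-key'
assert creds.token is None  # the session token is optional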
def test_s3_basics(without_aws_env):
    from numpy import s_
    from botocore.credentials import ReadOnlyCredentials

    assert s3_url_parse('s3://bucket/key') == ('bucket', 'key')
    assert s3_url_parse('s3://bucket/key/') == ('bucket', 'key/')
    assert s3_url_parse('s3://bucket/k/k/key') == ('bucket', 'k/k/key')

    with pytest.raises(ValueError):
        s3_url_parse("file://some/path")

    assert s3_fmt_range((0, 3)) == "bytes=0-2"
    assert s3_fmt_range(s_[4:10]) == "bytes=4-9"
    assert s3_fmt_range(s_[:10]) == "bytes=0-9"
    assert s3_fmt_range(None) is None

    for bad in (s_[10:], s_[-2:3], s_[:-3], (-1, 3), (3, -1), s_[1:100:3]):
        with pytest.raises(ValueError):
            s3_fmt_range(bad)

    creds = ReadOnlyCredentials('fake-key', 'fake-secret', None)

    assert str(s3_client(region_name='kk')._endpoint) == 's3(https://s3.kk.amazonaws.com)'
    assert str(s3_client(region_name='kk', use_ssl=False)._endpoint) == 's3(http://s3.kk.amazonaws.com)'

    s3 = s3_client(region_name='us-west-2', creds=creds)
    assert s3 is not None
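# Illustrative sketch only (not the library's actual implementation): one way
# an s3_fmt_range-compatible helper could map a (start, stop) tuple or a
# half-open slice onto an HTTP Range header, matching the expectations in the
# test above. The name _fmt_range_sketch is hypothetical.
def _fmt_range_sketch(r):
    if r is None:
        return None
    if isinstance(r, slice):
        start, stop, step = r.start or 0, r.stop, r.step
        if step not in (None, 1):
            raise ValueError("stride is not supported")
    else:
        start, stop = r
    if stop is None or start < 0 or stop < 0 or stop <= start:
        raise ValueError("range must be bounded and non-negative")
    # HTTP byte ranges are inclusive, hence stop - 1
    return "bytes={}-{}".format(start, stop - 1)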
async def _protected_refresh(self, is_mandatory):
    try:
        metadata = await self._refresh_using()
    except Exception:
        period_name = 'mandatory' if is_mandatory else 'advisory'
        logger.warning(
            "Refreshing temporary credentials failed "
            "during %s refresh period.",
            period_name, exc_info=True)
        if is_mandatory:
            # If this is a mandatory refresh, then all errors that occur
            # when we attempt to refresh credentials are propagated back
            # to the user.
            raise
        # Otherwise we'll just return. The end result will be that we'll
        # use the current set of temporary credentials we have.
        return
    self._set_from_data(metadata)
    self._frozen_credentials = ReadOnlyCredentials(
        self._access_key, self._secret_key, self._token)
    if self._is_expired():
        msg = ("Credentials were refreshed, but the "
               "refreshed credentials are still expired.")
        logger.warning(msg)
        raise RuntimeError(msg)
def setUp(self):
    self.botocore_credential_provider = mock.Mock(CredentialResolver)
    self.access_key = "access_key"
    self.secret_key = "secret_key"
    self.token = "token"
    self.botocore_credential_provider.load_credentials.return_value.\
        get_frozen_credentials.return_value = ReadOnlyCredentials(
            self.access_key, self.secret_key, self.token)
def test_sign_with_region_name(self):
    auth = mock.Mock()
    auth_types = {'v4': auth}
    with mock.patch.dict(botocore.auth.AUTH_TYPE_MAPS, auth_types):
        self.signer.sign('operation_name', self.request, region_name='foo')
    auth.assert_called_with(
        credentials=ReadOnlyCredentials('key', 'secret', None),
        service_name='signing_name',
        region_name='foo')
def _load_and_set_credentials(self) -> None:
    credentials = self._credentials_provider.load()
    self._last_loaded = self._credentials_modified_time()
    self.access_key = credentials.access_key
    self.secret_key = credentials.secret_key
    self.token = credentials.token
    self._frozen_credentials = ReadOnlyCredentials(
        credentials.access_key, credentials.secret_key, credentials.token)
def test_sign_override_signing_name_from_context(self):
    auth = mock.Mock()
    auth_types = {'v4': auth}
    self.request.context = {'signing': {'signing_name': 'override_name'}}
    with mock.patch.dict(botocore.auth.AUTH_TYPE_MAPS, auth_types):
        self.signer.sign('operation_name', self.request)
    auth.assert_called_with(
        credentials=ReadOnlyCredentials('key', 'secret', None),
        service_name='override_name',
        region_name='region_name')
def test_s3_client_cache(monkeypatch, without_aws_env):
    monkeypatch.setenv("AWS_ACCESS_KEY_ID", "fake-key-id")
    monkeypatch.setenv("AWS_SECRET_ACCESS_KEY", "fake-secret")

    s3 = s3_client(cache=True)
    assert s3 is s3_client(cache=True)
    assert s3 is s3_client(cache='purge')
    assert s3_client(cache='purge') is None
    assert s3 is not s3_client(cache=True)

    opts = (dict(),
            dict(region_name="foo"),
            dict(region_name="bar"),
            dict(profile="foo"),
            dict(profile="foo", region_name="xxx"),
            dict(profile="bar"),
            dict(creds=ReadOnlyCredentials('fake1', '...', None)),
            dict(creds=ReadOnlyCredentials('fake1', '...', None), region_name='custom'),
            dict(creds=ReadOnlyCredentials('fake2', '...', None)))

    keys = set(_s3_cache_key(**o) for o in opts)
    assert len(keys) == len(opts)
def from_url(remote_url):
    """
    Parses repository information from a git URL, filling in additional
    attributes we need from our AWS profile.

    Our remote helper accepts one type of URL:

    * cvm://<role>@<account_name>/<repository>

    :param str remote_url: git remote URL to parse

    :returns: **Context** with our CodeCommit repository information

    :raises:
        * **FormatError** if the URL is malformed
        * **ProfileNotFound** if the URL references a profile that doesn't exist
        * **RegionNotFound** if the URL references a region that doesn't exist
        * **RegionNotAvailable** if the URL references a region that is not available
    """
    url = urlparse(remote_url)
    event_handler = botocore.hooks.HierarchicalEmitter()
    profile = 'default'

    if not url.scheme or not url.netloc or not url.path or '@' not in url.netloc:
        raise FormatError(
            'The following URL is malformed: {}. A URL must be in the following '
            'format: cvm://<role>@<account_name>/<repository>'.format(remote_url))

    repository = url.path[1:]
    role_name, account_name = url.netloc.split('@', 1)

    session = botocore.session.Session(event_hooks=event_handler)
    available_regions = [
        region
        for partition in session.get_available_partitions()
        for region in session.get_available_regions('codecommit', partition)
    ]
    region = session.get_config_variable('region')
    if not region or region not in available_regions:
        raise RegionNotFound(
            'Please set the AWS_DEFAULT_REGION environment variable to a valid AWS region.')

    r = subprocess.run(
        f'cvm creds --account-name {account_name} --role-name {role_name}'.split(' '),
        stdout=subprocess.PIPE,
        stderr=subprocess.PIPE
    )
    data = json.loads(r.stdout.decode('utf-8'))
    credentials = ReadOnlyCredentials(
        data['Credentials']['AccessKeyId'],
        data['Credentials']['SecretAccessKey'],
        data['Credentials']['SessionToken']
    )

    return Context(session, repository, 'v1', region, credentials)
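# A minimal sketch (hypothetical URL values) of how urlparse splits a cvm://
# remote into the pieces from_url() extracts above:
from urllib.parse import urlparse

url = urlparse('cvm://deploy@my-account/my-repo')
role_name, account_name = url.netloc.split('@', 1)  # ('deploy', 'my-account')
repository = url.path[1:]                            # 'my-repo'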
def test_sign_with_signing_type_presign_post(self):
    auth = mock.Mock()
    post_auth = mock.Mock()
    query_auth = mock.Mock()
    auth_types = {
        'v4-presign-post': post_auth,
        'v4-query': query_auth,
        'v4': auth
    }
    with mock.patch.dict(botocore.auth.AUTH_TYPE_MAPS, auth_types):
        self.signer.sign('operation_name', self.request,
                         signing_type='presign-post')
    self.assertFalse(auth.called)
    self.assertFalse(query_auth.called)
    post_auth.assert_called_with(
        credentials=ReadOnlyCredentials('key', 'secret', None),
        service_name='signing_name',
        region_name='region_name'
    )
def test_signer_with_refreshable_credentials_gets_credential_set(self):
    class FakeCredentials(Credentials):
        def get_frozen_credentials(self):
            return ReadOnlyCredentials('foo', 'bar', 'baz')

    self.credentials = FakeCredentials('a', 'b', 'c')
    self.signer = RequestSigner(
        ServiceId('service_name'), 'region_name', 'signing_name',
        'v4', self.credentials, self.emitter)
    auth_cls = mock.Mock()
    with mock.patch.dict(botocore.auth.AUTH_TYPE_MAPS, {'v4': auth_cls}):
        auth = self.signer.get_auth('service_name', 'region_name')
        self.assertEqual(auth, auth_cls.return_value)
        # Note we're called with 'foo', 'bar', 'baz', and *not*
        # 'a', 'b', 'c'.
        auth_cls.assert_called_with(
            credentials=ReadOnlyCredentials('foo', 'bar', 'baz'),
            service_name='service_name',
            region_name='region_name')
def get_token(self) -> Tuple[str, str]:
    """Return a GCP service account access token and its expiry time."""
    aws_access_key, aws_secret_access_key, aws_session_token = \
        self.utils._assume_role()  # pylint: disable=protected-access

    # Create a ReadOnlyCredentials object from the assume-role credentials.
    credentials = ReadOnlyCredentials(aws_access_key, aws_secret_access_key,
                                      aws_session_token)

    # Generate the timestamp used for the signature. If too much time passes
    # between generation and use, the signature check can fail (observed in
    # <10% of attempts).
    current_time = datetime.datetime.utcnow()
    x_amz_date = current_time.strftime('%Y%m%dT%H%M%SZ')

    # Create the Authorization header.
    authorization_header = self.utils._generate_auth_header(
        x_amz_date, credentials)  # pylint: disable=protected-access

    # Generate the caller identity token.
    caller_identity_token = self.utils._generate_caller_identity_token(  # pylint: disable=protected-access
        authorization_header, x_amz_date, self.x_goog_cloud_target_resource,
        credentials)

    # Exchange it for a federated access token from GCP.
    federated_access_token = self.utils._get_federated_access_token(  # pylint: disable=protected-access
        caller_identity_token, self.x_goog_cloud_target_resource)

    # Use the federated token to obtain the service account token.
    self.gcp_sa_token, sa_expire_time = self.utils._get_sa_token(
        federated_access_token, self.gcp_token_lifetime)  # pylint: disable=protected-access

    return self.gcp_sa_token, sa_expire_time
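# Hedged illustration of the x_amz_date timestamp format used above; the value
# produced looks like '20240131T235959Z' (illustrative only):
import datetime

x_amz_date = datetime.datetime.utcnow().strftime('%Y%m%dT%H%M%SZ')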
async def get_frozen_credentials(self):
    return ReadOnlyCredentials(self.access_key, self.secret_key, self.token)
def get_frozen_credentials(self):
    return ReadOnlyCredentials('foo', 'bar', 'baz')
def __init__(self, access_key, secret_key):
    self._hmac_auth = HmacV1Auth(
        ReadOnlyCredentials(access_key, secret_key, None))
def get_frozen_credentials(self) -> ReadOnlyCredentials:
    self._refresh()
    with self._lock:
        return ReadOnlyCredentials(self._access_key, self._secret_key,
                                   self._token)
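# Hedged usage sketch showing why the frozen snapshot matters: the namedtuple
# returned under the lock stays consistent even if the provider is refreshed
# afterwards. _ToyProvider is hypothetical and only illustrates the pattern.
import threading
from botocore.credentials import ReadOnlyCredentials

class _ToyProvider:
    def __init__(self):
        self._lock = threading.Lock()
        self._access_key, self._secret_key, self._token = 'old-key', 'old-secret', None

    def get_frozen_credentials(self):
        with self._lock:
            return ReadOnlyCredentials(self._access_key, self._secret_key, self._token)

provider = _ToyProvider()
frozen = provider.get_frozen_credentials()
provider._access_key = 'new-key'       # simulate a later refresh
assert frozen.access_key == 'old-key'  # the snapshot is unaffected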