def _create_google_default_config(): config = type('', (), {})() # Use botocore session API to get defaults session = botocore.session.Session() # region: The default AWS region that this script will connect # to for all API calls config.region = session.get_config_variable('region') or 'eu-central-1' # aws cli profile to store config and access keys into config.profile = session.profile or None # output format: The AWS CLI output format that will be configured in the # adf profile (affects subsequent CLI calls) config.output_format = session.get_config_variable('format') or 'json' # aws credential location: The file where this script will store the temp # credentials under the configured profile config.aws_credentials_location = os.path.expanduser( session.get_config_variable('credentials_file')) config.aws_config_location = os.path.expanduser( session.get_config_variable('config_file')) config.role_arn = None config.provider = None config.google_sp_id = None config.google_idp_id = None config.google_username = None config.duration = 3600 config.ask_role = False return config
def create_adfs_default_config(profile): config = type('', (), {})() # Use botocore session API to get defaults session = _create_aws_session(profile) # region: The default AWS region that this script will connect # to for all API calls config.region = session.get_config_variable('region') or 'eu-central-1' # aws cli profile to store config and access keys into config.profile = session.profile or 'default' # output format: The AWS CLI output format that will be configured in the # adf profile (affects subsequent CLI calls) config.output_format = session.get_config_variable('format') or 'json' # aws credential location: The file where this script will store the temp # credentials under the configured profile config.aws_credentials_location = os.path.expanduser( session.get_config_variable('credentials_file')) config.aws_config_location = os.path.expanduser( session.get_config_variable('config_file')) # cookie location: The file where this script will store the ADFS session cookies config.adfs_cookie_location = os.path.join( os.path.dirname(config.aws_credentials_location), 'adfs_cookies') # SSL certificate verification: Whether or not strict certificate # verification is done, False should only be used for dev/test config.ssl_verification = True # Override CA bundle for SSL certificate verification for ADFS server only. config.adfs_ca_bundle = None # AWS role arn config.role_arn = None config.adfs_host = None config.adfs_user = None # aws provider id. (Optional - 9/10 times it will always be urn:amazon:websevices) config.provider_id = 'urn:amazon:webservices' # Note: if your bucket require CORS, it is advised that you use path style addressing # (which is set by default in signature version 4). 
config.s3_signature_version = None # AWS STS session duration, default is 3600 seconds config.session_duration = int(3600) # Whether SSPI is enabled config.sspi = system() == "Windows" # Whether to also trigger the default authentication method when U2F is available config.u2f_trigger_default = True return config
def test_profile(self):
    """Profile/region come from env vars; a fresh session without them
    falls back to the built-in defaults."""
    # Values picked up from the FOO_* environment variables.
    self.assertEqual(self.session.get_config_variable('profile'), 'foo')
    self.assertEqual(self.session.get_config_variable('region'),
                     'moon-west-1')
    # (Removed a dead statement here: the expression
    # `self.session.get_config_variable('profile') == 'default'` computed a
    # comparison and discarded the result — it asserted nothing.)
    # Temporarily drop the env vars so a new session can't see them.
    saved_region = self.environ['FOO_REGION']
    del self.environ['FOO_REGION']
    saved_profile = self.environ['FOO_PROFILE']
    del self.environ['FOO_PROFILE']
    session = create_session(session_vars=self.env_vars)
    self.assertEqual(session.get_config_variable('profile'), None)
    self.assertEqual(session.get_config_variable('region'), 'us-west-1')
    # Restore the environment for subsequent tests.
    self.environ['FOO_REGION'] = saved_region
    self.environ['FOO_PROFILE'] = saved_profile
def _get_presigned_url(self, cluster_name, role_arn):
    """Generate a presigned STS URL used as an EKS authentication token.

    :param cluster_name: name of the cluster, sent in a signed header
    :param role_arn: optional role to assume when building the session
    :return: presigned URL string valid for URL_TIMEOUT seconds
    """
    session = self._session_handler.get_session(self._region_name, role_arn)
    # Fall back to the session's configured region when none was given.
    if self._region_name is None:
        self._region_name = session.get_config_variable('region')
    # Resolve the regional endpoint for the auth (STS) service.
    loader = botocore.loaders.create_loader()
    data = loader.load_data("endpoints")
    endpoint_resolver = botocore.regions.EndpointResolver(data)
    endpoint = endpoint_resolver.construct_endpoint(
        AUTH_SERVICE, self._region_name)
    signer = RequestSigner(
        ServiceId(AUTH_SERVICE), self._region_name, AUTH_SERVICE,
        AUTH_SIGNING_VERSION, session.get_credentials(),
        session.get_component('event_emitter'))
    action_params = 'Action=' + AUTH_COMMAND + '&Version=' + AUTH_API_VERSION
    params = {
        'method': 'GET',
        'url': 'https://' + endpoint["hostname"] + '/?' + action_params,
        'body': {},
        # The cluster-name header is part of the signed request.
        'headers': {CLUSTER_NAME_HEADER: cluster_name},
        'context': {}
    }
    # Sign against the endpoint's credential-scope region (may differ from
    # the endpoint's own region for global endpoints).
    url = signer.generate_presigned_url(
        params,
        region_name=endpoint["credentialScope"]["region"],
        operation_name='',
        expires_in=URL_TIMEOUT)
    return url
def mk_boto_session(
    profile: Optional[str] = None,
    creds: Optional[ReadOnlyCredentials] = None,
    region_name: Optional[str] = None,
) -> Session:
    """Get botocore session with correct `region` configured

    :param profile: profile name to lookup
    :param creds: Override credentials with supplied data
    :param region_name: default region_name to use if not configured for a given profile
    """
    session = botocore.session.Session(profile=profile)

    if creds is not None:
        session.set_credentials(creds.access_key, creds.secret_key, creds.token)

    configured = session.get_config_variable("region")
    if configured is None:
        # Nothing configured for this profile: use the caller's region,
        # or auto-detect when it was omitted / set to "auto".
        if region_name is None or region_name == "auto":
            resolved = auto_find_region(session, default="us-west-2")
        else:
            resolved = region_name
        session.set_config_variable("region", resolved)

    return session
def get_aws_settings(
        profile: Optional[str] = None,
        region_name: str = "auto",
        aws_unsigned: bool = False,
        requester_pays: bool = False) -> Tuple[Dict[str, Any], Optional[Credentials]]:
    """
    Compute ``aws=`` parameter for ``set_default_rio_config``.

    see also ``datacube.utils.rio.set_default_rio_config``

    Returns a tuple of: ``(aws: Dictionary, creds: session credentials from botocore)``.
    When ``aws_unsigned`` is True the credentials element is ``None``.

    Note that credentials are baked in to ``aws`` setting dictionary,
    however since those might be STS credentials they might require refresh
    hence they are returned from this function separately as well.

    :raises ValueError: when credentials cannot be obtained
    """
    session = mk_boto_session(profile=profile, region_name=region_name)
    # Region is guaranteed to be configured by mk_boto_session.
    region_name = session.get_config_variable("region")
    if aws_unsigned:
        # Unsigned access needs no credentials at all.
        return (dict(region_name=region_name, aws_unsigned=True), None)
    creds = get_creds_with_retry(session)
    if creds is None:
        raise ValueError("Couldn't get credentials")
    # Freeze so the baked-in values are a consistent snapshot.
    cc = creds.get_frozen_credentials()
    return (dict(region_name=region_name,
                 aws_access_key_id=cc.access_key,
                 aws_secret_access_key=cc.secret_key,
                 aws_session_token=cc.token,
                 requester_pays=requester_pays),
            creds)
def botocore_default_region(
        session: Optional[Session] = None) -> Optional[str]:
    """
    Returns default region name as configured on the system.
    """
    # Fall back to a process-wide default session when none is supplied.
    active = session if session is not None else botocore.session.get_session()
    return active.get_config_variable('region')
def create_adfs_default_config(profile): config = type('', (), {})() # Use botocore session API to get defaults session = _create_aws_session(profile) # region: The default AWS region that this script will connect # to for all API calls config.region = session.get_config_variable('region') or 'eu-central-1' # aws cli profile to store config and access keys into config.profile = session.profile or 'default' # output format: The AWS CLI output format that will be configured in the # adf profile (affects subsequent CLI calls) config.output_format = session.get_config_variable('format') or 'json' # aws credential location: The file where this script will store the temp # credentials under the configured profile config.aws_credentials_location = os.path.expanduser(session.get_config_variable('credentials_file')) config.aws_config_location = os.path.expanduser(session.get_config_variable('config_file')) # cookie location: The file where this script will store the ADFS session cookies config.adfs_cookie_location = os.path.join(os.path.dirname(config.aws_credentials_location), 'adfs_cookies') # SSL certificate verification: Whether or not strict certificate # verification is done, False should only be used for dev/test config.ssl_verification = True # AWS role arn config.role_arn = None config.adfs_host = None config.adfs_user = None # aws provider id. (Optional - 9/10 times it will always be urn:amazon:websevices) config.provider_id = 'urn:amazon:webservices' # Note: if your bucket require CORS, it is advised that you use path style addressing # (which is set by default in signature version 4). config.s3_signature_version = None # AWS STS session duration, default is 3600 seconds config.session_duration = int(3600) return config
def __init__(self, aws_host, aws_service, aws_access_key=None, aws_secret_access_key=None, aws_region=None, headers=None):
    """
    Example usage for talking to an AWS Elasticsearch Service:

    If an access key, secret access key, or the region is not provided
    they will be determined using the same method as the aws cli

    AWSRequestsAuth(aws_host='search-service-foobar.us-east-1.es.amazonaws.com',
                    aws_service='es',
                    aws_access_key='YOURKEY',
                    aws_secret_access_key='YOURSECRET',
                    aws_region='us-east-1')
    """
    self.aws_access_key = aws_access_key
    self.aws_secret_access_key = aws_secret_access_key
    self.aws_host = aws_host
    self.aws_region = aws_region
    self.service = aws_service
    self.headers = headers if headers else {}
    if not (aws_access_key and aws_secret_access_key):
        # Attempt to get instance role creds
        metadata_exception = TypeError("AWS credentials not provided, and they cannot be retrieved from configuration")
        try:
            import botocore.session
        except ImportError:
            raise metadata_exception
        session = botocore.session.Session()
        security_creds = session.get_credentials()
        if not security_creds:
            raise metadata_exception
        self.aws_access_key = security_creds.access_key
        self.aws_secret_access_key = security_creds.secret_key
        # Temporary (STS/instance-role) credentials carry a session token
        # that must be sent as a signed header.
        if security_creds.token:
            self.headers['X-Amz-Security-Token'] = security_creds.token
    if not aws_region:
        # Fixed: this previously imported `boto.session`, which does not
        # exist (boto has no `session` module with get_config_variable);
        # the region lookup lives in botocore.
        try:
            import botocore.session
        except ImportError:
            raise TypeError("Unable to determine region")
        session = botocore.session.Session()
        self.aws_region = session.get_config_variable('region')
    else:
        self.aws_region = aws_region
def __init__(self, s3_staging_dir=None, access_key=None, secret_key=None,
             region_name=None, schema_name='default', profile_name=None,
             credential_file=None, jvm_path=None, jvm_options=None,
             converter=None, formatter=None, driver_path=None, **driver_kwargs):
    """Open a JDBC connection to Athena via jpype.

    Credentials come either from an explicit *credential_file* (passed
    through to the JDBC driver) or from the botocore credential chain
    (explicit keys, profile, env, instance role).
    """
    # Staging dir may come from the argument or the environment.
    if s3_staging_dir:
        self.s3_staging_dir = s3_staging_dir
    else:
        self.s3_staging_dir = os.getenv(self._ENV_S3_STAGING_DIR, None)
    assert self.s3_staging_dir, 'Required argument `s3_staging_dir` not found.'
    assert schema_name, 'Required argument `schema_name` not found.'
    self.schema_name = schema_name
    if credential_file:
        # Driver reads keys from the file; no in-process credentials.
        self.access_key = None
        self.secret_key = None
        self.token = None
        self.credential_file = credential_file
        assert self.credential_file, 'Required argument `credential_file` not found.'
        # Region cannot be resolved from the file, so it is mandatory here.
        self.region_name = region_name
        assert self.region_name, 'Required argument `region_name` not found.'
    else:
        # Resolve credentials/region through the botocore default chain.
        import botocore.session
        session = botocore.session.get_session()
        if access_key and secret_key:
            session.set_credentials(access_key, secret_key)
        if profile_name:
            session.set_config_variable('profile', profile_name)
        if region_name:
            session.set_config_variable('region', region_name)
        credentials = session.get_credentials()
        self.access_key = credentials.access_key
        assert self.access_key, 'Required argument `access_key` not found.'
        self.secret_key = credentials.secret_key
        assert self.secret_key, 'Required argument `secret_key` not found.'
        self.token = credentials.token
        self.credential_file = None
        self.region_name = session.get_config_variable('region')
        assert self.region_name, 'Required argument `region_name` not found.'
    # Boot the JVM, register the driver class and open the connection.
    self._start_jvm(jvm_path, jvm_options, driver_path)
    props = self._build_driver_args(**driver_kwargs)
    # Loading the class registers the Athena driver with DriverManager.
    jpype.JClass(ATHENA_DRIVER_CLASS_NAME)
    self._jdbc_conn = jpype.java.sql.DriverManager.getConnection(
        ATHENA_CONNECTION_STRING.format(region=self.region_name, schema=schema_name), props)
    self._converter = converter if converter else JDBCTypeConverter()
    self._formatter = formatter if formatter else ParameterFormatter()
def test_multiple_env_vars_uses_second_var(self):
    """When the first env var of a multi-var mapping is unset, the
    second one is consulted."""
    env_vars = {
        'profile': (None, ['BAR_DEFAULT_PROFILE', 'BAR_PROFILE'], None),
    }
    session = create_session(session_vars=env_vars)
    # Ensure the first variable is absent and only the second is set;
    # env vars are read lazily at get_config_variable() time.
    self.environ.pop('BAR_DEFAULT_PROFILE', None)
    self.environ['BAR_PROFILE'] = 'second'
    self.assertEqual(session.get_config_variable('profile'), 'second')
def test_supports_multiple_env_vars_for_single_logical_name(self):
    """A logical config name may map to several env vars; the first one
    that is set wins."""
    env_vars = {
        'profile': (None, ['BAR_DEFAULT_PROFILE', 'BAR_PROFILE'], None),
    }
    session = create_session(session_vars=env_vars)
    # Both are set: the earlier entry in the list takes precedence.
    self.environ['BAR_DEFAULT_PROFILE'] = 'first'
    self.environ['BAR_PROFILE'] = 'second'
    self.assertEqual(session.get_config_variable('profile'), 'first')
def _create_adfs_default_config():
    """Build the default aws-adfs configuration object.

    Defaults are read from a fresh botocore session (AWS CLI config files /
    environment), with hard-coded fallbacks where nothing is configured.

    :return: anonymous attribute-bag object carrying the configuration
    """
    # Anonymous namespace instance used as a simple attribute bag.
    config = type('', (), {})()
    # Use botocore session API to get defaults
    session = botocore.session.Session()
    # region: The default AWS region that this script will connect
    # to for all API calls
    config.region = session.get_config_variable('region') or 'eu-central-1'
    # aws cli profile to store config and access keys into
    config.profile = session.profile or 'default'
    # output format: The AWS CLI output format that will be configured in the
    # adf profile (affects subsequent CLI calls)
    config.output_format = session.get_config_variable('format') or 'json'
    # aws credential location: The file where this script will store the temp
    # credentials under the configured profile
    config.aws_credentials_location = os.path.expanduser(
        session.get_config_variable('credentials_file'))
    config.aws_config_location = os.path.expanduser(
        session.get_config_variable('config_file'))
    # cookie location: The file where this script will store the ADFS session cookies
    config.adfs_cookie_location = os.path.join(
        os.path.dirname(config.aws_credentials_location), 'adfs_cookies')
    # SSL certificate verification: Whether or not strict certificate
    # verification is done, False should only be used for dev/test
    config.ssl_verification = True
    # AWS role arn
    config.role_arn = None
    config.adfs_host = None
    config.adfs_user = None
    # aws provider id. (Optional - 9/10 times it will always be urn:amazon:webservices)
    # Fixed: this is a SAML provider URN, not an AWS CLI profile name —
    # the previous `session.profile or ...` fallback would have injected
    # the profile name here whenever one was configured.
    config.provider_id = 'urn:amazon:webservices'
    return config
def botocore_default_region(session=None):
    """Return region configured for AWS.

    With no arguments or with session=None return region configured in the
    default session. Otherwise return region configured in the supplied
    session.

    :param session: optional botocore session to query
    :return: region name string, or None when no region is configured
    """
    if session is None:
        session = botocore.session.get_session()
    return session.get_config_variable('region')
def create_session(**kwargs):
    """Create a test Session wired to the shared model loader.

    By default the module-level _LOADER object is used as the loader so that
    we reuse the same models across tests. The credentials file is pointed
    at a non-existent path so real credentials can never leak into a test.

    :param kwargs: forwarded verbatim to botocore.session.Session
    :return: configured botocore Session
    """
    session = botocore.session.Session(**kwargs)
    data_path = session.get_config_variable('data_path')
    _LOADER.data_path = data_path or ''
    session.register_component('data_loader', _LOADER)
    # Ensure tests never pick up credentials from the host machine.
    session.set_config_variable('credentials_file', 'noexist/foo/botocore')
    return session
def get_aws_auth(self):
    '''Build an AWSRequestsAuth signer for calling an API Gateway endpoint.

    Credentials and region are resolved through the default botocore chain
    (env vars, config files, instance role); the target host comes from the
    TARGET_API_AWS_AUTH environment variable.
    '''
    session = botocore.session.get_session()
    aws_credentials = session.get_credentials()
    auth = AWSRequestsAuth(aws_access_key=aws_credentials.access_key,
                           aws_secret_access_key=aws_credentials.secret_key,
                           aws_host=environ['TARGET_API_AWS_AUTH'],
                           aws_region=session.get_config_variable('region'),
                           # Sign for API Gateway invocation.
                           aws_service="execute-api")
    return auth
def test_type_conversions_occur_when_specified(self):
    # Specify that we can retrieve the var from the
    # FOO_TIMEOUT env var, with a conversion function
    # of int().
    self.env_vars['metadata_service_timeout'] = (None, 'FOO_TIMEOUT', None, int)
    # Environment variables are always strings.
    self.environ['FOO_TIMEOUT'] = '10'
    session = create_session(session_vars=self.env_vars)
    # But the declared conversion function should turn it into an int.
    self.assertEqual(
        session.get_config_variable('metadata_service_timeout'), 10)
def test_type_conversions_occur_when_specified(self):
    # Specify that we can retrieve the var from the
    # FOO_TIMEOUT env var, with a conversion function
    # of int().
    self.env_vars['metadata_service_timeout'] = (
        None, 'FOO_TIMEOUT', None, int)
    # Environment variables are always strings.
    self.environ['FOO_TIMEOUT'] = '10'
    session = create_session(session_vars=self.env_vars)
    # But the declared conversion function should turn it into an int.
    self.assertEqual(
        session.get_config_variable('metadata_service_timeout'), 10)
def _get_botocore_session():
    """Return the process-wide botocore session, creating it on first call.

    The session is cached on the function object itself
    (_get_botocore_session.botocore_session) and configured from the
    module-level _profile/_region_name/_debug globals.
    """
    global _region_name
    if _get_botocore_session.botocore_session is None:
        LOG.debug('Creating new Botocore Session')
        LOG.debug('Botocore version: {0}'.format(botocore.__version__))
        # Map the 'profile' logical name onto our env var / explicit value.
        session = botocore.session.get_session({
            'profile': (None, _profile_env_var, _profile, None),
        })
        # Explicit region wins; otherwise only overwrite when nothing is
        # configured (note: this may set region to None in that case).
        if _region_name or not session.get_config_variable('region'):
            session.set_config_variable('region', _region_name)
        # Record the region the session actually resolved to.
        _region_name = session.get_config_variable('region')
        session.register_component('data_loader', _get_data_loader())
        _set_user_agent_for_session(session)
        _get_botocore_session.botocore_session = session
        if _debug:
            session.set_debug_logger()
    return _get_botocore_session.botocore_session
def from_url(remote_url):
    """
    Parses repository information from a git url, filling in additional
    attributes we need from our AWS profile.

    Our remote helper accepts one type of urls...

    * cvm://<role>@<account_name>/<repository>

    :param str remote_url: git remote url to parse

    :returns: **Context** with our CodeCommit repository information

    :raises:
     * **FormatError** if the url is malformed
     * **ProfileNotFound** if the url references a profile that doesn't exist
     * **RegionNotFound** if the url references a region that doesn't exist
     * **RegionNotAvailable** if the url references a region that is not available
    """
    url = urlparse(remote_url)
    event_handler = botocore.hooks.HierarchicalEmitter()
    # (Removed unused local `profile = 'default'` — nothing read it.)
    if not url.scheme or not url.netloc or not url.path or '@' not in url.netloc:
        raise FormatError('The following URL is malformed: {}. A URL must be in the following format: cvm://<role>@<account_name>/<repository>'.format(remote_url))
    # Strip the leading '/' from the path to get the repository name.
    repository = url.path[1:]
    role_name, account_name = url.netloc.split('@', 1)
    session = botocore.session.Session(event_hooks = event_handler)
    available_regions = [
        region
        for partition in session.get_available_partitions()
        for region in session.get_available_regions('codecommit', partition)
    ]
    region = session.get_config_variable('region')
    if not region or region not in available_regions:
        raise RegionNotFound('Please set the AWS_DEFAULT_REGION environment variable to a valid AWS region.')
    # Fetch temporary credentials from the external `cvm` helper.
    r = subprocess.run(
        f'cvm creds --account-name {account_name} --role-name {role_name}'.split(' '),
        stdout=subprocess.PIPE,
        stderr=subprocess.PIPE
    )
    data = json.loads(r.stdout.decode('utf-8'))
    credentials = ReadOnlyCredentials(
        data['Credentials']['AccessKeyId'],
        data['Credentials']['SecretAccessKey'],
        data['Credentials']['SessionToken']
    )
    return Context(session, repository, 'v1', region, credentials)
def _create_adfs_default_config():
    """Build the default aws-adfs configuration object.

    Defaults are read from a fresh botocore session (AWS CLI config files /
    environment), with hard-coded fallbacks where nothing is configured.

    :return: anonymous attribute-bag object carrying the configuration
    """
    # Anonymous namespace instance used as a simple attribute bag.
    config = type('', (), {})()
    # Use botocore session API to get defaults
    session = botocore.session.Session()
    # region: The default AWS region that this script will connect
    # to for all API calls
    config.region = session.get_config_variable('region') or 'eu-central-1'
    # aws cli profile to store config and access keys into
    config.profile = 'adfs'
    # output format: The AWS CLI output format that will be configured in the
    # adf profile (affects subsequent CLI calls)
    config.output_format = session.get_config_variable('format') or 'json'
    # aws credential location: The file where this script will store the temp
    # credentials under the configured profile
    config.aws_credentials_location = os.path.expanduser(session.get_config_variable('credentials_file'))
    config.aws_config_location = os.path.expanduser(session.get_config_variable('config_file'))
    # cookie location: The file where this script will store the ADFS session cookies
    config.adfs_cookie_location = os.path.join(os.path.dirname(config.aws_credentials_location), 'adfs_cookies')
    # SSL certificate verification: Whether or not strict certificate
    # verification is done, False should only be used for dev/test.
    # Fixed: this was the string 'True', which is always truthy (and so
    # would 'False' be) — use a real boolean like the other config fields.
    config.ssl_verification = True
    # AWS role arn
    config.role_arn = None
    config.adfs_host = None
    config.adfs_user = None
    return config
def _deploy_app(temp_dirname):
    """Deploy the chalice app in *temp_dirname* to the 'dev' stage.

    :param temp_dirname: directory containing the chalice project
    :return: SmokeTestApplication wrapping the deployed resources
    """
    factory = CLIFactory(temp_dirname)
    config = factory.create_config_obj(chalice_stage_name='dev',
                                       autogen_policy=True)
    session = factory.create_botocore_session()
    d = factory.create_default_deployer(session, config, UI())
    region = session.get_config_variable('region')
    # Deployment is retried to absorb transient IAM propagation delays.
    deployed = _deploy_with_retries(d, config)
    application = SmokeTestApplication(
        region=region,
        deployed_values=deployed,
        stage_name='dev',
        app_name=RANDOM_APP_NAME,
        app_dir=temp_dirname,
    )
    return application
def find_instance(instance_id, config_profiles):
    """
    Search through all AWS profiles and regions for an instance.

    :param instance_id: EC2 instance id to look for
    :param config_profiles: mapping of profile name -> config with a
        `.regions` attribute listing regions to search first
    :return: Instance for the first match, or None when not found
    """
    # NOTE(review): uses the legacy botocore service/operation API
    # (get_service/get_operation/endpoint.call), removed in modern
    # botocore — presumably this targets an old pinned version; verify.
    profiles_session = botocore.session.get_session()
    for profile in profiles_session.available_profiles:
        # Re-using the same session doesn't work
        session = botocore.session.get_session()
        session.profile = profile
        # Prefer regions listed in the profile
        regions = None
        if profile in config_profiles:
            regions = config_profiles[profile].regions
        if not regions:
            # Fall back to the profile's configured default region;
            # profiles with no region at all are skipped.
            region = session.get_config_variable('region')
            if not region:
                continue
            else:
                regions = [region]
        ec2 = session.get_service('ec2')
        operation = ec2.get_operation('DescribeInstances')
        for region in regions:
            logger.debug("connecting to region '%s' with AWS profile '%s'...",
                         region, profile)
            endpoint = ec2.get_endpoint(region)
            try:
                resp, data = operation.call(
                    endpoint,
                    instance_ids=[instance_id],
                )
            except botocore.exceptions.NoCredentialsError:
                # No credentials for this profile: skip remaining regions.
                break
            if resp.status_code == 200:
                # Return the first matching instance found.
                for reservation in data['Reservations']:
                    for instance_data in reservation['Instances']:
                        return Instance.from_instance_data(instance_data,
                                                           profile, region)
            else:
                continue
    return None
def _deploy_app(temp_dirname):
    """Deploy the chalice app in *temp_dirname* to the 'dev' stage.

    :param temp_dirname: directory containing the chalice project
    :return: SmokeTestApplication wrapping the deployed resources
    """
    factory = CLIFactory(temp_dirname)
    config = factory.create_config_obj(
        chalice_stage_name='dev',
        autogen_policy=True
    )
    session = factory.create_botocore_session()
    d = factory.create_default_deployer(session, config, UI())
    region = session.get_config_variable('region')
    # Deployment is retried to absorb transient IAM propagation delays.
    deployed = _deploy_with_retries(d, config)
    application = SmokeTestApplication(
        region=region,
        deployed_values=deployed,
        stage_name='dev',
        app_name=RANDOM_APP_NAME,
        app_dir=temp_dirname,
    )
    return application
def _get_presigned_url(self, cluster_name, role_arn):
    """Generate a presigned STS URL used as an EKS authentication token.

    :param cluster_name: name of the cluster, sent in a signed header
    :param role_arn: optional role to assume when building the session
    :return: presigned URL string valid for URL_TIMEOUT seconds
    """
    session = self._session_handler.get_session(
        self._region_name,
        role_arn
    )
    # Fall back to the session's configured region when none was given.
    if self._region_name is None:
        self._region_name = session.get_config_variable('region')
    # Resolve the regional endpoint for the auth (STS) service.
    loader = botocore.loaders.create_loader()
    data = loader.load_data("endpoints")
    endpoint_resolver = botocore.regions.EndpointResolver(data)
    endpoint = endpoint_resolver.construct_endpoint(
        AUTH_SERVICE,
        self._region_name
    )
    signer = RequestSigner(
        ServiceId(AUTH_SERVICE),
        self._region_name,
        AUTH_SERVICE,
        AUTH_SIGNING_VERSION,
        session.get_credentials(),
        session.get_component('event_emitter')
    )
    action_params = 'Action=' + AUTH_COMMAND + '&Version=' + AUTH_API_VERSION
    params = {
        'method': 'GET',
        'url': 'https://' + endpoint["hostname"] + '/?' + action_params,
        'body': {},
        # The cluster-name header is part of the signed request.
        'headers': {CLUSTER_NAME_HEADER: cluster_name},
        'context': {}
    }
    # Sign against the endpoint's credential-scope region (may differ from
    # the endpoint's own region for global endpoints).
    url = signer.generate_presigned_url(
        params,
        region_name=endpoint["credentialScope"]["region"],
        operation_name='',
        expires_in=URL_TIMEOUT
    )
    return url
def make_s3_client(region_name=None,
                   max_pool_connections=32,
                   session=None,
                   profile=None,
                   creds=None,
                   use_ssl=True):
    """ Create s3 client with correct region and configured max_pool_connections.

    :param region_name: region hint used when constructing a new session
    :param max_pool_connections: connection pool size for the client
    :param session: reuse an existing botocore session instead of creating one
    :param profile: AWS profile name (only used when session is None)
    :param creds: override credentials (only used when session is None)
    :param use_ssl: https when True, plain http otherwise
    """
    if session is None:
        session = get_boto_session(region_name=region_name,
                                   profile=profile,
                                   creds=creds)
    # The session is guaranteed to have a region configured; use it for
    # both the client and the regional endpoint URL.
    region_name = session.get_config_variable("region")
    protocol = 'https' if use_ssl else 'http'
    s3 = session.create_client('s3',
                               region_name=region_name,
                               endpoint_url='{}://s3.{}.amazonaws.com'.format(protocol, region_name),
                               config=botocore.client.Config(max_pool_connections=max_pool_connections))
    return s3
def get_boto_session(region_name=None,
                     profile=None,
                     creds=None,
                     cache=None):
    """ Get botocore.session with correct region_name configured

    :param region_name: desired region ("auto"/None triggers auto-detect)
    :param profile: AWS profile name for the new session
    :param creds: override credentials for the new session
    :param cache: optional object on which a `sessions` dict is stashed
    """
    if cache is not None:
        # Lazily attach a region -> session dict onto the cache object.
        sessions = getattr(cache, 'sessions', None)
        if sessions is None:
            sessions = {}
            setattr(cache, 'sessions', sessions)
        session = sessions.get(region_name)
    else:
        sessions, session = {}, None

    if session is not None:
        return session

    # NOTE(review): the cache is looked up by the raw `region_name`
    # argument but stored below under the *resolved* region, so calls
    # with region_name=None/"auto" never hit the cache — confirm intended.
    session = botocore.session.Session(profile=profile)
    _region = session.get_config_variable("region")

    if creds is not None:
        session.set_credentials(creds.access_key, creds.secret_key, creds.token)

    if _region is None:
        if region_name is None or region_name == "auto":
            _region = auto_find_region(session)
        else:
            _region = region_name
        session.set_config_variable("region", _region)

    sessions[_region] = session
    return session
def from_url(remote_url):
    """
    Parses repository information from a git url, filling in additional
    attributes we need from our AWS profile.

    Our remote helper accepts two distinct types of urls...

    * codecommit://<profile>@<repository>
    * codecommit::<region>://<profile>@<repository>

    If provided the former we get the whole url, but if the later git will
    truncate the proceeding 'codecommit::' prefix for us.

    The '<profile>@' url is optional, using the aws sessions present profile
    if not provided.

    :param str remote_url: git remote url to parse

    :returns: **Context** with our CodeCommit repository information

    :raises:
     * **FormatError** if the url is malformed
     * **ProfileNotFound** if the url references a profile that doesn't exist
     * **RegionNotFound** if the url references a region that doesn't exist
     * **RegionNotAvailable** if the url references a region that is not available
    """
    url = urlparse(remote_url)
    event_handler = botocore.hooks.HierarchicalEmitter()
    profile = 'default'
    repository = url.netloc
    if not url.scheme or not url.netloc:
        raise FormatError('The following URL is malformed: {}. A URL must be in one of the two following formats: codecommit://<profile>@<repository> or codecommit::<region>://<profile>@<repository>'.format(remote_url))
    if '@' in url.netloc:
        # Explicit profile given: validate it against the config files.
        profile, repository = url.netloc.split('@', 1)
        session = botocore.session.Session(profile = profile, event_hooks = event_handler)
        if profile not in session.available_profiles:
            raise ProfileNotFound('The following profile was not found: {}. Available profiles are: {}. Either use one of the available profiles, or create an AWS CLI profile to use and then try again. For more information, see Configure an AWS CLI Profile in the AWS CLI User Guide.'.format(profile, ', '.join(session.available_profiles)))
    else:
        session = botocore.session.Session(event_hooks = event_handler)
    # Cache assume-role credentials on disk like the AWS CLI does.
    session.get_component('credential_provider').get_provider('assume-role').cache = JSONFileCache()
    try:
        # when the aws cli is available support plugin authentication
        import awscli.plugin
        awscli.plugin.load_plugins(
            session.full_config.get('plugins', {}),
            event_hooks = event_handler,
            include_builtins = False,
        )
        session.emit_first_non_none_response('session-initialized', session = session)
    except ImportError:
        pass
    # All regions where CodeCommit is offered, across every partition.
    available_regions = [region for partition in session.get_available_partitions()
                         for region in session.get_available_regions('codecommit', partition)]
    if url.scheme == 'codecommit':
        # Region must come from the profile when not embedded in the URL.
        region = session.get_config_variable('region')
        if not region:
            raise RegionNotFound('The following profile does not have an AWS Region: {}. You must set an AWS Region for this profile. For more information, see Configure An AWS CLI Profile in the AWS CLI User Guide.'.format(profile))
        if region not in available_regions:
            raise RegionNotAvailable('The following AWS Region is not available for use with AWS CodeCommit: {}. For more information about CodeCommit\'s availability in AWS Regions, see the AWS CodeCommit User Guide. If an AWS Region is listed as supported but you receive this error, try updating your version of the AWS CLI or the AWS SDKs.'.format(region))
    elif re.match(r"^[a-z]{2}-\w*.*-\d{1}", url.scheme):
        # URL scheme looks like a region name (e.g. us-east-1).
        if url.scheme in available_regions:
            region = url.scheme
        else:
            raise RegionNotAvailable('The following AWS Region is not available for use with AWS CodeCommit: {}. For more information about CodeCommit\'s availability in AWS Regions, see the AWS CodeCommit User Guide. If an AWS Region is listed as supported but you receive this error, try updating your version of the AWS CLI or the AWS SDKs.'.format(url.scheme))
    else:
        raise FormatError('The following URL is malformed: {}. A URL must be in one of the two following formats: codecommit://<profile>@<repository> or codecommit::<region>://<profile>@<repository>'.format(remote_url))
    credentials = session.get_credentials()
    if not credentials:
        raise CredentialsNotFound('The following profile does not have credentials configured: {}. You must configure the access key and secret key for the profile. For more information, see Configure an AWS CLI Profile in the AWS CLI User Guide.'.format(profile))
    return Context(session, repository, 'v1', region, credentials)
def profile(self):
    """Return the active AWS CLI profile's configuration dictionary.

    An empty dict is returned when no profile is configured.
    """
    session = botocore.session.Session()
    name = session.get_config_variable('profile')
    if name is None:
        return {}
    return session.full_config['profiles'][name]
# Click CLI configuration: make -h an alias for --help.
CONTEXT_SETTINGS = dict(help_option_names=['-h', '--help'])

# All CloudFormation stack statuses used when listing/filtering stacks.
CF_STATUS_FILTERS = ['CREATE_COMPLETE', 'CREATE_IN_PROGRESS',
                     'ROLLBACK_IN_PROGRESS', 'ROLLBACK_FAILED',
                     'ROLLBACK_COMPLETE', 'DELETE_IN_PROGRESS',
                     'DELETE_FAILED', 'UPDATE_IN_PROGRESS',
                     'UPDATE_COMPLETE_CLEANUP_IN_PROGRESS',
                     'UPDATE_COMPLETE', 'UPDATE_ROLLBACK_IN_PROGRESS',
                     'UPDATE_ROLLBACK_FAILED',
                     'UPDATE_ROLLBACK_COMPLETE_CLEANUP_IN_PROGRESS',
                     'UPDATE_ROLLBACK_COMPLETE', 'REVIEW_IN_PROGRESS']
# Terminal statuses treated as success / failure while polling events.
complete_status = ['CREATE_COMPLETE', 'UPDATE_COMPLETE']
failed_status = ['UPDATE_FAILED', 'CREATE_FAILED', 'ROLLBACK_IN_PROGRESS',
                 'UPDATE_ROLLBACK_IN_PROGRESS', 'UPDATE_ROLLBACK_COMPLETE']
failed_event = ['UPDATE_FAILED', 'CREATE_FAILED']

# Names of available stack modules, discovered from the stack_modules
# package by stripping the '_generator' suffix from module names.
module_list = []
pkgpath = imp.find_module('stack_modules')[1]

# Build the CloudFormation client, defaulting to us-east-1 when no
# region is configured anywhere in the botocore chain.
session = botocore.session.get_session()
if session.get_config_variable('region') is None:
    client = boto3.client('cloudformation', region_name='us-east-1')
else:
    client = boto3.client('cloudformation')

for _, name, _ in pkgutil.iter_modules([pkgpath]):
    if 'generator' in name:
        i = re.sub('_generator$', '', name)
        module_list.append(i)


# helper function to load the config file and create the object.
def confInit(config_file):
    """Load *config_file* into a StackConfig and wrap it in a Stack.

    :param config_file: path to the local stack configuration file
    :return: Stack built from the loaded configuration
    """
    conf = StackConfig()
    conf.loadlocalconfig(config_file)
    mystack = Stack(conf)
    return mystack
def test_variable_does_not_exist(self):
    # Unknown logical config names resolve to None rather than raising.
    session = create_session(session_vars=self.env_vars)
    self.assertIsNone(session.get_config_variable('foo/bar'))
def __init__(self, aws_host, aws_service, aws_access_key=None, aws_secret_access_key=None, aws_region=None, headers=None):
    """
    Example usage for talking to an AWS Elasticsearch Service:

    If an access key, secret access key, or the region is not provided
    they will be determined using the same method as the aws cli

    AWSRequestsAuth(aws_host='search-service-foobar.us-east-1.es.amazonaws.com',
                    aws_service='es',
                    aws_access_key='YOURKEY',
                    aws_secret_access_key='YOURSECRET',
                    aws_region='us-east-1')
    """
    self.aws_access_key = aws_access_key
    self.aws_secret_access_key = aws_secret_access_key
    self.aws_host = aws_host
    self.aws_region = aws_region
    self.service = aws_service
    self.headers = headers if headers else {}
    if not (aws_access_key and aws_secret_access_key):
        # Attempt to get instance role creds
        metadata_exception = TypeError(
            "AWS credentials not provided, and they cannot be retrieved from configuration"
        )
        try:
            import botocore.session
        except ImportError:
            raise metadata_exception
        session = botocore.session.Session()
        security_creds = session.get_credentials()
        if not security_creds:
            raise metadata_exception
        self.aws_access_key = security_creds.access_key
        self.aws_secret_access_key = security_creds.secret_key
        # Temporary (STS/instance-role) credentials carry a session token
        # that must be sent as a signed header.
        if security_creds.token:
            self.headers['X-Amz-Security-Token'] = security_creds.token
    if not aws_region:
        # Fixed: this previously imported `boto.session`, which does not
        # exist (boto has no `session` module with get_config_variable);
        # the region lookup lives in botocore.
        try:
            import botocore.session
        except ImportError:
            raise TypeError("Unable to determine region")
        session = botocore.session.Session()
        self.aws_region = session.get_config_variable('region')
    else:
        self.aws_region = aws_region
def botocore_default_region(session=None):
    """Return the default AWS region from botocore configuration.

    :param session: optional botocore session to query; a fresh default
        session is created when omitted.
    :returns: the configured region name, or None when none is set.
    """
    active_session = botocore.session.get_session() if session is None else session
    return active_session.get_config_variable('region')
def from_url(remote_url):
    """
    Parses repository information from a git url, filling in additional
    attributes we need from our AWS profile.

    Our remote helper accepts two distinct types of urls...

    * codecommit://<profile>@<repository>
    * codecommit::<region>://<profile>@<repository>

    If provided the former we get the whole url, but if the later git will
    truncate the proceeding 'codecommit::' prefix for us.

    The '<profile>@' url is optional, using the aws sessions present profile
    if not provided.

    :param str remote_url: git remote url to parse

    :returns: **Context** with our CodeCommit repository information

    :raises:
      * **FormatError** if the url is malformed
      * **ProfileNotFound** if the url references a profile that doesn't exist
      * **RegionNotFound** if the url references a region that doesn't exist
    """
    url = urlparse(remote_url)
    # Dedicated event emitter so CLI plugins loaded below hook into this
    # session only, not a shared global one.
    event_handler = botocore.hooks.HierarchicalEmitter()
    profile = 'default'
    repository = url.netloc
    # For 'codecommit::<region>://...' git strips the prefix, leaving the
    # region as the scheme; overridden below for the plain scheme.
    region = url.scheme

    if not url.scheme or not url.netloc:
        raise FormatError("'%s' is a malformed url" % remote_url)

    if '@' in url.netloc:
        # Explicit '<profile>@<repository>' form: validate the profile.
        profile, repository = url.netloc.split('@', 1)
        session = botocore.session.Session(profile = profile, event_hooks = event_handler)

        if profile not in session.available_profiles:
            raise ProfileNotFound('Profile %s not found, available profiles are: %s' % (profile, ', '.join(session.available_profiles)))
    else:
        session = botocore.session.Session(event_hooks = event_handler)

    try:
        # when the aws cli is available support plugin authentication
        import awscli.plugin

        awscli.plugin.load_plugins(
            session.full_config.get('plugins', {}),
            event_hooks = event_handler,
            include_builtins = False,
        )

        # Let plugins finish initializing the session (same hook the CLI
        # itself fires).
        session.emit_first_non_none_response('session-initialized', session = session)
    except ImportError:
        pass

    if url.scheme == 'codecommit':
        # Plain 'codecommit://' form carries no region; fall back to the
        # profile's configured default region.
        region = session.get_config_variable('region')

        if not region:
            raise RegionNotFound("Profile %s doesn't have a region available. Please set it." % profile)

    credentials = session.get_credentials()

    if not credentials:
        raise CredentialsNotFound("Profile %s doesn't have credentials available." % profile)

    return Context(session, repository, 'v1', region, credentials)
def _create_and_verify(profile_to_use=None):
    """Create a botocore Session for the given profile and validate it.

    :param profile_to_use: optional profile name; botocore's default
        profile resolution applies when None.
    :returns: the newly created botocore Session.
    """
    new_session = botocore.session.Session(profile=profile_to_use)
    # The return value is deliberately discarded: reading any config
    # variable forces botocore to resolve the profile, surfacing an
    # error for a nonexistent profile right away.
    new_session.get_config_variable('region')
    return new_session
def whoami(session=None, disable_account_alias=False):
    """Return a WhoamiInfo namedtuple.

    Args:
        session: An optional boto3 or botocore Session
        disable_account_alias (bool): Disable checking the account alias.
            May also be an iterable of strings: the alias lookup is then
            skipped when any string is a prefix/suffix of the account id,
            or equals the principal's Name, Arn, or RoleSessionName.
    Returns:
        WhoamiInfo: Data on the current IAM principal, account, and region.
    """
    if session is None:
        session = botocore.session.get_session()
    elif hasattr(session, '_session'):
        # allow boto3 Session as well: unwrap to the underlying botocore session
        session = session._session

    data = {}
    data['Region'] = session.get_config_variable('region')

    response = session.create_client('sts').get_caller_identity()
    for field in ['Account', 'Arn', 'UserId']:
        data[field] = response[field]

    # ARN tail is '<type>/<name...>', e.g. 'assumed-role/MyRole/MySession'.
    # NOTE(review): a root-account ARN ('...:root') has no '/' and would
    # raise ValueError here — confirm callers never run as root.
    data['Type'], name = data['Arn'].rsplit(':', 1)[1].split('/', 1)
    if data['Type'] == 'assumed-role':
        data['Name'], data['RoleSessionName'] = name.rsplit('/', 1)
    else:
        data['Name'] = name
        data['RoleSessionName'] = None

    if data['Type'] == 'assumed-role' and data['Name'].startswith(
            'AWSReservedSSO'):
        try:
            # format is AWSReservedSSO_{permission-set}_{random-tag}
            data['SSOPermissionSet'] = data['Name'].split('_', 1)[1].rsplit('_', 1)[0]
        except Exception as e:
            # Unexpected role-name format: leave the permission set unknown.
            data['SSOPermissionSet'] = None
    else:
        data['SSOPermissionSet'] = None

    data['AccountAliases'] = []
    if not isinstance(disable_account_alias, bool):
        # Treat a non-bool value as a collection of match patterns that
        # opt specific accounts/principals out of the alias lookup.
        for value in disable_account_alias:
            if data['Account'].startswith(value) or data['Account'].endswith(
                    value):
                disable_account_alias = True
                break
            fields = ['Name', 'Arn', 'RoleSessionName']
            if any(value == data[field] for field in fields):
                disable_account_alias = True
                break
    if not disable_account_alias:
        try:  #pedantry
            paginator = session.create_client('iam').get_paginator(
                'list_account_aliases')
            for response in paginator.paginate():
                data['AccountAliases'].extend(response['AccountAliases'])
        except ClientError as e:
            # AccessDenied is expected for principals without iam:ListAccountAliases;
            # anything else is a real error and is re-raised.
            if e.response.get('Error', {}).get('Code') != 'AccessDenied':
                raise

    return WhoamiInfo(**data)