def test_get_credentials_from_extra_with_s3_config_and_profile(
        self, mock_get_connection, mock_parse_s3_config):
    """Extras carrying an s3 config file, format and profile must be routed
    to ``parse_s3_config`` with exactly those three values."""
    mock_get_connection.return_value = Connection(
        extra='{"s3_config_format": "aws", '
              '"profile": "test", '
              '"s3_config_file": "aws-credentials", '
              '"region_name": "us-east-1"}'
    )

    hook = AwsBaseHook()
    hook._get_credentials(region_name=None)

    # The config file path, its format, and the profile name are all
    # forwarded positionally.
    mock_parse_s3_config.assert_called_once_with(
        'aws-credentials', 'aws', 'test')
def get_iam_token(self, conn: Connection) -> Tuple[str, str, int]:
    """
    Uses AWSHook to retrieve a temporary password to connect to Postgres
    or Redshift. Port is required. If none is provided, default is used for
    each service.

    :param conn: the connection whose extras select Redshift vs Postgres
        and name the AWS connection to authenticate with
    :return: a ``(login, token, port)`` triple usable as DB credentials
    """
    from airflow.providers.amazon.aws.hooks.base_aws import AwsBaseHook

    redshift = conn.extra_dejson.get('redshift', False)
    aws_conn_id = conn.extra_dejson.get('aws_conn_id', 'aws_default')
    aws_hook = AwsBaseHook(aws_conn_id, client_type='rds')
    login = conn.login

    # Fall back to the service default port when the connection has none.
    port = conn.port if conn.port is not None else (5439 if redshift else 5432)

    if redshift:
        # The cluster identifier defaults to the first label of the host,
        # e.g. my-cluster.ccdre4hpd39h.us-east-1.redshift.amazonaws.com
        # yields my-cluster.
        cluster_identifier = conn.extra_dejson.get(
            'cluster-identifier', conn.host.split('.')[0])
        session, endpoint_url = aws_hook._get_credentials()
        redshift_client = session.client(
            "redshift",
            endpoint_url=endpoint_url,
            config=aws_hook.config,
            verify=aws_hook.verify,
        )
        # Redshift issues both a temporary password and (possibly remapped)
        # user name, so the login is taken from the response as well.
        cluster_creds = redshift_client.get_cluster_credentials(
            DbUser=conn.login,
            DbName=self.schema or conn.schema,
            ClusterIdentifier=cluster_identifier,
            AutoCreate=False,
        )
        token = cluster_creds['DbPassword']
        login = cluster_creds['DbUser']
    else:
        # Plain Postgres (RDS): an IAM auth token serves as the password.
        token = aws_hook.conn.generate_db_auth_token(
            conn.host, port, conn.login)

    return login, token, port