def get_iam_token(self, conn):
    """
    Uses AWSHook to retrieve a temporary password to connect to
    Postgres or Redshift. Port is required. If none is provided,
    the default for the respective service is used.
    """
    from airflow.providers.amazon.aws.hooks.base_aws import AwsBaseHook

    use_redshift = conn.extra_dejson.get('redshift', False)
    aws_conn_id = conn.extra_dejson.get('aws_conn_id', 'aws_default')
    hook = AwsBaseHook(aws_conn_id)

    login = conn.login
    # Fall back to the service default port when the connection has none.
    port = conn.port if conn.port is not None else (5439 if use_redshift else 5432)

    if use_redshift:
        # Default the cluster identifier to the leading label of the host,
        # e.g. my-cluster.ccdre4hpd39h.us-east-1.redshift.amazonaws.com -> my-cluster
        cluster_identifier = conn.extra_dejson.get(
            'cluster-identifier', conn.host.split('.')[0])
        redshift_client = hook.get_client_type('redshift')
        creds = redshift_client.get_cluster_credentials(
            DbUser=conn.login,
            DbName=self.schema or conn.schema,
            ClusterIdentifier=cluster_identifier,
            AutoCreate=False,
        )
        login = creds['DbUser']
        token = creds['DbPassword']
    else:
        rds_client = hook.get_client_type('rds')
        token = rds_client.generate_db_auth_token(conn.host, port, conn.login)

    return login, token, port
def test_get_client_type_returns_a_boto3_client_of_the_requested_type(self):
    """get_client_type should return a boto3 client that can see the fixtures."""
    self._create_clusters()
    hook = AwsBaseHook(aws_conn_id='aws_default')
    redshift_client = hook.get_client_type('redshift')
    described = redshift_client.describe_clusters()
    # Both clusters created by the fixture must be visible through the hook.
    self.assertEqual(len(described['Clusters']), 2)
def test_get_client_type_returns_a_boto3_client_of_the_requested_type(self):
    """get_client_type should return a boto3 EMR client backed by the mock."""
    # Sanity-check that the AWS mock is active before exercising the hook.
    if boto3.client('emr', region_name='us-east-1').list_clusters()['Clusters']:
        raise ValueError('AWS not properly mocked')
    hook = AwsBaseHook(aws_conn_id='aws_default', client_type='emr')
    emr_client = hook.get_client_type('emr')
    self.assertEqual(emr_client.list_clusters()['Clusters'], [])
def get_iam_token(self, conn):
    """
    Uses AWSHook to retrieve a temporary password to connect to MySQL.
    Port is required. If none is provided, default 3306 is used.
    """
    from airflow.providers.amazon.aws.hooks.base_aws import AwsBaseHook

    aws_conn_id = conn.extra_dejson.get('aws_conn_id', 'aws_default')
    hook = AwsBaseHook(aws_conn_id)
    # Default to the standard MySQL port when the connection has none.
    port = 3306 if conn.port is None else conn.port
    rds_client = hook.get_client_type('rds')
    token = rds_client.generate_db_auth_token(conn.host, port, conn.login)
    return token, port
def execute(self, context):
    """Trigger the configured Glue crawler and block until it finishes.

    Starts the crawler named by ``self._crawler_name`` and polls its state
    once per second until it returns to READY.

    Raises:
        RuntimeError: if the start_crawler call does not return HTTP 200.
    """
    hook = AwsBaseHook(self._aws_conn_id)
    glue_client = hook.get_client_type("glue", region_name=self._region_name)

    self.log.info("Triggering crawler")
    # Fix: previously started the hard-coded "ratings-crawler" while the
    # polling loop below watched self._crawler_name — the operator started
    # one crawler and waited on another. Use the configured name for both.
    response = glue_client.start_crawler(Name=self._crawler_name)
    if response["ResponseMetadata"]["HTTPStatusCode"] != 200:
        raise RuntimeError(
            "An error occurred while triggering the crawler: %r" % response)

    self.log.info("Waiting for crawler to finish")
    while True:
        time.sleep(1)
        crawler = glue_client.get_crawler(Name=self._crawler_name)
        crawler_state = crawler["Crawler"]["State"]
        if crawler_state == "READY":
            self.log.info("Crawler finished running")
            break
def test_use_default_boto3_behaviour_without_conn_id(self):
    """A None or empty conn_id should fall back to plain boto3 defaults."""
    for empty_id in (None, ''):
        hook = AwsBaseHook(aws_conn_id=empty_id, client_type='s3')
        # Must not raise even though no Airflow connection is configured.
        hook.get_client_type('s3')