def test_read_configuration_from_connection_empty_config(self):
    """An empty ``extra`` config must leave every option at its documented default."""
    conn = Connection(
        conn_type="gcpssh",
        extra=json.dumps({}),
    )
    conn_uri = conn.get_uri()
    with mock.patch.dict("os.environ", AIRFLOW_CONN_GCPSSH=conn_uri):
        hook = ComputeEngineSSHHook(gcp_conn_id="gcpssh")
        hook._load_connection_config()
        # Unset fields fall back to None.
        self.assertIsNone(hook.instance_name)
        self.assertIsNone(hook.hostname)
        self.assertEqual("root", hook.user)
        # assertIs(False, ...) verifies both the value and the bool type in
        # one identity check (False is a singleton), replacing the previous
        # assertEqual + assertIsInstance pairs.
        self.assertIs(False, hook.use_internal_ip)
        self.assertIs(False, hook.use_iap_tunnel)
        self.assertIs(False, hook.use_oslogin)
        self.assertEqual(300, hook.expire_time)
        self.assertIsInstance(hook.expire_time, int)
def test_read_configuration_from_connection_empty_config(self):
    """Verify that an empty extra config yields the documented default values."""
    connection = Connection(conn_type="gcpssh", extra=json.dumps({}))
    uri = connection.get_uri()
    with mock.patch.dict("os.environ", AIRFLOW_CONN_GCPSSH=uri):
        hook = ComputeEngineSSHHook(gcp_conn_id="gcpssh")
        hook._load_connection_config()
        # Unset fields fall back to None / "root" / False / 300.
        assert hook.instance_name is None
        assert hook.hostname is None
        assert hook.user == "root"
        assert hook.use_internal_ip is False
        assert isinstance(hook.use_internal_ip, bool)
        assert hook.use_iap_tunnel is False
        assert isinstance(hook.use_iap_tunnel, bool)
        assert hook.use_oslogin is False
        assert isinstance(hook.use_oslogin, bool)
        assert hook.expire_time == 300
        assert isinstance(hook.expire_time, int)
def provide_wasb_default_connection(key_file_path: str):
    """
    Context manager to provide a temporary value for wasb_default connection

    :param key_file_path: Path to file with wasb_default credentials .json file.
    :type key_file_path: str
    """
    # Only JSON credential files are supported.
    if not key_file_path.endswith(".json"):
        raise AirflowException("Use a JSON key file.")
    with open(key_file_path) as fh:
        secrets = json.load(fh)
    connection = Connection(
        conn_id=WASB_CONNECTION_ID,
        conn_type="wasb",
        host=secrets.get("host"),
        login=secrets.get("login"),
        password=secrets.get("password"),
        extra=json.dumps(secrets.get("extra")),
    )
    # Expose the connection through the env var Airflow resolves conn_ids from.
    env_var_name = f"AIRFLOW_CONN_{connection.conn_id.upper()}"
    with patch_environ({env_var_name: connection.get_uri()}):
        yield
def test_connection() -> APIResponse:
    """
    To test a connection, this method first creates an in-memory dummy conn_id & exports that to an
    env var, as some hook classes tries to find out the conn from their __init__ method & errors out
    if not found. It also deletes the conn id env variable after the test.
    """
    transient_conn_id = get_random_string()
    conn_env_var = f'{CONN_ENV_PREFIX}{transient_conn_id.upper()}'
    try:
        data = connection_schema.load(request.json)
        data['conn_id'] = transient_conn_id
        conn = Connection(**data)
        # Export the connection so hooks that resolve it in __init__ can find it.
        os.environ[conn_env_var] = conn.get_uri()
        status, message = conn.test_connection()
        return connection_test_schema.dump({"status": status, "message": message})
    except ValidationError as err:
        raise BadRequest(detail=str(err.messages))
    finally:
        # Always clean up the temporary env var, even on failure.
        os.environ.pop(conn_env_var, None)
def test_run_example_gcp_vision_autogenerated_id_dag(self):
    """List S3 buckets through a connection that assumes a role via Google web identity."""
    extra_config = {
        "role_arn": ROLE_ANR,
        "assume_role_method": "assume_role_with_web_identity",
        "assume_role_with_web_identity_federation": 'google',
        "assume_role_with_web_identity_federation_audience": AUDIENCE,
    }
    mock_connection = Connection(conn_type="aws", extra=json.dumps(extra_config))
    env_patch = mock.patch.dict('os.environ', AIRFLOW_CONN_AWS_DEFAULT=mock_connection.get_uri())
    with env_patch:
        hook = AwsBaseHook(client_type='s3')
        response = hook.get_conn().list_buckets()
        # A well-formed ListBuckets response always carries a 'Buckets' key.
        assert 'Buckets' in response
def provide_facebook_connection(key_file_path: str):
    """
    Context manager that provides a temporary value for the Facebook default connection
    (exported as the ``AIRFLOW_CONN_<FACEBOOK_CONNECTION_ID>`` environment variable).
    It builds a new connection whose extras carry the contents of the provided
    credentials JSON file, after validating that all required fields are present.

    :param key_file_path: Path to file with FACEBOOK credentials .json file.
    :type key_file_path: str
    """
    # Only JSON credential files are supported.
    if not key_file_path.endswith(".json"):
        raise AirflowException("Use a JSON key file.")
    with open(key_file_path) as credentials:
        creds = json.load(credentials)
    # Reject the file early if any mandatory configuration key is absent.
    missing_keys = CONFIG_REQUIRED_FIELDS - creds.keys()
    if missing_keys:
        message = f"{missing_keys} fields are missing"
        raise AirflowException(message)
    conn = Connection(conn_id=FACEBOOK_CONNECTION_ID, conn_type=CONNECTION_TYPE, extra=json.dumps(creds))
    # Expose the connection via the env var Airflow resolves conn_ids from,
    # and restore the environment on exit.
    with patch_environ(
        {f"AIRFLOW_CONN_{conn.conn_id.upper()}": conn.get_uri()}):
        yield
def provide_leveldb_connection():
    """Context manager that provides a temporary value of AIRFLOW_CONN_LEVELDB_DEFAULT connection"""
    connection = Connection(conn_id=LEVELDB_CONNECTION_ID, conn_type=CONNECTION_TYPE)
    # Expose the connection through the env var Airflow resolves conn_ids from.
    env_var_name = f"AIRFLOW_CONN_{connection.conn_id.upper()}"
    with patch_environ({env_var_name: connection.get_uri()}):
        yield
def test_get_credentials_from_gcp_credentials(self):
    """
    Verify that AwsBaseHook assumes an AWS role via Google web-identity federation:
    boto3/botocore are fully mocked and the exact call sequences they receive are
    asserted, as is the delegation to the Google id_token_credentials module.
    """
    # Connection extras that select the web-identity assume-role path.
    mock_connection = Connection(extra=json.dumps({
        "role_arn": "arn:aws:iam::123456:role/role_arn",
        "assume_role_method": "assume_role_with_web_identity",
        "assume_role_with_web_identity_federation": 'google',
        "assume_role_with_web_identity_federation_audience": 'aws-federation.airflow.apache.org',
    }))
    # Store original __import__
    orig_import = __import__
    mock_id_token_credentials = mock.Mock()

    # Intercept the lazy import of the Google id-token-credentials module so
    # the test can observe the call without requiring google-auth installed.
    def import_mock(name, *args):
        if name == 'airflow.providers.google.common.utils.id_token_credentials':
            return mock_id_token_credentials
        return orig_import(name, *args)

    with mock.patch(
        'builtins.__import__', side_effect=import_mock
    ), mock.patch.dict(
        'os.environ', AIRFLOW_CONN_AWS_DEFAULT=mock_connection.get_uri()
    ), mock.patch(
        'airflow.providers.amazon.aws.hooks.base_aws.boto3'
    ) as mock_boto3, mock.patch(
        'airflow.providers.amazon.aws.hooks.base_aws.botocore'
    ) as mock_botocore, mock.patch(
        'airflow.providers.amazon.aws.hooks.base_aws.botocore.session'
    ) as mock_session:
        hook = AwsBaseHook(aws_conn_id='aws_default', client_type='airflow_test')

        credentials_from_hook = hook.get_credentials()
        mock_get_credentials = mock_boto3.session.Session.return_value.get_credentials
        # The hook must return the frozen credentials produced by the session.
        assert (
            mock_get_credentials.return_value.get_frozen_credentials.return_value
            == credentials_from_hook
        )

        # Exact boto3 call sequence: a probe session, then the real session
        # built on the mocked botocore session, then credential retrieval.
        mock_boto3.assert_has_calls([
            mock.call.session.Session(
                aws_access_key_id=None,
                aws_secret_access_key=None,
                aws_session_token=None,
                region_name=None,
            ),
            mock.call.session.Session()._session.__bool__(),
            mock.call.session.Session(
                botocore_session=mock_session.Session.return_value,
                region_name=mock_boto3.session.Session.return_value.region_name,
            ),
            mock.call.session.Session().get_credentials(),
            mock.call.session.Session().get_credentials().get_frozen_credentials(),
        ])

        # botocore must be asked to build a web-identity credential fetcher
        # and wrap it in deferred-refreshable credentials.
        mock_fetcher = mock_botocore.credentials.AssumeRoleWithWebIdentityCredentialFetcher
        mock_botocore.assert_has_calls([
            mock.call.credentials.AssumeRoleWithWebIdentityCredentialFetcher(
                client_creator=mock_boto3.session.Session.return_value._session.create_client,
                extra_args={},
                role_arn='arn:aws:iam::123456:role/role_arn',
                web_identity_token_loader=mock.ANY,
            ),
            mock.call.credentials.DeferredRefreshableCredentials(
                method='assume-role-with-web-identity',
                refresh_using=mock_fetcher.return_value.fetch_credentials,
                time_fetcher=mock.ANY,
            ),
        ])

        mock_session.assert_has_calls([mock.call.Session()])
        # The Google helper must be asked for id-token credentials with the
        # audience configured in the connection extras.
        mock_id_token_credentials.assert_has_calls([
            mock.call.get_default_id_token_credentials(
                target_audience='aws-federation.airflow.apache.org')
        ])