def test_build_job_request_settings(self):
    """build_job() must issue a POST against the URL Jenkins reports."""
    jenkins_mock = mock.Mock(spec=jenkins.Jenkins, auth='secret', timeout=2)
    jenkins_mock.build_job_url.return_value = 'http://apache.org'

    patch_target = ('airflow.contrib.operators.jenkins_job_trigger_operator'
                    '.jenkins_request_with_headers')
    with mock.patch(patch_target) as request_mock:
        trigger = JenkinsJobTriggerOperator(
            dag=None,
            task_id="build_job_test",
            job_name="a_job_on_jenkins",
            jenkins_connection_id="fake_jenkins_connection",
        )
        trigger.build_job(jenkins_mock)
        # Second positional argument of the first call is the Request object.
        issued_request = request_mock.call_args_list[0][0][1]

    self.assertEqual(issued_request.method, 'POST')
    self.assertEqual(issued_request.url, 'http://apache.org')
def test_results_parser_callable(self, mock_execute, mock_get_query_results):
    """The custom results_parser_callable receives the raw query results."""
    pass_value = 'pass_value'
    mock_execute.return_value = None
    mock_get_query_results.return_value = pass_value

    parser = mock.Mock(return_value=[pass_value])
    operator = self.__construct_operator('select value from tab1 limit 1;',
                                         pass_value, None, parser)
    operator.execute()

    parser.assert_called_once_with([pass_value])
def test_check_success_task_not_raises(self):
    """_check_success_task() accepts a SUCCEEDED job without raising."""
    describe_response = {'jobs': [{'status': 'SUCCEEDED'}]}
    client_mock = mock.Mock(**{'describe_jobs.return_value': describe_response})
    self.batch.jobId = '8ba9d676-4108-4474-9dca-8bbac1da9b19'
    self.batch.client = client_mock

    self.batch._check_success_task()

    # Ordering of str(dict) is not guaranteed.
    client_mock.describe_jobs.assert_called_once_with(
        jobs=['8ba9d676-4108-4474-9dca-8bbac1da9b19'])
def test_check_success_task_not_raises(self):
    """_check_success_task() accepts a SUCCEEDED job without raising."""
    client_mock = mock.Mock()
    client_mock.describe_jobs.return_value = {
        "jobs": [{"jobId": JOB_ID, "status": "SUCCEEDED"}]
    }
    self.batch.jobId = JOB_ID
    self.batch.client = client_mock

    self.batch._check_success_task()

    # Ordering of str(dict) is not guaranteed.
    client_mock.describe_jobs.assert_called_once_with(jobs=[JOB_ID])
def test_run_load_with_kms(self, run_with_config):
    """run_load must forward encryption_configuration into the load config."""
    kms_config = {
        "kms_key_name": "projects/p/locations/l/keyRings/k/cryptoKeys/c"
    }
    cursor = hook.BigQueryBaseCursor(mock.Mock(), "project_id")
    cursor.run_load(
        destination_project_dataset_table='p.d.dt',
        source_uris=['abc.csv'],
        autodetect=True,
        encryption_configuration=kms_config,
    )
    args, kwargs = run_with_config.call_args
    # The very same dict object must be passed through, not a copy.
    self.assertIs(
        args[0]['load']['destinationEncryptionConfiguration'],
        kms_config,
    )
def test_when_the_config_key_does_not_exists(self):
    """A missing logging_config_class key is reported at debug level.

    When ``conf.get('logging', 'logging_config_class')`` raises
    AirflowConfigException, configure_logging() should fall back to the
    default config and emit a debug message instead of failing.
    """
    from airflow import logging_config
    conf_get = conf.get

    def side_effect(*args):
        # Simulate only the logging_config_class key being absent;
        # every other key is served by the real conf.get.
        if args[1] == 'logging_config_class':
            raise AirflowConfigException
        else:
            return conf_get(*args)

    logging_config.conf.get = mock.Mock(side_effect=side_effect)
    try:
        with patch.object(logging_config.log, 'debug') as mock_debug:
            logging_config.configure_logging()
            mock_debug.assert_any_call(
                'Could not find key logging_config_class in config')
    finally:
        # BUG FIX: the original left the mock installed on the module,
        # leaking it into every later test; restore the real conf.get.
        logging_config.conf.get = conf_get
def test_inner_whitespace_is_collapsed(self):
    # Verifies SigV4 canonical-header construction: runs of interior
    # whitespace in a header value collapse to a single space, while
    # whitespace inside a quoted string is preserved verbatim.
    # NOTE(review): the literal header values below may originally have
    # contained multiple interior spaces (e.g. 'two  spaces') that were
    # collapsed by reformatting — confirm against the repository.
    auth = HmacAuthV4Handler('glacier.us-east-1.amazonaws.com',
                             mock.Mock(), self.provider)
    self.request.headers['x-amz-archive-description'] = 'two spaces'
    self.request.headers['x-amz-quoted-string'] = ' "a b c" '
    # headers_to_sign returns the subset of headers that are signed.
    headers = auth.headers_to_sign(self.request)
    self.assertEqual(headers, {'Host': 'glacier.us-east-1.amazonaws.com',
                               'x-amz-archive-description': 'two spaces',
                               'x-amz-glacier-version': '2012-06-01',
                               'x-amz-quoted-string': ' "a b c" '})
    # Note the single space between the "two spaces".
    self.assertEqual(auth.canonical_headers(headers),
                     'host:glacier.us-east-1.amazonaws.com\n'
                     'x-amz-archive-description:two spaces\n'
                     'x-amz-glacier-version:2012-06-01\n'
                     'x-amz-quoted-string:"a b c"')
def test_instance_create_idempotent(self, mock_hook, _check_if_instance_exists):
    """If the instance already exists, execute() must not call create_instance."""
    _check_if_instance_exists.return_value = True
    mock_hook.return_value.create_instance.return_value = True

    op = CloudSqlInstanceCreateOperator(project_id=PROJECT_ID,
                                        instance=INSTANCE_NAME,
                                        body=CREATE_BODY,
                                        task_id="id")
    # pylint: disable=assignment-from-no-return
    result = op.execute(context={'task_instance': mock.Mock()})

    mock_hook.assert_called_once_with(api_version="v1beta4",
                                      gcp_conn_id="google_cloud_default")
    mock_hook.return_value.create_instance.assert_not_called()
    self.assertIsNone(result)
def test_create_execute(self, mock_hook):
    """execute() forwards every table parameter to the hook's create_table."""
    op = BigtableTableCreateOperator(project_id=PROJECT_ID,
                                     instance_id=INSTANCE_ID,
                                     table_id=TABLE_ID,
                                     initial_split_keys=INITIAL_SPLIT_KEYS,
                                     column_families=EMPTY_COLUMN_FAMILIES,
                                     task_id="id")
    instance = mock.Mock(Instance)
    mock_hook.return_value.get_instance.return_value = instance

    op.execute(None)

    mock_hook.assert_called_once_with()
    mock_hook.return_value.create_table.assert_called_once_with(
        instance=instance,
        table_id=TABLE_ID,
        initial_split_keys=INITIAL_SPLIT_KEYS,
        column_families=EMPTY_COLUMN_FAMILIES)
def test_execute_not_ignore_zero(self, mock_get_db_hook):
    """With ignore_zero=False a zero metric value must raise AirflowException."""
    db_hook = mock.Mock()
    db_hook.get_first.return_value = [0]
    mock_get_db_hook.return_value = db_hook

    operator = self._construct_operator(
        table='test_table',
        metric_thresholds={'f1': 1},
        ratio_formula='max_over_min',
        ignore_zero=False,
    )

    with self.assertRaises(AirflowException):
        operator.execute()
def test_check_success_task_not_raises(self):
    """An ECS task whose container STOPPED with exit code 0 is a success."""
    describe_response = {
        'tasks': [{
            'containers': [{
                'name': 'container-name',
                'lastStatus': 'STOPPED',
                'exitCode': 0
            }]
        }]
    }
    client_mock = mock.Mock()
    client_mock.describe_tasks.return_value = describe_response
    self.ecs.client = client_mock
    self.ecs.arn = 'arn'

    self.ecs._check_success_task()

    client_mock.describe_tasks.assert_called_once_with(cluster='c',
                                                       tasks=['arn'])
def test_check_success_tasks_raises_pending(self):
    """A RUNNABLE job is still pending, so _check_success_task must raise."""
    client_mock = mock.Mock()
    client_mock.describe_jobs.return_value = {'jobs': [{'status': 'RUNNABLE'}]}
    self.batch.jobId = '8ba9d676-4108-4474-9dca-8bbac1da9b19'
    self.batch.client = client_mock

    with self.assertRaises(Exception) as e:
        self.batch._check_success_task()

    # Ordering of str(dict) is not guaranteed.
    self.assertIn('This task is still pending ', str(e.exception))
def test_create_instance_missing_project_id(self, get_client, instance_create,
                                            mock_project_id):
    """Without a project id, create_instance must raise before any API call."""
    operation = mock.Mock()
    # BUG FIX: the original assigned to a dead ``result_return_value``
    # attribute; the intent was clearly to configure the return value of
    # ``operation.result()``.
    operation.result.return_value = Instance(instance_id=CBT_INSTANCE,
                                             client=get_client)
    instance_create.return_value = operation

    with self.assertRaises(AirflowException) as cm:
        self.bigtable_hook_no_default_project_id.create_instance(
            instance_id=CBT_INSTANCE,
            main_cluster_id=CBT_CLUSTER,
            main_cluster_zone=CBT_ZONE)

    # The error must fire before the client or instance creation is touched.
    get_client.assert_not_called()
    instance_create.assert_not_called()
    err = cm.exception
    self.assertIn("The project id must be passed", str(err))
def test_call_with_success(self, slack_client_class_mock):
    """SlackHook.call delegates to SlackClient.api_call using the token."""
    api_client = mock.Mock()
    api_client.api_call.return_value = {'ok': True}
    slack_client_class_mock.return_value = api_client

    test_token = 'test_token'
    test_slack_conn_id = 'test_slack_conn_id'
    test_method = 'test_method'
    test_api_params = {'key1': 'value1', 'key2': 'value2'}

    slack_hook = SlackHook(token=test_token, slack_conn_id=test_slack_conn_id)
    slack_hook.call(test_method, test_api_params)

    slack_client_class_mock.assert_called_with(test_token)
    api_client.api_call.assert_called_with(test_method, **test_api_params)
def test_poll_job_status_success(self, mock_randint):
    """_wait_for_task_ended falls back to polling describe_jobs when the
    boto waiter fails, and stops once the job reports SUCCEEDED."""
    mock_randint.return_value = 0  # don't pause in unit tests
    client_mock = mock.Mock()
    # Make the waiter path blow up so the polling fallback is exercised.
    client_mock.get_waiter.return_value.wait.side_effect = ValueError()
    client_mock.describe_jobs.return_value = {
        "jobs": [{"jobId": JOB_ID, "status": "SUCCEEDED"}]
    }
    self.batch.jobId = JOB_ID
    self.batch.client = client_mock

    self.batch._wait_for_task_ended()

    client_mock.describe_jobs.assert_called_once_with(jobs=[JOB_ID])
def test_non_404_gcf_error_bubbled_up(self, mock_hook):
    """Any non-404 HttpError from delete_function must propagate to the caller."""
    op = GcfFunctionDeleteOperator(
        name=self._FUNCTION_NAME,
        task_id="id"
    )
    # Minimal stand-in for an httplib2 response with a 500 status.
    resp = type('', (object,), {"status": 500})()
    # IDIOM FIX: assign the exception instance directly — a mock raises a
    # side_effect exception on call. The original wrapped it in an extra
    # Mock(side_effect=...), which is equivalent but needlessly indirect.
    mock_hook.return_value.delete_function.side_effect = HttpError(
        resp=resp, content=b'error')

    with self.assertRaises(HttpError):
        op.execute(None)

    mock_hook.assert_called_once_with(api_version='v1',
                                      gcp_conn_id='google_cloud_default')
    mock_hook.return_value.delete_function.assert_called_once_with(
        'projects/project_name/locations/project_location/functions/function_name'
    )
def test_run_query_with_arg(self, mocked_rwc):
    """run_query must propagate labels into the generated job configuration."""
    project_id = 12345

    def check_config(config):
        # Invoked in place of run_with_configuration; inspect the payload.
        self.assertEqual(
            config['labels'], {'label1': 'test1', 'label2': 'test2'}
        )

    mocked_rwc.side_effect = check_config
    cursor = hook.BigQueryBaseCursor(mock.Mock(), project_id)
    cursor.run_query(
        sql='select 1',
        destination_dataset_table='my_dataset.my_table',
        labels={'label1': 'test1', 'label2': 'test2'},
    )

    assert mocked_rwc.call_count == 1
def test_create_view_fails_on_exception(self):
    """create_empty_table must surface API errors raised for a bad view spec."""
    project_id = 'bq-project'
    dataset_id = 'bq_dataset'
    table_id = 'bq_table_view'
    # Deliberately malformed view definition: no 'query' key.
    view = {
        'incorrect_key': 'SELECT * FROM `test-project-id.test_dataset_id.test_table_prefix*`',
        "useLegacySql": False
    }

    mock_service = mock.Mock()
    insert_method = mock_service.tables.return_value.insert
    insert_method.return_value.execute.side_effect = HttpError(
        resp={'status': '400'}, content=b'Query is required for views')

    cursor = hook.BigQueryBaseCursor(mock_service, project_id)
    with self.assertRaises(Exception):
        cursor.create_empty_table(project_id, dataset_id, table_id, view=view)
def test_training_ends_with_wait(self, mock_client, mock_check_training):
    """With wait_for_completion, create_training_job polls
    describe_training_job until completion (four describe calls here)."""
    mock_check_training.return_value = True
    mock_session = mock.Mock()
    mock_session.create_training_job.return_value = test_arn_return
    mock_session.describe_training_job.side_effect = [
        DESCRIBE_TRAINING_INPROGRESS_RETURN,
        DESCRIBE_TRAINING_STOPPING_RETURN,
        DESCRIBE_TRAINING_COMPELETED_RETURN,
        DESCRIBE_TRAINING_COMPELETED_RETURN,
    ]
    mock_client.return_value = mock_session

    sm_hook = SageMakerHook(aws_conn_id='sagemaker_test_conn_id_1')
    sm_hook.create_training_job(create_training_params,
                                wait_for_completion=True,
                                print_log=False,
                                check_interval=1)

    self.assertEqual(mock_session.describe_training_job.call_count, 4)
def test_refund_1(self, mock_urlopen):
    """A refund request is sent to the Alipay server, and Chinese text in
    the payload is handled correctly."""
    # Stub out urlopen so no real network traffic happens.
    response = mock.Mock()
    response.read.return_value = valid_response
    mock_urlopen.return_value = response

    alipay = self.get_client("RSA")
    alipay.refund_app_order(
        out_trade_no="test_ouit_trade_no",
        refund_amount=0.01,
        refund_reason="中文测试",
    )

    self.assertTrue(mock_urlopen.called)
def test_num_retries_is_obeyed(self):
    """num_retries=2 means three total attempts: the first try plus two
    retries after each failure."""
    api = mock.Mock()
    api.upload_part.side_effect = Exception()
    jobs = Queue()
    results = Queue()
    worker = UploadWorkerThread(
        api, 'vault_name', self.filename, 'upload_id',
        jobs, results, num_retries=2, time_between_retries=0)

    jobs.put((0, 1024))
    jobs.put(_END_SENTINEL)
    worker.run()

    self.assertEqual(api.upload_part.call_count, 3)
def test_call_with_failure(self, slack_client_class_mock):
    """An 'ok': False API response makes SlackHook.call raise AirflowException."""
    api_client = mock.Mock()
    api_client.api_call.return_value = {'ok': False, 'error': 'test_error'}
    slack_client_class_mock.return_value = api_client

    slack_hook = SlackHook(token='test_token',
                           slack_conn_id='test_slack_conn_id')

    self.assertRaises(AirflowException, slack_hook.call,
                      'test_method', {'key1': 'value1', 'key2': 'value2'})
def test_wait_for_status_success_default_expected_status(self, mock_tool):
    """poke() passes a single expected status to the checker as a set."""
    op = GCPTransferServiceWaitForJobStatusSensor(
        task_id='task-id',
        job_name='job-name',
        project_id='project-id',
        expected_statuses=GcpTransferOperationStatus.SUCCESS,
    )
    ti = mock.Mock(**{'xcom_push.return_value': None})

    result = op.poke({'ti': ti})

    mock_tool.operations_contain_expected_statuses.assert_called_once_with(
        operations=mock.ANY,
        expected_statuses={GcpTransferOperationStatus.SUCCESS})
    self.assertTrue(result)
def test_valid_source_code_union_field(self, source_code, project_id, mock_hook):
    """Deploy succeeds for each valid member of the source-code union; when
    the source is a local zip, the zip is uploaded before deployment."""
    mock_hook.return_value.upload_function_zip.return_value = 'https://uploadUrl'
    # IDIOM FIX: assign the exception instance directly — a mock raises a
    # side_effect exception on call. The original wrapped it in an extra
    # Mock(side_effect=...), which is equivalent but needlessly indirect.
    mock_hook.return_value.get_function.side_effect = HttpError(
        resp=MOCK_RESP_404, content=b'not found')
    mock_hook.return_value.create_new_function.return_value = True

    body = deepcopy(VALID_BODY)
    # Clear every source union member, then apply the one under test.
    body.pop('sourceUploadUrl', None)
    body.pop('sourceArchiveUrl', None)
    body.pop('sourceRepository', None)
    body.pop('sourceRepositoryUrl', None)
    zip_path = source_code.pop('zip_path', None)
    body.update(source_code)

    if project_id:
        op = GcfFunctionDeployOperator(
            project_id="test_project_id",
            location="test_region",
            body=body,
            task_id="id",
            zip_path=zip_path
        )
    else:
        op = GcfFunctionDeployOperator(
            location="test_region",
            body=body,
            task_id="id",
            zip_path=zip_path
        )
    op.execute(None)

    mock_hook.assert_called_once_with(api_version='v1',
                                      gcp_conn_id='google_cloud_default')
    if zip_path:
        mock_hook.return_value.upload_function_zip.assert_called_once_with(
            project_id=project_id,
            location='test_region',
            zip_path='/path/to/file'
        )
    mock_hook.return_value.get_function.assert_called_once_with(
        'projects/test_project_id/locations/test_region/functions/helloWorld'
    )
    mock_hook.return_value.create_new_function.assert_called_once_with(
        project_id=project_id,
        location='test_region',
        body=body
    )
    mock_hook.reset_mock()
def test_instance_create_missing_project_id(self, mock_hook,
                                            _check_if_instance_exists):
    """Without an explicit project_id the operator passes project_id=None
    through to the hook, which resolves it from the connection."""
    _check_if_instance_exists.return_value = False
    mock_hook.return_value.create_instance.return_value = True

    op = CloudSqlInstanceCreateOperator(
        instance=INSTANCE_NAME,
        body=CREATE_BODY,
        task_id="id"
    )
    result = op.execute(context={'task_instance': mock.Mock()})

    mock_hook.assert_called_once_with(api_version="v1beta4",
                                      gcp_conn_id="google_cloud_default")
    mock_hook.return_value.create_instance.assert_called_once_with(
        project_id=None,
        body=CREATE_BODY
    )
    self.assertIsNone(result)
def setUp(self):
    """Build one attached and one unattached NetworkInterface fixture."""
    attachment = Attachment()
    attachment.id = 'eni-attach-1'
    attachment.instance_id = 10
    attachment.status = "some status"
    attachment.device_index = 100
    self.attachment = attachment

    # ENI with an attachment.
    eni = NetworkInterface()
    eni.id = 'eni-1'
    eni.status = "one_status"
    eni.attachment = attachment
    self.eni_one = eni

    # ENI without an attachment but with a mocked connection.
    other = NetworkInterface()
    other.connection = mock.Mock()
    other.id = 'eni-2'
    other.status = "two_status"
    other.attachment = None
    self.eni_two = other
def test_check_success_tasks_raises_pending(self):
    """A RUNNABLE job is still pending, so _check_success_task must raise."""
    client_mock = mock.Mock()
    client_mock.describe_jobs.return_value = {
        "jobs": [{"jobId": JOB_ID, "status": "RUNNABLE"}]
    }
    self.batch.jobId = JOB_ID
    self.batch.client = client_mock

    with self.assertRaises(Exception) as e:
        self.batch._check_success_task()

    # Ordering of str(dict) is not guaranteed.
    self.assertIn("Job ({}) is still pending".format(JOB_ID), str(e.exception))
def test_valid_object(self, mock_parent_init):
    """google_cloud_to_local downloads the object and returns a local path
    containing the object's basename."""
    file_name = 'gs://test-bucket/path/to/obj.jar'
    mock_parent_init.return_value = None
    helper = GoogleCloudBucketHelper()
    helper._gcs_hook = mock.Mock()

    # pylint:disable=redefined-builtin,unused-argument
    def _mock_download(bucket, object, filename=None):
        # Write fake content to the destination the helper chose.
        contents = 'text file contents'
        with open(filename, 'w') as handle:
            handle.write(contents)
        return contents

    helper._gcs_hook.download.side_effect = _mock_download
    local_file = helper.google_cloud_to_local(file_name)

    self.assertIn('obj.jar', local_file)
def test_alipay_trade_refund(self, mock_urlopen):
    """A face-to-face refund request is sent to the Alipay server, and
    Chinese text in the payload is handled correctly."""
    alipay = self.get_client("RSA")

    # Stub out urlopen so no real network traffic happens.
    response = mock.Mock()
    response.read.return_value = self._prepare_refund_face_to_face_response(
        alipay)
    mock_urlopen.return_value = response

    alipay.api_alipay_trade_refund(
        out_trade_no="test_ouit_trade_no",
        refund_amount=0.01,
        refund_reason="中文测试",
    )

    self.assertTrue(mock_urlopen.called)
def test_upload_errors_have_exception_messages(self):
    """A failed part upload queues a result that carries the exception
    message."""
    api = mock.Mock()
    api.upload_part.side_effect = Exception("exception message")
    jobs = Queue()
    results = Queue()
    worker = UploadWorkerThread(
        api, 'vault_name', self.filename, 'upload_id',
        jobs, results, num_retries=1, time_between_retries=0)

    jobs.put((0, 1024))
    jobs.put(_END_SENTINEL)
    worker.run()

    outcome = results.get(timeout=1)
    self.assertIn("exception message", str(outcome))