def testSuccessWithURI(self):
    """Batch prediction with a model URI: a 404 from get_job makes the
    operator create a new job, and execute() returns its predictionOutput."""
    with patch('airflow.contrib.operators.mlengine_operator.MLEngineHook') \
            as mock_hook:
        input_with_uri = self.INPUT_MISSING_ORIGIN.copy()
        input_with_uri['uri'] = 'gs://my_bucket/my_models/savedModel'
        success_message = self.SUCCESS_MESSAGE_MISSING_INPUT.copy()
        success_message['predictionInput'] = input_with_uri

        hook_instance = mock_hook.return_value
        # get_job 404s so the operator takes the create path.
        hook_instance.get_job.side_effect = HttpError(
            resp=httplib2.Response({
                'status': 404
            }), content=b'some bytes')
        hook_instance.create_job.return_value = success_message

        prediction_task = MLEngineBatchPredictionOperator(
            job_id='test_prediction',
            project_id='test-project',
            region=input_with_uri['region'],
            data_format=input_with_uri['dataFormat'],
            input_paths=input_with_uri['inputPaths'],
            output_path=input_with_uri['outputPath'],
            uri=input_with_uri['uri'],
            dag=self.dag,
            task_id='test-prediction')
        prediction_output = prediction_task.execute(None)

        mock_hook.assert_called_with('google_cloud_default', None)
        hook_instance.create_job.assert_called_with(
            'test-project', {
                'jobId': 'test_prediction',
                'predictionInput': input_with_uri
            }, ANY)
        self.assertEqual(success_message['predictionOutput'],
                         prediction_output)
def test_cancel_mlengine_job_completed_job(self, mock_get_conn):
    """cancel_job on an already-completed job must not raise; the hook is
    expected to swallow the 400 'Job already completed' error."""
    project_id = "test-project"
    job_id = 'test-job-id'
    job_path = 'projects/{}/jobs/{}'.format(project_id, job_id)
    job_cancelled = {}

    error_job_already_completed = HttpError(
        resp=mock.MagicMock(status=400), content=b'Job already completed')

    # NOTE(review): side_effect takes precedence over return_value on a
    # Mock, so execute() always raises here and job_cancelled is never
    # actually returned by the mock — confirm this is the intended setup.
    # fmt: off
    (mock_get_conn.return_value.projects.return_value.jobs.return_value.
     cancel.return_value.execute.side_effect) = error_job_already_completed
    (mock_get_conn.return_value.projects.return_value.jobs.return_value.
     cancel.return_value.execute.return_value) = job_cancelled
    # fmt: on

    cancel_job_response = self.hook.cancel_job(job_id=job_id,
                                               project_id=project_id)

    self.assertEqual(cancel_job_response, job_cancelled)
    mock_get_conn.assert_has_calls(
        [
            mock.call().projects().jobs().cancel(name=job_path),
        ],
        any_order=True,
    )
def test_create_mlengine_job_check_existing_job_failed(
        self, mock_get_conn):
    """create_job must re-raise the 409 when the pre-existing job's input
    does not satisfy use_existing_job_fn."""
    project_id = 'test-project'
    job_id = 'test-job-id'
    my_job = {
        'jobId': job_id,
        'foo': 4815162342,
        'state': 'SUCCEEDED',
        'someInput': {
            'input': 'someInput'
        }
    }
    # Same jobId, but a different input payload, so the existing job is
    # rejected by check_input below.
    different_job = {
        'jobId': job_id,
        'foo': 4815162342,
        'state': 'SUCCEEDED',
        'someInput': {
            'input': 'someDifferentInput'
        }
    }

    error_job_exists = HttpError(resp=mock.MagicMock(status=409),
                                 content=b'Job already exists')

    # create() raises 409 (job exists); the follow-up get() returns the
    # conflicting job.
    (mock_get_conn.return_value.projects.return_value.jobs.return_value.
     create.return_value.execute.side_effect) = error_job_exists
    (mock_get_conn.return_value.projects.return_value.jobs.return_value.
     get.return_value.execute.return_value) = different_job

    def check_input(existing_job):
        # Accept the existing job only if its input matches ours.
        return existing_job.get('someInput', None) == \
            my_job['someInput']

    with self.assertRaises(HttpError):
        self.hook.create_job(project_id=project_id,
                             job=my_job,
                             use_existing_job_fn=check_input)
def test_google_upload_apk_does_not_error_out_when_apk_is_already_published( edit_resource_mock, reason, expectation): content = { 'error': { 'errors': [{ 'reason': reason }], }, } # XXX content must be bytes # https://github.com/googleapis/google-api-python-client/blob/ffea1a7fe9d381d23ab59048263c631cc2b45323/googleapiclient/errors.py#L41 content_bytes = json.dumps(content).encode('ascii') edit_resource_mock.apks().upload().execute.side_effect = HttpError( # XXX status is presented as a string by googleapiclient resp={'status': '403'}, content=content_bytes, ) google_play = GooglePlayEdit(edit_resource_mock, 1, 'dummy_package_name') with expectation: apk_mock = Mock() apk_mock.name = '/path/to/dummy.apk' google_play.upload_apk(apk_mock)
def test_clean_dataset_exceptions(self, mock_bq_utils, mock_wait_on_jobs,
                                  mock_job_status_errored,
                                  mock_format_failure_message):
    """clean_dataset formats a failure per statement when BigQuery errors,
    and raises BigQueryJobWaitError when a job never completes."""
    # Test the case where BigQuery throws an error
    # NOTE(review): HttpError normally takes a response object with a
    # .status attribute and bytes content; this relies on clean_dataset
    # only catching, not inspecting, the error — confirm.
    mock_bq_utils.side_effect = HttpError(
        mock.Mock(return_value={'status': 404}), self.exception_statement_one)
    clean_cdr_engine.clean_dataset(self.project, self.statements)
    self.assertEqual(mock_wait_on_jobs.call_count, 0)
    self.assertEqual(mock_job_status_errored.call_count, 0)
    # One failure message per statement.
    self.assertEqual(mock_format_failure_message.call_count, 2)

    # Test the case where there is an incomplete job
    mock_bq_utils.reset_mock()
    mock_format_failure_message.reset_mock()
    mock_bq_utils.side_effect = [self.job_results_success]
    mock_wait_on_jobs.return_value = [self.job_id_success]
    with self.assertRaises(bq_utils.BigQueryJobWaitError):
        clean_cdr_engine.clean_dataset(self.project, self.statements)
def test_success_with_version(self, mock_hook):
    """Batch prediction succeeds when model_name/version_name are derived
    from a fully-qualified versionName resource path."""
    input_with_version = self.INPUT_MISSING_ORIGIN.copy()
    input_with_version['versionName'] = \
        'projects/test-project/models/test_model/versions/test_version'
    success_message = self.SUCCESS_MESSAGE_MISSING_INPUT.copy()
    success_message['predictionInput'] = input_with_version

    hook_instance = mock_hook.return_value
    # get_job 404s so the operator creates a fresh job.
    hook_instance.get_job.side_effect = HttpError(resp=httplib2.Response(
        {'status': 404}), content=b'some bytes')
    hook_instance.create_job.return_value = success_message

    prediction_task = MLEngineStartBatchPredictionJobOperator(
        job_id='test_prediction',
        project_id='test-project',
        region=input_with_version['region'],
        data_format=input_with_version['dataFormat'],
        input_paths=input_with_version['inputPaths'],
        output_path=input_with_version['outputPath'],
        # versionName is .../models/<model>/versions/<version>.
        model_name=input_with_version['versionName'].split('/')[-3],
        version_name=input_with_version['versionName'].split('/')[-1],
        dag=self.dag,
        task_id='test-prediction')
    prediction_output = prediction_task.execute(None)

    mock_hook.assert_called_once_with('google_cloud_default', None)
    hook_instance.create_job.assert_called_once_with(
        project_id='test-project',
        job={
            'jobId': 'test_prediction',
            'predictionInput': input_with_version
        },
        use_existing_job_fn=ANY)
    self.assertEqual(success_message['predictionOutput'],
                     prediction_output)
def test_cancel_mlengine_job_nonexistent_job(self, mock_get_conn):
    """Cancelling a job that does not exist must propagate the 404."""
    project_id = "test-project"
    job_id = 'test-job-id'
    job_cancelled = {}

    error_job_does_not_exist = HttpError(resp=mock.MagicMock(status=404),
                                         content=b'Job does not exist')

    # Bind the deeply nested execute mock once instead of repeating the
    # attribute chain for each assignment.
    execute_mock = (mock_get_conn.return_value.
                    projects.return_value.
                    jobs.return_value.
                    cancel.return_value.
                    execute)
    execute_mock.side_effect = error_job_does_not_exist
    execute_mock.return_value = job_cancelled

    with self.assertRaises(HttpError):
        self.hook.cancel_job(job_id=job_id, project_id=project_id)
def test_http_error(self, mock_hook):
    """A 403 HttpError from cancel_job must propagate out of execute()."""
    http_error_code = 403
    hook_instance = mock_hook.return_value
    hook_instance.cancel_job.side_effect = HttpError(
        resp=httplib2.Response({
            'status': http_error_code
        }), content=b'Forbidden')

    with self.assertRaises(HttpError) as context:
        cancel_training_op = MLEngineTrainingCancelJobOperator(
            **self.TRAINING_DEFAULT_ARGS)
        cancel_training_op.execute(None)

    mock_hook.assert_called_once_with(
        gcp_conn_id='google_cloud_default',
        delegate_to=None,
        impersonation_chain=None,
    )
    # Make sure only 'create_job' is invoked on hook instance
    self.assertEqual(len(hook_instance.mock_calls), 1)
    hook_instance.cancel_job.assert_called_once_with(
        project_id=self.TRAINING_DEFAULT_ARGS['project_id'],
        job_id=self.TRAINING_DEFAULT_ARGS['job_id'])
    self.assertEqual(http_error_code, context.exception.resp.status)
def test_success_with_uri(self, mock_hook):
    """Batch prediction with a saved-model URI: 404 on get_job triggers
    create_job, and execute() returns its predictionOutput."""
    input_with_uri = self.INPUT_MISSING_ORIGIN.copy()
    input_with_uri['uri'] = 'gs://my_bucket/my_models/savedModel'
    success_message = self.SUCCESS_MESSAGE_MISSING_INPUT.copy()
    success_message['predictionInput'] = input_with_uri

    hook_instance = mock_hook.return_value
    # get_job 404s so the operator takes the create path.
    hook_instance.get_job.side_effect = HttpError(
        resp=httplib2.Response({
            'status': 404
        }), content=b'some bytes')
    hook_instance.create_job.return_value = success_message

    prediction_task = MLEngineStartBatchPredictionJobOperator(
        job_id='test_prediction',
        project_id='test-project',
        region=input_with_uri['region'],
        data_format=input_with_uri['dataFormat'],
        input_paths=input_with_uri['inputPaths'],
        output_path=input_with_uri['outputPath'],
        uri=input_with_uri['uri'],
        dag=self.dag,
        task_id='test-prediction')
    prediction_output = prediction_task.execute(None)

    mock_hook.assert_called_once_with('google_cloud_default', None,
                                      impersonation_chain=None,)
    hook_instance.create_job.assert_called_once_with(
        project_id='test-project',
        job={
            'jobId': 'test_prediction',
            'predictionInput': input_with_uri
        },
        use_existing_job_fn=ANY
    )
    self.assertEqual(success_message['predictionOutput'],
                     prediction_output)
def test_successful_copy_template(self, mock_hook):
    """Copy succeeds: the first GET 404s (target template absent), then
    the source is fetched, inserted under the new name, and re-fetched."""
    mock_hook.return_value.get_instance_template.side_effect = [
        HttpError(resp=httplib2.Response({'status': 404}),
                  content=EMPTY_CONTENT),
        GCE_INSTANCE_TEMPLATE_BODY_GET,
        GCE_INSTANCE_TEMPLATE_BODY_GET_NEW,
    ]
    op = ComputeEngineCopyInstanceTemplateOperator(
        project_id=GCP_PROJECT_ID,
        resource_id=GCE_INSTANCE_TEMPLATE_NAME,
        task_id='id',
        body_patch={"name": GCE_INSTANCE_TEMPLATE_NEW_NAME},
    )
    result = op.execute(None)
    mock_hook.assert_called_once_with(
        api_version='v1',
        gcp_conn_id='google_cloud_default',
        impersonation_chain=None,
    )
    mock_hook.return_value.insert_instance_template.assert_called_once_with(
        project_id=GCP_PROJECT_ID,
        body=GCE_INSTANCE_TEMPLATE_BODY_INSERT,
        request_id=None)
    self.assertEqual(GCE_INSTANCE_TEMPLATE_BODY_GET_NEW, result)
def test_valid_trigger_union_field(self, trigger, mock_hook):
    """Deploy succeeds with exactly one trigger variant (parameterized)
    in the body's trigger union field."""
    mock_hook.return_value.upload_function_zip.return_value = 'https://uploadUrl'
    # get_function 404s, so the operator takes the create path.
    mock_hook.return_value.get_function.side_effect = mock.Mock(
        side_effect=HttpError(resp=MOCK_RESP_404, content=b'not found'))
    mock_hook.return_value.create_new_function.return_value = True
    body = deepcopy(VALID_BODY)
    # Strip both union members, then install the parameterized trigger.
    body.pop('httpsTrigger', None)
    body.pop('eventTrigger', None)
    body.update(trigger)
    op = GcfFunctionDeployOperator(
        project_id="test_project_id",
        location="test_region",
        body=body,
        task_id="id",
    )
    op.execute(None)
    mock_hook.assert_called_once_with(api_version='v1',
                                      gcp_conn_id='google_cloud_default')
    mock_hook.return_value.get_function.assert_called_once_with(
        'projects/test_project_id/locations/test_region/functions/helloWorld'
    )
    mock_hook.return_value.create_new_function.assert_called_once_with(
        project_id='test_project_id', location='test_region', body=body)
    mock_hook.reset_mock()
class TestCatchHttpException(unittest.TestCase):
    """Exercise GoogleCloudBaseHook.catch_http_exception's mapping of raised
    exceptions onto the types callers are expected to see."""

    # pylint:disable=no-method-argument,unused-argument
    @parameterized.expand([
        # (name, exception raised inside, hook base class, ctor args,
        #  exception type expected from the decorated call — None = none)
        ("no_exception", None, LoggingMixin, None, None),
        ("raise_airflowexception", MovedPermanently("MESSAGE"), LoggingMixin,
         None, AirflowException),
        ("raise_airflowexception",
         RetryError("MESSAGE",
                    cause=Exception("MESSAGE")), LoggingMixin, None,
         AirflowException),
        ("raise_airflowexception", ValueError("MESSAGE"), LoggingMixin, None,
         AirflowException),
        ("raise_alreadyexists", AlreadyExists("MESSAGE"), LoggingMixin, None,
         AlreadyExists),
        ("raise_http_error",
         HttpError(mock.Mock(**{"reason.return_value": None}), b"CONTENT"),
         BaseHook, {
             "source": None
         }, AirflowException),
    ])
    def test_catch_exception(self, name, exception, base_class,
                             base_class_args, assert_raised):
        self.called = False  # pylint:disable=attribute-defined-outside-init

        class FixtureClass(base_class):
            # Decorated fixture: records the call, then raises the
            # parameterized exception (if any) for the decorator to map.
            @hook.GoogleCloudBaseHook.catch_http_exception
            def test_fixture(*args, **kwargs):  # pylint:disable=unused-argument,no-method-argument
                self.called = True  # pylint:disable=attribute-defined-outside-init
                if exception is not None:
                    raise exception

        if assert_raised is None:
            FixtureClass(base_class_args).test_fixture()
        else:
            with self.assertRaises(assert_raised):
                FixtureClass(base_class_args).test_fixture()

        # The wrapped body must have run in every case.
        self.assertTrue(self.called)
def _build(self, key):
    """
    Collect the keyword arguments needed to construct a Resource object.

    _build shortcuts the googleapiclient.discovery functions build() and
    build_from_document(), which construct the Resource class. See
    googleapiclient.discovery.Resource for more information.
    """
    sheets_root = 'https://sheets.googleapis.com/'
    discovery_url = sheets_root + '$discovery/rest?version=v4'

    # Authorize an HTTP client from the service-account key file.
    credentials = ServiceAccountCredentials.from_json_keyfile_name(
        key, ['https://spreadsheets.google.com/feeds'])
    authed_http = credentials.authorize(Http())

    # Fetch the discovery document describing the Sheets v4 API.
    response, body = authed_http.request(discovery_url)
    if response.status >= 400:
        raise HttpError(response, body, uri=discovery_url)
    service_doc = json.loads(body)

    # Wire-format model derived from the document's feature flags.
    wire_model = JsonModel('dataWrapper' in service_doc.get('features', []))

    return {
        'http': authed_http,
        'baseUrl': sheets_root,
        'model': wire_model,
        'developerKey': None,
        'requestBuilder': HttpRequest,
        'resourceDesc': service_doc,
        'rootDesc': service_doc,
        'schema': Schemas(service_doc),
    }
def _obtain_accounts(service) -> List[str]: """Get a list of all Google Analytics accounts for this user Args: service: A service that is connected to the specified API. Returns: a list of all account_ids obtained """ try: query = service.management().accounts().list() response = query.execute() except TypeError as error: # Handle errors in constructing a query. logging.error( f"There was an error in constructing your query : {error}") raise TypeError(error) except HttpError as error: # Handle API errors. logging.error("Arg, there was an API error : {} : {}".format( error.resp.status, error._get_reason())) raise HttpError(error) except RefreshError: # Handle Auth errors. message = ( "The credentials have been revoked or expired, please re-run " "the application to re-authorize") logging.error(message) raise RefreshError(message) except Exception as e: # Handle all other errors. raise Exception(e) else: if response.get("items"): # Get all Google Analytics account. return [i["id"] for i in response["items"]] logging.warning("no Google Analytics accounts detected\n" * 5) return []
def testGetMonorailIssueForIssueIdHttpError(self, mocked_issue_tracker_api):
    """An HttpError from the tracker API yields None instead of raising."""
    api_instance = mocked_issue_tracker_api.return_value
    api_instance.getIssue.side_effect = HttpError(mock.Mock(), 'error')

    issue = monorail_util.GetMonorailIssueForIssueId(12345, 'chromium')
    self.assertIsNone(issue)
class TestPubSubHook(unittest.TestCase):
    """Unit tests for PubSubHook with the Google Pub/Sub clients mocked."""

    def setUp(self):
        # Patch the base hook __init__ so no real credentials are resolved.
        with mock.patch(BASE_STRING.format('GoogleBaseHook.__init__'),
                        new=mock_init):
            self.pubsub_hook = PubSubHook(gcp_conn_id='test')

    def _generate_messages(self, count) -> List[ReceivedMessage]:
        # Build `count` ReceivedMessage protos with ack ids "1".."count".
        return [
            ParseDict(
                {
                    "ack_id": str(i),
                    "message": {
                        "data": f'Message {i}'.encode('utf8'),
                        "attributes": {
                            "type": "generated message"
                        },
                    },
                },
                ReceivedMessage(),
            ) for i in range(1, count + 1)
        ]

    @mock.patch(
        "airflow.providers.google.cloud.hooks.pubsub.PubSubHook.client_info",
        new_callable=mock.PropertyMock)
    @mock.patch(
        "airflow.providers.google.cloud.hooks.pubsub.PubSubHook._get_credentials"
    )
    @mock.patch("airflow.providers.google.cloud.hooks.pubsub.PublisherClient")
    def test_publisher_client_creation(self, mock_client, mock_get_creds,
                                       mock_client_info):
        self.assertIsNone(self.pubsub_hook._client)
        result = self.pubsub_hook.get_conn()
        mock_client.assert_called_once_with(
            credentials=mock_get_creds.return_value,
            client_info=mock_client_info.return_value)
        self.assertEqual(mock_client.return_value, result)
        # The publisher client is cached on the hook.
        self.assertEqual(self.pubsub_hook._client, result)

    @mock.patch(
        "airflow.providers.google.cloud.hooks.pubsub.PubSubHook.client_info",
        new_callable=mock.PropertyMock)
    @mock.patch(
        "airflow.providers.google.cloud.hooks.pubsub.PubSubHook._get_credentials"
    )
    @mock.patch("airflow.providers.google.cloud.hooks.pubsub.SubscriberClient")
    def test_subscriber_client_creation(self, mock_client, mock_get_creds,
                                        mock_client_info):
        self.assertIsNone(self.pubsub_hook._client)
        result = self.pubsub_hook.subscriber_client
        mock_client.assert_called_once_with(
            credentials=mock_get_creds.return_value,
            client_info=mock_client_info.return_value)
        self.assertEqual(mock_client.return_value, result)

    @mock.patch(PUBSUB_STRING.format('PubSubHook.get_conn'))
    def test_create_nonexistent_topic(self, mock_service):
        create_method = mock_service.return_value.create_topic
        self.pubsub_hook.create_topic(project_id=TEST_PROJECT,
                                      topic=TEST_TOPIC)
        create_method.assert_called_once_with(name=EXPANDED_TOPIC,
                                              labels=LABELS,
                                              message_storage_policy=None,
                                              kms_key_name=None,
                                              retry=None,
                                              timeout=None,
                                              metadata=None)

    @mock.patch(PUBSUB_STRING.format('PubSubHook.get_conn'))
    def test_delete_topic(self, mock_service):
        delete_method = mock_service.return_value.delete_topic
        self.pubsub_hook.delete_topic(project_id=TEST_PROJECT,
                                      topic=TEST_TOPIC)
        delete_method.assert_called_once_with(topic=EXPANDED_TOPIC,
                                              retry=None,
                                              timeout=None,
                                              metadata=None)

    @mock.patch(PUBSUB_STRING.format('PubSubHook.get_conn'))
    def test_delete_nonexisting_topic_failifnotexists(self, mock_service):
        mock_service.return_value.delete_topic.side_effect = NotFound(
            'Topic does not exists: %s' % EXPANDED_TOPIC)
        with self.assertRaises(PubSubException) as e:
            self.pubsub_hook.delete_topic(project_id=TEST_PROJECT,
                                          topic=TEST_TOPIC,
                                          fail_if_not_exists=True)
        self.assertEqual(str(e.exception),
                         'Topic does not exist: %s' % EXPANDED_TOPIC)

    @mock.patch(PUBSUB_STRING.format('PubSubHook.get_conn'))
    def test_delete_topic_api_call_error(self, mock_service):
        mock_service.return_value.delete_topic.side_effect = GoogleAPICallError(
            'Error deleting topic: %s' % EXPANDED_TOPIC)
        with self.assertRaises(PubSubException):
            self.pubsub_hook.delete_topic(project_id=TEST_PROJECT,
                                          topic=TEST_TOPIC,
                                          fail_if_not_exists=True)

    @mock.patch(PUBSUB_STRING.format('PubSubHook.get_conn'))
    def test_create_preexisting_topic_failifexists(self, mock_service):
        mock_service.return_value.create_topic.side_effect = AlreadyExists(
            'Topic already exists: %s' % TEST_TOPIC)
        with self.assertRaises(PubSubException) as e:
            self.pubsub_hook.create_topic(project_id=TEST_PROJECT,
                                          topic=TEST_TOPIC,
                                          fail_if_exists=True)
        self.assertEqual(str(e.exception),
                         'Topic already exists: %s' % TEST_TOPIC)

    @mock.patch(PUBSUB_STRING.format('PubSubHook.get_conn'))
    def test_create_preexisting_topic_nofailifexists(self, mock_service):
        # Without fail_if_exists the AlreadyExists error is swallowed.
        mock_service.return_value.create_topic.side_effect = AlreadyExists(
            'Topic already exists: %s' % EXPANDED_TOPIC)
        self.pubsub_hook.create_topic(project_id=TEST_PROJECT,
                                      topic=TEST_TOPIC)

    @mock.patch(PUBSUB_STRING.format('PubSubHook.get_conn'))
    def test_create_topic_api_call_error(self, mock_service):
        mock_service.return_value.create_topic.side_effect = GoogleAPICallError(
            'Error creating topic: %s' % TEST_TOPIC)
        with self.assertRaises(PubSubException):
            self.pubsub_hook.create_topic(project_id=TEST_PROJECT,
                                          topic=TEST_TOPIC,
                                          fail_if_exists=True)

    @mock.patch(PUBSUB_STRING.format('PubSubHook.subscriber_client'))
    def test_create_nonexistent_subscription(self, mock_service):
        create_method = mock_service.create_subscription
        response = self.pubsub_hook.create_subscription(
            project_id=TEST_PROJECT,
            topic=TEST_TOPIC,
            subscription=TEST_SUBSCRIPTION)
        create_method.assert_called_once_with(
            name=EXPANDED_SUBSCRIPTION,
            topic=EXPANDED_TOPIC,
            push_config=None,
            ack_deadline_seconds=10,
            retain_acked_messages=None,
            message_retention_duration=None,
            labels=LABELS,
            enable_message_ordering=False,
            expiration_policy=None,
            filter_=None,
            dead_letter_policy=None,
            retry_policy=None,
            retry=None,
            timeout=None,
            metadata=None,
        )
        self.assertEqual(TEST_SUBSCRIPTION, response)

    @mock.patch(PUBSUB_STRING.format('PubSubHook.subscriber_client'))
    def test_create_subscription_different_project_topic(self, mock_service):
        create_method = mock_service.create_subscription
        response = self.pubsub_hook.create_subscription(
            project_id=TEST_PROJECT,
            topic=TEST_TOPIC,
            subscription=TEST_SUBSCRIPTION,
            subscription_project_id='a-different-project')
        # The subscription lives in the override project, not TEST_PROJECT.
        expected_subscription = 'projects/{}/subscriptions/{}'.format(
            'a-different-project', TEST_SUBSCRIPTION)
        create_method.assert_called_once_with(
            name=expected_subscription,
            topic=EXPANDED_TOPIC,
            push_config=None,
            ack_deadline_seconds=10,
            retain_acked_messages=None,
            message_retention_duration=None,
            labels=LABELS,
            enable_message_ordering=False,
            expiration_policy=None,
            filter_=None,
            dead_letter_policy=None,
            retry_policy=None,
            retry=None,
            timeout=None,
            metadata=None,
        )
        self.assertEqual(TEST_SUBSCRIPTION, response)

    @mock.patch(PUBSUB_STRING.format('PubSubHook.subscriber_client'))
    def test_delete_subscription(self, mock_service):
        self.pubsub_hook.delete_subscription(project_id=TEST_PROJECT,
                                             subscription=TEST_SUBSCRIPTION)
        delete_method = mock_service.delete_subscription
        delete_method.assert_called_once_with(
            subscription=EXPANDED_SUBSCRIPTION,
            retry=None,
            timeout=None,
            metadata=None)

    @mock.patch(PUBSUB_STRING.format('PubSubHook.subscriber_client'))
    def test_delete_nonexisting_subscription_failifnotexists(
            self, mock_service):
        mock_service.delete_subscription.side_effect = NotFound(
            'Subscription does not exists: %s' % EXPANDED_SUBSCRIPTION)
        with self.assertRaises(PubSubException) as e:
            self.pubsub_hook.delete_subscription(
                project_id=TEST_PROJECT,
                subscription=TEST_SUBSCRIPTION,
                fail_if_not_exists=True)
        self.assertEqual(
            str(e.exception),
            'Subscription does not exist: %s' % EXPANDED_SUBSCRIPTION)

    @mock.patch(PUBSUB_STRING.format('PubSubHook.subscriber_client'))
    def test_delete_subscription_api_call_error(self, mock_service):
        mock_service.delete_subscription.side_effect = GoogleAPICallError(
            'Error deleting subscription %s' % EXPANDED_SUBSCRIPTION)
        with self.assertRaises(PubSubException):
            self.pubsub_hook.delete_subscription(
                project_id=TEST_PROJECT,
                subscription=TEST_SUBSCRIPTION,
                fail_if_not_exists=True)

    @mock.patch(PUBSUB_STRING.format('PubSubHook.subscriber_client'))
    @mock.patch(PUBSUB_STRING.format('uuid4'),
                new_callable=mock.Mock(return_value=lambda: TEST_UUID))
    def test_create_subscription_without_subscription_name(
            self, mock_uuid, mock_service):  # noqa  # pylint: disable=unused-argument,line-too-long
        create_method = mock_service.create_subscription
        # Without an explicit name the hook generates "sub-<uuid>".
        expected_name = EXPANDED_SUBSCRIPTION.replace(
            TEST_SUBSCRIPTION, 'sub-%s' % TEST_UUID)
        response = self.pubsub_hook.create_subscription(
            project_id=TEST_PROJECT, topic=TEST_TOPIC)
        create_method.assert_called_once_with(
            name=expected_name,
            topic=EXPANDED_TOPIC,
            push_config=None,
            ack_deadline_seconds=10,
            retain_acked_messages=None,
            message_retention_duration=None,
            labels=LABELS,
            enable_message_ordering=False,
            expiration_policy=None,
            filter_=None,
            dead_letter_policy=None,
            retry_policy=None,
            retry=None,
            timeout=None,
            metadata=None,
        )
        self.assertEqual('sub-%s' % TEST_UUID, response)

    @mock.patch(PUBSUB_STRING.format('PubSubHook.subscriber_client'))
    def test_create_subscription_with_ack_deadline(self, mock_service):
        create_method = mock_service.create_subscription
        response = self.pubsub_hook.create_subscription(
            project_id=TEST_PROJECT,
            topic=TEST_TOPIC,
            subscription=TEST_SUBSCRIPTION,
            ack_deadline_secs=30)
        create_method.assert_called_once_with(
            name=EXPANDED_SUBSCRIPTION,
            topic=EXPANDED_TOPIC,
            push_config=None,
            ack_deadline_seconds=30,
            retain_acked_messages=None,
            message_retention_duration=None,
            labels=LABELS,
            enable_message_ordering=False,
            expiration_policy=None,
            filter_=None,
            dead_letter_policy=None,
            retry_policy=None,
            retry=None,
            timeout=None,
            metadata=None,
        )
        self.assertEqual(TEST_SUBSCRIPTION, response)

    @mock.patch(PUBSUB_STRING.format('PubSubHook.subscriber_client'))
    def test_create_subscription_with_filter(self, mock_service):
        create_method = mock_service.create_subscription
        response = self.pubsub_hook.create_subscription(
            project_id=TEST_PROJECT,
            topic=TEST_TOPIC,
            subscription=TEST_SUBSCRIPTION,
            filter_='attributes.domain="com"')
        create_method.assert_called_once_with(
            name=EXPANDED_SUBSCRIPTION,
            topic=EXPANDED_TOPIC,
            push_config=None,
            ack_deadline_seconds=10,
            retain_acked_messages=None,
            message_retention_duration=None,
            labels=LABELS,
            enable_message_ordering=False,
            expiration_policy=None,
            filter_='attributes.domain="com"',
            dead_letter_policy=None,
            retry_policy=None,
            retry=None,
            timeout=None,
            metadata=None,
        )
        self.assertEqual(TEST_SUBSCRIPTION, response)

    @mock.patch(PUBSUB_STRING.format('PubSubHook.subscriber_client'))
    def test_create_subscription_failifexists(self, mock_service):
        mock_service.create_subscription.side_effect = AlreadyExists(
            'Subscription already exists: %s' % EXPANDED_SUBSCRIPTION)
        with self.assertRaises(PubSubException) as e:
            self.pubsub_hook.create_subscription(
                project_id=TEST_PROJECT,
                topic=TEST_TOPIC,
                subscription=TEST_SUBSCRIPTION,
                fail_if_exists=True)
        self.assertEqual(
            str(e.exception),
            'Subscription already exists: %s' % EXPANDED_SUBSCRIPTION)

    @mock.patch(PUBSUB_STRING.format('PubSubHook.subscriber_client'))
    def test_create_subscription_api_call_error(self, mock_service):
        mock_service.create_subscription.side_effect = GoogleAPICallError(
            'Error creating subscription %s' % EXPANDED_SUBSCRIPTION)
        with self.assertRaises(PubSubException):
            self.pubsub_hook.create_subscription(
                project_id=TEST_PROJECT,
                topic=TEST_TOPIC,
                subscription=TEST_SUBSCRIPTION,
                fail_if_exists=True)

    @mock.patch(PUBSUB_STRING.format('PubSubHook.subscriber_client'))
    def test_create_subscription_nofailifexists(self, mock_service):
        # AlreadyExists is swallowed and the requested name returned.
        mock_service.create_subscription.side_effect = AlreadyExists(
            'Subscription already exists: %s' % EXPANDED_SUBSCRIPTION)
        response = self.pubsub_hook.create_subscription(
            project_id=TEST_PROJECT,
            topic=TEST_TOPIC,
            subscription=TEST_SUBSCRIPTION)
        self.assertEqual(TEST_SUBSCRIPTION, response)

    @mock.patch(PUBSUB_STRING.format('PubSubHook.get_conn'))
    def test_publish(self, mock_service):
        publish_method = mock_service.return_value.publish
        self.pubsub_hook.publish(project_id=TEST_PROJECT,
                                 topic=TEST_TOPIC,
                                 messages=TEST_MESSAGES)
        calls = [
            mock.call(topic=EXPANDED_TOPIC,
                      data=message.get("data", b''),
                      **message.get('attributes', {}))
            for message in TEST_MESSAGES
        ]
        # NOTE(review): `has_calls` is not a Mock assertion method — it
        # silently creates a child mock and verifies nothing. This was
        # probably meant to be `assert_has_calls(calls)`; confirm the
        # calls actually match before tightening it.
        publish_method.has_calls(calls)

    @mock.patch(PUBSUB_STRING.format('PubSubHook.get_conn'))
    def test_publish_api_call_error(self, mock_service):
        publish_method = mock_service.return_value.publish
        publish_method.side_effect = GoogleAPICallError(
            'Error publishing to topic {}'.format(EXPANDED_SUBSCRIPTION))
        with self.assertRaises(PubSubException):
            self.pubsub_hook.publish(project_id=TEST_PROJECT,
                                     topic=TEST_TOPIC,
                                     messages=TEST_MESSAGES)

    @mock.patch(PUBSUB_STRING.format('PubSubHook.subscriber_client'))
    def test_pull(self, mock_service):
        pull_method = mock_service.pull
        pulled_messages = []
        for i, msg in enumerate(TEST_MESSAGES):
            pulled_messages.append({'ackId': i, 'message': msg})
        pull_method.return_value.received_messages = pulled_messages
        response = self.pubsub_hook.pull(project_id=TEST_PROJECT,
                                         subscription=TEST_SUBSCRIPTION,
                                         max_messages=10)
        pull_method.assert_called_once_with(
            subscription=EXPANDED_SUBSCRIPTION,
            max_messages=10,
            return_immediately=False,
            retry=None,
            timeout=None,
            metadata=None,
        )
        self.assertEqual(pulled_messages, response)

    @mock.patch(PUBSUB_STRING.format('PubSubHook.subscriber_client'))
    def test_pull_no_messages(self, mock_service):
        pull_method = mock_service.pull
        pull_method.return_value.received_messages = []
        response = self.pubsub_hook.pull(project_id=TEST_PROJECT,
                                         subscription=TEST_SUBSCRIPTION,
                                         max_messages=10)
        pull_method.assert_called_once_with(
            subscription=EXPANDED_SUBSCRIPTION,
            max_messages=10,
            return_immediately=False,
            retry=None,
            timeout=None,
            metadata=None,
        )
        self.assertListEqual([], response)

    @parameterized.expand([(exception, ) for exception in [
        HttpError(resp={'status': '404'}, content=EMPTY_CONTENT),
        GoogleAPICallError("API Call Error")
    ]])
    @mock.patch(PUBSUB_STRING.format('PubSubHook.subscriber_client'))
    def test_pull_fails_on_exception(self, exception, mock_service):
        pull_method = mock_service.pull
        pull_method.side_effect = exception
        with self.assertRaises(PubSubException):
            self.pubsub_hook.pull(project_id=TEST_PROJECT,
                                  subscription=TEST_SUBSCRIPTION,
                                  max_messages=10)
            # NOTE(review): unreachable — pull() raises on the previous
            # line, so this assertion never executes; presumably it was
            # meant to sit outside the `with` block.
            pull_method.assert_called_once_with(
                subscription=EXPANDED_SUBSCRIPTION,
                max_messages=10,
                return_immediately=False,
                retry=None,
                timeout=None,
                metadata=None,
            )

    @mock.patch(PUBSUB_STRING.format('PubSubHook.subscriber_client'))
    def test_acknowledge_by_ack_ids(self, mock_service):
        ack_method = mock_service.acknowledge
        self.pubsub_hook.acknowledge(project_id=TEST_PROJECT,
                                     subscription=TEST_SUBSCRIPTION,
                                     ack_ids=['1', '2', '3'])
        ack_method.assert_called_once_with(subscription=EXPANDED_SUBSCRIPTION,
                                           ack_ids=['1', '2', '3'],
                                           retry=None,
                                           timeout=None,
                                           metadata=None)

    @mock.patch(PUBSUB_STRING.format('PubSubHook.subscriber_client'))
    def test_acknowledge_by_message_objects(self, mock_service):
        ack_method = mock_service.acknowledge
        self.pubsub_hook.acknowledge(
            project_id=TEST_PROJECT,
            subscription=TEST_SUBSCRIPTION,
            messages=self._generate_messages(3),
        )
        # Message objects are reduced to their ack ids.
        ack_method.assert_called_once_with(
            subscription=EXPANDED_SUBSCRIPTION,
            ack_ids=['1', '2', '3'],
            retry=None,
            timeout=None,
            metadata=None,
        )

    @parameterized.expand([(exception, ) for exception in [
        HttpError(resp={'status': '404'}, content=EMPTY_CONTENT),
        GoogleAPICallError("API Call Error")
    ]])
    @mock.patch(PUBSUB_STRING.format('PubSubHook.subscriber_client'))
    def test_acknowledge_fails_on_exception(self, exception, mock_service):
        ack_method = mock_service.acknowledge
        ack_method.side_effect = exception
        with self.assertRaises(PubSubException):
            self.pubsub_hook.acknowledge(project_id=TEST_PROJECT,
                                         subscription=TEST_SUBSCRIPTION,
                                         ack_ids=['1', '2', '3'])
        ack_method.assert_called_once_with(
            subscription=EXPANDED_SUBSCRIPTION,
            ack_ids=['1', '2', '3'],
            retry=None,
            timeout=None,
            metadata=None)

    @parameterized.expand([(messages, ) for messages in [
        [{
            "data": b'test'
        }],
        [{
            "data": b''
        }],
        [{
            "data": b'test',
            "attributes": {
                "weight": "100kg"
            }
        }],
        [{
            "data": b'',
            "attributes": {
                "weight": "100kg"
            }
        }],
        [{
            "attributes": {
                "weight": "100kg"
            }
        }],
    ]])
    def test_messages_validation_positive(self, messages):
        # Valid shapes must pass silently.
        PubSubHook._validate_messages(messages)

    @parameterized.expand([
        ([("wrong type", )], "Wrong message type. Must be a dictionary."),
        ([{
            "wrong_key": b'test'
        }], "Wrong message. Dictionary must contain 'data' or 'attributes'."),
        ([{
            "data": 'wrong string'
        }], "Wrong message. 'data' must be send as a bytestring"),
        ([{
            "data": None
        }], "Wrong message. 'data' must be send as a bytestring"),
        ([{
            "attributes": None
        }],
         "Wrong message. If 'data' is not provided 'attributes' must be a non empty dictionary."
         ),
        ([{
            "attributes": "wrong string"
        }],
         "Wrong message. If 'data' is not provided 'attributes' must be a non empty dictionary."
         )
    ])
    def test_messages_validation_negative(self, messages, error_message):
        with self.assertRaises(PubSubException) as e:
            PubSubHook._validate_messages(messages)
        self.assertEqual(str(e.exception), error_message)
def build(serviceName,
          version,
          http=None,
          discoveryServiceUrl=DISCOVERY_URI,
          developerKey=None,
          model=None,
          requestBuilder=HttpRequest,
          credentials=None):
    """Construct a Resource for interacting with an API.

    Construct a Resource object for interacting with an API. The serviceName
    and version are the names from the Discovery service.

    Args:
      serviceName: string, name of the service.
      version: string, the version of the service.
      http: httplib2.Http, An instance of httplib2.Http or something that
        acts like it that HTTP requests will be made through.
      discoveryServiceUrl: string, a URI Template that points to the location
        of the discovery service. It should have two parameters {api} and
        {apiVersion} that when filled in produce an absolute URI to the
        discovery document for that service.
      developerKey: string, key obtained from
        https://code.google.com/apis/console.
      model: googleapiclient.Model, converts to and from the wire format.
      requestBuilder: googleapiclient.http.HttpRequest, encapsulator for an
        HTTP request.
      credentials: oauth2client.Credentials, credentials to be used for
        authentication.

    Returns:
      A Resource object with methods for interacting with the service.
    """
    if http is None:
        http = httplib2.Http()

    requested_url = uritemplate.expand(discoveryServiceUrl,
                                       {'api': serviceName,
                                        'apiVersion': version})

    # REMOTE_ADDR is defined by the CGI spec [RFC3875] as the environment
    # variable that contains the network address of the client sending the
    # request. If it exists then add that to the request for the discovery
    # document to avoid exceeding the quota on discovery requests.
    if 'REMOTE_ADDR' in os.environ:
        requested_url = _add_query_parameter(requested_url, 'userIp',
                                             os.environ['REMOTE_ADDR'])
    logger.info('URL being requested: GET %s' % requested_url)

    resp, content = http.request(requested_url)

    # 404 means the name/version pair is unknown to the discovery service;
    # any other >=400 status is surfaced as a plain HttpError.
    if resp.status == 404:
        raise UnknownApiNameOrVersion("name: %s version: %s" %
                                      (serviceName, version))
    if resp.status >= 400:
        raise HttpError(resp, content, uri=requested_url)

    # Normalize to text; bytes lack .decode failures are impossible, but a
    # str input simply has no decode attribute.
    try:
        content = content.decode('utf-8')
    except AttributeError:
        pass

    # Parse purely to validate that the discovery document is JSON.
    try:
        json.loads(content)
    except ValueError as e:
        logger.error('Failed to parse as JSON: ' + content)
        raise InvalidJsonError()

    return build_from_document(content,
                               base=discoveryServiceUrl,
                               http=http,
                               developerKey=developerKey,
                               model=model,
                               requestBuilder=requestBuilder,
                               credentials=credentials)
def setUp(self):
    """Prepare a reusable HttpError fixture carrying a 404 status."""
    super(RestoreTestCase, self).setUp()

    # A class object standing in for an HTTP response: attribute access on
    # the class yields status == 404, same as the dynamic type(...) form.
    fake_resp = type('FakeResp', (object, ), dict(status=404))
    body = MagicMock(spec=bytes)
    self.fake_http_error = HttpError(fake_resp, body)
def download_file(self,
                  file_id,
                  write_path,
                  page_num=None,
                  print_details=True,
                  output_type=None):
    """Download a Drive file to disk or into memory.

    Spreadsheets are exported as CSV via the docs.google.com export URL;
    every other mime type is streamed with MediaIoBaseDownload.

    Args:
        file_id: Drive file id to download.
        write_path: destination path on disk (unused when output_type
            is given for a spreadsheet).
        page_num: sheet gid to export; required for spreadsheets.
        print_details: when True, echo a summary line to stdout.
        output_type: None (write to disk), 'list' or 'dataframe'
            (return parsed CSV in memory; spreadsheets only).

    Returns:
        None when writing to disk; a list of rows or a pandas DataFrame
        when output_type is set for a spreadsheet.

    Raises:
        HttpError: if the spreadsheet export request does not return 200.
    """
    # Fetch metadata first: name for logging, mimeType to pick the
    # download strategy, modifiedTime/size for the summary line.
    file_metadata = self._files.get(
        fileId=file_id,
        fields='name, id, mimeType, modifiedTime, size').execute(
            num_retries=self._max_retries)
    file_title = file_metadata['name']
    # Convert the RFC3339 UTC timestamp to naive Singapore local time.
    modified_date = datetime.strptime(
        str(file_metadata['modifiedTime']),
        '%Y-%m-%dT%H:%M:%S.%fZ').replace(tzinfo=utc).astimezone(
            timezone('Asia/Singapore')).replace(tzinfo=None)
    return_data = None
    if file_metadata[
            'mimeType'] == 'application/vnd.google-apps.spreadsheet':
        # Native Google Sheets cannot be downloaded directly; export the
        # requested sheet (gid) as CSV over the authorized http channel.
        assert page_num is not None
        download_url = 'https://docs.google.com/spreadsheets/d/%s/export?format=csv&gid=%i' % (
            file_id, page_num)
        resp, content = self._service._http.request(download_url)
        if resp.status == 200:
            if output_type is not None:
                assert output_type in ('dataframe', 'list')
                from io import BytesIO
                with BytesIO(content) as file_buffer:
                    if output_type == 'list':
                        import unicodecsv as csv
                        return_data = list(csv.reader(file_buffer))
                    elif output_type == 'dataframe':
                        import pandas as pd
                        return_data = pd.read_csv(file_buffer)
            else:
                with open(write_path, 'wb') as write_file:
                    write_file.write(content)
            logging_string = '[Drive] Downloaded %s [%s]. Last Modified: %s' % (
                file_title, file_id, modified_date)
        else:
            raise HttpError(resp, content)
    else:
        # Binary/other files: chunked media download straight to disk.
        request = self._files.get_media(fileId=file_id)
        with open(write_path, 'wb') as write_file:
            downloader = MediaIoBaseDownload(write_file, request)
            done = False
            while done is False:
                status, done = downloader.next_chunk()
        file_size = humanize.naturalsize(int(file_metadata['size']))
        logging_string = '[Drive] Downloaded %s [%s] (%s). Last Modified: %s' % (
            file_title, file_id, file_size, modified_date)
    if print_details:
        print '\t' + logging_string
    if self._logger is not None:
        self._logger.info(logging_string)
    return return_data
def create_exception():
    """Return an HttpError mimicking a 400 reply from the Vision API."""
    # Stub response object: a dynamically created class whose attributes
    # carry the status and reason the error handler will read.
    response = type('MyObject', (object,), {
        'reason': 'Request Admission Denied.',
        'status': 400,
    })
    return HttpError(
        response, bytes(),
        'requesting https://vision.googleapis.com/v1/images:annotate?alt=json')
def test_successful_copy_template_with_bigger_array_fields(
        self, mock_hook):
    """Copy succeeds when the body patch replaces 'disks' with a longer list."""
    # The operator queries get_instance_template three times, in order:
    #   1) existence check on the destination name -> 404 (not yet created),
    #   2) fetch of the source template body,
    #   3) fetch of the freshly created template (becomes the return value).
    mock_hook.return_value.get_instance_template.side_effect = [
        HttpError(resp=httplib2.Response({'status': 404}),
                  content=EMPTY_CONTENT),
        GCE_INSTANCE_TEMPLATE_BODY_GET,
        GCE_INSTANCE_TEMPLATE_BODY_GET_NEW,
    ]
    op = ComputeEngineCopyInstanceTemplateOperator(
        project_id=GCP_PROJECT_ID,
        resource_id=GCE_INSTANCE_TEMPLATE_NAME,
        task_id='id',
        # Patch overrides the template name and swaps in TWO disks where
        # the source template (presumably) has fewer -- the array-growth
        # case under test.
        body_patch={
            "name": GCE_INSTANCE_TEMPLATE_NEW_NAME,
            "properties": {
                "disks": [
                    {
                        "kind": "compute#attachedDisk",
                        "type": "SCRATCH",
                        "licenses": [
                            "Updated String",
                        ],
                    },
                    {
                        "kind": "compute#attachedDisk",
                        "type": "PERSISTENT",
                        "licenses": [
                            "Another String",
                        ],
                    },
                ],
            },
        },
    )
    result = op.execute(None)
    mock_hook.assert_called_once_with(
        api_version='v1',
        gcp_conn_id='google_cloud_default',
        impersonation_chain=None,
    )
    # The insert body is the source template with the patched disks
    # substituted in wholesale (arrays are replaced, not merged).
    body_insert = deepcopy(GCE_INSTANCE_TEMPLATE_BODY_INSERT)
    body_insert["properties"]["disks"] = [
        {
            "kind": "compute#attachedDisk",
            "type": "SCRATCH",
            "licenses": [
                "Updated String",
            ],
        },
        {
            "kind": "compute#attachedDisk",
            "type": "PERSISTENT",
            "licenses": [
                "Another String",
            ],
        },
    ]
    mock_hook.return_value.insert_instance_template.assert_called_once_with(
        project_id=GCP_PROJECT_ID,
        body=body_insert,
        request_id=None,
    )
    self.assertEqual(GCE_INSTANCE_TEMPLATE_BODY_GET_NEW, result)
def test_create_model_idempotency(self, mock_get_conn):
    """create_model must tolerate 409 ALREADY_EXISTS by fetching the model.

    When the backend rejects the create call because a model of the same
    name exists, the hook is expected to fall back to a GET and return
    the existing model instead of raising.
    """
    project_id = 'test-project'
    model_name = 'test-model'
    model = {
        'name': model_name,
    }
    # The hook decorates the outgoing body with an airflow-version label;
    # the create assertion below must match that decorated body.
    model_with_airflow_version = {
        'name': model_name,
        'labels': {'airflow-version': hook._AIRFLOW_VERSION}
    }
    project_path = 'projects/{}'.format(project_id)
    # Arrange: the (single) create().execute() call raises a 409 whose
    # payload mirrors the real ALREADY_EXISTS error structure.
    (
        mock_get_conn.return_value.
        projects.return_value.
        models.return_value.
        create.return_value.
        execute.side_effect
    ) = [
        HttpError(
            resp=httplib2.Response({"status": 409}),
            content=json.dumps(
                {
                    "error": {
                        "code": 409,
                        "message": "Field: model.name Error: A model with the same name already exists.",
                        "status": "ALREADY_EXISTS",
                        "details": [
                            {
                                "@type": "type.googleapis.com/google.rpc.BadRequest",
                                "fieldViolations": [
                                    {
                                        "field": "model.name",
                                        "description": "A model with the same name already exists."
                                    }
                                ],
                            }
                        ],
                    }
                }
            ).encode(),
        )
    ]
    # Arrange: the follow-up get().execute() returns the existing model.
    (
        mock_get_conn.return_value.
        projects.return_value.
        models.return_value.
        get.return_value.
        execute.return_value
    ) = deepcopy(model)
    create_model_response = self.hook.create_model(
        project_id=project_id, model=deepcopy(model)
    )
    self.assertEqual(create_model_response, model)
    # Verify both legs of the protocol: attempted create with the
    # labelled body, then the fallback get of the existing model.
    mock_get_conn.assert_has_calls([
        mock.call().projects().models().create(body=model_with_airflow_version, parent=project_path),
        mock.call().projects().models().create().execute(),
    ])
    mock_get_conn.assert_has_calls([
        mock.call().projects().models().get(name='projects/test-project/models/test-model'),
        mock.call().projects().models().get().execute()
    ])
def test_fixtue(*args, **kwargs):
    # Callback fixture: record that it was invoked (via 'self', a free
    # variable captured from the enclosing scope), then simulate an HTTP
    # failure regardless of the arguments it was called with.
    # NOTE(review): 'reason' is configured as a callable returning None --
    # presumably to satisfy HttpError's message building; confirm against
    # the consuming code.
    self.called = True
    raise HttpError(mock.Mock(**{"reason.return_value": None}), b"CONTENT")
def test_sync_mailinglists(self, logger_mock):
    """sync_mailinglists creates, updates and deletes the right groups.

    Covers the error path when listing existing groups fails, and the
    successful path including a paginated groups listing. Groups with no
    addresses ("ignore"/"ignore2") must be left alone.
    """
    from unittest.mock import call  # stdlib; for call_args_list checks

    original_create = self.sync_service.create_group
    original_update = self.sync_service.update_group
    original_delete = self.sync_service.delete_group
    self.sync_service.create_group = MagicMock()
    self.sync_service.update_group = MagicMock()
    self.sync_service.delete_group = MagicMock()

    with self.subTest("Error getting existing list"):
        self.directory_api.groups().list().execute.side_effect = HttpError(
            Response({"status": 500}), bytes())

        self.sync_service.sync_mailinglists()

        logger_mock.error.assert_called_once_with(
            "Could not get the existing groups: %s", bytes())

    self.directory_api.reset_mock()

    with self.subTest("Successful"):
        existing_groups = [
            {
                "name": "deleteme",
                "directMembersCount": "3"
            },
            {
                "name": "already_synced",
                "directMembersCount": "2"
            },
            {
                "name": "ignore",
                "directMembersCount": "0"
            },
        ]
        # Two pages: the first carries a nextPageToken to exercise paging.
        self.directory_api.groups().list().execute.side_effect = [
            {
                "groups": existing_groups[:1],
                "nextPageToken": "some_token"
            },
            {
                "groups": existing_groups[1:]
            },
        ]

        self.sync_service.sync_mailinglists([
            GSuiteSyncService.GroupData(name="syncme", addresses=["someone"]),
            GSuiteSyncService.GroupData(name="already_synced",
                                        addresses=["someone"]),
            GSuiteSyncService.GroupData(name="ignore2", addresses=[]),
        ])

        self.sync_service.create_group.assert_called_with(
            GSuiteSyncService.GroupData(name="syncme", addresses=["someone"]))
        self.sync_service.update_group.assert_called_with(
            "already_synced",
            GSuiteSyncService.GroupData(name="already_synced",
                                        addresses=["someone"]),
        )
        self.sync_service.delete_group.assert_called_with("deleteme")

        # BUG FIX: Mock has no 'assert_not_called_with' -- the attribute
        # was auto-created and silently passed on old mock versions, and
        # raises AttributeError on mock >= Python 3.5's unsafe-attribute
        # guard. Inspect call_args_list explicitly instead.
        self.assertNotIn(
            call(GSuiteSyncService.GroupData(name="ignore2", addresses=[])),
            self.sync_service.create_group.call_args_list)
        self.assertNotIn(
            call("ignore2",
                 GSuiteSyncService.GroupData(name="ignore2", addresses=[])),
            self.sync_service.update_group.call_args_list)
        self.assertNotIn(call("ignore2"),
                         self.sync_service.delete_group.call_args_list)

    self.sync_service.create_group = original_create
    self.sync_service.update_group = original_update
    self.sync_service.delete_group = original_delete
def test_update_group_members(self, logger_mock):
    """_update_group_members inserts missing members and removes stale ones.

    Covers the error path when the member listing fails, and a successful
    pass in which one insert and one delete each raise an HttpError that
    must be logged rather than propagated.
    """
    from unittest.mock import call  # stdlib; for call_args_list checks

    with self.subTest("Error getting existing list"):
        self.directory_api.members(
        ).list().execute.side_effect = HttpError(Response({"status": 500}),
                                                 bytes())

        self.sync_service._update_group_members(
            GSuiteSyncService.GroupData(name="update_group"))

        logger_mock.error.assert_called_once_with(
            "Could not obtain list member data: %s", bytes())

    self.directory_api.reset_mock()

    with self.subTest("Successful with some errors"):
        group_data = GSuiteSyncService.GroupData(
            name="update_group",
            addresses=[
                "*****@*****.**",
                "*****@*****.**",
                "*****@*****.**",
            ],
        )
        existing_aliases = [
            {
                "email": "*****@*****.**",
                "role": "MEMBER"
            },
            {
                "email": "*****@*****.**",
                "role": "MEMBER"
            },
            {
                "email": "*****@*****.**",
                "role": "MEMBER"
            },
            # Managers are not plain members and must not be removed.
            {
                "email": "*****@*****.**",
                "role": "MANAGER"
            },
        ]
        # Two pages: the first carries a nextPageToken to exercise paging.
        self.directory_api.members().list().execute.side_effect = [
            {
                "members": existing_aliases[:1],
                "nextPageToken": "some_token"
            },
            {
                "members": existing_aliases[1:]
            },
        ]
        # First insert/delete succeeds, the second raises -> logged.
        self.directory_api.members().insert().execute.side_effect = [
            "success",
            HttpError(Response({"status": 500}), bytes()),
        ]
        self.directory_api.members().delete().execute.side_effect = [
            "success",
            HttpError(Response({"status": 500}), bytes()),
        ]

        self.sync_service._update_group_members(group_data)

        self.directory_api.members().insert.assert_any_call(
            groupKey=f"update_group@{settings.GSUITE_DOMAIN}",
            body={
                "email": "*****@*****.**",
                "role": "MEMBER"
            },
        )
        self.directory_api.members().delete.assert_any_call(
            groupKey=f"update_group@{settings.GSUITE_DOMAIN}",
            memberKey="*****@*****.**",
        )
        # BUG FIX: Mock has no 'assert_not_called_with' -- it silently
        # passed on old mock and raises AttributeError on newer mock.
        # Check call_args_list explicitly instead.
        self.assertNotIn(
            call(
                groupKey=f"update_group@{settings.GSUITE_DOMAIN}",
                memberKey="*****@*****.**",
            ),
            self.directory_api.members().delete.call_args_list,
        )
        logger_mock.error.assert_any_call(
            "Could not insert list member %s in %s: %s",
            "*****@*****.**",
            "update_group",
            bytes(),
        )
        logger_mock.error.assert_any_call(
            "Could not remove list member %s from %s: %s",
            "*****@*****.**",
            "update_group",
            bytes(),
        )
def test_update_group_aliases(self, logger_mock):
    """_update_group_aliases inserts missing aliases and removes stale ones.

    Covers the error path when the alias listing fails, and a successful
    pass in which one insert and one delete each raise an HttpError that
    must be logged rather than propagated.
    """
    with self.subTest("Error getting existing list"):
        self.directory_api.groups().aliases(
        ).list().execute.side_effect = HttpError(Response({"status": 500}),
                                                 bytes())

        self.sync_service._update_group_aliases(
            GSuiteSyncService.GroupData(name="update_group"))

        logger_mock.error.assert_called_once_with(
            "Could not obtain existing aliases for list %s: %s",
            "update_group",
            bytes(),
        )

    self.directory_api.reset_mock()

    with self.subTest("Successful with some errors"):
        group_data = GSuiteSyncService.GroupData(
            name="update_group",
            aliases=["not_synced", "not_synced_error", "already_synced"],
        )
        existing_aliases = [
            {
                "alias": f"deleteme@{settings.GSUITE_DOMAIN}"
            },
            {
                "alias": f"deleteme_error@{settings.GSUITE_DOMAIN}"
            },
            {
                "alias": f"already_synced@{settings.GSUITE_DOMAIN}"
            },
        ]
        self.directory_api.groups().aliases().list(
        ).execute.side_effect = [{
            "aliases": existing_aliases
        }]
        # First insert/delete succeeds, the second raises -> logged.
        self.directory_api.groups().aliases().insert(
        ).execute.side_effect = [
            "success",
            HttpError(Response({"status": 500}), bytes()),
        ]
        self.directory_api.groups().aliases().delete(
        ).execute.side_effect = [
            "success",
            HttpError(Response({"status": 500}), bytes()),
        ]

        self.sync_service._update_group_aliases(group_data)

        self.directory_api.groups().aliases().insert.assert_any_call(
            groupKey=f"update_group@{settings.GSUITE_DOMAIN}",
            body={"alias": f"not_synced@{settings.GSUITE_DOMAIN}"},
        )
        self.directory_api.groups().aliases().delete.assert_any_call(
            groupKey=f"update_group@{settings.GSUITE_DOMAIN}",
            alias=f"deleteme@{settings.GSUITE_DOMAIN}",
        )
        logger_mock.error.assert_any_call(
            "Could not insert alias %s for list %s: %s",
            f"not_synced_error@{settings.GSUITE_DOMAIN}",
            "update_group",
            bytes(),
        )
        logger_mock.error.assert_any_call(
            "Could not remove alias %s for list %s: %s",
            f"deleteme_error@{settings.GSUITE_DOMAIN}",
            "update_group",
            bytes(),
        )
class TestCalendarApiService(unittest.TestCase):
    """Test calendar api service methods.

    Uses a mocked events() resource so no real Calendar API calls occur.
    The deprecated assertEquals alias (removed in Python 3.12) has been
    replaced with assertEqual throughout.
    """

    def setUp(self):
        """Set up mocks for testing."""
        # Stub out auth so constructing the service needs no credentials.
        calendar_api_service.CalendarApiService.calendar_auth = mock.MagicMock(
        )
        self.service = calendar_api_service.CalendarApiService()
        self.events_mock = mock.MagicMock()
        self.service.calendar_service.events = mock.MagicMock(
            return_value=self.events_mock)

    def test_create_event(self):
        """Test creation of an event."""
        self.events_mock.insert = mock.MagicMock()
        response = self.service.create_event(
            event_id=1,
            calendar_id="primary",
            summary="test calendar event",
            description="test calendar event description",
            start="2018-01-01",
            end="2018-01-01",
            timezone="UTC",
            attendees=["*****@*****.**"],
            send_notifications=False)
        # Body the service is expected to hand to the Calendar API.
        expected_body = {
            "summary": "test calendar event",
            "description": "test calendar event description",
            "start": {
                "date": "2018-01-01",
                "timeZone": "UTC",
            },
            "end": {
                "date": "2018-01-01",
                "timeZone": "UTC",
            },
            "attendees": [{
                "email": "*****@*****.**"
            }],
            "locked": True,
            "sendNotifications": False,
            "guestsCanModify": False,
            "guestsCanInviteOthers": False,
            "transparency": "transparent",
        }
        self.assertEqual(response['status_code'], 200)
        self.events_mock.insert.assert_called_with(calendarId="primary",
                                                   body=expected_body)

    @ddt.data(
        (HttpError(
            resp=ErrorResp(status=403, reason="reason"),
            content="Test",
        ), 403),
        (requests.exceptions.RequestException, 500),
    )
    @ddt.unpack
    def test_create_event_error(self, error, code):
        """Test creation of an event with raised HttpError."""
        self.events_mock.insert = mock.MagicMock()
        self.events_mock.insert.side_effect = error
        response = self.service.create_event(
            event_id=1,
            calendar_id="primary",
            summary="test calendar event with http error",
            description="test calendar event description",
            start="2018-02-01",
            end="2018-02-01",
            timezone="UTC",
            attendees=["*****@*****.**"],
            send_notifications=False)
        self.assertEqual(response, {"content": None, "status_code": code})

    def test_update_event(self):
        """Test update of an event."""
        self.events_mock.update = mock.MagicMock()
        response = self.service.update_event(
            event_id=1,
            calendar_id="primary",
            summary="test calendar event",
            description="test calendar event description",
            external_event_id="SOMEID12345",
            start="2018-01-01",
            end="2018-01-01",
            timezone="UTC",
            attendees=["*****@*****.**"],
        )
        self.assertEqual(response['status_code'], 200)
        expected_body = {
            "summary": "test calendar event",
            "description": "test calendar event description",
            "start": {
                "date": "2018-01-01",
                "timeZone": "UTC",
            },
            "end": {
                "date": "2018-01-01",
                "timeZone": "UTC",
            },
            "attendees": [{
                "email": "*****@*****.**"
            }],
            "locked": True,
            "sendNotifications": False,
            "guestsCanModify": False,
            "guestsCanInviteOthers": False,
            "transparency": "transparent",
        }
        self.events_mock.update.assert_called_with(calendarId="primary",
                                                   eventId="SOMEID12345",
                                                   body=expected_body)

    @ddt.data(
        (HttpError(
            resp=ErrorResp(status=403, reason="reason"),
            content="Test",
        ), 403),
        (requests.exceptions.RequestException, 500),
    )
    @ddt.unpack
    def test_update_event_error(self, error, code):
        """Test update of an event with raised HttpError."""
        self.events_mock.update = mock.MagicMock()
        self.events_mock.update.side_effect = error
        response = self.service.update_event(
            event_id=1,
            calendar_id="primary",
            summary="test calendar event with http error",
            external_event_id="SOMEID12345",
            description="test calendar event description",
            start="2018-02-01",
            end="2018-02-01",
            timezone="UTC",
            attendees=["*****@*****.**"],
        )
        self.assertEqual(response, {"content": None, "status_code": code})

    def test_delete_event(self):
        """Test delete of an event."""
        self.events_mock.delete = mock.MagicMock()
        response = self.service.delete_event(
            calendar_id="primary",
            external_event_id="SOMEID12345",
            event_id=1,
        )
        self.assertEqual(response['status_code'], 200)
        self.events_mock.delete.assert_called_with(
            calendarId="primary",
            eventId="SOMEID12345",
        )

    def test_get_event(self):
        """Test get of an event."""
        self.events_mock.get = mock.MagicMock()
        response = self.service.get_event(
            calendar_id="primary",
            external_event_id="SOMEID12345",
            event_id=1,
        )
        self.assertEqual(response['status_code'], 200)
        self.events_mock.get.assert_called_with(
            calendarId="primary",
            eventId="SOMEID12345",
        )
"deobfuscation_file": "123" }], True, ) gp_service_mock.commit_edit.assert_not_called() upload_apk_exception_params = [ (Exception("error")), (HttpError( {}, json.dumps({ "error": { "code": 404, "message": ("APK specifies a version code that has already been used."), } }).encode("utf-8"), )), (HttpError( {}, json.dumps({ "error": { "code": 403, "message": ("Some other error message but with same status code."), } }).encode("utf-8"), )),
def test_create_preexisting_topic_nofailifexists(self, mock_service):
    """A 409 on the existence check must not make create_topic fail."""
    # Name the deeply nested topics resource mock, then arrange for the
    # GET to signal "already exists".
    topics = (mock_service.return_value.projects.return_value.
              topics.return_value)
    topics.get.return_value.execute.side_effect = HttpError(
        resp={'status': '409'}, content=EMPTY_CONTENT)
    self.pubsub_hook.create_topic(TEST_PROJECT, TEST_TOPIC)
def new_instance(self, instance_name, machine, n_wait=0, update=False, wn_type=None):
    """Create a GCE instance described by *machine*.

    Args:
        instance_name: name for the new instance.
        machine: dict with at least core/mem/disk/image; optional keys
            (ssd, gpu, guestAccelerators, ...) tune the request. Keys not
            consumed here are copied verbatim into the request body.
        n_wait: passed through to create_instance.
        update: passed through to create_instance.
        wn_type: worker-node type key into self.scripts, or None for a
            plain instance without startup/shutdown metadata.

    Returns:
        The result of create_instance, or False when the instance already
        exists (HTTP 409).

    Raises:
        HttpError: re-raised for any API error other than 409.
    """
    option = {
        "name": instance_name,
        "machineType": "custom-%d-%d" % (machine["core"], machine["mem"]),
        "disks": [{
            "type": "PERSISTENT",
            "boot": True,
            "autoDelete": True,
            "initializeParams": {
                "diskSizeGb": machine["disk"],
                "sourceImage": "global/images/" + machine["image"],
            }
        }],
        "serviceAccounts": [{
            "email": "default",
            "scopes": [
                "https://www.googleapis.com/auth/devstorage.read_only",
                "https://www.googleapis.com/auth/logging.write",
                "https://www.googleapis.com/auth/monitoring.write",
                "https://www.googleapis.com/auth/trace.append",
            ]
        }],
    }
    if wn_type is not None:
        # Worker nodes get network tags plus per-core startup/shutdown
        # scripts taken from the pre-built script table.
        option["tags"] = {"items": self.data["network_tag"]}
        option["metadata"] = {
            "items": [
                {
                    "key": "startup-script",
                    "value": self.scripts[wn_type]["startup"][machine["core"]]
                },
                {
                    "key": "shutdown-script",
                    "value": self.scripts[wn_type]["shutdown"][machine["core"]]
                },
            ]
        }
    # GPU-bearing machines cannot be live-migrated.
    option["scheduling"] = {
        "onHostMaintenance": "terminate" if ("gpu" in machine or "guestAccelerators" in machine) else "migrate",
        "automaticRestart": not bool(self.data["preemptible"]),
        "preemptible": bool(self.data["preemptible"])
    }
    if "ssd" in machine:
        ssd = machine["ssd"]
        # Accept a single interface value or a list of them.
        # (Was `type(ssd) is not list`; isinstance is the idiomatic check.)
        if not isinstance(ssd, list):
            ssd = [ssd]
        for interface in ssd:
            # NOTE(review): "boot": True on a SCRATCH local-ssd disk looks
            # wrong (the persistent disk above is already the boot disk);
            # preserved as-is -- confirm against the Compute Engine API.
            option["disks"].append({
                "type": "SCRATCH",
                "boot": True,
                "autoDelete": True,
                "interface": interface,
                "initializeParams": {
                    "diskType": "zones/%s/diskTypes/local-ssd" % self.data["zone"]
                }
            })
    # Copy any extra machine options not consumed above straight into the
    # request body (e.g. guestAccelerators, minCpuPlatform).
    consumed = ("name", "core", "mem", "swap", "disk", "image", "max",
                "idle", "ssd")
    for opt in machine:
        if opt not in consumed:
            option[opt] = machine[opt]
    m = Machine(name=instance_name,
                core=machine["core"],
                mem=machine["mem"],
                disk=machine["disk"],
                start_time=time.time(),
                test=(wn_type == "wn_test"))
    if wn_type is not None:
        self.wn_starting.append(m)
    try:
        return self.get_gce().create_instance(instance=instance_name,
                                              option=option,
                                              n_wait=n_wait,
                                              update=update)
    except HttpError as e:
        # Creation failed: the node never started, so drop its bookkeeping.
        if m in self.wn_starting:
            self.wn_starting.remove(m)
        if e.resp.status == 409:
            # Instance already exists: warn and report failure to caller.
            self.logger.warning(e)
            return False
        # Re-raise the ORIGINAL exception; the old code constructed a new
        # HttpError(e.resp, e.content, e.uri), discarding the traceback.
        raise