def raise_HttpError404(self):
    self.counter_for_404()
    raise HttpError(mock.Mock(status=404), 'Not Found')
def raise_HttpError500(self):
    self.counter_for_500()
    raise HttpError(mock.Mock(status=500), 'Internal error')
# Presumably a pytest fixture (it takes the pytest-mock `mocker` fixture and
# yields); the decorator is restored here.
@pytest.fixture
def spreadsheet_404(mocker, services):
    from apiclient.errors import HttpError

    http404 = HttpError(resp=mocker.NonCallableMock(status=404), content=b'')
    services.sheets.spreadsheets.return_value.get.return_value \
        .execute.side_effect = http404
    yield http404
def raise_HttpError400(self):
    self.counter_for_400()
    raise HttpError(mock.Mock(status=400), 'Bad request')
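# The three helpers above differ only in status code and message. A minimal
# consolidated sketch (the make_http_error name is hypothetical, not from the
# source); it works because googleapiclient's HttpError only needs a response
# object exposing a `status` attribute, which mock.Mock provides:
import mock
from googleapiclient.errors import HttpError

def make_http_error(status, message):
    # Mock(status=...) stands in for an httplib2.Response.
    return HttpError(mock.Mock(status=status), message)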
class TestGracePeriodAfterDeletionFilter(unittest.TestCase):

    def setUp(self):
        self.testbed = testbed.Testbed()
        self.testbed.activate()
        self.testbed.init_datastore_v3_stub()
        self.testbed.init_memcache_stub()
        ndb.get_context().clear_cache()
        patch('googleapiclient.discovery.build').start()
        patch('oauth2client.client.GoogleCredentials.get_application_default') \
            .start()
        self.under_test = GracePeriodAfterDeletionFilter()

    def tearDown(self):
        self.testbed.deactivate()
        patch.stopall()

    @patch.object(BigQueryTableMetadata, 'table_exists', return_value=False)
    @patch.object(BigQueryTableMetadata, 'get_table_by_reference',
                  return_value=BigQueryTableMetadata(None))
    @patch.object(Backup, 'get_table',
                  return_value=Table(last_checked=datetime(2017, 7, 1)))
    @freeze_time("2017-08-20")
    def test_should_keep_old_backup_when_source_table_was_deleted_only_recently(
            self, _, _1, _2):  # nopep8 pylint: disable=C0301
        # given
        reference = TableReference('example-project-id', 'example-dataset-id',
                                   'example-table-id')
        b1 = create_backup(datetime(2015, 6, 1))

        # when
        backups_to_retain = self.under_test.filter(backups=[b1],
                                                   table_reference=reference)

        # then
        self.assertListEqual([b1], backups_to_retain)

    @patch.object(BigQueryTableMetadata, 'table_exists', return_value=False)
    @patch.object(BigQueryTableMetadata, 'get_table_by_reference',
                  return_value=BigQueryTableMetadata(None))
    @patch.object(Backup, 'get_table',
                  return_value=Table(last_checked=datetime(2017, 7, 1)))
    @freeze_time("2017-08-20")
    def test_should_keep_young_backups_even_if_source_table_is_deleted(
            self, _, _1, _2):
        # given
        reference = TableReference('example-project-id', 'example-dataset-id',
                                   'example-table-id')
        b1 = create_backup(datetime(2017, 6, 1))

        # when
        backups_to_retain = self.under_test.filter(backups=[b1],
                                                   table_reference=reference)

        # then
        self.assertListEqual([b1], backups_to_retain)

    @patch.object(BigQueryTableMetadata, 'table_exists', return_value=False)
    @patch.object(BigQueryTableMetadata, 'get_table_by_reference',
                  return_value=BigQueryTableMetadata(None))
    @patch.object(Backup, 'get_table',
                  return_value=Table(last_checked=datetime(2015, 7, 1)))
    @freeze_time("2017-08-20")
    def test_should_delete_old_backups_if_source_table_is_gone_for_long(
            self, _, _1, _2):  # nopep8 pylint: disable=C0301
        # given
        reference = TableReference('example-project-id', 'example-dataset-id',
                                   'example-table-id')
        b1 = create_backup(datetime(2015, 6, 1))

        # when
        backups_to_retain = self.under_test.filter(backups=[b1],
                                                   table_reference=reference)

        # then
        self.assertFalse(backups_to_retain)

    is_not_partitioned_http_error_response = '''{
        "error": {
            "code": 400,
            "message": "Cannot read partition information from a table that is not partitioned: sit-cymes-euw1-mlservices:MarketingRecommendations_Derived_Restricted.DeliveredOrder$2420180805",
            "errors": [
                {
                    "message": "Cannot read partition information from a table that is not partitioned: sit-cymes-euw1-mlservices:MarketingRecommendations_Derived_Restricted.DeliveredOrder$2420180805",
                    "domain": "global",
                    "reason": "invalid"
                }
            ],
            "status": "INVALID_ARGUMENT"
        }
    }'''

    @patch.object(BigQueryTableMetadata, 'get_table_by_reference',
                  side_effect=HttpError(
                      Mock(status=400),
                      is_not_partitioned_http_error_response))
    @patch.object(Backup, 'get_table',
                  return_value=Table(last_checked=datetime(2015, 7, 1)))
    @freeze_time("2017-08-20")
    def test_should_delete_old_backups_if_source_partitioned_table_is_gone_for_long_and_new_table_with_the_same_name_is_not_partitioned(
            self, _, _1):  # nopep8 pylint: disable=C0301
        # given
        partitioned_reference = TableReference('example-project-id',
                                               'example-dataset-id',
                                               'example-table-id',
                                               'example-partition-id')
        b1 = create_backup(datetime(2015, 6, 1))

        # when
        backups_to_retain = self.under_test.filter(
            backups=[b1], table_reference=partitioned_reference)

        # then
        self.assertFalse(backups_to_retain)

    @patch.object(BigQueryTableMetadata, 'get_table_by_reference',
                  side_effect=HttpError(
                      Mock(status=400),
                      is_not_partitioned_http_error_response))
    @patch.object(Backup, 'get_table',
                  return_value=Table(last_checked=datetime(2015, 7, 1)))
    @freeze_time("2017-08-20")
    def test_should_raise_exception_if_source_non_partitioned_table_is_gone_for_long_and_new_table_with_the_same_name_is_not_partitioned(
            self, _, _1):  # nopep8 pylint: disable=C0301
        # given
        non_partitioned_reference = TableReference('example-project-id',
                                                   'example-dataset-id',
                                                   'example-table-id')
        b1 = create_backup(datetime(2015, 6, 1))

        # when & then
        self.assertRaises(HttpError, self.under_test.filter, [b1],
                          non_partitioned_reference)

    @patch.object(BigQueryTableMetadata, 'table_exists', return_value=True)
    @patch.object(BigQueryTableMetadata, 'get_table_by_reference',
                  return_value=BigQueryTableMetadata(None))
    @patch.object(Backup, 'get_table',
                  return_value=Table(last_checked=datetime(2015, 7, 1)))
    @freeze_time("2017-08-20")
    def test_should_retain_backups_if_source_table_still_exists(
            self, _, _1, _2):  # nopep8 pylint: disable=C0301
        # given
        reference = TableReference('example-project-id', 'example-dataset-id',
                                   'example-table-id')
        b1 = create_backup(datetime(2015, 6, 1))

        # when
        backups_to_retain = self.under_test.filter(backups=[b1],
                                                   table_reference=reference)

        # then
        self.assertListEqual([b1], backups_to_retain)

    def test_should_gracefully_deal_with_empty_backup_list(self):
        # given
        reference = TableReference('example-project-id', 'example-dataset-id',
                                   'example-table-id')

        # when
        backups_to_retain = self.under_test.filter(backups=[],
                                                   table_reference=reference)

        # then
        self.assertFalse(backups_to_retain)
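# The tests above call a create_backup factory defined elsewhere in the test
# suite. A hypothetical sketch consistent with its usage here, assuming the
# Backup model keys retention decisions off a `created` timestamp:
def create_backup(created_datetime):
    return Backup(created=created_datetime)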
def test_fails_requests_with_too_many_retries(self):
    request = mock.Mock()
    request.execute.side_effect = [HttpError(mock.Mock(status=500), 'err')] * 3
    with self.assertRaises(HttpError):
        endpoints.retry_request(request, num_tries=3)
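# The test above implies a contract for endpoints.retry_request: execute the
# request up to num_tries times and re-raise the last HttpError. A minimal
# sketch of that contract (an assumption, not the actual implementation):
from googleapiclient.errors import HttpError

def retry_request(request, num_tries=3):
    for attempt in range(num_tries):
        try:
            return request.execute()
        except HttpError:
            if attempt == num_tries - 1:
                raise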
# A variant of the same fixture, again presumably decorated with
# @pytest.fixture (restored here), this time mocking via mock.Mock:
@pytest.fixture
def spreadsheet_404(apiclient):
    from apiclient.errors import HttpError

    http404 = HttpError(resp=mock.Mock(status=404), content=b'')
    apiclient.sheets.spreadsheets().get().execute.side_effect = http404
    yield http404
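# A hedged usage sketch for the fixture above (the test name is hypothetical):
# requesting spreadsheet_404 by name arms the mocked sheets client, so any
# code path reaching spreadsheets().get().execute() sees the prepared 404.
import pytest
from apiclient.errors import HttpError

def test_propagates_404_for_missing_spreadsheet(spreadsheet_404, apiclient):
    with pytest.raises(HttpError):
        apiclient.sheets.spreadsheets().get().execute()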
def test_create_preexisting_topic_nofailifexists(self, mock_service):
    (mock_service.return_value.projects.return_value.topics.return_value
     .get.return_value.execute.side_effect) = HttpError(
         resp={'status': '409'}, content='')
    self.pubsub_hook.create_topic(TEST_PROJECT, TEST_TOPIC)
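# The call above succeeds only because the hook swallows a 409 ("already
# exists") when fail_if_exists is false. A hedged sketch of that pattern
# (function and parameter names are illustrative, not the hook's exact code):
from googleapiclient.errors import HttpError

def create_topic_tolerating_conflict(service, full_topic, fail_if_exists=False):
    try:
        service.projects().topics().create(name=full_topic, body={}).execute()
    except HttpError as e:
        # httplib2.Response is dict-like, so subscript access works for both
        # the real response and the plain dict used in the test.
        if str(e.resp['status']) == '409' and not fail_if_exists:
            return  # topic already exists; treat as success
        raise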
class TestAsyncRestoreService(TestCase):

    def setUp(self):
        self.testbed = testbed.Testbed()
        self.testbed.activate()
        self.testbed.init_memcache_stub()
        self.testbed.init_datastore_v3_stub()
        ndb.get_context().clear_cache()
        self.copy_service = patch(
            'src.restore.async_batch_restore_service.CopyJobServiceAsync'
        ).start()
        patch('src.restore.async_batch_restore_service.BigQuery').start()
        patch.object(uuid, 'uuid4', return_value=HARDCODED_UUID).start()
        self.enforcer = patch.object(RestoreWorkspaceCreator,
                                     'create_workspace').start()

    def tearDown(self):
        patch.stopall()
        self.testbed.deactivate()

    def test_for_single_item_should_create_post_copy_action(self):
        # given
        restoration_job_key = RestorationJob.create(
            HARDCODED_UUID,
            create_disposition="CREATE_IF_NEEDED",
            write_disposition="WRITE_EMPTY")
        restore_items_tuple = self.__create_restore_items(count=1)
        restore_item = restore_items_tuple[0][0]
        source_bq = restore_items_tuple[0][1].create_big_query_table()
        target_bq = restore_items_tuple[0][2].create_big_query_table()

        # when
        AsyncBatchRestoreService().restore(restoration_job_key,
                                           [[restore_item]])

        # then
        self.copy_service.assert_has_calls([
            call(copy_job_type_id='restore', task_name_suffix='123'),
            call().with_post_action(
                PostCopyActionRequest(
                    url='/callback/restore-finished/',
                    data={'restoreItemKey': restore_item.key.urlsafe()})),
            call().with_post_action().with_create_disposition(
                'CREATE_IF_NEEDED'),
            call().with_post_action().with_create_disposition()
                .with_write_disposition('WRITE_EMPTY'),
            call().with_post_action().with_create_disposition()
                .with_write_disposition().copy_table(source_bq, target_bq)
        ])

    @patch.object(RestoreWorkspaceCreator, 'create_workspace',
                  side_effect=HttpError(mock.Mock(status=403), 'Forbidden'))
    def test_failing_creating_dataset_should_update_restore_item_status(
            self, _):
        # given
        restoration_job_key = RestorationJob.create(
            HARDCODED_UUID,
            create_disposition="CREATE_IF_NEEDED",
            write_disposition="WRITE_EMPTY")
        restore_items_tuple = self.__create_restore_items(count=1)
        restore_item = restore_items_tuple[0][0]

        # when
        AsyncBatchRestoreService().restore(restoration_job_key,
                                           [[restore_item]])

        # then
        restore_items = list(RestoreItem.query().filter(
            RestoreItem.restoration_job_key == restoration_job_key))
        self.assertEqual(restore_items[0].status, RestoreItem.STATUS_FAILED)

    def test_multiple_items_restore(self):
        # given
        restoration_job_key = RestorationJob.create(
            HARDCODED_UUID,
            create_disposition="CREATE_IF_NEEDED",
            write_disposition="WRITE_EMPTY")
        restore_items_tuple = self.__create_restore_items(count=2)
        restore_item1 = restore_items_tuple[0][0]
        restore_item2 = restore_items_tuple[1][0]
        source_bq1 = restore_items_tuple[0][1].create_big_query_table()
        target_bq1 = restore_items_tuple[0][2].create_big_query_table()
        source_bq2 = restore_items_tuple[1][1].create_big_query_table()
        target_bq2 = restore_items_tuple[1][2].create_big_query_table()

        # when
        AsyncBatchRestoreService().restore(
            restoration_job_key,
            [[restore_items_tuple[0][0], restore_items_tuple[1][0]]])

        # then
        calls = [
            call(copy_job_type_id='restore', task_name_suffix='123'),
            call().with_post_action(
                PostCopyActionRequest(
                    url='/callback/restore-finished/',
                    data={'restoreItemKey': restore_item1.key.urlsafe()})),
            call().with_post_action().with_create_disposition(
                'CREATE_IF_NEEDED'),
            call().with_post_action().with_create_disposition()
                .with_write_disposition('WRITE_EMPTY'),
            call().with_post_action().with_create_disposition()
                .with_write_disposition().copy_table(source_bq1, target_bq1),
            call(copy_job_type_id='restore', task_name_suffix='123'),
            call().with_post_action(
                PostCopyActionRequest(
                    url='/callback/restore-finished/',
                    data={'restoreItemKey': restore_item2.key.urlsafe()})),
            call().with_post_action().with_create_disposition(
                'CREATE_IF_NEEDED'),
            call().with_post_action().with_create_disposition()
                .with_write_disposition('WRITE_EMPTY'),
            call().with_post_action().with_create_disposition()
                .with_write_disposition().copy_table(source_bq2, target_bq2)
        ]
        self.copy_service.assert_has_calls(calls)

    def test_that_enforcer_is_called_for_each_restore_item(self):
        # given
        restoration_job_key = RestorationJob.create(
            HARDCODED_UUID,
            create_disposition="CREATE_IF_NEEDED",
            write_disposition="WRITE_EMPTY")
        restore_items_tuple = self.__create_restore_items(count=2)

        # when
        AsyncBatchRestoreService().restore(
            restoration_job_key,
            [[restore_items_tuple[0][0], restore_items_tuple[1][0]]])

        # then
        expected_calls = [
            call(restore_items_tuple[0][1], restore_items_tuple[0][2]),
            call(restore_items_tuple[1][1], restore_items_tuple[1][2])
        ]
        self.enforcer.assert_has_calls(expected_calls)

    def test_that_proper_entities_were_stored_in_datastore(self):
        # given
        restoration_job_key = RestorationJob.create(
            HARDCODED_UUID,
            create_disposition="CREATE_IF_NEEDED",
            write_disposition="WRITE_EMPTY")
        restore_item_tuples = self.__create_restore_items(count=3)

        # when
        AsyncBatchRestoreService().restore(
            restoration_job_key,
            [[restore_item_tuples[0][0], restore_item_tuples[1][0]],
             [restore_item_tuples[2][0]]])

        # then
        restoration_job = RestorationJob.get_by_id(HARDCODED_UUID)
        self.assertEqual(restoration_job.items_count, 3)

        restore_items = list(RestoreItem.query().filter(
            RestoreItem.restoration_job_key == restoration_job.key))
        self.assertEqual(restore_items[0].status,
                         RestoreItem.STATUS_IN_PROGRESS)
        self.assertEqual(restore_items[0].completed, None)
        self.assertEqual(restore_items[0].source_table_reference,
                         restore_item_tuples[0][1])
        self.assertEqual(restore_items[0].target_table_reference,
                         restore_item_tuples[0][2])
        self.assertEqual(restore_items[1].status,
                         RestoreItem.STATUS_IN_PROGRESS)
        self.assertEqual(restore_items[1].completed, None)
        self.assertEqual(restore_items[1].source_table_reference,
                         restore_item_tuples[1][1])
        self.assertEqual(restore_items[1].target_table_reference,
                         restore_item_tuples[1][2])

    @staticmethod
    def __create_restore_items(count=1):
        result = []
        for i in range(0, count):
            source_table_reference = TableReference(
                "source_project_id_" + str(i), "source_dataset_id_" + str(i),
                "source_table_id_" + str(i), "source_partition_id_" + str(i))
            target_table_reference = TableReference(
                "target_project_id_" + str(i), "target_dataset_id_" + str(i),
                "target_table_id_" + str(i), "target_partition_id_" + str(i))
            restore_item = RestoreItem.create(source_table_reference,
                                              target_table_reference)
            result.append((restore_item, source_table_reference,
                           target_table_reference))
        return result
def test_non_json(self):
    """Test handling of non-JSON bodies."""
    resp, content = fake_response('}NOT OK', {'status': '400'})
    error = HttpError(resp, content)
    self.assertEqual(str(error), '<HttpError 400 "Ok">')
def test_missing_reason(self):
    """Test an empty dict with a missing resp.reason."""
    resp, content = fake_response('}NOT OK', {'status': '400'}, reason=None)
    error = HttpError(resp, content)
    self.assertEqual(str(error), '<HttpError 400 "">')
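# Both tests rely on a fake_response helper. A minimal sketch consistent with
# the assertions above: a non-JSON body makes HttpError fall back to
# resp.reason ("Ok" by default), and reason=None yields the empty string.
import httplib2

def fake_response(data, headers, reason='Ok'):
    response = httplib2.Response(headers)
    response.reason = reason
    return response, data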
def _raise_value_error_exception(*args, **kwargs):
    # Note: despite its name, this helper raises HttpError, not ValueError.
    raise HttpError(mock.Mock(status=400), '')
class TestCopyJobService(unittest.TestCase):

    def setUp(self):
        self.testbed = testbed.Testbed()
        self.testbed.activate()
        ndb.get_context().clear_cache()
        patch('googleapiclient.discovery.build').start()
        patch('oauth2client.client.GoogleCredentials.get_application_default') \
            .start()
        self._create_http = patch.object(BigQuery, '_create_http').start()
        self.example_source_bq_table = BigQueryTable('source_project_id_1',
                                                     'source_dataset_id_1',
                                                     'source_table_id_1')
        self.example_target_bq_table = BigQueryTable('target_project_id_1',
                                                     'target_dataset_id_1',
                                                     'target_table_id_1')

    def tearDown(self):
        patch.stopall()
        self.testbed.deactivate()

    @patch.object(BigQuery, 'insert_job',
                  return_value=BigQueryJobReference(project_id='test_project',
                                                    job_id='job123',
                                                    location='EU'))
    @patch.object(TaskCreator, 'create_copy_job_result_check')
    def test_that_post_copy_action_request_is_passed(
            self, create_copy_job_result_check, _):
        # given
        post_copy_action_request = \
            PostCopyActionRequest(url='/my/url', data={'key1': 'value1'})

        # when
        CopyJobService().run_copy_job_request(
            CopyJobRequest(task_name_suffix='task_name_suffix',
                           copy_job_type_id='test-process',
                           source_big_query_table=self.example_source_bq_table,
                           target_big_query_table=self.example_target_bq_table,
                           create_disposition="CREATE_IF_NEEDED",
                           write_disposition="WRITE_EMPTY",
                           retry_count=0,
                           post_copy_action_request=post_copy_action_request))

        # then
        create_copy_job_result_check.assert_called_once_with(
            ResultCheckRequest(
                task_name_suffix='task_name_suffix',
                copy_job_type_id='test-process',
                job_reference=BigQueryJobReference(project_id='test_project',
                                                   job_id='job123',
                                                   location='EU'),
                retry_count=0,
                post_copy_action_request=post_copy_action_request))

    @patch.object(BigQuery, 'insert_job',
                  return_value=BigQueryJobReference(project_id='test_project',
                                                    job_id='job123',
                                                    location='EU'))
    @patch.object(TaskCreator, 'create_copy_job_result_check')
    def test_that_create_and_write_disposition_are_passed_to_result_check(
            self, create_copy_job_result_check, _):
        # given
        create_disposition = "SOME_CREATE_DISPOSITION"
        write_disposition = "SOME_WRITE_DISPOSITION"

        # when
        CopyJobService().run_copy_job_request(
            CopyJobRequest(task_name_suffix='task_name_suffix',
                           copy_job_type_id='test-process',
                           source_big_query_table=self.example_source_bq_table,
                           target_big_query_table=self.example_target_bq_table,
                           create_disposition=create_disposition,
                           write_disposition=write_disposition,
                           retry_count=0,
                           post_copy_action_request=None))

        # then
        create_copy_job_result_check.assert_called_once_with(
            ResultCheckRequest(task_name_suffix='task_name_suffix',
                               copy_job_type_id='test-process',
                               job_reference=BigQueryJobReference(
                                   project_id='test_project',
                                   job_id='job123',
                                   location='EU'),
                               retry_count=0,
                               post_copy_action_request=None))

    @patch.object(BigQuery, 'insert_job')
    @patch('time.sleep', side_effect=lambda _: None)
    def test_that_copy_table_should_throw_error_after_exception_not_being_http_error_thrown_on_copy_job_creation(
            self, _, insert_job):
        # given
        error_message = 'test exception'
        insert_job.side_effect = Exception(error_message)
        request = CopyJobRequest(
            task_name_suffix=None,
            copy_job_type_id=None,
            source_big_query_table=self.example_source_bq_table,
            target_big_query_table=self.example_target_bq_table,
            create_disposition="CREATE_IF_NEEDED",
            write_disposition="WRITE_EMPTY")

        # when
        with self.assertRaises(Exception) as context:
            CopyJobService().run_copy_job_request(request)

        # then
        self.assertTrue(error_message in context.exception)

    @patch.object(BigQuery, 'insert_job')
    @patch('time.sleep', side_effect=lambda _: None)
    def test_that_copy_table_should_throw_unhandled_errors(
            self, _, insert_job):
        # given
        exception = HttpError(Mock(status=500), 'internal error')
        exception._get_reason = Mock(return_value='internal error')
        insert_job.side_effect = exception
        request = CopyJobRequest(
            task_name_suffix=None,
            copy_job_type_id=None,
            source_big_query_table=self.example_source_bq_table,
            target_big_query_table=self.example_target_bq_table,
            create_disposition="CREATE_IF_NEEDED",
            write_disposition="WRITE_EMPTY")

        # when
        with self.assertRaises(HttpError) as context:
            CopyJobService().run_copy_job_request(request)

        # then
        self.assertEqual(context.exception, exception)

    @patch.object(BigQuery, 'insert_job')
    @patch.object(TaskCreator, 'create_post_copy_action')
    def test_that_copy_table_should_create_correct_post_copy_action_if_404_http_error_thrown_on_copy_job_creation(
            self, create_post_copy_action, insert_job):
        # given
        error = HttpError(Mock(status=404), 'not found')
        error._get_reason = Mock(return_value='not found')
        insert_job.side_effect = error
        post_copy_action_request = PostCopyActionRequest(
            url='/my/url', data={'key1': 'value1'})
        request = CopyJobRequest(
            task_name_suffix='task_name_suffix',
            copy_job_type_id='test-process',
            source_big_query_table=self.example_source_bq_table,
            target_big_query_table=self.example_target_bq_table,
            create_disposition="CREATE_IF_NEEDED",
            write_disposition="WRITE_EMPTY",
            retry_count=0,
            post_copy_action_request=post_copy_action_request)

        # when
        CopyJobService().run_copy_job_request(request)

        # then
        create_post_copy_action.assert_called_once_with(
            copy_job_type_id='test-process',
            post_copy_action_request=post_copy_action_request,
            job_json={
                'status': {
                    'state': 'DONE',
                    'errors': [{
                        'reason': 'Invalid',
                        'message': ("404 while creating Copy Job from {} to {}"
                                    .format(self.example_source_bq_table,
                                            self.example_target_bq_table))
                    }]
                },
                'configuration': {
                    'copy': {
                        'sourceTable': {
                            'projectId': self.example_source_bq_table.get_project_id(),
                            'tableId': self.example_source_bq_table.get_table_id(),
                            'datasetId': self.example_source_bq_table.get_dataset_id()
                        },
                        'destinationTable': {
                            'projectId': self.example_target_bq_table.get_project_id(),
                            'tableId': self.example_target_bq_table.get_table_id(),
                            'datasetId': self.example_target_bq_table.get_dataset_id()
                        }
                    }
                }
            })

    @patch.object(BigQuery, 'insert_job')
    @patch.object(TaskCreator, 'create_post_copy_action')
    def test_that_copy_table_should_create_correct_post_copy_action_if_access_denied_http_error_thrown_on_copy_job_creation(
            self, create_post_copy_action, insert_job):
        # given
        http_error_content = "{\"error\": " \
                             " {\"errors\": [" \
                             "  {\"reason\": \"Access Denied\"," \
                             "   \"message\": \"Access Denied\"," \
                             "   \"location\": \"US\"" \
                             "  }]," \
                             "  \"code\": 403," \
                             "  \"message\": \"Access Denied\"}}"
        insert_job.side_effect = HttpError(Mock(status=403),
                                           http_error_content)
        post_copy_action_request = PostCopyActionRequest(
            url='/my/url', data={'key1': 'value1'})
        request = CopyJobRequest(
            task_name_suffix='task_name_suffix',
            copy_job_type_id='test-process',
            source_big_query_table=self.example_source_bq_table,
            target_big_query_table=self.example_target_bq_table,
            create_disposition="CREATE_IF_NEEDED",
            write_disposition="WRITE_EMPTY",
            retry_count=0,
            post_copy_action_request=post_copy_action_request)

        # when
        CopyJobService().run_copy_job_request(request)

        # then
        create_post_copy_action.assert_called_once_with(
            copy_job_type_id='test-process',
            post_copy_action_request=post_copy_action_request,
            job_json={
                'status': {
                    'state': 'DONE',
                    'errors': [{
                        'reason': 'Invalid',
                        'message': ("Access Denied while creating Copy Job from {} to {}"
                                    .format(self.example_source_bq_table,
                                            self.example_target_bq_table))
                    }]
                },
                'configuration': {
                    'copy': {
                        'sourceTable': {
                            'projectId': self.example_source_bq_table.get_project_id(),
                            'tableId': self.example_source_bq_table.get_table_id(),
                            'datasetId': self.example_source_bq_table.get_dataset_id()
                        },
                        'destinationTable': {
                            'projectId': self.example_target_bq_table.get_project_id(),
                            'tableId': self.example_target_bq_table.get_table_id(),
                            'datasetId': self.example_target_bq_table.get_dataset_id()
                        }
                    }
                }
            })

    @patch.object(BigQuery, 'get_job')
    @patch.object(BigQuery, 'insert_job')
    @patch.object(TaskCreator, 'create_copy_job_result_check')
    def test_that_copy_table_will_try_to_wait_if_deadline_exceeded(
            self, create_copy_job_result_check, insert_job, get_job):
        # given
        http_error_content = "{\"error\": " \
                             " {\"errors\": [" \
                             "  {\"reason\": \"Deadline exceeded\"," \
                             "   \"message\": \"Deadline exceeded\"," \
                             "   \"location\": \"US\"" \
                             "  }]," \
                             "  \"code\": 500," \
                             "  \"message\": \"Deadline exceeded\"}}"
        successful_job_json = {
            'status': {
                'state': 'DONE'
            },
            'jobReference': {
                'projectId': self.example_target_bq_table.get_project_id(),
                'location': 'EU',
                'jobId': 'job123',
            },
            'configuration': {
                'copy': {
                    'sourceTable': {
                        'projectId': self.example_source_bq_table.get_project_id(),
                        'tableId': self.example_source_bq_table.get_table_id(),
                        'datasetId': self.example_source_bq_table.get_dataset_id()
                    },
                    'destinationTable': {
                        'projectId': self.example_target_bq_table.get_project_id(),
                        'tableId': self.example_target_bq_table.get_table_id(),
                        'datasetId': self.example_target_bq_table.get_dataset_id()
                    }
                }
            }
        }
        insert_job.side_effect = HttpError(Mock(status=500),
                                           http_error_content)
        get_job.return_value = successful_job_json
        request = CopyJobRequest(
            task_name_suffix='task_name_suffix',
            copy_job_type_id='test-process',
            source_big_query_table=self.example_source_bq_table,
            target_big_query_table=self.example_target_bq_table,
            create_disposition="CREATE_IF_NEEDED",
            write_disposition="WRITE_EMPTY",
            retry_count=0,
            post_copy_action_request=None)

        # when
        CopyJobService().run_copy_job_request(request)

        # then
        create_copy_job_result_check.assert_called_once_with(
            ResultCheckRequest(
                task_name_suffix='task_name_suffix',
                copy_job_type_id='test-process',
                job_reference=BigQueryJobReference(
                    project_id=self.example_target_bq_table.get_project_id(),
                    job_id='job123',
                    location='EU'),
                retry_count=0,
                post_copy_action_request=None))

    @patch('src.commons.big_query.big_query_table_metadata.BigQueryTableMetadata')
    @patch.object(TaskCreator, 'create_copy_job_result_check')
    @patch.object(CopyJobService, '_create_random_job_id',
                  return_value='random_job_123')
    @patch.object(BigQuery, 'insert_job', side_effect=[
        HttpError(Mock(status=503), 'internal error'),
        HttpError(Mock(status=409), 'job exists')
    ])
    @patch('time.sleep', side_effect=lambda _: None)
    def test_bug_regression_job_already_exists_after_internal_error(
            self, _, insert_job, _create_random_job_id,
            create_copy_job_result_check, table_metadata):
        # given
        post_copy_action_request = \
            PostCopyActionRequest(url='/my/url', data={'key1': 'value1'})
        table_metadata._BigQueryTableMetadata__get_table_or_partition \
            .return_value.get_location.return_value = 'EU'

        # when
        CopyJobService().run_copy_job_request(
            CopyJobRequest(task_name_suffix='task_name_suffix',
                           copy_job_type_id='test-process',
                           source_big_query_table=self.example_source_bq_table,
                           target_big_query_table=self.example_target_bq_table,
                           create_disposition="CREATE_IF_NEEDED",
                           write_disposition="WRITE_EMPTY",
                           retry_count=0,
                           post_copy_action_request=post_copy_action_request))

        # then
        self.assertEqual(insert_job.call_count, 2)
        create_copy_job_result_check.assert_called_once_with(
            ResultCheckRequest(
                task_name_suffix='task_name_suffix',
                copy_job_type_id='test-process',
                job_reference=BigQueryJobReference(
                    project_id='target_project_id_1',
                    job_id='random_job_123',
                    location='EU'),
                retry_count=0,
                post_copy_action_request=post_copy_action_request))
def build(serviceName,
          version,
          http=None,
          discoveryServiceUrl=DISCOVERY_URI,
          developerKey=None,
          model=None,
          requestBuilder=HttpRequest):
    """Construct a Resource for interacting with an API.

    Construct a Resource object for interacting with an API. The serviceName
    and version are the names from the Discovery service.

    Args:
      serviceName: string, name of the service.
      version: string, the version of the service.
      http: httplib2.Http, An instance of httplib2.Http or something that
        acts like it that HTTP requests will be made through.
      discoveryServiceUrl: string, a URI Template that points to the location
        of the discovery service. It should have two parameters {api} and
        {apiVersion} that when filled in produce an absolute URI to the
        discovery document for that service.
      developerKey: string, key obtained from
        https://code.google.com/apis/console.
      model: apiclient.Model, converts to and from the wire format.
      requestBuilder: apiclient.http.HttpRequest, encapsulator for an HTTP
        request.

    Returns:
      A Resource object with methods for interacting with the service.
    """
    params = {
        'api': serviceName,
        'apiVersion': version
    }

    if http is None:
        http = httplib2.Http()

    requested_url = uritemplate.expand(discoveryServiceUrl, params)

    # REMOTE_ADDR is defined by the CGI spec [RFC3875] as the environment
    # variable that contains the network address of the client sending the
    # request. If it exists then add that to the request for the discovery
    # document to avoid exceeding the quota on discovery requests.
    if 'REMOTE_ADDR' in os.environ:
        requested_url = _add_query_parameter(requested_url, 'userIp',
                                             os.environ['REMOTE_ADDR'])
    logger.info('URL being requested: %s' % requested_url)

    resp, content = http.request(requested_url)

    if resp.status == 404:
        raise UnknownApiNameOrVersion("name: %s version: %s" %
                                      (serviceName, version))
    if resp.status >= 400:
        raise HttpError(resp, content, requested_url)

    try:
        service = simplejson.loads(content)
    except ValueError, e:
        logger.error('Failed to parse as JSON: ' + content)
        raise InvalidJsonError()
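# A typical call, assuming default discovery (the service name and version
# are illustrative); failures surface as UnknownApiNameOrVersion for a bad
# name and HttpError for any other non-2xx discovery response:
import httplib2

http = httplib2.Http()
drive = build('drive', 'v2', http=http)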
def test_fails_to_build_client_after_3_errors(self):
    self.build.side_effect = [HttpError(mock.Mock(), 'error')] * 3
    with self.assertRaises(HttpError):
        endpoints.build_client('foo', 'bar', 'baz', num_tries=3)