def test_non_401_error_response(self):
    """A non-401 error status passes straight through without a token refresh."""
    mock_http = HttpMockSequence([
        ({'status': '400'}, ''),
    ])
    authorized_http = self.credentials.authorize(mock_http)
    resp, _ = authorized_http.request('http://example.com')
    self.assertEqual(400, resp.status)
def test_auth_header_sent(self):
    """Every authorized request carries the bearer token in its headers."""
    mock_http = HttpMockSequence([
        ({'status': '200'}, 'echo_request_headers'),
    ])
    authorized_http = self.credentials.authorize(mock_http)
    _, echoed_headers = authorized_http.request('http://example.com')
    self.assertEqual('Bearer foo', echoed_headers['Authorization'])
def test_assertion_refresh(self):
    """An assertion credential fetches a fresh token before the first request."""
    mock_http = HttpMockSequence([
        # Token endpoint response, then the echoed application request.
        ({'status': '200'}, '{"access_token":"1/3w"}'),
        ({'status': '200'}, 'echo_request_headers'),
    ])
    authorized_http = self.credentials.authorize(mock_http)
    _, echoed_headers = authorized_http.request('http://example.com')
    self.assertEqual('Bearer 1/3w', echoed_headers['Authorization'])
def test_set_user_agent(self):
    """set_user_agent() injects the given user-agent header into requests."""
    mock_http = HttpMockSequence([
        ({'status': '200'}, 'echo_request_headers'),
    ])
    wrapped_http = set_user_agent(mock_http, "my_app/5.5")
    _, echoed_headers = wrapped_http.request("http://example.com")
    self.assertEqual('my_app/5.5', echoed_headers['user-agent'])
def _credentials_refresh(self, credentials):
    """Drive a token fetch, a 401, and a refresh; return the echoed headers.

    The mock sequence models: initial token grant, an expired-token 401,
    a successful refresh, and the retried application request.
    """
    mock_http = HttpMockSequence([
        ({'status': '200'}, '{"access_token":"1/3w","expires_in":3600}'),
        ({'status': '401'}, ''),
        ({'status': '200'}, '{"access_token":"3/3w","expires_in":3600}'),
        ({'status': '200'}, 'echo_request_headers'),
    ])
    authorized_http = credentials.authorize(mock_http)
    _, content = authorized_http.request('http://example.org')
    return content
def test_token_refresh_success(self):
    # NOTE(review): despite the name, this exercises the *failure* path --
    # these credentials cannot refresh, so an expiry status must raise.
    # The name looks like a copy-paste misnomer; left unchanged to avoid
    # breaking test selection by name.
    for status_code in REFRESH_STATUS_CODES:
        mock_http = HttpMockSequence([
            ({'status': status_code}, ''),
        ])
        authorized_http = self.credentials.authorize(mock_http)
        try:
            authorized_http.request('http://example.com')
            self.fail('should throw exception if token expires')
        except AccessTokenCredentialsError:
            pass
        except Exception:
            self.fail('should only throw AccessTokenCredentialsError')
def test_token_refresh_success(self):
    """Each refresh-triggering status causes a token refresh and a retried call."""
    for status_code in REFRESH_STATUS_CODES:
        token_response = {'access_token': '1/3w', 'expires_in': 3600}
        mock_http = HttpMockSequence([
            # Expiry status, token-endpoint reply, then the retried request.
            ({'status': status_code}, ''),
            ({'status': '200'}, simplejson.dumps(token_response)),
            ({'status': '200'}, 'echo_request_headers'),
        ])
        authorized_http = self.credentials.authorize(mock_http)
        _, echoed_headers = authorized_http.request('http://example.com')
        self.assertEqual('Bearer 1/3w', echoed_headers['Authorization'])
        self.assertFalse(self.credentials.access_token_expired)
        self.assertEqual(token_response, self.credentials.token_response)
def test_token_refresh_failure(self):
    """A failed refresh raises AccessTokenRefreshError and clears token state."""
    for status_code in REFRESH_STATUS_CODES:
        mock_http = HttpMockSequence([
            # Expiry status followed by a refresh rejection.
            ({'status': status_code}, ''),
            ({'status': '400'}, '{"error":"access_denied"}'),
        ])
        authorized_http = self.credentials.authorize(mock_http)
        try:
            authorized_http.request('http://example.com')
            self.fail('should raise AccessTokenRefreshError exception')
        except AccessTokenRefreshError:
            pass
        self.assertTrue(self.credentials.access_token_expired)
        self.assertEqual(None, self.credentials.token_response)
def test_credentials_good(self):
    """A signed JWT assertion exchanges cleanly for a usable bearer token."""
    private_key = datafile('privatekey.%s' % self.format)
    credentials = SignedJwtAssertionCredentials(
        '*****@*****.**',
        private_key,
        scope='read+write',
        sub='*****@*****.**')
    mock_http = HttpMockSequence([
        # Token grant, then the echoed application request.
        ({'status': '200'}, '{"access_token":"1/3w","expires_in":3600}'),
        ({'status': '200'}, 'echo_request_headers'),
    ])
    authorized_http = credentials.authorize(mock_http)
    _, echoed_headers = authorized_http.request('http://example.org')
    self.assertEqual('Bearer 1/3w', echoed_headers['Authorization'])
def __enter__(self):
    """Build an MLEngineHook whose client replays the canned self._responses.

    Every outgoing request's positional args plus body are recorded in
    self._actual_requests so they can be validated at __exit__.
    """
    http = HttpMockSequence(self._responses)
    native_request_method = http.request
    # Collecting requests to validate at __exit__.
    def _request_wrapper(*args, **kwargs):
        self._actual_requests.append(args + (kwargs.get('body', ''),))
        return native_request_method(*args, **kwargs)
    http.request = _request_wrapper
    # NOTE(review): this fetches the live discovery document over the
    # network, making the test depend on googleapis.com being reachable --
    # consider a bundled discovery fixture.
    discovery = requests.get(
        'https://www.googleapis.com/discovery/v1/apis/ml/v1/rest')
    service_mock = build_from_document(discovery.json(), http=http)
    # NOTE(review): returning from inside the `with` unwinds the patch as
    # soon as the hook is constructed; get_conn is only patched during
    # MLEngineHook() construction -- confirm this is intended.
    with mock.patch.object(
        hook.MLEngineHook, 'get_conn', return_value=service_mock):
        return hook.MLEngineHook()
def test_delete_file_success(self, mock_from_service_account_file):  # pylint: disable=unused-argument
    """
    Test normal case for deleting files.
    """
    fake_file_ids = ['fake-file-id1', 'fake-file-id2']
    # Canned multipart/mixed batch body: one "204 OK" part per deleted file.
    batch_response = b'''--batch_foobarbaz
Content-Type: application/http
Content-Transfer-Encoding: binary
Content-ID: <response+0>

HTTP/1.1 204 OK
ETag: "etag/pony"\r\n\r\n
--batch_foobarbaz
Content-Type: application/http
Content-Transfer-Encoding: binary
Content-ID: <response+1>

HTTP/1.1 204 OK
ETag: "etag/sheep"\r\n\r\n
--batch_foobarbaz--'''
    http_mock_sequence = HttpMockSequence([
        # First, a request is made to the discovery API to construct a client object for Drive.
        ({'status': '200'}, self.mock_discovery_response_content),
        # Then, a request is made to delete files.
        ({'status': '200', 'content-type': 'multipart/mixed; boundary="batch_foobarbaz"'}, batch_response),
    ])
    test_client = DriveApi('non-existent-secrets.json', http=http_mock_sequence)
    if sys.version_info < (3, 4):
        # This is a simple smoke-test without checking the output because
        # python <3.4 doesn't support assertLogs.
        test_client.delete_files(fake_file_ids)
    else:
        # This is the full test case, which only runs under python 3.4+.
        with self.assertLogs(level='INFO') as captured_logs:  # pylint: disable=no-member
            test_client.delete_files(fake_file_ids)
        # Exactly one success log line per deleted file is expected.
        assert sum(
            'Successfully processed request' in msg
            for msg in captured_logs.output
        ) == 2
def test_media_io_base_download_handle_range_request_ignored(self):
    """A server that ignores the Range header still completes the download."""
    self.request.http = HttpMockSequence([
        ({'status': '200'}, b'12345'),
    ])
    download = MediaIoBaseDownload(
        fd=self.fd, request=self.request, chunksize=6)
    # Freshly-constructed downloader state.
    self.assertEqual(self.fd, download._fd)
    self.assertEqual(6, download._chunksize)
    self.assertEqual(0, download._progress)
    self.assertEqual(None, download._total_size)
    self.assertEqual(False, download._done)
    self.assertEqual(self.request.uri, download._uri)
    chunk_status, is_done = download.next_chunk()
    # The whole body arrives in one chunk; the total size stays unknown.
    self.assertEqual(self.fd.getvalue(), b'12345')
    self.assertEqual(True, is_done)
    self.assertEqual(5, download._progress)
    self.assertEqual(None, download._total_size)
    self.assertEqual(5, chunk_status.resumable_progress)
def test_no_retry_401_fails_fast(self):
    """With no retries requested, a 401 raises immediately and never sleeps."""
    http = HttpMockSequence([
        ({'status': '401'}, ''),
        ({'status': '200'}, '{}'),
    ])
    model = JsonModel()
    request = HttpRequest(
        http,
        model.response,
        u'https://www.googleapis.com/someapi/v1/collection/?foo=bar',
        method=u'POST',
        body=u'{}',
        headers={'content-type': 'application/json'})
    request._rand = lambda: 1.0
    request._sleep = mock.MagicMock()
    with self.assertRaises(HttpError):
        request.execute()
    request._sleep.assert_not_called()
def testGetImage(self):
    """Tests that GetImage returns the correct image."""
    found_http = HttpMockSequence([
        ({'status': '200'}, '{"name": "foo-image"}'),
    ])
    # Route all API traffic through the canned 200 response.
    self.PatchObject(
        gce.GceContext, '_BuildRetriableRequest',
        autospec=True,
        side_effect=self._MockOutBuildRetriableRequest(found_http))
    gce_context = gce.GceContext.ForServiceAccount(
        self._PROJECT, self._ZONE, self.json_key_file)
    # GetImage should succeed and return the decoded resource.
    self.assertDictEqual(gce_context.GetImage('foo-image'),
                         dict(name='foo-image'))
def test_delete_files_older_than(self, mock_from_service_account_file):  # pylint: disable=unused-argument
    """
    Tests the logic to delete files older than a certain age.
    """
    five_days_ago = datetime.now(UTC) - timedelta(days=5)

    def _text_file(idx, created_time):
        # One fake Drive file record.
        return {
            'id': 'fake-text-file-id-{}'.format(idx),
            'createdTime': created_time,
            'mimeType': 'text/plain'
        }

    # Odd indices are newer than the cutoff; even indices are older.
    fake_newish_files = [
        _text_file(idx, five_days_ago + timedelta(days=1))
        for idx in range(1, 10, 2)
    ]
    fake_old_files = [
        _text_file(idx, five_days_ago - timedelta(days=14))
        for idx in range(2, 10, 2)
    ]
    fake_files = fake_newish_files + fake_old_files
    http_mock_sequence = HttpMockSequence([
        # Discovery request used to construct the Drive client.
        (
            {'status': '200'},
            self.mock_discovery_response_content,
        ),
        # Listing request returning both newish and old files.
        (
            {'status': '200', 'content-type': 'application/json'},
            json.dumps({'files': fake_files}, default=lambda x: x.isoformat()).encode('utf-8'),
        ),
    ])
    with patch.object(DriveApi, 'delete_files', return_value=None) as mock_delete_files:
        test_client = DriveApi('non-existent-secrets.json', http=http_mock_sequence)
        test_client.delete_files_older_than('fake-folder-id', five_days_ago)
        # Only the files older than the cutoff should be slated for deletion.
        mock_delete_files.assert_called_once_with(
            ['fake-text-file-id-{}'.format(idx) for idx in range(2, 10, 2)])
def test_list_permissions_success(self, mock_from_service_account_file):  # pylint: disable=unused-argument
    """
    Test normal case for listing permissions on files.
    """
    fake_file_ids = ['fake-file-id0', 'fake-file-id1']
    # Canned multipart/mixed batch body: one JSON permissions list per file.
    batch_response = b'''--batch_foobarbaz
Content-Type: application/http
Content-Transfer-Encoding: binary
Content-ID: <response+0>

HTTP/1.1 200 OK
Content-Type: application/json
ETag: "etag/pony"\r\n\r\n{"permissions": [{"emailAddress": "*****@*****.**", "role": "reader"}]}
--batch_foobarbaz
Content-Type: application/http
Content-Transfer-Encoding: binary
Content-ID: <response+1>

HTTP/1.1 200 OK
Content-Type: application/json
ETag: "etag/sheep"\r\n\r\n{"permissions": [{"emailAddress": "*****@*****.**", "role": "writer"}]}
--batch_foobarbaz--'''
    http_mock_sequence = HttpMockSequence([
        # First, a request is made to the discovery API to construct a client object for Drive.
        ({'status': '200'}, self.mock_discovery_response_content),
        # Then, a request is made to list permissions on the files.
        ({'status': '200', 'content-type': 'multipart/mixed; boundary="batch_foobarbaz"'}, batch_response),
    ])
    test_client = DriveApi('non-existent-secrets.json', http=http_mock_sequence)
    resp = test_client.list_permissions_for_files(fake_file_ids)
    # Each file id maps to its decoded permissions list.
    six.assertCountEqual(
        self,
        resp,
        {
            'fake-file-id0': [{'emailAddress': '*****@*****.**', 'role': 'reader'}],
            'fake-file-id1': [{'emailAddress': '*****@*****.**', 'role': 'writer'}],
        },
    )
def test_get_job(self):
    """Test getting a job by id.

    Fix: read the fixture files with context managers -- the original
    leaked both file handles via bare open(...).read().
    """
    with open('tests/fixtures/ml_engine/first_result.json', 'rb') as fixture:
        first_result = fixture.read()
    with open('tests/fixtures/ml_engine/get_job_succeeded.json', 'rb') as fixture:
        get_job_succeeded = fixture.read()
    http = HttpMockSequence([
        ({'status': '200'}, first_result),
        ({'status': '200'}, get_job_succeeded),
    ])
    job_id = "globoplay_autoencoder_keras_2018_08_06_10_00_37"
    ml_engine_test = ml_engine.MlEngine(
        "PROJECT", "BUCKET_NAME", "REGION", http=http)
    job = ml_engine_test.get_job(job_id)
    self.assertEqual(job['jobId'], job_id)
def test_get_deployment(self):
    """Latest vs. oldest deployment is selected according to desc_ordered."""
    deployments = [
        Deployment("kf-vfoo-n00", "2019-04-01T23:59:59+00:00"),
        Deployment("kf-vfoo-n01", "2019-04-02T23:59:59+00:00"),
        Deployment("kf-vfoo-n02", "2019-04-03T23:59:59+00:00"),
    ]
    list_resp = {
        "deployments": create_mock_list_resp(deployments),
    }
    # One full round of responses per get_deployment call below:
    # DM discovery doc, deployment listing, then one resource lookup each.
    one_round = [
        ({'status': '200'}, self.dm_api),
        ({'status': '200'}, json.dumps(list_resp)),
    ] + [
        ({'status': '200'}, json.dumps(create_mock_resource_resp(d)))
        for d in deployments
    ]
    http = HttpMockSequence(one_round * 2)
    # get latest deployment.
    self.assertEqual(
        get_kf_testing_cluster.get_deployment(
            TEST_PROJECT, "kf-vfoo", TEST_LABEL, http=http),
        get_kf_testing_cluster.get_deployment_endpoint(TEST_PROJECT, "kf-vfoo-n02"))
    # get oldest deployment.
    self.assertEqual(
        get_kf_testing_cluster.get_deployment(
            TEST_PROJECT, "kf-vfoo", TEST_LABEL, http=http, desc_ordered=False),
        get_kf_testing_cluster.get_deployment_endpoint(TEST_PROJECT, "kf-vfoo-n00"))
def test_walk_files_two_page(self, mock_from_service_account_file):  # pylint: disable=unused-argument
    """
    Subfolders are requested, but the response is paginated.
    """
    fake_folders = [
        {
            'id': 'fake-folder-id-{}'.format(idx),
            'name': 'fake-folder-name-{}'.format(idx),
            'mimeType': 'application/vnd.google-apps.folder'
        }
        for idx in range(10)
    ]
    page_one, page_two = fake_folders[:7], fake_folders[7:]
    http_mock_sequence = HttpMockSequence([
        # Discovery request used to construct the Drive client.
        (
            {'status': '200'},
            self.mock_discovery_response_content,
        ),
        # First page of results; nextPageToken signals that more pages follow.
        (
            {'status': '200', 'content-type': 'application/json'},
            json.dumps({'files': page_one, 'nextPageToken': 'fake-next-page-token'}).encode('utf-8'),
        ),
        # Second and final page: no nextPageToken means no more pages.
        (
            {'status': '200', 'content-type': 'application/json'},
            json.dumps({'files': page_two}).encode('utf-8'),
        ),
    ])
    test_client = DriveApi('non-existent-secrets.json', http=http_mock_sequence)
    response = test_client.walk_files(
        'fake-folder-id', mimetype=FOLDER_MIMETYPE, recurse=False)
    # walk_files does not return mimeTypes, so drop them before comparing.
    for fake_folder in fake_folders:
        del fake_folder['mimeType']
    six.assertCountEqual(self, response, fake_folders)
def test_execute_request_body(self):
    """The serialized batch body is valid multipart with the expected part headers.

    Fix: replace the Python-2-only `except BatchError, e:` comma syntax
    with the `as` form, which is valid on Python 2.6+ and Python 3.
    """
    batch = BatchHttpRequest()
    batch.add(self.request1)
    batch.add(self.request2)
    http = HttpMockSequence([
        ({'status': '200',
          'content-type': 'multipart/mixed; boundary="batch_foobarbaz"'},
         'echo_request_body'),
    ])
    try:
        batch.execute(http=http)
        self.fail('Should raise exception')
    except BatchError as e:
        # The echoed body starts with the multipart boundary marker.
        boundary, _ = e.content.split(None, 1)
        self.assertEqual('--', boundary[:2])
        parts = e.content.split(boundary)
        # Preamble, two request parts, and the closing '--' epilogue.
        self.assertEqual(4, len(parts))
        self.assertEqual('', parts[0])
        self.assertEqual('--', parts[3].rstrip())
        header = parts[1].splitlines()[1]
        self.assertEqual('Content-Type: application/http', header)
def test_no_retry_succeeds(self):
    """When the first response succeeds, no retry sleep ever happens."""
    num_retries = 5
    http = HttpMockSequence([({'status': '200'}, '{}')] * num_retries)
    model = JsonModel()
    request = HttpRequest(
        http,
        model.response,
        u'https://www.googleapis.com/someapi/v1/collection/?foo=bar',
        method=u'POST',
        body=u'{}',
        headers={'content-type': 'application/json'})
    sleeptimes = []
    # Record any backoff sleeps instead of actually sleeping.
    request._sleep = sleeptimes.append
    request._rand = lambda: 10
    request.execute(num_retries=num_retries)
    self.assertEqual(0, len(sleeptimes))
def test_resumable_media_handle_uploads_of_unknown_size(self):
    """A resumable upload of unknown total size sends 'bytes 0-9/*'."""
    http = HttpMockSequence([
        # Resumable session creation, then the echoed chunk upload.
        ({'status': '200', 'location': 'http://upload.example.com'}, ''),
        ({'status': '200'}, 'echo_request_headers_as_json'),
    ])
    self.http = HttpMock(datafile('zoo.json'), {'status': '200'})
    zoo = build('zoo', 'v1', http=self.http)

    # Create an upload that doesn't know the full size of the media.
    class IoBaseUnknownLength(MediaUpload):
        def chunksize(self):
            return 10

        def mimetype(self):
            return 'image/png'

        def size(self):
            # Total size is unknown.
            return None

        def resumable(self):
            return True

        def getbytes(self, begin, length):
            return '0123456789'

    upload = IoBaseUnknownLength()
    request = zoo.animals().insert(media_body=upload, body=None)
    status, body = request.next_chunk(http=http)
    # With an unknown total, the Content-Range must end in '/*'.
    self.assertEqual(body, {
        'Content-Range': 'bytes 0-9/*',
        'Content-Length': '10',
    })
def testGetImageRaisesIfImageNotFound(self):
    """Tests that GetImage raises an exception when the image is not found."""
    not_found_http = HttpMockSequence([
        ({'status': '404'}, 'Image not found.'),
    ])
    # Route all API traffic through the canned 404 response.
    self.PatchObject(
        gce.GceContext, '_BuildRetriableRequest',
        autospec=True,
        side_effect=self._MockOutBuildRetriableRequest(not_found_http))
    gce_context = gce.GceContext.ForServiceAccount(
        self._PROJECT, self._ZONE, self.json_key_file)
    # A 404 from the API surfaces as ResourceNotFoundError.
    with self.assertRaises(gce.ResourceNotFoundError):
        gce_context.GetImage('not-exising-image')
def test_media_io_base_download(self):
    """A five-byte body is delivered across two chunked range responses."""
    self.request.http = HttpMockSequence([
        ({'status': '200', 'content-range': '0-2/5'}, b'123'),
        ({'status': '200', 'content-range': '3-4/5'}, b'45'),
    ])
    self.assertEqual(True, self.request.http.follow_redirects)
    download = MediaIoBaseDownload(
        fd=self.fd, request=self.request, chunksize=3)
    # Freshly-constructed downloader state.
    self.assertEqual(self.fd, download._fd)
    self.assertEqual(3, download._chunksize)
    self.assertEqual(0, download._progress)
    self.assertEqual(None, download._total_size)
    self.assertEqual(False, download._done)
    self.assertEqual(self.request.uri, download._uri)
    # First chunk: three of five bytes, not yet done.
    chunk_status, is_done = download.next_chunk()
    self.assertEqual(self.fd.getvalue(), b'123')
    self.assertEqual(False, is_done)
    self.assertEqual(3, download._progress)
    self.assertEqual(5, download._total_size)
    self.assertEqual(3, chunk_status.resumable_progress)
    # Second chunk completes the download.
    chunk_status, is_done = download.next_chunk()
    self.assertEqual(self.fd.getvalue(), b'12345')
    self.assertEqual(True, is_done)
    self.assertEqual(5, download._progress)
    self.assertEqual(5, download._total_size)
def test_execute_task_exceeding_max_attmpts(self, cleanup, write_to_cloud_logging, delete_task, prepare_dir):
    # NOTE(review): "attmpts" in the method name looks like a typo for
    # "attempts"; left unchanged to avoid breaking test selection by name.
    http = HttpMockSequence([
        # Pull returns a task already over its attempt budget, then the
        # delete-task call succeeds.
        ({'status': '200'}, self._FAKE_PULL_TASKS_RESPONSE_WITH_ATTEMPTS),
        ({'status': '200'}, self._FAKE_DELETE_TASK_RESPONSE),
    ])
    client = self._create_cloudtasks_client_testing(http=http)
    prepare_dir.return_value = (
        'task_id', '/tmp', '/tmp/artman-config.yaml', '/tmp/artman.log')
    write_to_cloud_logging.return_value = None
    cleanup.return_value = None
    cloudtasks_conductor._pull_and_execute_tasks(
        task_client=client, queue_name=self._FAKE_QUEUE_NAME)
    # The exhausted task is deleted and cleaned up, and its log shipped.
    delete_task.assert_called_once()
    cleanup.assert_called_once()
    write_to_cloud_logging.assert_called_with('task_id', '/tmp/artman.log')
def test_list_deployments_name_filter(self):
    """Deployments whose names carry extra suffixes are filtered out."""
    deployments = [
        Deployment("kf-vfoo-n00", "2019-04-01T23:59:59+00:00"),
        Deployment("kf-vfoo-n01", "2019-04-02T23:59:59+00:00"),
        Deployment("kf-vfoo-n02-storage", "2019-04-03T23:59:59+00:00"),
    ]
    list_resp = {
        "deployments": create_mock_list_resp(deployments),
    }
    # Only the first two names match, so only two resource lookups happen.
    http = HttpMockSequence([
        ({'status': '200'}, self.dm_api),
        ({'status': '200'}, json.dumps(list_resp)),
        ({'status': '200'}, json.dumps(create_mock_resource_resp(deployments[0]))),
        ({'status': '200'}, json.dumps(create_mock_resource_resp(deployments[1]))),
    ])
    actual = get_kf_testing_cluster.list_deployments(
        TEST_PROJECT, "kf-vfoo", TEST_LABEL, http=http)
    expected = sorted(
        create_expected_list_resp(deployments[0:2]),
        key=lambda entry: entry["insertTime"],
        reverse=True)
    self.assertListEqual(actual, expected)
def test_set_model_version_as_default(self):
    """Test the method that sets a version as the model default.

    Fixes: read fixture files with context managers (the original leaked
    both handles via bare open(...).read()) and repair the malformed
    quadruple-quote docstring delimiter.
    """
    with open('tests/fixtures/ml_engine/first_result.json', 'rb') as fixture:
        first_result = fixture.read()
    with open('tests/fixtures/ml_engine/versions_models_empty.json', 'rb') as fixture:
        versions_models_empty = fixture.read()
    http = HttpMockSequence([
        ({'status': '200'}, first_result),
        ({'status': '200'}, versions_models_empty),
    ])
    ml_engine_test = ml_engine.MlEngine(
        "PROJECT", "BUCKET_NAME", "REGION", http=http)
    result = ml_engine_test.set_version_as_default("model_teste", "v1_0")
    self.assertEqual(result.body, '{}')
    self.assertEqual(
        result.uri,
        "https://ml.googleapis.com/v1/projects/PROJECT/models/model_teste/versions/v1_0:setDefault?alt=json"
    )
def test_unmatching_query(self, mock_credclass, mock_cache):
    """A query whose dimension filter matches nothing reports zero pageviews."""
    # Disable the discovery cache, so that we can fully control the http requests
    # with HttpMockSequence below
    mock_cache.return_value = None
    # This is the type of data we get back if the rev doesn't match anything.
    empty_response = """{"reports": [
        {
            "data": {
                "samplingSpaceSizes": ["2085651"],
                "totals": [{"values": ["0"]}],
                "samplesReadCounts": ["999997"]
            },
            "columnHeader": {
                "dimensions": ["ga:dimension12"],
                "metricHeader": {
                    "metricHeaderEntries": [
                        {"type": "INTEGER", "name": "ga:uniquePageviews"}
                    ]
                }
            }
        }
    ]}"""
    mock_creds = mock_credclass.from_json_keyfile_dict.return_value
    # First response serves the discovery document; second serves the report.
    mock_creds.authorize.return_value = HttpMockSequence([
        ({"status": "200"}, self.valid_discovery),
        ({"status": "200"}, empty_response),
    ])
    results = analytics_upageviews([42], self.start_date)
    # The requested rev id maps to zero unique pageviews.
    self.assertEqual(results, {42: 0})
def testRetryOnServerErrorHttpRequest(self):
    """Tests that 5xx errors are retried."""
    # Fake http sequence: two 502s before a successful 200.
    flaky_http = HttpMockSequence([
        ({'status': '502'}, 'Server error'),
        ({'status': '502'}, 'Server error'),
        ({'status': '200'}, '{"name":"foo-image"}'),
    ])
    self.PatchObject(
        gce.GceContext, '_BuildRetriableRequest',
        autospec=True,
        side_effect=self._MockOutBuildRetriableRequest(flaky_http))
    # Disable retry and expect the request to fail.
    gce.GceContext.RETRIES = 0
    gce_context = gce.GceContext.ForServiceAccount(
        self._PROJECT, self._ZONE, self.json_key_file)
    with self.assertRaises(HttpError):
        gce_context.GetImage('foo-image')
    # Enable retry and expect the request to succeed.
    gce.GceContext.RETRIES = 2
    gce_context = gce.GceContext.ForServiceAccount(
        self._PROJECT, self._ZONE, self.json_key_file)
    self.assertDictEqual(gce_context.GetImage('foo-image'),
                         dict(name='foo-image'))
def test_check_create_folder(self):
    """_check_create_folder creates the needed folders and returns the article folder id.

    Fix: read the discovery fixture with a context manager -- the original
    leaked the handle via bare open(...).read().
    """
    api_key = "your_api_key"
    image_folder = "imagehosting"
    image_folder_id = "imagehosting-id"
    article_title = "test_article"
    article_folder_id = "test-article-id"
    with open("tests/data/drive.json", "rb") as fixture:
        drive_discovery = fixture.read()
    http = HttpMockSequence([
        # Drive discovery document.
        ({"status": "200"}, drive_discovery),
        # Presumably: blog-folder search (empty) then its creation,
        # article-folder search (empty) then its creation -- TODO confirm
        # against GdriveService._check_create_folder.
        ({"status": "200"}, json.dumps({})),
        ({"status": "200"}, json.dumps({"id": image_folder_id})),
        ({"status": "200"}, json.dumps({})),
        ({"status": "200"}, json.dumps({"id": article_folder_id})),
    ])
    mock_gdrive_config = MagicMock()
    mock_gdrive_config.article_title = article_title
    mock_gdrive_config.blog_folder = image_folder
    mock_gdrive_config.credit = api_key
    service = build("drive", "v3", http=http, developerKey=api_key)
    gdrive_service = GdriveService(config=mock_gdrive_config, api_service=service)
    result = gdrive_service._check_create_folder()
    assert result == article_folder_id
def test_comment_files_success(self, mock_from_service_account_file):  # pylint: disable=unused-argument
    """
    Test normal case for commenting on files.
    """
    fake_file_ids = ['fake-file-id0', 'fake-file-id1']
    # Canned multipart/mixed batch body: one created-comment JSON per file.
    batch_response = b'''--batch_foobarbaz
Content-Type: application/http
Content-Transfer-Encoding: binary
Content-ID: <response+0>

HTTP/1.1 204 OK
ETag: "etag/pony"\r\n\r\n{"id": "fake-comment-id0"}
--batch_foobarbaz
Content-Type: application/http
Content-Transfer-Encoding: binary
Content-ID: <response+1>

HTTP/1.1 204 OK
ETag: "etag/sheep"\r\n\r\n{"id": "fake-comment-id1"}
--batch_foobarbaz--'''
    http_mock_sequence = HttpMockSequence([
        # First, a request is made to the discovery API to construct a client object for Drive.
        ({'status': '200'}, self.mock_discovery_response_content),
        # Then, a request is made to add comments to the files.
        ({'status': '200', 'content-type': 'multipart/mixed; boundary="batch_foobarbaz"'}, batch_response),
    ])
    test_client = DriveApi('non-existent-secrets.json', http=http_mock_sequence)
    # Comment the same message on every file.
    resp = test_client.create_comments_for_files(list(zip(fake_file_ids, cycle(['some comment message']))))
    # Each file id maps to the created comment's metadata.
    six.assertCountEqual(
        self,
        resp,
        {
            'fake-file-id0': {'id': 'fake-comment-id0'},
            'fake-file-id1': {'id': 'fake-comment-id1'},
        },
    )
def test_job_list(self):
    """Test job listing and filtering by final state.

    Fix: read the fixture files with context managers -- the original
    leaked both file handles via bare open(...).read().
    """
    with open('tests/fixtures/ml_engine/jobs.json', 'rb') as fixture:
        jobs_mock = fixture.read()
    with open('tests/fixtures/ml_engine/first_result.json', 'rb') as fixture:
        first_result = fixture.read()
    http = HttpMockSequence([
        ({'status': '200'}, first_result),
        ({'status': '200'}, jobs_mock),
        ({'status': '200'}, jobs_mock),
        ({'status': '200'}, jobs_mock),
    ])
    ml_engine_test = ml_engine.MlEngine(
        "PROJECT", "BUCKET_NAME", "REGION", http=http)
    # Default filtering excludes one job; disabling it returns all 20;
    # an unknown state matches nothing.
    self.assertEqual(len(ml_engine_test.list_jobs()), 19)
    self.assertEqual(
        len(ml_engine_test.list_jobs(filter_final_state=None)), 20)
    self.assertEqual(
        len(ml_engine_test.list_jobs(filter_final_state="STATE_WRONG")), 0)
def test_create_file_retry_success(self, mock_from_service_account_file):
    """
    Test rate limit and retry during file upload.
    """
    fake_file_id = 'fake-file-id'
    http_mock_sequence = HttpMockSequence([
        # Discovery request used to construct the Drive client.
        ({'status': '200'}, self.mock_discovery_response_content),
        # Upload attempt hit by rate limiting, which should force a retry.
        self._http_mock_sequence_retry(),
        # The retried upload succeeds.
        ({'status': '200'}, '{{"id": "{}"}}'.format(fake_file_id)),
    ])
    test_client = DriveApi('non-existent-secrets.json', http=http_mock_sequence)
    response = test_client.create_file_in_folder(
        'fake-folder-id',
        'Fake Filename',
        BytesIO('fake file contents'.encode('ascii')),
        'text/plain',
    )
    # No explicit retry assertion is needed: the response can only contain
    # fake_file_id if the final (post-retry) response was consumed.
    assert response == fake_file_id
def test_execute_initial_refresh_oauth2(self):
    """Expired credentials are refreshed exactly once before the batch is sent."""
    batch = BatchHttpRequest()
    callbacks = Callbacks()
    cred = MockCredentials('Foo', expired=True)
    http = HttpMockSequence([
        ({'status': '200',
          'content-type': 'multipart/mixed; boundary="batch_foobarbaz"'},
         BATCH_SINGLE_RESPONSE),
    ])
    cred.authorize(http)
    batch.add(self.request1, callback=callbacks.f)
    batch.execute(http=http)
    # The single response is delivered through the callback without error...
    self.assertEqual({'foo': 42}, callbacks.responses['1'])
    self.assertIsNone(callbacks.exceptions['1'])
    # ...and the credentials were refreshed, authorized, and applied once.
    self.assertEqual(1, cred._refreshed)
    self.assertEqual(1, cred._authorized)
    self.assertEqual(1, cred._applied)
def test_list_deployments_default_insertime(self):
    """A deployment missing insertTime is assigned the pre-epoch default stamp."""
    deployments = [
        Deployment("kf-vfoo-n00", "2019-04-01T23:59:59+00:00"),
        Deployment("kf-vfoo-n01", "2019-04-02T23:59:59+00:00"),
        Deployment("kf-vfoo-n02", "2019-04-03T23:59:59+00:00"),
    ]
    list_resp = {
        "deployments": create_mock_list_resp(deployments),
    }
    # Remove insertTime for the method to attach default timestamp.
    list_resp["deployments"][-1].pop("insertTime", None)
    responses = [
        ({'status': '200'}, self.dm_api),
        ({'status': '200'}, json.dumps(list_resp)),
    ] + [
        ({'status': '200'}, json.dumps(create_mock_resource_resp(d)))
        for d in deployments
    ]
    http = HttpMockSequence(responses)
    actual = get_kf_testing_cluster.list_deployments(
        TEST_PROJECT, "kf-vfoo", TEST_LABEL, http=http)
    expected = create_expected_list_resp(deployments)
    # The entry stripped of insertTime falls back to just before the epoch.
    expected[-1]["insertTime"] = "1969-12-31T23:59:59+00:00"
    expected.sort(key=lambda entry: entry["insertTime"], reverse=True)
    self.assertListEqual(actual, expected)
def test_channel_fetch_videos(self, mocked_youtube, mocked_video):
    """fetch_videos pages through the playlist via the mocked YouTube client."""
    self.init_channel()
    data_dir = join(dirname(__file__), "../data")

    def _read_fixture(filename):
        # Load a canned API response from the test data directory.
        with open(join(data_dir, filename)) as fixture:
            return fixture.read()

    responses = [
        ({"status": "200"}, _read_fixture("youtube_api.json")),
        ({"status": "200"}, _read_fixture("youtube_fetch_videos_1.json")),
        ({"status": "200"}, _read_fixture("youtube_fetch_videos_2.json")),
    ]
    mocked_youtube.return_value = build(
        self.app.config["YOUTUBE_API_SERVICE_NAME"],
        self.app.config["YOUTUBE_API_VERSION"],
        http=HttpMockSequence(responses),
        cache_discovery=False,
        developerKey=self.app.config["YOUTUBE_API_DEVELOPER_KEY"],
    )
    # Pretend roughly half of the fetched videos already exist in the DB.
    mocked_video.query.get.side_effect = lambda x: bool(hash(x) % 2)
    self.test_channel.fetch_videos()
def test_turn_get_into_post(self):
    """An over-long GET is converted to a POST with the query in the body."""
    def _postproc(resp, content):
        return content

    http = HttpMockSequence([
        ({'status': '200'}, 'echo_request_body'),
        ({'status': '200'}, 'echo_request_headers'),
    ])

    # Send a long query parameter.
    long_query = {
        'q': 'a' * MAX_URI_LENGTH + '?&'
    }
    req = HttpRequest(
        http,
        _postproc,
        'http://example.com?' + urllib.urlencode(long_query),
        method='GET',
        body=None,
        headers={},
        methodId='foo',
        resumable=None)

    # Query parameters should be sent in the body.
    response = req.execute()
    self.assertEqual('q=' + 'a' * MAX_URI_LENGTH + '%3F%26', response)

    # Extra headers should be set.
    response = req.execute()
    self.assertEqual('GET', response['x-http-method-override'])
    self.assertEqual(str(MAX_URI_LENGTH + 8), response['content-length'])
    self.assertEqual(
        'application/x-www-form-urlencoded', response['content-type'])
def test_media_io_base_next_chunk_retries(self):
    """Transient errors in both upload phases are retried with exponential backoff.

    Fix: read the fixture through a context manager -- the original left
    the file handle `f` open for the lifetime of the test.
    """
    with open(datafile('small.png'), 'rb') as f:
        fd = BytesIO(f.read())
    upload = MediaIoBaseUpload(
        fd=fd, mimetype='image/png', chunksize=500, resumable=True)

    # Simulate errors for both the request that creates the resumable upload
    # and the upload itself.
    http = HttpMockSequence([
        ({'status': '500'}, ''),
        ({'status': '500'}, ''),
        ({'status': '503'}, ''),
        ({'status': '200', 'location': 'location'}, ''),
        ({'status': '403'}, USER_RATE_LIMIT_EXCEEDED_RESPONSE),
        ({'status': '403'}, RATE_LIMIT_EXCEEDED_RESPONSE),
        ({'status': '429'}, ''),
        ({'status': '200'}, '{}'),
    ])

    model = JsonModel()
    uri = u'https://www.googleapis.com/someapi/v1/upload/?foo=bar'
    method = u'POST'
    request = HttpRequest(
        http, model.response, uri, method=method, headers={}, resumable=upload)

    sleeptimes = []
    # Record backoff sleeps instead of sleeping; fixed _rand makes the
    # doubling sequence deterministic.
    request._sleep = sleeptimes.append
    request._rand = lambda: 10

    request.execute(num_retries=3)
    # Three backoff sleeps per phase: 10*2, 10*4, 10*8, twice over.
    self.assertEqual([20, 40, 80, 20, 40, 80], sleeptimes)
def test_no_retry_fails_fast(self):
    """With the default of zero retries, a 500 raises immediately without sleeping.

    Fix: replace the manual try/self.fail/except pattern with
    assertRaises, matching the style already used by
    test_no_retry_401_fails_fast in this suite.
    """
    http = HttpMockSequence([
        ({'status': '500'}, ''),
        ({'status': '200'}, '{}'),
    ])
    model = JsonModel()
    uri = u'https://www.googleapis.com/someapi/v1/collection/?foo=bar'
    method = u'POST'
    request = HttpRequest(
        http,
        model.response,
        uri,
        method=method,
        body=u'{}',
        headers={'content-type': 'application/json'})

    request._rand = lambda: 1.0
    # Any attempt to back off is itself a failure.
    request._sleep = lambda _: self.fail('sleep should not have been called.')

    with self.assertRaises(HttpError):
        request.execute()
def test_media_io_base_download_empty_file(self):
    """Downloading a zero-byte file completes in a single empty chunk."""
    self.request.http = HttpMockSequence([
        ({'status': '200', 'content-range': '0-0/0'}, b''),
    ])
    download = MediaIoBaseDownload(
        fd=self.fd, request=self.request, chunksize=3)
    # Freshly-constructed downloader state.
    self.assertEqual(self.fd, download._fd)
    self.assertEqual(0, download._progress)
    self.assertEqual(None, download._total_size)
    self.assertEqual(False, download._done)
    self.assertEqual(self.request.uri, download._uri)
    chunk_status, is_done = download.next_chunk()
    # Immediately done, with zero bytes transferred.
    self.assertEqual(True, is_done)
    self.assertEqual(0, download._progress)
    self.assertEqual(0, download._total_size)
    self.assertEqual(0, chunk_status.progress())