def test_no_unicode_in_request_params(self):
    access_token = 'foo'
    client_id = 'some_client_id'
    client_secret = 'cOuDdkfjxxnv+'
    refresh_token = '1/0/a.df219fjls0'
    token_expiry = str(datetime.datetime.utcnow())
    token_uri = str(GOOGLE_TOKEN_URI)
    revoke_uri = str(GOOGLE_REVOKE_URI)
    user_agent = 'refresh_checker/1.0'
    credentials = OAuth2Credentials(access_token, client_id, client_secret,
                                    refresh_token, token_expiry, token_uri,
                                    user_agent, revoke_uri=revoke_uri)

    http = HttpMock(headers={'status': '200'})
    http = credentials.authorize(http)
    http.request('http://example.com', method='GET', headers={'foo': 'bar'})
    for k, v in http.headers.items():
        self.assertEqual(str, type(k))
        self.assertEqual(str, type(v))

    # Test again with unicode strings that can't simply be converted to ASCII.
    try:
        http.request(
            'http://example.com', method='GET', headers={'foo': '\N{COMET}'})
        self.fail('Expected exception to be raised.')
    except NonAsciiHeaderError:
        pass

def test_token_response(self):
    self.credentials.token_response = 'foobar'
    instance = OAuth2Credentials.from_json(self.credentials.to_json())
    self.assertEqual('foobar', instance.token_response)
def test_get_youtube_playlistid(self):
    discovery = os.path.join(settings.BASE_DIR, "../test/data/youtube-discovery.json")
    http = HttpMock(discovery, {'status': '200'})
    youtube = build(settings.YOUTUBE_API_SERVICE_NAME,
                    settings.YOUTUBE_API_VERSION,
                    developerKey='',
                    http=http)
    path = os.path.join(settings.BASE_DIR, "../test/data/youtube-playlistid.json")
    http = HttpMock(path, {'status': '200'})
    self.assertEqual(get_youtube_playlistid(youtube, 'IETF98', http=http),
                     'PLC86T-test')
def test_get_youtube_videos(self):
    discovery = os.path.join(settings.BASE_DIR, "../test/data/youtube-discovery.json")
    http = HttpMock(discovery, {'status': '200'})
    youtube = build(settings.YOUTUBE_API_SERVICE_NAME,
                    settings.YOUTUBE_API_VERSION,
                    developerKey='',
                    http=http)
    path = os.path.join(settings.BASE_DIR, "../test/data/youtube-playlistitems.json")
    http = HttpMock(path, {'status': '200'})
    videos = get_youtube_videos(youtube, 'PLC86T', http=http)
    self.assertEqual(len(videos), 2)
def test_resumable_media_handle_resume_of_upload_of_unknown_size(self):
    http = HttpMockSequence([
        ({'status': '200', 'location': 'http://upload.example.com'}, ''),
        ({'status': '400'}, ''),
    ])

    self.http = HttpMock(datafile('zoo.json'), {'status': '200'})
    zoo = build('zoo', 'v1', http=self.http)

    # Create an upload that doesn't know the full size of the media.
    fd = io.StringIO('data goes here')

    upload = MediaIoBaseUpload(
        fd=fd, mimetype='image/png', chunksize=500, resumable=True)

    request = zoo.animals().insert(media_body=upload, body=None)

    # Put it in an error state.
    self.assertRaises(HttpError, request.next_chunk, http=http)

    http = HttpMockSequence([
        ({'status': '400', 'range': '0-5'}, 'echo_request_headers_as_json'),
    ])

    try:
        # Should resume the upload by first querying the status of the upload.
        request.next_chunk(http=http)
    except HttpError as e:
        expected = {
            'Content-Range': 'bytes */14',
            'content-length': '0'
        }
        self.assertEqual(
            expected, simplejson.loads(e.content),
            'Should send an empty body when requesting the current upload status.')
def test_resumable_media_good_upload_from_execute(self):
    """Not a multipart upload."""
    self.http = HttpMock(datafile('zoo.json'), {'status': '200'})
    zoo = build('zoo', 'v1', http=self.http)
    media_upload = MediaFileUpload(datafile('small.png'), resumable=True)
    request = zoo.animals().insert(media_body=media_upload, body=None)
    assertUrisEqual(
        self,
        'https://www.googleapis.com/upload/zoo/v1/animals?uploadType=resumable&alt=json',
        request.uri)

    http = HttpMockSequence([
        ({'status': '200',
          'location': 'http://upload.example.com'}, ''),
        ({'status': '308',
          'location': 'http://upload.example.com/2',
          'range': '0-12'}, ''),
        ({'status': '308',
          'location': 'http://upload.example.com/3',
          'range': '0-%d' % media_upload.size()}, ''),
        ({'status': '200'}, '{"foo": "bar"}'),
    ])

    body = request.execute(http=http)
    self.assertEquals(body, {"foo": "bar"})
def test_media_io_base_stream_chunksize_resume(self):
    self.http = HttpMock(datafile('zoo.json'), {'status': '200'})
    zoo = build('zoo', 'v1', http=self.http)

    try:
        import io

        # Set up a seekable stream and try to upload in chunks.
        fd = io.BytesIO(b'0123456789')
        media_upload = MediaIoBaseUpload(
            fd=fd, mimetype='text/plain', chunksize=5, resumable=True)

        request = zoo.animals().insert(media_body=media_upload, body=None)

        # The single chunk fails, pull the content sent out of the exception.
        http = HttpMockSequence([
            ({'status': '200',
              'location': 'http://upload.example.com'}, ''),
            ({'status': '400'}, 'echo_request_body'),
        ])

        try:
            body = request.execute(http=http)
        except HttpError as e:
            self.assertEqual('01234', e.content)
    except ImportError:
        pass
def setUp(self):
    credentials = mock.Mock(spec="google.oauth2.credentials.Credentials")
    email = "*****@*****.**"
    self.client = ApiClient(email, credentials=credentials)
    http = HttpMock("test_data/lumapps_discovery.json", {"status": "200"})
    service = build("lumapps", "v1", http=http, developerKey="no")
    self.client._service = service
def test_media_io_base_stream_unlimited_chunksize_resume(self):
    self.http = HttpMock(datafile('zoo.json'), {'status': '200'})
    zoo = build('zoo', 'v1', http=self.http)

    try:
        import io

        # Set up a seekable stream and try to upload in single chunk.
        fd = io.BytesIO(b'01234"56789"')
        media_upload = MediaIoBaseUpload(
            fd=fd, mimetype='text/plain', chunksize=-1, resumable=True)

        request = zoo.animals().insert(media_body=media_upload, body=None)

        # The single chunk fails, restart at the right point.
        http = HttpMockSequence([
            ({'status': '200',
              'location': 'http://upload.example.com'}, ''),
            ({'status': '308',
              'location': 'http://upload.example.com/2',
              'range': '0-4'}, ''),
            ({'status': '200'}, 'echo_request_body'),
        ])

        body = request.execute(http=http)
        self.assertEqual('56789', body)
    except ImportError:
        pass
def __build_mock(self, mock_file, status=200):
    import os
    path = os.path.dirname(__file__)
    fixture_path = os.path.join(path, 'fixtures', mock_file)
    return HttpMock(fixture_path, {'status': status})
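# Hypothetical usage sketch, not from the original source: inside the same test class,
# the helper above can back a discovery-built client. The fixture file name, the API
# name/version, and the assertion below are assumptions for illustration only.
def test_build_service_from_fixture(self):
    http = self.__build_mock('example_discovery.json', status='200')
    service = build('example', 'v1', http=http)
    self.assertIsNotNone(service)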
def test_failed_to_parse_discovery_json(self):
    self.http = HttpMock(datafile('malformed.json'), {'status': '200'})
    try:
        plus = build('plus', 'v1', http=self.http)
        self.fail("should have raised an exception over malformed JSON.")
    except InvalidJsonError:
        pass
def test_resumable_media_handle_uploads_of_unknown_size_eof(self):
    http = HttpMockSequence([
        ({'status': '200',
          'location': 'http://upload.example.com'}, ''),
        ({'status': '200'}, 'echo_request_headers_as_json'),
    ])

    self.http = HttpMock(datafile('zoo.json'), {'status': '200'})
    zoo = build('zoo', 'v1', http=self.http)

    fd = io.StringIO('data goes here')

    # Create an upload that doesn't know the full size of the media.
    upload = MediaIoBaseUpload(
        fd=fd, mimetype='image/png', chunksize=15, resumable=True)

    request = zoo.animals().insert(media_body=upload, body=None)
    status, body = request.next_chunk(http=http)
    self.assertEqual(body, {
        'Content-Range': 'bytes 0-13/14',
        'Content-Length': '14',
    })
def _token_revoke_test_helper(testcase, status, revoke_raise,
                              valid_bool_value, token_attr):
    current_store = getattr(testcase.credentials, 'store', None)

    dummy_store = DummyDeleteStorage()
    testcase.credentials.set_store(dummy_store)

    actual_do_revoke = testcase.credentials._do_revoke
    testcase.token_from_revoke = None

    def do_revoke_stub(http_request, token):
        testcase.token_from_revoke = token
        return actual_do_revoke(http_request, token)

    testcase.credentials._do_revoke = do_revoke_stub

    http = HttpMock(headers={'status': status})
    if revoke_raise:
        testcase.assertRaises(TokenRevokeError,
                              testcase.credentials.revoke, http)
    else:
        testcase.credentials.revoke(http)

    testcase.assertEqual(getattr(testcase.credentials, token_attr),
                         testcase.token_from_revoke)
    testcase.assertEqual(valid_bool_value, testcase.credentials.invalid)
    testcase.assertEqual(valid_bool_value, dummy_store.delete_called)

    testcase.credentials.set_store(current_store)
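# Hypothetical usage sketch, not from the original source: a test case with a
# self.credentials fixture might drive the helper above for both outcomes. The test
# method names, status codes, and the 'refresh_token' attribute are assumptions;
# the expected booleans follow from the helper's own assertions (a successful revoke
# invalidates the credentials and deletes them from the store, a failed one does not).
def test_revoke_success(self):
    _token_revoke_test_helper(
        self, '200', revoke_raise=False,
        valid_bool_value=True, token_attr='refresh_token')

def test_revoke_failure(self):
    _token_revoke_test_helper(
        self, '400', revoke_raise=True,
        valid_bool_value=False, token_attr='refresh_token')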
def test_patch(self):
    http = HttpMock(datafile('zoo.json'), {'status': '200'})
    zoo = build('zoo', 'v1', http=http)
    request = zoo.animals().patch(name='lion', body='{"description": "foo"}')
    self.assertEqual(request.method, 'PATCH')
def test_method_error_checking(self):
    self.http = HttpMock(datafile('plus.json'), {'status': '200'})
    plus = build('plus', 'v1', http=self.http)

    # Missing required parameters
    try:
        plus.activities().list()
        self.fail()
    except TypeError as e:
        self.assertTrue('Missing' in str(e))

    # Missing required parameters even if supplied as None.
    try:
        plus.activities().list(collection=None, userId=None)
        self.fail()
    except TypeError as e:
        self.assertTrue('Missing' in str(e))

    # Parameter doesn't match regex
    try:
        plus.activities().list(collection='not_a_collection_name', userId='me')
        self.fail()
    except TypeError as e:
        self.assertTrue('not an allowed value' in str(e))

    # Unexpected parameter
    try:
        plus.activities().list(flubber=12)
        self.fail()
    except TypeError as e:
        self.assertTrue('unexpected' in str(e))
def test__build_google_client(self):
    http_auth = HttpMock(self._get_fixture('compute.json'), {'status': '200'})
    client = auth._build_google_client('compute', 'v1', http_auth=http_auth)
    self.assertTrue(hasattr(client, '__class__'))
    self.assertTrue(isinstance(client, Resource))
def test_get_playlist_video_ids_with_published_before(service):
    http = HttpMock(FIXTURES / "playlist.json")
    dt = datetime(2018, 11, 4, 4, 27, 10, tzinfo=tz.UTC)
    video_ids = channel_to_playlist.get_playlist_video_ids(
        service, PLAYLIST_ID, published_before=dt, http=http)
    assert video_ids == VIDEO_IDS[:-1]
def test():
    http = HttpMock(
        os.path.join(self._BASE_DIR, 'fixtures/empty_response.txt'),
        {'status': '404'},
    )
    Client(self.endpoint, http=http)
def test_simple_media_upload_no_max_size_provided(self):
    self.http = HttpMock(datafile('zoo.json'), {'status': '200'})
    zoo = build('zoo', 'v1', http=self.http)
    # TODO: Google API does not recognize the PNG content type
    return
    request = zoo.animals().crossbreed(media_body=datafile('small.png'))
    self.assertEquals('image/png', request.headers['content-type'])
    self.assertEquals('PNG', request.body[1:4])
def get_instance_without_boot(compute, project, zone, instance):
    http = HttpMock(DATA_DIR + '/instance/get_instance_without_boot.json',
                    {'status': '200'})
    request = compute.instances().get(project=project, zone=zone,
                                      instance=instance)
    response = request.execute(http=http)
    return response
def test_nested_resources(self):
    self.http = HttpMock(datafile('zoo.json'), {'status': '200'})
    zoo = build('zoo', 'v1', http=self.http)
    self.assertTrue(getattr(zoo, 'animals'))
    request = zoo.my().favorites().list(max_results="5")
    parsed = urllib.parse.urlparse(request.uri)
    q = parse_qs(parsed[4])
    self.assertEqual(q['max-results'], ['5'])
def test_top_level_functions(self):
    self.http = HttpMock(datafile('zoo.json'), {'status': '200'})
    zoo = build('zoo', 'v1', http=self.http)
    self.assertTrue(getattr(zoo, 'query'))
    request = zoo.query(q="foo")
    parsed = urllib.parse.urlparse(request.uri)
    q = parse_qs(parsed[4])
    self.assertEqual(q['q'], ['foo'])
def test_string_params_value_of_none_get_dropped(self):
    http = HttpMock(datafile('zoo.json'), {'status': '200'})
    zoo = build('zoo', 'v1', http=http)
    request = zoo.query(trace=None, fields='description')
    parsed = urllib.parse.urlparse(request.uri)
    q = parse_qs(parsed[4])
    self.assertFalse('trace' in q)
def test_resumable_multipart_media_good_upload(self):
    self.http = HttpMock(datafile('zoo.json'), {'status': '200'})
    zoo = build('zoo', 'v1', http=self.http)
    media_upload = MediaFileUpload(datafile('small.png'), resumable=True)
    request = zoo.animals().insert(media_body=media_upload, body={})
    self.assertTrue(
        request.headers['content-type'].startswith('application/json'))
    self.assertEquals('{"data": {}}', request.body)
    self.assertEquals(media_upload, request.resumable)
    # TODO: Google API does not recognize the PNG content type
    # self.assertEquals('image/png', request.resumable.mimetype())
    # self.assertNotEquals(request.body, None)
    # self.assertEquals(request.resumable_uri, None)

    http = HttpMockSequence([
        ({'status': '200',
          'location': 'http://upload.example.com'}, ''),
        ({'status': '308',
          'location': 'http://upload.example.com/2',
          'range': '0-12'}, ''),
        ({'status': '308',
          'location': 'http://upload.example.com/3',
          'range': '0-%d' % (media_upload.size() - 2)}, ''),
        ({'status': '200'}, '{"foo": "bar"}'),
    ])

    status, body = request.next_chunk(http=http)
    self.assertEquals(None, body)
    self.assertTrue(isinstance(status, MediaUploadProgress))
    self.assertEquals(13, status.resumable_progress)

    # Two requests should have been made and the resumable_uri should have been
    # updated for each one.
    self.assertEquals(request.resumable_uri, 'http://upload.example.com/2')
    self.assertEquals(media_upload, request.resumable)
    self.assertEquals(13, request.resumable_progress)

    status, body = request.next_chunk(http=http)
    self.assertEquals(request.resumable_uri, 'http://upload.example.com/3')
    self.assertEquals(media_upload.size() - 1, request.resumable_progress)
    self.assertEquals('{"data": {}}', request.body)

    # Final call to next_chunk should complete the upload.
    status, body = request.next_chunk(http=http)
    self.assertEquals(body, {"foo": "bar"})
    self.assertEquals(status, None)
def test_fallback_to_raw_model(self):
    http = HttpMock(datafile('zoo.json'), {'status': '200'})
    zoo = build('zoo', 'v1', http=http)
    request = zoo.animals().getmedia(name='Lion')
    parsed = urllib.parse.urlparse(request.uri)
    q = parse_qs(parsed[4])
    self.assertTrue('alt' not in q)
    self.assertEqual(request.headers['accept'], '*/*')
def test_model_added_query_parameters(self):
    http = HttpMock(datafile('zoo.json'), {'status': '200'})
    zoo = build('zoo', 'v1', http=http)
    request = zoo.animals().get(name='Lion')
    parsed = urllib.parse.urlparse(request.uri)
    q = parse_qs(parsed[4])
    self.assertEqual(q['alt'], ['json'])
    self.assertEqual(request.headers['accept'], 'application/json')
def test_optional_stack_query_parameters(self):
    http = HttpMock(datafile('zoo.json'), {'status': '200'})
    zoo = build('zoo', 'v1', http=http)
    request = zoo.query(trace='html', fields='description')
    parsed = urllib.parse.urlparse(request.uri)
    q = parse_qs(parsed[4])
    self.assertEqual(q['trace'], ['html'])
    self.assertEqual(q['fields'], ['description'])
def test_next_successful_with_next_page_token(self):
    self.http = HttpMock(datafile('tasks.json'), {'status': '200'})
    tasks = build('tasks', 'v1', http=self.http)
    request = tasks.tasklists().list()
    next_request = tasks.tasklists().list_next(
        request, {'nextPageToken': '123abc'})
    parsed = list(urllib.parse.urlparse(next_request.uri))
    q = parse_qs(parsed[4])
    self.assertEqual(q['pageToken'][0], '123abc')
def test_resumable_media_fail_unknown_response_code_subsequent_request(self):
    """Not a multipart upload."""
    self.http = HttpMock(datafile('zoo.json'), {'status': '200'})
    zoo = build('zoo', 'v1', http=self.http)
    media_upload = MediaFileUpload(datafile('small.png'), resumable=True)
    request = zoo.animals().insert(media_body=media_upload, body=None)

    http = HttpMockSequence([
        ({'status': '200',
          'location': 'http://upload.example.com'}, ''),
        ({'status': '400'}, ''),
    ])

    self.assertRaises(HttpError, request.execute, http=http)
    self.assertTrue(request._in_error_state)

    http = HttpMockSequence([
        ({'status': '308',
          'range': '0-5'}, ''),
        ({'status': '308',
          'range': '0-6'}, ''),
    ])

    status, body = request.next_chunk(http=http)
    self.assertEquals(
        status.resumable_progress, 7,
        'Should have first checked length and then tried to PUT more.')
    self.assertFalse(request._in_error_state)

    # Put it back in an error state.
    http = HttpMockSequence([
        ({'status': '400'}, ''),
    ])
    self.assertRaises(HttpError, request.execute, http=http)
    self.assertTrue(request._in_error_state)

    # Pretend the last request that 400'd actually succeeded.
    http = HttpMockSequence([
        ({'status': '200'}, '{"foo": "bar"}'),
    ])
    status, body = request.next_chunk(http=http)
    self.assertEqual(body, {'foo': 'bar'})
def setUp(self):
    self._BASE_DIR = os.path.dirname(os.path.abspath(__file__))
    self.discovery = HttpMock(
        os.path.join(self._BASE_DIR, 'fixtures/fleet_v1.json'),
        {'status': '200'})
    self.endpoint = 'http://198.51.100.23:9160'
    self.client = Client(self.endpoint, http=self.discovery)
def test_search_youtube(self):
    http = HttpMock('google-api-responses/youtube-films-search.json',
                    {'status': '200'})
    result = self.req.search_youtube('blade runner', http)
    expected = [{
        'etag': '"g7k5f8kvn67Bsl8L-Bum53neIr4/WAVFA12qvhEv3kGcmmq9qJwyRNk"',
        'id': {
            'kind': 'youtube#video',
            'videoId': 'rJ-T1ddFVRw'
        },
        'snippet': {
            'liveBroadcastContent': 'none',
            'channelId': 'UCsDKdkvGBqaD-KINQP8WAEA',
            'channelTitle': 'warnervoduk',
            'description': '21st-century detective Rick Deckard brings his masculine-yet-vulnerable '
                           'presence to this stylish noir thriller. In a future of high-tech possibility '
                           'soured by urban and social decay, Deckard...',
            'publishedAt': '2014-01-20T09:56:51.000Z',
            'title': 'Blade Runner'
        },
        'kind': 'youtube#searchResult'
    }, {
        'etag': '"g7k5f8kvn67Bsl8L-Bum53neIr4/NWxfpBvtqNPc7OAaV63b5qs4IRM"',
        'id': {
            'kind': 'youtube#video',
            'videoId': '59cQqLrdmK8'
        },
        'snippet': {
            'liveBroadcastContent': 'none',
            'channelId': 'UCsDKdkvGBqaD-KINQP8WAEA',
            'channelTitle': 'warnervoduk',
            'description': "The one that started it all. Ridley Scott's Blade Runner is one of the most important"
                           " science-fiction movies of the 20th Century. Its futuristic depiction of a "
                           "post-apocalyptic, dystopian world...",
            'publishedAt': '2013-07-19T04:02:19.000Z',
            'title': 'Blade Runner: The Final Cut Special Edition'
        },
        'kind': 'youtube#searchResult'
    }]
    self.assertEqual(result, expected)
def test_get_stats(self):
    http = HttpMock('google-api-responses/youtube-films-stats.json',
                    {'status': '200'})
    result = self.req.get_stats('59cQqLrdmK8', http)
    expected = {
        'favoriteCount': '0',
        'commentCount': '0',
        'likeCount': '321',
        'dislikeCount': '97'
    }
    self.assertEqual(result, expected)