def test_get_is_case_insensitive(self):
    """Header lookups must ignore name case, regardless of how the values were added."""
    headers = HttpHeaders()
    headers.set('Cookie', 'a=1')
    headers.add_pairs([('cookie', 'b=2'), ('COOKIE', 'c=3')])
    headers.add(u'CoOkIe', 'd=4')  # note: unicode
    self.assertEqual('a=1', headers.get(u'COOKIE'))
    self.assertEqual(['a=1', 'b=2', 'c=3', 'd=4'], list(headers.get_values('Cookie')))
def _validate_successful_get_response(self, put_object):
    """Assert a completed GET: status 200, no Content-Range, and body length
    matching the Content-Length header (which must be present unless this
    response came from a PUT)."""
    self.assertEqual(self.response_status_code, 200, "status code is not 200")
    response_headers = HttpHeaders(self.response_headers)
    self.assertIsNone(response_headers.get("Content-Range"))
    content_length = response_headers.get("Content-Length")
    if not put_object:
        self.assertIsNotNone(content_length, "Content-Length is missing from headers")
    if content_length:
        self.assertEqual(
            int(content_length),
            self.received_body_len,
            "Received body length does not match the Content-Length header")
def _get_object_request(self, object_path):
    """Build a GET HttpRequest for object_path against this test's bucket endpoint."""
    endpoint = self._build_endpoint_string(self.region, self.bucket_name)
    request_headers = HttpHeaders([("host", endpoint)])
    return HttpRequest("GET", object_path, request_headers)
def _put_object_cancel_helper(self, cancel_after_read):
    """Start a PUT that can never complete, cancel it, and verify the request
    fails with AWS_ERROR_S3_CANCELED and shuts down cleanly.

    If cancel_after_read is True, wait until the client has read from the
    body stream at least once before cancelling.
    """
    read_future = Future()  # renamed from misspelled 'read_futrue'
    put_body_stream = FakeReadStream(read_future)
    fake_data_len = 10 * 1024 * 1024 * 1024  # some fake length
    request_headers = HttpHeaders([
        ("host", self._build_endpoint_string(self.region, self.bucket_name)),
        ("Content-Type", "text/plain"),
        ("Content-Length", str(fake_data_len)),
    ])
    http_request = HttpRequest("PUT", "/cancelled_request", request_headers, put_body_stream)
    s3_client = s3_client_new(False, self.region, 5 * 1024 * 1024)
    s3_request = s3_client.make_request(
        request=http_request,
        type=S3RequestType.PUT_OBJECT,
        on_headers=self._on_request_headers)
    if cancel_after_read:
        # block until the client has pulled data from the fake stream
        read_future.result(self.timeout)
    s3_request.cancel()
    try:
        s3_request.finished_future.result(self.timeout)
    except Exception as e:
        self.assertEqual(e.name, "AWS_ERROR_S3_CANCELED")
    shutdown_event = s3_request.shutdown_event
    s3_request = None  # drop the last reference so shutdown can proceed
    self.assertTrue(shutdown_event.wait(self.timeout))
def test_multipart_get_object_cancel(self):
    """Cancel a multipart GET after the first chunk and verify the transfer stops early."""
    # a 5 GB file
    request = self._get_object_request("/crt-canary-obj-single-part-9223372036854775807")
    s3_client = s3_client_new(False, self.region, 5 * 1024 * 1024)
    with NamedTemporaryFile(mode="w", delete=False) as recv_file:
        recv_file.close()
        self.s3_request = s3_client.make_request(
            request=request,
            recv_filepath=recv_file.name,
            type=S3RequestType.GET_OBJECT,
            on_headers=self._on_request_headers,
            on_progress=self._on_progress_cancel_after_first_chunk)
        try:
            self.s3_request.finished_future.result(self.timeout)
        except Exception as e:
            self.assertEqual(e.name, "AWS_ERROR_S3_CANCELED")
        # Result check
        self.data_len = int(HttpHeaders(self.response_headers).get("Content-Length"))
        self.assertLess(
            self.transferred_len,
            self.data_len,
            "the cancel failed to block all the following body")
        # The on_finish callback may invoke the progress
        self.assertLessEqual(self.progress_invoked, 2)
        shutdown_event = self.s3_request.shutdown_event
        self.s3_request = None  # release the request so shutdown can complete
        self.assertTrue(shutdown_event.wait(self.timeout))
        os.remove(recv_file.name)
def test_get_object_file_object(self):
    """Download an object to a file and verify the on-disk length matches both
    the reported transferred length and the Content-Length header."""
    request = self._get_object_request(self.get_test_object_path)
    s3_client = s3_client_new(False, self.region, 5 * 1024 * 1024)
    with NamedTemporaryFile(mode="w", delete=False) as recv_file:
        recv_file.close()
        s3_request = s3_client.make_request(
            request=request,
            type=S3RequestType.GET_OBJECT,
            recv_filepath=recv_file.name,
            on_headers=self._on_request_headers,
            on_progress=self._on_progress)
        s3_request.finished_future.result(self.timeout)
        # Result check
        self.data_len = int(HttpHeaders(self.response_headers).get("Content-Length"))
        written_len = os.stat(recv_file.name).st_size
        self.assertEqual(
            written_len,
            self.transferred_len,
            "the length of written file does not match the transferred length reported")
        self.assertEqual(
            self.data_len,
            self.transferred_len,
            "the transferred length reported does not match the content-length header")
        self.assertEqual(self.response_status_code, 200, "status code is not 200")
        # TODO verify the content of written file
        os.remove(recv_file.name)
def _convert_request(request: PreparedRequest) -> HttpRequest:
    """Translate a PreparedRequest into an awscrt HttpRequest, carrying over
    method, path, headers, and the body stream unchanged."""
    crt_headers = HttpHeaders(request.headers.as_list())
    return HttpRequest(
        method=request.method,
        path=request.path,
        headers=crt_headers,
        body_stream=request.body,
    )
def _test_request_lives_until_stream_complete(self, secure):
    """Regression test: the HttpClientStream must keep the HttpRequest and its
    body InputStream alive until the stream completes, even after the caller
    drops its own references."""
    self._start_server(secure)
    try:
        connection = self._new_client_connection(secure)
        put_headers = HttpHeaders([
            ('Host', self.hostname),
            ('Content-Length', '5'),
        ])
        request = HttpRequest(
            method='PUT',
            path='/test/test_request_refcounts.txt',
            headers=put_headers,
            body_stream=BytesIO(b'hello'))
        response = Response()
        http_stream = connection.request(request, response.on_response, response.on_body)
        # HttpClientStream should keep the dependencies (HttpRequest, HttpHeaders, InputStream)
        # alive as long as it needs them
        del request
        http_stream.activate()
        http_stream.completion_future.result(self.timeout)
        self.assertEqual(None, connection.close().result(self.timeout))
    finally:
        self._stop_server()
def _test_put(self, secure):
    """PUT this very test file to the local server and verify the echo."""
    # PUT request sends this very file to the server.
    self._start_server(secure)
    connection = self._new_client_connection(secure)
    test_asset_path = 'test/test_http_client.py'
    with open(test_asset_path, 'rb') as outgoing_body_stream:
        outgoing_body_bytes = outgoing_body_stream.read()
        put_headers = HttpHeaders([
            ('Content-Length', str(len(outgoing_body_bytes))),
        ])
        # seek back to start of stream before trying to send it
        outgoing_body_stream.seek(0)
        put_request = HttpRequest('PUT', '/' + test_asset_path, put_headers, outgoing_body_stream)
        response = Response()
        http_stream = connection.request(put_request, response.on_response, response.on_body)
        http_stream.activate()
        # wait for stream to complete
        stream_completion_result = http_stream.completion_future.result(self.timeout)
        self.assertEqual(200, response.status_code)
        self.assertEqual(200, stream_completion_result)
        # compare what we sent against what the server received
        server_received = self.server.put_requests.get('/' + test_asset_path)
        self.assertIsNotNone(server_received)
        self.assertEqual(server_received, outgoing_body_bytes)
        self.assertEqual(None, connection.close().result(self.timeout))
    self._stop_server()
def test_iter(self):
    # test that we iterate over everything we put in
    remaining = [('Host', 'example.org'), ('Cookie', 'a=1')]
    headers = HttpHeaders(remaining)
    for name_value in headers:
        remaining.remove(name_value)
    self.assertEqual(0, len(remaining))
def test_iter_order(self):
    # test that headers with multiple values are iterated in insertion order
    pairs = [('Cookie', 'a=1'), ('cookie', 'b=2')]
    headers = HttpHeaders(pairs)
    gathered = list(headers)
    # note this also compares that we preserved case of the names
    self.assertEqual(pairs, gathered)
def on_connection_completed(conn_future):
    """Once the connection attempt resolves, issue the Greengrass discovery GET
    for this thing; propagate any failure into the discovery future."""
    try:
        connection = conn_future.result()
        request_headers = HttpHeaders()
        request_headers.add('host', self._gg_server_name)
        discovery_request = HttpRequest(
            method='GET',
            path='/greengrass/discover/thing/{}'.format(thing_name),
            headers=request_headers)
        http_stream = connection.request(request=discovery_request, on_body=on_incoming_body)
        http_stream.completion_future.add_done_callback(on_request_complete)
    except Exception as e:
        discovery['future'].set_exception(e)
def test_remove(self):
    """remove() raises KeyError for absent names and deletes existing ones."""
    headers = HttpHeaders()
    self.assertRaises(KeyError, headers.remove, 'Non-Existent')
    headers.add('Host', 'example.org')
    headers.remove('Host')
    self.assertIsNone(headers.get('Host'))
def test_request_create_nondefault(self):
    """HttpRequest must reflect every constructor argument it was given."""
    src_headers = [('Cookie', 'a=1'), ('Cookie', 'b=2')]
    with open('test/test_http_headers.py', 'rb') as body_stream:
        request = HttpRequest(
            method="PUT",
            path="/upload",
            headers=HttpHeaders(src_headers),
            body_stream=body_stream)
        self.assertEqual("PUT", request.method)
        self.assertEqual("/upload", request.path)
        self.assertEqual(src_headers, list(request.headers))
        self.assertIsNotNone(request.body_stream)
def _put_object_request(self, file_name, path=None):
    """Build a PUT HttpRequest that streams file_name as its body.

    Stores the open stream on self.put_body_stream (closed elsewhere) and the
    file size on self.data_len. Defaults path to self.put_test_object_path.
    """
    self.put_body_stream = open(file_name, "r+b")
    self.data_len = os.stat(file_name).st_size
    request_headers = HttpHeaders([
        ("host", self._build_endpoint_string(self.region, self.bucket_name)),
        ("Content-Type", "text/plain"),
        ("Content-Length", str(self.data_len)),
    ])
    object_path = self.put_test_object_path if path is None else path
    return HttpRequest("PUT", object_path, request_headers, self.put_body_stream)
def test_add_pairs(self):
    """add_pairs() accepts a list of (name, value) tuples in one call."""
    headers = HttpHeaders()
    headers.add_pairs([
        ('Host', 'example.org'),
        ('Cookie', 'a=1'),
        ('Cookie', 'b=2'),
    ])
    self.assertEqual('example.org', headers.get('Host'))
    self.assertEqual(['a=1', 'b=2'], list(headers.get_values('Cookie')))
request_type = "download" # Initialization event_loop_group = EventLoopGroup(18) host_resolver = DefaultHostResolver(event_loop_group) bootstrap = ClientBootstrap(event_loop_group, host_resolver) credential_provider = AwsCredentialsProvider.new_default_chain(bootstrap) s3_client = S3Client( bootstrap=bootstrap, region="us-west-2", credential_provider=credential_provider, throughput_target_gbps=100) t_statistic = Statistics() headers = HttpHeaders([("host", bucket_name + ".s3." + region + ".amazonaws.com")]) request = HttpRequest("GET", object_name, headers) # file_stats = os.stat(file_name) # data_len = file_stats.st_size # data_stream = CrtLazyReadStream(file_name, "r+b", t_statistic, data_len) # upload_headers = HttpHeaders([("host", bucket_name + ".s3." + region + ".amazonaws.com"), # ("Content-Type", "text/plain"), ("Content-Length", str(data_len))]) # upload_request = HttpRequest("PUT", "/put_object_test_py_10MB.txt", upload_headers, data_stream) def on_body(offset, chunk, **kwargs): t_statistic.record_read(len(chunk)) # if writing_disk: # if not os.path.exists(file_name):
def test_get_none(self):
    """Missing names: get() returns None (or the supplied default); get_values() is empty."""
    headers = HttpHeaders()
    self.assertIsNone(headers.get('Non-Existent'))
    self.assertEqual('Banana', headers.get('Non-Existent', 'Banana'))
    self.assertEqual([], list(headers.get_values('Non-Existent')))
def test_unicode(self):
    # test adding unicode values in all the different ways
    headers = HttpHeaders([('a', 'ሴ')])
    self.assertEqual('ሴ', headers.get('a'))
    headers.set('b', '𦉘')
    self.assertEqual('𦉘', headers.get('b'))
    headers.add('c', '👁👄👁')
    self.assertEqual('👁👄👁', headers.get('c'))
    headers.add_pairs([('d', 'ⓤţḟ⁻❽')])
    self.assertEqual('ⓤţḟ⁻❽', headers.get('d'))
def test_set(self):
    """set() creates a header, replaces one value, and collapses many values to one."""
    headers = HttpHeaders()
    # create
    headers.set('Host', 'example.org')
    self.assertEqual(['example.org'], list(headers.get_values('Host')))
    # replace
    headers.set('Host', 'example2.org')
    self.assertEqual(['example2.org'], list(headers.get_values('Host')))
    # replace many
    headers.add('Host', 'example3.org')
    headers.add('Host', 'example4.org')
    headers.set('Host', 'example5.org')
    self.assertEqual(['example5.org'], list(headers.get_values('Host')))
def test_clear(self):
    """clear() removes every header, leaving nothing to iterate."""
    headers = HttpHeaders([('Host', 'example.org'), ('Cookie', 'a=1'), ('cookie', 'b=2')])
    headers.clear()
    self.assertEqual([], list(headers))
def test_remove_value(self):
    """remove_value() raises ValueError for missing name/value, and removes
    exactly one occurrence of a duplicated value."""
    headers = HttpHeaders()
    self.assertRaises(ValueError, headers.remove_value, 'Non-Existent', 'Nope')
    # header with 1 value
    headers.add('Host', 'example.org')
    self.assertRaises(ValueError, headers.remove_value, 'Host', 'wrong-value')
    headers.remove_value('Host', 'example.org')
    self.assertIsNone(headers.get('Host'))
    # pluck out a duplicate value [1,2,2] -> [1,2]
    headers.add_pairs([('Dupes', '1'), ('DUPES', '2'), ('dupes', '2')])
    headers.remove_value('Dupes', '2')
    self.assertEqual(['1', '2'], list(headers.get_values('Dupes')))
def test_add(self):
    """add() makes the value visible via both get() and get_values()."""
    headers = HttpHeaders()
    headers.add('Host', 'example.org')
    self.assertEqual('example.org', headers.get('Host'))
    self.assertEqual(['example.org'], list(headers.get_values('Host')))
def on_response(self, http_stream, status_code, headers, **kwargs):
    """Response callback: stash the status code and a copy of the headers on self."""
    self.status_code = status_code
    self.headers = HttpHeaders(headers)
def test_add_multi_values(self):
    """Multiple add() calls accumulate values; get() returns the first one."""
    headers = HttpHeaders()
    headers.add('Cookie', 'a=1')
    headers.add('Cookie', 'b=2')
    self.assertEqual('a=1', headers.get('Cookie'))
    self.assertEqual(['a=1', 'b=2'], list(headers.get_values('Cookie')))