def _convert_request(request: PreparedRequest) -> HttpRequest:
    """Translate a PreparedRequest into the awscrt HttpRequest type."""
    # Headers are converted first; awscrt wants them as a list of pairs.
    crt_headers = HttpHeaders(request.headers.as_list())
    return HttpRequest(
        method=request.method,
        path=request.path,
        headers=crt_headers,
        body_stream=request.body,
    )
def _test_request_lives_until_stream_complete(self, secure):
    """Regression test: the HttpClientStream must keep its HttpRequest (and
    that request's HttpHeaders and body InputStream) alive until the stream
    completes, even after the caller drops its own reference."""
    self._start_server(secure)
    try:
        conn = self._new_client_connection(secure)

        req_headers = HttpHeaders([
            ('Host', self.hostname),
            ('Content-Length', '5'),
        ])
        req = HttpRequest(
            method='PUT',
            path='/test/test_request_refcounts.txt',
            headers=req_headers,
            body_stream=BytesIO(b'hello'))

        resp = Response()
        stream = conn.request(req, resp.on_response, resp.on_body)

        # Drop our only reference to the request BEFORE activating; the
        # stream itself is responsible for keeping it (and its headers and
        # body stream) alive until completion.
        del req

        stream.activate()
        stream.completion_future.result(self.timeout)

        self.assertEqual(None, conn.close().result(self.timeout))
    finally:
        self._stop_server()
def _get_object_request(self, object_path):
    """Build a GET HttpRequest for the given S3 object path."""
    host = self._build_endpoint_string(self.region, self.bucket_name)
    return HttpRequest("GET", object_path, HttpHeaders([("host", host)]))
def _test_put(self, secure):
    """PUT request sends this very file to the server, then verifies the
    server received exactly the bytes we sent.

    Fix: the test body now runs inside try/finally so the local server is
    always stopped, even when an assertion fails mid-test (matches the
    try/finally convention used by the other tests in this file).
    """
    self._start_server(secure)
    try:
        connection = self._new_client_connection(secure)

        test_asset_path = 'test/test_http_client.py'
        with open(test_asset_path, 'rb') as outgoing_body_stream:
            outgoing_body_bytes = outgoing_body_stream.read()
            headers = HttpHeaders([
                ('Content-Length', str(len(outgoing_body_bytes))),
            ])

            # seek back to start of stream before trying to send it
            outgoing_body_stream.seek(0)

            request = HttpRequest('PUT', '/' + test_asset_path, headers, outgoing_body_stream)
            response = Response()
            http_stream = connection.request(request, response.on_response, response.on_body)
            http_stream.activate()

            # wait for stream to complete (stream reads from the open file,
            # so this must happen inside the `with` block)
            stream_completion_result = http_stream.completion_future.result(self.timeout)

        self.assertEqual(200, response.status_code)
        self.assertEqual(200, stream_completion_result)

        # compare what we sent against what the server received
        server_received = self.server.put_requests.get('/' + test_asset_path)
        self.assertIsNotNone(server_received)
        self.assertEqual(server_received, outgoing_body_bytes)

        self.assertEqual(None, connection.close().result(self.timeout))
    finally:
        self._stop_server()
def _put_object_cancel_helper(self, cancel_after_read):
    """Start a (fake) huge PUT, cancel it, and verify the request fails with
    AWS_ERROR_S3_CANCELED and that native resources shut down afterwards.

    Fixes: the misspelled local ``read_futrue`` -> ``read_future``, and the
    try/except that silently let the test pass when no exception was raised
    at all — a cancelled request is now required to raise.

    :param cancel_after_read: if True, wait until the body stream has been
        read from before issuing the cancel.
    """
    read_future = Future()
    put_body_stream = FakeReadStream(read_future)
    data_len = 10 * 1024 * 1024 * 1024  # some fake length
    headers = HttpHeaders([
        ("host", self._build_endpoint_string(self.region, self.bucket_name)),
        ("Content-Type", "text/plain"),
        ("Content-Length", str(data_len)),
    ])
    http_request = HttpRequest("PUT", "/cancelled_request", headers, put_body_stream)
    s3_client = s3_client_new(False, self.region, 5 * 1024 * 1024)
    s3_request = s3_client.make_request(
        request=http_request,
        type=S3RequestType.PUT_OBJECT,
        on_headers=self._on_request_headers)

    if cancel_after_read:
        # block until the fake stream reports it has been read from
        read_future.result(self.timeout)

    s3_request.cancel()

    finished_future = s3_request.finished_future
    try:
        finished_future.result(self.timeout)
    except Exception as e:
        self.assertEqual(e.name, "AWS_ERROR_S3_CANCELED")
    else:
        # previously this path silently passed; the request must fail
        self.fail("cancelled S3 request completed without raising")

    # dropping our reference must let the native request shut down
    shutdown_event = s3_request.shutdown_event
    s3_request = None
    self.assertTrue(shutdown_event.wait(self.timeout))
def _test_get(self, secure, proxy_options=None):
    """GET request receives this very file from the server. Super meta.

    Fix: the test body now runs inside try/finally so the local server is
    always stopped even when an assertion fails (consistent with the other
    tests in this file).
    """
    # Use HTTP/1.0 in proxy tests or server will keep connection with proxy
    # alive and refuse to shut down for 1 minute at the end of each proxy test
    http_1_0 = proxy_options is not None
    self._start_server(secure, http_1_0)
    try:
        connection = self._new_client_connection(secure, proxy_options)

        test_asset_path = 'test/test_http_client.py'

        request = HttpRequest('GET', '/' + test_asset_path)
        response = Response()
        stream = connection.request(request, response.on_response, response.on_body)
        stream.activate()

        # wait for stream to complete
        stream_completion_result = stream.completion_future.result(self.timeout)

        self.assertEqual(200, response.status_code)
        self.assertEqual(200, stream_completion_result)

        with open(test_asset_path, 'rb') as test_asset:
            test_asset_bytes = test_asset.read()
            self.assertEqual(test_asset_bytes, response.body)

        self.assertEqual(None, connection.close().exception(self.timeout))
    finally:
        self._stop_server()
def test_request_create_nondefault(self):
    """HttpRequest stores explicitly-passed method, path, headers, and body.

    Fix: the body stream is now opened with a context manager — the original
    bare ``body_stream.close()`` at the end leaked the file handle whenever
    an assertion failed first.
    """
    src_headers = [('Cookie', 'a=1'), ('Cookie', 'b=2')]
    with open('test/test_http_headers.py', 'rb') as body_stream:
        request = HttpRequest(
            method="PUT",
            path="/upload",
            headers=HttpHeaders(src_headers),
            body_stream=body_stream)

        self.assertEqual("PUT", request.method)
        self.assertEqual("/upload", request.path)
        self.assertEqual(src_headers, list(request.headers))
        self.assertIsNotNone(request.body_stream)
def _test_stream_cleans_up_if_never_activated(self, secure):
    """A stream that is never activated must still clean itself up when the
    last reference is dropped."""
    self._start_server(secure)
    conn = self._new_client_connection(secure)
    stream = conn.request(HttpRequest('GET', '/test/test_http_client.py'))

    # Deliberately never call stream.activate().
    # Dropping the references should tear the stream down and let the
    # connection shut itself down.
    del stream
    del conn

    self._stop_server()
def _put_object_request(self, file_name, path=None):
    """Build a PUT HttpRequest whose body streams from ``file_name``.

    The open stream and its byte length are stored on ``self``
    (``put_body_stream`` / ``data_len``) for later use by the caller.
    """
    self.put_body_stream = open(file_name, "r+b")
    self.data_len = os.stat(file_name).st_size

    endpoint = self._build_endpoint_string(self.region, self.bucket_name)
    headers = HttpHeaders([
        ("host", endpoint),
        ("Content-Type", "text/plain"),
        ("Content-Length", str(self.data_len)),
    ])

    target_path = self.put_test_object_path if path is None else path
    return HttpRequest("PUT", target_path, headers, self.put_body_stream)
def _test_stream_lives_until_complete(self, secure):
    """Stream and connection must stay alive until in-flight work completes,
    even after the caller drops every local reference.

    Fix: the server is now stopped in a finally block so a failed assertion
    or timeout doesn't leave it running (consistent with the try/finally
    variant of this test elsewhere in the suite).

    NOTE(review): this variant never calls stream.activate() — presumably the
    request() API auto-activates in this version; confirm before adding one.
    """
    self._start_server(secure)
    try:
        connection = self._new_client_connection(secure)
        request = HttpRequest('GET', '/test/test_http_client.py')
        stream = connection.request(request)
        completion_future = stream.completion_future

        # delete all local references
        del stream
        del connection

        # stream should still complete successfully
        completion_future.result(self.timeout)
    finally:
        self._stop_server()
def _test_shutdown_error(self, secure):
    """When an HTTP/1.0 server hangs up after the response, the connection's
    shutdown future must carry an AwsCrtError."""
    # HTTP/1.0 forces the server to close the socket once the request completes
    self._start_server(secure, http_1_0=True)

    conn = self._new_client_connection(secure)

    # Issue a request; its outcome is irrelevant to this test.
    resp = Response()
    stream = conn.request(HttpRequest('GET', '/'), resp.on_response, resp.on_body)
    stream.activate()
    stream.completion_future.result(self.timeout)

    # The server hang-up should be immediate since it's HTTP/1.0; the
    # resulting shutdown error must surface on the shutdown future.
    err = conn.shutdown_future.exception(self.timeout)
    self.assertIsInstance(err, awscrt.exceptions.AwsCrtError)

    self._stop_server()
def on_connection_completed(conn_future):
    """Once the connection attempt resolves, issue the Greengrass discovery
    GET and chain the completion callback; any failure (connect or request
    setup) is routed to the discovery future."""
    try:
        conn = conn_future.result()

        req_headers = HttpHeaders()
        req_headers.add('host', self._gg_server_name)

        req = HttpRequest(
            method='GET',
            path='/greengrass/discover/thing/{}'.format(thing_name),
            headers=req_headers)

        stream = conn.request(request=req, on_body=on_incoming_body)
        stream.completion_future.add_done_callback(on_request_complete)
    except Exception as e:
        discovery['future'].set_exception(e)
def _do_proxy_http_test(self, test_type, auth_type):
    """Run a plain GET through a proxy configured from the environment and
    expect both the response and the stream to report 200."""
    uri = ProxyTestConfiguration.get_uri_from_test_type(test_type)
    proxy_options = ProxyTestConfiguration.create_http_proxy_options_from_environment(
        test_type, auth_type)
    conn = self._establish_http_connection(test_type, uri, proxy_options)

    req = HttpRequest('GET', '/')
    req.headers.add('host', uri)
    resp = Response()

    stream = conn.request(req, resp.on_response, resp.on_body)
    stream.activate()

    # block until the stream finishes
    result = stream.completion_future.result(TIMEOUT)

    self.assertEqual(200, resp.status_code)
    self.assertEqual(200, result)
def _test_stream_lives_until_complete(self, secure):
    """Stream and connection classes must stay alive until their work
    finishes, even after the caller drops every reference to them."""
    self._start_server(secure)
    try:
        conn = self._new_client_connection(secure)
        req = HttpRequest('GET', '/test/test_http_client.py')
        stream = conn.request(req)
        stream.activate()
        done = stream.completion_future

        # Drop every local reference to the stream and connection...
        del stream
        del conn

        # ...yet the stream must still run to successful completion.
        done.result(self.timeout)
    finally:
        self._stop_server()
def test_h2_client(self):
    """Connect to a known HTTP/2 endpoint and download a large document,
    verifying the negotiated protocol, status, and body length."""
    url = urlparse("https://d1cz66xoahf9cl.cloudfront.net/http_test_doc.txt")
    conn = self._new_h2_client_connection(url)

    # the negotiated protocol must be HTTP/2
    self.assertEqual(conn.version, HttpVersion.Http2)

    req = HttpRequest('GET', url.path)
    req.headers.add('host', url.hostname)
    resp = Response()
    stream = conn.request(req, resp.on_response, resp.on_body)
    stream.activate()

    # block until the stream finishes, then validate the transfer
    result = stream.completion_future.result(self.timeout)
    self.assertEqual(200, resp.status_code)
    self.assertEqual(200, result)
    self.assertEqual(14428801, len(resp.body))

    self.assertEqual(None, conn.close().exception(self.timeout))
def test_unicode(self):
    """A non-ASCII path must round-trip through HttpRequest unchanged."""
    unicode_path = '/ሴ'
    req = HttpRequest(path=unicode_path)
    self.assertEqual(unicode_path, req.path)
def test_headers_live_after_message_del(self):
    """An HttpHeaders object must remain fully usable after the request that
    owned it has been deleted."""
    req = HttpRequest()
    headers = req.headers
    del req

    # headers must still accept mutation and iteration
    headers.add('Cookie', 'a=1')
    self.assertEqual([('Cookie', 'a=1')], list(headers))
def test_request_create_default(self):
    """A default-constructed HttpRequest is ``GET /`` with no headers and
    no body stream."""
    req = HttpRequest()
    self.assertEqual("GET", req.method)
    self.assertEqual("/", req.path)
    self.assertEqual([], list(req.headers))
    self.assertIsNone(req.body_stream)
# Initialization event_loop_group = EventLoopGroup(18) host_resolver = DefaultHostResolver(event_loop_group) bootstrap = ClientBootstrap(event_loop_group, host_resolver) credential_provider = AwsCredentialsProvider.new_default_chain(bootstrap) s3_client = S3Client( bootstrap=bootstrap, region="us-west-2", credential_provider=credential_provider, throughput_target_gbps=100) t_statistic = Statistics() headers = HttpHeaders([("host", bucket_name + ".s3." + region + ".amazonaws.com")]) request = HttpRequest("GET", object_name, headers) # file_stats = os.stat(file_name) # data_len = file_stats.st_size # data_stream = CrtLazyReadStream(file_name, "r+b", t_statistic, data_len) # upload_headers = HttpHeaders([("host", bucket_name + ".s3." + region + ".amazonaws.com"), # ("Content-Type", "text/plain"), ("Content-Length", str(data_len))]) # upload_request = HttpRequest("PUT", "/put_object_test_py_10MB.txt", upload_headers, data_stream) def on_body(offset, chunk, **kwargs): t_statistic.record_read(len(chunk)) # if writing_disk: # if not os.path.exists(file_name): # open(file_name, 'a').close()