def test_send_interceptorpb_not_connected(self):
    """Sending through the http api while InterceptorPB is offline must be
    rejected with a 503 and increment the interceptor error counter only."""
    count_before = self.stats_http.get('interceptor_count')
    error_count_before = self.stats_http.get('interceptor_error_count')

    # Send a SMS MT through the http interface
    send_url = 'http://127.0.0.1:1401/send'
    send_params = {'to': '06155423',
                   'content': 'test',
                   'username': self.u1.username,
                   'password': self.u1_password}
    client = HTTPClient(Agent(reactor))
    response = yield client.post(send_url, data=send_params)
    status_code = response.code
    body = yield text_content(response)

    # Asserts: request refused, interceptor_count untouched,
    # interceptor_error_count bumped by exactly one.
    self.assertEqual(status_code, 503)
    self.assertEqual(body, 'Error "InterceptorPB not connected !"')
    self.assertEqual(count_before, self.stats_http.get('interceptor_count'))
    self.assertEqual(error_count_before + 1,
                     self.stats_http.get('interceptor_error_count'))
def test_send_syntax_error(self):
    """A syntactically broken interception script must produce a 400 and
    increment the interceptor error counter only."""
    count_before = self.stats_http.get('interceptor_count')
    error_count_before = self.stats_http.get('interceptor_error_count')

    # Connect to InterceptorPB
    yield self.ipb_connect()

    # Send a SMS MT through the http interface
    send_url = 'http://127.0.0.1:1401/send'
    send_params = {'to': '06155423',
                   'content': 'test',
                   'username': self.u1.username,
                   'password': self.u1_password}
    client = HTTPClient(Agent(reactor))
    response = yield client.post(send_url, data=send_params)
    status_code = response.code
    body = yield text_content(response)

    # Asserts
    self.assertEqual(status_code, 400)
    self.assertEqual(
        body,
        'Error "Failed running interception script, check log for details"'
    )
    self.assertEqual(count_before, self.stats_http.get('interceptor_count'))
    self.assertEqual(error_count_before + 1,
                     self.stats_http.get('interceptor_error_count'))
def handle_outbound_message(self, message):
    """Forward one outbound message to the AfricasTalking HTTP endpoint.

    Rejects the message locally when 'to_addr' or 'content' is missing;
    otherwise POSTs the payload and dispatches the validated outcome via
    ``outbound_status``.
    """
    # The transport does not make any attempt to
    # interpret AfricasTalking responses
    self.emit("consuming %s" % message)
    message_id = message['message_id']
    missing_fields = self.ensure_message_values(message, ['to_addr', 'content'])
    if missing_fields:
        # returnValue raises, so no HTTP request is made for invalid messages
        returnValue(self.reject_message(message, missing_fields))
    outbound_msg = {
        'username': self.username,
        # NOTE(review): if 'to_addr' is a single string rather than a list,
        # this join inserts a comma between every character — confirm
        # callers always provide a list of addresses.
        'to': ','.join(message.payload['to_addr']),
        'message': message.payload['content'].encode('utf-8'),
        'bulkSMSMode': 1,
    }
    self.emit("outbound message {}".format(outbound_msg))
    http_client = HTTPClient(self.agent_factory)
    args = dict(url=self.outbound_url, data=outbound_msg,
                headers=self.headers, allow_redirects=False)
    response = yield http_client.post(**args)
    # validate_outbound is expected to yield a dict of keyword arguments
    # for outbound_status — presumably status/reason fields; verify there.
    validate = yield self.validate_outbound(response)
    validate['message_id'] = message_id
    yield self.outbound_status(**validate)
def run_test(self, content, datacoding=None, port=1401):
    """Send a single MT with the given content/datacoding through the http
    api listening on ``port`` and assert the response starts with 'Success'.

    :param content: message body to submit.
    :param datacoding: optional SMPP data_coding value; when None any
        previously-set 'coding' param is removed from the request.
    :param port: http api port (defaults to 1401).
    """
    yield self.connect('127.0.0.1', self.pbPort)
    yield self.prepareRoutingsAndStartConnector()

    # Set content
    self.params['content'] = content
    # Set datacoding
    if datacoding is None and 'coding' in self.params:
        del self.params['coding']
    if datacoding is not None:
        self.params['coding'] = datacoding

    # Prepare baseurl
    baseurl = 'http://127.0.0.1:%s/send' % port

    # Send a MT
    # We should receive a msg id
    agent = Agent(reactor)
    client = HTTPClient(agent)
    response = yield client.post(baseurl, data=self.params)
    text = yield text_content(response)
    # The first 7 characters of the body carry the status keyword
    msgStatus = text[:7]

    # Wait 2 seconds before stopping SmppClientConnectors
    exitDeferred = defer.Deferred()
    reactor.callLater(2, exitDeferred.callback, None)
    yield exitDeferred
    yield self.stopSmppClientConnectors()

    # Run tests
    self.assertEqual(msgStatus, 'Success')
def run_send_test(self, user=None, content='anycontent', hex_content=None,
                  dlr_level=None, dlr_method=None, source_address=None,
                  priority=None, schedule_delivery_time=None,
                  validity_period=None, destination_address=None,
                  default_route=None, side_effect=None):
    """Send one MT through the http api with the given optional parameters
    and return ``(response_text, response_code)``."""
    yield self.connect('127.0.0.1', self.pbPort)
    yield self.prepareRoutingsAndStartConnector(user, default_route, side_effect)

    # Content is special: passing None removes it from the request entirely.
    if content is None:
        del self.params['content']
    else:
        self.params['content'] = content

    # Remaining parameters are only sent when explicitly provided.
    optional_params = {
        'hex-content': hex_content,
        'dlr-level': dlr_level,
        'dlr-method': dlr_method,
        'from': source_address,
        'priority': priority,
        'sdt': schedule_delivery_time,
        'validity-period': validity_period,
        'to': destination_address,
    }
    for key, value in optional_params.items():
        if value is not None:
            self.params[key] = value

    base_url = 'http://127.0.0.1:1401/send'

    # Send a MT; we should receive a msg id on success
    client = HTTPClient(Agent(reactor))
    response = yield client.post(base_url, data=self.params)
    response_text = yield text_content(response)
    response_code = response.code

    # Wait 5 seconds before stopping SmppClientConnectors
    yield waitFor(5)
    yield self.stopSmppClientConnectors()

    defer.returnValue((response_text, response_code))
def test_throughput_limit_rejection(self):
    """Flood the http api beyond the user's http_throughput quota and assert
    that the number of 'throughput exceeded' rejections matches what the
    configured rate allows for the elapsed time (tolerance of +/- 3).
    """
    user = copy.copy(self.user1)
    # Allow at most 2 requests per second for this user
    user.mt_credential.setQuota('http_throughput', 2)
    route = DefaultRoute(self.c1, rate=0.0)
    yield self.connect('127.0.0.1', self.pbPort)
    yield self.prepareRoutingsAndStartConnector(user, route)

    # Set content
    self.params['content'] = 'Any Content'
    baseurl = 'http://127.0.0.1:1401/send'

    # Send a bunch of MT messages
    # We should receive a msg id for success and error when throughput is exceeded
    start_time = datetime.now()
    throughput_exceeded_errors = 0
    request_counter = 0
    for x in range(5000):
        agent = Agent(reactor)
        client = HTTPClient(agent)
        response = yield client.post(baseurl, data=self.params)
        response_text = yield text_content(response)
        response_code = response.code
        request_counter += 1
        if response_code == 403 and response_text == 'Error "User throughput exceeded"':
            throughput_exceeded_errors += 1
    end_time = datetime.now()

    # Wait 2 seconds before stopping SmppClientConnectors
    yield waitFor(2)
    yield self.stopSmppClientConnectors()

    # Asserts (tolerance of -/+ 3 messages)
    # 'throughput' here is the minimum delay in seconds between two
    # accepted requests (inverse of the quota).
    throughput = 1 / float(user.mt_credential.getQuota('http_throughput'))
    dt = end_time - start_time
    # Everything beyond what the elapsed time allows should be rejected.
    # NOTE: dt.seconds ignores sub-second precision; the +/- 3 tolerance
    # below absorbs that.
    max_unsuccessfull_requests = request_counter - (dt.seconds / throughput)
    unsuccessfull_requests = throughput_exceeded_errors
    self.assertGreaterEqual(unsuccessfull_requests, max_unsuccessfull_requests - 3)
    self.assertLessEqual(unsuccessfull_requests, max_unsuccessfull_requests + 3)
def test_tagging(self):
    """Refs #495

    Tag the message inside the interceptor script and assert that routing
    based on a tagfilter was correctly done.
    """
    # Re-provision interceptor with a script that tags the routable
    mt_interceptor = MTInterceptorScript("routable.addTag(10)")
    yield self.mtinterceptor_add(DefaultInterceptor(mt_interceptor), 0)

    # Shadow the default route (high order value) with a static route
    # carrying a tagfilter that matches the tag set by the interceptor.
    yield self.mtroute_flush()
    yield self.mtroute_add(StaticMTRoute([TagFilter(10)], self.c1, 0.0), 1000)

    # Connect to InterceptorPB
    yield self.ipb_connect()

    # Send a SMS MT through the http interface
    send_url = 'http://127.0.0.1:1401/send'
    send_params = {'to': '06155423',
                   'content': 'test',
                   'username': self.u1.username,
                   'password': self.u1_password}
    client = HTTPClient(Agent(reactor))
    response = yield client.post(send_url, data=send_params)
    status_code = response.code
    body = yield text_content(response)

    # Give smppc some time to deliver the message
    yield waitFor(2)

    # Asserts: accepted, and one submit reached the SMSC
    self.assertEqual(status_code, 200)
    self.assertEqual(1, len(self.SMSCPort.factory.lastClient.submitRecords))
class TestFakeDockerServer(TestCase):
    """Exercise the fake Docker daemon directly (no proxy in between)."""

    def setUp(self):
        # Listen on an ephemeral port and point a fresh treq client at it.
        self.dockerAPI = FakeDockerServer()
        self.dockerServer = reactor.listenTCP(0, self.dockerAPI)
        self.dockerPort = self.dockerServer.getHost().port
        self.agent = Agent(reactor)  # no connectionpool
        self.client = HTTPClient(self.agent)

    def tearDown(self):
        return self.dockerServer.stopListening()

    def test_douglas_adams_would_be_proud(self):
        # The fake daemon echoes the JSON body back with an extra marker key.
        request = self.client.post(
            'http://127.0.0.1:%d/towel' % (self.dockerPort,),
            json.dumps({"hiding": "things"}),
            headers={'Content-Type': ['application/json']})
        request.addCallback(treq.json_content)

        def verify(response):
            self.assertEqual(response,
                             {"hiding": "things", "SeenByFakeDocker": 42})
        request.addCallback(verify)
        return request
def test_send_success(self):
    """Happy path: with InterceptorPB connected and a valid script, the
    message is intercepted, rewritten and delivered through smppc."""
    count_before = self.stats_http.get('interceptor_count')
    error_count_before = self.stats_http.get('interceptor_error_count')

    # Re-provision interceptor with correct script
    mt_interceptor = MTInterceptorScript(self.update_message_sript)
    yield self.mtinterceptor_add(DefaultInterceptor(mt_interceptor), 0)

    # Connect to InterceptorPB
    yield self.ipb_connect()

    # Send a SMS MT through the http interface
    send_url = 'http://127.0.0.1:1401/send'
    send_params = {'to': '06155423',
                   'content': 'test',
                   'username': self.u1.username,
                   'password': self.u1_password}
    client = HTTPClient(Agent(reactor))
    response = yield client.post(send_url, data=send_params)
    status_code = response.code
    body = yield text_content(response)

    # Wait some time for message delivery through smppc
    yield waitFor(2)

    # Asserts: accepted, delivered once, content rewritten by the script,
    # interceptor_count bumped and no interception error recorded.
    self.assertEqual(status_code, 200)
    self.assertEqual(1, len(self.SMSCPort.factory.lastClient.submitRecords))
    self.assertEqual(
        b'Intercepted message',
        self.SMSCPort.factory.lastClient.submitRecords[0].params['short_message'])
    self.assertEqual(count_before + 1, self.stats_http.get('interceptor_count'))
    self.assertEqual(error_count_before,
                     self.stats_http.get('interceptor_error_count'))
def test_send_ESME_RINVESMCLASS_from_script(self):
    """Ensure that a script returning only an SMPP error implicitly maps to
    an http 520 error."""
    count_before = self.stats_http.get('interceptor_count')
    error_count_before = self.stats_http.get('interceptor_error_count')

    # Re-provision interceptor with a script returning ESME_RINVESMCLASS
    mt_interceptor = MTInterceptorScript(self.return_ESME_RINVESMCLASS)
    yield self.mtinterceptor_add(DefaultInterceptor(mt_interceptor), 0)

    # Connect to InterceptorPB
    yield self.ipb_connect()

    # Send a SMS MT through the http interface
    send_url = 'http://127.0.0.1:1401/send'
    send_params = {'to': '06155423',
                   'content': 'test',
                   'username': self.u1.username,
                   'password': self.u1_password}
    client = HTTPClient(Agent(reactor))
    response = yield client.post(send_url, data=send_params)
    status_code = response.code
    body = yield text_content(response)

    # Wait some time for message delivery through smppc
    yield waitFor(2)

    # Asserts
    self.assertEqual(status_code, 520)
    self.assertEqual(body, 'Error "Interception specific error code 520"')
    self.assertEqual(count_before, self.stats_http.get('interceptor_count'))
    self.assertEqual(error_count_before + 1,
                     self.stats_http.get('interceptor_error_count'))
def test_send_with_tags(self):
    """Related to #455

    Send a message through the http api using tags and assert the tags end
    up inside the short_message (rewritten by the interception script).
    """
    # Re-provision interceptor with the tag-echoing script
    mt_interceptor = MTInterceptorScript(self.update_message_from_tags_sript)
    yield self.mtinterceptor_add(DefaultInterceptor(mt_interceptor), 0)

    # Connect to InterceptorPB
    yield self.ipb_connect()

    # Send a SMS MT through the http interface, carrying two tags
    send_url = 'http://127.0.0.1:1401/send'
    send_params = {'to': '06155423',
                   'content': 'temporary',
                   'username': self.u1.username,
                   'password': self.u1_password,
                   'tags': '123,456'}
    client = HTTPClient(Agent(reactor))
    response = yield client.post(send_url, data=send_params)
    status_code = response.code
    body = yield text_content(response)

    # Wait some time for message delivery through smppc
    yield waitFor(2)

    # Asserts: the script replaced the content with the tag list
    self.assertEqual(status_code, 200)
    self.assertEqual(1, len(self.SMSCPort.factory.lastClient.submitRecords))
    self.assertEqual(
        b"['123', '456']",
        self.SMSCPort.factory.lastClient.submitRecords[0].params['short_message'])
def test_send_any_exception_from_script(self):
    """An arbitrary exception raised inside the interception script must map
    to an http 400 and increment the interceptor error counter only."""
    count_before = self.stats_http.get('interceptor_count')
    error_count_before = self.stats_http.get('interceptor_error_count')

    # Re-provision interceptor with a script raising an exception
    mt_interceptor = MTInterceptorScript(self.raise_any_exception)
    yield self.mtinterceptor_add(DefaultInterceptor(mt_interceptor), 0)

    # Connect to InterceptorPB
    yield self.ipb_connect()

    # Send a SMS MT through the http interface
    send_url = 'http://127.0.0.1:1401/send'
    send_params = {'to': '06155423',
                   'content': 'test',
                   'username': self.u1.username,
                   'password': self.u1_password}
    client = HTTPClient(Agent(reactor))
    response = yield client.post(send_url, data=send_params)
    status_code = response.code
    body = yield text_content(response)

    # Wait some time for message delivery through smppc
    yield waitFor(2)

    # Asserts
    self.assertEqual(status_code, 400)
    self.assertEqual(
        body,
        'Error "Failed running interception script, check log for details"'
    )
    self.assertEqual(count_before, self.stats_http.get('interceptor_count'))
    self.assertEqual(error_count_before + 1,
                     self.stats_http.get('interceptor_error_count'))
def test_send_and_lock_param(self):
    """Related to #458

    Set and lock sm_default_msg_id inside the interceptor and assert it is
    kept as set when the message reaches the SMSC.
    """
    # Re-provision interceptor with the param-locking script
    mt_interceptor = MTInterceptorScript(self.lock_param_script)
    yield self.mtinterceptor_add(DefaultInterceptor(mt_interceptor), 0)

    # Connect to InterceptorPB
    yield self.ipb_connect()

    # Send a SMS MT through the http interface
    send_url = 'http://127.0.0.1:1401/send'
    send_params = {'to': '06155423',
                   'content': 'temporary',
                   'username': self.u1.username,
                   'password': self.u1_password}
    client = HTTPClient(Agent(reactor))
    response = yield client.post(send_url, data=send_params)
    status_code = response.code
    body = yield text_content(response)

    # Wait some time for message delivery through smppc
    yield waitFor(2)

    # Asserts: the locked param survived all the way to the SMSC
    self.assertEqual(status_code, 200)
    self.assertEqual(1, len(self.SMSCPort.factory.lastClient.submitRecords))
    self.assertEqual(
        10,
        self.SMSCPort.factory.lastClient.submitRecords[0].params['sm_default_msg_id'])
class MountResource(resource.Resource):
    """
    A powerstrip pre-hook for container create.

    Talks to the Flocker control service to create a dataset, or move an
    existing one onto this host, then answers the docker plugin request
    with the resulting mountpoint.
    """
    isLeaf = True

    def __init__(self, *args, **kw):
        self._agent = Agent(reactor)  # no connectionpool
        self.client = HTTPClient(self._agent)
        return resource.Resource.__init__(self, *args, **kw)

    def render_POST(self, request):
        """
        Handle a pre-hook: either create a filesystem, or move it in place.
        """
        json_parsed = json.loads(request.content.read())
        print ">>> called with", json_parsed
        pprint.pprint(os.environ)
        # BASE_URL like http://control-service/v1/ ^
        self.base_url = os.environ.get("FLOCKER_CONTROL_SERVICE_BASE_URL")
        self.ip = os.environ.get("MY_NETWORK_IDENTITY")

        def wait_until_volume_in_place(result, fs):
            """
            Called after a dataset has been created or moved in the cluster's
            desired configuration. Wait until the volume shows up in the
            cluster actual state on the right host (either having been
            created or moved).

            :return: Deferred which fires with the tuple (fs, dataset_id) --
                that is, the filesystem and the corresponding flocker dataset
                uuid that the docker client asked for -- firing only once the
                filesystem has been created/moved and mounted (iow, exists on
                the right host in the cluster state).
            """
            print "wait_until_volume_in_place while processing", fs, "got result", result
            dataset_id = result["dataset_id"]

            def dataset_exists():
                # Poll the actual cluster state for the dataset.
                d = self.client.get(self.base_url + "/state/datasets")
                d.addCallback(treq.json_content)

                def check_dataset_exists(datasets):
                    """
                    The /v1/state/datasets API seems to show the volume as
                    being on two hosts at once during a move. We assume
                    therefore that when it settles down to only show it on
                    one host that this means the move is complete.
                    """
                    print "Got", self.ip, self.host_uuid, "datasets:", datasets
                    matching_datasets = []
                    for dataset in datasets:
                        if dataset["dataset_id"] == dataset_id:
                            matching_datasets.append(dataset)
                    if len(matching_datasets) == 1:
                        if matching_datasets[0]["primary"] == self.host_uuid:
                            return matching_datasets[0]
                    # Falsy return keeps loop_until polling.
                    return False
                d.addCallback(check_dataset_exists)
                return d
            d = loop_until(dataset_exists)
            d.addCallback(lambda dataset: (fs, dataset))
            return d

        d = self.client.get(self.base_url + "/state/nodes")
        d.addCallback(treq.json_content)

        def find_my_uuid(nodes):
            # Record the uuid of the node whose address matches our network
            # identity, then fetch the configured datasets.
            for node in nodes:
                if node["host"] == self.ip:
                    self.host_uuid = node["uuid"]
                    break
            return self.client.get(self.base_url + "/configuration/datasets")
        d.addCallback(find_my_uuid)
        d.addCallback(treq.json_content)

        def got_dataset_configuration(configured_datasets):
            # form a mapping from names onto dataset objects
            configured_dataset_mapping = {}
            for dataset in configured_datasets:
                if dataset["metadata"].get("name"):
                    configured_dataset_mapping[dataset["metadata"].get("name")] = dataset

            # iterate over the datasets we were asked to create by the docker client
            fs_create_deferreds = []
            old_binds = []
            print "got json_parsed...", json_parsed
            if json_parsed['Name'] is not None and json_parsed['Name'] != "":
                binds = [json_parsed['Name']]
                for bind in binds:
                    fs, remainder = bind, ""
                    # TODO validation
                    # if "/" in fs:
                    #     raise Exception("Not allowed flocker filesystems more than one level deep")
                    old_binds.append((fs, remainder))
                    # if a dataset exists, and is in the right place, we're cool.
                    if fs in configured_dataset_mapping:
                        dataset = configured_dataset_mapping[fs]
                        if dataset["primary"] == self.host_uuid:
                            # check / wait for the state to match the desired
                            # configuration
                            fs_create_deferreds.append(wait_until_volume_in_place(dataset, fs=fs))
                        else:
                            # if a dataset exists, but is on the wrong server [TODO
                            # and is not being used], then move it in place.
                            d = self.client.post(
                                self.base_url + "/configuration/datasets/%s" % (
                                    dataset["dataset_id"].encode('ascii'),),
                                json.dumps({"primary": self.host_uuid}),
                                headers={'Content-Type': ['application/json']})
                            d.addCallback(treq.json_content)
                            d.addCallback(wait_until_volume_in_place, fs=fs)
                            fs_create_deferreds.append(d)
                    else:
                        # if a dataset doesn't exist at all, create it on this server.
                        d = self.client.post(
                            self.base_url + "/configuration/datasets",
                            json.dumps({"primary": self.host_uuid,
                                        "metadata": {"name": fs}}),
                            headers={'Content-Type': ['application/json']})
                        d.addCallback(treq.json_content)
                        d.addCallback(wait_until_volume_in_place, fs=fs)
                        fs_create_deferreds.append(d)
            d = defer.gatherResults(fs_create_deferreds)

            def got_created_and_moved_datasets(list_new_datasets):
                # list_new_datasets is a list of (fs, dataset) tuples from
                # wait_until_volume_in_place.
                dataset_mapping = dict(list_new_datasets)
                print "constructed dataset_mapping", dataset_mapping
                new_binds = []
                for fs, remainder in old_binds:
                    # forget about remainder...
                    new_binds.append(dataset_mapping[fs]["path"])
                new_json = {}
                if new_binds:
                    new_json["Mountpoint"] = new_binds[0]
                    new_json["Err"] = None
                else:
                    # This is how you indicate not handling this request
                    new_json["Mountpoint"] = ""
                    new_json["Err"] = "unable to handle"
                print "<<< responding with", new_json
                request.write(json.dumps(new_json))
                request.finish()
            d.addCallback(got_created_and_moved_datasets)
            return d
        d.addCallback(got_dataset_configuration)
        d.addErrback(log.err, 'while processing configured datasets')
        return server.NOT_DONE_YET
class ProxyTests(TestCase, GenerallyUsefulPowerstripTestMixin):
    """End-to-end tests of the powerstrip proxy with pre- and post-hook
    adapter chains."""

    def setUp(self):
        """
        Construct a fake "Docker daemon" (one which does much less than the
        actual Docker daemon) and a Proxy instance.

        Pre- and post-hook API servers are provided by the individual tests.
        """
        self.agent = Agent(reactor)  # no connectionpool
        self.client = HTTPClient(self.agent)

    def tearDown(self):
        # Stop every listener the test started; the adder servers are only
        # present when a hook test created them.
        shutdowns = [
            self.dockerServer.stopListening(),
            self.proxyServer.stopListening()]
        if hasattr(self, 'adderServer'):
            shutdowns.append(self.adderServer.stopListening())
        if hasattr(self, 'adderTwoServer'):
            shutdowns.append(self.adderTwoServer.stopListening())
        return defer.gatherResults(shutdowns)

    def test_empty_endpoints(self):
        """
        The proxy passes through requests when no endpoints are specified.

        In particular, when POST to the /towel endpoint on the *proxy*, we
        get to see that we were seen by the (admittedly fake) Docker daemon.
        """
        self._configure("endpoints: {}\nadapters: {}")
        d = self.client.post('http://127.0.0.1:%d/towel' % (self.proxyPort,),
                             json.dumps({"hiding": "things"}),
                             headers={'Content-Type': ['application/json']})
        d.addCallback(treq.json_content)
        def verify(response):
            self.assertEqual(response,
                             {"hiding": "things", "SeenByFakeDocker": 42})
        d.addCallback(verify)
        return d

    def test_empty_endpoints_socket(self):
        """
        The proxy is able to connect to Docker on a UNIX socket.
        """
        self._configure("endpoints: {}\nadapters: {}", dockerOnSocket=True)
        d = self.client.post('http://127.0.0.1:%d/towel' % (self.proxyPort,),
                             json.dumps({"hiding": "things"}),
                             headers={'Content-Type': ['application/json']})
        d.addCallback(treq.json_content)
        def verify(response):
            self.assertEqual(response,
                             {"hiding": "things", "SeenByFakeDocker": 42})
        d.addCallback(verify)
        return d

    def test_endpoint_and_empty_hooks(self):
        """
        An endpoint is specified, but no pre-or post hooks are added to it.
        Requests to the endpoint are proxied.
        """
        endpoint = "/towel"
        # NOTE(review): YAML layout reconstructed from the mangled source —
        # confirm indentation against the original file.
        self._configure("""endpoints:
  "POST %s":
    pre: []
    post: []
adapters: {}""" % (endpoint,))
        d = self.client.post('http://127.0.0.1:%d%s' % (self.proxyPort, endpoint),
                             json.dumps({"hiding": "things"}),
                             headers={'Content-Type': ['application/json']})
        d.addCallback(treq.json_content)
        def verify(response):
            self.assertEqual(response,
                             {"hiding": "things", "SeenByFakeDocker": 42})
        d.addCallback(verify)
        return d

    def _getAdder(self, *args, **kw):
        # Start an adder adapter server on an ephemeral port.
        self.adderAPI = TrafficLoggingFactory(AdderPlugin(*args, **kw),
                                              "adder-")
        self.adderServer = reactor.listenTCP(0, self.adderAPI)
        self.adderPort = self.adderServer.getHost().port

    def _getAdderTwo(self, *args, **kw):
        # Second adder which increments by 2 instead of 1.
        kw["incrementBy"] = 2
        self.adderTwoAPI = TrafficLoggingFactory(AdderPlugin(*args, **kw),
                                                 "adder2-")
        self.adderTwoServer = reactor.listenTCP(0, self.adderTwoAPI)
        self.adderTwoPort = self.adderTwoServer.getHost().port

    def _hookTest(self, config_yml, adderArgs=dict(pre=True),
                  adderTwoArgs=dict(pre=True)):
        """
        Generalised version of a pre-hook test.
        """
        self._getAdder(**adderArgs)
        self._getAdderTwo(**adderTwoArgs)
        self.dockerEndpoint = "/towel"
        self.adapterEndpoint = "/adapter"
        self.args = dict(dockerEndpoint=self.dockerEndpoint,
                         adapterEndpoint=self.adapterEndpoint,
                         adderPort=self.adderPort,
                         adderTwoPort=self.adderTwoPort)
        self._configure(config_yml % self.args)
        self.args["proxyPort"] = self.proxyPort
        d = self.client.post('http://127.0.0.1:%(proxyPort)d%(dockerEndpoint)s' % self.args,
                             json.dumps({"Number": 1}),
                             headers={'Content-Type': ['application/json']})
        d.addCallback(treq.json_content)
        def debug(result, *args, **kw):
            # Pass-through callback, kept as a breakpoint hook.
            return result
        d.addCallback(debug)
        return d

    def test_adding_pre_hook_adapter(self):
        """
        A adapter has a pre-hook which increments an integral field in the
        JSON POST body called "Number" which starts with value 1.

        Calling that pre-hook once increments the number to 2.
        """
        d = self._hookTest("""endpoints:
  "POST %(dockerEndpoint)s":
    pre: [adder]
    post: []
adapters:
  adder: http://127.0.0.1:%(adderPort)d%(adapterEndpoint)s""")
        def verify(response):
            self.assertEqual(response,
                             {"Number": 2, "SeenByFakeDocker": 42})
        d.addCallback(verify)
        return d

    def test_adding_pre_hook_twice_adapter(self):
        """
        Chaining pre-hooks: adding twice means you get +2.

        Note that the naming here is confusing. the adapter "adder2" here is
        defined as being the **same adapter** as "adder", which increments
        by 1. In later tests, we use a different adder on "adderTwoPort"
        which increments by 2.
        """
        d = self._hookTest("""endpoints:
  "POST %(dockerEndpoint)s":
    pre: [adder, adder2]
    post: []
adapters:
  adder: http://127.0.0.1:%(adderPort)d%(adapterEndpoint)s
  adder2: http://127.0.0.1:%(adderPort)d%(adapterEndpoint)s""")
        def verify(response):
            self.assertEqual(response,
                             {"Number": 3, "SeenByFakeDocker": 42})
        d.addCallback(verify)
        return d

    def test_adding_one_then_two_pre_hook_adapter(self):
        """
        Chaining pre-hooks: adding +1 and then +2 gives you +3.

        Note that the naming here is confusing. the adapter "adder2" here is
        defined as being a **different adapter** to "adder", which
        increments by 2.
        """
        d = self._hookTest("""endpoints:
  "POST %(dockerEndpoint)s":
    pre: [adder, adder2]
    post: []
adapters:
  adder: http://127.0.0.1:%(adderPort)d%(adapterEndpoint)s
  adder2: http://127.0.0.1:%(adderTwoPort)d%(adapterEndpoint)s""")
        def verify(response):
            self.assertEqual(response,
                             {"Number": 4, "SeenByFakeDocker": 42})
        d.addCallback(verify)
        return d

    def test_adding_post_hook_adapter(self):
        """
        A adapter has a post-hook which increments an integral field in the
        JSON (Docker) response body called "Number".
        """
        d = self._hookTest("""endpoints:
  "POST %(dockerEndpoint)s":
    pre: []
    post: [adder]
adapters:
  adder: http://127.0.0.1:%(adderPort)d%(adapterEndpoint)s""",
                           adderArgs=dict(post=True))
        def verify(response):
            self.assertEqual(response,
                             {"Number": 2, "SeenByFakeDocker": 42})
        d.addCallback(verify)
        return d

    def test_content_length_post_hook(self):
        """
        When the content length is changed by a post-hook, test that
        powerstrip returns the correct content as per the content-length
        """
        d = self._hookTest("""endpoints:
  "POST %(dockerEndpoint)s":
    pre: []
    post: [adder,adder,adder,adder,adder,adder,adder,adder,adder]
adapters:
  adder: http://127.0.0.1:%(adderPort)d%(adapterEndpoint)s""",
                           adderArgs=dict(post=True))
        def verify(response):
            self.assertEqual(response,
                             {"Number": 10, "SeenByFakeDocker": 42})
        d.addCallback(verify)
        return d

    def test_adding_post_hook_twice_adapter(self):
        """
        Chaining post-hooks: adding twice means you get +2.
        """
        d = self._hookTest("""endpoints:
  "POST %(dockerEndpoint)s":
    pre: []
    post: [adder, adder2]
adapters:
  adder: http://127.0.0.1:%(adderPort)d%(adapterEndpoint)s
  adder2: http://127.0.0.1:%(adderTwoPort)d%(adapterEndpoint)s""",
                           adderArgs=dict(post=True),
                           adderTwoArgs=dict(post=True))
        def verify(response):
            self.assertEqual(response,
                             {"Number": 4, "SeenByFakeDocker": 42})
        d.addCallback(verify)
        return d

    def test_stream_endpoint(self):
        """
        A streaming (aka hijacking) endpoint like /attach is permitted with
        no post-hooks (the Docker response's content-type is detected and
        the entire connection switched down into simple TCP-proxying mode
        (with support for half-close).
        """
        self._configure("endpoints: {}\nadapters: {}",
                        dockerArgs=dict(rawStream=True))
        d = self.client.post('http://127.0.0.1:%d/towel' % (self.proxyPort,),
                             json.dumps({"raw": "stream"}),
                             headers={'Content-Type': ['application/json']})
        def verify(response):
            self.assertEqual(response.headers.getRawHeaders("content-type"),
                             ["application/vnd.docker.raw-stream"])
            # TODO Verify that half-close, and bi-directional TCP proxying
            # works.
        d.addCallback(verify)
        return d

    def test_chunked_endpoint(self):
        """
        A chunking endpoint like /pull is permitted with no post-hooks (the
        Docker response's Content-Encoding is chunked).
        """
        self._configure("endpoints: {}\nadapters: {}",
                        dockerArgs=dict(chunkedResponse=True))
        d = self.client.post('http://127.0.0.1:%d/towel' % (self.proxyPort,),
                             json.dumps({"chunked": "response"}),
                             headers={'Content-Type': ['application/json']})
        def verify(response):
            self.assertEqual(
                response.headers.getRawHeaders("transfer-encoding"),
                ["chunked"])
        d.addCallback(verify)
        return d
    # trial skips a test when its .skip attribute is set at class level
    test_chunked_endpoint.skip = ("Doesn't work yet. "
                                  "Need a fake docker which can emit chunked encodings.")

    def test_endpoint_GET_args(self):
        """
        An endpoint is matched when it has ?-style GET arguments (and no
        JSON body), and the GET request is passed through.
        """
        self._configure("endpoints: {}\nadapters: {}",
                        dockerArgs=dict(chunkedResponse=True))
        d = self.client.get('http://127.0.0.1:%d/info?return=fish' % (self.proxyPort,))
        d.addCallback(treq.content)
        def verify(response):
            self.assertEqual(response, "INFORMATION FOR YOU: fish")
        d.addCallback(verify)
        return d

    def test_stream_endpoint_reject_post_hook(self):
        """
        A streaming (aka hijacking) endpoint like /attach is rejected if a
        post-hook is attached: a runtime error is raised when the
        Content-Type is detected.
        """
    test_stream_endpoint_reject_post_hook.skip = "not implemented yet"

    def test_chunked_endpoint_reject_post_hook(self):
        """
        A chunking endpoint like /pull is rejected if a post-hook is
        attached: a runtime error is raised when the Content-Encoding is
        detected.
        """
    test_chunked_endpoint_reject_post_hook.skip = "not implemented yet"

    def test_prehook_error_does_not_call_docker(self):
        """
        An error in the pre-hook does not call through to Docker and returns
        the error to the user.
        """
    test_prehook_error_does_not_call_docker.skip = "not implemented yet"

    def test_prehook_error_stops_chain(self):
        """
        An error in the pre-hook stops the chain when there are multiple
        pre-hooks.
        """
    test_prehook_error_stops_chain.skip = "not implemented yet"

    def test_posthook_error_stops_chain(self):
        """
        An error in the post-hook stops the chain and returns the error to
        the user.
        """
    test_posthook_error_stops_chain.skip = "not implemented yet"

    def test_docker_error_does_not_stop_posthooks(self):
        """
        If Docker returns an HTTP error code, the post-hooks are given a
        chance to take a look at it/modify it.
        """
    test_docker_error_does_not_stop_posthooks.skip = "not implemented yet"

    def test_second_pre_hook_gets_new_request_and_method(self):
        """
        Chaining pre-hooks: the next pre-hook gets the request and method
        from the previous.
        """
    test_second_pre_hook_gets_new_request_and_method.skip = "not implemented yet"

    def test_second_post_hook_gets_new_request_and_code(self):
        """
        Chaining post-hooks: the next post-hook gets the request and code
        from the previous. Also content-type.
        """
    test_second_post_hook_gets_new_request_and_code.skip = "not implemented yet"

    def test_endpoint_globbing(self):
        """
        An endpoint is matched when there are '*' characters in the string
        """
    test_endpoint_globbing.skip = "not implemented yet"
class ProxyTests(TestCase):
    """
    Integration tests for the powerstrip proxy: a fake Docker daemon and a
    proxy are started per-test; adapter (pre/post hook) servers are started
    by the individual tests that need them.
    """

    def setUp(self):
        """
        Construct a fake "Docker daemon" (one which does much less than the
        actual Docker daemon) and a Proxy instance.

        Pre- and post-hook API servers are provided by the individual tests.
        """
        self.agent = Agent(reactor) # no connectionpool
        self.client = HTTPClient(self.agent)

    def tearDown(self):
        # Stop every listener we started; adder servers exist only for tests
        # that called _getAdder/_getAdderTwo, hence the hasattr checks.
        shutdowns = [
            self.dockerServer.stopListening(),
            self.proxyServer.stopListening()]
        if hasattr(self, 'adderServer'):
            shutdowns.append(self.adderServer.stopListening())
        if hasattr(self, 'adderTwoServer'):
            shutdowns.append(self.adderTwoServer.stopListening())
        return defer.gatherResults(shutdowns)

    def _configure(self, config_yml, dockerArgs={}, dockerOnSocket=False):
        # Start the fake Docker daemon (TCP or UNIX socket), write the YAML
        # configuration to a temp file, then start the proxy pointing at the
        # fake daemon.
        # NOTE(review): dockerArgs={} is a mutable default argument; it is
        # only splatted into FakeDockerServer(**dockerArgs), never mutated,
        # so it is harmless here.
        self.dockerAPI = TrafficLoggingFactory(testtools.FakeDockerServer(**dockerArgs), "docker-")
        if dockerOnSocket:
            self.socketPath = self.mktemp()
            self.dockerServer = reactor.listenUNIX(self.socketPath, self.dockerAPI)
        else:
            self.dockerServer = reactor.listenTCP(0, self.dockerAPI)
            self.dockerPort = self.dockerServer.getHost().port
        self.config = PluginConfiguration()
        tmp = self.mktemp()
        self.config._default_file = tmp
        fp = FilePath(tmp)
        fp.setContent(config_yml)
        self.parser = EndpointParser(self.config)
        if dockerOnSocket:
            self.proxyAPI = TrafficLoggingFactory(powerstrip.ServerProtocolFactory(
                dockerSocket=self.socketPath, config=self.config), "proxy-")
        else:
            self.proxyAPI = TrafficLoggingFactory(
                    powerstrip.ServerProtocolFactory(
                        dockerAddr="127.0.0.1", dockerPort=self.dockerPort,
                        config=self.config), "proxy-")
        self.proxyServer = reactor.listenTCP(0, self.proxyAPI)
        self.proxyPort = self.proxyServer.getHost().port

    def test_empty_endpoints(self):
        """
        The proxy passes through requests when no endpoints are specified.

        In particular, when POST to the /towel endpoint on the *proxy*, we
        get to see that we were seen by the (admittedly fake) Docker daemon.
        """
        self._configure("endpoints: {}\nadapters: {}")
        d = self.client.post('http://127.0.0.1:%d/towel' % (self.proxyPort,),
                             json.dumps({"hiding": "things"}),
                             headers={'Content-Type': ['application/json']})
        d.addCallback(treq.json_content)
        def verify(response):
            self.assertEqual(response,
                             {"hiding": "things", "SeenByFakeDocker": 42})
        d.addCallback(verify)
        return d

    def test_empty_endpoints_socket(self):
        """
        The proxy is able to connect to Docker on a UNIX socket.
        """
        self._configure("endpoints: {}\nadapters: {}", dockerOnSocket=True)
        d = self.client.post('http://127.0.0.1:%d/towel' % (self.proxyPort,),
                             json.dumps({"hiding": "things"}),
                             headers={'Content-Type': ['application/json']})
        d.addCallback(treq.json_content)
        def verify(response):
            self.assertEqual(response,
                             {"hiding": "things", "SeenByFakeDocker": 42})
        d.addCallback(verify)
        return d

    def test_endpoint_and_empty_hooks(self):
        """
        An endpoint is specified, but no pre-or post hooks are added to it.
        Requests to the endpoint are proxied.
        """
        endpoint = "/towel"
        # NOTE(review): YAML line layout of this literal reconstructed from
        # its flattened form — confirm against the original file.
        self._configure("""endpoints:
  "POST %s":
    pre: []
    post: []
adapters: {}""" % (endpoint,))
        d = self.client.post('http://127.0.0.1:%d%s' % (self.proxyPort, endpoint),
                             json.dumps({"hiding": "things"}),
                             headers={'Content-Type': ['application/json']})
        d.addCallback(treq.json_content)
        def verify(response):
            self.assertEqual(response,
                             {"hiding": "things", "SeenByFakeDocker": 42})
        d.addCallback(verify)
        return d

    def _getAdder(self, *args, **kw):
        # Start the "+1" adder adapter on an ephemeral port.
        self.adderAPI = TrafficLoggingFactory(AdderPlugin(*args, **kw), "adder-")
        self.adderServer = reactor.listenTCP(0, self.adderAPI)
        self.adderPort = self.adderServer.getHost().port

    def _getAdderTwo(self, *args, **kw):
        # Start a second adder adapter which increments by 2.
        kw["incrementBy"] = 2
        self.adderTwoAPI = TrafficLoggingFactory(AdderPlugin(*args, **kw), "adder2-")
        self.adderTwoServer = reactor.listenTCP(0, self.adderTwoAPI)
        self.adderTwoPort = self.adderTwoServer.getHost().port

    def _hookTest(self, config_yml, adderArgs=dict(pre=True),
                  adderTwoArgs=dict(pre=True)):
        """
        Generalised version of a pre-hook test.
        """
        self._getAdder(**adderArgs)
        self._getAdderTwo(**adderTwoArgs)
        self.dockerEndpoint = "/towel"
        self.adapterEndpoint = "/adapter"
        self.args = dict(dockerEndpoint=self.dockerEndpoint,
                         adapterEndpoint=self.adapterEndpoint,
                         adderPort=self.adderPort,
                         adderTwoPort=self.adderTwoPort)
        self._configure(config_yml % self.args)
        self.args["proxyPort"] = self.proxyPort
        d = self.client.post('http://127.0.0.1:%(proxyPort)d%(dockerEndpoint)s' % self.args,
                             json.dumps({"Number": 1}),
                             headers={'Content-Type': ['application/json']})
        d.addCallback(treq.json_content)
        def debug(result, *args, **kw):
            # Hook point for dropping a breakpoint while debugging.
            return result
        d.addCallback(debug)
        return d

    def test_adding_pre_hook_adapter(self):
        """
        A adapter has a pre-hook which increments an integral field in the
        JSON POST body called "Number" which starts with value 1.  Calling
        that pre-hook once increments the number to 2.
        """
        d = self._hookTest("""endpoints:
  "POST %(dockerEndpoint)s":
    pre: [adder]
    post: []
adapters:
  adder: http://127.0.0.1:%(adderPort)d%(adapterEndpoint)s""")
        def verify(response):
            self.assertEqual(response,
                             {"Number": 2, "SeenByFakeDocker": 42})
        d.addCallback(verify)
        return d

    def test_adding_pre_hook_twice_adapter(self):
        """
        Chaining pre-hooks: adding twice means you get +2.

        Note that the naming here is confusing. the adapter "adder2" here is
        defined as being the **same adapter** as "adder", which increments
        by 1.  In later tests, we use a different adder on "adderTwoPort"
        which increments by 2.
        """
        d = self._hookTest("""endpoints:
  "POST %(dockerEndpoint)s":
    pre: [adder, adder2]
    post: []
adapters:
  adder: http://127.0.0.1:%(adderPort)d%(adapterEndpoint)s
  adder2: http://127.0.0.1:%(adderPort)d%(adapterEndpoint)s""")
        def verify(response):
            self.assertEqual(response,
                             {"Number": 3, "SeenByFakeDocker": 42})
        d.addCallback(verify)
        return d

    def test_adding_one_then_two_pre_hook_adapter(self):
        """
        Chaining pre-hooks: adding +1 and then +2 gives you +3.

        Note that the naming here is confusing. the adapter "adder2" here is
        defined as being a **different adapter** to "adder", which
        increments by 2.
        """
        d = self._hookTest("""endpoints:
  "POST %(dockerEndpoint)s":
    pre: [adder, adder2]
    post: []
adapters:
  adder: http://127.0.0.1:%(adderPort)d%(adapterEndpoint)s
  adder2: http://127.0.0.1:%(adderTwoPort)d%(adapterEndpoint)s""")
        def verify(response):
            self.assertEqual(response,
                             {"Number": 4, "SeenByFakeDocker": 42})
        d.addCallback(verify)
        return d

    def test_adding_post_hook_adapter(self):
        """
        A adapter has a post-hook which increments an integral field in the
        JSON (Docker) response body called "Number".
        """
        d = self._hookTest("""endpoints:
  "POST %(dockerEndpoint)s":
    pre: []
    post: [adder]
adapters:
  adder: http://127.0.0.1:%(adderPort)d%(adapterEndpoint)s""",
            adderArgs=dict(post=True))
        def verify(response):
            self.assertEqual(response,
                             {"Number": 2, "SeenByFakeDocker": 42})
        d.addCallback(verify)
        return d

    def test_adding_post_hook_twice_adapter(self):
        """
        Chaining post-hooks: adding twice means you get +2.
        """
        d = self._hookTest("""endpoints:
  "POST %(dockerEndpoint)s":
    pre: []
    post: [adder, adder2]
adapters:
  adder: http://127.0.0.1:%(adderPort)d%(adapterEndpoint)s
  adder2: http://127.0.0.1:%(adderTwoPort)d%(adapterEndpoint)s""",
            adderArgs=dict(post=True),
            adderTwoArgs=dict(post=True))
        def verify(response):
            self.assertEqual(response,
                             {"Number": 4, "SeenByFakeDocker": 42})
        d.addCallback(verify)
        return d

    def test_stream_endpoint(self):
        """
        A streaming (aka hijacking) endpoint like /attach is permitted with
        no post-hooks (the Docker response's content-type is detected and
        the entire connection switched down into simple TCP-proxying mode
        (with support for half-close).
        """
        self._configure("endpoints: {}\nadapters: {}",
                        dockerArgs=dict(rawStream=True))
        d = self.client.post('http://127.0.0.1:%d/towel' % (self.proxyPort,),
                             json.dumps({"raw": "stream"}),
                             headers={'Content-Type': ['application/json']})
        def verify(response):
            self.assertEqual(response.headers.getRawHeaders("content-type"),
                             ["application/vnd.docker.raw-stream"])
            # TODO Verify that half-close, and bi-directional TCP proxying
            # works.
        d.addCallback(verify)
        return d

    def test_chunked_endpoint(self):
        """
        A chunking endpoint like /pull is permitted with no post-hooks (the
        Docker response's Content-Encoding is chunked).
        """
        self._configure("endpoints: {}\nadapters: {}",
                        dockerArgs=dict(chunkedResponse=True))
        d = self.client.post('http://127.0.0.1:%d/towel' % (self.proxyPort,),
                             json.dumps({"chunked": "response"}),
                             headers={'Content-Type': ['application/json']})
        def verify(response):
            self.assertEqual(response.headers.getRawHeaders("transfer-encoding"),
                             ["chunked"])
        d.addCallback(verify)
        return d
    # Trial honours a `skip` attribute on test methods.
    test_chunked_endpoint.skip = ("Doesn't work yet. "
            "Need a fake docker which can emit chunked encodings.")

    def test_endpoint_GET_args(self):
        """
        An endpoint is matched when it has ?-style GET arguments (and no
        JSON body), and the GET request is passed through.
        """
        self._configure("endpoints: {}\nadapters: {}",
                        dockerArgs=dict(chunkedResponse=True))
        d = self.client.get('http://127.0.0.1:%d/info?return=fish' % (self.proxyPort,))
        d.addCallback(treq.content)
        def verify(response):
            self.assertEqual(response, "INFORMATION FOR YOU: fish")
        d.addCallback(verify)
        return d

    def test_stream_endpoint_reject_post_hook(self):
        """
        A streaming (aka hijacking) endpoint like /attach is rejected if a
        post-hook is attached: a runtime error is raised when the
        Content-Type is detected.
        """
    test_stream_endpoint_reject_post_hook.skip = "not implemented yet"

    def test_chunked_endpoint_reject_post_hook(self):
        """
        A chunking endpoint like /pull is rejected if a post-hook is
        attached: a runtime error is raised when the Content-Encoding is
        detected.
        """
    test_chunked_endpoint_reject_post_hook.skip = "not implemented yet"

    def test_prehook_error_does_not_call_docker(self):
        """
        An error in the pre-hook does not call through to Docker and returns
        the error to the user.
        """
    test_prehook_error_does_not_call_docker.skip = "not implemented yet"

    def test_prehook_error_stops_chain(self):
        """
        An error in the pre-hook stops the chain when there are multiple
        pre-hooks.
        """
    test_prehook_error_stops_chain.skip = "not implemented yet"

    def test_posthook_error_stops_chain(self):
        """
        An error in the post-hook stops the chain and returns the error to
        the user.
        """
    test_posthook_error_stops_chain.skip = "not implemented yet"

    def test_docker_error_does_not_stop_posthooks(self):
        """
        If Docker returns an HTTP error code, the post-hooks are given a
        chance to take a look at it/modify it.
        """
    test_docker_error_does_not_stop_posthooks.skip = "not implemented yet"

    def test_second_pre_hook_gets_new_request_and_method(self):
        """
        Chaining pre-hooks: the next pre-hook gets the request and method
        from the previous.
        """
    test_second_pre_hook_gets_new_request_and_method.skip = "not implemented yet"

    def test_second_post_hook_gets_new_request_and_code(self):
        """
        Chaining post-hooks: the next post-hook gets the request and code
        from the previous.  Also content-type.
        """
    test_second_post_hook_gets_new_request_and_code.skip = "not implemented yet"

    def test_endpoint_globbing(self):
        """
        An endpoint is matched when there are '*' characters in the string
        """
    test_endpoint_globbing.skip = "not implemented yet"
class AdapterResource(resource.Resource): """ A powerstrip pre-hook for container create. """ isLeaf = True def __init__(self, *args, **kw): self._agent = Agent(reactor) # no connectionpool self.client = HTTPClient(self._agent) return resource.Resource.__init__(self, *args, **kw) def render_POST(self, request): """ Handle a pre-hook: either create a filesystem, or move it in place. """ requestJson = json.loads(request.content.read()) if requestJson["Type"] != "pre-hook": raise Exception("unsupported hook type %s" % (requestJson["Type"], )) pprint.pprint(os.environ) # BASE_URL like http://control-service/v1/ ^ json_payload = requestJson["ClientRequest"]["Body"] json_parsed = json.loads(json_payload) self.base_url = os.environ.get("FLOCKER_CONTROL_SERVICE_BASE_URL") self.ip = os.environ.get("MY_NETWORK_IDENTITY") self.host_uuid = os.environ.get("MY_HOST_UUID") def wait_until_volume_in_place(result, fs): """ Called after a dataset has been created or moved in the cluster's desired configuration. Wait until the volume shows up in the cluster actual state on the right host (either having been created or moved). :return: Deferred which fires with the tuple (fs, dataset_id) -- that is, the filesystem and the corresponding flocker dataset uuid that the docker client asked for -- firing only once the filesystem has been created/moved and mounted (iow, exists on the right host in the cluster state). """ dataset_id = result["dataset_id"] def dataset_exists(): d = self.client.get(self.base_url + "/state/datasets") d.addCallback(treq.json_content) def check_dataset_exists(datasets): """ The /v1/state/datasets API seems to show the volume as being on two hosts at once during a move. We assume therefore that when it settles down to only show it on one host that this means the move is complete. 
""" print "Got", self.ip, "datasets:", datasets matching_datasets = [] for dataset in datasets: if dataset["dataset_id"] == dataset_id: matching_datasets.append(dataset) if len(matching_datasets) == 1: if matching_datasets[0]["primary"] == self.ip: return True return False d.addCallback(check_dataset_exists) return d d = loop_until(dataset_exists) d.addCallback(lambda ignored: (fs, dataset_id)) return d d = self.client.get(self.base_url + "/configuration/datasets") d.addCallback(treq.json_content) def got_dataset_configuration(configured_datasets): # form a mapping from names onto dataset objects configured_dataset_mapping = {} for dataset in configured_datasets: if dataset["metadata"].get("name"): configured_dataset_mapping[dataset["metadata"].get( "name")] = dataset # iterate over the datasets we were asked to create by the docker client fs_create_deferreds = [] old_binds = [] if json_parsed['HostConfig']['Binds'] is not None: for bind in json_parsed['HostConfig']['Binds']: host_path, remainder = bind.split(":", 1) # TODO validation # if "/" in fs: # raise Exception("Not allowed flocker filesystems more than one level deep") if host_path.startswith("/flocker/"): fs = host_path[len("/flocker/"):] old_binds.append((fs, remainder)) # if a dataset exists, and is in the right place, we're cool. if fs in configured_dataset_mapping: dataset = configured_dataset_mapping[fs] if dataset["primary"] == self.ip: # simulate "immediate success" fs_create_deferreds.append( defer.succeed((fs, dataset["dataset_id"]))) else: # if a dataset exists, but is on the wrong server [TODO # and is not being used], then move it in place. 
d = self.client.post( self.base_url + "/configuration/datasets/%s" % (dataset["dataset_id"].encode('ascii'), ), json.dumps({"primary": self.ip}), headers={ 'Content-Type': ['application/json'] }) d.addCallback(treq.json_content) d.addCallback(wait_until_volume_in_place, fs=fs) fs_create_deferreds.append(d) else: # if a dataset doesn't exist at all, create it on this server. d = self.client.post( self.base_url + "/configuration/datasets", json.dumps({ "primary": self.ip, "metadata": { "name": fs } }), headers={'Content-Type': ['application/json']}) d.addCallback(treq.json_content) d.addCallback(wait_until_volume_in_place, fs=fs) fs_create_deferreds.append(d) d = defer.gatherResults(fs_create_deferreds) def got_created_and_moved_datasets(list_new_datasets): dataset_mapping = dict(list_new_datasets) new_binds = [] for fs, reminder in old_binds: new_binds.append( "/flocker/%s.default.%s:%s" % (self.host_uuid, dataset_mapping[fs], remainder)) new_json_parsed = json_parsed.copy() new_json_parsed['HostConfig']['Binds'] = new_binds request.write( json.dumps({ "PowerstripProtocolVersion": 1, "ModifiedClientRequest": { "Method": "POST", "Request": request.uri, "Body": json.dumps(new_json_parsed) } })) request.finish() d.addCallback(got_created_and_moved_datasets) return d d.addCallback(got_dataset_configuration) d.addErrback(log.err, 'while processing configured datasets') return server.NOT_DONE_YET
class HTTPClientTests(TestCase):
    """
    Unit tests for treq's HTTPClient request construction.

    The Agent is a mock, so no network I/O occurs; every test asserts the
    exact (method, url, headers, bodyProducer) tuple passed to
    ``agent.request``.  Body producers are patched out so ``assertBody``
    can read back the bytes that would have been sent.
    """

    def setUp(self):
        self.agent = mock.Mock(Agent)
        self.client = HTTPClient(self.agent)

        # Capture the body passed to FileBodyProducer instead of wrapping it.
        self.fbp_patcher = mock.patch('treq.client.FileBodyProducer')
        self.FileBodyProducer = self.fbp_patcher.start()
        self.addCleanup(self.fbp_patcher.stop)

        # Same for multipart bodies.
        self.mbp_patcher = mock.patch('treq.multipart.MultiPartProducer')
        self.MultiPartProducer = self.mbp_patcher.start()
        self.addCleanup(self.mbp_patcher.stop)

    def assertBody(self, expected):
        # First positional argument of the first FileBodyProducer call is
        # the file-like request body; read it back and compare.
        body = self.FileBodyProducer.mock_calls[0][1][0]
        self.assertEqual(body.read(), expected)

    def test_post(self):
        self.client.post('http://example.com/')
        self.agent.request.assert_called_once_with(
            b'POST', b'http://example.com/',
            Headers({b'accept-encoding': [b'gzip']}), None)

    def test_request_uri_idn(self):
        # Non-ASCII hostnames are IDNA-encoded.
        self.client.request('GET', u'http://č.net')
        self.agent.request.assert_called_once_with(
            b'GET', b'http://xn--bea.net',
            Headers({b'accept-encoding': [b'gzip']}), None)

    def test_request_uri_decodedurl(self):
        """
        A URL may be passed as a `hyperlink.DecodedURL` object. It is
        converted to bytes when passed to the underlying agent.
        """
        url = DecodedURL.from_text(u"https://example.org/foo")
        self.client.request("GET", url)
        self.agent.request.assert_called_once_with(
            b"GET", b"https://example.org/foo",
            Headers({b"accept-encoding": [b"gzip"]}),
            None,
        )

    def test_request_uri_encodedurl(self):
        """
        A URL may be passed as a `hyperlink.EncodedURL` object. It is
        converted to bytes when passed to the underlying agent.
        """
        url = EncodedURL.from_text(u"https://example.org/foo")
        self.client.request("GET", url)
        self.agent.request.assert_called_once_with(
            b"GET", b"https://example.org/foo",
            Headers({b"accept-encoding": [b"gzip"]}),
            None,
        )

    def test_request_uri_idn_params(self):
        """
        A URL that contains non-ASCII characters can be augmented with
        querystring parameters.

        This reproduces treq #264.
        """
        self.client.request('GET', u'http://č.net', params={'foo': 'bar'})
        self.agent.request.assert_called_once_with(
            b'GET', b'http://xn--bea.net/?foo=bar',
            Headers({b'accept-encoding': [b'gzip']}), None)

    def test_request_uri_hyperlink_params(self):
        """
        The *params* argument augments an instance of `hyperlink.DecodedURL`
        passed as the *url* parameter, just as if it were a string.
        """
        self.client.request(
            method="GET",
            url=DecodedURL.from_text(u"http://č.net"),
            params={"foo": "bar"},
        )
        self.agent.request.assert_called_once_with(
            b"GET", b"http://xn--bea.net/?foo=bar",
            Headers({b"accept-encoding": [b"gzip"]}),
            None,
        )

    def test_request_case_insensitive_methods(self):
        # Method names are uppercased before hitting the wire.
        self.client.request('gEt', 'http://example.com/')
        self.agent.request.assert_called_once_with(
            b'GET', b'http://example.com/',
            Headers({b'accept-encoding': [b'gzip']}), None)

    def test_request_query_params(self):
        self.client.request('GET', 'http://example.com/',
                            params={'foo': ['bar']})
        self.agent.request.assert_called_once_with(
            b'GET', b'http://example.com/?foo=bar',
            Headers({b'accept-encoding': [b'gzip']}), None)

    def test_request_tuple_query_values(self):
        self.client.request('GET', 'http://example.com/',
                            params={'foo': ('bar',)})
        self.agent.request.assert_called_once_with(
            b'GET', b'http://example.com/?foo=bar',
            Headers({b'accept-encoding': [b'gzip']}), None)

    def test_request_tuple_query_value_coercion(self):
        """
        treq coerces non-string values passed to *params* like
        `urllib.urlencode()`
        """
        self.client.request('GET', 'http://example.com/', params=[
            ('text', u'A\u03a9'),
            ('text-seq', [u'A\u03a9']),
            ('bytes', [b'ascii']),
            ('bytes-seq', [b'ascii']),
            ('native', ['native']),
            ('native-seq', ['aa', 'bb']),
            ('int', 1),
            ('int-seq', (1, 2, 3)),
            ('none', None),
            ('none-seq', [None, None]),
        ])
        self.agent.request.assert_called_once_with(
            b'GET',
            (
                b'http://example.com/?'
                b'text=A%CE%A9&text-seq=A%CE%A9'
                b'&bytes=ascii&bytes-seq=ascii'
                b'&native=native&native-seq=aa&native-seq=bb'
                b'&int=1&int-seq=1&int-seq=2&int-seq=3'
                b'&none=None&none-seq=None&none-seq=None'
            ),
            Headers({b'accept-encoding': [b'gzip']}),
            None,
        )

    def test_request_tuple_query_param_coercion(self):
        """
        treq coerces non-string param names passed to *params* like
        `urllib.urlencode()`
        """
        self.client.request('GET', 'http://example.com/', params=[
            (u'text', u'A\u03a9'),
            (b'bytes', ['ascii']),
            ('native', 'native'),
            (1, 'int'),
            (None, ['none']),
        ])
        self.agent.request.assert_called_once_with(
            b'GET',
            (
                b'http://example.com/'
                b'?text=A%CE%A9&bytes=ascii'
                b'&native=native&1=int&None=none'
            ),
            Headers({b'accept-encoding': [b'gzip']}),
            None,
        )

    def test_request_query_param_seps(self):
        """
        When the characters ``&`` and ``#`` are passed to *params* as param
        names or values they are percent-escaped in the URL.

        This reproduces https://github.com/twisted/treq/issues/282
        """
        self.client.request('GET', 'http://example.com/', params=(
            ('ampersand', '&'),
            ('&', 'ampersand'),
            ('octothorpe', '#'),
            ('#', 'octothorpe'),
        ))
        self.agent.request.assert_called_once_with(
            b'GET',
            (
                b'http://example.com/'
                b'?ampersand=%26'
                b'&%26=ampersand'
                b'&octothorpe=%23'
                b'&%23=octothorpe'
            ),
            Headers({b'accept-encoding': [b'gzip']}),
            None,
        )

    def test_request_merge_query_params(self):
        # Params are appended after any querystring already in the URL.
        self.client.request('GET', 'http://example.com/?baz=bax',
                            params={'foo': ['bar', 'baz']})
        self.agent.request.assert_called_once_with(
            b'GET', b'http://example.com/?baz=bax&foo=bar&foo=baz',
            Headers({b'accept-encoding': [b'gzip']}), None)

    def test_request_merge_tuple_query_params(self):
        self.client.request('GET', 'http://example.com/?baz=bax',
                            params=[('foo', 'bar')])
        self.agent.request.assert_called_once_with(
            b'GET', b'http://example.com/?baz=bax&foo=bar',
            Headers({b'accept-encoding': [b'gzip']}), None)

    def test_request_dict_single_value_query_params(self):
        self.client.request('GET', 'http://example.com/',
                            params={'foo': 'bar'})
        self.agent.request.assert_called_once_with(
            b'GET', b'http://example.com/?foo=bar',
            Headers({b'accept-encoding': [b'gzip']}), None)

    def test_request_data_dict(self):
        # data= dict is form-urlencoded and sets the Content-Type.
        self.client.request('POST', 'http://example.com/',
                            data={'foo': ['bar', 'baz']})
        self.agent.request.assert_called_once_with(
            b'POST', b'http://example.com/',
            Headers({b'Content-Type': [b'application/x-www-form-urlencoded'],
                     b'accept-encoding': [b'gzip']}),
            self.FileBodyProducer.return_value)
        self.assertBody(b'foo=bar&foo=baz')

    def test_request_data_single_dict(self):
        self.client.request('POST', 'http://example.com/',
                            data={'foo': 'bar'})
        self.agent.request.assert_called_once_with(
            b'POST', b'http://example.com/',
            Headers({b'Content-Type': [b'application/x-www-form-urlencoded'],
                     b'accept-encoding': [b'gzip']}),
            self.FileBodyProducer.return_value)
        self.assertBody(b'foo=bar')

    def test_request_data_tuple(self):
        self.client.request('POST', 'http://example.com/',
                            data=[('foo', 'bar')])
        self.agent.request.assert_called_once_with(
            b'POST', b'http://example.com/',
            Headers({b'Content-Type': [b'application/x-www-form-urlencoded'],
                     b'accept-encoding': [b'gzip']}),
            self.FileBodyProducer.return_value)
        self.assertBody(b'foo=bar')

    def test_request_data_file(self):
        # A file-like data= is streamed as-is with no Content-Type.
        temp_fn = self.mktemp()

        with open(temp_fn, "wb") as temp_file:
            temp_file.write(b'hello')

        self.client.request('POST', 'http://example.com/',
                            data=open(temp_fn, 'rb'))
        self.agent.request.assert_called_once_with(
            b'POST', b'http://example.com/',
            Headers({b'accept-encoding': [b'gzip']}),
            self.FileBodyProducer.return_value)
        self.assertBody(b'hello')

    def test_request_json_dict(self):
        self.client.request('POST', 'http://example.com/',
                            json={'foo': 'bar'})
        self.agent.request.assert_called_once_with(
            b'POST', b'http://example.com/',
            Headers({b'Content-Type': [b'application/json; charset=UTF-8'],
                     b'accept-encoding': [b'gzip']}),
            self.FileBodyProducer.return_value)
        self.assertBody(b'{"foo":"bar"}')

    def test_request_json_tuple(self):
        self.client.request('POST', 'http://example.com/',
                            json=('foo', 1))
        self.agent.request.assert_called_once_with(
            b'POST', b'http://example.com/',
            Headers({b'Content-Type': [b'application/json; charset=UTF-8'],
                     b'accept-encoding': [b'gzip']}),
            self.FileBodyProducer.return_value)
        self.assertBody(b'["foo",1]')

    def test_request_json_number(self):
        self.client.request('POST', 'http://example.com/', json=1.)
        self.agent.request.assert_called_once_with(
            b'POST', b'http://example.com/',
            Headers({b'Content-Type': [b'application/json; charset=UTF-8'],
                     b'accept-encoding': [b'gzip']}),
            self.FileBodyProducer.return_value)
        self.assertBody(b'1.0')

    def test_request_json_string(self):
        self.client.request('POST', 'http://example.com/', json='hello')
        self.agent.request.assert_called_once_with(
            b'POST', b'http://example.com/',
            Headers({b'Content-Type': [b'application/json; charset=UTF-8'],
                     b'accept-encoding': [b'gzip']}),
            self.FileBodyProducer.return_value)
        self.assertBody(b'"hello"')

    def test_request_json_bool(self):
        self.client.request('POST', 'http://example.com/', json=True)
        self.agent.request.assert_called_once_with(
            b'POST', b'http://example.com/',
            Headers({b'Content-Type': [b'application/json; charset=UTF-8'],
                     b'accept-encoding': [b'gzip']}),
            self.FileBodyProducer.return_value)
        self.assertBody(b'true')

    def test_request_json_none(self):
        self.client.request('POST', 'http://example.com/', json=None)
        self.agent.request.assert_called_once_with(
            b'POST', b'http://example.com/',
            Headers({b'Content-Type': [b'application/json; charset=UTF-8'],
                     b'accept-encoding': [b'gzip']}),
            self.FileBodyProducer.return_value)
        self.assertBody(b'null')

    # uuid4 is pinned so the multipart boundary is deterministic.
    @mock.patch('treq.client.uuid.uuid4', mock.Mock(return_value="heyDavid"))
    def test_request_no_name_attachment(self):
        self.client.request(
            'POST', 'http://example.com/', files={"name": BytesIO(b"hello")})
        self.agent.request.assert_called_once_with(
            b'POST', b'http://example.com/',
            Headers({
                b'accept-encoding': [b'gzip'],
                b'Content-Type': [b'multipart/form-data; boundary=heyDavid']}),
            self.MultiPartProducer.return_value)
        FP = self.FileBodyProducer.return_value
        self.assertEqual(
            mock.call(
                [('name', (None, 'application/octet-stream', FP))],
                boundary=b'heyDavid'),
            self.MultiPartProducer.call_args)

    @mock.patch('treq.client.uuid.uuid4', mock.Mock(return_value="heyDavid"))
    def test_request_named_attachment(self):
        self.client.request(
            'POST', 'http://example.com/', files={
                "name": ('image.jpg', BytesIO(b"hello"))})
        self.agent.request.assert_called_once_with(
            b'POST', b'http://example.com/',
            Headers({
                b'accept-encoding': [b'gzip'],
                b'Content-Type': [b'multipart/form-data; boundary=heyDavid']}),
            self.MultiPartProducer.return_value)
        FP = self.FileBodyProducer.return_value
        self.assertEqual(
            mock.call(
                [('name', ('image.jpg', 'image/jpeg', FP))],
                boundary=b'heyDavid'),
            self.MultiPartProducer.call_args)

    @mock.patch('treq.client.uuid.uuid4', mock.Mock(return_value="heyDavid"))
    def test_request_named_attachment_and_ctype(self):
        self.client.request(
            'POST', 'http://example.com/', files={
                "name": ('image.jpg', 'text/plain', BytesIO(b"hello"))})
        self.agent.request.assert_called_once_with(
            b'POST', b'http://example.com/',
            Headers({
                b'accept-encoding': [b'gzip'],
                b'Content-Type': [b'multipart/form-data; boundary=heyDavid']}),
            self.MultiPartProducer.return_value)
        FP = self.FileBodyProducer.return_value
        self.assertEqual(
            mock.call(
                [('name', ('image.jpg', 'text/plain', FP))],
                boundary=b'heyDavid'),
            self.MultiPartProducer.call_args)

    @mock.patch('treq.client.uuid.uuid4', mock.Mock(return_value="heyDavid"))
    def test_request_mixed_params(self):

        class NamedFile(BytesIO):
            def __init__(self, val):
                BytesIO.__init__(self, val)
                self.name = "image.png"

        self.client.request(
            'POST', 'http://example.com/',
            data=[("a", "b"), ("key", "val")],
            files=[
                ("file1", ('image.jpg', BytesIO(b"hello"))),
                ("file2", NamedFile(b"yo"))])
        self.agent.request.assert_called_once_with(
            b'POST', b'http://example.com/',
            Headers({
                b'accept-encoding': [b'gzip'],
                b'Content-Type': [b'multipart/form-data; boundary=heyDavid']}),
            self.MultiPartProducer.return_value)
        FP = self.FileBodyProducer.return_value
        self.assertEqual(
            mock.call([
                ('a', 'b'),
                ('key', 'val'),
                ('file1', ('image.jpg', 'image/jpeg', FP)),
                ('file2', ('image.png', 'image/png', FP))],
                boundary=b'heyDavid'),
            self.MultiPartProducer.call_args)

    @mock.patch('treq.client.uuid.uuid4', mock.Mock(return_value="heyDavid"))
    def test_request_mixed_params_dict(self):
        self.client.request(
            'POST', 'http://example.com/',
            data={"key": "a", "key2": "b"},
            files={"file1": BytesIO(b"hey")})
        self.agent.request.assert_called_once_with(
            b'POST', b'http://example.com/',
            Headers({
                b'accept-encoding': [b'gzip'],
                b'Content-Type': [b'multipart/form-data; boundary=heyDavid']}),
            self.MultiPartProducer.return_value)
        FP = self.FileBodyProducer.return_value
        self.assertEqual(
            mock.call([
                ('key', 'a'),
                ('key2', 'b'),
                ('file1', (None, 'application/octet-stream', FP))],
                boundary=b'heyDavid'),
            self.MultiPartProducer.call_args)

    def test_request_unsupported_params_combination(self):
        # A raw file body and multipart files cannot be combined.
        self.assertRaises(ValueError,
                          self.client.request,
                          'POST', 'http://example.com/',
                          data=BytesIO(b"yo"),
                          files={"file1": BytesIO(b"hey")})

    def test_request_dict_headers(self):
        self.client.request('GET', 'http://example.com/', headers={
            'User-Agent': 'treq/0.1dev',
            'Accept': ['application/json', 'text/plain']
        })
        self.agent.request.assert_called_once_with(
            b'GET', b'http://example.com/',
            Headers({b'User-Agent': [b'treq/0.1dev'],
                     b'accept-encoding': [b'gzip'],
                     b'Accept': [b'application/json', b'text/plain']}),
            None)

    @with_clock
    def test_request_timeout_fired(self, clock):
        """
        Verify the request is cancelled if a response is not received
        within specified timeout period.
        """
        self.agent.request.return_value = d = Deferred()
        self.client.request('GET', 'http://example.com', timeout=2)

        # simulate we haven't gotten a response within timeout seconds
        clock.advance(3)

        # a deferred should have been cancelled
        self.failureResultOf(d, CancelledError)

    @with_clock
    def test_request_timeout_cancelled(self, clock):
        """
        Verify timeout is cancelled if a response is received before
        timeout period elapses.
        """
        self.agent.request.return_value = d = Deferred()
        self.client.request('GET', 'http://example.com', timeout=2)

        # simulate a response
        d.callback(mock.Mock(code=200, headers=Headers({})))

        # now advance the clock but since we already got a result,
        # a cancellation timer should have been cancelled
        clock.advance(3)

        self.successResultOf(d)

    def test_response_is_buffered(self):
        # deliverBody on the buffered wrapper replays the body; the real
        # response's deliverBody is only invoked once.
        response = mock.Mock(deliverBody=mock.Mock(),
                             headers=Headers({}))

        self.agent.request.return_value = succeed(response)

        d = self.client.get('http://www.example.com')

        result = self.successResultOf(d)

        protocol = mock.Mock(Protocol)
        result.deliverBody(protocol)
        self.assertEqual(response.deliverBody.call_count, 1)

        result.deliverBody(protocol)
        self.assertEqual(response.deliverBody.call_count, 1)

    def test_response_buffering_is_disabled_with_unbufferred_arg(self):
        response = mock.Mock(headers=Headers({}))

        self.agent.request.return_value = succeed(response)

        d = self.client.get('http://www.example.com', unbuffered=True)

        # YOLO public attribute.
        self.assertEqual(self.successResultOf(d).original, response)

    def test_request_post_redirect_denied(self):
        # By default a POST is not replayed across a redirect.
        response = mock.Mock(code=302, headers=Headers({'Location': ['/']}))
        self.agent.request.return_value = succeed(response)
        d = self.client.post('http://www.example.com')
        self.failureResultOf(d, ResponseFailed)

    def test_request_browser_like_redirects(self):
        # browser_like_redirects=True follows the redirect like a browser.
        response = mock.Mock(code=302, headers=Headers({'Location': ['/']}))

        self.agent.request.return_value = succeed(response)

        raw = mock.Mock(return_value=[])
        final_resp = mock.Mock(code=200, headers=mock.Mock(getRawHeaders=raw))
        with mock.patch('twisted.web.client.RedirectAgent._handleRedirect',
                        return_value=final_resp):
            d = self.client.post('http://www.google.com',
                                 browser_like_redirects=True,
                                 unbuffered=True)

        self.assertEqual(self.successResultOf(d).original, final_resp)
class TestAdderPlugin(TestCase):
    """
    Integration tests for the adder adapter: each test starts a real
    AdderPlugin listening on an ephemeral local TCP port and drives it
    over HTTP using the Powerstrip adapter protocol.
    """

    def _getAdder(self, *args, **kw):
        # Start an AdderPlugin on port 0 (OS-assigned) and record the
        # actual port so tests can POST to it.
        self.adderAPI = AdderPlugin(*args, **kw)
        self.adderServer = reactor.listenTCP(0, self.adderAPI)
        self.adderPort = self.adderServer.getHost().port

    def setUp(self):
        self.agent = Agent(reactor) # no connectionpool
        self.client = HTTPClient(self.agent)

    def tearDown(self):
        # Deferred returned so trial waits for the port to close.
        return self.adderServer.stopListening()

    def test_adder_explode(self):
        """
        The adder adapter blows up (sends an HTTP 500) when asked to.
        """
        self._getAdder(explode=True)
        d = self.client.post('http://127.0.0.1:%d/adapter' % (self.adderPort,),
                             json.dumps({}),
                             headers={'Content-Type': ['application/json']})
        def verifyResponseCode(response):
            self.assertEqual(response.code, 500)
            return response
        d.addCallback(verifyResponseCode)
        d.addCallback(treq.content)
        def verify(body):
            self.assertEqual(body, "sadness for you, today.")
        d.addCallback(verify)
        return d

    def test_adder_pre(self):
        """
        The adder pre-hook increments an integer according to the
        protocol defined in the README.
        """
        self._getAdder(pre=True)
        d = self.client.post('http://127.0.0.1:%d/adapter' % (self.adderPort,),
                             json.dumps({
                                 "PowerstripProtocolVersion": 1,
                                 "Type": "pre-hook",
                                 "ClientRequest": {
                                     "Method": "POST",
                                     "Request": "/fictional",
                                     "Body": json.dumps({"Number": 7})}}),
                             headers={'Content-Type': ['application/json']})
        def verifyResponseCode(response):
            self.assertEqual(response.code, 200)
            return response
        d.addCallback(verifyResponseCode)
        d.addCallback(treq.json_content)
        def verify(body):
            # The hook's modified request body carries the incremented value.
            self.assertEqual(
                json.loads(body["ModifiedClientRequest"]["Body"])["Number"], 8)
        d.addCallback(verify)
        return d

    def test_adder_post(self):
        """
        The adder post-hook increments an integer according to the
        protocol defined in the README.
        """
        self._getAdder(post=True)
        d = self.client.post('http://127.0.0.1:%d/adapter' % (self.adderPort,),
                             json.dumps({
                                 "Type": "post-hook",
                                 "ClientRequest": {
                                     "Method": "POST",
                                     "Request": "/fictional",
                                     "Body": json.dumps({}),},
                                 "ServerResponse": {
                                     "ContentType": "application/json",
                                     "Body": json.dumps({"Number": 7}),
                                     "Code": 200,},
                                 }),
                             headers={'Content-Type': ['application/json']})
        def verifyResponseCode(response):
            self.assertEqual(response.code, 200)
            return response
        d.addCallback(verifyResponseCode)
        d.addCallback(treq.json_content)
        def verify(body):
            # Post-hooks rewrite the *server* response instead.
            self.assertEqual(
                json.loads(body["ModifiedServerResponse"]["Body"])["Number"], 8)
        d.addCallback(verify)
        return d
class DockerProxy(proxy.ReverseProxyResource):
    # Factory used to build the client that talks to the real Docker
    # daemon; kept as a class attribute so tests can substitute it.
    proxyClientFactoryClass = DockerProxyClientFactory

    def __init__(self, dockerAddr=None, dockerPort=None, dockerSocket=None,
                 path='', reactor=reactor, config=None):
        """
        A docker proxy resource which knows how to connect to real Docker
        daemon either via socket (dockerSocket specified) or address + port for
        TCP connection (dockerAddr + dockerPort specified).
        """
        if config is None:
            # Try to get the configuration from the default place on the
            # filesystem.
            self.config = PluginConfiguration()
        else:
            self.config = config
        self.config.read_and_parse()
        self.parser = EndpointParser(self.config)
        Resource.__init__(self)
        self.host = dockerAddr
        self.port = dockerPort
        self.socket = dockerSocket
        self.path = path
        self.reactor = reactor
        proxy.ReverseProxyResource.__init__(
            self, dockerAddr, dockerPort, path, reactor) # NB dockerAddr is not actually used
        self.agent = Agent(reactor) # no connectionpool
        self.client = HTTPClient(self.agent)

    def render(self, request, reactor=reactor):
        # We are processing a leaf request.
        # Get the original request body from the client.
        skipPreHooks = False
        if request.requestHeaders.getRawHeaders('content-type') == [
                "application/json"]:
            originalRequestBody = request.content.read()
            request.content.seek(0) # hee hee
        elif request.requestHeaders.getRawHeaders('content-type') == [
                "application/tar"]:
            # We can't JSON encode binary data, so don't even try.
            skipPreHooks = True
            originalRequestBody = None
        else:
            originalRequestBody = None
        preHooks = []
        postHooks = []
        d = defer.succeed(None)
        for endpoint in self.parser.match_endpoint(
                request.method, request.uri.split("?")[0]):
            # It's possible for a request to match multiple endpoint
            # definitions. Order of matched endpoint is not defined in
            # that case.
            adapters = self.config.endpoint(endpoint)
            preHooks.extend(adapters.pre)
            postHooks.extend(adapters.post)

        def callPreHook(result, hookURL):
            # Chain pre-hooks: each hook sees the body as modified by the
            # previous one (or the original body for the first hook).
            if result is None:
                newRequestBody = originalRequestBody
            else:
                newRequestBody = result["ModifiedClientRequest"]["Body"]
            return self.client.post(hookURL, json.dumps({
                        "PowerstripProtocolVersion": 1,
                        "Type": "pre-hook",
                        "ClientRequest": {
                            "Method": request.method,
                            "Request": request.uri,
                            "Body": newRequestBody,
                        }
                    }), headers={'Content-Type': ['application/json']})

        if not skipPreHooks:
            for preHook in preHooks:
                hookURL = self.config.adapter_uri(preHook)
                d.addCallback(callPreHook, hookURL=hookURL)
                d.addCallback(treq.json_content)

        def doneAllPrehooks(result):
            # Finally pass through the request to actual Docker. For now we
            # mutate request in-place in such a way that ReverseProxyResource
            # understands it.
            if result is not None:
                requestBody = b""
                bodyFromAdapter = result["ModifiedClientRequest"]["Body"]
                if bodyFromAdapter is not None:
                    requestBody = bodyFromAdapter.encode("utf-8")
                request.content = StringIO.StringIO(requestBody)
                request.requestHeaders.setRawHeaders(b"content-length",
                                                     [str(len(requestBody))])
            ###########################
            # The following code is copied from t.w.proxy.ReverseProxy so that
            # clientFactory reference can be kept.
            if not self.socket:
                if self.port == 80:
                    host = self.host
                else:
                    host = "%s:%d" % (self.host, self.port)
                request.requestHeaders.setRawHeaders(b"host", [host])
            request.content.seek(0, 0)
            qs = urlparse.urlparse(request.uri)[4]
            if qs:
                rest = self.path + '?' + qs
            else:
                rest = self.path
            allRequestHeaders = request.getAllHeaders()
            if allRequestHeaders.get("transfer-encoding") == "chunked":
                # The body below is sent fully buffered with an explicit
                # content-length, so the chunked marker must not leak through.
                del allRequestHeaders["transfer-encoding"]
            # XXX Streaming the contents of the request body into memory could
            # cause OOM issues for large build contexts POSTed through
            # powerstrip. See https://github.com/ClusterHQ/powerstrip/issues/51
            body = request.content.read()
            allRequestHeaders["content-length"] = str(len(body))
            clientFactory = self.proxyClientFactoryClass(
                request.method, rest, request.clientproto,
                allRequestHeaders, body, request)
            ###########################
            if self.socket:
                self.reactor.connectUNIX(self.socket, clientFactory)
            else:
                self.reactor.connectTCP(self.host, self.port, clientFactory)
            d = defer.Deferred()
            clientFactory.onCreate(d)
            return d
        d.addCallback(doneAllPrehooks)

        def inspect(client):
            # If there are no post-hooks, allow the response to be streamed
            # back to the client, rather than buffered.
            d = defer.Deferred()
            client.registerListener(d)
            if not postHooks:
                client.setStreamingMode(True)
            return d
        d.addCallback(inspect)

        def callPostHook(result, hookURL):
            # Chain post-hooks the same way as pre-hooks, but each hook
            # rewrites the *server* response instead.
            serverResponse = result["ModifiedServerResponse"]
            return self.client.post(hookURL, json.dumps({
                    # TODO Write tests for the information provided to the adapter.
                    "PowerstripProtocolVersion": 1,
                    "Type": "post-hook",
                    "ClientRequest": {
                        "Method": request.method,
                        "Request": request.uri,
                        "Body": originalRequestBody,
                    },
                    "ServerResponse": {
                        "ContentType": serverResponse["ContentType"],
                        "Body": serverResponse["Body"],
                        "Code": serverResponse["Code"],
                    },
                }), headers={'Content-Type': ['application/json']})

        # XXX Need to skip post-hooks for tar archives from e.g. docker export.
        # https://github.com/ClusterHQ/powerstrip/issues/52
        for postHook in postHooks:
            hookURL = self.config.adapter_uri(postHook)
            d.addCallback(callPostHook, hookURL=hookURL)
            d.addCallback(treq.json_content)

        def sendFinalResponseToClient(result):
            # Write the final response to the client.
            request.write(
                result["ModifiedServerResponse"]["Body"].encode("utf-8"))
            request.finish()
        d.addCallback(sendFinalResponseToClient)

        def squashNoPostHooks(failure):
            # NoPostHooks signals that the response was already streamed
            # straight through; nothing left to do.
            failure.trap(NoPostHooks)
        d.addErrback(squashNoPostHooks)
        d.addErrback(log.err, 'while running chain')
        return NOT_DONE_YET

    def getChild(self, path, request):
        fragments = request.uri.split("/")
        fragments.pop(0)
        proxyArgs = (self.host, self.port, self.socket,
                     self.path + '/' + urlquote(path, safe=""),
                     self.reactor)
        #if not request.postpath:
        resource = DockerProxy(*proxyArgs, config=self.config)
        return resource
class AdapterResource(resource.Resource): """ A powerstrip pre-hook for container create. """ isLeaf = True def __init__(self, *args, **kw): self._agent = Agent(reactor) # no connectionpool self.client = HTTPClient(self._agent) return resource.Resource.__init__(self, *args, **kw) def render_POST(self, request): """ Handle a pre-hook: either create a filesystem, or move it in place. """ requestJson = json.loads(request.content.read()) if requestJson["Type"] != "pre-hook": raise Exception("unsupported hook type %s" % (requestJson["Type"],)) pprint.pprint(os.environ) # BASE_URL like http://control-service/v1/ ^ json_payload = requestJson["ClientRequest"]["Body"] json_parsed = json.loads(json_payload) self.base_url = os.environ.get("FLOCKER_CONTROL_SERVICE_BASE_URL") self.ip = os.environ.get("MY_NETWORK_IDENTITY") self.host_uuid = os.environ.get("MY_HOST_UUID") def wait_until_volume_in_place(result, fs): """ Called after a dataset has been created or moved in the cluster's desired configuration. Wait until the volume shows up in the cluster actual state on the right host (either having been created or moved). :return: Deferred which fires with the tuple (fs, dataset_id) -- that is, the filesystem and the corresponding flocker dataset uuid that the docker client asked for -- firing only once the filesystem has been created/moved and mounted (iow, exists on the right host in the cluster state). """ dataset_id = result["dataset_id"] def dataset_exists(): d = self.client.get(self.base_url + "/state/datasets") d.addCallback(treq.json_content) def check_dataset_exists(datasets): """ The /v1/state/datasets API seems to show the volume as being on two hosts at once during a move. We assume therefore that when it settles down to only show it on one host that this means the move is complete. 
""" print "Got", self.ip, "datasets:", datasets matching_datasets = [] for dataset in datasets: if dataset["dataset_id"] == dataset_id: matching_datasets.append(dataset) if len(matching_datasets) == 1: if matching_datasets[0]["primary"] == self.ip: return True return False d.addCallback(check_dataset_exists) return d d = loop_until(dataset_exists) d.addCallback(lambda ignored: (fs, dataset_id)) return d d = self.client.get(self.base_url + "/configuration/datasets") d.addCallback(treq.json_content) def got_dataset_configuration(configured_datasets): # form a mapping from names onto dataset objects configured_dataset_mapping = {} for dataset in configured_datasets: if dataset["metadata"].get("name"): configured_dataset_mapping[dataset["metadata"].get("name")] = dataset # iterate over the datasets we were asked to create by the docker client fs_create_deferreds = [] old_binds = [] if json_parsed['HostConfig']['Binds'] is not None: for bind in json_parsed['HostConfig']['Binds']: host_path, remainder = bind.split(":", 1) # TODO validation # if "/" in fs: # raise Exception("Not allowed flocker filesystems more than one level deep") if host_path.startswith("/flocker/"): fs = host_path[len("/flocker/"):] old_binds.append((fs, remainder)) # if a dataset exists, and is in the right place, we're cool. if fs in configured_dataset_mapping: dataset = configured_dataset_mapping[fs] if dataset["primary"] == self.ip: # simulate "immediate success" fs_create_deferreds.append(defer.succeed((fs, dataset["dataset_id"]))) else: # if a dataset exists, but is on the wrong server [TODO # and is not being used], then move it in place. 
d = self.client.post( self.base_url + "/configuration/datasets/%s" % ( dataset["dataset_id"].encode('ascii'),), json.dumps({"primary": self.ip}), headers={'Content-Type': ['application/json']}) d.addCallback(treq.json_content) d.addCallback(wait_until_volume_in_place, fs=fs) fs_create_deferreds.append(d) else: # if a dataset doesn't exist at all, create it on this server. d = self.client.post(self.base_url + "/configuration/datasets", json.dumps({"primary": self.ip, "metadata": {"name": fs}}), headers={'Content-Type': ['application/json']}) d.addCallback(treq.json_content) d.addCallback(wait_until_volume_in_place, fs=fs) fs_create_deferreds.append(d) d = defer.gatherResults(fs_create_deferreds) def got_created_and_moved_datasets(list_new_datasets): dataset_mapping = dict(list_new_datasets) new_binds = [] for fs, reminder in old_binds: new_binds.append("/flocker/%s.default.%s:%s" % (self.host_uuid, dataset_mapping[fs], remainder)) new_json_parsed = json_parsed.copy() new_json_parsed['HostConfig']['Binds'] = new_binds request.write(json.dumps({ "PowerstripProtocolVersion": 1, "ModifiedClientRequest": { "Method": "POST", "Request": request.uri, "Body": json.dumps(new_json_parsed)}})) request.finish() d.addCallback(got_created_and_moved_datasets) return d d.addCallback(got_dataset_configuration) d.addErrback(log.err, 'while processing configured datasets') return server.NOT_DONE_YET
    def run_test(self, content, encoded_content=None, datacoding=None, port=1401):
        """
        Send one MT SMS through the HTTP API and verify what reached the
        mock SMSC: the on-the-wire content (reassembled across long-message
        parts) and the data_coding scheme.

        :param content: message content to submit via the 'content' param.
        :param encoded_content: expected bytes on the SMPP side; defaults
            to ``content`` when not given.
        :param datacoding: value for the 'coding' HTTP param, or None to
            omit it (SMSC default alphabet is then expected).
        :param port: HTTP API port to POST to.

        NOTE(review): this is a generator using ``yield`` on Deferreds --
        presumably decorated with @defer.inlineCallbacks at the (unseen)
        definition site; confirm against the enclosing class.
        """
        yield self.connect('127.0.0.1', self.pbPort)
        yield self.prepareRoutingsAndStartConnector()

        # Set content
        self.params['content'] = content
        # Set datacoding
        if datacoding is None and 'coding' in self.params:
            # Remove any stale 'coding' left over from a previous call.
            del self.params['coding']
        if datacoding is not None:
            self.params['coding'] = datacoding
        # Prepare baseurl
        baseurl = 'http://127.0.0.1:%s/send' % port
        if encoded_content is None:
            encoded_content = content

        # Send a MT
        # We should receive a msg id
        agent = Agent(reactor)
        client = HTTPClient(agent)
        response = yield client.post(baseurl, data=self.params)
        text = yield text_content(response)
        # The API replies "Success ..." on acceptance; keep the first 7 chars.
        msgStatus = text[:7]

        # Wait 2 seconds before stopping SmppClientConnectors
        exitDeferred = defer.Deferred()
        reactor.callLater(2, exitDeferred.callback, None)
        yield exitDeferred
        yield self.stopSmppClientConnectors()

        # Run tests
        self.assertEqual(msgStatus, 'Success')

        if datacoding is None:
            datacoding = 0
        # Map the HTTP 'coding' value onto the expected SMPP data_coding
        # schemeData constant.
        datacoding_matrix = {}
        datacoding_matrix[0] = {'schemeData': DataCodingDefault.SMSC_DEFAULT_ALPHABET}
        datacoding_matrix[1] = {'schemeData': DataCodingDefault.IA5_ASCII}
        datacoding_matrix[2] = {'schemeData': DataCodingDefault.OCTET_UNSPECIFIED}
        datacoding_matrix[3] = {'schemeData': DataCodingDefault.LATIN_1}
        datacoding_matrix[4] = {'schemeData': DataCodingDefault.OCTET_UNSPECIFIED_COMMON}
        datacoding_matrix[5] = {'schemeData': DataCodingDefault.JIS}
        datacoding_matrix[6] = {'schemeData': DataCodingDefault.CYRILLIC}
        datacoding_matrix[7] = {'schemeData': DataCodingDefault.ISO_8859_8}
        datacoding_matrix[8] = {'schemeData': DataCodingDefault.UCS2}
        datacoding_matrix[9] = {'schemeData': DataCodingDefault.PICTOGRAM}
        datacoding_matrix[10] = {'schemeData': DataCodingDefault.ISO_2022_JP}
        datacoding_matrix[13] = {'schemeData': DataCodingDefault.EXTENDED_KANJI_JIS}
        datacoding_matrix[14] = {'schemeData': DataCodingDefault.KS_C_5601}

        # Check for content encoding
        receivedContent = b''
        for submitSm in self.SMSCPort.factory.lastClient.submitRecords:
            # For UDH-bearing parts (concatenation header \x05\x00\x03...),
            # strip the 6-byte UDH before reassembling the payload.
            if (EsmClassGsmFeatures.UDHI_INDICATOR_SET in submitSm.params['esm_class'].gsmFeatures and
                    submitSm.params['short_message'][:3] == b'\x05\x00\x03'):
                receivedContent += submitSm.params['short_message'][6:]
            else:
                receivedContent += submitSm.params['short_message']

        self.assertEqual(encoded_content, receivedContent)

        # Check for schemeData
        sentDataCoding = datacoding_matrix[datacoding]['schemeData']
        for submitSm in self.SMSCPort.factory.lastClient.submitRecords:
            self.assertEqual(submitSm.params['data_coding'].schemeData, sentDataCoding)
class HTTPClientTests(TestCase):
    """
    Unit tests for treq's HTTPClient request building: the Agent is a
    mock, and FileBodyProducer / MultiPartProducer are patched so the
    tests can assert on the exact (method, uri, headers, body-producer)
    tuple handed to Agent.request without doing any I/O.
    """

    def setUp(self):
        self.agent = mock.Mock(Agent)
        self.client = HTTPClient(self.agent)

        self.fbp_patcher = mock.patch('treq.client.FileBodyProducer')
        self.FileBodyProducer = self.fbp_patcher.start()
        self.addCleanup(self.fbp_patcher.stop)

        self.mbp_patcher = mock.patch('treq.multipart.MultiPartProducer')
        self.MultiPartProducer = self.mbp_patcher.start()
        self.addCleanup(self.mbp_patcher.stop)

    def assertBody(self, expected):
        # Inspect the stream handed to the (patched) FileBodyProducer.
        body = self.FileBodyProducer.mock_calls[0][1][0]
        self.assertEqual(body.read(), expected)

    def test_post(self):
        self.client.post('http://example.com/')
        self.agent.request.assert_called_once_with(
            b'POST', b'http://example.com/',
            Headers({b'accept-encoding': [b'gzip']}), None)

    def test_request_uri_idn(self):
        # IDN hosts are punycode-encoded before hitting the wire.
        self.client.request('GET', u'http://č.net')
        self.agent.request.assert_called_once_with(
            b'GET', b'http://xn--bea.net',
            Headers({b'accept-encoding': [b'gzip']}), None)

    def test_request_case_insensitive_methods(self):
        self.client.request('gEt', 'http://example.com/')
        self.agent.request.assert_called_once_with(
            b'GET', b'http://example.com/',
            Headers({b'accept-encoding': [b'gzip']}), None)

    def test_request_query_params(self):
        self.client.request('GET', 'http://example.com/',
                            params={'foo': ['bar']})
        self.agent.request.assert_called_once_with(
            b'GET', b'http://example.com/?foo=bar',
            Headers({b'accept-encoding': [b'gzip']}), None)

    def test_request_tuple_query_values(self):
        self.client.request('GET', 'http://example.com/',
                            params={'foo': ('bar',)})
        self.agent.request.assert_called_once_with(
            b'GET', b'http://example.com/?foo=bar',
            Headers({b'accept-encoding': [b'gzip']}), None)

    def test_request_merge_query_params(self):
        # params are appended after any query string already in the URI.
        self.client.request('GET', 'http://example.com/?baz=bax',
                            params={'foo': ['bar', 'baz']})
        self.agent.request.assert_called_once_with(
            b'GET', b'http://example.com/?baz=bax&foo=bar&foo=baz',
            Headers({b'accept-encoding': [b'gzip']}), None)

    def test_request_merge_tuple_query_params(self):
        self.client.request('GET', 'http://example.com/?baz=bax',
                            params=[('foo', 'bar')])
        self.agent.request.assert_called_once_with(
            b'GET', b'http://example.com/?baz=bax&foo=bar',
            Headers({b'accept-encoding': [b'gzip']}), None)

    def test_request_dict_single_value_query_params(self):
        self.client.request('GET', 'http://example.com/',
                            params={'foo': 'bar'})
        self.agent.request.assert_called_once_with(
            b'GET', b'http://example.com/?foo=bar',
            Headers({b'accept-encoding': [b'gzip']}), None)

    def test_request_data_dict(self):
        # dict/tuple data is form-urlencoded and sent via FileBodyProducer.
        self.client.request('POST', 'http://example.com/',
                            data={'foo': ['bar', 'baz']})
        self.agent.request.assert_called_once_with(
            b'POST', b'http://example.com/',
            Headers({b'Content-Type': [b'application/x-www-form-urlencoded'],
                     b'accept-encoding': [b'gzip']}),
            self.FileBodyProducer.return_value)
        self.assertBody(b'foo=bar&foo=baz')

    def test_request_data_single_dict(self):
        self.client.request('POST', 'http://example.com/',
                            data={'foo': 'bar'})
        self.agent.request.assert_called_once_with(
            b'POST', b'http://example.com/',
            Headers({b'Content-Type': [b'application/x-www-form-urlencoded'],
                     b'accept-encoding': [b'gzip']}),
            self.FileBodyProducer.return_value)
        self.assertBody(b'foo=bar')

    def test_request_data_tuple(self):
        self.client.request('POST', 'http://example.com/',
                            data=[('foo', 'bar')])
        self.agent.request.assert_called_once_with(
            b'POST', b'http://example.com/',
            Headers({b'Content-Type': [b'application/x-www-form-urlencoded'],
                     b'accept-encoding': [b'gzip']}),
            self.FileBodyProducer.return_value)
        self.assertBody(b'foo=bar')

    def test_request_data_file(self):
        # File-like data is streamed as-is, with no Content-Type added.
        temp_fn = self.mktemp()

        with open(temp_fn, "wb") as temp_file:
            temp_file.write(b'hello')

        self.client.request('POST', 'http://example.com/',
                            data=open(temp_fn, 'rb'))
        self.agent.request.assert_called_once_with(
            b'POST', b'http://example.com/',
            Headers({b'accept-encoding': [b'gzip']}),
            self.FileBodyProducer.return_value)
        self.assertBody(b'hello')

    def test_request_json_dict(self):
        self.client.request('POST', 'http://example.com/',
                            json={'foo': 'bar'})
        self.agent.request.assert_called_once_with(
            b'POST', b'http://example.com/',
            Headers({b'Content-Type': [b'application/json; charset=UTF-8'],
                     b'accept-encoding': [b'gzip']}),
            self.FileBodyProducer.return_value)
        self.assertBody(b'{"foo":"bar"}')

    def test_request_json_tuple(self):
        self.client.request('POST', 'http://example.com/',
                            json=('foo', 1))
        self.agent.request.assert_called_once_with(
            b'POST', b'http://example.com/',
            Headers({b'Content-Type': [b'application/json; charset=UTF-8'],
                     b'accept-encoding': [b'gzip']}),
            self.FileBodyProducer.return_value)
        self.assertBody(b'["foo",1]')

    def test_request_json_number(self):
        self.client.request('POST', 'http://example.com/', json=1.)
        self.agent.request.assert_called_once_with(
            b'POST', b'http://example.com/',
            Headers({b'Content-Type': [b'application/json; charset=UTF-8'],
                     b'accept-encoding': [b'gzip']}),
            self.FileBodyProducer.return_value)
        self.assertBody(b'1.0')

    def test_request_json_string(self):
        self.client.request('POST', 'http://example.com/', json='hello')
        self.agent.request.assert_called_once_with(
            b'POST', b'http://example.com/',
            Headers({b'Content-Type': [b'application/json; charset=UTF-8'],
                     b'accept-encoding': [b'gzip']}),
            self.FileBodyProducer.return_value)
        self.assertBody(b'"hello"')

    def test_request_json_bool(self):
        self.client.request('POST', 'http://example.com/', json=True)
        self.agent.request.assert_called_once_with(
            b'POST', b'http://example.com/',
            Headers({b'Content-Type': [b'application/json; charset=UTF-8'],
                     b'accept-encoding': [b'gzip']}),
            self.FileBodyProducer.return_value)
        self.assertBody(b'true')

    def test_request_json_none(self):
        self.client.request('POST', 'http://example.com/', json=None)
        self.agent.request.assert_called_once_with(
            b'POST', b'http://example.com/',
            Headers({b'Content-Type': [b'application/json; charset=UTF-8'],
                     b'accept-encoding': [b'gzip']}),
            self.FileBodyProducer.return_value)
        self.assertBody(b'null')

    @mock.patch('treq.client.uuid.uuid4',
                mock.Mock(return_value="heyDavid"))
    def test_request_no_name_attachment(self):
        # uuid4 is patched so the multipart boundary is deterministic.
        self.client.request(
            'POST', 'http://example.com/', files={"name": BytesIO(b"hello")})
        self.agent.request.assert_called_once_with(
            b'POST', b'http://example.com/',
            Headers({
                b'accept-encoding': [b'gzip'],
                b'Content-Type': [b'multipart/form-data; boundary=heyDavid']}),
            self.MultiPartProducer.return_value)
        FP = self.FileBodyProducer.return_value
        self.assertEqual(
            mock.call(
                [('name', (None, 'application/octet-stream', FP))],
                boundary=b'heyDavid'),
            self.MultiPartProducer.call_args)

    @mock.patch('treq.client.uuid.uuid4',
                mock.Mock(return_value="heyDavid"))
    def test_request_named_attachment(self):
        # Content type is guessed from the supplied filename.
        self.client.request(
            'POST', 'http://example.com/', files={
                "name": ('image.jpg', BytesIO(b"hello"))})
        self.agent.request.assert_called_once_with(
            b'POST', b'http://example.com/',
            Headers({
                b'accept-encoding': [b'gzip'],
                b'Content-Type': [b'multipart/form-data; boundary=heyDavid']}),
            self.MultiPartProducer.return_value)
        FP = self.FileBodyProducer.return_value
        self.assertEqual(
            mock.call(
                [('name', ('image.jpg', 'image/jpeg', FP))],
                boundary=b'heyDavid'),
            self.MultiPartProducer.call_args)

    @mock.patch('treq.client.uuid.uuid4',
                mock.Mock(return_value="heyDavid"))
    def test_request_named_attachment_and_ctype(self):
        # An explicit content type in the tuple overrides the guess.
        self.client.request(
            'POST', 'http://example.com/', files={
                "name": ('image.jpg', 'text/plain', BytesIO(b"hello"))})
        self.agent.request.assert_called_once_with(
            b'POST', b'http://example.com/',
            Headers({
                b'accept-encoding': [b'gzip'],
                b'Content-Type': [b'multipart/form-data; boundary=heyDavid']}),
            self.MultiPartProducer.return_value)
        FP = self.FileBodyProducer.return_value
        self.assertEqual(
            mock.call(
                [('name', ('image.jpg', 'text/plain', FP))],
                boundary=b'heyDavid'),
            self.MultiPartProducer.call_args)

    @mock.patch('treq.client.uuid.uuid4',
                mock.Mock(return_value="heyDavid"))
    def test_request_mixed_params(self):

        class NamedFile(BytesIO):
            def __init__(self, val):
                BytesIO.__init__(self, val)
                self.name = "image.png"

        # data fields come first, then the file attachments, in order.
        self.client.request(
            'POST', 'http://example.com/',
            data=[("a", "b"), ("key", "val")],
            files=[
                ("file1", ('image.jpg', BytesIO(b"hello"))),
                ("file2", NamedFile(b"yo"))])
        self.agent.request.assert_called_once_with(
            b'POST', b'http://example.com/',
            Headers({
                b'accept-encoding': [b'gzip'],
                b'Content-Type': [b'multipart/form-data; boundary=heyDavid']}),
            self.MultiPartProducer.return_value)
        FP = self.FileBodyProducer.return_value
        self.assertEqual(
            mock.call([
                ('a', 'b'),
                ('key', 'val'),
                ('file1', ('image.jpg', 'image/jpeg', FP)),
                ('file2', ('image.png', 'image/png', FP))],
                boundary=b'heyDavid'),
            self.MultiPartProducer.call_args)

    @mock.patch('treq.client.uuid.uuid4',
                mock.Mock(return_value="heyDavid"))
    def test_request_mixed_params_dict(self):
        self.client.request(
            'POST', 'http://example.com/',
            data={"key": "a", "key2": "b"},
            files={"file1": BytesIO(b"hey")})
        self.agent.request.assert_called_once_with(
            b'POST', b'http://example.com/',
            Headers({
                b'accept-encoding': [b'gzip'],
                b'Content-Type': [b'multipart/form-data; boundary=heyDavid']}),
            self.MultiPartProducer.return_value)
        FP = self.FileBodyProducer.return_value
        self.assertEqual(
            mock.call([
                ('key', 'a'),
                ('key2', 'b'),
                ('file1', (None, 'application/octet-stream', FP))],
                boundary=b'heyDavid'),
            self.MultiPartProducer.call_args)

    def test_request_unsupported_params_combination(self):
        # Raw body data and multipart files are mutually exclusive.
        self.assertRaises(ValueError,
                          self.client.request,
                          'POST', 'http://example.com/',
                          data=BytesIO(b"yo"),
                          files={"file1": BytesIO(b"hey")})

    def test_request_dict_headers(self):
        self.client.request('GET', 'http://example.com/', headers={
            'User-Agent': 'treq/0.1dev',
            'Accept': ['application/json', 'text/plain']
        })
        self.agent.request.assert_called_once_with(
            b'GET', b'http://example.com/',
            Headers({b'User-Agent': [b'treq/0.1dev'],
                     b'accept-encoding': [b'gzip'],
                     b'Accept': [b'application/json', b'text/plain']}),
            None)

    @with_clock
    def test_request_timeout_fired(self, clock):
        """
        Verify the request is cancelled if a response is
        not received within specified timeout period.
        """
        self.agent.request.return_value = d = Deferred()

        self.client.request('GET', 'http://example.com', timeout=2)

        # simulate we haven't gotten a response within timeout seconds
        clock.advance(3)

        # a deferred should have been cancelled
        self.failureResultOf(d, CancelledError)

    @with_clock
    def test_request_timeout_cancelled(self, clock):
        """
        Verify timeout is cancelled if a response is received
        before timeout period elapses.
        """
        self.agent.request.return_value = d = Deferred()

        self.client.request('GET', 'http://example.com', timeout=2)

        # simulate a response
        d.callback(mock.Mock(code=200, headers=Headers({})))

        # now advance the clock but since we already got a result,
        # a cancellation timer should have been cancelled
        clock.advance(3)

        self.successResultOf(d)

    def test_response_is_buffered(self):
        # The wrapped response caches the delivered body: a second
        # deliverBody() must not hit the underlying response again.
        response = mock.Mock(deliverBody=mock.Mock(),
                             headers=Headers({}))

        self.agent.request.return_value = succeed(response)

        d = self.client.get('http://www.example.com')

        result = self.successResultOf(d)

        protocol = mock.Mock(Protocol)

        result.deliverBody(protocol)
        self.assertEqual(response.deliverBody.call_count, 1)

        result.deliverBody(protocol)
        self.assertEqual(response.deliverBody.call_count, 1)

    def test_response_buffering_is_disabled_with_unbufferred_arg(self):
        response = mock.Mock(headers=Headers({}))

        self.agent.request.return_value = succeed(response)

        # unbuffered=True must hand back the raw (unwrapped) response.
        d = self.client.get('http://www.example.com', unbuffered=True)

        # YOLO public attribute.
        self.assertEqual(self.successResultOf(d).original, response)

    def test_request_post_redirect_denied(self):
        # POST is not automatically redirected; a 302 surfaces as
        # a ResponseFailed failure.
        response = mock.Mock(code=302, headers=Headers({'Location': ['/']}))
        self.agent.request.return_value = succeed(response)
        d = self.client.post('http://www.example.com')
        self.failureResultOf(d, ResponseFailed)

    def test_request_browser_like_redirects(self):
        # With browser_like_redirects=True a POST 302 is followed and
        # the final response is returned instead of failing.
        response = mock.Mock(code=302, headers=Headers({'Location': ['/']}))
        self.agent.request.return_value = succeed(response)

        raw = mock.Mock(return_value=[])
        final_resp = mock.Mock(code=200, headers=mock.Mock(getRawHeaders=raw))
        with mock.patch('twisted.web.client.RedirectAgent._handleRedirect',
                        return_value=final_resp):
            d = self.client.post('http://www.google.com',
                                 browser_like_redirects=True,
                                 unbuffered=True)
            self.assertEqual(self.successResultOf(d).original, final_resp)
class HTTPClientTests(TestCase): def setUp(self): self.agent = mock.Mock(Agent) self.client = HTTPClient(self.agent) self.fbp_patcher = mock.patch('treq.client.FileBodyProducer') self.FileBodyProducer = self.fbp_patcher.start() self.addCleanup(self.fbp_patcher.stop) self.mbp_patcher = mock.patch('treq.multipart.MultiPartProducer') self.MultiPartProducer = self.mbp_patcher.start() self.addCleanup(self.mbp_patcher.stop) def assertBody(self, expected): body = self.FileBodyProducer.mock_calls[0][1][0] self.assertEqual(body.read(), expected) def test_post(self): self.client.post('http://example.com/') self.agent.request.assert_called_once_with( b'POST', b'http://example.com/', Headers({b'accept-encoding': [b'gzip']}), None) def test_request_uri_idn(self): self.client.request('GET', u'http://‽.net') self.agent.request.assert_called_once_with( b'GET', b'http://xn--fwg.net', Headers({b'accept-encoding': [b'gzip']}), None) def test_request_case_insensitive_methods(self): self.client.request('gEt', 'http://example.com/') self.agent.request.assert_called_once_with( b'GET', b'http://example.com/', Headers({b'accept-encoding': [b'gzip']}), None) def test_request_query_params(self): self.client.request('GET', 'http://example.com/', params={'foo': ['bar']}) self.agent.request.assert_called_once_with( b'GET', b'http://example.com/?foo=bar', Headers({b'accept-encoding': [b'gzip']}), None) def test_request_tuple_query_values(self): self.client.request('GET', 'http://example.com/', params={'foo': ('bar', )}) self.agent.request.assert_called_once_with( b'GET', b'http://example.com/?foo=bar', Headers({b'accept-encoding': [b'gzip']}), None) def test_request_merge_query_params(self): self.client.request('GET', 'http://example.com/?baz=bax', params={'foo': ['bar', 'baz']}) self.agent.request.assert_called_once_with( b'GET', b'http://example.com/?baz=bax&foo=bar&foo=baz', Headers({b'accept-encoding': [b'gzip']}), None) def test_request_merge_tuple_query_params(self): 
self.client.request('GET', 'http://example.com/?baz=bax', params=[('foo', 'bar')]) self.agent.request.assert_called_once_with( b'GET', b'http://example.com/?baz=bax&foo=bar', Headers({b'accept-encoding': [b'gzip']}), None) def test_request_dict_single_value_query_params(self): self.client.request('GET', 'http://example.com/', params={'foo': 'bar'}) self.agent.request.assert_called_once_with( b'GET', b'http://example.com/?foo=bar', Headers({b'accept-encoding': [b'gzip']}), None) def test_request_data_dict(self): self.client.request('POST', 'http://example.com/', data={'foo': ['bar', 'baz']}) self.agent.request.assert_called_once_with( b'POST', b'http://example.com/', Headers({ b'Content-Type': [b'application/x-www-form-urlencoded'], b'accept-encoding': [b'gzip'] }), self.FileBodyProducer.return_value) self.assertBody(b'foo=bar&foo=baz') def test_request_data_single_dict(self): self.client.request('POST', 'http://example.com/', data={'foo': 'bar'}) self.agent.request.assert_called_once_with( b'POST', b'http://example.com/', Headers({ b'Content-Type': [b'application/x-www-form-urlencoded'], b'accept-encoding': [b'gzip'] }), self.FileBodyProducer.return_value) self.assertBody(b'foo=bar') def test_request_data_tuple(self): self.client.request('POST', 'http://example.com/', data=[('foo', 'bar')]) self.agent.request.assert_called_once_with( b'POST', b'http://example.com/', Headers({ b'Content-Type': [b'application/x-www-form-urlencoded'], b'accept-encoding': [b'gzip'] }), self.FileBodyProducer.return_value) self.assertBody(b'foo=bar') def test_request_data_file(self): temp_fn = self.mktemp() with open(temp_fn, "wb") as temp_file: temp_file.write(b'hello') self.client.request('POST', 'http://example.com/', data=open(temp_fn, 'rb')) self.agent.request.assert_called_once_with( b'POST', b'http://example.com/', Headers({b'accept-encoding': [b'gzip']}), self.FileBodyProducer.return_value) self.assertBody(b'hello') def test_request_json_dict(self): self.client.request('POST', 
'http://example.com/', json={'foo': 'bar'}) self.agent.request.assert_called_once_with( b'POST', b'http://example.com/', Headers({ b'Content-Type': [b'application/json; charset=UTF-8'], b'accept-encoding': [b'gzip'] }), self.FileBodyProducer.return_value) self.assertBody(b'{"foo":"bar"}') def test_request_json_tuple(self): self.client.request('POST', 'http://example.com/', json=('foo', 1)) self.agent.request.assert_called_once_with( b'POST', b'http://example.com/', Headers({ b'Content-Type': [b'application/json; charset=UTF-8'], b'accept-encoding': [b'gzip'] }), self.FileBodyProducer.return_value) self.assertBody(b'["foo",1]') def test_request_json_number(self): self.client.request('POST', 'http://example.com/', json=1.) self.agent.request.assert_called_once_with( b'POST', b'http://example.com/', Headers({ b'Content-Type': [b'application/json; charset=UTF-8'], b'accept-encoding': [b'gzip'] }), self.FileBodyProducer.return_value) self.assertBody(b'1.0') def test_request_json_string(self): self.client.request('POST', 'http://example.com/', json='hello') self.agent.request.assert_called_once_with( b'POST', b'http://example.com/', Headers({ b'Content-Type': [b'application/json; charset=UTF-8'], b'accept-encoding': [b'gzip'] }), self.FileBodyProducer.return_value) self.assertBody(b'"hello"') def test_request_json_bool(self): self.client.request('POST', 'http://example.com/', json=True) self.agent.request.assert_called_once_with( b'POST', b'http://example.com/', Headers({ b'Content-Type': [b'application/json; charset=UTF-8'], b'accept-encoding': [b'gzip'] }), self.FileBodyProducer.return_value) self.assertBody(b'true') def test_request_json_none(self): self.client.request('POST', 'http://example.com/', json=None) self.agent.request.assert_called_once_with( b'POST', b'http://example.com/', Headers({ b'Content-Type': [b'application/json; charset=UTF-8'], b'accept-encoding': [b'gzip'] }), self.FileBodyProducer.return_value) self.assertBody(b'null') 
    # NOTE(review): these methods belong to a test class whose ``class``
    # statement is above this chunk; they appear to duplicate methods of the
    # HTTPClientTests class defined later in the file — confirm and de-dupe.

    @mock.patch('treq.client.uuid.uuid4', mock.Mock(return_value="heyDavid"))
    def test_request_no_name_attachment(self):
        # A bare file-like value gets no filename and the generic
        # application/octet-stream content type.
        self.client.request('POST', 'http://example.com/',
                            files={"name": BytesIO(b"hello")})
        self.agent.request.assert_called_once_with(
            b'POST', b'http://example.com/',
            Headers({
                b'accept-encoding': [b'gzip'],
                b'Content-Type': [b'multipart/form-data; boundary=heyDavid']
            }),
            self.MultiPartProducer.return_value)
        FP = self.FileBodyProducer.return_value
        self.assertEqual(
            mock.call([('name', (None, 'application/octet-stream', FP))],
                      boundary=b'heyDavid'),
            self.MultiPartProducer.call_args)

    @mock.patch('treq.client.uuid.uuid4', mock.Mock(return_value="heyDavid"))
    def test_request_named_attachment(self):
        # A (filename, fileobj) tuple: the content type is guessed from the
        # filename extension (image/jpeg here).
        self.client.request('POST', 'http://example.com/',
                            files={"name": ('image.jpg', BytesIO(b"hello"))})
        self.agent.request.assert_called_once_with(
            b'POST', b'http://example.com/',
            Headers({
                b'accept-encoding': [b'gzip'],
                b'Content-Type': [b'multipart/form-data; boundary=heyDavid']
            }),
            self.MultiPartProducer.return_value)
        FP = self.FileBodyProducer.return_value
        self.assertEqual(
            mock.call([('name', ('image.jpg', 'image/jpeg', FP))],
                      boundary=b'heyDavid'),
            self.MultiPartProducer.call_args)

    @mock.patch('treq.client.uuid.uuid4', mock.Mock(return_value="heyDavid"))
    def test_request_named_attachment_and_ctype(self):
        # A (filename, content-type, fileobj) tuple: the explicit content
        # type wins over the extension-based guess.
        self.client.request(
            'POST', 'http://example.com/',
            files={"name": ('image.jpg', 'text/plain', BytesIO(b"hello"))})
        self.agent.request.assert_called_once_with(
            b'POST', b'http://example.com/',
            Headers({
                b'accept-encoding': [b'gzip'],
                b'Content-Type': [b'multipart/form-data; boundary=heyDavid']
            }),
            self.MultiPartProducer.return_value)
        FP = self.FileBodyProducer.return_value
        self.assertEqual(
            mock.call([('name', ('image.jpg', 'text/plain', FP))],
                      boundary=b'heyDavid'),
            self.MultiPartProducer.call_args)

    @mock.patch('treq.client.uuid.uuid4', mock.Mock(return_value="heyDavid"))
    def test_request_mixed_params(self):
        # Mixing *data* and *files* produces one multipart body: form fields
        # first, then file parts.  A file object with a ``name`` attribute
        # supplies its own filename (and hence content type).
        class NamedFile(BytesIO):
            def __init__(self, val):
                BytesIO.__init__(self, val)
                self.name = "image.png"

        self.client.request(
            'POST', 'http://example.com/',
            data=[("a", "b"), ("key", "val")],
            files=[
                ("file1", ('image.jpg', BytesIO(b"hello"))),
                ("file2", NamedFile(b"yo"))])
        self.agent.request.assert_called_once_with(
            b'POST', b'http://example.com/',
            Headers({
                b'accept-encoding': [b'gzip'],
                b'Content-Type': [b'multipart/form-data; boundary=heyDavid']
            }),
            self.MultiPartProducer.return_value)
        FP = self.FileBodyProducer.return_value
        self.assertEqual(
            mock.call([
                ('a', 'b'),
                ('key', 'val'),
                ('file1', ('image.jpg', 'image/jpeg', FP)),
                ('file2', ('image.png', 'image/png', FP))],
                boundary=b'heyDavid'),
            self.MultiPartProducer.call_args)

    @mock.patch('treq.client.uuid.uuid4', mock.Mock(return_value="heyDavid"))
    def test_request_mixed_params_dict(self):
        # Same as above but with dict-shaped *data* and *files*.
        self.client.request('POST', 'http://example.com/',
                            data={"key": "a", "key2": "b"},
                            files={"file1": BytesIO(b"hey")})
        self.agent.request.assert_called_once_with(
            b'POST', b'http://example.com/',
            Headers({
                b'accept-encoding': [b'gzip'],
                b'Content-Type': [b'multipart/form-data; boundary=heyDavid']
            }),
            self.MultiPartProducer.return_value)
        FP = self.FileBodyProducer.return_value
        self.assertEqual(
            mock.call([
                ('key', 'a'),
                ('key2', 'b'),
                ('file1', (None, 'application/octet-stream', FP))],
                boundary=b'heyDavid'),
            self.MultiPartProducer.call_args)

    def test_request_unsupported_params_combination(self):
        # A raw file-like *data* body cannot be combined with *files*.
        self.assertRaises(ValueError,
                          self.client.request,
                          'POST', 'http://example.com/',
                          data=BytesIO(b"yo"),
                          files={"file1": BytesIO(b"hey")})

    def test_request_dict_headers(self):
        # Dict headers accept both a single string and a list of strings
        # per key; everything is encoded to bytes.
        self.client.request('GET', 'http://example.com/', headers={
            'User-Agent': 'treq/0.1dev',
            'Accept': ['application/json', 'text/plain']
        })
        self.agent.request.assert_called_once_with(
            b'GET', b'http://example.com/',
            Headers({
                b'User-Agent': [b'treq/0.1dev'],
                b'accept-encoding': [b'gzip'],
                b'Accept': [b'application/json', b'text/plain']
            }),
            None)

    @with_clock
    def test_request_timeout_fired(self, clock):
        """
        Verify the request is cancelled if a response is not received
        within specified timeout period.
        """
        self.agent.request.return_value = d = Deferred()
        self.client.request('GET', 'http://example.com', timeout=2)

        # simulate we haven't gotten a response within timeout seconds
        clock.advance(3)

        # a deferred should have been cancelled
        self.failureResultOf(d, CancelledError)

    @with_clock
    def test_request_timeout_cancelled(self, clock):
        """
        Verify timeout is cancelled if a response is received before
        timeout period elapses.
        """
        self.agent.request.return_value = d = Deferred()
        self.client.request('GET', 'http://example.com', timeout=2)

        # simulate a response
        d.callback(mock.Mock(code=200, headers=Headers({})))

        # now advance the clock but since we already got a result,
        # a cancellation timer should have been cancelled
        clock.advance(3)

        self.successResultOf(d)

    def test_response_is_buffered(self):
        # By default the response body is buffered: delivering the body a
        # second time must not hit the underlying response again.
        response = mock.Mock(deliverBody=mock.Mock(),
                             headers=Headers({}))
        self.agent.request.return_value = succeed(response)

        d = self.client.get('http://www.example.com')

        result = self.successResultOf(d)

        protocol = mock.Mock(Protocol)
        result.deliverBody(protocol)
        self.assertEqual(response.deliverBody.call_count, 1)

        result.deliverBody(protocol)
        self.assertEqual(response.deliverBody.call_count, 1)

    def test_response_buffering_is_disabled_with_unbufferred_arg(self):
        # unbuffered=True hands back (a wrapper of) the raw response.
        response = mock.Mock(headers=Headers({}))
        self.agent.request.return_value = succeed(response)

        d = self.client.get('http://www.example.com', unbuffered=True)

        # YOLO public attribute.
        self.assertEqual(self.successResultOf(d).original, response)

    def test_request_post_redirect_denied(self):
        # A 302 in response to a POST is not followed; it surfaces as a
        # ResponseFailed failure.
        response = mock.Mock(code=302, headers=Headers({'Location': ['/']}))
        self.agent.request.return_value = succeed(response)
        d = self.client.post('http://www.example.com')
        self.failureResultOf(d, ResponseFailed)

    def test_request_browser_like_redirects(self):
        # browser_like_redirects=True follows the 302 (here short-circuited
        # by patching RedirectAgent._handleRedirect) and yields the final
        # 200 response.
        response = mock.Mock(code=302, headers=Headers({'Location': ['/']}))
        self.agent.request.return_value = succeed(response)

        raw = mock.Mock(return_value=[])
        final_resp = mock.Mock(code=200, headers=mock.Mock(getRawHeaders=raw))

        with mock.patch('twisted.web.client.RedirectAgent._handleRedirect',
                        return_value=final_resp):
            d = self.client.post('http://www.google.com',
                                 browser_like_redirects=True,
                                 unbuffered=True)

        self.assertEqual(self.successResultOf(d).original, final_resp)
class HTTPClientTests(TestCase):
    """
    Tests for ``HTTPClient`` driven against a mocked ``Agent``: each test
    calls ``HTTPClient.request()`` (or a shortcut like ``post``/``get``) and
    inspects exactly what was passed to ``Agent.request()``, so no network
    I/O takes place.
    """

    def setUp(self):
        # A Mock Agent records the (method, uri, headers, bodyProducer)
        # tuple that HTTPClient builds for each request.
        self.agent = mock.Mock(Agent)
        self.client = HTTPClient(self.agent)

        # Patch the body producers so that request bodies can be recovered
        # from the mocks' call history (see assertBody()).
        self.fbp_patcher = mock.patch('treq.client.FileBodyProducer')
        self.FileBodyProducer = self.fbp_patcher.start()
        self.addCleanup(self.fbp_patcher.stop)

        self.mbp_patcher = mock.patch('treq.multipart.MultiPartProducer')
        self.MultiPartProducer = self.mbp_patcher.start()
        self.addCleanup(self.mbp_patcher.stop)

    def assertBody(self, expected):
        """Assert the body handed to FileBodyProducer reads as *expected*."""
        body = self.FileBodyProducer.mock_calls[0][1][0]
        self.assertEqual(body.read(), expected)

    def test_post(self):
        # post() sends b'POST' with no body and the default gzip header.
        self.client.post('http://example.com/')
        self.agent.request.assert_called_once_with(
            b'POST', b'http://example.com/',
            Headers({b'accept-encoding': [b'gzip']}), None)

    def test_request_uri_idn(self):
        # Internationalized domain names are IDNA-encoded (punycode).
        self.client.request('GET', u'http://č.net')
        self.agent.request.assert_called_once_with(
            b'GET', b'http://xn--bea.net',
            Headers({b'accept-encoding': [b'gzip']}), None)

    def test_request_uri_decodedurl(self):
        """
        A URL may be passed as a `hyperlink.DecodedURL` object. It is
        converted to bytes when passed to the underlying agent.
        """
        url = DecodedURL.from_text(u"https://example.org/foo")
        self.client.request("GET", url)
        self.agent.request.assert_called_once_with(
            b"GET", b"https://example.org/foo",
            Headers({b"accept-encoding": [b"gzip"]}),
            None,
        )

    def test_request_uri_encodedurl(self):
        """
        A URL may be passed as a `hyperlink.EncodedURL` object. It is
        converted to bytes when passed to the underlying agent.
        """
        url = EncodedURL.from_text(u"https://example.org/foo")
        self.client.request("GET", url)
        self.agent.request.assert_called_once_with(
            b"GET", b"https://example.org/foo",
            Headers({b"accept-encoding": [b"gzip"]}),
            None,
        )

    def test_request_uri_bytes_pass(self):
        """
        The URL parameter may contain path segments or querystring parameters
        that are not valid UTF-8. These pass through.
        """
        # This URL is http://example.com/hello?who=you, but "hello", "who",
        # and "you" are encoded as UTF-16. The particulars of the encoding
        # aren't important; what matters is that those segments can't be
        # decoded by Hyperlink's UTF-8 default.
        self.client.request(
            "GET",
            ("http://example.com/%FF%FEh%00e%00l%00l%00o%00"
             "?%FF%FEw%00h%00o%00=%FF%FEy%00o%00u%00"),
        )
        self.agent.request.assert_called_once_with(
            b'GET',
            (b'http://example.com/%FF%FEh%00e%00l%00l%00o%00'
             b'?%FF%FEw%00h%00o%00=%FF%FEy%00o%00u%00'),
            Headers({b'accept-encoding': [b'gzip']}),
            None,
        )

    def test_request_uri_plus_pass(self):
        """
        URL parameters may contain spaces encoded as ``+``. These remain as
        such and are not mangled.

        This reproduces `Klein #339 <https://github.com/twisted/klein/issues/339>`_.
        """
        self.client.request(
            "GET",
            "https://example.com/?foo+bar=baz+biff",
        )
        self.agent.request.assert_called_once_with(
            b'GET',
            b"https://example.com/?foo+bar=baz+biff",
            Headers({b'accept-encoding': [b'gzip']}),
            None,
        )

    def test_request_uri_idn_params(self):
        """
        A URL that contains non-ASCII characters can be augmented with
        querystring parameters.

        This reproduces treq #264.
        """
        self.client.request('GET', u'http://č.net', params={'foo': 'bar'})
        self.agent.request.assert_called_once_with(
            b'GET', b'http://xn--bea.net/?foo=bar',
            Headers({b'accept-encoding': [b'gzip']}), None)

    def test_request_uri_hyperlink_params(self):
        """
        The *params* argument augments an instance of `hyperlink.DecodedURL`
        passed as the *url* parameter, just as if it were a string.
        """
        self.client.request(
            method="GET",
            url=DecodedURL.from_text(u"http://č.net"),
            params={"foo": "bar"},
        )
        self.agent.request.assert_called_once_with(
            b"GET", b"http://xn--bea.net/?foo=bar",
            Headers({b"accept-encoding": [b"gzip"]}),
            None,
        )

    def test_request_case_insensitive_methods(self):
        # Method names are upper-cased before being sent.
        self.client.request('gEt', 'http://example.com/')
        self.agent.request.assert_called_once_with(
            b'GET', b'http://example.com/',
            Headers({b'accept-encoding': [b'gzip']}), None)

    def test_request_query_params(self):
        self.client.request('GET', 'http://example.com/',
                            params={'foo': ['bar']})
        self.agent.request.assert_called_once_with(
            b'GET', b'http://example.com/?foo=bar',
            Headers({b'accept-encoding': [b'gzip']}), None)

    def test_request_tuple_query_values(self):
        # A tuple of values behaves like a list of values.
        self.client.request('GET', 'http://example.com/',
                            params={'foo': ('bar', )})
        self.agent.request.assert_called_once_with(
            b'GET', b'http://example.com/?foo=bar',
            Headers({b'accept-encoding': [b'gzip']}), None)

    def test_request_tuple_query_value_coercion(self):
        """
        treq coerces non-string values passed to *params* like
        `urllib.urlencode()`
        """
        self.client.request('GET', 'http://example.com/', params=[
            ('text', u'A\u03a9'),
            ('text-seq', [u'A\u03a9']),
            ('bytes', [b'ascii']),
            ('bytes-seq', [b'ascii']),
            ('native', ['native']),
            ('native-seq', ['aa', 'bb']),
            ('int', 1),
            ('int-seq', (1, 2, 3)),
            ('none', None),
            ('none-seq', [None, None]),
        ])
        self.agent.request.assert_called_once_with(
            b'GET',
            (b'http://example.com/?'
             b'text=A%CE%A9&text-seq=A%CE%A9'
             b'&bytes=ascii&bytes-seq=ascii'
             b'&native=native&native-seq=aa&native-seq=bb'
             b'&int=1&int-seq=1&int-seq=2&int-seq=3'
             b'&none=None&none-seq=None&none-seq=None'),
            Headers({b'accept-encoding': [b'gzip']}),
            None,
        )

    def test_request_tuple_query_param_coercion(self):
        """
        treq coerces non-string param names passed to *params* like
        `urllib.urlencode()`
        """
        # A value used to test that it is never encoded or decoded.
        # It should be invalid UTF-8 or UTF-32 (at least).
        raw_bytes = b"\x00\xff\xfb"

        self.client.request('GET', 'http://example.com/', params=[
            (u'text', u'A\u03a9'),
            (b'bytes', ['ascii', raw_bytes]),
            ('native', 'native'),
            (1, 'int'),
            (None, ['none']),
        ])
        self.agent.request.assert_called_once_with(
            b'GET',
            (b'http://example.com/'
             b'?text=A%CE%A9&bytes=ascii&bytes=%00%FF%FB'
             b'&native=native&1=int&None=none'),
            Headers({b'accept-encoding': [b'gzip']}),
            None,
        )

    def test_request_query_param_seps(self):
        """
        When the characters ``&`` and ``#`` are passed to *params* as param
        names or values they are percent-escaped in the URL.

        This reproduces https://github.com/twisted/treq/issues/282
        """
        self.client.request('GET', 'http://example.com/', params=(
            ('ampersand', '&'),
            ('&', 'ampersand'),
            ('octothorpe', '#'),
            ('#', 'octothorpe'),
        ))
        self.agent.request.assert_called_once_with(
            b'GET',
            (b'http://example.com/'
             b'?ampersand=%26'
             b'&%26=ampersand'
             b'&octothorpe=%23'
             b'&%23=octothorpe'),
            Headers({b'accept-encoding': [b'gzip']}),
            None,
        )

    def test_request_merge_query_params(self):
        # *params* is appended after any querystring already in the URL.
        self.client.request('GET', 'http://example.com/?baz=bax',
                            params={'foo': ['bar', 'baz']})
        self.agent.request.assert_called_once_with(
            b'GET', b'http://example.com/?baz=bax&foo=bar&foo=baz',
            Headers({b'accept-encoding': [b'gzip']}), None)

    def test_request_merge_tuple_query_params(self):
        self.client.request('GET', 'http://example.com/?baz=bax',
                            params=[('foo', 'bar')])
        self.agent.request.assert_called_once_with(
            b'GET', b'http://example.com/?baz=bax&foo=bar',
            Headers({b'accept-encoding': [b'gzip']}), None)

    def test_request_dict_single_value_query_params(self):
        self.client.request('GET', 'http://example.com/',
                            params={'foo': 'bar'})
        self.agent.request.assert_called_once_with(
            b'GET', b'http://example.com/?foo=bar',
            Headers({b'accept-encoding': [b'gzip']}), None)

    def test_request_data_dict(self):
        # A dict *data* body is form-urlencoded and sets the Content-Type.
        self.client.request('POST', 'http://example.com/',
                            data={'foo': ['bar', 'baz']})
        self.agent.request.assert_called_once_with(
            b'POST', b'http://example.com/',
            Headers({
                b'Content-Type': [b'application/x-www-form-urlencoded'],
                b'accept-encoding': [b'gzip']
            }),
            self.FileBodyProducer.return_value)
        self.assertBody(b'foo=bar&foo=baz')

    def test_request_data_single_dict(self):
        self.client.request('POST', 'http://example.com/',
                            data={'foo': 'bar'})
        self.agent.request.assert_called_once_with(
            b'POST', b'http://example.com/',
            Headers({
                b'Content-Type': [b'application/x-www-form-urlencoded'],
                b'accept-encoding': [b'gzip']
            }),
            self.FileBodyProducer.return_value)
        self.assertBody(b'foo=bar')

    def test_request_data_tuple(self):
        self.client.request('POST', 'http://example.com/',
                            data=[('foo', 'bar')])
        self.agent.request.assert_called_once_with(
            b'POST', b'http://example.com/',
            Headers({
                b'Content-Type': [b'application/x-www-form-urlencoded'],
                b'accept-encoding': [b'gzip']
            }),
            self.FileBodyProducer.return_value)
        self.assertBody(b'foo=bar')

    def test_request_data_file(self):
        # A file-like *data* body is streamed as-is: no Content-Type is set.
        temp_fn = self.mktemp()
        with open(temp_fn, "wb") as temp_file:
            temp_file.write(b'hello')
        self.client.request('POST', 'http://example.com/',
                            data=open(temp_fn, 'rb'))
        self.agent.request.assert_called_once_with(
            b'POST', b'http://example.com/',
            Headers({b'accept-encoding': [b'gzip']}),
            self.FileBodyProducer.return_value)
        self.assertBody(b'hello')

    def test_request_json_dict(self):
        # *json* bodies are compact-encoded and get a JSON Content-Type.
        self.client.request('POST', 'http://example.com/',
                            json={'foo': 'bar'})
        self.agent.request.assert_called_once_with(
            b'POST', b'http://example.com/',
            Headers({
                b'Content-Type': [b'application/json; charset=UTF-8'],
                b'accept-encoding': [b'gzip']
            }),
            self.FileBodyProducer.return_value)
        self.assertBody(b'{"foo":"bar"}')

    def test_request_json_tuple(self):
        # Tuples serialize as JSON arrays.
        self.client.request('POST', 'http://example.com/', json=('foo', 1))
        self.agent.request.assert_called_once_with(
            b'POST', b'http://example.com/',
            Headers({
                b'Content-Type': [b'application/json; charset=UTF-8'],
                b'accept-encoding': [b'gzip']
            }),
            self.FileBodyProducer.return_value)
        self.assertBody(b'["foo",1]')

    def test_request_json_number(self):
        self.client.request('POST', 'http://example.com/', json=1.)
        self.agent.request.assert_called_once_with(
            b'POST', b'http://example.com/',
            Headers({
                b'Content-Type': [b'application/json; charset=UTF-8'],
                b'accept-encoding': [b'gzip']
            }),
            self.FileBodyProducer.return_value)
        self.assertBody(b'1.0')

    def test_request_json_string(self):
        self.client.request('POST', 'http://example.com/', json='hello')
        self.agent.request.assert_called_once_with(
            b'POST', b'http://example.com/',
            Headers({
                b'Content-Type': [b'application/json; charset=UTF-8'],
                b'accept-encoding': [b'gzip']
            }),
            self.FileBodyProducer.return_value)
        self.assertBody(b'"hello"')

    def test_request_json_bool(self):
        self.client.request('POST', 'http://example.com/', json=True)
        self.agent.request.assert_called_once_with(
            b'POST', b'http://example.com/',
            Headers({
                b'Content-Type': [b'application/json; charset=UTF-8'],
                b'accept-encoding': [b'gzip']
            }),
            self.FileBodyProducer.return_value)
        self.assertBody(b'true')

    def test_request_json_none(self):
        # None is a legitimate JSON value and encodes to b'null'.
        self.client.request('POST', 'http://example.com/', json=None)
        self.agent.request.assert_called_once_with(
            b'POST', b'http://example.com/',
            Headers({
                b'Content-Type': [b'application/json; charset=UTF-8'],
                b'accept-encoding': [b'gzip']
            }),
            self.FileBodyProducer.return_value)
        self.assertBody(b'null')

    @mock.patch('treq.client.uuid.uuid4', mock.Mock(return_value="heyDavid"))
    def test_request_no_name_attachment(self):
        # A bare file-like value gets no filename and the generic
        # application/octet-stream content type.
        self.client.request('POST', 'http://example.com/',
                            files={"name": BytesIO(b"hello")})
        self.agent.request.assert_called_once_with(
            b'POST', b'http://example.com/',
            Headers({
                b'accept-encoding': [b'gzip'],
                b'Content-Type': [b'multipart/form-data; boundary=heyDavid']
            }),
            self.MultiPartProducer.return_value)
        FP = self.FileBodyProducer.return_value
        self.assertEqual(
            mock.call([('name', (None, 'application/octet-stream', FP))],
                      boundary=b'heyDavid'),
            self.MultiPartProducer.call_args)

    @mock.patch('treq.client.uuid.uuid4', mock.Mock(return_value="heyDavid"))
    def test_request_named_attachment(self):
        # A (filename, fileobj) tuple: content type is guessed from the
        # filename extension (image/jpeg here).
        self.client.request('POST', 'http://example.com/',
                            files={"name": ('image.jpg', BytesIO(b"hello"))})
        self.agent.request.assert_called_once_with(
            b'POST', b'http://example.com/',
            Headers({
                b'accept-encoding': [b'gzip'],
                b'Content-Type': [b'multipart/form-data; boundary=heyDavid']
            }),
            self.MultiPartProducer.return_value)
        FP = self.FileBodyProducer.return_value
        self.assertEqual(
            mock.call([('name', ('image.jpg', 'image/jpeg', FP))],
                      boundary=b'heyDavid'),
            self.MultiPartProducer.call_args)

    @mock.patch('treq.client.uuid.uuid4', mock.Mock(return_value="heyDavid"))
    def test_request_named_attachment_and_ctype(self):
        # A (filename, content-type, fileobj) tuple: explicit content type
        # wins over the extension-based guess.
        self.client.request(
            'POST', 'http://example.com/',
            files={"name": ('image.jpg', 'text/plain', BytesIO(b"hello"))})
        self.agent.request.assert_called_once_with(
            b'POST', b'http://example.com/',
            Headers({
                b'accept-encoding': [b'gzip'],
                b'Content-Type': [b'multipart/form-data; boundary=heyDavid']
            }),
            self.MultiPartProducer.return_value)
        FP = self.FileBodyProducer.return_value
        self.assertEqual(
            mock.call([('name', ('image.jpg', 'text/plain', FP))],
                      boundary=b'heyDavid'),
            self.MultiPartProducer.call_args)

    def test_request_files_tuple_too_short(self):
        """
        The `HTTPClient.request()` *files* argument requires tuples of
        length 2 or 3. It raises `TypeError` when the tuple is too short.
        """
        with self.assertRaises(TypeError) as c:
            self.client.request(
                "POST",
                b"http://example.com/",
                files=[("t1", ("foo.txt", ))],
            )
        self.assertIn("'t1' tuple has length 1", str(c.exception))

    def test_request_files_tuple_too_long(self):
        """
        The `HTTPClient.request()` *files* argument requires tuples of
        length 2 or 3. It raises `TypeError` when the tuple is too long.
        """
        with self.assertRaises(TypeError) as c:
            self.client.request(
                "POST",
                b"http://example.com/",
                files=[
                    ("t4", ("foo.txt", "text/plain", BytesIO(b"...\n"),
                            "extra!")),
                ],
            )
        self.assertIn("'t4' tuple has length 4", str(c.exception))

    @mock.patch('treq.client.uuid.uuid4', mock.Mock(return_value="heyDavid"))
    def test_request_mixed_params(self):
        # Mixing *data* and *files* produces one multipart body: form
        # fields first, then file parts.  A file object with a ``name``
        # attribute supplies its own filename (and hence content type).
        class NamedFile(BytesIO):
            def __init__(self, val):
                BytesIO.__init__(self, val)
                self.name = "image.png"

        self.client.request(
            'POST', 'http://example.com/',
            data=[("a", "b"), ("key", "val")],
            files=[
                ("file1", ('image.jpg', BytesIO(b"hello"))),
                ("file2", NamedFile(b"yo"))])
        self.agent.request.assert_called_once_with(
            b'POST', b'http://example.com/',
            Headers({
                b'accept-encoding': [b'gzip'],
                b'Content-Type': [b'multipart/form-data; boundary=heyDavid']
            }),
            self.MultiPartProducer.return_value)
        FP = self.FileBodyProducer.return_value
        self.assertEqual(
            mock.call([
                ('a', 'b'),
                ('key', 'val'),
                ('file1', ('image.jpg', 'image/jpeg', FP)),
                ('file2', ('image.png', 'image/png', FP))],
                boundary=b'heyDavid'),
            self.MultiPartProducer.call_args)

    @mock.patch('treq.client.uuid.uuid4', mock.Mock(return_value="heyDavid"))
    def test_request_mixed_params_dict(self):
        # Same as above but with dict-shaped *data* and *files*.
        self.client.request('POST', 'http://example.com/',
                            data={"key": "a", "key2": "b"},
                            files={"file1": BytesIO(b"hey")})
        self.agent.request.assert_called_once_with(
            b'POST', b'http://example.com/',
            Headers({
                b'accept-encoding': [b'gzip'],
                b'Content-Type': [b'multipart/form-data; boundary=heyDavid']
            }),
            self.MultiPartProducer.return_value)
        FP = self.FileBodyProducer.return_value
        self.assertEqual(
            mock.call([
                ('key', 'a'),
                ('key2', 'b'),
                ('file1', (None, 'application/octet-stream', FP))],
                boundary=b'heyDavid'),
            self.MultiPartProducer.call_args)

    def test_request_unsupported_params_combination(self):
        # A raw file-like *data* body cannot be combined with *files*.
        self.assertRaises(ValueError,
                          self.client.request,
                          'POST', 'http://example.com/',
                          data=BytesIO(b"yo"),
                          files={"file1": BytesIO(b"hey")})

    def test_request_json_with_data(self):
        """
        Passing `HTTPClient.request()` both *data* and *json* parameters is
        invalid because *json* is ignored. This behavior is deprecated.
        """
        self.client.request(
            "POST",
            "http://example.com/",
            data=BytesIO(b"..."),
            json=None,  # NB: None is a valid value. It encodes to b'null'.
        )

        [w] = self.flushWarnings([self.test_request_json_with_data])
        self.assertEqual(DeprecationWarning, w["category"])
        self.assertEqual(
            ("Argument 'json' will be ignored because 'data' was also passed."
             " This will raise TypeError in the next treq release."),
            w['message'],
        )

    def test_request_json_with_files(self):
        """
        Passing `HTTPClient.request()` both *files* and *json* parameters is
        invalid because *json* is ignored. This behavior is deprecated.
        """
        self.client.request(
            "POST",
            "http://example.com/",
            files={"f1": ("foo.txt", "text/plain", BytesIO(b"...\n"))},
            json=["this is ignored"],
        )

        [w] = self.flushWarnings([self.test_request_json_with_files])
        self.assertEqual(DeprecationWarning, w["category"])
        self.assertEqual(
            ("Argument 'json' will be ignored because 'files' was also passed."
             " This will raise TypeError in the next treq release."),
            w['message'],
        )

    def test_request_dict_headers(self):
        # Dict headers accept both a single string and a list of strings
        # per key; everything is encoded to bytes.
        self.client.request('GET', 'http://example.com/', headers={
            'User-Agent': 'treq/0.1dev',
            'Accept': ['application/json', 'text/plain']
        })
        self.agent.request.assert_called_once_with(
            b'GET', b'http://example.com/',
            Headers({
                b'User-Agent': [b'treq/0.1dev'],
                b'accept-encoding': [b'gzip'],
                b'Accept': [b'application/json', b'text/plain']
            }),
            None)

    def test_request_headers_object(self):
        """
        The *headers* parameter accepts a
        `twisted.web.http_headers.Headers` instance.
        """
        self.client.request(
            "GET",
            "https://example.com",
            headers=Headers({"X-Foo": ["bar"]}),
        )
        self.agent.request.assert_called_once_with(
            b"GET",
            b"https://example.com",
            Headers({
                "X-Foo": ["bar"],
                "Accept-Encoding": ["gzip"],
            }),
            None,
        )

    def test_request_headers_invalid_type(self):
        """
        `HTTPClient.request()` warns that headers of an unexpected type are
        invalid and that this behavior is deprecated.
        """
        self.client.request('GET', 'http://example.com', headers=[])

        [w] = self.flushWarnings([self.test_request_headers_invalid_type])
        self.assertEqual(DeprecationWarning, w['category'])
        self.assertIn(
            "headers must be a dict, twisted.web.http_headers.Headers, or None,",
            w['message'],
        )

    def test_request_dict_headers_invalid_values(self):
        """
        `HTTPClient.request()` warns that non-string header values are
        dropped and that this behavior is deprecated.
        """
        self.client.request('GET', 'http://example.com', headers=OrderedDict([
            ('none', None),
            ('one', 1),
            ('ok', 'string'),
        ]))

        [w1, w2] = self.flushWarnings(
            [self.test_request_dict_headers_invalid_values])
        self.assertEqual(DeprecationWarning, w1['category'])
        self.assertEqual(DeprecationWarning, w2['category'])
        self.assertIn(
            "The value of headers key 'none' has non-string type",
            w1['message'],
        )
        self.assertIn(
            "The value of headers key 'one' has non-string type",
            w2['message'],
        )

    def test_request_invalid_param(self):
        """
        `HTTPClient.request()` rejects invalid keyword parameters with
        `TypeError`.
        """
        self.assertRaises(
            TypeError,
            self.client.request,
            "GET",
            b"http://example.com",
            invalid=True,
        )

    @with_clock
    def test_request_timeout_fired(self, clock):
        """
        Verify the request is cancelled if a response is not received
        within specified timeout period.
        """
        self.agent.request.return_value = d = Deferred()
        self.client.request('GET', 'http://example.com', timeout=2)

        # simulate we haven't gotten a response within timeout seconds
        clock.advance(3)

        # a deferred should have been cancelled
        self.failureResultOf(d, CancelledError)

    @with_clock
    def test_request_timeout_cancelled(self, clock):
        """
        Verify timeout is cancelled if a response is received before
        timeout period elapses.
        """
        self.agent.request.return_value = d = Deferred()
        self.client.request('GET', 'http://example.com', timeout=2)

        # simulate a response
        d.callback(mock.Mock(code=200, headers=Headers({})))

        # now advance the clock but since we already got a result,
        # a cancellation timer should have been cancelled
        clock.advance(3)

        self.successResultOf(d)

    def test_response_is_buffered(self):
        # By default the response body is buffered: delivering the body a
        # second time must not hit the underlying response again.
        response = mock.Mock(deliverBody=mock.Mock(),
                             headers=Headers({}))
        self.agent.request.return_value = succeed(response)

        d = self.client.get('http://www.example.com')

        result = self.successResultOf(d)

        protocol = mock.Mock(Protocol)
        result.deliverBody(protocol)
        self.assertEqual(response.deliverBody.call_count, 1)

        result.deliverBody(protocol)
        self.assertEqual(response.deliverBody.call_count, 1)

    def test_response_buffering_is_disabled_with_unbufferred_arg(self):
        # unbuffered=True hands back (a wrapper of) the raw response.
        response = mock.Mock(headers=Headers({}))
        self.agent.request.return_value = succeed(response)

        d = self.client.get('http://www.example.com', unbuffered=True)

        # YOLO public attribute.
        self.assertEqual(self.successResultOf(d).original, response)

    def test_request_post_redirect_denied(self):
        # A 302 in response to a POST is not followed; it surfaces as a
        # ResponseFailed failure.
        response = mock.Mock(code=302, headers=Headers({'Location': ['/']}))
        self.agent.request.return_value = succeed(response)
        d = self.client.post('http://www.example.com')
        self.failureResultOf(d, ResponseFailed)

    def test_request_browser_like_redirects(self):
        # browser_like_redirects=True follows the 302 (here short-circuited
        # by patching RedirectAgent._handleRedirect) and yields the final
        # 200 response.
        response = mock.Mock(code=302, headers=Headers({'Location': ['/']}))
        self.agent.request.return_value = succeed(response)

        raw = mock.Mock(return_value=[])
        final_resp = mock.Mock(code=200, headers=mock.Mock(getRawHeaders=raw))

        with mock.patch('twisted.web.client.RedirectAgent._handleRedirect',
                        return_value=final_resp):
            d = self.client.post('http://www.google.com',
                                 browser_like_redirects=True,
                                 unbuffered=True)

        self.assertEqual(self.successResultOf(d).original, final_resp)
class DockerProxy(proxy.ReverseProxyResource):
    # Factory used to speak to the real Docker daemon; a class attribute so
    # tests can substitute their own client factory.
    proxyClientFactoryClass = DockerProxyClientFactory

    def __init__(self, dockerAddr=None, dockerPort=None, dockerSocket=None,
                 path='', reactor=reactor, config=None):
        """
        A docker proxy resource which knows how to connect to real Docker
        daemon either via socket (dockerSocket specified) or address + port
        for TCP connection (dockerAddr + dockerPort specified).

        :param dockerAddr: TCP host of the Docker daemon (mutually exclusive
            with ``dockerSocket``).
        :param dockerPort: TCP port of the Docker daemon.
        :param dockerSocket: path to the Docker daemon's UNIX socket.
        :param path: URL path prefix this resource proxies.
        :param reactor: Twisted reactor used for outbound connections.
        :param config: a ``PluginConfiguration``; when None, the default
            on-filesystem configuration is loaded.
        """
        if config is None:
            # Try to get the configuration from the default place on the
            # filesystem.
            self.config = PluginConfiguration()
        else:
            self.config = config
        # (Re-)read adapter/endpoint configuration on every resource creation;
        # getChild creates a new DockerProxy per path segment, so config stays
        # fresh as requests traverse the tree.
        self.config.read_and_parse()
        self.parser = EndpointParser(self.config)
        Resource.__init__(self)
        self.host = dockerAddr
        self.port = dockerPort
        self.socket = dockerSocket
        self.path = path
        self.reactor = reactor
        proxy.ReverseProxyResource.__init__(self, dockerAddr, dockerPort, path,
                                            reactor)  # NB dockerAddr is not actually used
        self.agent = Agent(reactor)  # no connectionpool
        self.client = HTTPClient(self.agent)

    def render(self, request, reactor=reactor):
        """
        Proxy ``request`` to the Docker daemon, running any configured
        pre-hook adapters on the request body first and any post-hook
        adapters on the response afterwards.

        Returns ``NOT_DONE_YET``; the response is written asynchronously
        as the deferred chain completes.  (The ``reactor`` parameter is
        unused here but kept for interface compatibility.)
        """
        # We are processing a leaf request.
        # Get the original request body from the client.
        skipPreHooks = False
        if request.requestHeaders.getRawHeaders('content-type') == ["application/json"]:
            originalRequestBody = request.content.read()
            request.content.seek(0)  # hee hee
        elif request.requestHeaders.getRawHeaders('content-type') == ["application/tar"]:
            # We can't JSON encode binary data, so don't even try.
            skipPreHooks = True
            originalRequestBody = None
        else:
            originalRequestBody = None
        preHooks = []
        postHooks = []
        d = defer.succeed(None)
        for endpoint in self.parser.match_endpoint(request.method,
                                                   request.uri.split("?")[0]):
            # It's possible for a request to match multiple endpoint
            # definitions. Order of matched endpoint is not defined in
            # that case.
            adapters = self.config.endpoint(endpoint)
            preHooks.extend(adapters.pre)
            postHooks.extend(adapters.post)

        def callPreHook(result, hookURL):
            # Each pre-hook sees the body as (possibly) modified by the
            # previous pre-hook; the first one sees the original body.
            if result is None:
                newRequestBody = originalRequestBody
            else:
                newRequestBody = result["ModifiedClientRequest"]["Body"]
            return self.client.post(hookURL, json.dumps({
                        "PowerstripProtocolVersion": 1,
                        "Type": "pre-hook",
                        "ClientRequest": {
                            "Method": request.method,
                            "Request": request.uri,
                            "Body": newRequestBody,
                        }
                    }), headers={'Content-Type': ['application/json']})

        if not skipPreHooks:
            for preHook in preHooks:
                hookURL = self.config.adapter_uri(preHook)
                d.addCallback(callPreHook, hookURL=hookURL)
                d.addCallback(treq.json_content)

        def doneAllPrehooks(result):
            # Finally pass through the request to actual Docker. For now we
            # mutate request in-place in such a way that ReverseProxyResource
            # understands it.
            if result is not None:
                requestBody = b""
                bodyFromAdapter = result["ModifiedClientRequest"]["Body"]
                if bodyFromAdapter is not None:
                    requestBody = bodyFromAdapter.encode("utf-8")
                request.content = StringIO.StringIO(requestBody)
                request.requestHeaders.setRawHeaders(b"content-length",
                                                     [str(len(requestBody))])
            ###########################
            # The following code is copied from t.w.proxy.ReverseProxy so that
            # clientFactory reference can be kept.
            if not self.socket:
                if self.port == 80:
                    host = self.host
                else:
                    host = "%s:%d" % (self.host, self.port)
                request.requestHeaders.setRawHeaders(b"host", [host])
            request.content.seek(0, 0)
            qs = urlparse.urlparse(request.uri)[4]
            if qs:
                rest = self.path + '?' + qs
            else:
                rest = self.path
            allRequestHeaders = request.getAllHeaders()
            if allRequestHeaders.get("transfer-encoding") == "chunked":
                # We send a fully-buffered body below, so the hop-by-hop
                # transfer-encoding header must not be forwarded.
                del allRequestHeaders["transfer-encoding"]
            # XXX Streaming the contents of the request body into memory could
            # cause OOM issues for large build contexts POSTed through
            # powerstrip. See
            # https://github.com/ClusterHQ/powerstrip/issues/51
            body = request.content.read()
            allRequestHeaders["content-length"] = str(len(body))
            clientFactory = self.proxyClientFactoryClass(
                request.method, rest, request.clientproto,
                allRequestHeaders, body, request)
            ###########################
            if self.socket:
                self.reactor.connectUNIX(self.socket, clientFactory)
            else:
                self.reactor.connectTCP(self.host, self.port, clientFactory)
            d = defer.Deferred()
            clientFactory.onCreate(d)
            return d
        d.addCallback(doneAllPrehooks)

        def inspect(client):
            # If there are no post-hooks, allow the response to be streamed
            # back to the client, rather than buffered.
            d = defer.Deferred()
            client.registerListener(d)
            if not postHooks:
                client.setStreamingMode(True)
            return d
        d.addCallback(inspect)

        def callPostHook(result, hookURL):
            serverResponse = result["ModifiedServerResponse"]
            return self.client.post(hookURL, json.dumps({
                # TODO Write tests for the information provided to the adapter.
                "PowerstripProtocolVersion": 1,
                "Type": "post-hook",
                "ClientRequest": {
                    "Method": request.method,
                    "Request": request.uri,
                    "Body": originalRequestBody,
                },
                "ServerResponse": {
                    "ContentType": serverResponse["ContentType"],
                    "Body": serverResponse["Body"],
                    "Code": serverResponse["Code"],
                },
            }), headers={'Content-Type': ['application/json']})

        # XXX Need to skip post-hooks for tar archives from e.g. docker export.
        # https://github.com/ClusterHQ/powerstrip/issues/52
        for postHook in postHooks:
            hookURL = self.config.adapter_uri(postHook)
            d.addCallback(callPostHook, hookURL=hookURL)
            d.addCallback(treq.json_content)

        def sendFinalResponseToClient(result):
            resultBody = result["ModifiedServerResponse"]["Body"].encode("utf-8")
            # Update the Content-Length, since we're modifying the request
            # object in-place.
            request.responseHeaders.setRawHeaders(
                b"content-length", [str(len(resultBody))]
            )
            # Write the final response to the client.
            request.write(resultBody)
            request.finish()
        d.addCallback(sendFinalResponseToClient)

        def squashNoPostHooks(failure):
            # Streaming mode signals "no post-processing needed" via
            # NoPostHooks; that is not an error, so swallow it here.
            failure.trap(NoPostHooks)
        d.addErrback(squashNoPostHooks)
        d.addErrback(log.err, 'while running chain')
        return NOT_DONE_YET

    def getChild(self, path, request):
        """
        Build a child DockerProxy for the next URL segment, accumulating
        the (URL-quoted) segment onto ``self.path`` so the leaf resource
        proxies the full original path.
        """
        # NOTE(review): removed an unused `fragments = request.uri.split("/")`
        # / `fragments.pop(0)` pair and a commented-out postpath check left
        # over from an earlier revision; neither affected behavior.
        proxyArgs = (self.host, self.port, self.socket,
                     self.path + '/' + urlquote(path, safe=""),
                     self.reactor)
        resource = DockerProxy(*proxyArgs, config=self.config)
        return resource