class PathResource(resource.Resource): """ Docker has asked us for the concrete on-disk location of an extant volume. If it hasn't already asked for it to be mounted, or is currently on another machine, this is an error. """ def __init__(self, *args, **kw): self._agent = Agent(reactor) # no connectionpool self.client = HTTPClient(self._agent) return resource.Resource.__init__(self, *args, **kw) def render_POST(self, request): # TODO make a FlockerResource base class self.base_url = os.environ.get("FLOCKER_CONTROL_SERVICE_BASE_URL") # expect Name data = json.loads(request.content.read()) print "path:", data d = self.client.get(self.base_url + "/configuration/datasets") d.addCallback(treq.json_content) def get_dataset(datasets): dataset_id = None # 1. find the flocker dataset_id of the named volume # 2. look up the path of that volume in the datasets current state for dataset in datasets: if dataset["metadata"]["name"] == data["Name"]: dataset_id = dataset["dataset_id"] d = self.client.get(self.base_url + "/state/datasets") d.addCallback(treq.json_content) def get_path(datasets, dataset_id): if dataset_id is None: path = None else: for dataset in datasets: if dataset["dataset_id"] == dataset_id: path = dataset["path"] if path is not None: request.write(json.dumps(dict( Mountpoint=path, Err=None, ))) else: request.write(json.dumps(dict( Mountpoint="", Err="unable to find %s" % (data["Name"],), ))) request.finish() d.addCallback(get_path, dataset_id=dataset_id) return d d.addCallback(get_dataset) return server.NOT_DONE_YET
def validate_ticket(self, ticket, request):
    """
    Validate *ticket* against the CAS service-validate endpoint.

    The current request URL, stripped of its ticket query argument, is the
    CAS "service".  Returns a Deferred firing with the result of
    ``parse_sv_results``.
    """
    def _replace_query(parsed, query):
        # Rebuild a URL from a ParseResult with its query string swapped out.
        return urlparse.urlunparse(
            urlparse.ParseResult(*tuple(parsed[:4] + (query,) + parsed[5:])))

    # Reconstruct the service URL with the ticket parameter removed.
    here = urlparse.urlparse(self.get_url(request))
    query_map = urlparse.parse_qs(here.query)
    query_map.pop(self.ticket_name, None)
    service_url = _replace_query(here, urlencode(query_map, doseq=True))

    # Point the configured service-validate URL at this service/ticket pair.
    validate_query = urlencode({
        self.service_name: service_url,
        self.ticket_name: ticket,
    }, doseq=True)
    target = urlparse.urlparse(self.cas_info['service_validate_url'])
    service_validate_url = _replace_query(target, validate_query)

    self.log("Requesting service-validate URL => '{0}' ...".format(
        service_validate_url))
    d = HTTPClient(self.cas_agent).get(service_validate_url)
    d.addCallback(treq.content)
    d.addCallback(self.parse_sv_results, service_url, ticket, request)
    return d
def validate_ticket(self, ticket, request):
    """
    Check *ticket* with the CAS server and hand the raw response body to
    ``parse_sv_results``.
    """
    ticket_name = self.ticket_name
    # Current URL minus the ticket query argument == the CAS "service".
    pieces = list(urlparse.urlparse(self.get_url(request)))
    query = urlparse.parse_qs(pieces[4])
    if ticket_name in query:
        del query[ticket_name]
    pieces[4] = urlencode(query, doseq=True)
    service_url = urlparse.urlunparse(pieces)

    # Rewrite the query of the configured service-validate URL to carry
    # this service/ticket pair.
    validate_pieces = list(
        urlparse.urlparse(self.cas_info['service_validate_url']))
    validate_pieces[4] = urlencode(
        {self.service_name: service_url, ticket_name: ticket}, doseq=True)
    service_validate_url = urlparse.urlunparse(validate_pieces)

    self.log(
        "Requesting service-validate URL => '{0}' ...".format(
            service_validate_url))
    d = HTTPClient(self.cas_agent).get(service_validate_url)
    d.addCallback(treq.content)
    d.addCallback(self.parse_sv_results, service_url, ticket, request)
    return d
def main(reactor, *args):
    """
    Fetch a protected resource with HTTP basic auth through a custom agent,
    printing the response.  Returns the request Deferred.
    """
    client = HTTPClient(make_custom_agent(reactor))
    response = client.get(
        'https://secure.example.net/area51',
        auth=('admin', "you'll never guess!"))
    response.addCallback(print_response)
    return response
def run_balance_test(self, user=None, default_route=None, side_effect=None):
    """
    Connect, provision routings, issue an HTTP /balance query and return
    the ``(response_text, response_code)`` pair.
    """
    yield self.connect('127.0.0.1', self.pbPort)
    yield self.prepareRoutingsAndStartConnector(user, default_route, side_effect)

    # Credentials for the balance query
    credentials = {
        'username': self.params['username'],
        'password': self.params['password'],
    }
    balance_url = 'http://127.0.0.1:1401/balance'

    # Issue the balance check request
    http_client = HTTPClient(Agent(reactor))
    resp = yield http_client.get(balance_url, params=credentials)
    body = yield text_content(resp)
    status = resp.code

    # Let in-flight SMPP traffic settle before tearing the connectors down
    yield waitFor(5)
    yield self.stopSmppClientConnectors()

    defer.returnValue((body, status))
def run_rate_test(self, user=None, content=None, source_address=None,
                  destination_address=None, default_route=None,
                  side_effect=None):
    """
    Connect, provision routings, issue an HTTP /rate query and return the
    ``(response_text, response_code)`` pair.
    """
    yield self.connect('127.0.0.1', self.pbPort)
    yield self.prepareRoutingsAndStartConnector(user, default_route, side_effect)

    # Shape the request parameters for this scenario
    if content is None:
        del self.params['content']
    else:
        self.params['content'] = content
    if source_address is not None:
        self.params['from'] = source_address
    if destination_address is not None:
        self.params['to'] = destination_address

    # Send the MT rate query; a msg id is expected on success
    http_client = HTTPClient(Agent(reactor))
    resp = yield http_client.get('http://127.0.0.1:1401/rate',
                                 params=self.params)
    body = yield text_content(resp)
    status = resp.code

    # Let in-flight SMPP traffic settle before tearing the connectors down
    yield waitFor(5)
    yield self.stopSmppClientConnectors()

    defer.returnValue((body, status))
def test_rate_interceptorpb_not_connected(self):
    """
    A /rate request must answer 503 when the InterceptorPB is offline, and
    only the error counter moves.
    """
    ic_before = self.stats_http.get('interceptor_count')
    iec_before = self.stats_http.get('interceptor_error_count')

    # Fire a SMS MT rate request through the http interface
    client = HTTPClient(Agent(reactor))
    response = yield client.get(
        'http://127.0.0.1:1401/rate',
        params={'to': '06155423',
                'username': self.u1.username,
                'password': self.u1_password})
    status = response.code
    body = yield text_content(response)

    # Asserts
    self.assertEqual(status, 503)
    self.assertEqual(body, '"InterceptorPB not connected !"')
    self.assertEqual(ic_before, self.stats_http.get('interceptor_count'))
    self.assertEqual(iec_before + 1,
                     self.stats_http.get('interceptor_error_count'))
def test_rate_syntax_error(self):
    """
    A broken interception script must answer HTTP 400 and bump only the
    error counter.
    """
    ic_before = self.stats_http.get('interceptor_count')
    iec_before = self.stats_http.get('interceptor_error_count')

    # Bring up the InterceptorPB link first
    yield self.ipb_connect()

    # Fire a SMS MT rate request through the http interface
    client = HTTPClient(Agent(reactor))
    response = yield client.get(
        'http://127.0.0.1:1401/rate',
        params={'to': '06155423',
                'username': self.u1.username,
                'password': self.u1_password})
    status = response.code
    body = yield text_content(response)

    # Asserts
    self.assertEqual(status, 400)
    self.assertEqual(
        body,
        '"Failed running interception script, check log for details"')
    self.assertEqual(ic_before, self.stats_http.get('interceptor_count'))
    self.assertEqual(iec_before + 1,
                     self.stats_http.get('interceptor_error_count'))
def validate_ticket(self, ticket, request):
    """
    Validate *ticket* against the CAS service-validate endpoint and defer
    to ``parse_sv_results`` with the raw response body.
    """
    this_url = self.get_url(request)
    # The service URL is the current URL minus the ticket query argument.
    pieces = urlparse.urlparse(this_url)
    query = urlparse.parse_qs(pieces.query)
    query.pop(self.ticket_name, None)
    pieces = urlparse.ParseResult(
        *tuple(pieces[:4] + (urlencode(query),) + pieces[5:]))
    service_url = urlparse.urlunparse(pieces)

    # Rewrite the query of the configured service-validate URL.
    validate_query = urlencode({
        self.service_name: service_url,
        self.ticket_name: ticket,
    })
    validate_pieces = urlparse.urlparse(self.cas_info['service_validate_url'])
    validate_pieces = urlparse.ParseResult(
        *tuple(validate_pieces[:4] + (validate_query,) + validate_pieces[5:]))
    service_validate_url = urlparse.urlunparse(validate_pieces)

    log.msg("[INFO] requesting URL '%s' ..." % service_validate_url)
    d = HTTPClient(self.agent).get(service_validate_url)
    d.addCallback(treq.content)
    d.addCallback(self.parse_sv_results, service_url, ticket, request)
    return d
def test_rate_success(self):
    """
    With a valid interception script provisioned and the InterceptorPB
    connected, /rate answers HTTP 200 and only the success counter moves.
    """
    ic_before = self.stats_http.get('interceptor_count')
    iec_before = self.stats_http.get('interceptor_error_count')

    # Re-provision interceptor with correct script
    mt_interceptor = MTInterceptorScript(self.update_message_sript)
    yield self.mtinterceptor_add(DefaultInterceptor(mt_interceptor), 0)

    # Bring up the InterceptorPB link
    yield self.ipb_connect()

    # Fire a SMS MT rate request through the http interface
    client = HTTPClient(Agent(reactor))
    response = yield client.get(
        'http://127.0.0.1:1401/rate',
        params={'to': '06155423',
                'username': self.u1.username,
                'password': self.u1_password})
    status = response.code
    body = yield text_content(response)

    # Asserts
    self.assertEqual(status, 200)
    self.assertEqual(ic_before + 1, self.stats_http.get('interceptor_count'))
    self.assertEqual(iec_before,
                     self.stats_http.get('interceptor_error_count'))
def request(self, url: str, expected_certificate: x509.Certificate):
    """
    Send a HTTPS request to the given URL, ensuring that the given
    certificate is the one used via SPKI-hash-based pinning comparison.
    """
    # Non-persistent pool: no lingering connections, so the test ends with
    # a clean reactor.
    pinned_policy = _StorageClientHTTPSPolicy(
        expected_spki_hash=get_spki_hash(expected_certificate))
    agent = Agent(
        reactor,
        pinned_policy,
        pool=HTTPConnectionPool(reactor, persistent=False),
    )
    return HTTPClient(agent).get(url)
class StreamingEliotLogsTests(SyncTestCase):
    """
    Tests for the log streaming resources created by
    ``create_log_resources``.
    """
    def setUp(self):
        # Drive the resource tree in-memory, without a listening port.
        self.resource = create_log_resources()
        self.agent = RequestTraversalAgent(self.resource)
        self.client = HTTPClient(self.agent)
        return super(StreamingEliotLogsTests, self).setUp()

    def test_v1(self):
        """
        There is a resource at *v1*.
        """
        response = self.client.get(b"http:///v1")
        self.assertThat(
            response,
            succeeded(has_response_code(Equals(OK))),
        )
def main():
    """
    Fetch an external-IP echo page through a local SOCKS5 proxy (a Tor
    client listening on 127.0.0.1:9050) and hand the body to ``foo``.
    """
    # NOTE(review): `url` and `factory` are built but never used below --
    # presumably leftovers from the commented-out TLS-wrapped variant of the
    # endpoint.  Confirm before removing.
    url = "http://google.com"
    factory = ssl.ClientContextFactory()
    factory.protocol = LineReceiver
    # Endpoint pointing at the local SOCKS proxy.
    tor_endpoint = TCP4ClientEndpoint(reactor, '127.0.0.1', 9050)
    #tls_endpoint = TLSWrapClientEndpoint(tor_endpoint, factory)
    socks_agent = SOCKS5Agent(reactor, proxyEndpoint=tor_endpoint)
    socks_client = HTTPClient(socks_agent)
    # GET through the proxy, read the whole body, then pass it to foo.
    d = socks_client.get("https://wtfismyip.com/text")
    d.addCallback(readBody)
    d.addCallback(foo)
    reactor.run()
def test_rate_ESME_RINVESMCLASS_from_script(self):
    """
    Will ensure if script defines only smpp error it will implicitly cause
    a http 520 error.
    """
    ic_before = self.stats_http.get('interceptor_count')
    iec_before = self.stats_http.get('interceptor_error_count')

    # Re-provision interceptor with the smpp-error-only script
    mt_interceptor = MTInterceptorScript(self.return_ESME_RINVESMCLASS)
    yield self.mtinterceptor_add(DefaultInterceptor(mt_interceptor), 0)

    # Bring up the InterceptorPB link
    yield self.ipb_connect()

    # Fire a SMS MT rate request through the http interface
    client = HTTPClient(Agent(reactor))
    response = yield client.get(
        'http://127.0.0.1:1401/rate',
        params={'to': '06155423',
                'username': self.u1.username,
                'password': self.u1_password})
    status = response.code
    body = yield text_content(response)

    # Give the smppc a moment to deliver
    yield waitFor(2)

    # Asserts
    self.assertEqual(status, 520)
    self.assertEqual(body, '"Interception specific error code 520"')
    self.assertEqual(ic_before, self.stats_http.get('interceptor_count'))
    self.assertEqual(iec_before + 1,
                     self.stats_http.get('interceptor_error_count'))
class RabbitmqManagementClient(object):
    """
    Thin client for the RabbitMQ management HTTP API.

    Requests are funnelled through a DeferredSemaphore so that at most
    TPS_LIMIT of them are in flight at once, matched by the connection
    pool's per-host persistent-connection cap.
    """
    # Reactor used for the HTTP machinery; overridable in tests.
    clock = reactor

    @classmethod
    def pool_factory(cls, reactor):
        """Build the shared persistent connection pool."""
        pool = HTTPConnectionPool(reactor, persistent=True)
        pool.maxPersistentPerHost = TPS_LIMIT
        # BUG FIX: the pool was previously configured but never returned,
        # so agent_factory always received pool=None and the
        # maxPersistentPerHost cap was silently dropped.
        return pool

    @classmethod
    def agent_factory(cls, reactor, pool=None):
        """Build the Agent backing the treq HTTPClient."""
        return Agent(reactor, pool=pool)

    def __init__(self, base_url, username, password):
        self.base_url = base_url
        self.username = username
        self.password = password
        self.http_client = HTTPClient(self.agent_factory(
            self.clock, pool=self.pool_factory(self.clock)))
        self.semaphore = defer.DeferredSemaphore(TPS_LIMIT)

    def get_queue(self, vhost, queue_name):
        """
        GET /api/queues/<vhost>/<queue_name> with basic auth, returning a
        Deferred firing with the decoded JSON body.  Concurrency is bounded
        by the semaphore.
        """
        # NOTE(review): queue_name is not URL-quoted, unlike vhost --
        # confirm callers only pass URL-safe names.
        url = 'http://%s/api/queues/%s/%s' % (
            self.base_url,
            urllib.quote(vhost, safe=''),
            queue_name
        )
        def _get_queue():
            d = self.http_client.get(url, auth=(self.username, self.password))
            d.addCallback(treq.json_content)
            return d
        return self.semaphore.run(_get_queue)
def test_rate_any_exception_from_script(self):
    """
    Any exception raised inside the interception script must surface as an
    HTTP 400 with a generic message, bumping only the error counter.
    """
    ic_before = self.stats_http.get('interceptor_count')
    iec_before = self.stats_http.get('interceptor_error_count')

    # Re-provision interceptor with a script that raises
    mt_interceptor = MTInterceptorScript(self.raise_any_exception)
    yield self.mtinterceptor_add(DefaultInterceptor(mt_interceptor), 0)

    # Bring up the InterceptorPB link
    yield self.ipb_connect()

    # Fire a SMS MT rate request through the http interface
    client = HTTPClient(Agent(reactor))
    response = yield client.get(
        'http://127.0.0.1:1401/rate',
        params={'to': '06155423',
                'username': self.u1.username,
                'password': self.u1_password})
    status = response.code
    body = yield text_content(response)

    # Give the smppc a moment to deliver
    yield waitFor(2)

    # Asserts
    self.assertEqual(status, 400)
    self.assertEqual(
        body,
        '"Failed running interception script, check log for details"')
    self.assertEqual(ic_before, self.stats_http.get('interceptor_count'))
    self.assertEqual(iec_before + 1,
                     self.stats_http.get('interceptor_error_count'))
class ProxyTests(TestCase):
    """
    Integration-style tests driving a Powerstrip proxy in front of a fake
    Docker daemon, with optional "adder" adapter servers exercising pre- and
    post-hooks.

    NOTE(review): this block was recovered from a whitespace-mangled source;
    the embedded multiline YAML literals were reconstructed with newlines --
    verify their indentation against the original file.
    """

    def setUp(self):
        """
        Construct a fake "Docker daemon" (one which does much less than the
        actual Docker daemon) and a Proxy instance.

        Pre- and post-hook API servers are provided by the individual tests.
        """
        self.agent = Agent(reactor) # no connectionpool
        self.client = HTTPClient(self.agent)

    def tearDown(self):
        # Stop every port opened by _configure plus the optional adder
        # servers, waiting on all of them in parallel.
        shutdowns = [
            self.dockerServer.stopListening(),
            self.proxyServer.stopListening()]
        if hasattr(self, 'adderServer'):
            shutdowns.append(self.adderServer.stopListening())
        if hasattr(self, 'adderTwoServer'):
            shutdowns.append(self.adderTwoServer.stopListening())
        return defer.gatherResults(shutdowns)

    def _configure(self, config_yml, dockerArgs={}, dockerOnSocket=False):
        # Stand up the fake Docker (TCP or UNIX socket), write the YAML
        # config to a temp file, and start the proxy in front of it.
        # NOTE(review): mutable default for dockerArgs -- fine as long as it
        # is only ever read; confirm nothing mutates it.
        self.dockerAPI = TrafficLoggingFactory(
            testtools.FakeDockerServer(**dockerArgs), "docker-")
        if dockerOnSocket:
            self.socketPath = self.mktemp()
            self.dockerServer = reactor.listenUNIX(self.socketPath,
                                                   self.dockerAPI)
        else:
            self.dockerServer = reactor.listenTCP(0, self.dockerAPI)
            self.dockerPort = self.dockerServer.getHost().port
        self.config = PluginConfiguration()
        tmp = self.mktemp()
        self.config._default_file = tmp
        fp = FilePath(tmp)
        fp.setContent(config_yml)
        self.parser = EndpointParser(self.config)
        if dockerOnSocket:
            self.proxyAPI = TrafficLoggingFactory(
                powerstrip.ServerProtocolFactory(
                    dockerSocket=self.socketPath,
                    config=self.config), "proxy-")
        else:
            self.proxyAPI = TrafficLoggingFactory(
                powerstrip.ServerProtocolFactory(
                    dockerAddr="127.0.0.1", dockerPort=self.dockerPort,
                    config=self.config), "proxy-")
        self.proxyServer = reactor.listenTCP(0, self.proxyAPI)
        self.proxyPort = self.proxyServer.getHost().port

    def test_empty_endpoints(self):
        """
        The proxy passes through requests when no endpoints are specified.

        In particular, when POST to the /towel endpoint on the *proxy*, we
        get to see that we were seen by the (admittedly fake) Docker daemon.
        """
        self._configure("endpoints: {}\nadapters: {}")
        d = self.client.post('http://127.0.0.1:%d/towel' % (self.proxyPort,),
                             json.dumps({"hiding": "things"}),
                             headers={'Content-Type': ['application/json']})
        d.addCallback(treq.json_content)
        def verify(response):
            self.assertEqual(response,
                             {"hiding": "things", "SeenByFakeDocker": 42})
        d.addCallback(verify)
        return d

    def test_empty_endpoints_socket(self):
        """
        The proxy is able to connect to Docker on a UNIX socket.
        """
        self._configure("endpoints: {}\nadapters: {}", dockerOnSocket=True)
        d = self.client.post('http://127.0.0.1:%d/towel' % (self.proxyPort,),
                             json.dumps({"hiding": "things"}),
                             headers={'Content-Type': ['application/json']})
        d.addCallback(treq.json_content)
        def verify(response):
            self.assertEqual(response,
                             {"hiding": "things", "SeenByFakeDocker": 42})
        d.addCallback(verify)
        return d

    def test_endpoint_and_empty_hooks(self):
        """
        An endpoint is specified, but no pre-or post hooks are added to it.
        Requests to the endpoint are proxied.
        """
        endpoint = "/towel"
        self._configure("""endpoints:
  "POST %s":
    pre: []
    post: []
adapters: {}""" % (endpoint,))
        d = self.client.post('http://127.0.0.1:%d%s' % (self.proxyPort,
                                                        endpoint),
                             json.dumps({"hiding": "things"}),
                             headers={'Content-Type': ['application/json']})
        d.addCallback(treq.json_content)
        def verify(response):
            self.assertEqual(response,
                             {"hiding": "things", "SeenByFakeDocker": 42})
        d.addCallback(verify)
        return d

    def _getAdder(self, *args, **kw):
        # Start an adder adapter (increments "Number" by 1 by default).
        self.adderAPI = TrafficLoggingFactory(AdderPlugin(*args, **kw),
                                              "adder-")
        self.adderServer = reactor.listenTCP(0, self.adderAPI)
        self.adderPort = self.adderServer.getHost().port

    def _getAdderTwo(self, *args, **kw):
        # Start a second adder adapter which increments by 2.
        kw["incrementBy"] = 2
        self.adderTwoAPI = TrafficLoggingFactory(AdderPlugin(*args, **kw),
                                                 "adder2-")
        self.adderTwoServer = reactor.listenTCP(0, self.adderTwoAPI)
        self.adderTwoPort = self.adderTwoServer.getHost().port

    def _hookTest(self, config_yml, adderArgs=dict(pre=True),
                  adderTwoArgs=dict(pre=True)):
        """
        Generalised version of a pre-hook test.
        """
        self._getAdder(**adderArgs)
        self._getAdderTwo(**adderTwoArgs)
        self.dockerEndpoint = "/towel"
        self.adapterEndpoint = "/adapter"
        self.args = dict(dockerEndpoint=self.dockerEndpoint,
                         adapterEndpoint=self.adapterEndpoint,
                         adderPort=self.adderPort,
                         adderTwoPort=self.adderTwoPort)
        self._configure(config_yml % self.args)
        self.args["proxyPort"] = self.proxyPort
        d = self.client.post(
            'http://127.0.0.1:%(proxyPort)d%(dockerEndpoint)s' % self.args,
            json.dumps({"Number": 1}),
            headers={'Content-Type': ['application/json']})
        d.addCallback(treq.json_content)
        def debug(result, *args, **kw):
            return result
        d.addCallback(debug)
        return d

    def test_adding_pre_hook_adapter(self):
        """
        A adapter has a pre-hook which increments an integral field in the
        JSON POST body called "Number" which starts with value 1.  Calling
        that pre-hook once increments the number to 2.
        """
        d = self._hookTest("""endpoints:
  "POST %(dockerEndpoint)s":
    pre: [adder]
    post: []
adapters:
  adder: http://127.0.0.1:%(adderPort)d%(adapterEndpoint)s""")
        def verify(response):
            self.assertEqual(response,
                             {"Number": 2, "SeenByFakeDocker": 42})
        d.addCallback(verify)
        return d

    def test_adding_pre_hook_twice_adapter(self):
        """
        Chaining pre-hooks: adding twice means you get +2.

        Note that the naming here is confusing.  the adapter "adder2" here
        is defined as being the **same adapter** as "adder", which
        increments by 1.  In later tests, we use a different adder on
        "adderTwoPort" which increments by 2.
        """
        d = self._hookTest("""endpoints:
  "POST %(dockerEndpoint)s":
    pre: [adder, adder2]
    post: []
adapters:
  adder: http://127.0.0.1:%(adderPort)d%(adapterEndpoint)s
  adder2: http://127.0.0.1:%(adderPort)d%(adapterEndpoint)s""")
        def verify(response):
            self.assertEqual(response,
                             {"Number": 3, "SeenByFakeDocker": 42})
        d.addCallback(verify)
        return d

    def test_adding_one_then_two_pre_hook_adapter(self):
        """
        Chaining pre-hooks: adding +1 and then +2 gives you +3.

        Note that the naming here is confusing.  the adapter "adder2" here
        is defined as being a **different adapter** to "adder", which
        increments by 2.
        """
        d = self._hookTest("""endpoints:
  "POST %(dockerEndpoint)s":
    pre: [adder, adder2]
    post: []
adapters:
  adder: http://127.0.0.1:%(adderPort)d%(adapterEndpoint)s
  adder2: http://127.0.0.1:%(adderTwoPort)d%(adapterEndpoint)s""")
        def verify(response):
            self.assertEqual(response,
                             {"Number": 4, "SeenByFakeDocker": 42})
        d.addCallback(verify)
        return d

    def test_adding_post_hook_adapter(self):
        """
        A adapter has a post-hook which increments an integral field in the
        JSON (Docker) response body called "Number".
        """
        d = self._hookTest("""endpoints:
  "POST %(dockerEndpoint)s":
    pre: []
    post: [adder]
adapters:
  adder: http://127.0.0.1:%(adderPort)d%(adapterEndpoint)s""",
            adderArgs=dict(post=True))
        def verify(response):
            self.assertEqual(response,
                             {"Number": 2, "SeenByFakeDocker": 42})
        d.addCallback(verify)
        return d

    def test_adding_post_hook_twice_adapter(self):
        """
        Chaining post-hooks: adding twice means you get +2.
        """
        d = self._hookTest("""endpoints:
  "POST %(dockerEndpoint)s":
    pre: []
    post: [adder, adder2]
adapters:
  adder: http://127.0.0.1:%(adderPort)d%(adapterEndpoint)s
  adder2: http://127.0.0.1:%(adderTwoPort)d%(adapterEndpoint)s""",
            adderArgs=dict(post=True),
            adderTwoArgs=dict(post=True))
        def verify(response):
            self.assertEqual(response,
                             {"Number": 4, "SeenByFakeDocker": 42})
        d.addCallback(verify)
        return d

    def test_stream_endpoint(self):
        """
        A streaming (aka hijacking) endpoint like /attach is permitted with
        no post-hooks (the Docker response's content-type is detected and
        the entire connection switched down into simple TCP-proxying mode
        (with support for half-close).
        """
        self._configure("endpoints: {}\nadapters: {}",
                        dockerArgs=dict(rawStream=True))
        d = self.client.post('http://127.0.0.1:%d/towel' % (self.proxyPort,),
                             json.dumps({"raw": "stream"}),
                             headers={'Content-Type': ['application/json']})
        def verify(response):
            self.assertEqual(
                response.headers.getRawHeaders("content-type"),
                ["application/vnd.docker.raw-stream"])
            # TODO Verify that half-close, and bi-directional TCP proxying
            # works.
        d.addCallback(verify)
        return d

    def test_chunked_endpoint(self):
        """
        A chunking endpoint like /pull is permitted with no post-hooks (the
        Docker response's Content-Encoding is chunked).
        """
        self._configure("endpoints: {}\nadapters: {}",
                        dockerArgs=dict(chunkedResponse=True))
        d = self.client.post('http://127.0.0.1:%d/towel' % (self.proxyPort,),
                             json.dumps({"chunked": "response"}),
                             headers={'Content-Type': ['application/json']})
        def verify(response):
            self.assertEqual(
                response.headers.getRawHeaders("transfer-encoding"),
                ["chunked"])
        d.addCallback(verify)
        return d
    test_chunked_endpoint.skip = (
        "Doesn't work yet. "
        "Need a fake docker which can emit chunked encodings.")

    def test_endpoint_GET_args(self):
        """
        An endpoint is matched when it has ?-style GET arguments (and no
        JSON body), and the GET request is passed through.
        """
        self._configure("endpoints: {}\nadapters: {}",
                        dockerArgs=dict(chunkedResponse=True))
        d = self.client.get('http://127.0.0.1:%d/info?return=fish'
                            % (self.proxyPort,))
        d.addCallback(treq.content)
        def verify(response):
            self.assertEqual(response, "INFORMATION FOR YOU: fish")
        d.addCallback(verify)
        return d

    def test_stream_endpoint_reject_post_hook(self):
        """
        A streaming (aka hijacking) endpoint like /attach is rejected if a
        post-hook is attached: a runtime error is raised when the
        Content-Type is detected.
        """
    test_stream_endpoint_reject_post_hook.skip = "not implemented yet"

    def test_chunked_endpoint_reject_post_hook(self):
        """
        A chunking endpoint like /pull is rejected if a post-hook is
        attached: a runtime error is raised when the Content-Encoding is
        detected.
        """
    test_chunked_endpoint_reject_post_hook.skip = "not implemented yet"

    def test_prehook_error_does_not_call_docker(self):
        """
        An error in the pre-hook does not call through to Docker and returns
        the error to the user.
        """
    test_prehook_error_does_not_call_docker.skip = "not implemented yet"

    def test_prehook_error_stops_chain(self):
        """
        An error in the pre-hook stops the chain when there are multiple
        pre-hooks.
        """
    test_prehook_error_stops_chain.skip = "not implemented yet"

    def test_posthook_error_stops_chain(self):
        """
        An error in the post-hook stops the chain and returns the error to
        the user.
        """
    test_posthook_error_stops_chain.skip = "not implemented yet"

    def test_docker_error_does_not_stop_posthooks(self):
        """
        If Docker returns an HTTP error code, the post-hooks are given a
        chance to take a look at it/modify it.
        """
    test_docker_error_does_not_stop_posthooks.skip = "not implemented yet"

    def test_second_pre_hook_gets_new_request_and_method(self):
        """
        Chaining pre-hooks: the next pre-hook gets the request and method
        from the previous.
        """
    test_second_pre_hook_gets_new_request_and_method.skip = "not implemented yet"

    def test_second_post_hook_gets_new_request_and_code(self):
        """
        Chaining post-hooks: the next post-hook gets the request and code
        from the previous.  Also content-type.
        """
    test_second_post_hook_gets_new_request_and_code.skip = "not implemented yet"

    def test_endpoint_globbing(self):
        """
        An endpoint is matched when there are '*' characters in the string
        """
    test_endpoint_globbing.skip = "not implemented yet"
class PowerstripFlockerTests(TestCase):
    """
    Real powerstrip-flocker tests against two nodes using the flocker
    acceptance testing framework.

    NOTE(review): this block was recovered from a whitespace-mangled source;
    the embedded multiline YAML literal was reconstructed with newlines --
    verify its exact layout against the original file.
    """
    # Slow builds because initial runs involve pulling some docker images
    # (powerstrip, and powerstrip-flocker).
    timeout = 1200

    def setUp(self):
        """
        Ready the environment for tests which actually run docker against
        powerstrip with powerstrip-flocker enabled.

        * Log into each node in turn:
          * Run powerstrip-flocker in docker
          * Run powerstrip in docker
        """
        self.agent = Agent(reactor) # no connectionpool
        self.client = HTTPClient(self.agent)
        d = get_test_cluster(self, 2)
        def got_cluster(cluster):
            self.cluster = cluster
            self.powerstripflockers = {}
            self.powerstrips = {}
            daemonReadyDeferreds = []
            self.ips = [node.address for node in cluster.nodes]
            for ip in self.ips:
                # cleanup after previous test runs
                #run(ip, ["pkill", "-f", "flocker"])
                for proc in ("powerstrip", "powerstrip-flocker"):
                    try:
                        run(ip, ["docker", "rm", "-f", proc])
                    except Exception:
                        print proc, "was not running, not killed, OK."
                # put a powerstrip config in place
                run(ip, ["mkdir", "-p", "/root/powerstrip-config"])
                run(ip, ["sh", "-c", "cat > /root/powerstrip-config/adapters.yml"],
                    """
version: 1
endpoints:
  "POST /*/containers/create":
    pre: [flocker]
adapters:
  flocker: http://powerstrip-flocker/flocker-adapter
""")
                # start powerstrip-flocker
                POWERSTRIP_FLOCKER = "%s/powerstrip-flocker:latest" % (
                    DOCKER_PULL_REPO,)
                run(ip, ["docker", "pull", POWERSTRIP_FLOCKER])
                # TODO - come up with cleaner/nicer way of powerstrip-flocker
                # being able to establish its own host uuid (or volume
                # mountpoints), such as API calls.
                host_uuid = run(ip, ["python", "-c", "import json; "
                    "print json.load(open('/etc/flocker/volume.json'))['uuid']"]).strip()
                self.powerstripflockers[ip] = remote_service_for_test(self, ip,
                    ["docker", "run", "--name=powerstrip-flocker",
                     "--expose", "80",
                     "-p", "9999:80", # so that we can detect it being up
                     "-e", "FLOCKER_CONTROL_SERVICE_BASE_URL=%s" % (
                         self.cluster.base_url,),
                     "-e", "MY_NETWORK_IDENTITY=%s" % (ip,),
                     "-e", "MY_HOST_UUID=%s" % (host_uuid,),
                     POWERSTRIP_FLOCKER])
                print "Waiting for powerstrip-flocker to show up on", ip, "..."
                daemonReadyDeferreds.append(wait_for_socket(ip, 9999))
                # start powerstrip
                # TODO - use the new unix-socket powerstrip approach.
                POWERSTRIP = "clusterhq/powerstrip:latest"
                run(ip, ["docker", "pull", POWERSTRIP])
                self.powerstrips[ip] = remote_service_for_test(self, ip,
                    ["docker", "run", "--name=powerstrip",
                     "-p", "2375:2375",
                     "-v", "/var/run/docker.sock:/var/run/docker.sock",
                     "-v", "/root/powerstrip-config/adapters.yml:"
                           "/etc/powerstrip/adapters.yml",
                     "--link", "powerstrip-flocker:powerstrip-flocker",
                     POWERSTRIP])
                print "Waiting for powerstrip to show up on", ip, "..."
                daemonReadyDeferreds.append(wait_for_socket(ip, 2375))
            d = defer.gatherResults(daemonReadyDeferreds)
            # def debug():
            #     services
            #     import pdb; pdb.set_trace()
            # d.addCallback(lambda ignored: deferLater(reactor, 1, debug))
            return d
        d.addCallback(got_cluster)
        return d

    def test_create_a_dataset(self):
        """
        Running a docker container specifying a dataset name which has never
        been created before creates it in the API.
        """
        node1, node2 = sorted(self.ips)
        fsName = "test001"
        powerstrip(node1, "docker run "
                          "-v /flocker/%s:/data busybox "
                          "sh -c 'echo 1 > /data/file'" % (fsName,))
        url = self.cluster.base_url + "/configuration/datasets"
        d = self.client.get(url)
        d.addCallback(treq.json_content)
        def verify(result):
            self.assertTrue(len(result) > 0)
            self.assertEqual(result[0]["metadata"], {"name": fsName})
            self.assertEqual(result[0]["primary"], node1)
        d.addBoth(verify)
        return d

    def test_create_a_dataset_manifests(self):
        """
        Running a docker container specifying a dataset name which has never
        been created before creates the actual filesystem and mounts it in
        place in time for the container to start.

        We can verify this by asking Docker for the information about which
        volumes are *actually* mounted in the container, then going and
        checking that the real volume path on the host contains the '1'
        written to the 'file' file specified in the docker run command...
        """
        node1, node2 = sorted(self.ips)
        fsName = "test001"
        container_id = powerstrip(node1, "docker run -d "
                                         "-v /flocker/%s:/data busybox "
                                         "sh -c 'echo fish > /data/file'"
                                         % (fsName,)).strip()
        # The volume that Docker now has mounted...
        docker_inspect = json.loads(
            run(node1, ["docker", "inspect", container_id]))
        volume = docker_inspect[0]["Volumes"].values()[0]
        # ... exists as a ZFS volume...
        zfs_volumes = shell(node1,
            "zfs list -t snapshot,filesystem -r flocker "
            "|grep %s |wc -l" % (volume,)).strip()
        self.assertEqual(int(zfs_volumes), 1)
        # ... and contains a file which contains the characters "fish".
        catFileOutput = run(node1, ["cat", "%s/file" % (volume,)]).strip()
        self.assertEqual(catFileOutput, "fish")

    def test_create_two_datasets_same_name(self):
        """
        The metadata stored about a dataset name is checked to make sure
        that no two volumes with the same name are created.  (In fact, if
        two volumes are created with the same name on the same host, it's a
        shared volume.)
        """
        node1, node2 = sorted(self.ips)
        fsName = "test001"
        # First volume...
        container_id_1 = powerstrip(node1, "docker run -d "
                                           "-v /flocker/%s:/data busybox "
                                           "sh -c 'echo fish > /data/file'"
                                           % (fsName,)).strip()
        docker_inspect = json.loads(
            run(node1, ["docker", "inspect", container_id_1]))
        volume_1 = docker_inspect[0]["Volumes"].values()[0]
        # Second volume...
        container_id_2 = powerstrip(node1, "docker run -d "
                                           "-v /flocker/%s:/data busybox "
                                           "sh -c 'echo fish > /data/file'"
                                           % (fsName,)).strip()
        docker_inspect = json.loads(
            run(node1, ["docker", "inspect", container_id_2]))
        volume_2 = docker_inspect[0]["Volumes"].values()[0]
        # ... have the same flocker UUID.
        self.assertEqual(volume_1, volume_2)

    def test_move_a_dataset(self):
        """
        Running a docker container specifying a dataset name which has been
        created before but which is no longer running moves the dataset
        before starting the container.
        """
        node1, node2 = sorted(self.ips)
        fsName = "test001"
        # Write some bytes to a volume on one host...
        powerstrip(node1, "docker run "
                          "-v /flocker/%s:/data busybox "
                          "sh -c 'echo chicken > /data/file'" % (fsName,))
        # ... and read them from the same named volume on another...
        container_id = powerstrip(node2, "docker run -d "
                                         "-v /flocker/%s:/data busybox "
                                         "sh -c 'cat /data/file'"
                                         % (fsName,)).strip()
        output = run(node2, ["docker", "logs", container_id])
        self.assertEqual(output.strip(), "chicken")

    def test_move_a_dataset_check_persistence(self):
        """
        The data in the dataset between the initial instantiation of it and
        the second instantiation of it persists.
        """
        pass
    test_move_a_dataset_check_persistence.todo = "not implemented yet"

    def test_dataset_is_not_moved_when_being_used(self):
        """
        If a container (*any* container) is currently running with a dataset
        mounted, an error is reported rather than ripping it out from
        underneath a running container.
        """
        pass
    test_dataset_is_not_moved_when_being_used.todo = "not implemented yet"

    def test_two_datasets_one_move_one_create(self):
        """
        When a docker run command mentions two datasets, one which is
        currently not running on another host, and another which is new, the
        new one gets created and the extant one gets moved.  Both operations
        complete before the container is started.
        """
        pass
    test_two_datasets_one_move_one_create.todo = "not implemented yet"
class ProxyTests(TestCase, GenerallyUsefulPowerstripTestMixin):
    """
    Integration tests for the Powerstrip proxy: requests POSTed to the proxy
    are optionally passed through pre-/post-hook "adapter" HTTP services
    before/after being forwarded to a fake Docker daemon.

    The fake Docker daemon and the proxy itself are constructed by
    ``self._configure`` (provided by ``GenerallyUsefulPowerstripTestMixin``),
    which also sets ``self.dockerServer``, ``self.proxyServer`` and
    ``self.proxyPort``.
    """

    def setUp(self):
        """
        Construct a fake "Docker daemon" (one which does much less than the
        actual Docker daemon) and a Proxy instance.

        Pre- and post-hook API servers are provided by the individual tests.
        """
        self.agent = Agent(reactor) # no connectionpool
        self.client = HTTPClient(self.agent)

    def tearDown(self):
        """
        Stop every listening server the test started.

        The adder servers are created lazily by ``_getAdder``/``_getAdderTwo``,
        so only stop them when the attributes exist.
        """
        shutdowns = [
            self.dockerServer.stopListening(),
            self.proxyServer.stopListening()]
        if hasattr(self, 'adderServer'):
            shutdowns.append(self.adderServer.stopListening())
        if hasattr(self, 'adderTwoServer'):
            shutdowns.append(self.adderTwoServer.stopListening())
        # Wait for all ports to actually close before the next test runs.
        return defer.gatherResults(shutdowns)

    def test_empty_endpoints(self):
        """
        The proxy passes through requests when no endpoints are specified.

        In particular, when POST to the /towel endpoint on the *proxy*, we get
        to see that we were seen by the (admittedly fake) Docker daemon.
        """
        self._configure("endpoints: {}\nadapters: {}")
        d = self.client.post('http://127.0.0.1:%d/towel' % (self.proxyPort,),
                      json.dumps({"hiding": "things"}),
                      headers={'Content-Type': ['application/json']})
        d.addCallback(treq.json_content)
        def verify(response):
            # The fake Docker daemon adds SeenByFakeDocker to the JSON body.
            self.assertEqual(response,
                    {"hiding": "things", "SeenByFakeDocker": 42})
        d.addCallback(verify)
        return d

    def test_empty_endpoints_socket(self):
        """
        The proxy is able to connect to Docker on a UNIX socket.
        """
        self._configure("endpoints: {}\nadapters: {}", dockerOnSocket=True)
        d = self.client.post('http://127.0.0.1:%d/towel' % (self.proxyPort,),
                      json.dumps({"hiding": "things"}),
                      headers={'Content-Type': ['application/json']})
        d.addCallback(treq.json_content)
        def verify(response):
            self.assertEqual(response,
                    {"hiding": "things", "SeenByFakeDocker": 42})
        d.addCallback(verify)
        return d

    def test_endpoint_and_empty_hooks(self):
        """
        An endpoint is specified, but no pre-or post hooks are added to it.
        Requests to the endpoint are proxied.
        """
        endpoint = "/towel"
        self._configure("""endpoints:
  "POST %s":
    pre: []
    post: []
adapters: {}""" % (endpoint,))
        d = self.client.post('http://127.0.0.1:%d%s' % (self.proxyPort, endpoint),
                      json.dumps({"hiding": "things"}),
                      headers={'Content-Type': ['application/json']})
        d.addCallback(treq.json_content)
        def verify(response):
            self.assertEqual(response,
                    {"hiding": "things", "SeenByFakeDocker": 42})
        d.addCallback(verify)
        return d

    def _getAdder(self, *args, **kw):
        # Start an "adder" adapter server (increments "Number" by 1) on an
        # ephemeral port; traffic is logged with an "adder-" prefix.
        self.adderAPI = TrafficLoggingFactory(AdderPlugin(*args, **kw),
                                              "adder-")
        self.adderServer = reactor.listenTCP(0, self.adderAPI)
        self.adderPort = self.adderServer.getHost().port

    def _getAdderTwo(self, *args, **kw):
        # Same as _getAdder but this adapter increments "Number" by 2.
        kw["incrementBy"] = 2
        self.adderTwoAPI = TrafficLoggingFactory(AdderPlugin(*args, **kw),
                                                 "adder2-")
        self.adderTwoServer = reactor.listenTCP(0, self.adderTwoAPI)
        self.adderTwoPort = self.adderTwoServer.getHost().port

    def _hookTest(self, config_yml, adderArgs=dict(pre=True),
                  adderTwoArgs=dict(pre=True)):
        """
        Generalised version of a pre-hook test.

        ``config_yml`` is a %-format template filled in with the docker and
        adapter endpoints and the two adder ports; the resulting YAML is fed
        to ``_configure``, then a ``{"Number": 1}`` JSON POST is sent through
        the proxy and the decoded JSON response is returned as a Deferred.

        NOTE(review): ``dict(pre=True)`` defaults are mutable default
        arguments; harmless here since they are only read, but fragile.
        """
        self._getAdder(**adderArgs)
        self._getAdderTwo(**adderTwoArgs)
        self.dockerEndpoint = "/towel"
        self.adapterEndpoint = "/adapter"
        self.args = dict(dockerEndpoint=self.dockerEndpoint,
                         adapterEndpoint=self.adapterEndpoint,
                         adderPort=self.adderPort,
                         adderTwoPort=self.adderTwoPort)
        self._configure(config_yml % self.args)
        self.args["proxyPort"] = self.proxyPort
        d = self.client.post('http://127.0.0.1:%(proxyPort)d%(dockerEndpoint)s' % self.args,
                      json.dumps({"Number": 1}),
                      headers={'Content-Type': ['application/json']})
        d.addCallback(treq.json_content)
        def debug(result, *args, **kw):
            # Hook point for dropping into a debugger; passes result through.
            return result
        d.addCallback(debug)
        return d

    def test_adding_pre_hook_adapter(self):
        """
        A adapter has a pre-hook which increments an integral field in the
        JSON POST body called "Number" which starts with value 1.  Calling
        that pre-hook once increments the number to 2.
        """
        d = self._hookTest("""endpoints:
  "POST %(dockerEndpoint)s":
    pre: [adder]
    post: []
adapters:
  adder: http://127.0.0.1:%(adderPort)d%(adapterEndpoint)s""")
        def verify(response):
            self.assertEqual(response,
                    {"Number": 2, "SeenByFakeDocker": 42})
        d.addCallback(verify)
        return d

    def test_adding_pre_hook_twice_adapter(self):
        """
        Chaining pre-hooks: adding twice means you get +2.

        Note that the naming here is confusing. the adapter "adder2" here is
        defined as being the **same adapter** as "adder", which increments by
        1.  In later tests, we use a different adder on "adderTwoPort" which
        increments by 2.
        """
        d = self._hookTest("""endpoints:
  "POST %(dockerEndpoint)s":
    pre: [adder, adder2]
    post: []
adapters:
  adder: http://127.0.0.1:%(adderPort)d%(adapterEndpoint)s
  adder2: http://127.0.0.1:%(adderPort)d%(adapterEndpoint)s""")
        def verify(response):
            self.assertEqual(response,
                    {"Number": 3, "SeenByFakeDocker": 42})
        d.addCallback(verify)
        return d

    def test_adding_one_then_two_pre_hook_adapter(self):
        """
        Chaining pre-hooks: adding +1 and then +2 gives you +3.

        Note that the naming here is confusing. the adapter "adder2" here is
        defined as being a **different adapter** to "adder", which increments
        by 2.
        """
        d = self._hookTest("""endpoints:
  "POST %(dockerEndpoint)s":
    pre: [adder, adder2]
    post: []
adapters:
  adder: http://127.0.0.1:%(adderPort)d%(adapterEndpoint)s
  adder2: http://127.0.0.1:%(adderTwoPort)d%(adapterEndpoint)s""")
        def verify(response):
            self.assertEqual(response,
                    {"Number": 4, "SeenByFakeDocker": 42})
        d.addCallback(verify)
        return d

    def test_adding_post_hook_adapter(self):
        """
        A adapter has a post-hook which increments an integral field in the
        JSON (Docker) response body called "Number".
        """
        d = self._hookTest("""endpoints:
  "POST %(dockerEndpoint)s":
    pre: []
    post: [adder]
adapters:
  adder: http://127.0.0.1:%(adderPort)d%(adapterEndpoint)s""",
            adderArgs=dict(post=True))
        def verify(response):
            self.assertEqual(response,
                    {"Number": 2, "SeenByFakeDocker": 42})
        d.addCallback(verify)
        return d

    def test_content_length_post_hook(self):
        """
        When the content length is changed by a post-hook, test that
        powerstrip returns the correct content as per the content-length
        """
        d = self._hookTest("""endpoints:
  "POST %(dockerEndpoint)s":
    pre: []
    post: [adder,adder,adder,adder,adder,adder,adder,adder,adder]
adapters:
  adder: http://127.0.0.1:%(adderPort)d%(adapterEndpoint)s""",
            adderArgs=dict(post=True))
        def verify(response):
            # Nine +1 post-hooks applied to the initial value 1.
            self.assertEqual(response,
                    {"Number": 10, "SeenByFakeDocker": 42})
        d.addCallback(verify)
        return d

    def test_adding_post_hook_twice_adapter(self):
        """
        Chaining post-hooks: adding twice means you get +2.
        """
        d = self._hookTest("""endpoints:
  "POST %(dockerEndpoint)s":
    pre: []
    post: [adder, adder2]
adapters:
  adder: http://127.0.0.1:%(adderPort)d%(adapterEndpoint)s
  adder2: http://127.0.0.1:%(adderTwoPort)d%(adapterEndpoint)s""",
            adderArgs=dict(post=True),
            adderTwoArgs=dict(post=True))
        def verify(response):
            self.assertEqual(response,
                    {"Number": 4, "SeenByFakeDocker": 42})
        d.addCallback(verify)
        return d

    def test_stream_endpoint(self):
        """
        A streaming (aka hijacking) endpoint like /attach is permitted with no
        post-hooks (the Docker response's content-type is detected and the
        entire connection switched down into simple TCP-proxying mode (with
        support for half-close).
        """
        self._configure("endpoints: {}\nadapters: {}",
                        dockerArgs=dict(rawStream=True))
        d = self.client.post('http://127.0.0.1:%d/towel' % (self.proxyPort,),
                      json.dumps({"raw": "stream"}),
                      headers={'Content-Type': ['application/json']})
        def verify(response):
            self.assertEqual(response.headers.getRawHeaders("content-type"),
                             ["application/vnd.docker.raw-stream"])
            # TODO Verify that half-close, and bi-directional TCP proxying
            # works.
        d.addCallback(verify)
        return d

    def test_chunked_endpoint(self):
        """
        A chunking endpoint like /pull is permitted with no post-hooks (the
        Docker response's Content-Encoding is chunked).
        """
        self._configure("endpoints: {}\nadapters: {}",
                        dockerArgs=dict(chunkedResponse=True))
        d = self.client.post('http://127.0.0.1:%d/towel' % (self.proxyPort,),
                      json.dumps({"chunked": "response"}),
                      headers={'Content-Type': ['application/json']})
        def verify(response):
            self.assertEqual(response.headers.getRawHeaders("transfer-encoding"),
                             ["chunked"])
        d.addCallback(verify)
        return d
    # trial honours a .skip attribute on test methods.
    test_chunked_endpoint.skip = ("Doesn't work yet. "
            "Need a fake docker which can emit chunked encodings.")

    def test_endpoint_GET_args(self):
        """
        An endpoint is matched when it has ?-style GET arguments (and no JSON
        body), and the GET request is passed through.
        """
        self._configure("endpoints: {}\nadapters: {}",
                        dockerArgs=dict(chunkedResponse=True))
        d = self.client.get('http://127.0.0.1:%d/info?return=fish' %
                            (self.proxyPort,))
        d.addCallback(treq.content)
        def verify(response):
            self.assertEqual(response, "INFORMATION FOR YOU: fish")
        d.addCallback(verify)
        return d

    def test_stream_endpoint_reject_post_hook(self):
        """
        A streaming (aka hijacking) endpoint like /attach is rejected if a
        post-hook is attached: a runtime error is raised when the
        Content-Type is detected.
        """
    test_stream_endpoint_reject_post_hook.skip = "not implemented yet"

    def test_chunked_endpoint_reject_post_hook(self):
        """
        A chunking endpoint like /pull is rejected if a post-hook is
        attached: a runtime error is raised when the Content-Encoding is
        detected.
        """
    test_chunked_endpoint_reject_post_hook.skip = "not implemented yet"

    def test_prehook_error_does_not_call_docker(self):
        """
        An error in the pre-hook does not call through to Docker and returns
        the error to the user.
        """
    test_prehook_error_does_not_call_docker.skip = "not implemented yet"

    def test_prehook_error_stops_chain(self):
        """
        An error in the pre-hook stops the chain when there are multiple
        pre-hooks.
        """
    test_prehook_error_stops_chain.skip = "not implemented yet"

    def test_posthook_error_stops_chain(self):
        """
        An error in the post-hook stops the chain and returns the error to
        the user.
        """
    test_posthook_error_stops_chain.skip = "not implemented yet"

    def test_docker_error_does_not_stop_posthooks(self):
        """
        If Docker returns an HTTP error code, the post-hooks are given a
        chance to take a look at it/modify it.
        """
    test_docker_error_does_not_stop_posthooks.skip = "not implemented yet"

    def test_second_pre_hook_gets_new_request_and_method(self):
        """
        Chaining pre-hooks: the next pre-hook gets the request and method
        from the previous.
        """
    test_second_pre_hook_gets_new_request_and_method.skip = "not implemented yet"

    def test_second_post_hook_gets_new_request_and_code(self):
        """
        Chaining post-hooks: the next post-hook gets the request and code
        from the previous.  Also content-type.
        """
    test_second_post_hook_gets_new_request_and_code.skip = "not implemented yet"

    def test_endpoint_globbing(self):
        """
        An endpoint is matched when there are '*' characters in the string
        """
    test_endpoint_globbing.skip = "not implemented yet"
class PowerstripFlockerTests(TestCase):
    """
    Real flocker-plugin tests against two nodes using the flocker acceptance
    testing framework.
    """
    # Slow builds because initial runs involve pulling some docker images
    # (flocker-plugin).
    timeout = 1200

    def _buildDockerOnce(self):
        """
        Using blocking APIs, build docker once per test run.

        BUILD_ONCE is a module-level list used as a "done" flag so the
        (expensive) build happens at most once per process.  The build only
        runs if a docker source checkout exists at DOCKER_PATH.
        """
        if len(BUILD_ONCE):
            return
        if path.exists(DOCKER_PATH):
            dockerCmd = ("cd %(dockerDir)s;"
                         "docker build -t custom-docker .;"
                         "docker run --privileged --rm "
                         "-e DOCKER_EXPERIMENTAL=1 "
                         "-e DOCKER_GITCOMMIT=`git log -1 --format=%%h` "
                         "-v %(dockerDir)s:/go/src/github.com/docker/docker "
                         "custom-docker hack/make.sh binary" % dict(
                             dockerDir=DOCKER_PATH))
            print "Running docker command:", dockerCmd
            exit = system(dockerCmd)
            if exit > 0:
                raise Exception("failed to build docker")
        BUILD_ONCE.append(1)

    def _injectDockerOnce(self, ip):
        """
        Using blocking APIs, copy the docker binary from whence it was built
        in _buildDockerOnce to the given ip.

        INJECT_ONCE maps each node address to a "done" flag list so the copy
        happens at most once per node per process.
        """
        if ip not in INJECT_ONCE:
            INJECT_ONCE[ip] = []
        if len(INJECT_ONCE[ip]):
            return
        if path.exists(DOCKER_PATH):
            # e.g. 1.5.0-plugins
            dockerVersion = "1.7.0-dev-experimental"
            # XXX Docker need to update their VERSION file
            # NOTE(review): this read's result is discarded — looks like a
            # leftover from deriving dockerVersion from the VERSION file.
            open("%s/VERSION" % (DOCKER_PATH,)).read().strip()
            binaryPath = "%(dockerDir)s/bundles/%(dockerVersion)s/binary/docker-%(dockerVersion)s" % dict(
                    dockerDir=DOCKER_PATH,
                    dockerVersion=dockerVersion)
            hostBinaryPath = "/usr/bin/docker"
            key = "/home/buildslave/.ssh/id_rsa_flocker"
            exit = system("scp -o StrictHostKeyChecking=no -o UserKnownHostsFile=/dev/null "
                          "-i %(key)s %(binaryPath)s root@%(ip)s:%(hostBinaryPath)s" % dict(
                              key=key, hostBinaryPath=hostBinaryPath,
                              binaryPath=binaryPath, ip=ip))
            if exit > 0:
                raise Exception("failed to inject docker into %(ip)s" % dict(ip=ip))
        INJECT_ONCE[ip].append(1)

    def setUp(self):
        """
        Ready the environment for tests which actually run docker with
        flocker-plugin enabled.

        * Log into each node in turn:
           * Load flocker-plugin into docker
        """
        self.agent = Agent(reactor) # no connectionpool
        self.client = HTTPClient(self.agent)
        d = get_test_cluster(self, 2)
        def got_cluster(cluster):
            self.cluster = cluster
            self.plugins = {}
            daemonReadyDeferreds = []
            self.ips = [node.address for node in cluster.nodes]
            # Build docker if necessary (if there's a docker submodule)
            self._buildDockerOnce()
            for ip in self.ips:
                # cleanup after previous test runs
                #run(ip, ["pkill", "-f", "flocker"])
                shell(ip, "sleep 5 && initctl stop docker || true")
                # Copy docker into the respective node
                self._injectDockerOnce(ip)
                # workaround https://github.com/calavera/docker/pull/4#issuecomment-100046383
                shell(ip, "mkdir -p %s" % (PLUGIN_DIR,))
                # cleanup stale sockets
                shell(ip, "rm -f %s/*" % (PLUGIN_DIR,))
                #shell(ip, "supervisorctl stop flocker-agent")
                #shell(ip, "supervisorctl start flocker-agent")
                # NOTE(review): the following triple-quoted strings are
                # commented-out code kept as no-op string statements.
                """
                for container in ("flocker",):
                    try:
                        run(ip, ["docker", "rm", "-f", container])
                    except Exception:
                        print container, "was not running, not killed, OK."
                # start flocker-plugin
                FLOCKER_PLUGIN = "%s/flocker-plugin:%s" % (DOCKER_PULL_REPO, PF_VERSION)
                run(ip, ["docker", "pull", FLOCKER_PLUGIN])
                """
                # TODO - come up with cleaner/nicer way of flocker-plugin
                # being able to establish its own host uuid (or volume
                # mountpoints), such as API calls.
                # See https://github.com/ClusterHQ/flocker-plugin/issues/2
                # for how to do this now.
                """
                self.plugins[ip] = remote_service_for_test(self, ip,
                    ["docker", "run", "--name=flocker",
                        "-v", "%s:%s" % (PLUGIN_DIR, PLUGIN_DIR),
                        "-e", "FLOCKER_CONTROL_SERVICE_BASE_URL=%s" % (self.cluster.base_url,),
                        "-e", "MY_NETWORK_IDENTITY=%s" % (ip,),
                        "-e", "MY_HOST_UUID=%s" % (host_uuid,),
                        FLOCKER_PLUGIN])
                """
                # Read the node's flocker host uuid off the node itself.
                host_uuid = run(ip, ["python", "-c", "import json; "
                    "print json.load(open('/etc/flocker/volume.json'))['uuid']"]).strip()
                # Clone (once) and run the plugin under twistd on the node,
                # pointed at the cluster's control service.
                cmd = ("cd /root && if [ ! -e powerstrip-flocker ]; then "
                       "git clone https://github.com/clusterhq/powerstrip-flocker && "
                       "cd powerstrip-flocker && "
                       "git checkout %s && cd /root;" % (PF_VERSION,) +
                       "fi && cd /root/powerstrip-flocker && " +
                       "FLOCKER_CONTROL_SERVICE_BASE_URL=%s" % (self.cluster.base_url,) +
                       " MY_NETWORK_IDENTITY=%s" % (ip,) +
                       " MY_HOST_UUID=%s" % (host_uuid,) +
                       " twistd -noy flockerdockerplugin.tac")
                print "CMD >>", cmd
                self.plugins[ip] = remote_service_for_test(self, ip,
                        ["bash", "-c", cmd])
                # XXX Better not to have sleep 5 in here but hey
                shell(ip, "sleep 5 && initctl start docker")
                print "Waiting for flocker-plugin to show up on", ip, "..."
                # XXX This will only work for the first test, need to restart
                # docker in tearDown.
                daemonReadyDeferreds.append(wait_for_plugin(ip))
            d = defer.gatherResults(daemonReadyDeferreds)
            # def debug():
            #     services
            #     import pdb; pdb.set_trace()
            # d.addCallback(lambda ignored: deferLater(reactor, 1, debug))
            return d
        d.addCallback(got_cluster)
        return d

    def test_create_a_dataset(self):
        """
        Running a docker container specifying a dataset name which has never
        been created before creates it in the API.
        """
        node1, node2 = sorted(self.ips)
        fsName = "test001"
        print "About to run docker run..."
        shell(node1, "docker run "
                     "-v %s:/data --volume-driver=flocker busybox "
                     "sh -c 'echo 1 > /data/file'" % (fsName,))
        url = self.cluster.base_url + "/configuration/datasets"
        d = self.client.get(url)
        d.addCallback(treq.json_content)
        def verify(result):
            self.assertTrue(len(result) > 0)
            self.assertEqual(result[0]["metadata"], {"name": fsName})
            #self.assertEqual(result[0]["primary"], node1)
        d.addBoth(verify)
        return d

    def test_create_a_dataset_manifests(self):
        """
        Running a docker container specifying a dataset name which has never
        been created before creates the actual filesystem and mounts it in
        place in time for the container to start.

        We can verify this by asking Docker for the information about which
        volumes are *actually* mounted in the container, then going and
        checking that the real volume path on the host contains the '1'
        written to the 'file' file specified in the docker run command...
        """
        node1, node2 = sorted(self.ips)
        fsName = "test001"
        shell(node1, "docker run -d "
                     "-v %s:/data --volume-driver=flocker busybox "
                     "sh -c 'echo fish > /data/file'" % (fsName,)).strip()
        # The volume that Docker now has mounted exists as a ZFS volume...
        zfs_volumes = shell(node1,
                "zfs list -t snapshot,filesystem -r flocker "
                "|grep flocker/ |wc -l").strip()
        self.assertEqual(int(zfs_volumes), 1)
        # ... and contains a file which contains the characters "fish".
        catFileOutput = shell(node1, "docker run "
                     "-v %s:/data --volume-driver=flocker busybox "
                     "cat /data/file" % (fsName,)).strip()
        self.assertEqual(catFileOutput, "fish")

    def test_create_two_datasets_same_name(self):
        """
        The metadata stored about a dataset name is checked to make sure that
        no two volumes with the same name are created.  (In fact, if two
        volumes are created with the same name on the same host, it's a
        shared volume.)
        """
        node1, node2 = sorted(self.ips)
        fsName = "test001"
        # First volume...
        container_id_1 = shell(node1, "docker run -d "
                     "-v %s:/data --volume-driver=flocker busybox "
                     "sh -c 'echo fish > /data/file'" % (fsName,)).strip()
        docker_inspect = json.loads(
                run(node1, ["docker", "inspect", container_id_1]))
        volume_1 = docker_inspect[0]["Volumes"].values()[0]
        # Second volume...
        container_id_2 = shell(node1, "docker run -d "
                     "-v %s:/data --volume-driver=flocker busybox "
                     "sh -c 'echo fish > /data/file'" % (fsName,)).strip()
        docker_inspect = json.loads(
                run(node1, ["docker", "inspect", container_id_2]))
        volume_2 = docker_inspect[0]["Volumes"].values()[0]
        # ... have the same flocker UUID.
        self.assertEqual(volume_1, volume_2)

    def test_move_a_dataset(self):
        """
        Running a docker container specifying a dataset name which has been
        created before but which is no longer running moves the dataset
        before starting the container.
        """
        node1, node2 = sorted(self.ips)
        fsName = "test001"
        # Write some bytes to a volume on one host...
        shell(node1, "docker run "
                     "-v %s:/data --volume-driver=flocker busybox "
                     "sh -c 'echo chicken > /data/file'" % (fsName,))
        # ... and read them from the same named volume on another...
        container_id = shell(node2, "docker run -d "
                     "-v %s:/data --volume-driver=flocker busybox "
                     "sh -c 'cat /data/file'" % (fsName,)).strip()
        output = run(node2, ["docker", "logs", container_id])
        self.assertEqual(output.strip(), "chicken")

    def test_move_a_dataset_check_persistence(self):
        """
        The data in the dataset between the initial instantiation of it and
        the second instantiation of it persists.
        """
        pass
    test_move_a_dataset_check_persistence.skip = "not implemented yet"

    def test_dataset_is_not_moved_when_being_used(self):
        """
        If a container (*any* container) is currently running with a dataset
        mounted, an error is reported rather than ripping it out from
        underneath a running container.
        """
        pass
    test_dataset_is_not_moved_when_being_used.skip = "not implemented yet"

    def test_two_datasets_one_move_one_create(self):
        """
        When a docker run command mentions two datasets, one which is
        currently not running on another host, and another which is new, the
        new one gets created and the extant one gets moved.  Both operations
        complete before the container is started.
        """
        pass
    test_two_datasets_one_move_one_create.skip = "not implemented yet"
class HTTPClientTests(TestCase):
    """
    Unit tests for treq's HTTPClient request construction: how method,
    params, data, files, headers and timeout arguments are translated into
    ``Agent.request`` calls and body producers.

    The Agent is a mock, and both FileBodyProducer and MultiPartProducer are
    patched out so the exact arguments passed to them can be asserted.

    NOTE(review): this file defines a second ``HTTPClientTests`` class later;
    the later definition shadows this one at import time.
    """

    def setUp(self):
        self.agent = mock.Mock(Agent)
        self.client = HTTPClient(self.agent)

        self.fbp_patcher = mock.patch('treq.client.FileBodyProducer')
        self.FileBodyProducer = self.fbp_patcher.start()
        self.addCleanup(self.fbp_patcher.stop)

        self.mbp_patcher = mock.patch('treq.multipart.MultiPartProducer')
        self.MultiPartProducer = self.mbp_patcher.start()
        self.addCleanup(self.mbp_patcher.stop)

    def assertBody(self, expected):
        # The first positional argument of the first FileBodyProducer call is
        # the file-like body; read it back and compare.
        body = self.FileBodyProducer.mock_calls[0][1][0]
        self.assertEqual(body.read(), expected)

    def test_request_case_insensitive_methods(self):
        self.client.request('gEt', 'http://example.com/')
        self.agent.request.assert_called_once_with(
            'GET', 'http://example.com/',
            headers=Headers({}), bodyProducer=None)

    def test_request_query_params(self):
        self.client.request('GET', 'http://example.com/',
                            params={'foo': ['bar']})
        self.agent.request.assert_called_once_with(
            'GET', 'http://example.com/?foo=bar',
            headers=Headers({}), bodyProducer=None)

    def test_request_tuple_query_values(self):
        self.client.request('GET', 'http://example.com/',
                            params={'foo': ('bar',)})
        self.agent.request.assert_called_once_with(
            'GET', 'http://example.com/?foo=bar',
            headers=Headers({}), bodyProducer=None)

    def test_request_merge_query_params(self):
        # Params are appended after any query string already in the URL.
        self.client.request('GET', 'http://example.com/?baz=bax',
                            params={'foo': ['bar', 'baz']})
        self.agent.request.assert_called_once_with(
            'GET', 'http://example.com/?baz=bax&foo=bar&foo=baz',
            headers=Headers({}), bodyProducer=None)

    def test_request_merge_tuple_query_params(self):
        self.client.request('GET', 'http://example.com/?baz=bax',
                            params=[('foo', 'bar')])
        self.agent.request.assert_called_once_with(
            'GET', 'http://example.com/?baz=bax&foo=bar',
            headers=Headers({}), bodyProducer=None)

    def test_request_dict_single_value_query_params(self):
        self.client.request('GET', 'http://example.com/',
                            params={'foo': 'bar'})
        self.agent.request.assert_called_once_with(
            'GET', 'http://example.com/?foo=bar',
            headers=Headers({}), bodyProducer=None)

    def test_request_data_dict(self):
        # dict data is form-urlencoded and the Content-Type set accordingly.
        self.client.request('POST', 'http://example.com/',
                            data={'foo': ['bar', 'baz']})
        self.agent.request.assert_called_once_with(
            'POST', 'http://example.com/',
            headers=Headers(
                {'Content-Type': ['application/x-www-form-urlencoded']}),
            bodyProducer=self.FileBodyProducer.return_value)
        self.assertBody('foo=bar&foo=baz')

    def test_request_data_single_dict(self):
        self.client.request('POST', 'http://example.com/',
                            data={'foo': 'bar'})
        self.agent.request.assert_called_once_with(
            'POST', 'http://example.com/',
            headers=Headers(
                {'Content-Type': ['application/x-www-form-urlencoded']}),
            bodyProducer=self.FileBodyProducer.return_value)
        self.assertBody('foo=bar')

    def test_request_data_tuple(self):
        self.client.request('POST', 'http://example.com/',
                            data=[('foo', 'bar')])
        self.agent.request.assert_called_once_with(
            'POST', 'http://example.com/',
            headers=Headers(
                {'Content-Type': ['application/x-www-form-urlencoded']}),
            bodyProducer=self.FileBodyProducer.return_value)
        self.assertBody('foo=bar')

    def test_request_data_file(self):
        # File-like data is streamed as-is with no Content-Type guess.
        temp_fn = self.mktemp()

        with open(temp_fn, "w") as temp_file:
            temp_file.write('hello')

        self.client.request('POST', 'http://example.com/',
                            data=file(temp_fn))
        self.agent.request.assert_called_once_with(
            'POST', 'http://example.com/',
            headers=Headers({}),
            bodyProducer=self.FileBodyProducer.return_value)
        self.assertBody('hello')

    @mock.patch('treq.client.uuid.uuid4', mock.Mock(return_value="heyDavid"))
    def test_request_no_name_attachment(self):
        # uuid4 is patched so the multipart boundary is deterministic.
        self.client.request(
            'POST', 'http://example.com/', files={"name": StringIO("hello")})
        self.agent.request.assert_called_once_with(
            'POST', 'http://example.com/',
            headers=Headers({
                'Content-Type': [
                    'multipart/form-data; boundary=heyDavid']}),
            bodyProducer=self.MultiPartProducer.return_value)
        FP = self.FileBodyProducer.return_value
        self.assertEqual(
            mock.call(
                [('name', (None, 'application/octet-stream', FP))],
                boundary='heyDavid'),
            self.MultiPartProducer.call_args)

    @mock.patch('treq.client.uuid.uuid4', mock.Mock(return_value="heyDavid"))
    def test_request_named_attachment(self):
        # A (filename, file) tuple: content type is guessed from the name.
        self.client.request(
            'POST', 'http://example.com/', files={
                "name": ('image.jpg', StringIO("hello"))})
        self.agent.request.assert_called_once_with(
            'POST', 'http://example.com/',
            headers=Headers({
                'Content-Type': [
                    'multipart/form-data; boundary=heyDavid']}),
            bodyProducer=self.MultiPartProducer.return_value)
        FP = self.FileBodyProducer.return_value
        self.assertEqual(
            mock.call(
                [('name', ('image.jpg', 'image/jpeg', FP))],
                boundary='heyDavid'),
            self.MultiPartProducer.call_args)

    @mock.patch('treq.client.uuid.uuid4', mock.Mock(return_value="heyDavid"))
    def test_request_named_attachment_and_ctype(self):
        # A (filename, content-type, file) triple: explicit type wins.
        self.client.request(
            'POST', 'http://example.com/', files={
                "name": ('image.jpg', 'text/plain', StringIO("hello"))})
        self.agent.request.assert_called_once_with(
            'POST', 'http://example.com/',
            headers=Headers({
                'Content-Type': [
                    'multipart/form-data; boundary=heyDavid']}),
            bodyProducer=self.MultiPartProducer.return_value)
        FP = self.FileBodyProducer.return_value
        self.assertEqual(
            mock.call(
                [('name', ('image.jpg', 'text/plain', FP))],
                boundary='heyDavid'),
            self.MultiPartProducer.call_args)

    @mock.patch('treq.client.uuid.uuid4', mock.Mock(return_value="heyDavid"))
    def test_request_mixed_params(self):

        class NamedFile(StringIO):
            def __init__(self, val):
                StringIO.__init__(self, val)
                self.name = "image.png"

        self.client.request(
            'POST', 'http://example.com/',
            data=[("a", "b"), ("key", "val")],
            files=[
                ("file1", ('image.jpg', StringIO("hello"))),
                ("file2", NamedFile("yo"))])
        self.agent.request.assert_called_once_with(
            'POST', 'http://example.com/',
            headers=Headers({
                'Content-Type': [
                    'multipart/form-data; boundary=heyDavid']}),
            bodyProducer=self.MultiPartProducer.return_value)
        FP = self.FileBodyProducer.return_value
        self.assertEqual(
            mock.call([
                ('a', 'b'),
                ('key', 'val'),
                ('file1', ('image.jpg', 'image/jpeg', FP)),
                ('file2', ('image.png', 'image/png', FP))],
                boundary='heyDavid'),
            self.MultiPartProducer.call_args)

    @mock.patch('treq.client.uuid.uuid4', mock.Mock(return_value="heyDavid"))
    def test_request_mixed_params_dict(self):
        self.client.request(
            'POST', 'http://example.com/',
            data={"key": "a", "key2": "b"},
            files={"file1": StringIO("hey")})
        self.agent.request.assert_called_once_with(
            'POST', 'http://example.com/',
            headers=Headers({
                'Content-Type': [
                    'multipart/form-data; boundary=heyDavid']}),
            bodyProducer=self.MultiPartProducer.return_value)
        FP = self.FileBodyProducer.return_value
        self.assertEqual(
            mock.call([
                ('key', 'a'),
                ('key2', 'b'),
                ('file1', (None, 'application/octet-stream', FP))],
                boundary='heyDavid'),
            self.MultiPartProducer.call_args)

    def test_request_unsupported_params_combination(self):
        # Raw file-like data cannot be combined with multipart files.
        self.assertRaises(ValueError,
                          self.client.request,
                          'POST', 'http://example.com/',
                          data=StringIO("yo"),
                          files={"file1": StringIO("hey")})

    def test_request_dict_headers(self):
        # Scalar header values are wrapped in a list; lists pass through.
        self.client.request('GET', 'http://example.com/', headers={
            'User-Agent': 'treq/0.1dev',
            'Accept': ['application/json', 'text/plain']
        })
        self.agent.request.assert_called_once_with(
            'GET', 'http://example.com/',
            headers=Headers({'User-Agent': ['treq/0.1dev'],
                             'Accept': ['application/json', 'text/plain']}),
            bodyProducer=None)

    @with_clock
    def test_request_timeout_fired(self, clock):
        """
        Verify the request is cancelled if a response is not received
        within specified timeout period.
        """
        self.client.request('GET', 'http://example.com', timeout=2)

        # simulate we haven't gotten a response within timeout seconds
        clock.advance(3)

        deferred = self.agent.request.return_value

        # a deferred should have been cancelled
        self.assertTrue(deferred.cancel.called)

    @with_clock
    def test_request_timeout_cancelled(self, clock):
        """
        Verify timeout is cancelled if a response is received before
        timeout period elapses.
        """
        self.client.request('GET', 'http://example.com', timeout=2)

        # simulate a response
        deferred = self.agent.request.return_value
        gotResult = deferred.addBoth.call_args[0][0]
        gotResult('result')

        # now advance the clock but since we already got a result,
        # a cancellation timer should have been cancelled
        clock.advance(3)

        self.assertFalse(deferred.cancel.called)

    def test_response_is_buffered(self):
        # The first deliverBody drains the real response; repeat calls must
        # not hit the underlying response again.
        response = mock.Mock(deliverBody=mock.Mock())

        self.agent.request.return_value = succeed(response)

        d = self.client.get('http://www.example.com')

        result = self.successResultOf(d)

        protocol = mock.Mock(Protocol)
        result.deliverBody(protocol)
        self.assertEqual(response.deliverBody.call_count, 1)

        result.deliverBody(protocol)
        self.assertEqual(response.deliverBody.call_count, 1)

    def test_response_buffering_is_disabled_with_unbufferred_arg(self):
        response = mock.Mock()

        self.agent.request.return_value = succeed(response)

        d = self.client.get('http://www.example.com', unbuffered=True)

        # YOLO public attribute.
        self.assertEqual(self.successResultOf(d).original, response)
class HTTPClientTests(TestCase):
    """
    Unit tests for ``HTTPClient.request``: each test drives the client
    against a mocked ``Agent`` and asserts on the exact method, URI, headers
    and body producer handed to ``Agent.request``.  No network I/O occurs.
    """

    def setUp(self):
        # Mock the Agent so requests never leave the process, and patch both
        # body-producer classes so the data handed to them can be inspected.
        self.agent = mock.Mock(Agent)
        self.client = HTTPClient(self.agent)

        self.fbp_patcher = mock.patch("treq.client.FileBodyProducer")
        self.FileBodyProducer = self.fbp_patcher.start()
        self.addCleanup(self.fbp_patcher.stop)

        self.mbp_patcher = mock.patch("treq.multipart.MultiPartProducer")
        self.MultiPartProducer = self.mbp_patcher.start()
        self.addCleanup(self.mbp_patcher.stop)

    def assertBody(self, expected):
        """
        Assert that the body passed to the patched ``FileBodyProducer``
        reads back as ``expected``.
        """
        body = self.FileBodyProducer.mock_calls[0][1][0]
        self.assertEqual(body.read(), expected)

    def test_request_case_insensitive_methods(self):
        self.client.request("gEt", "http://example.com/")
        self.agent.request.assert_called_once_with("GET", "http://example.com/",
                                                   headers=Headers({}), bodyProducer=None)

    def test_request_query_params(self):
        self.client.request("GET", "http://example.com/", params={"foo": ["bar"]})
        self.agent.request.assert_called_once_with(
            "GET", "http://example.com/?foo=bar", headers=Headers({}), bodyProducer=None
        )

    def test_request_tuple_query_values(self):
        self.client.request("GET", "http://example.com/", params={"foo": ("bar",)})
        self.agent.request.assert_called_once_with(
            "GET", "http://example.com/?foo=bar", headers=Headers({}), bodyProducer=None
        )

    def test_request_merge_query_params(self):
        # Query params given in ``params`` are appended after those already
        # present in the URI.
        self.client.request("GET", "http://example.com/?baz=bax", params={"foo": ["bar", "baz"]})
        self.agent.request.assert_called_once_with(
            "GET", "http://example.com/?baz=bax&foo=bar&foo=baz", headers=Headers({}), bodyProducer=None
        )

    def test_request_merge_tuple_query_params(self):
        self.client.request("GET", "http://example.com/?baz=bax", params=[("foo", "bar")])
        self.agent.request.assert_called_once_with(
            "GET", "http://example.com/?baz=bax&foo=bar", headers=Headers({}), bodyProducer=None
        )

    def test_request_dict_single_value_query_params(self):
        self.client.request("GET", "http://example.com/", params={"foo": "bar"})
        self.agent.request.assert_called_once_with(
            "GET", "http://example.com/?foo=bar", headers=Headers({}), bodyProducer=None
        )

    def test_request_data_dict(self):
        self.client.request("POST", "http://example.com/", data={"foo": ["bar", "baz"]})
        self.agent.request.assert_called_once_with(
            "POST", "http://example.com/",
            headers=Headers({"Content-Type": ["application/x-www-form-urlencoded"]}),
            bodyProducer=self.FileBodyProducer.return_value,
        )
        self.assertBody("foo=bar&foo=baz")

    def test_request_data_single_dict(self):
        self.client.request("POST", "http://example.com/", data={"foo": "bar"})
        self.agent.request.assert_called_once_with(
            "POST", "http://example.com/",
            headers=Headers({"Content-Type": ["application/x-www-form-urlencoded"]}),
            bodyProducer=self.FileBodyProducer.return_value,
        )
        self.assertBody("foo=bar")

    def test_request_data_tuple(self):
        self.client.request("POST", "http://example.com/", data=[("foo", "bar")])
        self.agent.request.assert_called_once_with(
            "POST", "http://example.com/",
            headers=Headers({"Content-Type": ["application/x-www-form-urlencoded"]}),
            bodyProducer=self.FileBodyProducer.return_value,
        )
        self.assertBody("foo=bar")

    def test_request_data_file(self):
        temp_fn = self.mktemp()

        with open(temp_fn, "w") as temp_file:
            temp_file.write("hello")

        # ``open`` replaces the Python-2-only ``file`` builtin (removed in
        # Python 3); the patched FileBodyProducer owns the handle from here.
        self.client.request("POST", "http://example.com/", data=open(temp_fn))
        self.agent.request.assert_called_once_with(
            "POST", "http://example.com/", headers=Headers({}),
            bodyProducer=self.FileBodyProducer.return_value
        )
        self.assertBody("hello")

    @mock.patch("treq.client.uuid.uuid4", mock.Mock(return_value="heyDavid"))
    def test_request_no_name_attachment(self):
        # A bare file-like object gets no filename and the generic
        # application/octet-stream content type.
        self.client.request("POST", "http://example.com/", files={"name": StringIO("hello")})
        self.agent.request.assert_called_once_with(
            "POST", "http://example.com/",
            headers=Headers({"Content-Type": ["multipart/form-data; boundary=heyDavid"]}),
            bodyProducer=self.MultiPartProducer.return_value,
        )
        FP = self.FileBodyProducer.return_value
        self.assertEqual(
            mock.call([("name", (None, "application/octet-stream", FP))], boundary="heyDavid"),
            self.MultiPartProducer.call_args,
        )

    @mock.patch("treq.client.uuid.uuid4", mock.Mock(return_value="heyDavid"))
    def test_request_named_attachment(self):
        # A (filename, fileobj) pair: content type is guessed from the name.
        self.client.request("POST", "http://example.com/",
                            files={"name": ("image.jpg", StringIO("hello"))})
        self.agent.request.assert_called_once_with(
            "POST", "http://example.com/",
            headers=Headers({"Content-Type": ["multipart/form-data; boundary=heyDavid"]}),
            bodyProducer=self.MultiPartProducer.return_value,
        )
        FP = self.FileBodyProducer.return_value
        self.assertEqual(
            mock.call([("name", ("image.jpg", "image/jpeg", FP))], boundary="heyDavid"),
            self.MultiPartProducer.call_args,
        )

    @mock.patch("treq.client.uuid.uuid4", mock.Mock(return_value="heyDavid"))
    def test_request_named_attachment_and_ctype(self):
        # A (filename, content-type, fileobj) triple: explicit content type
        # wins over the guess from the filename.
        self.client.request(
            "POST", "http://example.com/",
            files={"name": ("image.jpg", "text/plain", StringIO("hello"))}
        )
        self.agent.request.assert_called_once_with(
            "POST", "http://example.com/",
            headers=Headers({"Content-Type": ["multipart/form-data; boundary=heyDavid"]}),
            bodyProducer=self.MultiPartProducer.return_value,
        )
        FP = self.FileBodyProducer.return_value
        self.assertEqual(
            mock.call([("name", ("image.jpg", "text/plain", FP))], boundary="heyDavid"),
            self.MultiPartProducer.call_args,
        )

    @mock.patch("treq.client.uuid.uuid4", mock.Mock(return_value="heyDavid"))
    def test_request_mixed_params(self):
        class NamedFile(StringIO):
            # A file-like object carrying a .name attribute, from which the
            # filename and content type are derived.
            def __init__(self, val):
                StringIO.__init__(self, val)
                self.name = "image.png"

        self.client.request(
            "POST", "http://example.com/",
            data=[("a", "b"), ("key", "val")],
            files=[("file1", ("image.jpg", StringIO("hello"))), ("file2", NamedFile("yo"))],
        )
        self.agent.request.assert_called_once_with(
            "POST", "http://example.com/",
            headers=Headers({"Content-Type": ["multipart/form-data; boundary=heyDavid"]}),
            bodyProducer=self.MultiPartProducer.return_value,
        )
        FP = self.FileBodyProducer.return_value
        self.assertEqual(
            mock.call(
                [
                    ("a", "b"),
                    ("key", "val"),
                    ("file1", ("image.jpg", "image/jpeg", FP)),
                    ("file2", ("image.png", "image/png", FP)),
                ],
                boundary="heyDavid",
            ),
            self.MultiPartProducer.call_args,
        )

    @mock.patch("treq.client.uuid.uuid4", mock.Mock(return_value="heyDavid"))
    def test_request_mixed_params_dict(self):
        self.client.request(
            "POST", "http://example.com/", data={"key": "a", "key2": "b"},
            files={"file1": StringIO("hey")}
        )
        self.agent.request.assert_called_once_with(
            "POST", "http://example.com/",
            headers=Headers({"Content-Type": ["multipart/form-data; boundary=heyDavid"]}),
            bodyProducer=self.MultiPartProducer.return_value,
        )
        FP = self.FileBodyProducer.return_value
        self.assertEqual(
            mock.call(
                [("key", "a"), ("key2", "b"), ("file1", (None, "application/octet-stream", FP))],
                boundary="heyDavid"
            ),
            self.MultiPartProducer.call_args,
        )

    def test_request_unsupported_params_combination(self):
        # A raw file-like ``data`` body cannot be combined with ``files``.
        self.assertRaises(
            ValueError,
            self.client.request,
            "POST", "http://example.com/",
            data=StringIO("yo"),
            files={"file1": StringIO("hey")},
        )

    def test_request_dict_headers(self):
        # Scalar header values are wrapped into single-element lists.
        self.client.request(
            "GET", "http://example.com/",
            headers={"User-Agent": "treq/0.1dev", "Accept": ["application/json", "text/plain"]},
        )
        self.agent.request.assert_called_once_with(
            "GET", "http://example.com/",
            headers=Headers({"User-Agent": ["treq/0.1dev"],
                             "Accept": ["application/json", "text/plain"]}),
            bodyProducer=None,
        )

    @with_clock
    def test_request_timeout_fired(self, clock):
        """
        Verify the request is cancelled if a response
        is not received within specified timeout period.
        """
        self.client.request("GET", "http://example.com", timeout=2)

        # simulate we haven't gotten a response within timeout seconds
        clock.advance(3)

        deferred = self.agent.request.return_value

        # a deferred should have been cancelled
        self.assertTrue(deferred.cancel.called)

    @with_clock
    def test_request_timeout_cancelled(self, clock):
        """
        Verify timeout is cancelled if a response is
        received before timeout period elapses.
        """
        self.client.request("GET", "http://example.com", timeout=2)

        # simulate a response
        deferred = self.agent.request.return_value
        gotResult = deferred.addBoth.call_args[0][0]
        gotResult("result")

        # now advance the clock but since we already got a result,
        # a cancellation timer should have been cancelled
        clock.advance(3)

        self.assertFalse(deferred.cancel.called)

    def test_response_is_buffered(self):
        # The buffered wrapper delivers the body at most once to the
        # underlying response, replaying it afterwards.
        response = mock.Mock(deliverBody=mock.Mock())

        self.agent.request.return_value = succeed(response)

        d = self.client.get("http://www.example.com")

        result = self.successResultOf(d)

        protocol = mock.Mock(Protocol)
        result.deliverBody(protocol)
        self.assertEqual(response.deliverBody.call_count, 1)

        result.deliverBody(protocol)
        self.assertEqual(response.deliverBody.call_count, 1)

    def test_response_buffering_is_disabled_with_unbufferred_arg(self):
        # With unbuffered=True the raw response object is returned as-is.
        response = mock.Mock()

        self.agent.request.return_value = succeed(response)

        d = self.client.get("http://www.example.com", unbuffered=True)

        self.assertEqual(self.successResultOf(d), response)
class PowerstripFlockerTests(TestCase):
    """
    Real powerstrip-flocker tests against two nodes using the flocker
    acceptance testing framework.
    """
    # Slow builds because initial runs involve pulling some docker images
    # (powerstrip, and powerstrip-flocker).
    timeout = 1200

    def setUp(self):
        """
        Ready the environment for tests which actually run docker against
        powerstrip with powerstrip-flocker enabled.

        * Log into each node in turn:
           * Run powerstrip-flocker in docker
           * Run powerstrip in docker
        """
        # Plain Twisted HTTP client used to talk to the flocker control
        # service API in the tests below.
        self.agent = Agent(reactor) # no connectionpool
        self.client = HTTPClient(self.agent)
        # wait_for_cluster is a project helper; it fires with a cluster
        # object exposing .nodes and .base_url once 2 nodes are up.
        d = wait_for_cluster(self, 2)
        def got_cluster(cluster):
            self.cluster = cluster
            self.powerstripflockers = {}
            self.powerstrips = {}
            daemonReadyDeferreds = []
            self.ips = [node.address for node in cluster.nodes]
            for ip in self.ips:
                # cleanup after previous test runs
                #run(ip, ["pkill", "-f", "flocker"])
                for proc in ("powerstrip", "powerstrip-flocker"):
                    # Best-effort removal of leftover containers; the
                    # exception just means there was nothing to remove.
                    try:
                        run(ip, ["docker", "rm", "-f", proc])
                    except Exception:
                        print proc, "was not running, not killed, OK."
                # put a powerstrip config in place
                run(ip, ["mkdir", "-p", "/root/powerstrip-config"])
                # Write the powerstrip adapter config via stdin of a remote
                # ``cat``; the YAML routes container-create pre-hooks to the
                # powerstrip-flocker adapter.
                run(ip, ["sh", "-c", "cat > /root/powerstrip-config/adapters.yml"], """
version: 1
endpoints:
  "POST /*/containers/create":
    pre: [flocker]
adapters:
  flocker: http://powerstrip-flocker/flocker-adapter
""")
                # start powerstrip-flocker
                POWERSTRIP_FLOCKER = "%s/powerstrip-flocker:latest" % (
                    DOCKER_PULL_REPO, )
                run(ip, ["docker", "pull", POWERSTRIP_FLOCKER])
                # TODO - come up with cleaner/nicer way of powerstrip-flocker
                # being able to establish its own host uuid (or volume
                # mountpoints), such as API calls.
                host_uuid = run(ip, [
                    "python", "-c", "import json; "
                    "print json.load(open('/etc/flocker/volume.json'))['uuid']"
                ]).strip()
                self.powerstripflockers[ip] = remote_service_for_test(
                    self, ip, [
                        "docker", "run", "--name=powerstrip-flocker",
                        "--expose", "80",
                        "-p", "9999:80",  # so that we can detect it being up
                        "-e", "FLOCKER_CONTROL_SERVICE_BASE_URL=%s" % (self.cluster.base_url, ),
                        "-e", "MY_NETWORK_IDENTITY=%s" % (ip, ),
                        "-e", "MY_HOST_UUID=%s" % (host_uuid, ),
                        POWERSTRIP_FLOCKER
                    ])
                print "Waiting for powerstrip-flocker to show up on", ip, "..."
                daemonReadyDeferreds.append(wait_for_socket(ip, 9999))
                # start powerstrip
                # TODO - use the new unix-socket powerstrip approach.
                POWERSTRIP = "clusterhq/powerstrip:latest"
                run(ip, ["docker", "pull", POWERSTRIP])
                self.powerstrips[ip] = remote_service_for_test(
                    self, ip, [
                        "docker", "run", "--name=powerstrip",
                        "-p", "2375:2375",
                        "-v", "/var/run/docker.sock:/var/run/docker.sock",
                        "-v", "/root/powerstrip-config/adapters.yml:"
                              "/etc/powerstrip/adapters.yml",
                        "--link", "powerstrip-flocker:powerstrip-flocker",
                        POWERSTRIP
                    ])
                print "Waiting for powerstrip to show up on", ip, "..."
                daemonReadyDeferreds.append(wait_for_socket(ip, 2375))
            # setUp only completes once both daemons answer on every node.
            d = defer.gatherResults(daemonReadyDeferreds)
            # def debug():
            #     services
            #     import pdb; pdb.set_trace()
            # d.addCallback(lambda ignored: deferLater(reactor, 1, debug))
            return d
        d.addCallback(got_cluster)
        return d

    def test_create_a_dataset(self):
        """
        Running a docker container specifying a dataset name which has never
        been created before creates it in the API.
        """
        node1, node2 = sorted(self.ips)
        fsName = "test001"
        # ``powerstrip`` is a project helper: runs a docker CLI command
        # against the powerstrip proxy on the given node.
        powerstrip(node1, "docker run "
                          "-v /flocker/%s:/data busybox "
                          "sh -c 'echo 1 > /data/file'" % (fsName, ))
        url = self.cluster.base_url + "/configuration/datasets"
        d = self.client.get(url)
        d.addCallback(treq.json_content)
        def verify(result):
            self.assertTrue(len(result) > 0)
            self.assertEqual(result[0]["metadata"], {"name": fsName})
            self.assertEqual(result[0]["primary"], node1)
        # NOTE(review): addBoth means ``verify`` also receives a Failure if
        # the GET/json decode fails, producing a confusing secondary error —
        # addCallback looks intended; confirm before changing.
        d.addBoth(verify)
        return d

    def test_create_a_dataset_manifests(self):
        """
        Running a docker container specifying a dataset name which has never
        been created before creates the actual filesystem and mounts it in
        place in time for the container to start.

        We can verify this by asking Docker for the information about which
        volumes are *actually* mounted in the container, then going and
        checking that the real volume path on the host contains the '1'
        written to the 'file' file specified in the docker run command...
        """
        node1, node2 = sorted(self.ips)
        fsName = "test001"
        container_id = powerstrip(node1, "docker run -d "
                                         "-v /flocker/%s:/data busybox "
                                         "sh -c 'echo fish > /data/file'" % (fsName, )).strip()
        # The volume that Docker now has mounted...
        docker_inspect = json.loads(
            run(node1, ["docker", "inspect", container_id]))
        volume = docker_inspect[0]["Volumes"].values()[0]
        # ... exists as a ZFS volume...
        zfs_volumes = shell(node1, "zfs list -t snapshot,filesystem -r flocker "
                                   "|grep %s |wc -l" % (volume, )).strip()
        self.assertEqual(int(zfs_volumes), 1)
        # ... and contains a file which contains the characters "fish".
        catFileOutput = run(node1, ["cat", "%s/file" % (volume, )]).strip()
        self.assertEqual(catFileOutput, "fish")

    def test_create_two_datasets_same_name(self):
        """
        The metadata stored about a dataset name is checked to make sure
        that no two volumes with the same name are created.  (In fact, if
        two volumes are created with the same name on the same host, it's a
        shared volume.)
        """
        node1, node2 = sorted(self.ips)
        fsName = "test001"
        # First volume...
        container_id_1 = powerstrip(node1, "docker run -d "
                                           "-v /flocker/%s:/data busybox "
                                           "sh -c 'echo fish > /data/file'" % (fsName, )).strip()
        docker_inspect = json.loads(
            run(node1, ["docker", "inspect", container_id_1]))
        volume_1 = docker_inspect[0]["Volumes"].values()[0]
        # Second volume...
        container_id_2 = powerstrip(node1, "docker run -d "
                                           "-v /flocker/%s:/data busybox "
                                           "sh -c 'echo fish > /data/file'" % (fsName, )).strip()
        docker_inspect = json.loads(
            run(node1, ["docker", "inspect", container_id_2]))
        volume_2 = docker_inspect[0]["Volumes"].values()[0]
        # ... have the same flocker UUID.
        self.assertEqual(volume_1, volume_2)

    def test_move_a_dataset(self):
        """
        Running a docker container specifying a dataset name which has been
        created before but which is no longer running moves the dataset
        before starting the container.
        """
        node1, node2 = sorted(self.ips)
        fsName = "test001"
        # Write some bytes to a volume on one host...
        powerstrip(node1, "docker run "
                          "-v /flocker/%s:/data busybox "
                          "sh -c 'echo chicken > /data/file'" % (fsName, ))
        # ... and read them from the same named volume on another...
        container_id = powerstrip(node2, "docker run -d "
                                         "-v /flocker/%s:/data busybox "
                                         "sh -c 'cat /data/file'" % (fsName, )).strip()
        output = run(node2, ["docker", "logs", container_id])
        self.assertEqual(output.strip(), "chicken")

    def test_move_a_dataset_check_persistence(self):
        """
        The data in the dataset between the initial instantiation of it and
        the second instantiation of it persists.
        """
        pass
    test_move_a_dataset_check_persistence.todo = "not implemented yet"

    def test_dataset_is_not_moved_when_being_used(self):
        """
        If a container (*any* container) is currently running with a dataset
        mounted, an error is reported rather than ripping it out from
        underneath a running container.
        """
        pass
    test_dataset_is_not_moved_when_being_used.todo = "not implemented yet"

    def test_two_datasets_one_move_one_create(self):
        """
        When a docker run command mentions two datasets, one which is
        currently not running on another host, and another which is new, the
        new one gets created and the extant one gets moved.  Both operations
        complete before the container is started.
        """
        pass
    test_two_datasets_one_move_one_create.todo = "not implemented yet"
class AdapterResource(resource.Resource): """ A powerstrip pre-hook for container create. """ isLeaf = True def __init__(self, *args, **kw): self._agent = Agent(reactor) # no connectionpool self.client = HTTPClient(self._agent) return resource.Resource.__init__(self, *args, **kw) def render_POST(self, request): """ Handle a pre-hook: either create a filesystem, or move it in place. """ requestJson = json.loads(request.content.read()) if requestJson["Type"] != "pre-hook": raise Exception("unsupported hook type %s" % (requestJson["Type"], )) pprint.pprint(os.environ) # BASE_URL like http://control-service/v1/ ^ json_payload = requestJson["ClientRequest"]["Body"] json_parsed = json.loads(json_payload) self.base_url = os.environ.get("FLOCKER_CONTROL_SERVICE_BASE_URL") self.ip = os.environ.get("MY_NETWORK_IDENTITY") self.host_uuid = os.environ.get("MY_HOST_UUID") def wait_until_volume_in_place(result, fs): """ Called after a dataset has been created or moved in the cluster's desired configuration. Wait until the volume shows up in the cluster actual state on the right host (either having been created or moved). :return: Deferred which fires with the tuple (fs, dataset_id) -- that is, the filesystem and the corresponding flocker dataset uuid that the docker client asked for -- firing only once the filesystem has been created/moved and mounted (iow, exists on the right host in the cluster state). """ dataset_id = result["dataset_id"] def dataset_exists(): d = self.client.get(self.base_url + "/state/datasets") d.addCallback(treq.json_content) def check_dataset_exists(datasets): """ The /v1/state/datasets API seems to show the volume as being on two hosts at once during a move. We assume therefore that when it settles down to only show it on one host that this means the move is complete. 
""" print "Got", self.ip, "datasets:", datasets matching_datasets = [] for dataset in datasets: if dataset["dataset_id"] == dataset_id: matching_datasets.append(dataset) if len(matching_datasets) == 1: if matching_datasets[0]["primary"] == self.ip: return True return False d.addCallback(check_dataset_exists) return d d = loop_until(dataset_exists) d.addCallback(lambda ignored: (fs, dataset_id)) return d d = self.client.get(self.base_url + "/configuration/datasets") d.addCallback(treq.json_content) def got_dataset_configuration(configured_datasets): # form a mapping from names onto dataset objects configured_dataset_mapping = {} for dataset in configured_datasets: if dataset["metadata"].get("name"): configured_dataset_mapping[dataset["metadata"].get( "name")] = dataset # iterate over the datasets we were asked to create by the docker client fs_create_deferreds = [] old_binds = [] if json_parsed['HostConfig']['Binds'] is not None: for bind in json_parsed['HostConfig']['Binds']: host_path, remainder = bind.split(":", 1) # TODO validation # if "/" in fs: # raise Exception("Not allowed flocker filesystems more than one level deep") if host_path.startswith("/flocker/"): fs = host_path[len("/flocker/"):] old_binds.append((fs, remainder)) # if a dataset exists, and is in the right place, we're cool. if fs in configured_dataset_mapping: dataset = configured_dataset_mapping[fs] if dataset["primary"] == self.ip: # simulate "immediate success" fs_create_deferreds.append( defer.succeed((fs, dataset["dataset_id"]))) else: # if a dataset exists, but is on the wrong server [TODO # and is not being used], then move it in place. 
d = self.client.post( self.base_url + "/configuration/datasets/%s" % (dataset["dataset_id"].encode('ascii'), ), json.dumps({"primary": self.ip}), headers={ 'Content-Type': ['application/json'] }) d.addCallback(treq.json_content) d.addCallback(wait_until_volume_in_place, fs=fs) fs_create_deferreds.append(d) else: # if a dataset doesn't exist at all, create it on this server. d = self.client.post( self.base_url + "/configuration/datasets", json.dumps({ "primary": self.ip, "metadata": { "name": fs } }), headers={'Content-Type': ['application/json']}) d.addCallback(treq.json_content) d.addCallback(wait_until_volume_in_place, fs=fs) fs_create_deferreds.append(d) d = defer.gatherResults(fs_create_deferreds) def got_created_and_moved_datasets(list_new_datasets): dataset_mapping = dict(list_new_datasets) new_binds = [] for fs, reminder in old_binds: new_binds.append( "/flocker/%s.default.%s:%s" % (self.host_uuid, dataset_mapping[fs], remainder)) new_json_parsed = json_parsed.copy() new_json_parsed['HostConfig']['Binds'] = new_binds request.write( json.dumps({ "PowerstripProtocolVersion": 1, "ModifiedClientRequest": { "Method": "POST", "Request": request.uri, "Body": json.dumps(new_json_parsed) } })) request.finish() d.addCallback(got_created_and_moved_datasets) return d d.addCallback(got_dataset_configuration) d.addErrback(log.err, 'while processing configured datasets') return server.NOT_DONE_YET
class MountResource(resource.Resource):
    """
    A powerstrip pre-hook for container create.
    """
    # NOTE(review): despite the docstring above, this resource answers the
    # Docker volume-plugin /VolumeDriver.Mount-style call: it receives a
    # {"Name": ...} payload and responds with {"Mountpoint", "Err"}.
    isLeaf = True

    def __init__(self, *args, **kw):
        self._agent = Agent(reactor) # no connectionpool
        self.client = HTTPClient(self._agent)
        return resource.Resource.__init__(self, *args, **kw)

    def render_POST(self, request):
        """
        Handle a pre-hook: either create a filesystem, or move it in place.

        Looks up this host's uuid via /state/nodes, then makes sure the
        dataset named in the request exists with this host as primary
        (creating or moving it via the control service), waits for the
        actual state to converge, and finally responds with the dataset's
        mount path.  Returns ``server.NOT_DONE_YET``; the response is
        written asynchronously.
        """
        json_parsed = json.loads(request.content.read())
        print ">>> called with", json_parsed

        pprint.pprint(os.environ)
        # BASE_URL like http://control-service/v1/ ^
        self.base_url = os.environ.get("FLOCKER_CONTROL_SERVICE_BASE_URL")
        self.ip = os.environ.get("MY_NETWORK_IDENTITY")

        def wait_until_volume_in_place(result, fs):
            """
            Called after a dataset has been created or moved in the cluster's
            desired configuration. Wait until the volume shows up in the
            cluster actual state on the right host (either having been
            created or moved).

            :return: Deferred which fires with the tuple (fs, dataset_id) --
                that is, the filesystem and the corresponding flocker dataset
                uuid that the docker client asked for -- firing only once the
                filesystem has been created/moved and mounted (iow, exists on
                the right host in the cluster state).
            """
            print "wait_until_volume_in_place while processing", fs, "got result", result
            dataset_id = result["dataset_id"]
            def dataset_exists():
                d = self.client.get(self.base_url + "/state/datasets")
                d.addCallback(treq.json_content)
                def check_dataset_exists(datasets):
                    """
                    The /v1/state/datasets API seems to show the volume as
                    being on two hosts at once during a move. We assume
                    therefore that when it settles down to only show it on
                    one host that this means the move is complete.
                    """
                    # self.host_uuid is set by find_my_uuid below before this
                    # polling loop can start.
                    print "Got", self.ip, self.host_uuid, "datasets:", datasets
                    matching_datasets = []
                    for dataset in datasets:
                        if dataset["dataset_id"] == dataset_id:
                            matching_datasets.append(dataset)
                    if len(matching_datasets) == 1:
                        if matching_datasets[0]["primary"] == self.host_uuid:
                            # Unlike AdapterResource, return the full dataset
                            # (its ["path"] is needed for the response).
                            return matching_datasets[0]
                    return False
                d.addCallback(check_dataset_exists)
                return d
            d = loop_until(dataset_exists)
            d.addCallback(lambda dataset: (fs, dataset))
            return d

        d = self.client.get(self.base_url + "/state/nodes")
        d.addCallback(treq.json_content)
        def find_my_uuid(nodes):
            # Resolve this host's flocker node uuid from its IP, then chain
            # into fetching the configured datasets.
            # NOTE(review): if no node matches self.ip, self.host_uuid is
            # never set and later reads raise AttributeError — confirm
            # whether that can happen in practice.
            for node in nodes:
                if node["host"] == self.ip:
                    self.host_uuid = node["uuid"]
                    break
            return self.client.get(self.base_url + "/configuration/datasets")
        d.addCallback(find_my_uuid)
        d.addCallback(treq.json_content)
        def got_dataset_configuration(configured_datasets):
            # form a mapping from names onto dataset objects
            configured_dataset_mapping = {}
            for dataset in configured_datasets:
                if dataset["metadata"].get("name"):
                    configured_dataset_mapping[dataset["metadata"].get("name")] = dataset

            # iterate over the datasets we were asked to create by the docker client
            fs_create_deferreds = []
            old_binds = []
            print "got json_parsed...", json_parsed
            if json_parsed['Name'] is not None and json_parsed['Name'] != "":
                # The mount request names exactly one volume; model it as a
                # one-element bind list to mirror AdapterResource's shape.
                binds = [json_parsed['Name']]
                for bind in binds:
                    fs, remainder = bind, ""
                    # TODO validation
                    # if "/" in fs:
                    #     raise Exception("Not allowed flocker filesystems more than one level deep")
                    old_binds.append((fs, remainder))
                    # if a dataset exists, and is in the right place, we're cool.
                    if fs in configured_dataset_mapping:
                        dataset = configured_dataset_mapping[fs]
                        if dataset["primary"] == self.host_uuid:
                            # check / wait for the state to match the desired
                            # configuration
                            fs_create_deferreds.append(wait_until_volume_in_place(dataset, fs=fs))
                        else:
                            # if a dataset exists, but is on the wrong server [TODO
                            # and is not being used], then move it in place.
                            d = self.client.post(
                                self.base_url + "/configuration/datasets/%s" % (
                                    dataset["dataset_id"].encode('ascii'),),
                                json.dumps({"primary": self.host_uuid}),
                                headers={'Content-Type': ['application/json']})
                            d.addCallback(treq.json_content)
                            d.addCallback(wait_until_volume_in_place, fs=fs)
                            fs_create_deferreds.append(d)
                    else:
                        # if a dataset doesn't exist at all, create it on this server.
                        d = self.client.post(self.base_url + "/configuration/datasets",
                            json.dumps({"primary": self.host_uuid, "metadata": {"name": fs}}),
                            headers={'Content-Type': ['application/json']})
                        d.addCallback(treq.json_content)
                        d.addCallback(wait_until_volume_in_place, fs=fs)
                        fs_create_deferreds.append(d)

            d = defer.gatherResults(fs_create_deferreds)
            def got_created_and_moved_datasets(list_new_datasets):
                dataset_mapping = dict(list_new_datasets)
                print "constructed dataset_mapping", dataset_mapping
                new_binds = []
                for fs, remainder in old_binds:
                    # forget about remainder...
                    new_binds.append(dataset_mapping[fs]["path"])
                new_json = {}
                if new_binds:
                    new_json["Mountpoint"] = new_binds[0]
                    new_json["Err"] = None
                else:
                    # This is how you indicate not handling this request
                    new_json["Mountpoint"] = ""
                    new_json["Err"] = "unable to handle"
                print "<<< responding with", new_json
                request.write(json.dumps(new_json))
                request.finish()
            d.addCallback(got_created_and_moved_datasets)
            return d
        d.addCallback(got_dataset_configuration)
        d.addErrback(log.err, 'while processing configured datasets')
        return server.NOT_DONE_YET
class HTTPClientTests(TestCase):
    """
    Unit tests for the modern (bytes-based) ``HTTPClient``: every test
    drives the client against a mocked ``Agent`` and asserts on the exact
    byte-string method, URI, headers and body producer handed to
    ``Agent.request``.  No network I/O occurs.
    """
    def setUp(self):
        # Mock the Agent so requests never leave the process, and patch
        # both body-producer classes to inspect the data passed to them.
        self.agent = mock.Mock(Agent)
        self.client = HTTPClient(self.agent)

        self.fbp_patcher = mock.patch('treq.client.FileBodyProducer')
        self.FileBodyProducer = self.fbp_patcher.start()
        self.addCleanup(self.fbp_patcher.stop)

        self.mbp_patcher = mock.patch('treq.multipart.MultiPartProducer')
        self.MultiPartProducer = self.mbp_patcher.start()
        self.addCleanup(self.mbp_patcher.stop)

    def assertBody(self, expected):
        # Read back what was handed to the patched FileBodyProducer.
        body = self.FileBodyProducer.mock_calls[0][1][0]
        self.assertEqual(body.read(), expected)

    def test_post(self):
        self.client.post('http://example.com/')
        self.agent.request.assert_called_once_with(
            b'POST', b'http://example.com/',
            Headers({b'accept-encoding': [b'gzip']}),
            None)

    def test_request_uri_idn(self):
        # Internationalized domain names are IDNA-encoded before hitting
        # the wire.
        self.client.request('GET', u'http://č.net')
        self.agent.request.assert_called_once_with(
            b'GET', b'http://xn--bea.net',
            Headers({b'accept-encoding': [b'gzip']}),
            None)

    def test_request_case_insensitive_methods(self):
        self.client.request('gEt', 'http://example.com/')
        self.agent.request.assert_called_once_with(
            b'GET', b'http://example.com/',
            Headers({b'accept-encoding': [b'gzip']}),
            None)

    def test_request_query_params(self):
        self.client.request('GET', 'http://example.com/',
                            params={'foo': ['bar']})
        self.agent.request.assert_called_once_with(
            b'GET', b'http://example.com/?foo=bar',
            Headers({b'accept-encoding': [b'gzip']}),
            None)

    def test_request_tuple_query_values(self):
        self.client.request('GET', 'http://example.com/',
                            params={'foo': ('bar',)})
        self.agent.request.assert_called_once_with(
            b'GET', b'http://example.com/?foo=bar',
            Headers({b'accept-encoding': [b'gzip']}),
            None)

    def test_request_merge_query_params(self):
        # Params given via ``params`` are appended after those already in
        # the URI.
        self.client.request('GET', 'http://example.com/?baz=bax',
                            params={'foo': ['bar', 'baz']})
        self.agent.request.assert_called_once_with(
            b'GET', b'http://example.com/?baz=bax&foo=bar&foo=baz',
            Headers({b'accept-encoding': [b'gzip']}),
            None)

    def test_request_merge_tuple_query_params(self):
        self.client.request('GET', 'http://example.com/?baz=bax',
                            params=[('foo', 'bar')])
        self.agent.request.assert_called_once_with(
            b'GET', b'http://example.com/?baz=bax&foo=bar',
            Headers({b'accept-encoding': [b'gzip']}),
            None)

    def test_request_dict_single_value_query_params(self):
        self.client.request('GET', 'http://example.com/',
                            params={'foo': 'bar'})
        self.agent.request.assert_called_once_with(
            b'GET', b'http://example.com/?foo=bar',
            Headers({b'accept-encoding': [b'gzip']}),
            None)

    def test_request_data_dict(self):
        # dict/tuple ``data`` is form-urlencoded and the Content-Type set
        # accordingly.
        self.client.request('POST', 'http://example.com/',
                            data={'foo': ['bar', 'baz']})
        self.agent.request.assert_called_once_with(
            b'POST', b'http://example.com/',
            Headers({b'Content-Type': [b'application/x-www-form-urlencoded'],
                     b'accept-encoding': [b'gzip']}),
            self.FileBodyProducer.return_value)
        self.assertBody(b'foo=bar&foo=baz')

    def test_request_data_single_dict(self):
        self.client.request('POST', 'http://example.com/',
                            data={'foo': 'bar'})
        self.agent.request.assert_called_once_with(
            b'POST', b'http://example.com/',
            Headers({b'Content-Type': [b'application/x-www-form-urlencoded'],
                     b'accept-encoding': [b'gzip']}),
            self.FileBodyProducer.return_value)
        self.assertBody(b'foo=bar')

    def test_request_data_tuple(self):
        self.client.request('POST', 'http://example.com/',
                            data=[('foo', 'bar')])
        self.agent.request.assert_called_once_with(
            b'POST', b'http://example.com/',
            Headers({b'Content-Type': [b'application/x-www-form-urlencoded'],
                     b'accept-encoding': [b'gzip']}),
            self.FileBodyProducer.return_value)
        self.assertBody(b'foo=bar')

    def test_request_data_file(self):
        # A raw file-like ``data`` body is passed through without a
        # Content-Type being added.
        temp_fn = self.mktemp()

        with open(temp_fn, "wb") as temp_file:
            temp_file.write(b'hello')

        self.client.request('POST', 'http://example.com/',
                            data=open(temp_fn, 'rb'))
        self.agent.request.assert_called_once_with(
            b'POST', b'http://example.com/',
            Headers({b'accept-encoding': [b'gzip']}),
            self.FileBodyProducer.return_value)
        self.assertBody(b'hello')

    def test_request_json_dict(self):
        # ``json=`` bodies are serialized compactly (no spaces) with a
        # JSON content type.
        self.client.request('POST', 'http://example.com/',
                            json={'foo': 'bar'})
        self.agent.request.assert_called_once_with(
            b'POST', b'http://example.com/',
            Headers({b'Content-Type': [b'application/json; charset=UTF-8'],
                     b'accept-encoding': [b'gzip']}),
            self.FileBodyProducer.return_value)
        self.assertBody(b'{"foo":"bar"}')

    def test_request_json_tuple(self):
        self.client.request('POST', 'http://example.com/',
                            json=('foo', 1))
        self.agent.request.assert_called_once_with(
            b'POST', b'http://example.com/',
            Headers({b'Content-Type': [b'application/json; charset=UTF-8'],
                     b'accept-encoding': [b'gzip']}),
            self.FileBodyProducer.return_value)
        self.assertBody(b'["foo",1]')

    def test_request_json_number(self):
        self.client.request('POST', 'http://example.com/', json=1.)
        self.agent.request.assert_called_once_with(
            b'POST', b'http://example.com/',
            Headers({b'Content-Type': [b'application/json; charset=UTF-8'],
                     b'accept-encoding': [b'gzip']}),
            self.FileBodyProducer.return_value)
        self.assertBody(b'1.0')

    def test_request_json_string(self):
        self.client.request('POST', 'http://example.com/', json='hello')
        self.agent.request.assert_called_once_with(
            b'POST', b'http://example.com/',
            Headers({b'Content-Type': [b'application/json; charset=UTF-8'],
                     b'accept-encoding': [b'gzip']}),
            self.FileBodyProducer.return_value)
        self.assertBody(b'"hello"')

    def test_request_json_bool(self):
        self.client.request('POST', 'http://example.com/', json=True)
        self.agent.request.assert_called_once_with(
            b'POST', b'http://example.com/',
            Headers({b'Content-Type': [b'application/json; charset=UTF-8'],
                     b'accept-encoding': [b'gzip']}),
            self.FileBodyProducer.return_value)
        self.assertBody(b'true')

    def test_request_json_none(self):
        self.client.request('POST', 'http://example.com/', json=None)
        self.agent.request.assert_called_once_with(
            b'POST', b'http://example.com/',
            Headers({b'Content-Type': [b'application/json; charset=UTF-8'],
                     b'accept-encoding': [b'gzip']}),
            self.FileBodyProducer.return_value)
        self.assertBody(b'null')

    @mock.patch('treq.client.uuid.uuid4',
                mock.Mock(return_value="heyDavid"))
    def test_request_no_name_attachment(self):
        # A bare file-like object gets no filename and the generic
        # application/octet-stream content type.
        self.client.request(
            'POST', 'http://example.com/', files={"name": BytesIO(b"hello")})

        self.agent.request.assert_called_once_with(
            b'POST', b'http://example.com/',
            Headers({
                b'accept-encoding': [b'gzip'],
                b'Content-Type': [b'multipart/form-data; boundary=heyDavid']}),
            self.MultiPartProducer.return_value)

        FP = self.FileBodyProducer.return_value
        self.assertEqual(
            mock.call(
                [('name', (None, 'application/octet-stream', FP))],
                boundary=b'heyDavid'),
            self.MultiPartProducer.call_args)

    @mock.patch('treq.client.uuid.uuid4',
                mock.Mock(return_value="heyDavid"))
    def test_request_named_attachment(self):
        # (filename, fileobj): content type guessed from the filename.
        self.client.request(
            'POST', 'http://example.com/', files={
                "name": ('image.jpg', BytesIO(b"hello"))})

        self.agent.request.assert_called_once_with(
            b'POST', b'http://example.com/',
            Headers({
                b'accept-encoding': [b'gzip'],
                b'Content-Type': [b'multipart/form-data; boundary=heyDavid']}),
            self.MultiPartProducer.return_value)

        FP = self.FileBodyProducer.return_value
        self.assertEqual(
            mock.call(
                [('name', ('image.jpg', 'image/jpeg', FP))],
                boundary=b'heyDavid'),
            self.MultiPartProducer.call_args)

    @mock.patch('treq.client.uuid.uuid4',
                mock.Mock(return_value="heyDavid"))
    def test_request_named_attachment_and_ctype(self):
        # (filename, content-type, fileobj): explicit content type wins.
        self.client.request(
            'POST', 'http://example.com/', files={
                "name": ('image.jpg', 'text/plain', BytesIO(b"hello"))})

        self.agent.request.assert_called_once_with(
            b'POST', b'http://example.com/',
            Headers({
                b'accept-encoding': [b'gzip'],
                b'Content-Type': [b'multipart/form-data; boundary=heyDavid']}),
            self.MultiPartProducer.return_value)

        FP = self.FileBodyProducer.return_value
        self.assertEqual(
            mock.call(
                [('name', ('image.jpg', 'text/plain', FP))],
                boundary=b'heyDavid'),
            self.MultiPartProducer.call_args)

    @mock.patch('treq.client.uuid.uuid4',
                mock.Mock(return_value="heyDavid"))
    def test_request_mixed_params(self):

        class NamedFile(BytesIO):
            # File-like object with a .name attribute, from which the
            # filename and content type are derived.
            def __init__(self, val):
                BytesIO.__init__(self, val)
                self.name = "image.png"

        self.client.request(
            'POST', 'http://example.com/',
            data=[("a", "b"), ("key", "val")],
            files=[
                ("file1", ('image.jpg', BytesIO(b"hello"))),
                ("file2", NamedFile(b"yo"))])

        self.agent.request.assert_called_once_with(
            b'POST', b'http://example.com/',
            Headers({
                b'accept-encoding': [b'gzip'],
                b'Content-Type': [b'multipart/form-data; boundary=heyDavid']}),
            self.MultiPartProducer.return_value)

        FP = self.FileBodyProducer.return_value
        self.assertEqual(
            mock.call([
                ('a', 'b'),
                ('key', 'val'),
                ('file1', ('image.jpg', 'image/jpeg', FP)),
                ('file2', ('image.png', 'image/png', FP))],
                boundary=b'heyDavid'),
            self.MultiPartProducer.call_args)

    @mock.patch('treq.client.uuid.uuid4',
                mock.Mock(return_value="heyDavid"))
    def test_request_mixed_params_dict(self):
        self.client.request(
            'POST', 'http://example.com/',
            data={"key": "a", "key2": "b"},
            files={"file1": BytesIO(b"hey")})

        self.agent.request.assert_called_once_with(
            b'POST', b'http://example.com/',
            Headers({
                b'accept-encoding': [b'gzip'],
                b'Content-Type': [b'multipart/form-data; boundary=heyDavid']}),
            self.MultiPartProducer.return_value)

        FP = self.FileBodyProducer.return_value
        self.assertEqual(
            mock.call([
                ('key', 'a'),
                ('key2', 'b'),
                ('file1', (None, 'application/octet-stream', FP))],
                boundary=b'heyDavid'),
            self.MultiPartProducer.call_args)

    def test_request_unsupported_params_combination(self):
        # A raw file-like ``data`` body cannot be combined with ``files``.
        self.assertRaises(ValueError,
                          self.client.request,
                          'POST', 'http://example.com/',
                          data=BytesIO(b"yo"),
                          files={"file1": BytesIO(b"hey")})

    def test_request_dict_headers(self):
        # Scalar header values are wrapped into single-element lists.
        self.client.request('GET', 'http://example.com/', headers={
            'User-Agent': 'treq/0.1dev',
            'Accept': ['application/json', 'text/plain']
        })

        self.agent.request.assert_called_once_with(
            b'GET', b'http://example.com/',
            Headers({b'User-Agent': [b'treq/0.1dev'],
                     b'accept-encoding': [b'gzip'],
                     b'Accept': [b'application/json', b'text/plain']}),
            None)

    @with_clock
    def test_request_timeout_fired(self, clock):
        """
        Verify the request is cancelled if a response is
        not received within specified timeout period.
        """
        self.agent.request.return_value = d = Deferred()
        self.client.request('GET', 'http://example.com', timeout=2)

        # simulate we haven't gotten a response within timeout seconds
        clock.advance(3)

        # a deferred should have been cancelled
        self.failureResultOf(d, CancelledError)

    @with_clock
    def test_request_timeout_cancelled(self, clock):
        """
        Verify timeout is cancelled if a response is received
        before timeout period elapses.
        """
        self.agent.request.return_value = d = Deferred()
        self.client.request('GET', 'http://example.com', timeout=2)

        # simulate a response
        d.callback(mock.Mock(code=200, headers=Headers({})))

        # now advance the clock but since we already got a result,
        # a cancellation timer should have been cancelled
        clock.advance(3)

        self.successResultOf(d)

    def test_response_is_buffered(self):
        # The buffered wrapper delivers the body at most once to the
        # underlying response, replaying it afterwards.
        response = mock.Mock(deliverBody=mock.Mock(),
                             headers=Headers({}))

        self.agent.request.return_value = succeed(response)

        d = self.client.get('http://www.example.com')

        result = self.successResultOf(d)

        protocol = mock.Mock(Protocol)
        result.deliverBody(protocol)
        self.assertEqual(response.deliverBody.call_count, 1)

        result.deliverBody(protocol)
        self.assertEqual(response.deliverBody.call_count, 1)

    def test_response_buffering_is_disabled_with_unbufferred_arg(self):
        response = mock.Mock(headers=Headers({}))

        self.agent.request.return_value = succeed(response)

        d = self.client.get('http://www.example.com', unbuffered=True)

        # YOLO public attribute.
        self.assertEqual(self.successResultOf(d).original, response)

    def test_request_post_redirect_denied(self):
        # By default a redirected POST is refused (RedirectAgent policy).
        response = mock.Mock(code=302, headers=Headers({'Location': ['/']}))
        self.agent.request.return_value = succeed(response)
        d = self.client.post('http://www.example.com')
        self.failureResultOf(d, ResponseFailed)

    def test_request_browser_like_redirects(self):
        # With browser_like_redirects=True the redirect is followed and the
        # final response is returned.
        response = mock.Mock(code=302, headers=Headers({'Location': ['/']}))

        self.agent.request.return_value = succeed(response)

        raw = mock.Mock(return_value=[])
        final_resp = mock.Mock(code=200, headers=mock.Mock(getRawHeaders=raw))
        with mock.patch('twisted.web.client.RedirectAgent._handleRedirect',
                        return_value=final_resp):
            d = self.client.post('http://www.google.com',
                                 browser_like_redirects=True,
                                 unbuffered=True)

        self.assertEqual(self.successResultOf(d).original, final_resp)
class HTTPClientTests(TestCase):
    """
    Tests for ``treq.client.HTTPClient``: request construction (method
    normalisation, IDN URIs, query parameters, body encodings, multipart
    files, headers), request timeouts, and response buffering / redirect
    handling — all against a mocked ``Agent`` so no network is used.
    """

    def setUp(self):
        # Replace the real Agent and both body producers with mocks so each
        # test can inspect exactly what HTTPClient hands to the transport.
        self.agent = mock.Mock(Agent)
        self.client = HTTPClient(self.agent)
        self.fbp_patcher = mock.patch('treq.client.FileBodyProducer')
        self.FileBodyProducer = self.fbp_patcher.start()
        self.addCleanup(self.fbp_patcher.stop)
        self.mbp_patcher = mock.patch('treq.multipart.MultiPartProducer')
        self.MultiPartProducer = self.mbp_patcher.start()
        self.addCleanup(self.mbp_patcher.stop)

    def assertBody(self, expected):
        # First positional argument of the first FileBodyProducer call is
        # the file-like object wrapping the request body.
        body = self.FileBodyProducer.mock_calls[0][1][0]
        self.assertEqual(body.read(), expected)

    def test_post(self):
        self.client.post('http://example.com/')
        self.agent.request.assert_called_once_with(
            b'POST', b'http://example.com/',
            Headers({b'accept-encoding': [b'gzip']}), None)

    def test_request_uri_idn(self):
        # Non-ASCII host names are IDNA-encoded before hitting the agent.
        self.client.request('GET', u'http://‽.net')
        self.agent.request.assert_called_once_with(
            b'GET', b'http://xn--fwg.net',
            Headers({b'accept-encoding': [b'gzip']}), None)

    def test_request_case_insensitive_methods(self):
        self.client.request('gEt', 'http://example.com/')
        self.agent.request.assert_called_once_with(
            b'GET', b'http://example.com/',
            Headers({b'accept-encoding': [b'gzip']}), None)

    def test_request_query_params(self):
        self.client.request('GET', 'http://example.com/',
                            params={'foo': ['bar']})
        self.agent.request.assert_called_once_with(
            b'GET', b'http://example.com/?foo=bar',
            Headers({b'accept-encoding': [b'gzip']}), None)

    def test_request_tuple_query_values(self):
        self.client.request('GET', 'http://example.com/',
                            params={'foo': ('bar', )})
        self.agent.request.assert_called_once_with(
            b'GET', b'http://example.com/?foo=bar',
            Headers({b'accept-encoding': [b'gzip']}), None)

    def test_request_merge_query_params(self):
        # Params passed to request() are appended after any querystring
        # already present in the URL.
        self.client.request('GET', 'http://example.com/?baz=bax',
                            params={'foo': ['bar', 'baz']})
        self.agent.request.assert_called_once_with(
            b'GET', b'http://example.com/?baz=bax&foo=bar&foo=baz',
            Headers({b'accept-encoding': [b'gzip']}), None)

    def test_request_merge_tuple_query_params(self):
        self.client.request('GET', 'http://example.com/?baz=bax',
                            params=[('foo', 'bar')])
        self.agent.request.assert_called_once_with(
            b'GET', b'http://example.com/?baz=bax&foo=bar',
            Headers({b'accept-encoding': [b'gzip']}), None)

    def test_request_dict_single_value_query_params(self):
        self.client.request('GET', 'http://example.com/',
                            params={'foo': 'bar'})
        self.agent.request.assert_called_once_with(
            b'GET', b'http://example.com/?foo=bar',
            Headers({b'accept-encoding': [b'gzip']}), None)

    def test_request_data_dict(self):
        self.client.request('POST', 'http://example.com/',
                            data={'foo': ['bar', 'baz']})
        self.agent.request.assert_called_once_with(
            b'POST', b'http://example.com/',
            Headers({
                b'Content-Type': [b'application/x-www-form-urlencoded'],
                b'accept-encoding': [b'gzip']
            }),
            self.FileBodyProducer.return_value)
        self.assertBody(b'foo=bar&foo=baz')

    def test_request_data_single_dict(self):
        self.client.request('POST', 'http://example.com/',
                            data={'foo': 'bar'})
        self.agent.request.assert_called_once_with(
            b'POST', b'http://example.com/',
            Headers({
                b'Content-Type': [b'application/x-www-form-urlencoded'],
                b'accept-encoding': [b'gzip']
            }),
            self.FileBodyProducer.return_value)
        self.assertBody(b'foo=bar')

    def test_request_data_tuple(self):
        self.client.request('POST', 'http://example.com/',
                            data=[('foo', 'bar')])
        self.agent.request.assert_called_once_with(
            b'POST', b'http://example.com/',
            Headers({
                b'Content-Type': [b'application/x-www-form-urlencoded'],
                b'accept-encoding': [b'gzip']
            }),
            self.FileBodyProducer.return_value)
        self.assertBody(b'foo=bar')

    def test_request_data_file(self):
        # A file-like body is passed through verbatim (no form encoding,
        # hence no Content-Type header is added).
        temp_fn = self.mktemp()
        with open(temp_fn, "wb") as temp_file:
            temp_file.write(b'hello')
        self.client.request('POST', 'http://example.com/',
                            data=open(temp_fn, 'rb'))
        self.agent.request.assert_called_once_with(
            b'POST', b'http://example.com/',
            Headers({b'accept-encoding': [b'gzip']}),
            self.FileBodyProducer.return_value)
        self.assertBody(b'hello')

    def test_request_json_dict(self):
        self.client.request('POST', 'http://example.com/',
                            json={'foo': 'bar'})
        self.agent.request.assert_called_once_with(
            b'POST', b'http://example.com/',
            Headers({
                b'Content-Type': [b'application/json; charset=UTF-8'],
                b'accept-encoding': [b'gzip']
            }),
            self.FileBodyProducer.return_value)
        self.assertBody(b'{"foo":"bar"}')

    def test_request_json_tuple(self):
        self.client.request('POST', 'http://example.com/',
                            json=('foo', 1))
        self.agent.request.assert_called_once_with(
            b'POST', b'http://example.com/',
            Headers({
                b'Content-Type': [b'application/json; charset=UTF-8'],
                b'accept-encoding': [b'gzip']
            }),
            self.FileBodyProducer.return_value)
        self.assertBody(b'["foo",1]')

    def test_request_json_number(self):
        self.client.request('POST', 'http://example.com/', json=1.)
        self.agent.request.assert_called_once_with(
            b'POST', b'http://example.com/',
            Headers({
                b'Content-Type': [b'application/json; charset=UTF-8'],
                b'accept-encoding': [b'gzip']
            }),
            self.FileBodyProducer.return_value)
        self.assertBody(b'1.0')

    def test_request_json_string(self):
        self.client.request('POST', 'http://example.com/', json='hello')
        self.agent.request.assert_called_once_with(
            b'POST', b'http://example.com/',
            Headers({
                b'Content-Type': [b'application/json; charset=UTF-8'],
                b'accept-encoding': [b'gzip']
            }),
            self.FileBodyProducer.return_value)
        self.assertBody(b'"hello"')

    def test_request_json_bool(self):
        self.client.request('POST', 'http://example.com/', json=True)
        self.agent.request.assert_called_once_with(
            b'POST', b'http://example.com/',
            Headers({
                b'Content-Type': [b'application/json; charset=UTF-8'],
                b'accept-encoding': [b'gzip']
            }),
            self.FileBodyProducer.return_value)
        self.assertBody(b'true')

    def test_request_json_none(self):
        # json=None is a legitimate payload: it encodes to b'null'.
        self.client.request('POST', 'http://example.com/', json=None)
        self.agent.request.assert_called_once_with(
            b'POST', b'http://example.com/',
            Headers({
                b'Content-Type': [b'application/json; charset=UTF-8'],
                b'accept-encoding': [b'gzip']
            }),
            self.FileBodyProducer.return_value)
        self.assertBody(b'null')

    @mock.patch('treq.client.uuid.uuid4', mock.Mock(return_value="heyDavid"))
    def test_request_no_name_attachment(self):
        # uuid4 is patched so the multipart boundary is deterministic.
        self.client.request('POST', 'http://example.com/',
                            files={"name": BytesIO(b"hello")})
        self.agent.request.assert_called_once_with(
            b'POST', b'http://example.com/',
            Headers({
                b'accept-encoding': [b'gzip'],
                b'Content-Type': [b'multipart/form-data; boundary=heyDavid']
            }),
            self.MultiPartProducer.return_value)
        FP = self.FileBodyProducer.return_value
        self.assertEqual(
            mock.call([('name', (None, 'application/octet-stream', FP))],
                      boundary=b'heyDavid'),
            self.MultiPartProducer.call_args)

    @mock.patch('treq.client.uuid.uuid4', mock.Mock(return_value="heyDavid"))
    def test_request_named_attachment(self):
        self.client.request('POST', 'http://example.com/',
                            files={"name": ('image.jpg', BytesIO(b"hello"))})
        self.agent.request.assert_called_once_with(
            b'POST', b'http://example.com/',
            Headers({
                b'accept-encoding': [b'gzip'],
                b'Content-Type': [b'multipart/form-data; boundary=heyDavid']
            }),
            self.MultiPartProducer.return_value)
        FP = self.FileBodyProducer.return_value
        self.assertEqual(
            mock.call([('name', ('image.jpg', 'image/jpeg', FP))],
                      boundary=b'heyDavid'),
            self.MultiPartProducer.call_args)

    @mock.patch('treq.client.uuid.uuid4', mock.Mock(return_value="heyDavid"))
    def test_request_named_attachment_and_ctype(self):
        self.client.request(
            'POST', 'http://example.com/',
            files={"name": ('image.jpg', 'text/plain', BytesIO(b"hello"))})
        self.agent.request.assert_called_once_with(
            b'POST', b'http://example.com/',
            Headers({
                b'accept-encoding': [b'gzip'],
                b'Content-Type': [b'multipart/form-data; boundary=heyDavid']
            }),
            self.MultiPartProducer.return_value)
        FP = self.FileBodyProducer.return_value
        self.assertEqual(
            mock.call([('name', ('image.jpg', 'text/plain', FP))],
                      boundary=b'heyDavid'),
            self.MultiPartProducer.call_args)

    @mock.patch('treq.client.uuid.uuid4', mock.Mock(return_value="heyDavid"))
    def test_request_mixed_params(self):
        class NamedFile(BytesIO):
            def __init__(self, val):
                BytesIO.__init__(self, val)
                self.name = "image.png"

        self.client.request(
            'POST', 'http://example.com/',
            data=[("a", "b"), ("key", "val")],
            files=[("file1", ('image.jpg', BytesIO(b"hello"))),
                   ("file2", NamedFile(b"yo"))])
        self.agent.request.assert_called_once_with(
            b'POST', b'http://example.com/',
            Headers({
                b'accept-encoding': [b'gzip'],
                b'Content-Type': [b'multipart/form-data; boundary=heyDavid']
            }),
            self.MultiPartProducer.return_value)
        FP = self.FileBodyProducer.return_value
        self.assertEqual(
            mock.call([('a', 'b'),
                       ('key', 'val'),
                       ('file1', ('image.jpg', 'image/jpeg', FP)),
                       ('file2', ('image.png', 'image/png', FP))],
                      boundary=b'heyDavid'),
            self.MultiPartProducer.call_args)

    @mock.patch('treq.client.uuid.uuid4', mock.Mock(return_value="heyDavid"))
    def test_request_mixed_params_dict(self):
        self.client.request('POST', 'http://example.com/',
                            data={"key": "a", "key2": "b"},
                            files={"file1": BytesIO(b"hey")})
        self.agent.request.assert_called_once_with(
            b'POST', b'http://example.com/',
            Headers({
                b'accept-encoding': [b'gzip'],
                b'Content-Type': [b'multipart/form-data; boundary=heyDavid']
            }),
            self.MultiPartProducer.return_value)
        FP = self.FileBodyProducer.return_value
        self.assertEqual(
            mock.call([('key', 'a'),
                       ('key2', 'b'),
                       ('file1', (None, 'application/octet-stream', FP))],
                      boundary=b'heyDavid'),
            self.MultiPartProducer.call_args)

    def test_request_unsupported_params_combination(self):
        # A raw file-like *data* body cannot be combined with *files*.
        self.assertRaises(ValueError,
                          self.client.request,
                          'POST', 'http://example.com/',
                          data=BytesIO(b"yo"),
                          files={"file1": BytesIO(b"hey")})

    def test_request_dict_headers(self):
        self.client.request('GET', 'http://example.com/', headers={
            'User-Agent': 'treq/0.1dev',
            'Accept': ['application/json', 'text/plain']
        })
        self.agent.request.assert_called_once_with(
            b'GET', b'http://example.com/',
            Headers({
                b'User-Agent': [b'treq/0.1dev'],
                b'accept-encoding': [b'gzip'],
                b'Accept': [b'application/json', b'text/plain']
            }),
            None)

    @with_clock
    def test_request_timeout_fired(self, clock):
        """
        Verify the request is cancelled if a response is not received
        within specified timeout period.
        """
        self.agent.request.return_value = d = Deferred()
        self.client.request('GET', 'http://example.com', timeout=2)

        # simulate we haven't gotten a response within timeout seconds
        clock.advance(3)

        # a deferred should have been cancelled
        self.failureResultOf(d, CancelledError)

    @with_clock
    def test_request_timeout_cancelled(self, clock):
        """
        Verify timeout is cancelled if a response is received before
        timeout period elapses.
        """
        self.agent.request.return_value = d = Deferred()
        self.client.request('GET', 'http://example.com', timeout=2)

        # simulate a response
        d.callback(mock.Mock(code=200, headers=Headers({})))

        # now advance the clock but since we already got a result,
        # a cancellation timer should have been cancelled
        clock.advance(3)

        self.successResultOf(d)

    def test_response_is_buffered(self):
        response = mock.Mock(deliverBody=mock.Mock(),
                             headers=Headers({}))
        self.agent.request.return_value = succeed(response)

        d = self.client.get('http://www.example.com')

        result = self.successResultOf(d)

        protocol = mock.Mock(Protocol)
        result.deliverBody(protocol)
        self.assertEqual(response.deliverBody.call_count, 1)

        # A second delivery is served from the buffer; the underlying
        # response's deliverBody is not called again.
        result.deliverBody(protocol)
        self.assertEqual(response.deliverBody.call_count, 1)

    def test_response_buffering_is_disabled_with_unbufferred_arg(self):
        response = mock.Mock(headers=Headers({}))
        self.agent.request.return_value = succeed(response)

        d = self.client.get('http://www.example.com', unbuffered=True)

        # YOLO public attribute.
        self.assertEqual(self.successResultOf(d).original, response)

    def test_request_post_redirect_denied(self):
        # POST is not automatically re-issued on a 302.
        response = mock.Mock(code=302, headers=Headers({'Location': ['/']}))
        self.agent.request.return_value = succeed(response)

        d = self.client.post('http://www.example.com')
        self.failureResultOf(d, ResponseFailed)

    def test_request_browser_like_redirects(self):
        response = mock.Mock(code=302, headers=Headers({'Location': ['/']}))
        self.agent.request.return_value = succeed(response)

        raw = mock.Mock(return_value=[])
        final_resp = mock.Mock(code=200, headers=mock.Mock(getRawHeaders=raw))
        with mock.patch('twisted.web.client.RedirectAgent._handleRedirect',
                        return_value=final_resp):
            d = self.client.post('http://www.google.com',
                                 browser_like_redirects=True,
                                 unbuffered=True)

        self.assertEqual(self.successResultOf(d).original, final_resp)
class AdapterResource(resource.Resource): """ A powerstrip pre-hook for container create. """ isLeaf = True def __init__(self, *args, **kw): self._agent = Agent(reactor) # no connectionpool self.client = HTTPClient(self._agent) return resource.Resource.__init__(self, *args, **kw) def render_POST(self, request): """ Handle a pre-hook: either create a filesystem, or move it in place. """ requestJson = json.loads(request.content.read()) if requestJson["Type"] != "pre-hook": raise Exception("unsupported hook type %s" % (requestJson["Type"],)) pprint.pprint(os.environ) # BASE_URL like http://control-service/v1/ ^ json_payload = requestJson["ClientRequest"]["Body"] json_parsed = json.loads(json_payload) self.base_url = os.environ.get("FLOCKER_CONTROL_SERVICE_BASE_URL") self.ip = os.environ.get("MY_NETWORK_IDENTITY") self.host_uuid = os.environ.get("MY_HOST_UUID") def wait_until_volume_in_place(result, fs): """ Called after a dataset has been created or moved in the cluster's desired configuration. Wait until the volume shows up in the cluster actual state on the right host (either having been created or moved). :return: Deferred which fires with the tuple (fs, dataset_id) -- that is, the filesystem and the corresponding flocker dataset uuid that the docker client asked for -- firing only once the filesystem has been created/moved and mounted (iow, exists on the right host in the cluster state). """ dataset_id = result["dataset_id"] def dataset_exists(): d = self.client.get(self.base_url + "/state/datasets") d.addCallback(treq.json_content) def check_dataset_exists(datasets): """ The /v1/state/datasets API seems to show the volume as being on two hosts at once during a move. We assume therefore that when it settles down to only show it on one host that this means the move is complete. 
""" print "Got", self.ip, "datasets:", datasets matching_datasets = [] for dataset in datasets: if dataset["dataset_id"] == dataset_id: matching_datasets.append(dataset) if len(matching_datasets) == 1: if matching_datasets[0]["primary"] == self.ip: return True return False d.addCallback(check_dataset_exists) return d d = loop_until(dataset_exists) d.addCallback(lambda ignored: (fs, dataset_id)) return d d = self.client.get(self.base_url + "/configuration/datasets") d.addCallback(treq.json_content) def got_dataset_configuration(configured_datasets): # form a mapping from names onto dataset objects configured_dataset_mapping = {} for dataset in configured_datasets: if dataset["metadata"].get("name"): configured_dataset_mapping[dataset["metadata"].get("name")] = dataset # iterate over the datasets we were asked to create by the docker client fs_create_deferreds = [] old_binds = [] if json_parsed['HostConfig']['Binds'] is not None: for bind in json_parsed['HostConfig']['Binds']: host_path, remainder = bind.split(":", 1) # TODO validation # if "/" in fs: # raise Exception("Not allowed flocker filesystems more than one level deep") if host_path.startswith("/flocker/"): fs = host_path[len("/flocker/"):] old_binds.append((fs, remainder)) # if a dataset exists, and is in the right place, we're cool. if fs in configured_dataset_mapping: dataset = configured_dataset_mapping[fs] if dataset["primary"] == self.ip: # simulate "immediate success" fs_create_deferreds.append(defer.succeed((fs, dataset["dataset_id"]))) else: # if a dataset exists, but is on the wrong server [TODO # and is not being used], then move it in place. 
d = self.client.post( self.base_url + "/configuration/datasets/%s" % ( dataset["dataset_id"].encode('ascii'),), json.dumps({"primary": self.ip}), headers={'Content-Type': ['application/json']}) d.addCallback(treq.json_content) d.addCallback(wait_until_volume_in_place, fs=fs) fs_create_deferreds.append(d) else: # if a dataset doesn't exist at all, create it on this server. d = self.client.post(self.base_url + "/configuration/datasets", json.dumps({"primary": self.ip, "metadata": {"name": fs}}), headers={'Content-Type': ['application/json']}) d.addCallback(treq.json_content) d.addCallback(wait_until_volume_in_place, fs=fs) fs_create_deferreds.append(d) d = defer.gatherResults(fs_create_deferreds) def got_created_and_moved_datasets(list_new_datasets): dataset_mapping = dict(list_new_datasets) new_binds = [] for fs, reminder in old_binds: new_binds.append("/flocker/%s.default.%s:%s" % (self.host_uuid, dataset_mapping[fs], remainder)) new_json_parsed = json_parsed.copy() new_json_parsed['HostConfig']['Binds'] = new_binds request.write(json.dumps({ "PowerstripProtocolVersion": 1, "ModifiedClientRequest": { "Method": "POST", "Request": request.uri, "Body": json.dumps(new_json_parsed)}})) request.finish() d.addCallback(got_created_and_moved_datasets) return d d.addCallback(got_dataset_configuration) d.addErrback(log.err, 'while processing configured datasets') return server.NOT_DONE_YET
class HTTPClientTests(TestCase): def setUp(self): self.agent = mock.Mock(Agent) self.client = HTTPClient(self.agent) self.fbp_patcher = mock.patch('treq.client.FileBodyProducer') self.FileBodyProducer = self.fbp_patcher.start() self.addCleanup(self.fbp_patcher.stop) self.mbp_patcher = mock.patch('treq.multipart.MultiPartProducer') self.MultiPartProducer = self.mbp_patcher.start() self.addCleanup(self.mbp_patcher.stop) def assertBody(self, expected): body = self.FileBodyProducer.mock_calls[0][1][0] self.assertEqual(body.read(), expected) def test_post(self): self.client.post('http://example.com/') self.agent.request.assert_called_once_with( b'POST', b'http://example.com/', Headers({b'accept-encoding': [b'gzip']}), None) def test_request_uri_idn(self): self.client.request('GET', u'http://č.net') self.agent.request.assert_called_once_with( b'GET', b'http://xn--bea.net', Headers({b'accept-encoding': [b'gzip']}), None) def test_request_uri_decodedurl(self): """ A URL may be passed as a `hyperlink.DecodedURL` object. It is converted to bytes when passed to the underlying agent. """ url = DecodedURL.from_text(u"https://example.org/foo") self.client.request("GET", url) self.agent.request.assert_called_once_with( b"GET", b"https://example.org/foo", Headers({b"accept-encoding": [b"gzip"]}), None, ) def test_request_uri_encodedurl(self): """ A URL may be passed as a `hyperlink.EncodedURL` object. It is converted to bytes when passed to the underlying agent. """ url = EncodedURL.from_text(u"https://example.org/foo") self.client.request("GET", url) self.agent.request.assert_called_once_with( b"GET", b"https://example.org/foo", Headers({b"accept-encoding": [b"gzip"]}), None, ) def test_request_uri_bytes_pass(self): """ The URL parameter may contain path segments or querystring parameters that are not valid UTF-8. These pass through. """ # This URL is http://example.com/hello?who=you, but "hello", "who", and # "you" are encoded as UTF-16. 
The particulars of the encoding aren't # important; what matters is that those segments can't be decoded by # Hyperlink's UTF-8 default. self.client.request( "GET", ("http://example.com/%FF%FEh%00e%00l%00l%00o%00" "?%FF%FEw%00h%00o%00=%FF%FEy%00o%00u%00"), ) self.agent.request.assert_called_once_with( b'GET', (b'http://example.com/%FF%FEh%00e%00l%00l%00o%00' b'?%FF%FEw%00h%00o%00=%FF%FEy%00o%00u%00'), Headers({b'accept-encoding': [b'gzip']}), None, ) def test_request_uri_plus_pass(self): """ URL parameters may contain spaces encoded as ``+``. These remain as such and are not mangled. This reproduces `Klein #339 <https://github.com/twisted/klein/issues/339>`_. """ self.client.request( "GET", "https://example.com/?foo+bar=baz+biff", ) self.agent.request.assert_called_once_with( b'GET', b"https://example.com/?foo+bar=baz+biff", Headers({b'accept-encoding': [b'gzip']}), None, ) def test_request_uri_idn_params(self): """ A URL that contains non-ASCII characters can be augmented with querystring parameters. This reproduces treq #264. """ self.client.request('GET', u'http://č.net', params={'foo': 'bar'}) self.agent.request.assert_called_once_with( b'GET', b'http://xn--bea.net/?foo=bar', Headers({b'accept-encoding': [b'gzip']}), None) def test_request_uri_hyperlink_params(self): """ The *params* argument augments an instance of `hyperlink.DecodedURL` passed as the *url* parameter, just as if it were a string. 
""" self.client.request( method="GET", url=DecodedURL.from_text(u"http://č.net"), params={"foo": "bar"}, ) self.agent.request.assert_called_once_with( b"GET", b"http://xn--bea.net/?foo=bar", Headers({b"accept-encoding": [b"gzip"]}), None, ) def test_request_case_insensitive_methods(self): self.client.request('gEt', 'http://example.com/') self.agent.request.assert_called_once_with( b'GET', b'http://example.com/', Headers({b'accept-encoding': [b'gzip']}), None) def test_request_query_params(self): self.client.request('GET', 'http://example.com/', params={'foo': ['bar']}) self.agent.request.assert_called_once_with( b'GET', b'http://example.com/?foo=bar', Headers({b'accept-encoding': [b'gzip']}), None) def test_request_tuple_query_values(self): self.client.request('GET', 'http://example.com/', params={'foo': ('bar', )}) self.agent.request.assert_called_once_with( b'GET', b'http://example.com/?foo=bar', Headers({b'accept-encoding': [b'gzip']}), None) def test_request_tuple_query_value_coercion(self): """ treq coerces non-string values passed to *params* like `urllib.urlencode()` """ self.client.request('GET', 'http://example.com/', params=[ ('text', u'A\u03a9'), ('text-seq', [u'A\u03a9']), ('bytes', [b'ascii']), ('bytes-seq', [b'ascii']), ('native', ['native']), ('native-seq', ['aa', 'bb']), ('int', 1), ('int-seq', (1, 2, 3)), ('none', None), ('none-seq', [None, None]), ]) self.agent.request.assert_called_once_with( b'GET', (b'http://example.com/?' b'text=A%CE%A9&text-seq=A%CE%A9' b'&bytes=ascii&bytes-seq=ascii' b'&native=native&native-seq=aa&native-seq=bb' b'&int=1&int-seq=1&int-seq=2&int-seq=3' b'&none=None&none-seq=None&none-seq=None'), Headers({b'accept-encoding': [b'gzip']}), None, ) def test_request_tuple_query_param_coercion(self): """ treq coerces non-string param names passed to *params* like `urllib.urlencode()` """ # A value used to test that it is never encoded or decoded. # It should be invalid UTF-8 or UTF-32 (at least). 
raw_bytes = b"\x00\xff\xfb" self.client.request('GET', 'http://example.com/', params=[ (u'text', u'A\u03a9'), (b'bytes', ['ascii', raw_bytes]), ('native', 'native'), (1, 'int'), (None, ['none']), ]) self.agent.request.assert_called_once_with( b'GET', (b'http://example.com/' b'?text=A%CE%A9&bytes=ascii&bytes=%00%FF%FB' b'&native=native&1=int&None=none'), Headers({b'accept-encoding': [b'gzip']}), None, ) def test_request_query_param_seps(self): """ When the characters ``&`` and ``#`` are passed to *params* as param names or values they are percent-escaped in the URL. This reproduces https://github.com/twisted/treq/issues/282 """ self.client.request('GET', 'http://example.com/', params=( ('ampersand', '&'), ('&', 'ampersand'), ('octothorpe', '#'), ('#', 'octothorpe'), )) self.agent.request.assert_called_once_with( b'GET', (b'http://example.com/' b'?ampersand=%26' b'&%26=ampersand' b'&octothorpe=%23' b'&%23=octothorpe'), Headers({b'accept-encoding': [b'gzip']}), None, ) def test_request_merge_query_params(self): self.client.request('GET', 'http://example.com/?baz=bax', params={'foo': ['bar', 'baz']}) self.agent.request.assert_called_once_with( b'GET', b'http://example.com/?baz=bax&foo=bar&foo=baz', Headers({b'accept-encoding': [b'gzip']}), None) def test_request_merge_tuple_query_params(self): self.client.request('GET', 'http://example.com/?baz=bax', params=[('foo', 'bar')]) self.agent.request.assert_called_once_with( b'GET', b'http://example.com/?baz=bax&foo=bar', Headers({b'accept-encoding': [b'gzip']}), None) def test_request_dict_single_value_query_params(self): self.client.request('GET', 'http://example.com/', params={'foo': 'bar'}) self.agent.request.assert_called_once_with( b'GET', b'http://example.com/?foo=bar', Headers({b'accept-encoding': [b'gzip']}), None) def test_request_data_dict(self): self.client.request('POST', 'http://example.com/', data={'foo': ['bar', 'baz']}) self.agent.request.assert_called_once_with( b'POST', b'http://example.com/', Headers({ 
b'Content-Type': [b'application/x-www-form-urlencoded'], b'accept-encoding': [b'gzip'] }), self.FileBodyProducer.return_value) self.assertBody(b'foo=bar&foo=baz') def test_request_data_single_dict(self): self.client.request('POST', 'http://example.com/', data={'foo': 'bar'}) self.agent.request.assert_called_once_with( b'POST', b'http://example.com/', Headers({ b'Content-Type': [b'application/x-www-form-urlencoded'], b'accept-encoding': [b'gzip'] }), self.FileBodyProducer.return_value) self.assertBody(b'foo=bar') def test_request_data_tuple(self): self.client.request('POST', 'http://example.com/', data=[('foo', 'bar')]) self.agent.request.assert_called_once_with( b'POST', b'http://example.com/', Headers({ b'Content-Type': [b'application/x-www-form-urlencoded'], b'accept-encoding': [b'gzip'] }), self.FileBodyProducer.return_value) self.assertBody(b'foo=bar') def test_request_data_file(self): temp_fn = self.mktemp() with open(temp_fn, "wb") as temp_file: temp_file.write(b'hello') self.client.request('POST', 'http://example.com/', data=open(temp_fn, 'rb')) self.agent.request.assert_called_once_with( b'POST', b'http://example.com/', Headers({b'accept-encoding': [b'gzip']}), self.FileBodyProducer.return_value) self.assertBody(b'hello') def test_request_json_dict(self): self.client.request('POST', 'http://example.com/', json={'foo': 'bar'}) self.agent.request.assert_called_once_with( b'POST', b'http://example.com/', Headers({ b'Content-Type': [b'application/json; charset=UTF-8'], b'accept-encoding': [b'gzip'] }), self.FileBodyProducer.return_value) self.assertBody(b'{"foo":"bar"}') def test_request_json_tuple(self): self.client.request('POST', 'http://example.com/', json=('foo', 1)) self.agent.request.assert_called_once_with( b'POST', b'http://example.com/', Headers({ b'Content-Type': [b'application/json; charset=UTF-8'], b'accept-encoding': [b'gzip'] }), self.FileBodyProducer.return_value) self.assertBody(b'["foo",1]') def test_request_json_number(self): 
self.client.request('POST', 'http://example.com/', json=1.) self.agent.request.assert_called_once_with( b'POST', b'http://example.com/', Headers({ b'Content-Type': [b'application/json; charset=UTF-8'], b'accept-encoding': [b'gzip'] }), self.FileBodyProducer.return_value) self.assertBody(b'1.0') def test_request_json_string(self): self.client.request('POST', 'http://example.com/', json='hello') self.agent.request.assert_called_once_with( b'POST', b'http://example.com/', Headers({ b'Content-Type': [b'application/json; charset=UTF-8'], b'accept-encoding': [b'gzip'] }), self.FileBodyProducer.return_value) self.assertBody(b'"hello"') def test_request_json_bool(self): self.client.request('POST', 'http://example.com/', json=True) self.agent.request.assert_called_once_with( b'POST', b'http://example.com/', Headers({ b'Content-Type': [b'application/json; charset=UTF-8'], b'accept-encoding': [b'gzip'] }), self.FileBodyProducer.return_value) self.assertBody(b'true') def test_request_json_none(self): self.client.request('POST', 'http://example.com/', json=None) self.agent.request.assert_called_once_with( b'POST', b'http://example.com/', Headers({ b'Content-Type': [b'application/json; charset=UTF-8'], b'accept-encoding': [b'gzip'] }), self.FileBodyProducer.return_value) self.assertBody(b'null') @mock.patch('treq.client.uuid.uuid4', mock.Mock(return_value="heyDavid")) def test_request_no_name_attachment(self): self.client.request('POST', 'http://example.com/', files={"name": BytesIO(b"hello")}) self.agent.request.assert_called_once_with( b'POST', b'http://example.com/', Headers({ b'accept-encoding': [b'gzip'], b'Content-Type': [b'multipart/form-data; boundary=heyDavid'] }), self.MultiPartProducer.return_value) FP = self.FileBodyProducer.return_value self.assertEqual( mock.call([('name', (None, 'application/octet-stream', FP))], boundary=b'heyDavid'), self.MultiPartProducer.call_args) @mock.patch('treq.client.uuid.uuid4', mock.Mock(return_value="heyDavid")) def 
test_request_named_attachment(self):  # NOTE(review): the "def" and its @mock.patch decorator are above this chunk
        # A files= entry given as (filename, fileobj) gets its content type
        # guessed from the filename extension (image.jpg -> image/jpeg).
        self.client.request('POST', 'http://example.com/',
                            files={"name": ('image.jpg', BytesIO(b"hello"))})

        self.agent.request.assert_called_once_with(
            b'POST', b'http://example.com/',
            Headers({
                b'accept-encoding': [b'gzip'],
                b'Content-Type': [b'multipart/form-data; boundary=heyDavid']
            }),
            self.MultiPartProducer.return_value)

        FP = self.FileBodyProducer.return_value
        self.assertEqual(
            mock.call([('name', ('image.jpg', 'image/jpeg', FP))],
                      boundary=b'heyDavid'),
            self.MultiPartProducer.call_args)

    @mock.patch('treq.client.uuid.uuid4', mock.Mock(return_value="heyDavid"))
    def test_request_named_attachment_and_ctype(self):
        # An explicit content type in the (filename, ctype, fileobj) tuple
        # overrides extension-based guessing.
        self.client.request(
            'POST', 'http://example.com/',
            files={"name": ('image.jpg', 'text/plain', BytesIO(b"hello"))})

        self.agent.request.assert_called_once_with(
            b'POST', b'http://example.com/',
            Headers({
                b'accept-encoding': [b'gzip'],
                b'Content-Type': [b'multipart/form-data; boundary=heyDavid']
            }),
            self.MultiPartProducer.return_value)

        FP = self.FileBodyProducer.return_value
        self.assertEqual(
            mock.call([('name', ('image.jpg', 'text/plain', FP))],
                      boundary=b'heyDavid'),
            self.MultiPartProducer.call_args)

    def test_request_files_tuple_too_short(self):
        """
        The `HTTPClient.request()` *files* argument requires tuples of
        length 2 or 3. It raises `TypeError` when the tuple is too short.
        """
        with self.assertRaises(TypeError) as c:
            self.client.request(
                "POST", b"http://example.com/",
                files=[("t1", ("foo.txt", ))],
            )
        self.assertIn("'t1' tuple has length 1", str(c.exception))

    def test_request_files_tuple_too_long(self):
        """
        The `HTTPClient.request()` *files* argument requires tuples of
        length 2 or 3. It raises `TypeError` when the tuple is too long.
        """
        with self.assertRaises(TypeError) as c:
            self.client.request(
                "POST", b"http://example.com/",
                files=[
                    ("t4", ("foo.txt", "text/plain", BytesIO(b"...\n"),
                            "extra!")),
                ],
            )
        self.assertIn("'t4' tuple has length 4", str(c.exception))

    @mock.patch('treq.client.uuid.uuid4', mock.Mock(return_value="heyDavid"))
    def test_request_mixed_params(self):
        # Combining data= pairs with files= produces a single multipart
        # producer containing both the form fields and the file parts.
        # NamedFile exercises content-type guessing from a file object's
        # .name attribute (image.png -> image/png).
        class NamedFile(BytesIO):
            def __init__(self, val):
                BytesIO.__init__(self, val)
                self.name = "image.png"

        self.client.request('POST', 'http://example.com/',
                            data=[("a", "b"), ("key", "val")],
                            files=[("file1", ('image.jpg', BytesIO(b"hello"))),
                                   ("file2", NamedFile(b"yo"))])

        self.agent.request.assert_called_once_with(
            b'POST', b'http://example.com/',
            Headers({
                b'accept-encoding': [b'gzip'],
                b'Content-Type': [b'multipart/form-data; boundary=heyDavid']
            }),
            self.MultiPartProducer.return_value)

        FP = self.FileBodyProducer.return_value
        self.assertEqual(
            mock.call([('a', 'b'),
                       ('key', 'val'),
                       ('file1', ('image.jpg', 'image/jpeg', FP)),
                       ('file2', ('image.png', 'image/png', FP))],
                      boundary=b'heyDavid'),
            self.MultiPartProducer.call_args)

    @mock.patch('treq.client.uuid.uuid4', mock.Mock(return_value="heyDavid"))
    def test_request_mixed_params_dict(self):
        # Dict-shaped data= and files=: a file with no name information
        # falls back to (None, 'application/octet-stream', ...).
        self.client.request('POST', 'http://example.com/',
                            data={"key": "a", "key2": "b"},
                            files={"file1": BytesIO(b"hey")})

        self.agent.request.assert_called_once_with(
            b'POST', b'http://example.com/',
            Headers({
                b'accept-encoding': [b'gzip'],
                b'Content-Type': [b'multipart/form-data; boundary=heyDavid']
            }),
            self.MultiPartProducer.return_value)

        FP = self.FileBodyProducer.return_value
        self.assertEqual(
            mock.call([('key', 'a'),
                       ('key2', 'b'),
                       ('file1', (None, 'application/octet-stream', FP))],
                      boundary=b'heyDavid'),
            self.MultiPartProducer.call_args)

    def test_request_unsupported_params_combination(self):
        # A raw file-like data= body cannot be combined with files=.
        self.assertRaises(ValueError,
                          self.client.request,
                          'POST', 'http://example.com/',
                          data=BytesIO(b"yo"),
                          files={"file1": BytesIO(b"hey")})

    def test_request_json_with_data(self):
        """
        Passing `HTTPClient.request()` both *data* and *json* parameters is
        invalid because *json* is ignored. This behavior is deprecated.
        """
        self.client.request(
            "POST", "http://example.com/",
            data=BytesIO(b"..."),
            json=None,  # NB: None is a valid value. It encodes to b'null'.
        )

        [w] = self.flushWarnings([self.test_request_json_with_data])
        self.assertEqual(DeprecationWarning, w["category"])
        self.assertEqual(
            ("Argument 'json' will be ignored because 'data' was also passed."
             " This will raise TypeError in the next treq release."),
            w['message'],
        )

    def test_request_json_with_files(self):
        """
        Passing `HTTPClient.request()` both *files* and *json* parameters is
        invalid because *json* is ignored. This behavior is deprecated.
        """
        self.client.request(
            "POST", "http://example.com/",
            files={"f1": ("foo.txt", "text/plain", BytesIO(b"...\n"))},
            json=["this is ignored"],
        )

        [w] = self.flushWarnings([self.test_request_json_with_files])
        self.assertEqual(DeprecationWarning, w["category"])
        self.assertEqual(
            ("Argument 'json' will be ignored because 'files' was also passed."
             " This will raise TypeError in the next treq release."),
            w['message'],
        )

    def test_request_dict_headers(self):
        # Dict headers: string values are wrapped in lists and encoded to
        # bytes; the default accept-encoding header is added alongside.
        self.client.request('GET', 'http://example.com/', headers={
            'User-Agent': 'treq/0.1dev',
            'Accept': ['application/json', 'text/plain']
        })

        self.agent.request.assert_called_once_with(
            b'GET', b'http://example.com/',
            Headers({
                b'User-Agent': [b'treq/0.1dev'],
                b'accept-encoding': [b'gzip'],
                b'Accept': [b'application/json', b'text/plain']
            }),
            None)

    def test_request_headers_object(self):
        """
        The *headers* parameter accepts a
        `twisted.web.http_headers.Headers` instance.
        """
        self.client.request(
            "GET", "https://example.com",
            headers=Headers({"X-Foo": ["bar"]}),
        )

        self.agent.request.assert_called_once_with(
            b"GET", b"https://example.com",
            Headers({
                "X-Foo": ["bar"],
                "Accept-Encoding": ["gzip"],
            }),
            None,
        )

    def test_request_headers_invalid_type(self):
        """
        `HTTPClient.request()` warns that headers of an unexpected type are
        invalid and that this behavior is deprecated.
        """
        self.client.request('GET', 'http://example.com', headers=[])

        [w] = self.flushWarnings([self.test_request_headers_invalid_type])
        self.assertEqual(DeprecationWarning, w['category'])
        self.assertIn(
            "headers must be a dict, twisted.web.http_headers.Headers, or None,",
            w['message'],
        )

    def test_request_dict_headers_invalid_values(self):
        """
        `HTTPClient.request()` warns that non-string header values are
        dropped and that this behavior is deprecated.
        """
        self.client.request('GET', 'http://example.com', headers=OrderedDict([
            ('none', None),
            ('one', 1),
            ('ok', 'string'),
        ]))

        # One warning per dropped (non-string) value, in insertion order.
        [w1, w2] = self.flushWarnings(
            [self.test_request_dict_headers_invalid_values])
        self.assertEqual(DeprecationWarning, w1['category'])
        self.assertEqual(DeprecationWarning, w2['category'])
        self.assertIn(
            "The value of headers key 'none' has non-string type",
            w1['message'],
        )
        self.assertIn(
            "The value of headers key 'one' has non-string type",
            w2['message'],
        )

    def test_request_invalid_param(self):
        """
        `HTTPClient.request()` rejects invalid keyword parameters with
        `TypeError`.
        """
        self.assertRaises(
            TypeError,
            self.client.request,
            "GET", b"http://example.com",
            invalid=True,
        )

    @with_clock
    def test_request_timeout_fired(self, clock):
        """
        Verify the request is cancelled if a response is not received
        within specified timeout period.
        """
        self.agent.request.return_value = d = Deferred()
        self.client.request('GET', 'http://example.com', timeout=2)

        # simulate we haven't gotten a response within timeout seconds
        clock.advance(3)

        # a deferred should have been cancelled
        self.failureResultOf(d, CancelledError)

    @with_clock
    def test_request_timeout_cancelled(self, clock):
        """
        Verify timeout is cancelled if a response is received before
        timeout period elapses.
        """
        self.agent.request.return_value = d = Deferred()
        self.client.request('GET', 'http://example.com', timeout=2)

        # simulate a response
        d.callback(mock.Mock(code=200, headers=Headers({})))

        # now advance the clock but since we already got a result,
        # a cancellation timer should have been cancelled
        clock.advance(3)

        self.successResultOf(d)

    def test_response_is_buffered(self):
        # By default the response body is buffered: a second deliverBody
        # call must not hit the underlying response again.
        response = mock.Mock(deliverBody=mock.Mock(),
                             headers=Headers({}))
        self.agent.request.return_value = succeed(response)

        d = self.client.get('http://www.example.com')

        result = self.successResultOf(d)

        protocol = mock.Mock(Protocol)
        result.deliverBody(protocol)
        self.assertEqual(response.deliverBody.call_count, 1)

        result.deliverBody(protocol)
        self.assertEqual(response.deliverBody.call_count, 1)

    def test_response_buffering_is_disabled_with_unbufferred_arg(self):
        response = mock.Mock(headers=Headers({}))
        self.agent.request.return_value = succeed(response)

        d = self.client.get('http://www.example.com', unbuffered=True)

        # YOLO public attribute.
        self.assertEqual(self.successResultOf(d).original, response)

    def test_request_post_redirect_denied(self):
        # A 302 in response to a POST is not followed; the request fails.
        response = mock.Mock(code=302, headers=Headers({'Location': ['/']}))
        self.agent.request.return_value = succeed(response)
        d = self.client.post('http://www.example.com')
        self.failureResultOf(d, ResponseFailed)

    def test_request_browser_like_redirects(self):
        # With browser_like_redirects=True the POST redirect is followed
        # (via RedirectAgent._handleRedirect, patched here) and the final
        # response is what the caller receives.
        response = mock.Mock(code=302, headers=Headers({'Location': ['/']}))
        self.agent.request.return_value = succeed(response)

        raw = mock.Mock(return_value=[])
        final_resp = mock.Mock(code=200, headers=mock.Mock(getRawHeaders=raw))

        with mock.patch('twisted.web.client.RedirectAgent._handleRedirect',
                        return_value=final_resp):
            d = self.client.post('http://www.google.com',
                                 browser_like_redirects=True,
                                 unbuffered=True)

        self.assertEqual(self.successResultOf(d).original, final_resp)
class HTTPClientTests(TestCase):
    """
    Unit tests for ``HTTPClient``.

    The client is constructed over a mocked ``Agent``, and the body
    producers (``FileBodyProducer``, ``MultiPartProducer``) are patched,
    so every test asserts on the exact arguments the client hands to the
    agent — no network I/O takes place.
    """

    def setUp(self):
        # Client under test, wired to a mock Agent so agent.request calls
        # can be asserted directly.
        self.agent = mock.Mock(Agent)
        self.client = HTTPClient(self.agent)

        self.fbp_patcher = mock.patch('treq.client.FileBodyProducer')
        self.FileBodyProducer = self.fbp_patcher.start()
        self.addCleanup(self.fbp_patcher.stop)

        self.mbp_patcher = mock.patch('treq.multipart.MultiPartProducer')
        self.MultiPartProducer = self.mbp_patcher.start()
        self.addCleanup(self.mbp_patcher.stop)

    def assertBody(self, expected):
        # The body passed to FileBodyProducer is the first positional
        # argument of its first recorded call.
        body = self.FileBodyProducer.mock_calls[0][1][0]
        self.assertEqual(body.read(), expected)

    # --- method and URL handling -------------------------------------

    def test_post(self):
        self.client.post('http://example.com/')
        self.agent.request.assert_called_once_with(
            b'POST', b'http://example.com/',
            Headers({b'accept-encoding': [b'gzip']}), None)

    def test_request_uri_idn(self):
        # Non-ASCII hostnames are IDNA-encoded before reaching the agent.
        self.client.request('GET', u'http://č.net')
        self.agent.request.assert_called_once_with(
            b'GET', b'http://xn--bea.net',
            Headers({b'accept-encoding': [b'gzip']}), None)

    def test_request_uri_decodedurl(self):
        """
        A URL may be passed as a `hyperlink.DecodedURL` object. It is
        converted to bytes when passed to the underlying agent.
        """
        url = DecodedURL.from_text(u"https://example.org/foo")
        self.client.request("GET", url)
        self.agent.request.assert_called_once_with(
            b"GET", b"https://example.org/foo",
            Headers({b"accept-encoding": [b"gzip"]}),
            None,
        )

    def test_request_uri_encodedurl(self):
        """
        A URL may be passed as a `hyperlink.EncodedURL` object. It is
        converted to bytes when passed to the underlying agent.
        """
        url = EncodedURL.from_text(u"https://example.org/foo")
        self.client.request("GET", url)
        self.agent.request.assert_called_once_with(
            b"GET", b"https://example.org/foo",
            Headers({b"accept-encoding": [b"gzip"]}),
            None,
        )

    def test_request_uri_idn_params(self):
        """
        A URL that contains non-ASCII characters can be augmented with
        querystring parameters.

        This reproduces treq #264.
        """
        self.client.request('GET', u'http://č.net', params={'foo': 'bar'})
        self.agent.request.assert_called_once_with(
            b'GET', b'http://xn--bea.net/?foo=bar',
            Headers({b'accept-encoding': [b'gzip']}), None)

    def test_request_uri_hyperlink_params(self):
        """
        The *params* argument augments an instance of `hyperlink.DecodedURL`
        passed as the *url* parameter, just as if it were a string.
        """
        self.client.request(
            method="GET",
            url=DecodedURL.from_text(u"http://č.net"),
            params={"foo": "bar"},
        )
        self.agent.request.assert_called_once_with(
            b"GET", b"http://xn--bea.net/?foo=bar",
            Headers({b"accept-encoding": [b"gzip"]}),
            None,
        )

    def test_request_case_insensitive_methods(self):
        # Method names are upper-cased before being sent to the agent.
        self.client.request('gEt', 'http://example.com/')
        self.agent.request.assert_called_once_with(
            b'GET', b'http://example.com/',
            Headers({b'accept-encoding': [b'gzip']}), None)

    # --- query-string parameters -------------------------------------

    def test_request_query_params(self):
        self.client.request('GET', 'http://example.com/',
                            params={'foo': ['bar']})
        self.agent.request.assert_called_once_with(
            b'GET', b'http://example.com/?foo=bar',
            Headers({b'accept-encoding': [b'gzip']}), None)

    def test_request_tuple_query_values(self):
        # Tuple values behave the same as list values.
        self.client.request('GET', 'http://example.com/',
                            params={'foo': ('bar',)})
        self.agent.request.assert_called_once_with(
            b'GET', b'http://example.com/?foo=bar',
            Headers({b'accept-encoding': [b'gzip']}), None)

    def test_request_tuple_query_value_coercion(self):
        """
        treq coerces non-string values passed to *params* like
        `urllib.urlencode()`
        """
        self.client.request('GET', 'http://example.com/', params=[
            ('text', u'A\u03a9'),
            ('text-seq', [u'A\u03a9']),
            ('bytes', [b'ascii']),
            ('bytes-seq', [b'ascii']),
            ('native', ['native']),
            ('native-seq', ['aa', 'bb']),
            ('int', 1),
            ('int-seq', (1, 2, 3)),
            ('none', None),
            ('none-seq', [None, None]),
        ])

        self.agent.request.assert_called_once_with(
            b'GET',
            (
                b'http://example.com/?'
                b'text=A%CE%A9&text-seq=A%CE%A9'
                b'&bytes=ascii&bytes-seq=ascii'
                b'&native=native&native-seq=aa&native-seq=bb'
                b'&int=1&int-seq=1&int-seq=2&int-seq=3'
                b'&none=None&none-seq=None&none-seq=None'
            ),
            Headers({b'accept-encoding': [b'gzip']}),
            None,
        )

    def test_request_tuple_query_param_coercion(self):
        """
        treq coerces non-string param names passed to *params* like
        `urllib.urlencode()`
        """
        self.client.request('GET', 'http://example.com/', params=[
            (u'text', u'A\u03a9'),
            (b'bytes', ['ascii']),
            ('native', 'native'),
            (1, 'int'),
            (None, ['none']),
        ])

        self.agent.request.assert_called_once_with(
            b'GET',
            (
                b'http://example.com/'
                b'?text=A%CE%A9&bytes=ascii'
                b'&native=native&1=int&None=none'
            ),
            Headers({b'accept-encoding': [b'gzip']}),
            None,
        )

    def test_request_query_param_seps(self):
        """
        When the characters ``&`` and ``#`` are passed to *params* as
        param names or values they are percent-escaped in the URL.

        This reproduces https://github.com/twisted/treq/issues/282
        """
        self.client.request('GET', 'http://example.com/', params=(
            ('ampersand', '&'),
            ('&', 'ampersand'),
            ('octothorpe', '#'),
            ('#', 'octothorpe'),
        ))

        self.agent.request.assert_called_once_with(
            b'GET',
            (
                b'http://example.com/'
                b'?ampersand=%26'
                b'&%26=ampersand'
                b'&octothorpe=%23'
                b'&%23=octothorpe'
            ),
            Headers({b'accept-encoding': [b'gzip']}),
            None,
        )

    def test_request_merge_query_params(self):
        # params= are appended after any query already present in the URL.
        self.client.request('GET', 'http://example.com/?baz=bax',
                            params={'foo': ['bar', 'baz']})
        self.agent.request.assert_called_once_with(
            b'GET', b'http://example.com/?baz=bax&foo=bar&foo=baz',
            Headers({b'accept-encoding': [b'gzip']}), None)

    def test_request_merge_tuple_query_params(self):
        self.client.request('GET', 'http://example.com/?baz=bax',
                            params=[('foo', 'bar')])
        self.agent.request.assert_called_once_with(
            b'GET', b'http://example.com/?baz=bax&foo=bar',
            Headers({b'accept-encoding': [b'gzip']}), None)

    def test_request_dict_single_value_query_params(self):
        self.client.request('GET', 'http://example.com/',
                            params={'foo': 'bar'})
        self.agent.request.assert_called_once_with(
            b'GET', b'http://example.com/?foo=bar',
            Headers({b'accept-encoding': [b'gzip']}), None)

    # --- form-encoded and file bodies --------------------------------

    def test_request_data_dict(self):
        # dict data= is form-urlencoded and sent via FileBodyProducer.
        self.client.request('POST', 'http://example.com/',
                            data={'foo': ['bar', 'baz']})

        self.agent.request.assert_called_once_with(
            b'POST', b'http://example.com/',
            Headers({b'Content-Type': [b'application/x-www-form-urlencoded'],
                     b'accept-encoding': [b'gzip']}),
            self.FileBodyProducer.return_value)

        self.assertBody(b'foo=bar&foo=baz')

    def test_request_data_single_dict(self):
        self.client.request('POST', 'http://example.com/',
                            data={'foo': 'bar'})

        self.agent.request.assert_called_once_with(
            b'POST', b'http://example.com/',
            Headers({b'Content-Type': [b'application/x-www-form-urlencoded'],
                     b'accept-encoding': [b'gzip']}),
            self.FileBodyProducer.return_value)

        self.assertBody(b'foo=bar')

    def test_request_data_tuple(self):
        self.client.request('POST', 'http://example.com/',
                            data=[('foo', 'bar')])

        self.agent.request.assert_called_once_with(
            b'POST', b'http://example.com/',
            Headers({b'Content-Type': [b'application/x-www-form-urlencoded'],
                     b'accept-encoding': [b'gzip']}),
            self.FileBodyProducer.return_value)

        self.assertBody(b'foo=bar')

    def test_request_data_file(self):
        # A file object passed as data= is streamed as-is; no Content-Type
        # is added.
        temp_fn = self.mktemp()

        with open(temp_fn, "wb") as temp_file:
            temp_file.write(b'hello')

        self.client.request('POST', 'http://example.com/',
                            data=open(temp_fn, 'rb'))

        self.agent.request.assert_called_once_with(
            b'POST', b'http://example.com/',
            Headers({b'accept-encoding': [b'gzip']}),
            self.FileBodyProducer.return_value)

        self.assertBody(b'hello')

    # --- json= bodies ------------------------------------------------

    def test_request_json_dict(self):
        self.client.request('POST', 'http://example.com/',
                            json={'foo': 'bar'})
        self.agent.request.assert_called_once_with(
            b'POST', b'http://example.com/',
            Headers({b'Content-Type': [b'application/json; charset=UTF-8'],
                     b'accept-encoding': [b'gzip']}),
            self.FileBodyProducer.return_value)
        # Compact separators: no space after ':' or ','.
        self.assertBody(b'{"foo":"bar"}')

    def test_request_json_tuple(self):
        self.client.request('POST', 'http://example.com/',
                            json=('foo', 1))
        self.agent.request.assert_called_once_with(
            b'POST', b'http://example.com/',
            Headers({b'Content-Type': [b'application/json; charset=UTF-8'],
                     b'accept-encoding': [b'gzip']}),
            self.FileBodyProducer.return_value)
        self.assertBody(b'["foo",1]')

    def test_request_json_number(self):
        self.client.request('POST', 'http://example.com/', json=1.)
        self.agent.request.assert_called_once_with(
            b'POST', b'http://example.com/',
            Headers({b'Content-Type': [b'application/json; charset=UTF-8'],
                     b'accept-encoding': [b'gzip']}),
            self.FileBodyProducer.return_value)
        self.assertBody(b'1.0')

    def test_request_json_string(self):
        self.client.request('POST', 'http://example.com/', json='hello')
        self.agent.request.assert_called_once_with(
            b'POST', b'http://example.com/',
            Headers({b'Content-Type': [b'application/json; charset=UTF-8'],
                     b'accept-encoding': [b'gzip']}),
            self.FileBodyProducer.return_value)
        self.assertBody(b'"hello"')

    def test_request_json_bool(self):
        self.client.request('POST', 'http://example.com/', json=True)
        self.agent.request.assert_called_once_with(
            b'POST', b'http://example.com/',
            Headers({b'Content-Type': [b'application/json; charset=UTF-8'],
                     b'accept-encoding': [b'gzip']}),
            self.FileBodyProducer.return_value)
        self.assertBody(b'true')

    def test_request_json_none(self):
        self.client.request('POST', 'http://example.com/', json=None)
        self.agent.request.assert_called_once_with(
            b'POST', b'http://example.com/',
            Headers({b'Content-Type': [b'application/json; charset=UTF-8'],
                     b'accept-encoding': [b'gzip']}),
            self.FileBodyProducer.return_value)
        self.assertBody(b'null')

    # --- multipart file uploads --------------------------------------
    # uuid4 is patched so the multipart boundary is deterministic.

    @mock.patch('treq.client.uuid.uuid4', mock.Mock(return_value="heyDavid"))
    def test_request_no_name_attachment(self):
        # A bare file object with no name info becomes
        # (None, 'application/octet-stream', producer).
        self.client.request(
            'POST', 'http://example.com/', files={"name": BytesIO(b"hello")})
        self.agent.request.assert_called_once_with(
            b'POST', b'http://example.com/',
            Headers({
                b'accept-encoding': [b'gzip'],
                b'Content-Type': [b'multipart/form-data; boundary=heyDavid']}),
            self.MultiPartProducer.return_value)
        FP = self.FileBodyProducer.return_value
        self.assertEqual(
            mock.call(
                [('name', (None, 'application/octet-stream', FP))],
                boundary=b'heyDavid'),
            self.MultiPartProducer.call_args)

    @mock.patch('treq.client.uuid.uuid4', mock.Mock(return_value="heyDavid"))
    def test_request_named_attachment(self):
        # Content type is guessed from the filename (image.jpg -> image/jpeg).
        self.client.request(
            'POST', 'http://example.com/', files={
                "name": ('image.jpg', BytesIO(b"hello"))})
        self.agent.request.assert_called_once_with(
            b'POST', b'http://example.com/',
            Headers({
                b'accept-encoding': [b'gzip'],
                b'Content-Type': [b'multipart/form-data; boundary=heyDavid']}),
            self.MultiPartProducer.return_value)
        FP = self.FileBodyProducer.return_value
        self.assertEqual(
            mock.call(
                [('name', ('image.jpg', 'image/jpeg', FP))],
                boundary=b'heyDavid'),
            self.MultiPartProducer.call_args)

    @mock.patch('treq.client.uuid.uuid4', mock.Mock(return_value="heyDavid"))
    def test_request_named_attachment_and_ctype(self):
        # An explicit content type overrides filename-based guessing.
        self.client.request(
            'POST', 'http://example.com/', files={
                "name": ('image.jpg', 'text/plain', BytesIO(b"hello"))})
        self.agent.request.assert_called_once_with(
            b'POST', b'http://example.com/',
            Headers({
                b'accept-encoding': [b'gzip'],
                b'Content-Type': [b'multipart/form-data; boundary=heyDavid']}),
            self.MultiPartProducer.return_value)
        FP = self.FileBodyProducer.return_value
        self.assertEqual(
            mock.call(
                [('name', ('image.jpg', 'text/plain', FP))],
                boundary=b'heyDavid'),
            self.MultiPartProducer.call_args)

    @mock.patch('treq.client.uuid.uuid4', mock.Mock(return_value="heyDavid"))
    def test_request_mixed_params(self):
        # NamedFile exercises content-type guessing from a file object's
        # .name attribute (image.png -> image/png).
        class NamedFile(BytesIO):
            def __init__(self, val):
                BytesIO.__init__(self, val)
                self.name = "image.png"

        self.client.request(
            'POST', 'http://example.com/',
            data=[("a", "b"), ("key", "val")],
            files=[
                ("file1", ('image.jpg', BytesIO(b"hello"))),
                ("file2", NamedFile(b"yo"))])
        self.agent.request.assert_called_once_with(
            b'POST', b'http://example.com/',
            Headers({
                b'accept-encoding': [b'gzip'],
                b'Content-Type': [b'multipart/form-data; boundary=heyDavid']}),
            self.MultiPartProducer.return_value)
        FP = self.FileBodyProducer.return_value
        self.assertEqual(
            mock.call([
                ('a', 'b'),
                ('key', 'val'),
                ('file1', ('image.jpg', 'image/jpeg', FP)),
                ('file2', ('image.png', 'image/png', FP))],
                boundary=b'heyDavid'),
            self.MultiPartProducer.call_args)

    @mock.patch('treq.client.uuid.uuid4', mock.Mock(return_value="heyDavid"))
    def test_request_mixed_params_dict(self):
        self.client.request(
            'POST', 'http://example.com/',
            data={"key": "a", "key2": "b"},
            files={"file1": BytesIO(b"hey")})
        self.agent.request.assert_called_once_with(
            b'POST', b'http://example.com/',
            Headers({
                b'accept-encoding': [b'gzip'],
                b'Content-Type': [b'multipart/form-data; boundary=heyDavid']}),
            self.MultiPartProducer.return_value)
        FP = self.FileBodyProducer.return_value
        self.assertEqual(
            mock.call([
                ('key', 'a'),
                ('key2', 'b'),
                ('file1', (None, 'application/octet-stream', FP))],
                boundary=b'heyDavid'),
            self.MultiPartProducer.call_args)

    def test_request_unsupported_params_combination(self):
        # A raw file-like data= body cannot be combined with files=.
        self.assertRaises(ValueError,
                          self.client.request,
                          'POST', 'http://example.com/',
                          data=BytesIO(b"yo"),
                          files={"file1": BytesIO(b"hey")})

    # --- headers, timeouts, buffering, redirects ---------------------

    def test_request_dict_headers(self):
        self.client.request('GET', 'http://example.com/', headers={
            'User-Agent': 'treq/0.1dev',
            'Accept': ['application/json', 'text/plain']
        })

        self.agent.request.assert_called_once_with(
            b'GET', b'http://example.com/',
            Headers({b'User-Agent': [b'treq/0.1dev'],
                     b'accept-encoding': [b'gzip'],
                     b'Accept': [b'application/json', b'text/plain']}),
            None)

    @with_clock
    def test_request_timeout_fired(self, clock):
        """
        Verify the request is cancelled if a response is not received
        within specified timeout period.
        """
        self.agent.request.return_value = d = Deferred()
        self.client.request('GET', 'http://example.com', timeout=2)

        # simulate we haven't gotten a response within timeout seconds
        clock.advance(3)

        # a deferred should have been cancelled
        self.failureResultOf(d, CancelledError)

    @with_clock
    def test_request_timeout_cancelled(self, clock):
        """
        Verify timeout is cancelled if a response is received before
        timeout period elapses.
        """
        self.agent.request.return_value = d = Deferred()
        self.client.request('GET', 'http://example.com', timeout=2)

        # simulate a response
        d.callback(mock.Mock(code=200, headers=Headers({})))

        # now advance the clock but since we already got a result,
        # a cancellation timer should have been cancelled
        clock.advance(3)

        self.successResultOf(d)

    def test_response_is_buffered(self):
        # By default the response body is buffered: a second deliverBody
        # call must not hit the underlying response again.
        response = mock.Mock(deliverBody=mock.Mock(),
                             headers=Headers({}))
        self.agent.request.return_value = succeed(response)

        d = self.client.get('http://www.example.com')

        result = self.successResultOf(d)

        protocol = mock.Mock(Protocol)
        result.deliverBody(protocol)
        self.assertEqual(response.deliverBody.call_count, 1)

        result.deliverBody(protocol)
        self.assertEqual(response.deliverBody.call_count, 1)

    def test_response_buffering_is_disabled_with_unbufferred_arg(self):
        response = mock.Mock(headers=Headers({}))
        self.agent.request.return_value = succeed(response)

        d = self.client.get('http://www.example.com', unbuffered=True)

        # YOLO public attribute.
        self.assertEqual(self.successResultOf(d).original, response)

    def test_request_post_redirect_denied(self):
        # A 302 in response to a POST is not followed; the request fails.
        response = mock.Mock(code=302, headers=Headers({'Location': ['/']}))
        self.agent.request.return_value = succeed(response)
        d = self.client.post('http://www.example.com')
        self.failureResultOf(d, ResponseFailed)

    def test_request_browser_like_redirects(self):
        # With browser_like_redirects=True the POST redirect is followed
        # (via RedirectAgent._handleRedirect, patched here) and the final
        # response is what the caller receives.
        response = mock.Mock(code=302, headers=Headers({'Location': ['/']}))
        self.agent.request.return_value = succeed(response)

        raw = mock.Mock(return_value=[])
        final_resp = mock.Mock(code=200, headers=mock.Mock(getRawHeaders=raw))

        with mock.patch('twisted.web.client.RedirectAgent._handleRedirect',
                        return_value=final_resp):
            d = self.client.post('http://www.google.com',
                                 browser_like_redirects=True,
                                 unbuffered=True)

        self.assertEqual(self.successResultOf(d).original, final_resp)