コード例 #1
0
ファイル: test_connectionpool.py プロジェクト: kojit/urllib3
    def test_for_double_release(self):
        """Releasing a connection twice must not grow the pool's queue."""
        max_size = 5

        # Fresh pool: no connections created yet, queue holds only placeholders.
        pool = HTTPConnectionPool(self.host, self.port, maxsize=max_size)
        self.assertEqual(pool.num_connections, 0)
        self.assertEqual(pool.pool.qsize(), max_size)

        # Remove one slot so a double-release would show up as queue growth.
        pool.pool.get()
        self.assertEqual(pool.pool.qsize(), max_size - 1)

        # A plain request checks its connection back in exactly once.
        pool.urlopen('GET', '/')
        self.assertEqual(pool.pool.qsize(), max_size - 1)

        # Without preloading the body, the connection stays checked out.
        pool.urlopen('GET', '/', preload_content=False)
        self.assertEqual(pool.pool.qsize(), max_size - 2)

        pool.urlopen('GET', '/')
        self.assertEqual(pool.pool.qsize(), max_size - 2)

        # Reading the body must not release the connection a second time.
        pool.urlopen('GET', '/').data
        self.assertEqual(pool.pool.qsize(), max_size - 2)

        pool.urlopen('GET', '/')
        self.assertEqual(pool.pool.qsize(), max_size - 2)
コード例 #2
0
class TestFileBodiesOnRetryOrRedirect(HTTPDummyServerTestCase):
    def setUp(self):
        # Short timeout so a body that cannot be rewound fails fast.
        self.pool = HTTPConnectionPool(self.host, self.port, timeout=0.1)
        self.addCleanup(self.pool.close)

    def test_retries_put_filehandle(self):
        """HTTP PUT retry with a file-like object should not timeout"""
        retry = Retry(total=3, status_forcelist=[418])
        # httplib reads in 8k chunks; use a larger content length
        content_length = 65535
        payload = b'A' * content_length
        headers = {'test-name': 'test_retries_put_filehandle',
                   'Content-Length': str(content_length)}
        response = self.pool.urlopen(
            'PUT', '/successful_retry',
            headers=headers,
            retries=retry,
            body=io.BytesIO(payload),
            assert_same_host=False, redirect=False)
        self.assertEqual(response.status, 200)

    def test_redirect_put_file(self):
        """PUT with file object should work with a redirection response"""
        retry = Retry(total=3, status_forcelist=[418])
        # httplib reads in 8k chunks; use a larger content length
        content_length = 65535
        payload = b'A' * content_length
        headers = {'test-name': 'test_redirect_put_file',
                   'Content-Length': str(content_length)}
        response = self.pool.urlopen(
            'PUT', '/redirect?target=/echo&status=307',
            headers=headers,
            retries=retry,
            body=io.BytesIO(payload),
            assert_same_host=False, redirect=True)
        self.assertEqual(response.status, 200)
        # The echoed body proves the file was rewound and re-sent in full.
        self.assertEqual(response.data, payload)

    def test_redirect_with_failed_tell(self):
        """Abort request if failed to get a position from tell()"""
        class BadTellObject(io.BytesIO):

            def tell(self):
                raise IOError

        body = BadTellObject(b'the data')
        # httplib uses fileno if Content-Length isn't supplied,
        # which is unsupported by BytesIO.
        headers = {'Content-Length': '8'}
        try:
            self.pool.urlopen('PUT', '/redirect?target=/successful_retry',
                              headers=headers, body=body)
            self.fail('PUT successful despite failed rewind.')
        except UnrewindableBodyError as e:
            self.assertIn('Unable to record file position for', str(e))
コード例 #3
0
    def test_provides_default_host_header(self):
        """A chunked request without a Host header must get exactly one added.

        Fix: register pool.close() as cleanup (the sibling
        test_removes_duplicate_host_header does; this one leaked the pool).
        """
        self.start_chunked_handler()
        chunks = ['foo', 'bar', '', 'bazzzzzzzzzzzzzzzzzzzzzz']
        pool = HTTPConnectionPool(self.host, self.port, retries=False)
        self.addCleanup(pool.close)
        pool.urlopen('GET', '/', chunks, chunked=True)

        # Everything before the first blank line is the request header block;
        # drop the request line itself before scanning headers.
        header_block = self.buffer.split(b'\r\n\r\n', 1)[0].lower()
        header_lines = header_block.split(b'\r\n')[1:]

        host_headers = [x for x in header_lines if x.startswith(b'host')]
        self.assertEqual(len(host_headers), 1)
コード例 #4
0
ファイル: test_connectionpool.py プロジェクト: gruns/urllib3
 def test_conn_closed(self):
     """After a read timeout, the pooled connection's socket must be dead."""
     pool = HTTPConnectionPool(self.host, self.port, timeout=0.001)
     connection = pool._get_conn()
     pool._put_conn(connection)
     try:
         # The handler sleeps longer than the pool timeout, forcing a timeout.
         pool.urlopen('GET', '/sleep?seconds=0.005')
         self.fail("The request should fail with a timeout error.")
     except ReadTimeoutError:
         # Any further recv() on the timed-out socket must raise.
         if connection.sock:
             self.assertRaises(socket.error, connection.sock.recv, 1024)
     finally:
         pool._put_conn(connection)
コード例 #5
0
    def test_removes_duplicate_host_header(self):
        """A caller-supplied Host header must not be sent twice."""
        self.start_chunked_handler()
        pool = HTTPConnectionPool(self.host, self.port, retries=False)
        self.addCleanup(pool.close)
        body_chunks = ['foo', 'bar', '', 'bazzzzzzzzzzzzzzzzzzzzzz']
        pool.urlopen(
            'GET', '/', body_chunks, headers={'Host': 'test.org'}, chunked=True
        )

        # Everything before the first blank line is the request header block;
        # skip the request line itself before scanning for Host headers.
        raw_headers = self.buffer.split(b'\r\n\r\n', 1)[0].lower()
        lines = raw_headers.split(b'\r\n')[1:]

        host_headers = [line for line in lines if line.startswith(b'host')]
        self.assertEqual(len(host_headers), 1)
コード例 #6
0
class TestRetryPoolSize(HTTPDummyServerTestCase):
    def setUp(self):
        # One retry against a status in the forcelist, without raising on it.
        self.pool = HTTPConnectionPool(
            self.host, self.port, maxsize=10, block=True,
            retries=Retry(
                total=1,
                raise_on_status=False,
                status_forcelist=[404],
            ))
        self.addCleanup(self.pool.close)

    def test_pool_size_retry(self):
        """A retried request must reuse its connection, not open another."""
        self.pool.urlopen('GET', '/not_found', preload_content=False)
        assert self.pool.num_connections == 1
コード例 #7
0
    def test_chunks(self):
        """Chunked upload: non-empty chunks are framed; empty chunks skipped.

        Fix: register pool.close() as cleanup *before* issuing the request —
        previously it was registered afterwards, so the pool leaked whenever
        urlopen() raised.
        """
        self.start_chunked_handler()
        chunks = ['foo', 'bar', '', 'bazzzzzzzzzzzzzzzzzzzzzz']
        pool = HTTPConnectionPool(self.host, self.port, retries=False)
        self.addCleanup(pool.close)
        pool.urlopen('GET', '/', chunks, headers=dict(DNT='1'), chunked=True)

        self.assertIn(b'Transfer-Encoding', self.buffer)
        body = self.buffer.split(b'\r\n\r\n', 1)[1]
        lines = body.split(b'\r\n')
        # Empty chunks should have been skipped, as this could not be distinguished
        # from terminating the transmission
        for i, chunk in enumerate([c for c in chunks if c]):
            # Each chunk occupies two lines: hex length, then the data itself.
            self.assertEqual(lines[i * 2], hex(len(chunk))[2:].encode('utf-8'))
            self.assertEqual(lines[i * 2 + 1], chunk.encode('utf-8'))
コード例 #8
0
ファイル: test_socketlevel.py プロジェクト: Altynai/urllib3
    def test_delayed_body_read_timeout(self):
        """A short read timeout must trip while waiting for a delayed body."""
        timed_out = Event()

        def socket_handler(listener):
            sock = listener.accept()[0]
            buf = b''
            body = 'Hi'
            # Accumulate until the end of the request headers.  The original
            # code overwrote `buf` on every recv(), so a request split across
            # multiple reads could never satisfy the endswith() check.
            while not buf.endswith(b'\r\n\r\n'):
                buf += sock.recv(65536)
            sock.send(('HTTP/1.1 200 OK\r\n'
                       'Content-Type: text/plain\r\n'
                       'Content-Length: %d\r\n'
                       '\r\n' % len(body)).encode('utf-8'))

            # Hold the body back until the client side has timed out.
            timed_out.wait()
            sock.send(body.encode('utf-8'))
            sock.close()

        self._start_server(socket_handler)
        pool = HTTPConnectionPool(self.host, self.port)

        response = pool.urlopen('GET', '/', retries=0, preload_content=False,
                                timeout=Timeout(connect=1, read=0.001))
        try:
            self.assertRaises(ReadTimeoutError, response.read)
        finally:
            # Always unblock the handler thread so the server can shut down.
            timed_out.set()
コード例 #9
0
    def test_conn_closed(self):
        """A read timeout should leave the pooled connection's socket closed."""
        block_event = Event()
        self.start_basic_handler(block_send=block_event, num=1)

        pool = HTTPConnectionPool(self.host, self.port,
                                  timeout=SHORT_TIMEOUT, retries=False)
        connection = pool._get_conn()
        pool._put_conn(connection)
        try:
            try:
                pool.urlopen('GET', '/')
            except ReadTimeoutError:
                # The timed-out socket must be unusable afterwards.
                if connection.sock:
                    self.assertRaises(socket.error, connection.sock.recv, 1024)
            else:
                self.fail("The request should fail with a timeout error.")
        finally:
            pool._put_conn(connection)

        block_event.set()
コード例 #10
0
    def _test_body(self, data):
        """Send *data* as a chunked request and verify the wire framing."""
        self.start_chunked_handler()
        pool = HTTPConnectionPool(self.host, self.port, retries=False)
        self.addCleanup(pool.close)

        pool.urlopen('GET', '/', data, chunked=True)
        header, body = self.buffer.split(b'\r\n\r\n', 1)

        self.assertIn(b'Transfer-Encoding: chunked', header.split(b'\r\n'))
        if not data:
            # No payload: only the terminating zero-length chunk is sent.
            self.assertEqual(body, b'0\r\n\r\n')
            return

        encoded = data if isinstance(data, bytes) else data.encode('utf-8')
        self.assertIn(b'\r\n' + encoded + b'\r\n', body)
        self.assertTrue(body.endswith(b'\r\n0\r\n\r\n'))

        # The body's first line is the hex-encoded length of the chunk.
        stated_len = int(body.split(b'\r\n', 1)[0], 16)
        self.assertEqual(stated_len, len(encoded))
コード例 #11
0
ファイル: Dog.py プロジェクト: tlechauve/Dog
	def download(self, url):
		"""Download *url* and save it under the filename given by the
		response's content-disposition header; on any failure, record
		the url in self.failed.

		Fixes: Python 2-only ``except Exception, e`` replaced with the
		``as e`` form (valid on 2.6+ and 3.x); the output file is now
		opened via ``with`` so it is closed even if write() raises.
		"""
		http = HTTPConnectionPool(self.hostname)
		try:
			request = http.urlopen('GET', url)
			headerContent = request.headers.get('content-disposition')
			# Extract the quoted filename; a missing header or non-matching
			# value raises here and lands in the except clause below.
			m = re.match('.*"(.*)".*', headerContent)
			filename = m.groups()[0]
			with open('/'.join([self.outputDir, filename]), 'wb') as localfile:
				localfile.write(request.data)
		except Exception as e:
			logging.exception(e)
			self.failed.append(url)
コード例 #12
0
class deviceapi(object):
    """Minimal Zenoss JSON API client backed by a urllib3 connection pool."""

    def __init__(self, zenoss_server, zenoss_username, zenoss_password):
        # Only store connection details; no network traffic until conn().
        self.zenoss_server = zenoss_server
        self.username = zenoss_username
        self.password = zenoss_password

    def conn(self):
        """Log in to Zenoss and return the session-cookie header dict."""
        self.loginParams = {'came_fraaaaaom': 'http://' + self.zenoss_server + ':8080/zport/dmd',
                            '__ac_name': self.username,
                            '__ac_password': self.password,
                            'submitted': 'true'
                            }
        self.reqheaders = {'Content-Type': 'application/json'}
        self.reqCount = 1
        self.pool = HTTPConnectionPool(self.zenoss_server, port=8080, maxsize=5)
        self.loginResponse = self.pool.request('POST', '/zport/acl_users/cookieAuthHelper/login', fields=self.loginParams, redirect=False)
        self.cookie = {'cookie': self.loginResponse.getheader('set-cookie')}
        return self.cookie

    def operate(self, action, method, datalist=None, cookie=None):
        """POST one rpc call to the router mapped to *action* and return the
        response.

        Fix: ``datalist``/``cookie`` previously used mutable default
        arguments ([] and {}), which are shared between calls; None
        sentinels preserve the call signature without that pitfall.
        """
        if datalist is None:
            datalist = []
        if cookie is None:
            cookie = {}
        # Map router action names to their URL path components.
        self.routers = {'MessagingRouter': 'messaging',
                        'EventsRouter': 'evconsole',
                        'ProcessRouter': 'process',
                        'ServiceRouter': 'service',
                        'DeviceRouter': 'device',
                        'NetworkRouter': 'network',
                        'TemplateRouter': 'template',
                        'DetailNavRouter': 'detailnav',
                        'ReportRouter': 'report',
                        'MibRouter': 'mib',
                        'ZenPackRouter': 'zenpack'}

        self.cookie = cookie
        self.reqdata = [{
            'type': 'rpc',
            'data': datalist,
            'method': method,
            'action': action,
            'tid': self.reqCount
        }]
        self.reqCount += 1
        # Attach the session cookie to every subsequent request.
        self.reqheaders.update(self.cookie)
        self.operateResponse = self.pool.urlopen('POST', '/zport/dmd/' + self.routers[action] + '_router', body=json.dumps(self.reqdata), headers=self.reqheaders)
        if self.operateResponse.getheaders().getlist('Content-Type')[0] != 'application/json':
            print('\033[1;31;47mLogin Failed, Please check your username and password !\033[0m')
            sys.exit(1)
        return self.operateResponse
コード例 #13
0
class HttpClient(object):
    """
    Implements an mPlane HTTP client endpoint for component-push workflows.
    This client endpoint can retrieve capabilities from a given URL, then post
    Specifications to the component and retrieve Results or Receipts; it can
    also present Redemptions to retrieve Results.

    Caches retrieved Capabilities, Receipts, and Results.

    Fixes: ``measurement_at`` was missing its ``self`` parameter (every
    instance call raised TypeError); the bare ``except:`` in
    ``handle_message`` is narrowed to ``except Exception``.
    """

    def __init__(self, security, posturl, certfile=None):
        # store urls
        self._posturl = posturl
        url = urllib3.util.parse_url(posturl)

        if security == True:
            # TLS: read cert/key/CA paths from settings and verify that each
            # file exists before building the HTTPS pool.
            cert = mplane.utils.normalize_path(mplane.utils.read_setting(certfile, "cert"))
            key = mplane.utils.normalize_path(mplane.utils.read_setting(certfile, "key"))
            ca = mplane.utils.normalize_path(mplane.utils.read_setting(certfile, "ca-chain"))
            mplane.utils.check_file(cert)
            mplane.utils.check_file(key)
            mplane.utils.check_file(ca)
            self.pool = HTTPSConnectionPool(url.host, url.port, key_file=key, cert_file=cert, ca_certs=ca)
        else:
            self.pool = HTTPConnectionPool(url.host, url.port)

        print("new client: " + self._posturl)

        # empty capability and measurement lists
        self._capabilities = OrderedDict()
        self._receipts = []
        self._results = []

    def get_mplane_reply(self, url, postmsg=None):
        """
        Given a URL, parses the object at the URL as an mPlane
        message and processes it.

        Given a message to POST, sends the message to the given
        URL and processes the reply as an mPlane message.

        Returns a parsed mPlane message on success, or a
        ``[status, body]`` pair on any non-mPlane reply.
        """
        if postmsg is not None:
            print(postmsg)
            res = self.pool.urlopen('POST', url,
                    body=postmsg.encode("utf-8"),
                    headers={"content-type": "application/x-mplane+json"})
        else:
            res = self.pool.request('GET', url)
        print("get_mplane_reply "+url+" "+str(res.status)+" Content-Type "+res.getheader("content-type"))
        if res.status == 200 and \
           res.getheader("content-type") == "application/x-mplane+json":
            print("parsing json")
            return mplane.model.parse_json(res.data.decode("utf-8"))
        else:
            # Non-mPlane reply: hand back status and raw body for reporting.
            return [res.status, res.data.decode("utf-8")]

    def handle_message(self, msg, dn = None):
        """
        Processes a message. Caches capabilities, receipts,
        and results, and handles Exceptions.
        """
        try:
            print("got message:")
            print(mplane.model.unparse_yaml(msg))

            if isinstance(msg, mplane.model.Capability):
                self.add_capability(msg, dn)
            elif isinstance(msg, mplane.model.Receipt):
                self.add_receipt(msg)
            elif isinstance(msg, mplane.model.Result):
                self.add_result(msg)
            elif isinstance(msg, mplane.model.Exception):
                self._handle_exception(msg)
            else:
                pass
        except Exception:
            # msg was a [status, body] error pair from get_mplane_reply, not
            # an mPlane message.  Narrowed from a bare except so that
            # KeyboardInterrupt/SystemExit still propagate.
            print("Supervisor returned: " + str(msg[0]) + " - " + msg[1])

    def add_capability(self, cap, dn):
        """Add a capability to the capability cache"""
        print("adding "+repr(cap))
        mplane.utils.add_value_to(self._capabilities, dn, cap)

    def clear_capabilities(self):
        """Clear the capability cache"""
        self._capabilities = OrderedDict()

    def retrieve_capabilities(self):
        """
        Given a URL, retrieves an object, parses it as an HTML page,
        extracts links to capabilities, and retrieves and processes them
        into the capability cache.
        """
        self.clear_capabilities()
        url = "/" + S_CAPABILITY_PATH

        print("getting capabilities from " + url)
        res = self.pool.request('GET', url)
        if res.status == 200:
            body = json.loads(res.data.decode("utf-8"))
            for key in body:
                print(key)
                print(body[key])
                caps = mplane.utils.split_stmt_list(json.dumps(body[key]))
                for cap in caps:
                    self.handle_message(cap, key)
        else:
            print("Supervisor returned: " + str(res.status) + " - " + res.data.decode("utf-8"))

    def receipts(self):
        """Iterate over receipts (pending measurements)"""
        yield from self._receipts

    def add_receipt(self, msg):
        """Add a receipt. Check for duplicates."""
        if msg.get_token() not in [receipt.get_token() for receipt in self.receipts()]:
            self._receipts.append(msg)

    def redeem_receipt(self, msg):
        # POST a Redemption for the receipt and process whatever comes back.
        self.handle_message(self.get_mplane_reply("/"+S_RESULT_PATH, mplane.model.unparse_json(mplane.model.Redemption(receipt=msg))))

    def redeem_receipts(self):
        """
        Send all pending receipts to the Component,
        attempting to retrieve results.
        """
        for receipt in self.receipts():
            self.redeem_receipt(receipt)

    def _delete_receipt_for(self, token):
        self._receipts = list(filter(lambda msg: msg.get_token() != token, self._receipts))

    def results(self):
        """Iterate over results (completed measurements)"""
        yield from self._results

    def add_result(self, msg):
        """Add a result. Check for duplicates; drop the matching receipt."""
        if msg.get_token() not in [result.get_token() for result in self.results()]:
            self._results.append(msg)
            self._delete_receipt_for(msg.get_token())

    def measurements(self):
        """Iterate over all measurements (results, then receipts)"""
        yield from self._results
        yield from self._receipts

    def measurement_at(self, index):
        """Retrieve a measurement at a given index.

        Indexes into results first, then receipts.  The ``self`` parameter
        was missing, which made every instance call raise TypeError.
        """
        if index < len(self._results):
            return self._results[index]
        else:
            index -= len(self._results)
            return self._receipts[index]

    def _handle_exception(self, exc):
        print(repr(exc))
コード例 #14
0
class HttpProbe():
    """
    This class manages interactions with the supervisor:
    registration, specification retrievement, and return of results

    Fixes: the bare ``except:`` in the registration retry loop is
    narrowed to ``except Exception`` (a bare except also swallows
    KeyboardInterrupt, making the retry loop unkillable with Ctrl-C);
    the ssl probe socket in ``get_dn`` is now closed instead of leaked.
    """

    def __init__(self, immediate_ms = 5000):
        parse_args()
        self.dn = None

        # check if security is enabled, if so read certificate files
        self.security = not args.DISABLE_SSL
        if self.security:
            mplane.utils.check_file(args.CERTFILE)
            self.cert = mplane.utils.normalize_path(mplane.utils.read_setting(args.CERTFILE, "cert"))
            self.key = mplane.utils.normalize_path(mplane.utils.read_setting(args.CERTFILE, "key"))
            self.ca = mplane.utils.normalize_path(mplane.utils.read_setting(args.CERTFILE, "ca-chain"))
            mplane.utils.check_file(self.cert)
            mplane.utils.check_file(self.key)
            mplane.utils.check_file(self.ca)
            self.pool = HTTPSConnectionPool(args.SUPERVISOR_IP4, args.SUPERVISOR_PORT, key_file=self.key, cert_file=self.cert, ca_certs=self.ca)
        else:
            self.pool = HTTPConnectionPool(args.SUPERVISOR_IP4, args.SUPERVISOR_PORT)
            self.cert = None

        # get server DN, for Access Control purposes
        self.dn = self.get_dn()

        # generate a Service for each capability
        self.immediate_ms = immediate_ms
        self.scheduler = mplane.scheduler.Scheduler(self.security, self.cert)
        self.scheduler.add_service(FirelogService(firelog_capability(args.URL)))

    def get_dn(self):
        """
        Extracts the DN from the server.
        If SSL is disabled, returns a dummy DN
        """
        if self.security == True:
            # extract DN from server certificate.
            # Unfortunately, there seems to be no way to do this using urllib3,
            # thus ssl library is being used
            s = socket()
            c = ssl.wrap_socket(s, cert_reqs=ssl.CERT_REQUIRED, keyfile=self.key, certfile=self.cert, ca_certs=self.ca)
            try:
                c.connect((args.SUPERVISOR_IP4, args.SUPERVISOR_PORT))
                cert = c.getpeercert()
            finally:
                # Close the probe connection; it was previously leaked.
                c.close()

            # Join the subject RDN values with dots to form the DN string.
            dn = ""
            for elem in cert.get('subject'):
                if dn == "":
                    dn = dn + str(elem[0][1])
                else:
                    dn = dn + "." + str(elem[0][1])
        else:
            dn = DUMMY_DN
        return dn

    def register_to_supervisor(self):
        """
        Sends a list of capabilities to the Supervisor, in order to register them
        """
        url = "/" + REGISTRATION_PATH

        # generate the capability list, keeping only those this DN may expose
        caps_list = ""
        no_caps_exposed = True
        for key in self.scheduler.capability_keys():
            cap = self.scheduler.capability_for_key(key)
            if (self.scheduler.ac.check_azn(cap._label, self.dn)):
                caps_list = caps_list + mplane.model.unparse_json(cap) + ","
                no_caps_exposed = False
        caps_list = "[" + caps_list[:-1].replace("\n","") + "]"
        connected = False

        if no_caps_exposed is True:
           print("\nNo Capabilities are being exposed to the Supervisor, check permission files. Exiting")
           exit(0)

        # send the list to the supervisor, if reachable
        while not connected:
            try:
                res = self.pool.urlopen('POST', url,
                    body=caps_list.encode("utf-8"),
                    headers={"content-type": "application/x-mplane+json"})
                connected = True

            except Exception:
                # Narrowed from a bare except: Ctrl-C must still break out.
                print("Supervisor unreachable. Retrying connection in 5 seconds")
                sleep(5)

        # handle response message
        if res.status == 200:
            body = json.loads(res.data.decode("utf-8"))
            print("\nCapability registration outcome:")
            for key in body:
                if body[key]['registered'] == "ok":
                    print(key + ": Ok")
                else:
                    print(key + ": Failed (" + body[key]['reason'] + ")")
            print("")
        else:
            print("Error registering capabilities, Supervisor said: " + str(res.status) + " - " + res.data.decode("utf-8"))
            exit(1)

    def check_for_specs(self):
        """
        Poll the supervisor for specifications
        """
        url = "/" + SPECIFICATION_PATH

        # send a request for specifications
        res = self.pool.request('GET', url)
        if res.status == 200:

            # specs retrieved: split them if there is more than one
            specs = mplane.utils.split_stmt_list(res.data.decode("utf-8"))
            for spec in specs:

                # hand spec to scheduler
                reply = self.scheduler.receive_message(self.dn, spec)

                # return error if spec is not authorized
                if isinstance(reply, mplane.model.Exception):
                    result_url = "/" + RESULT_PATH
                    # send result to the Supervisor
                    res = self.pool.urlopen('POST', result_url,
                            body=mplane.model.unparse_json(reply).encode("utf-8"),
                            headers={"content-type": "application/x-mplane+json"})
                    return

                # enqueue job
                job = self.scheduler.job_for_message(reply)

                # launch a thread to monitor the status of the running measurement
                t = threading.Thread(target=self.return_results, args=[job])
                t.start()

        # not registered on supervisor, need to re-register
        elif res.status == 428:
            print("\nRe-registering capabilities on Supervisor")
            self.register_to_supervisor()
        pass

    def return_results(self, job):
        """
        Monitors a job, and as soon as it is complete sends it to the Supervisor
        """
        url = "/" + RESULT_PATH
        reply = job.get_reply()

        # check if job is completed
        while job.finished() is not True:
            if job.failed():
                reply = job.get_reply()
                break
            sleep(1)
        if isinstance (reply, mplane.model.Receipt):
            reply = job.get_reply()

        # send result to the Supervisor
        res = self.pool.urlopen('POST', url,
                body=mplane.model.unparse_json(reply).encode("utf-8"),
                headers={"content-type": "application/x-mplane+json"})

        # handle response
        if res.status == 200:
            print("Result for " + reply.get_label() + " successfully returned!")
        else:
            print("Error returning Result for " + reply.get_label())
            print("Supervisor said: " + str(res.status) + " - " + res.data.decode("utf-8"))
        pass
コード例 #15
0
ファイル: main.py プロジェクト: titanjer/benchmark_urllib
 def test_urllib3_connection_pool(self):
     """Benchmark repeated GETs through a single urllib3 connection pool."""
     pool = HTTPConnectionPool(self.server_ip, self.server_port)
     for number in self.numbers:
         response = pool.urlopen('GET', "/hello?q=%s" % number)
         body_length = len(response.data)
コード例 #16
0
ファイル: tstat_proxy.py プロジェクト: stepenta/RI
class HttpProbe():
    """
    This class manages interactions with the supervisor:
    registration, specification retrievement, and return of results

    Fixes: in the non-SSL branch ``self.cert`` was never assigned, yet
    ``__init__`` passes ``self.cert`` to the Scheduler — this raised
    AttributeError whenever SSL was disabled; the bare ``except:`` in the
    registration retry loop is narrowed to ``except Exception`` (a bare
    except also swallows KeyboardInterrupt, making the loop unkillable
    with Ctrl-C); the ssl probe socket in ``get_dn`` is now closed.
    """

    def __init__(self, immediate_ms = 5000):
        parse_args()
        self.dn = None

        # check if security is enabled, if so read certificate files
        self.security = not args.DISABLE_SSL
        if self.security:
            mplane.utils.check_file(args.CERTFILE)
            self.cert = mplane.utils.normalize_path(mplane.utils.read_setting(args.CERTFILE, "cert"))
            self.key = mplane.utils.normalize_path(mplane.utils.read_setting(args.CERTFILE, "key"))
            self.ca = mplane.utils.normalize_path(mplane.utils.read_setting(args.CERTFILE, "ca-chain"))
            mplane.utils.check_file(self.cert)
            mplane.utils.check_file(self.key)
            mplane.utils.check_file(self.ca)
            self.pool = HTTPSConnectionPool(args.SUPERVISOR_IP4, args.SUPERVISOR_PORT, key_file=self.key, cert_file=self.cert, ca_certs=self.ca)
        else:
            self.pool = HTTPConnectionPool(args.SUPERVISOR_IP4, args.SUPERVISOR_PORT)
            # Previously unset here, which broke the Scheduler call below
            # whenever SSL was disabled.
            self.cert = None

        # get server DN, for Access Control purposes
        self.dn = self.get_dn()

        # generate a Service for each capability
        self.immediate_ms = immediate_ms
        self.scheduler = mplane.scheduler.Scheduler(self.security, self.cert)
        self.scheduler.add_service(tStatService(mplane.tstat_caps.tcp_flows_capability(args.IP4_NET), args.TSTAT_RUNTIMECONF))
        self.scheduler.add_service(tStatService(mplane.tstat_caps.e2e_tcp_flows_capability(args.IP4_NET), args.TSTAT_RUNTIMECONF))
        self.scheduler.add_service(tStatService(mplane.tstat_caps.tcp_options_capability(args.IP4_NET), args.TSTAT_RUNTIMECONF))
        self.scheduler.add_service(tStatService(mplane.tstat_caps.tcp_p2p_stats_capability(args.IP4_NET), args.TSTAT_RUNTIMECONF))
        self.scheduler.add_service(tStatService(mplane.tstat_caps.tcp_layer7_capability(args.IP4_NET), args.TSTAT_RUNTIMECONF))

    def get_dn(self):
        """
        Extracts the DN from the server.
        If SSL is disabled, returns a dummy DN
        """
        if self.security == True:
            # extract DN from server certificate.
            # Unfortunately, there seems to be no way to do this using urllib3,
            # thus ssl library is being used
            s = socket()
            c = ssl.wrap_socket(s, cert_reqs=ssl.CERT_REQUIRED, keyfile=self.key, certfile=self.cert, ca_certs=self.ca)
            try:
                c.connect((args.SUPERVISOR_IP4, args.SUPERVISOR_PORT))
                cert = c.getpeercert()
            finally:
                # Close the probe connection; it was previously leaked.
                c.close()

            # Join the subject RDN values with dots to form the DN string.
            dn = ""
            for elem in cert.get('subject'):
                if dn == "":
                    dn = dn + str(elem[0][1])
                else:
                    dn = dn + "." + str(elem[0][1])
        else:
            dn = "org.mplane.Test PKI.Test Clients.mPlane-Client"
        return dn

    def register_to_supervisor(self):
        """
        Sends a list of capabilities to the Supervisor, in order to register them
        """
        url = "/" + REGISTRATION_PATH

        # generate the capability list, keeping only those this DN may expose
        caps_list = ""
        for key in self.scheduler.capability_keys():
            cap = self.scheduler.capability_for_key(key)
            if (self.scheduler.ac.check_azn(cap._label, self.dn)):
                caps_list = caps_list + mplane.model.unparse_json(cap) + ","
        caps_list = "[" + caps_list[:-1].replace("\n","") + "]"
        connected = False

        # send the list to the supervisor, if reachable
        while not connected:
            try:
                res = self.pool.urlopen('POST', url,
                    body=caps_list.encode("utf-8"),
                    headers={"content-type": "application/x-mplane+json"})
                connected = True

            except Exception:
                # Narrowed from a bare except: Ctrl-C must still break out.
                print("Supervisor unreachable. Retrying connection in 5 seconds")
                sleep(5)

        # handle response message
        if res.status == 200:
            body = json.loads(res.data.decode("utf-8"))
            print("\nCapability registration outcome:")
            for key in body:
                if body[key]['registered'] == "ok":
                    print(key + ": Ok")
                else:
                    print(key + ": Failed (" + body[key]['reason'] + ")")
            print("")
        else:
            print("Error registering capabilities, Supervisor said: " + str(res.status) + " - " + res.data.decode("utf-8"))
            exit(1)

    def check_for_specs(self):
        """
        Poll the supervisor for specifications
        """
        url = "/" + SPECIFICATION_PATH

        # send a request for specifications
        res = self.pool.request('GET', url)
        if res.status == 200:

            # specs retrieved: split them if there is more than one
            specs = mplane.utils.split_stmt_list(res.data.decode("utf-8"))
            for spec in specs:

                # hand spec to scheduler
                reply = self.scheduler.receive_message(self.dn, spec)

                # return error if spec is not authorized
                if isinstance(reply, mplane.model.Exception):
                    result_url = "/" + RESULT_PATH
                    # send result to the Supervisor
                    res = self.pool.urlopen('POST', result_url,
                            body=mplane.model.unparse_json(reply).encode("utf-8"),
                            headers={"content-type": "application/x-mplane+json"})
                    return

                # enqueue job
                job = self.scheduler.job_for_message(reply)

                # launch a thread to monitor the status of the running measurement
                t = threading.Thread(target=self.return_results, args=[job])
                t.start()

        # not registered on supervisor, need to re-register
        elif res.status == 428:
            print("\nRe-registering capabilities on Supervisor")
            self.register_to_supervisor()
        pass

    def return_results(self, job):
        """
        Monitors a job, and as soon as it is complete sends it to the Supervisor
        """
        url = "/" + RESULT_PATH
        reply = job.get_reply()

        # check if job is completed
        while job.finished() is not True:
            if job.failed():
                reply = job.get_reply()
                break
            sleep(1)
        if isinstance (reply, mplane.model.Receipt):
            reply = job.get_reply()

        # send result to the Supervisor
        res = self.pool.urlopen('POST', url,
                body=mplane.model.unparse_json(reply).encode("utf-8"),
                headers={"content-type": "application/x-mplane+json"})

        # handle response
        if res.status == 200:
            print("Result for " + reply.get_label() + " successfully returned!")
        else:
            print("Error returning Result for " + reply.get_label())
            print("Supervisor said: " + str(res.status) + " - " + res.data.decode("utf-8"))
        pass
コード例 #17
0
class TestConnectionPool(HTTPDummyServerTestCase):
    """Integration tests for HTTPConnectionPool against the live dummy server.

    Exercises request dispatch, uploads, encodings, keep-alive/connection
    reuse, pool accounting and socket options end-to-end over real sockets.
    """

    def setUp(self):
        # Fresh pool per test, pointed at the dummyserver from the base class.
        self.pool = HTTPConnectionPool(self.host, self.port)

    def test_get(self):
        """GET with a matching ``method`` field returns 200."""
        r = self.pool.request('GET', '/specific_method',
                               fields={'method': 'GET'})
        self.assertEqual(r.status, 200, r.data)

    def test_post_url(self):
        """POST with a matching ``method`` field returns 200."""
        r = self.pool.request('POST', '/specific_method',
                               fields={'method': 'POST'})
        self.assertEqual(r.status, 200, r.data)

    def test_urlopen_put(self):
        """urlopen() accepts a raw path with query string (PUT)."""
        r = self.pool.urlopen('PUT', '/specific_method?method=PUT')
        self.assertEqual(r.status, 200, r.data)

    def test_wrong_specific_method(self):
        """Mismatched method selectors yield 400 (server sanity check)."""
        # To make sure the dummy server is actually returning failed responses
        r = self.pool.request('GET', '/specific_method',
                               fields={'method': 'POST'})
        self.assertEqual(r.status, 400, r.data)

        r = self.pool.request('POST', '/specific_method',
                               fields={'method': 'GET'})
        self.assertEqual(r.status, 400, r.data)

    def test_upload(self):
        """Multipart file upload round-trips through /upload."""
        data = "I'm in ur multipart form-data, hazing a cheezburgr"
        fields = {
            'upload_param': 'filefield',
            'upload_filename': 'lolcat.txt',
            'upload_size': len(data),
            'filefield': ('lolcat.txt', data),
        }

        r = self.pool.request('POST', '/upload', fields=fields)
        self.assertEqual(r.status, 200, r.data)

    def test_one_name_multiple_values(self):
        """Repeated field names survive urlencoded and multipart encoding."""
        fields = [
            ('foo', 'a'),
            ('foo', 'b'),
        ]

        # urlencode
        r = self.pool.request('GET', '/echo', fields=fields)
        self.assertEqual(r.data, b'foo=a&foo=b')

        # multipart
        r = self.pool.request('POST', '/echo', fields=fields)
        self.assertEqual(r.data.count(b'name="foo"'), 2)

    def test_request_method_body(self):
        """request() accepts a raw body, but body+fields together is an error."""
        body = b'hi'
        r = self.pool.request('POST', '/echo', body=body)
        self.assertEqual(r.data, body)

        fields = [('hi', 'hello')]
        self.assertRaises(TypeError, self.pool.request, 'POST', '/echo', body=body, fields=fields)

    def test_unicode_upload(self):
        """Upload with non-ASCII field name/filename/content succeeds."""
        fieldname = u('myfile')
        filename = u('\xe2\x99\xa5.txt')
        data = u('\xe2\x99\xa5').encode('utf8')
        size = len(data)

        fields = {
            u('upload_param'): fieldname,
            u('upload_filename'): filename,
            u('upload_size'): size,
            fieldname: (filename, data),
        }

        r = self.pool.request('POST', '/upload', fields=fields)
        self.assertEqual(r.status, 200, r.data)

    def test_nagle(self):
        """ Test that connections have TCP_NODELAY turned on """
        # This test needs to be here in order to be run. socket.create_connection actually tries to
        # connect to the host provided so we need a dummyserver to be running.
        pool = HTTPConnectionPool(self.host, self.port)
        conn = pool._get_conn()
        pool._make_request(conn, 'GET', '/')
        tcp_nodelay_setting = conn.sock.getsockopt(socket.IPPROTO_TCP, socket.TCP_NODELAY)
        self.assertTrue(tcp_nodelay_setting)

    def test_socket_options(self):
        """Test that connections accept socket options."""
        # This test needs to be here in order to be run. socket.create_connection actually tries to
        # connect to the host provided so we need a dummyserver to be running.
        pool = HTTPConnectionPool(self.host, self.port, socket_options=[
            (socket.SOL_SOCKET, socket.SO_KEEPALIVE, 1)
        ])
        s = pool._new_conn()._new_conn()  # Get the socket
        using_keepalive = s.getsockopt(socket.SOL_SOCKET, socket.SO_KEEPALIVE) > 0
        self.assertTrue(using_keepalive)
        s.close()

    def test_disable_default_socket_options(self):
        """Test that passing None disables all socket options."""
        # This test needs to be here in order to be run. socket.create_connection actually tries to
        # connect to the host provided so we need a dummyserver to be running.
        pool = HTTPConnectionPool(self.host, self.port, socket_options=None)
        s = pool._new_conn()._new_conn()
        using_nagle = s.getsockopt(socket.IPPROTO_TCP, socket.TCP_NODELAY) == 0
        self.assertTrue(using_nagle)
        s.close()

    def test_defaults_are_applied(self):
        """Test that modifying the default socket options works."""
        # This test needs to be here in order to be run. socket.create_connection actually tries to
        # connect to the host provided so we need a dummyserver to be running.
        pool = HTTPConnectionPool(self.host, self.port)
        # Get the HTTPConnection instance
        conn = pool._new_conn()
        # Update the default socket options
        conn.default_socket_options += [(socket.SOL_SOCKET, socket.SO_KEEPALIVE, 1)]
        s = conn._new_conn()
        nagle_disabled = s.getsockopt(socket.IPPROTO_TCP, socket.TCP_NODELAY) > 0
        using_keepalive = s.getsockopt(socket.SOL_SOCKET, socket.SO_KEEPALIVE) > 0
        self.assertTrue(nagle_disabled)
        self.assertTrue(using_keepalive)

    def test_connection_error_retries(self):
        """ ECONNREFUSED error should raise a connection error, with retries """
        port = find_unused_port()
        pool = HTTPConnectionPool(self.host, port)
        try:
            pool.request('GET', '/', retries=Retry(connect=3))
            self.fail("Should have failed with a connection error.")
        except MaxRetryError as e:
            self.assertEqual(type(e.reason), NewConnectionError)

    def test_timeout_success(self):
        """Reusing a Timeout object across requests/pools must not raise."""
        timeout = Timeout(connect=3, read=5, total=None)
        pool = HTTPConnectionPool(self.host, self.port, timeout=timeout)
        pool.request('GET', '/')
        # This should not raise a "Timeout already started" error
        pool.request('GET', '/')

        pool = HTTPConnectionPool(self.host, self.port, timeout=timeout)
        # This should also not raise a "Timeout already started" error
        pool.request('GET', '/')

        timeout = Timeout(total=None)
        pool = HTTPConnectionPool(self.host, self.port, timeout=timeout)
        pool.request('GET', '/')

    def test_tunnel(self):
        """_tunnel() is invoked only when a tunnel has been configured."""
        # note the actual httplib.py has no tests for this functionality
        timeout = Timeout(total=None)
        pool = HTTPConnectionPool(self.host, self.port, timeout=timeout)
        conn = pool._get_conn()
        try:
            conn.set_tunnel(self.host, self.port)
        except AttributeError: # python 2.6
            conn._set_tunnel(self.host, self.port)

        conn._tunnel = mock.Mock(return_value=None)
        pool._make_request(conn, 'GET', '/')
        conn._tunnel.assert_called_once_with()

        # test that it's not called when tunnel is not set
        timeout = Timeout(total=None)
        pool = HTTPConnectionPool(self.host, self.port, timeout=timeout)
        conn = pool._get_conn()

        conn._tunnel = mock.Mock(return_value=None)
        pool._make_request(conn, 'GET', '/')
        self.assertEqual(conn._tunnel.called, False)

    def test_redirect(self):
        """303 is surfaced with redirect=False, followed transparently otherwise."""
        r = self.pool.request('GET', '/redirect', fields={'target': '/'}, redirect=False)
        self.assertEqual(r.status, 303)

        r = self.pool.request('GET', '/redirect', fields={'target': '/'})
        self.assertEqual(r.status, 200)
        self.assertEqual(r.data, b'Dummy server!')

    def test_bad_connect(self):
        """An unresolvable host exhausts retries with NewConnectionError."""
        pool = HTTPConnectionPool('badhost.invalid', self.port)
        try:
            pool.request('GET', '/', retries=5)
            self.fail("should raise timeout exception here")
        except MaxRetryError as e:
            self.assertEqual(type(e.reason), NewConnectionError)

    def test_keepalive(self):
        """Two keep-alive requests reuse a single connection."""
        pool = HTTPConnectionPool(self.host, self.port, block=True, maxsize=1)

        r = pool.request('GET', '/keepalive?close=0')
        r = pool.request('GET', '/keepalive?close=0')

        self.assertEqual(r.status, 200)
        self.assertEqual(pool.num_connections, 1)
        self.assertEqual(pool.num_requests, 2)

    def test_keepalive_close(self):
        """Connection: close tears down the socket; keep-alive preserves it."""
        pool = HTTPConnectionPool(self.host, self.port,
                                  block=True, maxsize=1, timeout=2)

        r = pool.request('GET', '/keepalive?close=1', retries=0,
                         headers={
                             "Connection": "close",
                         })

        self.assertEqual(pool.num_connections, 1)

        # The dummyserver will have responded with Connection:close,
        # and httplib will properly cleanup the socket.

        # We grab the HTTPConnection object straight from the Queue,
        # because _get_conn() is where the check & reset occurs
        # pylint: disable-msg=W0212
        conn = pool.pool.get()
        self.assertEqual(conn.sock, None)
        pool._put_conn(conn)

        # Now with keep-alive
        r = pool.request('GET', '/keepalive?close=0', retries=0,
                         headers={
                             "Connection": "keep-alive",
                         })

        # The dummyserver responded with Connection:keep-alive, the connection
        # persists.
        conn = pool.pool.get()
        self.assertNotEqual(conn.sock, None)
        pool._put_conn(conn)

        # Another request asking the server to close the connection. This one
        # should get cleaned up for the next request.
        r = pool.request('GET', '/keepalive?close=1', retries=0,
                         headers={
                             "Connection": "close",
                         })

        self.assertEqual(r.status, 200)

        conn = pool.pool.get()
        self.assertEqual(conn.sock, None)
        pool._put_conn(conn)

        # Next request
        r = pool.request('GET', '/keepalive?close=0')

    def test_post_with_urlencode(self):
        """encode_multipart=False sends an application/x-www-form-urlencoded body."""
        data = {'banana': 'hammock', 'lol': 'cat'}
        r = self.pool.request('POST', '/echo', fields=data, encode_multipart=False)
        self.assertEqual(r.data.decode('utf-8'), urlencode(data))

    def test_post_with_multipart(self):
        """encode_multipart=True sends a multipart/form-data body."""
        data = {'banana': 'hammock', 'lol': 'cat'}
        r = self.pool.request('POST', '/echo',
                                    fields=data,
                                    encode_multipart=True)
        body = r.data.split(b'\r\n')

        encoded_data = encode_multipart_formdata(data)[0]
        expected_body = encoded_data.split(b'\r\n')

        # TODO: Get rid of extra parsing stuff when you can specify
        # a custom boundary to encode_multipart_formdata
        """
        We need to loop the return lines because a timestamp is attached
        from within encode_multipart_formdata. When the server echos back
        the data, it has the timestamp from when the data was encoded, which
        is not equivalent to when we run encode_multipart_formdata on
        the data again.
        """
        for i, line in enumerate(body):
            if line.startswith(b'--'):
                continue

            self.assertEqual(body[i], expected_body[i])

    def test_check_gzip(self):
        """Gzip-encoded responses are transparently decoded."""
        r = self.pool.request('GET', '/encodingrequest',
                                   headers={'accept-encoding': 'gzip'})
        self.assertEqual(r.headers.get('content-encoding'), 'gzip')
        self.assertEqual(r.data, b'hello, world!')

    def test_check_deflate(self):
        """Deflate-encoded responses are transparently decoded."""
        r = self.pool.request('GET', '/encodingrequest',
                                   headers={'accept-encoding': 'deflate'})
        self.assertEqual(r.headers.get('content-encoding'), 'deflate')
        self.assertEqual(r.data, b'hello, world!')

    def test_bad_decode(self):
        """Corrupt encoded bodies raise DecodeError."""
        self.assertRaises(DecodeError, self.pool.request,
                          'GET', '/encodingrequest',
                          headers={'accept-encoding': 'garbage-deflate'})

        self.assertRaises(DecodeError, self.pool.request,
                          'GET', '/encodingrequest',
                          headers={'accept-encoding': 'garbage-gzip'})

    def test_connection_count(self):
        """maxsize=1 pool serves multiple requests over one connection."""
        pool = HTTPConnectionPool(self.host, self.port, maxsize=1)

        pool.request('GET', '/')
        pool.request('GET', '/')
        pool.request('GET', '/')

        self.assertEqual(pool.num_connections, 1)
        self.assertEqual(pool.num_requests, 3)

    def test_connection_count_bigpool(self):
        """A large pool still reuses one connection for serial requests."""
        http_pool = HTTPConnectionPool(self.host, self.port, maxsize=16)

        http_pool.request('GET', '/')
        http_pool.request('GET', '/')
        http_pool.request('GET', '/')

        self.assertEqual(http_pool.num_connections, 1)
        self.assertEqual(http_pool.num_requests, 3)

    def test_partial_response(self):
        """preload_content=False allows incremental read() of the body."""
        pool = HTTPConnectionPool(self.host, self.port, maxsize=1)

        req_data = {'lol': 'cat'}
        resp_data = urlencode(req_data).encode('utf-8')

        r = pool.request('GET', '/echo', fields=req_data, preload_content=False)

        self.assertEqual(r.read(5), resp_data[:5])
        self.assertEqual(r.read(), resp_data[5:])

    def test_lazy_load_twice(self):
        """Interleaved partial reads on a maxsize=1 pool (best-effort)."""
        # This test is sad and confusing. Need to figure out what's
        # going on with partial reads and socket reuse.

        pool = HTTPConnectionPool(self.host, self.port, block=True, maxsize=1, timeout=2)

        payload_size = 1024 * 2
        first_chunk = 512

        boundary = 'foo'

        req_data = {'count': 'a' * payload_size}
        resp_data = encode_multipart_formdata(req_data, boundary=boundary)[0]

        req2_data = {'count': 'b' * payload_size}
        resp2_data = encode_multipart_formdata(req2_data, boundary=boundary)[0]

        r1 = pool.request('POST', '/echo', fields=req_data, multipart_boundary=boundary, preload_content=False)

        self.assertEqual(r1.read(first_chunk), resp_data[:first_chunk])

        try:
            r2 = pool.request('POST', '/echo', fields=req2_data, multipart_boundary=boundary,
                                    preload_content=False, pool_timeout=0.001)

            # This branch should generally bail here, but maybe someday it will
            # work? Perhaps by some sort of magic. Consider it a TODO.

            self.assertEqual(r2.read(first_chunk), resp2_data[:first_chunk])

            self.assertEqual(r1.read(), resp_data[first_chunk:])
            self.assertEqual(r2.read(), resp2_data[first_chunk:])
            self.assertEqual(pool.num_requests, 2)

        except EmptyPoolError:
            self.assertEqual(r1.read(), resp_data[first_chunk:])
            self.assertEqual(pool.num_requests, 1)

        self.assertEqual(pool.num_connections, 1)

    def test_for_double_release(self):
        """Connections are released to the pool exactly once per request."""
        MAXSIZE=5

        # Check default state
        pool = HTTPConnectionPool(self.host, self.port, maxsize=MAXSIZE)
        self.assertEqual(pool.num_connections, 0)
        self.assertEqual(pool.pool.qsize(), MAXSIZE)

        # Make an empty slot for testing
        pool.pool.get()
        self.assertEqual(pool.pool.qsize(), MAXSIZE-1)

        # Check state after simple request
        pool.urlopen('GET', '/')
        self.assertEqual(pool.pool.qsize(), MAXSIZE-1)

        # Check state without release
        pool.urlopen('GET', '/', preload_content=False)
        self.assertEqual(pool.pool.qsize(), MAXSIZE-2)

        pool.urlopen('GET', '/')
        self.assertEqual(pool.pool.qsize(), MAXSIZE-2)

        # Check state after read
        pool.urlopen('GET', '/').data
        self.assertEqual(pool.pool.qsize(), MAXSIZE-2)

        pool.urlopen('GET', '/')
        self.assertEqual(pool.pool.qsize(), MAXSIZE-2)

    def test_release_conn_parameter(self):
        """release_conn=False keeps the connection checked out of the pool."""
        MAXSIZE=5
        pool = HTTPConnectionPool(self.host, self.port, maxsize=MAXSIZE)
        self.assertEqual(pool.pool.qsize(), MAXSIZE)

        # Make request without releasing connection
        pool.request('GET', '/', release_conn=False, preload_content=False)
        self.assertEqual(pool.pool.qsize(), MAXSIZE-1)

    def test_dns_error(self):
        """DNS failure exhausts retries with MaxRetryError."""
        pool = HTTPConnectionPool('thishostdoesnotexist.invalid', self.port, timeout=0.001)
        self.assertRaises(MaxRetryError, pool.request, 'GET', '/test', retries=2)

    def test_source_address(self):
        """Valid source_address tuples bind the outgoing socket."""
        for addr, is_ipv6 in VALID_SOURCE_ADDRESSES:
            if is_ipv6 and not HAS_IPV6_AND_DNS:
                warnings.warn("No IPv6 support: skipping.",
                              NoIPv6Warning)
                continue
            pool = HTTPConnectionPool(self.host, self.port,
                    source_address=addr, retries=False)
            r = pool.request('GET', '/source_address')
            self.assertEqual(r.data, b(addr[0]))

    def test_source_address_error(self):
        """Invalid source_address tuples raise NewConnectionError."""
        for addr in INVALID_SOURCE_ADDRESSES:
            pool = HTTPConnectionPool(self.host, self.port, source_address=addr, retries=False)
            # FIXME: This assert flakes sometimes. Not sure why.
            self.assertRaises(NewConnectionError, pool.request, 'GET', '/source_address?{0}'.format(addr))

    def test_stream_keepalive(self):
        """Streaming chunked responses still reuses one keep-alive connection."""
        x = 2

        for _ in range(x):
            response = self.pool.request(
                    'GET',
                    '/chunked',
                    headers={
                        'Connection': 'keep-alive',
                        },
                    preload_content=False,
                    retries=False,
                    )
            for chunk in response.stream():
                self.assertEqual(chunk, b'123')

        self.assertEqual(self.pool.num_connections, 1)
        self.assertEqual(self.pool.num_requests, x)

    def test_chunked_gzip(self):
        """Chunked + gzip responses decode correctly when streamed."""
        response = self.pool.request(
                'GET',
                '/chunked_gzip',
                preload_content=False,
                decode_content=True,
                )

        self.assertEqual(b'123' * 4, response.read())

    def test_cleanup_on_connection_error(self):
        '''
        Test that connections are recycled to the pool on
        connection errors where no http response is received.
        '''
        poolsize = 3
        with HTTPConnectionPool(self.host, self.port, maxsize=poolsize, block=True) as http:
            self.assertEqual(http.pool.qsize(), poolsize)

            # force a connection error by supplying a non-existent
            # url. We won't get a response for this  and so the
            # conn won't be implicitly returned to the pool.
            self.assertRaises(MaxRetryError,
                http.request, 'GET', '/redirect', fields={'target': '/'}, release_conn=False, retries=0)

            r = http.request('GET', '/redirect', fields={'target': '/'}, release_conn=False, retries=1)
            r.release_conn()

            # the pool should still contain poolsize elements
            self.assertEqual(http.pool.qsize(), http.pool.maxsize)
コード例 #18
0
ファイル: test_connectionpool.py プロジェクト: kojit/urllib3
class TestConnectionPool(HTTPDummyServerTestCase):
    """Integration tests for HTTPConnectionPool against the live dummy server.

    Older variant of the suite (kojit/urllib3 fork): covers request methods,
    uploads, timeouts, redirects, keep-alive reuse and pool accounting.
    """

    def setUp(self):
        # Fresh pool per test, pointed at the dummyserver from the base class.
        self.pool = HTTPConnectionPool(self.host, self.port)

    def test_get(self):
        """GET with a matching ``method`` field returns 200."""
        r = self.pool.request('GET', '/specific_method',
                               fields={'method': 'GET'})
        self.assertEqual(r.status, 200, r.data)

    def test_post_url(self):
        """POST with a matching ``method`` field returns 200."""
        r = self.pool.request('POST', '/specific_method',
                               fields={'method': 'POST'})
        self.assertEqual(r.status, 200, r.data)

    def test_urlopen_put(self):
        """urlopen() accepts a raw path with query string (PUT)."""
        r = self.pool.urlopen('PUT', '/specific_method?method=PUT')
        self.assertEqual(r.status, 200, r.data)

    def test_wrong_specific_method(self):
        """Mismatched method selectors yield 400 (server sanity check)."""
        # To make sure the dummy server is actually returning failed responses
        r = self.pool.request('GET', '/specific_method',
                               fields={'method': 'POST'})
        self.assertEqual(r.status, 400, r.data)

        r = self.pool.request('POST', '/specific_method',
                               fields={'method': 'GET'})
        self.assertEqual(r.status, 400, r.data)

    def test_upload(self):
        """Multipart file upload round-trips through /upload."""
        data = "I'm in ur multipart form-data, hazing a cheezburgr"
        fields = {
            'upload_param': 'filefield',
            'upload_filename': 'lolcat.txt',
            'upload_size': len(data),
            'filefield': ('lolcat.txt', data),
        }

        r = self.pool.request('POST', '/upload', fields=fields)
        self.assertEqual(r.status, 200, r.data)

    def test_upload_with_multipul_values(self):
        """Upload with a list-valued field succeeds.

        NOTE(review): "multipul" is a typo for "multiple" in the test name;
        left as-is because test names are referenced by runners/CI.
        """
        data = "I'm in ur multipart form-data, hazing a cheezburgr"
        fields = {
            'params': ['aaa', 'bbb'],
            'upload_param': 'filefield',
            'upload_filename': 'lolcat.txt',
            'upload_size': len(data),
            'filefield': ('lolcat.txt', data),
        }

        r = self.pool.request('POST', '/upload', fields=fields)
        self.assertEqual(r.status, 200, r.data)

    def test_unicode_upload(self):
        """Upload with non-ASCII field name/filename/content succeeds."""
        fieldname = u('myfile')
        filename = u('\xe2\x99\xa5.txt')
        data = u('\xe2\x99\xa5').encode('utf8')
        size = len(data)

        fields = {
            u('upload_param'): fieldname,
            u('upload_filename'): filename,
            u('upload_size'): size,
            fieldname: (filename, data),
        }

        r = self.pool.request('POST', '/upload', fields=fields)
        self.assertEqual(r.status, 200, r.data)

    def test_timeout(self):
        """A server sleep longer than the pool timeout raises TimeoutError."""
        pool = HTTPConnectionPool(self.host, self.port, timeout=0.01)
        try:
            pool.request('GET', '/sleep',
                         fields={'seconds': '0.02'})
            self.fail("Failed to raise TimeoutError exception")
        except TimeoutError:
            pass

    def test_redirect(self):
        """303 is surfaced with redirect=False, followed transparently otherwise."""
        r = self.pool.request('GET', '/redirect',
                                   fields={'target': '/'},
                                   redirect=False)
        self.assertEqual(r.status, 303)

        r = self.pool.request('GET', '/redirect',
                                   fields={'target': '/'})
        self.assertEqual(r.status, 200)
        self.assertEqual(r.data, b'Dummy server!')

    def test_maxretry(self):
        """retries=0 on a redirect raises MaxRetryError."""
        try:
            self.pool.request('GET', '/redirect',
                                   fields={'target': '/'},
                                   retries=0)
            self.fail("Failed to raise MaxRetryError exception")
        except MaxRetryError:
            pass

    def test_keepalive(self):
        """Two keep-alive requests reuse a single connection."""
        pool = HTTPConnectionPool(self.host, self.port, block=True, maxsize=1)

        r = pool.request('GET', '/keepalive?close=0')
        r = pool.request('GET', '/keepalive?close=0')

        self.assertEqual(r.status, 200)
        self.assertEqual(pool.num_connections, 1)
        self.assertEqual(pool.num_requests, 2)

    def test_keepalive_close(self):
        """Connection: close tears down the socket; keep-alive preserves it."""
        # NOTE: This used to run against apache.org but it made the test suite
        # really slow and fail half the time. Setting it to skip until we can
        # make this run better locally.
        pool = HTTPConnectionPool(self.host, self.port,
                                  block=True, maxsize=1, timeout=2)

        r = pool.request('GET', '/keepalive?close=1', retries=0,
                         headers={
                             "Connection": "close",
                         })

        self.assertEqual(pool.num_connections, 1)

        # The dummyserver will have responded with Connection:close,
        # and httplib will properly cleanup the socket.

        # We grab the HTTPConnection object straight from the Queue,
        # because _get_conn() is where the check & reset occurs
        # pylint: disable-msg=W0212
        conn = pool.pool.get()
        self.assertEqual(conn.sock, None)
        pool._put_conn(conn)

        # Now with keep-alive
        r = pool.request('GET', '/keepalive?close=0', retries=0,
                         headers={
                             "Connection": "keep-alive",
                         })

        # The dummyserver responded with Connection:keep-alive, the connection
        # persists.
        conn = pool.pool.get()
        self.assertNotEqual(conn.sock, None)
        pool._put_conn(conn)

        # Another request asking the server to close the connection. This one
        # should get cleaned up for the next request.
        r = pool.request('GET', '/keepalive?close=1', retries=0,
                         headers={
                             "Connection": "close",
                         })

        self.assertEqual(r.status, 200)

        conn = pool.pool.get()
        self.assertEqual(conn.sock, None)
        pool._put_conn(conn)

        # Next request
        r = pool.request('GET', '/keepalive?close=0')

    def test_post_with_urlencode(self):
        """encode_multipart=False sends an application/x-www-form-urlencoded body."""
        data = {'banana': 'hammock', 'lol': 'cat'}
        r = self.pool.request('POST', '/echo',
                                    fields=data,
                                    encode_multipart=False)
        self.assertEqual(r.data.decode('utf-8'), urlencode(data))

    def test_post_with_multipart(self):
        """encode_multipart=True sends a multipart/form-data body."""
        data = {'banana': 'hammock', 'lol': 'cat'}
        r = self.pool.request('POST', '/echo',
                                    fields=data,
                                    encode_multipart=True)
        body = r.data.split(b'\r\n')

        encoded_data = encode_multipart_formdata(data)[0]
        expected_body = encoded_data.split(b'\r\n')

        # TODO: Get rid of extra parsing stuff when you can specify
        # a custom boundary to encode_multipart_formdata
        """
        We need to loop the return lines because a timestamp is attached
        from within encode_multipart_formdata. When the server echos back
        the data, it has the timestamp from when the data was encoded, which
        is not equivalent to when we run encode_multipart_formdata on
        the data again.
        """
        for i, line in enumerate(body):
            if line.startswith(b'--'):
                continue

            self.assertEqual(body[i], expected_body[i])

    def test_check_gzip(self):
        """Gzip-encoded responses are transparently decoded."""
        r = self.pool.request('GET', '/encodingrequest',
                                   headers={'accept-encoding': 'gzip'})
        self.assertEqual(r.headers.get('content-encoding'), 'gzip')
        self.assertEqual(r.data, b'hello, world!')

    def test_check_deflate(self):
        """Deflate-encoded responses are transparently decoded."""
        r = self.pool.request('GET', '/encodingrequest',
                                   headers={'accept-encoding': 'deflate'})
        self.assertEqual(r.headers.get('content-encoding'), 'deflate')
        self.assertEqual(r.data, b'hello, world!')

    def test_connection_count(self):
        """maxsize=1 pool serves multiple requests over one connection."""
        pool = HTTPConnectionPool(self.host, self.port, maxsize=1)

        pool.request('GET', '/')
        pool.request('GET', '/')
        pool.request('GET', '/')

        self.assertEqual(pool.num_connections, 1)
        self.assertEqual(pool.num_requests, 3)

    def test_connection_count_bigpool(self):
        """A large pool still reuses one connection for serial requests."""
        http_pool = HTTPConnectionPool(self.host, self.port, maxsize=16)

        http_pool.request('GET', '/')
        http_pool.request('GET', '/')
        http_pool.request('GET', '/')

        self.assertEqual(http_pool.num_connections, 1)
        self.assertEqual(http_pool.num_requests, 3)

    def test_partial_response(self):
        """preload_content=False allows incremental read() of the body."""
        pool = HTTPConnectionPool(self.host, self.port, maxsize=1)

        req_data = {'lol': 'cat'}
        resp_data = urlencode(req_data).encode('utf-8')

        r = pool.request('GET', '/echo', fields=req_data, preload_content=False)

        self.assertEqual(r.read(5), resp_data[:5])
        self.assertEqual(r.read(), resp_data[5:])

    def test_lazy_load_twice(self):
        """Interleaved partial reads on a maxsize=1 pool (best-effort)."""
        # This test is sad and confusing. Need to figure out what's
        # going on with partial reads and socket reuse.

        pool = HTTPConnectionPool(self.host, self.port, block=True, maxsize=1, timeout=2)

        payload_size = 1024 * 2
        first_chunk = 512

        boundary = 'foo'

        req_data = {'count': 'a' * payload_size}
        resp_data = encode_multipart_formdata(req_data, boundary=boundary)[0]

        req2_data = {'count': 'b' * payload_size}
        resp2_data = encode_multipart_formdata(req2_data, boundary=boundary)[0]

        r1 = pool.request('POST', '/echo', fields=req_data, multipart_boundary=boundary, preload_content=False)

        self.assertEqual(r1.read(first_chunk), resp_data[:first_chunk])

        try:
            r2 = pool.request('POST', '/echo', fields=req2_data, multipart_boundary=boundary,
                                    preload_content=False, pool_timeout=0.001)

            # This branch should generally bail here, but maybe someday it will
            # work? Perhaps by some sort of magic. Consider it a TODO.

            self.assertEqual(r2.read(first_chunk), resp2_data[:first_chunk])

            self.assertEqual(r1.read(), resp_data[first_chunk:])
            self.assertEqual(r2.read(), resp2_data[first_chunk:])
            self.assertEqual(pool.num_requests, 2)

        except EmptyPoolError:
            self.assertEqual(r1.read(), resp_data[first_chunk:])
            self.assertEqual(pool.num_requests, 1)

        self.assertEqual(pool.num_connections, 1)

    def test_for_double_release(self):
        """Connections are released to the pool exactly once per request."""
        MAXSIZE=5

        # Check default state
        pool = HTTPConnectionPool(self.host, self.port, maxsize=MAXSIZE)
        self.assertEqual(pool.num_connections, 0)
        self.assertEqual(pool.pool.qsize(), MAXSIZE)

        # Make an empty slot for testing
        pool.pool.get()
        self.assertEqual(pool.pool.qsize(), MAXSIZE-1)

        # Check state after simple request
        pool.urlopen('GET', '/')
        self.assertEqual(pool.pool.qsize(), MAXSIZE-1)

        # Check state without release
        pool.urlopen('GET', '/', preload_content=False)
        self.assertEqual(pool.pool.qsize(), MAXSIZE-2)

        pool.urlopen('GET', '/')
        self.assertEqual(pool.pool.qsize(), MAXSIZE-2)

        # Check state after read
        pool.urlopen('GET', '/').data
        self.assertEqual(pool.pool.qsize(), MAXSIZE-2)

        pool.urlopen('GET', '/')
        self.assertEqual(pool.pool.qsize(), MAXSIZE-2)

    def test_release_conn_parameter(self):
        """release_conn=False keeps the connection checked out of the pool."""
        MAXSIZE=5
        pool = HTTPConnectionPool(self.host, self.port, maxsize=MAXSIZE)
        self.assertEqual(pool.pool.qsize(), MAXSIZE)

        # Make request without releasing connection
        pool.request('GET', '/', release_conn=False, preload_content=False)
        self.assertEqual(pool.pool.qsize(), MAXSIZE-1)
コード例 #19
0
class TestConnectionPool(HTTPDummyServerTestCase):
    """Integration tests exercising HTTPConnectionPool against the in-process
    dummy HTTP server provided by HTTPDummyServerTestCase.

    Many tests assert pool bookkeeping (num_connections, num_requests,
    free-slot qsize) that depends on the exact sequence of requests.
    """

    def setUp(self):
        # One shared pool per test, pointed at the dummy server.
        self.pool = HTTPConnectionPool(self.host, self.port)

    def test_get(self):
        r = self.pool.request("GET", "/specific_method", fields={"method": "GET"})
        self.assertEqual(r.status, 200, r.data)

    def test_post_url(self):
        r = self.pool.request("POST", "/specific_method", fields={"method": "POST"})
        self.assertEqual(r.status, 200, r.data)

    def test_urlopen_put(self):
        r = self.pool.urlopen("PUT", "/specific_method?method=PUT")
        self.assertEqual(r.status, 200, r.data)

    def test_wrong_specific_method(self):
        # To make sure the dummy server is actually returning failed responses
        r = self.pool.request("GET", "/specific_method", fields={"method": "POST"})
        self.assertEqual(r.status, 400, r.data)

        r = self.pool.request("POST", "/specific_method", fields={"method": "GET"})
        self.assertEqual(r.status, 400, r.data)

    def test_upload(self):
        """Multipart file upload should be accepted by the dummy server."""
        data = "I'm in ur multipart form-data, hazing a cheezburgr"
        fields = {
            "upload_param": "filefield",
            "upload_filename": "lolcat.txt",
            "upload_size": len(data),
            "filefield": ("lolcat.txt", data),
        }

        r = self.pool.request("POST", "/upload", fields=fields)
        self.assertEqual(r.status, 200, r.data)

    def test_one_name_multiple_values(self):
        """Repeated field names must survive both urlencoded and multipart."""
        fields = [("foo", "a"), ("foo", "b")]

        # urlencode
        r = self.pool.request("GET", "/echo", fields=fields)
        self.assertEqual(r.data, b"foo=a&foo=b")

        # multipart
        r = self.pool.request("POST", "/echo", fields=fields)
        self.assertEqual(r.data.count(b'name="foo"'), 2)

    def test_unicode_upload(self):
        """Non-ASCII filenames and field data should upload successfully."""
        fieldname = u("myfile")
        filename = u("\xe2\x99\xa5.txt")
        data = u("\xe2\x99\xa5").encode("utf8")
        size = len(data)

        fields = {
            u("upload_param"): fieldname,
            u("upload_filename"): filename,
            u("upload_size"): size,
            fieldname: (filename, data),
        }

        r = self.pool.request("POST", "/upload", fields=fields)
        self.assertEqual(r.status, 200, r.data)

    def test_timeout_float(self):
        url = "/sleep?seconds=0.005"
        # Pool-global timeout
        pool = HTTPConnectionPool(self.host, self.port, timeout=0.001)
        self.assertRaises(ReadTimeoutError, pool.request, "GET", url)

    def test_nagle(self):
        """ Test that connections have TCP_NODELAY turned on """
        pool = HTTPConnectionPool(self.host, self.port)
        conn = pool._get_conn()
        pool._make_request(conn, "GET", "/")
        tcp_nodelay_setting = conn.sock.getsockopt(socket.IPPROTO_TCP, socket.TCP_NODELAY)
        assert tcp_nodelay_setting > 0, (
            "Expected TCP_NODELAY to be set on the "
            "socket (with value greater than 0) "
            "but instead was %s" % tcp_nodelay_setting
        )

    def test_timeout(self):
        """Pool-level and request-level read timeouts should both raise."""
        url = "/sleep?seconds=0.005"
        timeout = util.Timeout(read=0.001)

        # Pool-global timeout
        pool = HTTPConnectionPool(self.host, self.port, timeout=timeout)

        conn = pool._get_conn()
        self.assertRaises(ReadTimeoutError, pool._make_request, conn, "GET", url)
        pool._put_conn(conn)

        self.assertRaises(ReadTimeoutError, pool.request, "GET", url)

        # Request-specific timeouts should raise errors
        pool = HTTPConnectionPool(self.host, self.port, timeout=0.5)

        conn = pool._get_conn()
        self.assertRaises(ReadTimeoutError, pool._make_request, conn, "GET", url, timeout=timeout)
        pool._put_conn(conn)

        self.assertRaises(ReadTimeoutError, pool.request, "GET", url, timeout=timeout)

        # Timeout int/float passed directly to request and _make_request should
        # raise a request timeout
        self.assertRaises(ReadTimeoutError, pool.request, "GET", url, timeout=0.001)
        conn = pool._new_conn()
        self.assertRaises(ReadTimeoutError, pool._make_request, conn, "GET", url, timeout=0.001)
        pool._put_conn(conn)

        # Timeout int/float passed directly to _make_request should not raise a
        # request timeout if it's a high value
        pool.request("GET", url, timeout=5)

    @timed(0.1)
    def test_connect_timeout(self):
        # TARPIT_HOST never completes the TCP handshake, forcing a connect
        # timeout rather than a read timeout.
        url = "/sleep"
        timeout = util.Timeout(connect=0.001)

        # Pool-global timeout
        pool = HTTPConnectionPool(TARPIT_HOST, self.port, timeout=timeout)
        conn = pool._get_conn()
        self.assertRaises(ConnectTimeoutError, pool._make_request, conn, "GET", url)
        pool._put_conn(conn)
        self.assertRaises(ConnectTimeoutError, pool.request, "GET", url)

        # Request-specific connection timeouts
        big_timeout = util.Timeout(read=0.5, connect=0.5)
        pool = HTTPConnectionPool(TARPIT_HOST, self.port, timeout=big_timeout)
        conn = pool._get_conn()
        self.assertRaises(ConnectTimeoutError, pool._make_request, conn, "GET", url, timeout=timeout)

        pool._put_conn(conn)
        self.assertRaises(ConnectTimeoutError, pool.request, "GET", url, timeout=timeout)

    def test_timeout_reset(self):
        """ If the read timeout isn't set, socket timeout should reset """
        url = "/sleep?seconds=0.005"
        timeout = util.Timeout(connect=0.001)
        pool = HTTPConnectionPool(self.host, self.port, timeout=timeout)
        conn = pool._get_conn()
        try:
            pool._make_request(conn, "GET", url)
        except ReadTimeoutError:
            self.fail("This request shouldn't trigger a read timeout.")

    @timed(2.0)
    def test_total_timeout(self):
        """total= should cap both the connect and the read phase."""
        url = "/sleep?seconds=0.005"

        timeout = util.Timeout(connect=3, read=5, total=0.001)
        pool = HTTPConnectionPool(TARPIT_HOST, self.port, timeout=timeout)
        conn = pool._get_conn()
        self.assertRaises(ConnectTimeoutError, pool._make_request, conn, "GET", url)

        # This will get the socket to raise an EAGAIN on the read
        timeout = util.Timeout(connect=3, read=0)
        pool = HTTPConnectionPool(self.host, self.port, timeout=timeout)
        conn = pool._get_conn()
        self.assertRaises(ReadTimeoutError, pool._make_request, conn, "GET", url)

        # The connect should succeed and this should hit the read timeout
        timeout = util.Timeout(connect=3, read=5, total=0.002)
        pool = HTTPConnectionPool(self.host, self.port, timeout=timeout)
        conn = pool._get_conn()
        self.assertRaises(ReadTimeoutError, pool._make_request, conn, "GET", url)

        timeout = util.Timeout(total=None, connect=0.001)
        pool = HTTPConnectionPool(TARPIT_HOST, self.port, timeout=timeout)
        conn = pool._get_conn()
        self.assertRaises(ConnectTimeoutError, pool._make_request, conn, "GET", url)

    def test_timeout_success(self):
        """Reusing a pool/timeout across requests must not raise
        "Timeout already started"."""
        timeout = util.Timeout(connect=3, read=5, total=None)
        pool = HTTPConnectionPool(self.host, self.port, timeout=timeout)
        pool.request("GET", "/")
        # This should not raise a "Timeout already started" error
        pool.request("GET", "/")

        pool = HTTPConnectionPool(self.host, self.port, timeout=timeout)
        # This should also not raise a "Timeout already started" error
        pool.request("GET", "/")

        timeout = util.Timeout(total=None)
        pool = HTTPConnectionPool(self.host, self.port, timeout=timeout)
        pool.request("GET", "/")

    def test_tunnel(self):
        # note the actual httplib.py has no tests for this functionality
        timeout = util.Timeout(total=None)
        pool = HTTPConnectionPool(self.host, self.port, timeout=timeout)
        conn = pool._get_conn()
        try:
            conn.set_tunnel(self.host, self.port)
        except AttributeError:  # python 2.6
            conn._set_tunnel(self.host, self.port)

        conn._tunnel = mock.Mock(return_value=None)
        pool._make_request(conn, "GET", "/")
        conn._tunnel.assert_called_once_with()

        # test that it's not called when tunnel is not set
        timeout = util.Timeout(total=None)
        pool = HTTPConnectionPool(self.host, self.port, timeout=timeout)
        conn = pool._get_conn()

        conn._tunnel = mock.Mock(return_value=None)
        pool._make_request(conn, "GET", "/")
        self.assertEqual(conn._tunnel.called, False)

    def test_redirect(self):
        r = self.pool.request("GET", "/redirect", fields={"target": "/"}, redirect=False)
        self.assertEqual(r.status, 303)

        r = self.pool.request("GET", "/redirect", fields={"target": "/"})
        self.assertEqual(r.status, 200)
        self.assertEqual(r.data, b"Dummy server!")

    def test_maxretry(self):
        """retries=0 on a redirecting URL must raise MaxRetryError."""
        try:
            self.pool.request("GET", "/redirect", fields={"target": "/"}, retries=0)
            self.fail("Failed to raise MaxRetryError exception")
        except MaxRetryError:
            pass

    def test_keepalive(self):
        """Two keep-alive requests should reuse a single connection."""
        pool = HTTPConnectionPool(self.host, self.port, block=True, maxsize=1)

        r = pool.request("GET", "/keepalive?close=0")
        r = pool.request("GET", "/keepalive?close=0")

        self.assertEqual(r.status, 200)
        self.assertEqual(pool.num_connections, 1)
        self.assertEqual(pool.num_requests, 2)

    def test_keepalive_close(self):
        # NOTE: This used to run against apache.org but it made the test suite
        # really slow and fail half the time. Setting it to skip until we can
        # make this run better locally.
        pool = HTTPConnectionPool(self.host, self.port, block=True, maxsize=1, timeout=2)

        r = pool.request("GET", "/keepalive?close=1", retries=0, headers={"Connection": "close"})

        self.assertEqual(pool.num_connections, 1)

        # The dummyserver will have responded with Connection:close,
        # and httplib will properly cleanup the socket.

        # We grab the HTTPConnection object straight from the Queue,
        # because _get_conn() is where the check & reset occurs
        # pylint: disable-msg=W0212
        conn = pool.pool.get()
        self.assertEqual(conn.sock, None)
        pool._put_conn(conn)

        # Now with keep-alive
        r = pool.request("GET", "/keepalive?close=0", retries=0, headers={"Connection": "keep-alive"})

        # The dummyserver responded with Connection:keep-alive, the connection
        # persists.
        conn = pool.pool.get()
        self.assertNotEqual(conn.sock, None)
        pool._put_conn(conn)

        # Another request asking the server to close the connection. This one
        # should get cleaned up for the next request.
        r = pool.request("GET", "/keepalive?close=1", retries=0, headers={"Connection": "close"})

        self.assertEqual(r.status, 200)

        conn = pool.pool.get()
        self.assertEqual(conn.sock, None)
        pool._put_conn(conn)

        # Next request
        r = pool.request("GET", "/keepalive?close=0")

    def test_post_with_urlencode(self):
        data = {"banana": "hammock", "lol": "cat"}
        r = self.pool.request("POST", "/echo", fields=data, encode_multipart=False)
        self.assertEqual(r.data.decode("utf-8"), urlencode(data))

    def test_post_with_multipart(self):
        data = {"banana": "hammock", "lol": "cat"}
        r = self.pool.request("POST", "/echo", fields=data, encode_multipart=True)
        body = r.data.split(b"\r\n")

        encoded_data = encode_multipart_formdata(data)[0]
        expected_body = encoded_data.split(b"\r\n")

        # TODO: Get rid of extra parsing stuff when you can specify
        # a custom boundary to encode_multipart_formdata
        """
        We need to loop the return lines because a timestamp is attached
        from within encode_multipart_formdata. When the server echos back
        the data, it has the timestamp from when the data was encoded, which
        is not equivalent to when we run encode_multipart_formdata on
        the data again.
        """
        for i, line in enumerate(body):
            if line.startswith(b"--"):
                continue

            self.assertEqual(body[i], expected_body[i])

    def test_check_gzip(self):
        r = self.pool.request("GET", "/encodingrequest", headers={"accept-encoding": "gzip"})
        self.assertEqual(r.headers.get("content-encoding"), "gzip")
        self.assertEqual(r.data, b"hello, world!")

    def test_check_deflate(self):
        r = self.pool.request("GET", "/encodingrequest", headers={"accept-encoding": "deflate"})
        self.assertEqual(r.headers.get("content-encoding"), "deflate")
        self.assertEqual(r.data, b"hello, world!")

    def test_bad_decode(self):
        """An unknown content-encoding in the response must raise DecodeError."""
        self.assertRaises(
            DecodeError, self.pool.request, "GET", "/encodingrequest", headers={"accept-encoding": "garbage-deflate"}
        )

        self.assertRaises(
            DecodeError, self.pool.request, "GET", "/encodingrequest", headers={"accept-encoding": "garbage-gzip"}
        )

    def test_connection_count(self):
        pool = HTTPConnectionPool(self.host, self.port, maxsize=1)

        pool.request("GET", "/")
        pool.request("GET", "/")
        pool.request("GET", "/")

        self.assertEqual(pool.num_connections, 1)
        self.assertEqual(pool.num_requests, 3)

    def test_connection_count_bigpool(self):
        http_pool = HTTPConnectionPool(self.host, self.port, maxsize=16)

        http_pool.request("GET", "/")
        http_pool.request("GET", "/")
        http_pool.request("GET", "/")

        self.assertEqual(http_pool.num_connections, 1)
        self.assertEqual(http_pool.num_requests, 3)

    def test_partial_response(self):
        pool = HTTPConnectionPool(self.host, self.port, maxsize=1)

        req_data = {"lol": "cat"}
        resp_data = urlencode(req_data).encode("utf-8")

        r = pool.request("GET", "/echo", fields=req_data, preload_content=False)

        self.assertEqual(r.read(5), resp_data[:5])
        self.assertEqual(r.read(), resp_data[5:])

    def test_lazy_load_twice(self):
        # This test is sad and confusing. Need to figure out what's
        # going on with partial reads and socket reuse.

        pool = HTTPConnectionPool(self.host, self.port, block=True, maxsize=1, timeout=2)

        payload_size = 1024 * 2
        first_chunk = 512

        boundary = "foo"

        req_data = {"count": "a" * payload_size}
        resp_data = encode_multipart_formdata(req_data, boundary=boundary)[0]

        req2_data = {"count": "b" * payload_size}
        resp2_data = encode_multipart_formdata(req2_data, boundary=boundary)[0]

        r1 = pool.request("POST", "/echo", fields=req_data, multipart_boundary=boundary, preload_content=False)

        self.assertEqual(r1.read(first_chunk), resp_data[:first_chunk])

        try:
            r2 = pool.request(
                "POST",
                "/echo",
                fields=req2_data,
                multipart_boundary=boundary,
                preload_content=False,
                pool_timeout=0.001,
            )

            # This branch should generally bail here, but maybe someday it will
            # work? Perhaps by some sort of magic. Consider it a TODO.

            self.assertEqual(r2.read(first_chunk), resp2_data[:first_chunk])

            self.assertEqual(r1.read(), resp_data[first_chunk:])
            self.assertEqual(r2.read(), resp2_data[first_chunk:])
            self.assertEqual(pool.num_requests, 2)

        except EmptyPoolError:
            self.assertEqual(r1.read(), resp_data[first_chunk:])
            self.assertEqual(pool.num_requests, 1)

        self.assertEqual(pool.num_connections, 1)

    def test_for_double_release(self):
        """Releasing a connection twice must not inflate the pool's
        free-slot count past its true capacity."""
        MAXSIZE = 5

        # Check default state
        pool = HTTPConnectionPool(self.host, self.port, maxsize=MAXSIZE)
        self.assertEqual(pool.num_connections, 0)
        self.assertEqual(pool.pool.qsize(), MAXSIZE)

        # Make an empty slot for testing
        pool.pool.get()
        self.assertEqual(pool.pool.qsize(), MAXSIZE - 1)

        # Check state after simple request
        pool.urlopen("GET", "/")
        self.assertEqual(pool.pool.qsize(), MAXSIZE - 1)

        # Check state without release
        pool.urlopen("GET", "/", preload_content=False)
        self.assertEqual(pool.pool.qsize(), MAXSIZE - 2)

        pool.urlopen("GET", "/")
        self.assertEqual(pool.pool.qsize(), MAXSIZE - 2)

        # Check state after read
        pool.urlopen("GET", "/").data
        self.assertEqual(pool.pool.qsize(), MAXSIZE - 2)

        pool.urlopen("GET", "/")
        self.assertEqual(pool.pool.qsize(), MAXSIZE - 2)

    def test_release_conn_parameter(self):
        """request(release_conn=False) must keep the connection checked out."""
        MAXSIZE = 5
        pool = HTTPConnectionPool(self.host, self.port, maxsize=MAXSIZE)
        self.assertEqual(pool.pool.qsize(), MAXSIZE)

        # Make request without releasing connection
        pool.request("GET", "/", release_conn=False, preload_content=False)
        self.assertEqual(pool.pool.qsize(), MAXSIZE - 1)

    # NOTE(review): using SkipTest as a decorator looks like a nose-era idiom;
    # under plain unittest, decorating with an exception class does not skip
    # the test — verify the test runner in use actually honors this.
    @SkipTest
    def test_dns_error(self):
        # This fails on everything except Py27. Not sure why...
        pool = HTTPConnectionPool("thishostdoesnotexist.invalid", self.port, timeout=0.001)
        self.assertRaises(MaxRetryError, pool.request, "GET", "/test", retries=2)
# ---- コード例 #20 (Code example #20) — score: 0 ----
class TestFileBodiesOnRetryOrRedirect(HTTPDummyServerTestCase):
    """Tests that file-like request bodies are correctly rewound and resent
    when a request is retried or redirected.

    The dummy server's /successful_retry endpoint returns 418 until enough
    retries have happened, which is why 418 appears in status_forcelist.
    """

    def setUp(self):
        self.pool = HTTPConnectionPool(self.host, self.port, timeout=0.1)
        self.addCleanup(self.pool.close)

    def test_retries_put_filehandle(self):
        """HTTP PUT retry with a file-like object should not timeout"""
        retry = Retry(total=3, status_forcelist=[418])
        # httplib reads in 8k chunks; use a larger content length
        content_length = 65535
        data = b"A" * content_length
        uploaded_file = io.BytesIO(data)
        headers = {
            "test-name": "test_retries_put_filehandle",
            "Content-Length": str(content_length),
        }
        resp = self.pool.urlopen(
            "PUT",
            "/successful_retry",
            headers=headers,
            retries=retry,
            body=uploaded_file,
            assert_same_host=False,
            redirect=False,
        )
        assert resp.status == 200

    def test_redirect_put_file(self):
        """PUT with file object should work with a redirection response"""
        retry = Retry(total=3, status_forcelist=[418])
        # httplib reads in 8k chunks; use a larger content length
        content_length = 65535
        data = b"A" * content_length
        uploaded_file = io.BytesIO(data)
        headers = {
            "test-name": "test_redirect_put_file",
            "Content-Length": str(content_length),
        }
        # 307 preserves the request method and body across the redirect.
        url = "/redirect?target=/echo&status=307"
        resp = self.pool.urlopen(
            "PUT",
            url,
            headers=headers,
            retries=retry,
            body=uploaded_file,
            assert_same_host=False,
            redirect=True,
        )
        assert resp.status == 200
        assert resp.data == data

    def test_redirect_with_failed_tell(self):
        """Abort request if failed to get a position from tell()"""
        class BadTellObject(io.BytesIO):
            # tell() failing means the body position can't be recorded,
            # so the body can't be rewound for the redirected request.
            def tell(self):
                raise IOError

        body = BadTellObject(b"the data")
        url = "/redirect?target=/successful_retry"
        # httplib uses fileno if Content-Length isn't supplied,
        # which is unsupported by BytesIO.
        headers = {"Content-Length": "8"}
        try:
            self.pool.urlopen("PUT", url, headers=headers, body=body)
            self.fail("PUT successful despite failed rewind.")
        except UnrewindableBodyError as e:
            assert "Unable to record file position for" in str(e)
# ---- コード例 #21 (Code example #21) — score: 0 ----
class TestConnectionPool(HTTPDummyServerTestCase):
    def setUp(self):
        # One shared pool per test, pointed at the in-process dummy server.
        self.pool = HTTPConnectionPool(self.host, self.port)

    def test_get(self):
        """A plain GET to /specific_method should succeed with a 200."""
        fields = {'method': 'GET'}
        response = self.pool.request('GET', '/specific_method', fields=fields)
        self.assertEqual(response.status, 200, response.data)

    def test_post_url(self):
        """A plain POST to /specific_method should succeed with a 200."""
        fields = {'method': 'POST'}
        response = self.pool.request('POST', '/specific_method', fields=fields)
        self.assertEqual(response.status, 200, response.data)

    def test_urlopen_put(self):
        """urlopen() should support PUT with the method given in the query."""
        response = self.pool.urlopen('PUT', '/specific_method?method=PUT')
        self.assertEqual(response.status, 200, response.data)

    def test_wrong_specific_method(self):
        """Sanity check: the dummy server rejects mismatched methods with 400."""
        # Send one method while declaring the other in the fields, both ways.
        for sent_method, declared_method in (('GET', 'POST'), ('POST', 'GET')):
            response = self.pool.request(sent_method,
                                         '/specific_method',
                                         fields={'method': declared_method})
            self.assertEqual(response.status, 400, response.data)

    def test_upload(self):
        """A multipart file upload should be accepted by the dummy server."""
        data = "I'm in ur multipart form-data, hazing a cheezburgr"
        filename = 'lolcat.txt'
        fields = {
            'upload_param': 'filefield',
            'upload_filename': filename,
            'upload_size': len(data),
            'filefield': (filename, data),
        }
        response = self.pool.request('POST', '/upload', fields=fields)
        self.assertEqual(response.status, 200, response.data)

    def test_one_name_multiple_values(self):
        """Repeated field names must be preserved in both encodings."""
        fields = [('foo', 'a'), ('foo', 'b')]

        # urlencoded GET keeps both values in order
        response = self.pool.request('GET', '/echo', fields=fields)
        self.assertEqual(response.data, b'foo=a&foo=b')

        # multipart POST emits one part per value
        response = self.pool.request('POST', '/echo', fields=fields)
        self.assertEqual(response.data.count(b'name="foo"'), 2)

    def test_unicode_upload(self):
        """Non-ASCII filenames and field data should upload successfully."""
        fieldname = u('myfile')
        filename = u('\xe2\x99\xa5.txt')
        data = u('\xe2\x99\xa5').encode('utf8')
        size = len(data)

        fields = {
            u('upload_param'): fieldname,
            u('upload_filename'): filename,
            u('upload_size'): size,
            fieldname: (filename, data),
        }

        r = self.pool.request('POST', '/upload', fields=fields)
        self.assertEqual(r.status, 200, r.data)

    def test_timeout_float(self):
        """A bare float pool-level timeout should raise ReadTimeoutError."""
        # retries=False so the timeout surfaces instead of being retried.
        pool = HTTPConnectionPool(self.host, self.port, timeout=0.001,
                                  retries=False)
        self.assertRaises(ReadTimeoutError, pool.request,
                          'GET', '/sleep?seconds=0.005')

    def test_conn_closed(self):
        """After a read timeout the connection's socket should be unusable."""
        pool = HTTPConnectionPool(self.host,
                                  self.port,
                                  timeout=0.001,
                                  retries=False)
        conn = pool._get_conn()
        pool._put_conn(conn)
        try:
            url = '/sleep?seconds=0.005'
            pool.urlopen('GET', url)
            self.fail("The request should fail with a timeout error.")
        except ReadTimeoutError:
            # If the socket survived, reading from it must now fail.
            if conn.sock:
                self.assertRaises(socket.error, conn.sock.recv, 1024)
        finally:
            pool._put_conn(conn)

    def test_nagle(self):
        """ Test that connections have TCP_NODELAY turned on """
        # This test needs to be here in order to be run. socket.create_connection actually tries to
        # connect to the host provided so we need a dummyserver to be running.
        pool = HTTPConnectionPool(self.host, self.port)
        conn = pool._get_conn()
        pool._make_request(conn, 'GET', '/')
        tcp_nodelay_setting = conn.sock.getsockopt(socket.IPPROTO_TCP,
                                                   socket.TCP_NODELAY)
        assert tcp_nodelay_setting > 0, (
            "Expected TCP_NODELAY to be set on the "
            "socket (with value greater than 0) "
            "but instead was %s" % tcp_nodelay_setting)

    def test_socket_options(self):
        """Test that connections accept socket options."""
        # This test needs to be here in order to be run. socket.create_connection actually tries to
        # connect to the host provided so we need a dummyserver to be running.
        options = [(socket.SOL_SOCKET, socket.SO_KEEPALIVE, 1)]
        pool = HTTPConnectionPool(self.host, self.port, socket_options=options)
        sock = pool._new_conn()._new_conn()  # Get the socket
        keepalive_enabled = sock.getsockopt(socket.SOL_SOCKET,
                                            socket.SO_KEEPALIVE) > 0
        self.assertTrue(keepalive_enabled)
        sock.close()

    def test_disable_default_socket_options(self):
        """Test that passing None disables all socket options."""
        # This test needs to be here in order to be run. socket.create_connection actually tries to
        # connect to the host provided so we need a dummyserver to be running.
        pool = HTTPConnectionPool(self.host, self.port, socket_options=None)
        sock = pool._new_conn()._new_conn()
        nagle_enabled = sock.getsockopt(socket.IPPROTO_TCP,
                                        socket.TCP_NODELAY) == 0
        self.assertTrue(nagle_enabled)
        sock.close()

    def test_defaults_are_applied(self):
        """Test that modifying the default socket options works."""
        # This test needs to be here in order to be run. socket.create_connection actually tries to
        # connect to the host provided so we need a dummyserver to be running.
        pool = HTTPConnectionPool(self.host, self.port)
        # Get the HTTPConnection instance
        conn = pool._new_conn()
        # Update the default socket options
        conn.default_socket_options += [(socket.SOL_SOCKET,
                                         socket.SO_KEEPALIVE, 1)]
        s = conn._new_conn()
        # Both the built-in default (TCP_NODELAY) and the added option
        # (SO_KEEPALIVE) must be in effect on the new socket.
        nagle_disabled = s.getsockopt(socket.IPPROTO_TCP,
                                      socket.TCP_NODELAY) > 0
        using_keepalive = s.getsockopt(socket.SOL_SOCKET,
                                       socket.SO_KEEPALIVE) > 0
        self.assertTrue(nagle_disabled)
        self.assertTrue(using_keepalive)

    @timed(0.5)
    def test_timeout(self):
        """ Requests should time out when expected """
        url = '/sleep?seconds=0.002'
        timeout = Timeout(read=0.001)

        # Pool-global timeout
        pool = HTTPConnectionPool(self.host,
                                  self.port,
                                  timeout=timeout,
                                  retries=False)

        conn = pool._get_conn()
        self.assertRaises(ReadTimeoutError, pool._make_request, conn, 'GET',
                          url)
        pool._put_conn(conn)

        time.sleep(0.02)  # Wait for server to start receiving again. :(

        self.assertRaises(ReadTimeoutError, pool.request, 'GET', url)

        # Request-specific timeouts should raise errors
        pool = HTTPConnectionPool(self.host,
                                  self.port,
                                  timeout=0.1,
                                  retries=False)

        conn = pool._get_conn()
        self.assertRaises(ReadTimeoutError,
                          pool._make_request,
                          conn,
                          'GET',
                          url,
                          timeout=timeout)
        pool._put_conn(conn)

        time.sleep(0.02)  # Wait for server to start receiving again. :(

        self.assertRaises(ReadTimeoutError,
                          pool.request,
                          'GET',
                          url,
                          timeout=timeout)

        # Timeout int/float passed directly to request and _make_request should
        # raise a request timeout
        self.assertRaises(ReadTimeoutError,
                          pool.request,
                          'GET',
                          url,
                          timeout=0.001)
        conn = pool._new_conn()
        self.assertRaises(ReadTimeoutError,
                          pool._make_request,
                          conn,
                          'GET',
                          url,
                          timeout=0.001)
        pool._put_conn(conn)

        # Timeout int/float passed directly to _make_request should not raise a
        # request timeout if it's a high value
        pool.request('GET', url, timeout=1)

    @requires_network
    @timed(0.5)
    def test_connect_timeout(self):
        """Connect timeouts should raise, at both pool and request level."""
        # TARPIT_HOST never completes the TCP handshake, forcing the
        # connect phase (rather than the read phase) to time out.
        url = '/sleep?seconds=0.005'
        timeout = Timeout(connect=0.001)

        # Pool-global timeout
        pool = HTTPConnectionPool(TARPIT_HOST, self.port, timeout=timeout)
        conn = pool._get_conn()
        self.assertRaises(ConnectTimeoutError, pool._make_request, conn, 'GET',
                          url)

        # Retries
        retries = Retry(connect=0)
        self.assertRaises(MaxRetryError,
                          pool.request,
                          'GET',
                          url,
                          retries=retries)

        # Request-specific connection timeouts
        big_timeout = Timeout(read=0.2, connect=0.2)
        pool = HTTPConnectionPool(TARPIT_HOST,
                                  self.port,
                                  timeout=big_timeout,
                                  retries=False)
        conn = pool._get_conn()
        self.assertRaises(ConnectTimeoutError,
                          pool._make_request,
                          conn,
                          'GET',
                          url,
                          timeout=timeout)

        pool._put_conn(conn)
        self.assertRaises(ConnectTimeoutError,
                          pool.request,
                          'GET',
                          url,
                          timeout=timeout)

    def test_connection_error_retries(self):
        """ ECONNREFUSED error should raise a connection error, with retries """
        # An unused port guarantees the TCP connection is refused.
        port = find_unused_port()
        pool = HTTPConnectionPool(self.host, port)
        try:
            pool.request('GET', '/', retries=Retry(connect=3))
            self.fail("Should have failed with a connection error.")
        except MaxRetryError as e:
            # The underlying cause should be a ProtocolError wrapping
            # the OS-level ECONNREFUSED.
            self.assertTrue(isinstance(e.reason, ProtocolError))
            self.assertEqual(e.reason.args[1].errno, errno.ECONNREFUSED)

    def test_timeout_reset(self):
        """ If the read timeout isn't set, socket timeout should reset """
        url = '/sleep?seconds=0.005'
        timeout = Timeout(connect=0.001)
        pool = HTTPConnectionPool(self.host, self.port, timeout=timeout)
        conn = pool._get_conn()
        try:
            pool._make_request(conn, 'GET', url)
        except ReadTimeoutError:
            # Only a connect timeout was configured, so the 5 ms read
            # must not inherit the 1 ms connect timeout.
            self.fail("This request shouldn't trigger a read timeout.")

    @requires_network
    @timed(5.0)
    def test_total_timeout(self):
        """total= should override the per-phase connect/read timeouts."""
        url = '/sleep?seconds=0.005'

        timeout = Timeout(connect=3, read=5, total=0.001)
        pool = HTTPConnectionPool(TARPIT_HOST, self.port, timeout=timeout)
        conn = pool._get_conn()
        self.assertRaises(ConnectTimeoutError, pool._make_request, conn, 'GET',
                          url)

        # This will get the socket to raise an EAGAIN on the read
        timeout = Timeout(connect=3, read=0)
        pool = HTTPConnectionPool(self.host, self.port, timeout=timeout)
        conn = pool._get_conn()
        self.assertRaises(ReadTimeoutError, pool._make_request, conn, 'GET',
                          url)

        # The connect should succeed and this should hit the read timeout
        timeout = Timeout(connect=3, read=5, total=0.002)
        pool = HTTPConnectionPool(self.host, self.port, timeout=timeout)
        conn = pool._get_conn()
        self.assertRaises(ReadTimeoutError, pool._make_request, conn, 'GET',
                          url)

    @requires_network
    def test_none_total_applies_connect(self):
        """total=None must not disable an explicit connect timeout."""
        url = '/sleep?seconds=0.005'
        timeout = Timeout(total=None, connect=0.001)
        # The tarpit host never completes the handshake, so the 1 ms
        # connect timeout must fire.
        pool = HTTPConnectionPool(TARPIT_HOST, self.port, timeout=timeout)
        conn = pool._get_conn()
        self.assertRaises(ConnectTimeoutError, pool._make_request, conn, 'GET',
                          url)

    def test_timeout_success(self):
        """Reusing a Timeout object across requests/pools must be safe."""
        timeout = Timeout(connect=3, read=5, total=None)
        pool = HTTPConnectionPool(self.host, self.port, timeout=timeout)
        pool.request('GET', '/')
        # This should not raise a "Timeout already started" error
        pool.request('GET', '/')

        pool = HTTPConnectionPool(self.host, self.port, timeout=timeout)
        # This should also not raise a "Timeout already started" error
        pool.request('GET', '/')

        timeout = Timeout(total=None)
        pool = HTTPConnectionPool(self.host, self.port, timeout=timeout)
        pool.request('GET', '/')

    def test_tunnel(self):
        """_make_request must issue _tunnel() iff a tunnel was configured."""
        # note the actual httplib.py has no tests for this functionality
        timeout = Timeout(total=None)
        pool = HTTPConnectionPool(self.host, self.port, timeout=timeout)
        conn = pool._get_conn()
        try:
            conn.set_tunnel(self.host, self.port)
        except AttributeError:  # python 2.6
            conn._set_tunnel(self.host, self.port)

        # Stub out the actual CONNECT handshake; only the call matters.
        conn._tunnel = mock.Mock(return_value=None)
        pool._make_request(conn, 'GET', '/')
        conn._tunnel.assert_called_once_with()

        # test that it's not called when tunnel is not set
        timeout = Timeout(total=None)
        pool = HTTPConnectionPool(self.host, self.port, timeout=timeout)
        conn = pool._get_conn()

        conn._tunnel = mock.Mock(return_value=None)
        pool._make_request(conn, 'GET', '/')
        self.assertEqual(conn._tunnel.called, False)

    def test_redirect(self):
        """A 303 is surfaced raw with redirect=False and followed by default."""
        # With redirects disabled the server's 303 is returned untouched.
        response = self.pool.request('GET',
                                     '/redirect',
                                     fields={'target': '/'},
                                     redirect=False)
        self.assertEqual(response.status, 303)

        # By default the redirect is followed through to the target.
        response = self.pool.request('GET', '/redirect', fields={'target': '/'})
        self.assertEqual(response.status, 200)
        self.assertEqual(response.data, b'Dummy server!')

    def test_bad_connect(self):
        """An unresolvable hostname should exhaust retries with MaxRetryError."""
        pool = HTTPConnectionPool('badhost.invalid', self.port)
        try:
            pool.request('GET', '/', retries=5)
            self.fail("should raise timeout exception here")
        except MaxRetryError as e:
            # The reason chain carries the underlying name-resolution error.
            self.assertTrue(isinstance(e.reason, ProtocolError), e.reason)

    def test_keepalive(self):
        """Two keep-alive requests should reuse a single pooled connection."""
        http_pool = HTTPConnectionPool(self.host, self.port, block=True,
                                       maxsize=1)

        response = None
        for _ in range(2):
            response = http_pool.request('GET', '/keepalive?close=0')

        self.assertEqual(response.status, 200)
        # One physical connection served both requests.
        self.assertEqual(http_pool.num_connections, 1)
        self.assertEqual(http_pool.num_requests, 2)

    def test_keepalive_close(self):
        """Connection: close drops the socket; keep-alive preserves it."""
        pool = HTTPConnectionPool(self.host,
                                  self.port,
                                  block=True,
                                  maxsize=1,
                                  timeout=2)

        r = pool.request('GET',
                         '/keepalive?close=1',
                         retries=0,
                         headers={
                             "Connection": "close",
                         })

        self.assertEqual(pool.num_connections, 1)

        # The dummyserver will have responded with Connection:close,
        # and httplib will properly cleanup the socket.

        # We grab the HTTPConnection object straight from the Queue,
        # because _get_conn() is where the check & reset occurs
        # pylint: disable-msg=W0212
        conn = pool.pool.get()
        self.assertEqual(conn.sock, None)
        pool._put_conn(conn)

        # Now with keep-alive
        r = pool.request('GET',
                         '/keepalive?close=0',
                         retries=0,
                         headers={
                             "Connection": "keep-alive",
                         })

        # The dummyserver responded with Connection:keep-alive, the connection
        # persists.
        conn = pool.pool.get()
        self.assertNotEqual(conn.sock, None)
        pool._put_conn(conn)

        # Another request asking the server to close the connection. This one
        # should get cleaned up for the next request.
        r = pool.request('GET',
                         '/keepalive?close=1',
                         retries=0,
                         headers={
                             "Connection": "close",
                         })

        self.assertEqual(r.status, 200)

        conn = pool.pool.get()
        self.assertEqual(conn.sock, None)
        pool._put_conn(conn)

        # Next request
        r = pool.request('GET', '/keepalive?close=0')

    def test_post_with_urlencode(self):
        """Non-multipart POST fields should arrive urlencoded."""
        fields = {'banana': 'hammock', 'lol': 'cat'}
        response = self.pool.request('POST',
                                     '/echo',
                                     fields=fields,
                                     encode_multipart=False)
        # The echo endpoint returns the body exactly as it was sent.
        self.assertEqual(response.data.decode('utf-8'), urlencode(fields))

    def test_post_with_multipart(self):
        """Multipart-encoded POST fields should be echoed back verbatim."""
        data = {'banana': 'hammock', 'lol': 'cat'}
        r = self.pool.request('POST',
                              '/echo',
                              fields=data,
                              encode_multipart=True)
        body = r.data.split(b'\r\n')

        encoded_data = encode_multipart_formdata(data)[0]
        expected_body = encoded_data.split(b'\r\n')

        # TODO: Get rid of extra parsing stuff when you can specify
        # a custom boundary to encode_multipart_formdata
        #
        # We need to loop the return lines because a timestamp is attached
        # from within encode_multipart_formdata. When the server echos back
        # the data, it has the timestamp from when the data was encoded, which
        # is not equivalent to when we run encode_multipart_formdata on
        # the data again.  (This note used to be a stray triple-quoted string
        # statement; it is now a real comment so it is not evaluated.)
        for i, line in enumerate(body):
            if line.startswith(b'--'):
                continue

            self.assertEqual(body[i], expected_body[i])

    def test_check_gzip(self):
        """A gzip-encoded response body is transparently decoded."""
        response = self.pool.request('GET',
                                     '/encodingrequest',
                                     headers={'accept-encoding': 'gzip'})
        encoding = response.headers.get('content-encoding')
        self.assertEqual(encoding, 'gzip')
        self.assertEqual(response.data, b'hello, world!')

    def test_check_deflate(self):
        """A deflate-encoded response body is transparently decoded."""
        response = self.pool.request('GET',
                                     '/encodingrequest',
                                     headers={'accept-encoding': 'deflate'})
        encoding = response.headers.get('content-encoding')
        self.assertEqual(encoding, 'deflate')
        self.assertEqual(response.data, b'hello, world!')

    def test_bad_decode(self):
        """Unknown content-encodings must raise DecodeError."""
        for bad_encoding in ('garbage-deflate', 'garbage-gzip'):
            self.assertRaises(DecodeError,
                              self.pool.request,
                              'GET',
                              '/encodingrequest',
                              headers={'accept-encoding': bad_encoding})

    def test_connection_count(self):
        """Serial requests on a size-1 pool should reuse one connection."""
        pool = HTTPConnectionPool(self.host, self.port, maxsize=1)

        for _ in range(3):
            pool.request('GET', '/')

        self.assertEqual(pool.num_connections, 1)
        self.assertEqual(pool.num_requests, 3)

    def test_connection_count_bigpool(self):
        """Even a large pool reuses a single connection for serial requests."""
        http_pool = HTTPConnectionPool(self.host, self.port, maxsize=16)

        for _ in range(3):
            http_pool.request('GET', '/')

        self.assertEqual(http_pool.num_connections, 1)
        self.assertEqual(http_pool.num_requests, 3)

    def test_partial_response(self):
        """With preload_content=False the body can be read incrementally."""
        pool = HTTPConnectionPool(self.host, self.port, maxsize=1)

        req_data = {'lol': 'cat'}
        resp_data = urlencode(req_data).encode('utf-8')

        r = pool.request('GET',
                         '/echo',
                         fields=req_data,
                         preload_content=False)

        # First a small chunk, then the remainder of the stream.
        self.assertEqual(r.read(5), resp_data[:5])
        self.assertEqual(r.read(), resp_data[5:])

    def test_lazy_load_twice(self):
        """Overlapping lazily-read responses on a one-connection pool."""
        # This test is sad and confusing. Need to figure out what's
        # going on with partial reads and socket reuse.

        pool = HTTPConnectionPool(self.host,
                                  self.port,
                                  block=True,
                                  maxsize=1,
                                  timeout=2)

        payload_size = 1024 * 2
        first_chunk = 512

        boundary = 'foo'

        req_data = {'count': 'a' * payload_size}
        resp_data = encode_multipart_formdata(req_data, boundary=boundary)[0]

        req2_data = {'count': 'b' * payload_size}
        resp2_data = encode_multipart_formdata(req2_data, boundary=boundary)[0]

        r1 = pool.request('POST',
                          '/echo',
                          fields=req_data,
                          multipart_boundary=boundary,
                          preload_content=False)

        self.assertEqual(r1.read(first_chunk), resp_data[:first_chunk])

        try:
            # r1 still holds the pool's only connection, so this second
            # request is expected to hit pool_timeout and raise
            # EmptyPoolError.
            r2 = pool.request('POST',
                              '/echo',
                              fields=req2_data,
                              multipart_boundary=boundary,
                              preload_content=False,
                              pool_timeout=0.001)

            # This branch should generally bail here, but maybe someday it will
            # work? Perhaps by some sort of magic. Consider it a TODO.

            self.assertEqual(r2.read(first_chunk), resp2_data[:first_chunk])

            self.assertEqual(r1.read(), resp_data[first_chunk:])
            self.assertEqual(r2.read(), resp2_data[first_chunk:])
            self.assertEqual(pool.num_requests, 2)

        except EmptyPoolError:
            # Expected path: finish reading the first response instead.
            self.assertEqual(r1.read(), resp_data[first_chunk:])
            self.assertEqual(pool.num_requests, 1)

        self.assertEqual(pool.num_connections, 1)

    def test_for_double_release(self):
        """The pool queue size must track releases exactly (never double)."""
        MAXSIZE = 5

        # Check default state
        pool = HTTPConnectionPool(self.host, self.port, maxsize=MAXSIZE)
        self.assertEqual(pool.num_connections, 0)
        self.assertEqual(pool.pool.qsize(), MAXSIZE)

        # Make an empty slot for testing
        pool.pool.get()
        self.assertEqual(pool.pool.qsize(), MAXSIZE - 1)

        # Check state after simple request
        pool.urlopen('GET', '/')
        self.assertEqual(pool.pool.qsize(), MAXSIZE - 1)

        # Check state without release
        pool.urlopen('GET', '/', preload_content=False)
        self.assertEqual(pool.pool.qsize(), MAXSIZE - 2)

        pool.urlopen('GET', '/')
        self.assertEqual(pool.pool.qsize(), MAXSIZE - 2)

        # Check state after read
        pool.urlopen('GET', '/').data
        self.assertEqual(pool.pool.qsize(), MAXSIZE - 2)

        pool.urlopen('GET', '/')
        self.assertEqual(pool.pool.qsize(), MAXSIZE - 2)

    def test_release_conn_parameter(self):
        """release_conn=False must keep the connection out of the pool."""
        MAXSIZE = 5
        pool = HTTPConnectionPool(self.host, self.port, maxsize=MAXSIZE)
        self.assertEqual(pool.pool.qsize(), MAXSIZE)

        # Make request without releasing connection
        pool.request('GET', '/', release_conn=False, preload_content=False)
        self.assertEqual(pool.pool.qsize(), MAXSIZE - 1)

    def test_dns_error(self):
        """An unresolvable hostname exhausts retries with MaxRetryError."""
        pool = HTTPConnectionPool('thishostdoesnotexist.invalid',
                                  self.port,
                                  timeout=0.001)
        self.assertRaises(
            MaxRetryError, pool.request, 'GET', '/test', retries=2)

    def test_source_address(self):
        """Requests should originate from the configured source address."""
        for addr in VALID_SOURCE_ADDRESSES:
            pool = HTTPConnectionPool(self.host,
                                      self.port,
                                      source_address=addr,
                                      retries=False)
            r = pool.request('GET', '/source_address')
            # The dummy server echoes the client's source address back.
            assert r.data == b(addr[0]), (
                "expected the response to contain the source address {addr}, "
                "but was {data}".format(data=r.data, addr=b(addr[0])))

    def test_source_address_error(self):
        """Unbindable source addresses must fail at the protocol level."""
        for addr in INVALID_SOURCE_ADDRESSES:
            pool = HTTPConnectionPool(self.host,
                                      self.port,
                                      source_address=addr,
                                      retries=False)
            self.assertRaises(ProtocolError, pool.request, 'GET',
                              '/source_address')

    @onlyPy3
    def test_httplib_headers_case_insensitive(self):
        """Response header mapping should compare case-insensitively."""
        HEADERS = {
            'Content-Length': '0',
            'Content-type': 'text/plain',
            'Server': 'TornadoServer/%s' % tornado.version
        }
        r = self.pool.request('GET',
                              '/specific_method',
                              fields={'method': 'GET'})
        self.assertEqual(HEADERS, dict(
            r.headers.items()))  # to preserve case sensitivity
Code example #22
0
# Regexes that pull the contents of the two JavaScript Array(...) literals
# out of the page.  Raw strings so \( and \) are regex escapes, not
# (invalid) Python string escapes.
stat_nr_string_exp = r'var astrStatNr = new Array\((.*?)\)'
cont_string_exp = r'var astrCont = new Array\((.*?)\)'

def get_js_arr_str(expr, data, group=1):
    """Search *data* with the regex *expr* and return the captured group."""
    match = re.search(expr, data)
    # Raises AttributeError if the pattern is absent -- callers rely on
    # the page always containing the arrays.
    return match.group(group)

def rm_quotes(data):
    """Remove every single-quote character and strip surrounding whitespace."""
    # str.replace does the job without compiling a regex on every call.
    return data.replace("'", "").strip()

def arr_filter(item):
    """Keep only tokens that are neither empty nor a lone comma separator."""
    # Membership test replaces the original "item != ''and item != ','"
    # whose missing space made it hard to read.
    return item not in ('', ',')

# Fetch the station index page and write one climate-data URL per station.
pool = HTTPConnectionPool(host)
r = pool.urlopen('GET', url)
html = r.data

# Station numbers and their region codes come from two parallel JS arrays.
stat_nr_str = get_js_arr_str(stat_nr_string_exp, html)
stat_nr = filter(arr_filter, map(rm_quotes, stat_nr_str.split('\'')))

cont_str = get_js_arr_str(cont_string_exp, html)
cont = filter(arr_filter, map(rm_quotes, cont_str.split('\'')))

# zip() pairs each station with its region code; the old manual index
# (cont[i]) breaks on Python 3 where filter() returns a non-subscriptable
# iterator.  The with-statement guarantees the output file is closed.
with open("./data/raw/url_climate.txt", "w") as url_file:
    for station, region in zip(stat_nr, cont):
        data_url = ('http://www.zamg.ac.at/fix/klima/oe71-00/klima2000/'
                    'daten/klimadaten/' + region + '/' + station + '.htm')
        url_file.write(data_url + "\n")
Code example #23
0
class TestConnectionPool(HTTPDummyServerTestCase):
    def setUp(self):
        # One shared pool per test method, closed automatically on teardown.
        self.pool = HTTPConnectionPool(self.host, self.port)
        self.addCleanup(self.pool.close)

    def test_get(self):
        """A GET whose method matches the query parameter should succeed."""
        response = self.pool.request("GET",
                                     "/specific_method",
                                     fields={"method": "GET"})
        assert response.status == 200, response.data

    def test_post_url(self):
        """A POST whose method matches the query parameter should succeed."""
        response = self.pool.request("POST",
                                     "/specific_method",
                                     fields={"method": "POST"})
        assert response.status == 200, response.data

    def test_urlopen_put(self):
        """urlopen() passes a raw query string straight through."""
        response = self.pool.urlopen("PUT", "/specific_method?method=PUT")
        assert response.status == 200, response.data

    def test_wrong_specific_method(self):
        """Mismatched methods must be rejected by the dummy server."""
        # To make sure the dummy server is actually returning failed responses
        for sent_method, expected_method in (("GET", "POST"),
                                             ("POST", "GET")):
            response = self.pool.request(sent_method,
                                         "/specific_method",
                                         fields={"method": expected_method})
            assert response.status == 400, response.data

    def test_upload(self):
        """A multipart file upload should round-trip through /upload."""
        data = "I'm in ur multipart form-data, hazing a cheezburgr"
        # Field order matters for the multipart encoding the server checks.
        fields = {
            "upload_param": "filefield",
            "upload_filename": "lolcat.txt",
            "upload_size": len(data),
            "filefield": ("lolcat.txt", data),
        }

        response = self.pool.request("POST", "/upload", fields=fields)
        assert response.status == 200, response.data

    def test_one_name_multiple_values(self):
        """Repeated field names must survive both field encodings."""
        fields = [("foo", "a"), ("foo", "b")]

        # urlencode
        r = self.pool.request("GET", "/echo", fields=fields)
        assert r.data == b"foo=a&foo=b"

        # multipart
        r = self.pool.request("POST", "/echo", fields=fields)
        assert r.data.count(b'name="foo"') == 2

    def test_request_method_body(self):
        """body= is sent raw; combining it with fields= must raise."""
        body = b"hi"
        r = self.pool.request("POST", "/echo", body=body)
        assert r.data == body

        fields = [("hi", "hello")]
        # body and fields are mutually exclusive ways to build the payload.
        with pytest.raises(TypeError):
            self.pool.request("POST", "/echo", body=body, fields=fields)

    def test_unicode_upload(self):
        """Non-ASCII filenames and field names must upload correctly."""
        fieldname = u("myfile")
        filename = u("\xe2\x99\xa5.txt")
        data = u("\xe2\x99\xa5").encode("utf8")
        size = len(data)

        fields = {
            u("upload_param"): fieldname,
            u("upload_filename"): filename,
            u("upload_size"): size,
            fieldname: (filename, data),
        }

        r = self.pool.request("POST", "/upload", fields=fields)
        assert r.status == 200, r.data

    def test_nagle(self):
        """ Test that connections have TCP_NODELAY turned on """
        # This test needs to be here in order to be run. socket.create_connection actually tries
        # to connect to the host provided so we need a dummyserver to be running.
        pool = HTTPConnectionPool(self.host, self.port)
        self.addCleanup(pool.close)
        conn = pool._get_conn()
        self.addCleanup(conn.close)
        pool._make_request(conn, "GET", "/")
        # A non-zero option value means Nagle's algorithm is disabled.
        tcp_nodelay_setting = conn.sock.getsockopt(socket.IPPROTO_TCP,
                                                   socket.TCP_NODELAY)
        assert tcp_nodelay_setting

    def test_socket_options(self):
        """Test that connections accept socket options."""
        # This test needs to be here in order to be run. socket.create_connection actually tries to
        # connect to the host provided so we need a dummyserver to be running.
        pool = HTTPConnectionPool(
            self.host,
            self.port,
            socket_options=[(socket.SOL_SOCKET, socket.SO_KEEPALIVE, 1)],
        )
        # Close via cleanup so neither the pool nor the socket leaks if the
        # assertion below fails (previously s.close() was only reached on
        # success and the pool was never closed).
        self.addCleanup(pool.close)
        s = pool._new_conn()._new_conn()  # Get the socket
        self.addCleanup(s.close)
        using_keepalive = s.getsockopt(socket.SOL_SOCKET,
                                       socket.SO_KEEPALIVE) > 0
        assert using_keepalive

    def test_disable_default_socket_options(self):
        """Test that passing None disables all socket options."""
        # This test needs to be here in order to be run. socket.create_connection actually tries
        # to connect to the host provided so we need a dummyserver to be running.
        pool = HTTPConnectionPool(self.host, self.port, socket_options=None)
        # Close via cleanup so neither the pool nor the socket leaks if the
        # assertion below fails.
        self.addCleanup(pool.close)
        s = pool._new_conn()._new_conn()
        self.addCleanup(s.close)
        # TCP_NODELAY left at 0 means the default option was not applied.
        using_nagle = s.getsockopt(socket.IPPROTO_TCP, socket.TCP_NODELAY) == 0
        assert using_nagle

    def test_defaults_are_applied(self):
        """Test that modifying the default socket options works."""
        # This test needs to be here in order to be run. socket.create_connection actually tries
        # to connect to the host provided so we need a dummyserver to be running.
        pool = HTTPConnectionPool(self.host, self.port)
        self.addCleanup(pool.close)
        # Get the HTTPConnection instance
        conn = pool._new_conn()
        self.addCleanup(conn.close)
        # Update the default socket options
        conn.default_socket_options += [(socket.SOL_SOCKET,
                                         socket.SO_KEEPALIVE, 1)]
        s = conn._new_conn()
        self.addCleanup(s.close)
        # Both the built-in default (TCP_NODELAY) and the appended option
        # (SO_KEEPALIVE) must be present on the new socket.
        nagle_disabled = s.getsockopt(socket.IPPROTO_TCP,
                                      socket.TCP_NODELAY) > 0
        using_keepalive = s.getsockopt(socket.SOL_SOCKET,
                                       socket.SO_KEEPALIVE) > 0
        assert nagle_disabled
        assert using_keepalive

    def test_connection_error_retries(self):
        """ ECONNREFUSED error should raise a connection error, with retries """
        # Nothing listens on a freshly-found free port, so every connect
        # attempt (3 retries) is refused.
        port = find_unused_port()
        pool = HTTPConnectionPool(self.host, port)
        try:
            pool.request("GET", "/", retries=Retry(connect=3))
            self.fail("Should have failed with a connection error.")
        except MaxRetryError as e:
            # isinstance instead of an exact type() comparison, so
            # subclasses of NewConnectionError also satisfy the check.
            assert isinstance(e.reason, NewConnectionError)

    def test_timeout_success(self):
        """Reusing a Timeout object across requests/pools must be safe."""
        timeout = Timeout(connect=3, read=5, total=None)
        pool = HTTPConnectionPool(self.host, self.port, timeout=timeout)
        self.addCleanup(pool.close)
        pool.request("GET", "/")
        # This should not raise a "Timeout already started" error
        pool.request("GET", "/")

        pool = HTTPConnectionPool(self.host, self.port, timeout=timeout)
        self.addCleanup(pool.close)
        # This should also not raise a "Timeout already started" error
        pool.request("GET", "/")

        timeout = Timeout(total=None)
        pool = HTTPConnectionPool(self.host, self.port, timeout=timeout)
        self.addCleanup(pool.close)
        pool.request("GET", "/")

    def test_tunnel(self):
        """_make_request must issue _tunnel() iff a tunnel was configured."""
        # note the actual httplib.py has no tests for this functionality
        timeout = Timeout(total=None)
        pool = HTTPConnectionPool(self.host, self.port, timeout=timeout)
        self.addCleanup(pool.close)
        conn = pool._get_conn()
        self.addCleanup(conn.close)
        conn.set_tunnel(self.host, self.port)

        # Stub out the actual CONNECT handshake; only the call matters.
        conn._tunnel = mock.Mock(return_value=None)
        pool._make_request(conn, "GET", "/")
        conn._tunnel.assert_called_once_with()

        # test that it's not called when tunnel is not set
        timeout = Timeout(total=None)
        pool = HTTPConnectionPool(self.host, self.port, timeout=timeout)
        self.addCleanup(pool.close)
        conn = pool._get_conn()
        self.addCleanup(conn.close)

        conn._tunnel = mock.Mock(return_value=None)
        pool._make_request(conn, "GET", "/")
        assert not conn._tunnel.called

    def test_redirect(self):
        """A 303 is surfaced raw with redirect=False and followed by default."""
        # With redirects disabled the server's 303 is returned untouched.
        response = self.pool.request("GET",
                                     "/redirect",
                                     fields={"target": "/"},
                                     redirect=False)
        assert response.status == 303

        # By default the redirect is followed through to the target.
        response = self.pool.request("GET", "/redirect",
                                     fields={"target": "/"})
        assert response.status == 200
        assert response.data == b"Dummy server!"

    def test_bad_connect(self):
        """An unresolvable hostname should exhaust retries with MaxRetryError."""
        pool = HTTPConnectionPool("badhost.invalid", self.port)
        try:
            pool.request("GET", "/", retries=5)
            self.fail("should raise timeout exception here")
        except MaxRetryError as e:
            # isinstance instead of an exact type() comparison, so
            # subclasses of NewConnectionError also satisfy the check.
            assert isinstance(e.reason, NewConnectionError)

    def test_keepalive(self):
        """Two keep-alive requests should reuse a single pooled connection."""
        pool = HTTPConnectionPool(self.host, self.port, block=True, maxsize=1)
        self.addCleanup(pool.close)

        response = None
        for _ in range(2):
            response = pool.request("GET", "/keepalive?close=0")

        assert response.status == 200
        # One physical connection served both requests.
        assert pool.num_connections == 1
        assert pool.num_requests == 2

    def test_keepalive_close(self):
        """Connection: close drops the socket; keep-alive preserves it."""
        pool = HTTPConnectionPool(self.host,
                                  self.port,
                                  block=True,
                                  maxsize=1,
                                  timeout=2)
        self.addCleanup(pool.close)

        r = pool.request("GET",
                         "/keepalive?close=1",
                         retries=0,
                         headers={"Connection": "close"})

        assert pool.num_connections == 1

        # The dummyserver will have responded with Connection:close,
        # and httplib will properly cleanup the socket.

        # We grab the HTTPConnection object straight from the Queue,
        # because _get_conn() is where the check & reset occurs
        # pylint: disable-msg=W0212
        conn = pool.pool.get()
        assert conn.sock is None
        pool._put_conn(conn)

        # Now with keep-alive
        r = pool.request("GET",
                         "/keepalive?close=0",
                         retries=0,
                         headers={"Connection": "keep-alive"})

        # The dummyserver responded with Connection:keep-alive, the connection
        # persists.
        conn = pool.pool.get()
        assert conn.sock is not None
        pool._put_conn(conn)

        # Another request asking the server to close the connection. This one
        # should get cleaned up for the next request.
        r = pool.request("GET",
                         "/keepalive?close=1",
                         retries=0,
                         headers={"Connection": "close"})

        assert r.status == 200

        conn = pool.pool.get()
        assert conn.sock is None
        pool._put_conn(conn)

        # Next request
        r = pool.request("GET", "/keepalive?close=0")

    def test_post_with_urlencode(self):
        """Non-multipart POST fields should arrive urlencoded."""
        fields = {"banana": "hammock", "lol": "cat"}
        response = self.pool.request("POST",
                                     "/echo",
                                     fields=fields,
                                     encode_multipart=False)
        # The echo endpoint returns the body exactly as it was sent.
        assert response.data.decode("utf-8") == urlencode(fields)

    def test_post_with_multipart(self):
        """Multipart-encoded POST fields should be echoed back verbatim."""
        data = {"banana": "hammock", "lol": "cat"}
        r = self.pool.request("POST",
                              "/echo",
                              fields=data,
                              encode_multipart=True)
        body = r.data.split(b"\r\n")

        encoded_data = encode_multipart_formdata(data)[0]
        expected_body = encoded_data.split(b"\r\n")

        # TODO: Get rid of extra parsing stuff when you can specify
        # a custom boundary to encode_multipart_formdata
        #
        # We need to loop the return lines because a timestamp is attached
        # from within encode_multipart_formdata. When the server echos back
        # the data, it has the timestamp from when the data was encoded, which
        # is not equivalent to when we run encode_multipart_formdata on
        # the data again.  (This note used to be a stray triple-quoted string
        # statement; it is now a real comment so it is not evaluated.)
        for i, line in enumerate(body):
            if line.startswith(b"--"):
                continue

            assert body[i] == expected_body[i]

    def test_post_with_multipart__iter__(self):
        """Iterating a non-preloaded multipart response yields raw chunks."""
        data = {"hello": "world"}
        r = self.pool.request(
            "POST",
            "/echo",
            fields=data,
            preload_content=False,
            multipart_boundary="boundary",
            encode_multipart=True,
        )

        # list(r) drains the response iterator directly; a pass-through
        # comprehension added nothing.
        chunks = list(r)
        assert chunks == [
            b"--boundary\r\n",
            b'Content-Disposition: form-data; name="hello"\r\n',
            b"\r\n",
            b"world\r\n",
            b"--boundary--\r\n",
        ]

    def test_check_gzip(self):
        """A gzip-encoded response body is transparently decoded."""
        response = self.pool.request("GET",
                                     "/encodingrequest",
                                     headers={"accept-encoding": "gzip"})
        encoding = response.headers.get("content-encoding")
        assert encoding == "gzip"
        assert response.data == b"hello, world!"

    def test_check_deflate(self):
        """A deflate-encoded response body is transparently decoded."""
        response = self.pool.request("GET",
                                     "/encodingrequest",
                                     headers={"accept-encoding": "deflate"})
        encoding = response.headers.get("content-encoding")
        assert encoding == "deflate"
        assert response.data == b"hello, world!"

    def test_bad_decode(self):
        """Unknown content-encodings must raise DecodeError."""
        for bad_encoding in ("garbage-deflate", "garbage-gzip"):
            with pytest.raises(DecodeError):
                self.pool.request("GET",
                                  "/encodingrequest",
                                  headers={"accept-encoding": bad_encoding})

    def test_connection_count(self):
        """Serial requests on a size-1 pool should reuse one connection."""
        pool = HTTPConnectionPool(self.host, self.port, maxsize=1)
        self.addCleanup(pool.close)

        for _ in range(3):
            pool.request("GET", "/")

        assert pool.num_connections == 1
        assert pool.num_requests == 3

    def test_connection_count_bigpool(self):
        """Even a large pool reuses a single connection for serial requests."""
        http_pool = HTTPConnectionPool(self.host, self.port, maxsize=16)
        self.addCleanup(http_pool.close)

        for _ in range(3):
            http_pool.request("GET", "/")

        assert http_pool.num_connections == 1
        assert http_pool.num_requests == 3

    def test_partial_response(self):
        """With preload_content=False the body can be read incrementally."""
        pool = HTTPConnectionPool(self.host, self.port, maxsize=1)
        self.addCleanup(pool.close)

        req_data = {"lol": "cat"}
        resp_data = urlencode(req_data).encode("utf-8")

        r = pool.request("GET",
                         "/echo",
                         fields=req_data,
                         preload_content=False)

        # First a small chunk, then the remainder of the stream.
        assert r.read(5) == resp_data[:5]
        assert r.read() == resp_data[5:]

    def test_lazy_load_twice(self):
        """Overlapping lazily-read responses on a one-connection pool."""
        # This test is sad and confusing. Need to figure out what's
        # going on with partial reads and socket reuse.

        pool = HTTPConnectionPool(self.host,
                                  self.port,
                                  block=True,
                                  maxsize=1,
                                  timeout=2)
        # Close the pool on teardown, consistent with the sibling tests
        # (it was previously leaked).
        self.addCleanup(pool.close)

        payload_size = 1024 * 2
        first_chunk = 512

        boundary = "foo"

        req_data = {"count": "a" * payload_size}
        resp_data = encode_multipart_formdata(req_data, boundary=boundary)[0]

        req2_data = {"count": "b" * payload_size}
        resp2_data = encode_multipart_formdata(req2_data, boundary=boundary)[0]

        r1 = pool.request(
            "POST",
            "/echo",
            fields=req_data,
            multipart_boundary=boundary,
            preload_content=False,
        )

        assert r1.read(first_chunk) == resp_data[:first_chunk]

        try:
            # r1 still holds the pool's only connection, so this second
            # request is expected to hit pool_timeout and raise
            # EmptyPoolError.
            r2 = pool.request(
                "POST",
                "/echo",
                fields=req2_data,
                multipart_boundary=boundary,
                preload_content=False,
                pool_timeout=0.001,
            )

            # This branch should generally bail here, but maybe someday it will
            # work? Perhaps by some sort of magic. Consider it a TODO.

            assert r2.read(first_chunk) == resp2_data[:first_chunk]

            assert r1.read() == resp_data[first_chunk:]
            assert r2.read() == resp2_data[first_chunk:]
            assert pool.num_requests == 2

        except EmptyPoolError:
            # Expected path: finish reading the first response instead.
            assert r1.read() == resp_data[first_chunk:]
            assert pool.num_requests == 1

        assert pool.num_connections == 1

    def test_for_double_release(self):
        """The pool queue size must track releases exactly (never double)."""
        MAXSIZE = 5

        # Check default state
        pool = HTTPConnectionPool(self.host, self.port, maxsize=MAXSIZE)
        self.addCleanup(pool.close)
        assert pool.num_connections == 0
        assert pool.pool.qsize() == MAXSIZE

        # Make an empty slot for testing
        pool.pool.get()
        assert pool.pool.qsize() == MAXSIZE - 1

        # Check state after simple request
        pool.urlopen("GET", "/")
        assert pool.pool.qsize() == MAXSIZE - 1

        # Check state without release
        pool.urlopen("GET", "/", preload_content=False)
        assert pool.pool.qsize() == MAXSIZE - 2

        pool.urlopen("GET", "/")
        assert pool.pool.qsize() == MAXSIZE - 2

        # Check state after read
        pool.urlopen("GET", "/").data
        assert pool.pool.qsize() == MAXSIZE - 2

        pool.urlopen("GET", "/")
        assert pool.pool.qsize() == MAXSIZE - 2

    def test_release_conn_parameter(self):
        """release_conn=False must keep the connection checked out.

        After the request, the pool queue should be one short, proving
        the connection was not implicitly returned.
        """
        MAXSIZE = 5
        pool = HTTPConnectionPool(self.host, self.port, maxsize=MAXSIZE)
        # Close the pool after the test, consistent with the sibling tests
        # in this class (test_for_double_release, test_source_address, ...).
        self.addCleanup(pool.close)
        assert pool.pool.qsize() == MAXSIZE

        # Make request without releasing connection
        pool.request("GET", "/", release_conn=False, preload_content=False)
        assert pool.pool.qsize() == MAXSIZE - 1

    def test_dns_error(self):
        """A non-resolvable hostname must surface as MaxRetryError after
        the retry budget is exhausted."""
        pool = HTTPConnectionPool("thishostdoesnotexist.invalid",
                                  self.port,
                                  timeout=0.001)
        # Close the pool after the test, consistent with the sibling tests.
        self.addCleanup(pool.close)
        with pytest.raises(MaxRetryError):
            pool.request("GET", "/test", retries=2)

    def test_source_address(self):
        """The server must observe requests as originating from the
        configured source_address for every valid address."""
        for source_addr, needs_ipv6 in VALID_SOURCE_ADDRESSES:
            # Skip IPv6 addresses when the environment can't use them.
            if needs_ipv6 and not HAS_IPV6_AND_DNS:
                warnings.warn("No IPv6 support: skipping.", NoIPv6Warning)
                continue
            pool = HTTPConnectionPool(
                self.host,
                self.port,
                source_address=source_addr,
                retries=False,
            )
            self.addCleanup(pool.close)
            response = pool.request("GET", "/source_address")
            assert response.data == b(source_addr[0])

    def test_source_address_error(self):
        """Invalid source addresses must fail to connect at all."""
        for addr in INVALID_SOURCE_ADDRESSES:
            pool = HTTPConnectionPool(self.host,
                                      self.port,
                                      source_address=addr,
                                      retries=False)
            # Close each per-address pool, consistent with test_source_address.
            self.addCleanup(pool.close)
            # FIXME: This assert flakes sometimes. Not sure why.
            with pytest.raises(NewConnectionError):
                pool.request("GET", "/source_address?{0}".format(addr))

    def test_stream_keepalive(self):
        """Streaming chunked responses over keep-alive must reuse a single
        connection across requests."""
        num_requests = 2

        for _ in range(num_requests):
            resp = self.pool.request(
                "GET",
                "/chunked",
                headers={"Connection": "keep-alive"},
                preload_content=False,
                retries=False,
            )
            # Every streamed chunk from the /chunked endpoint is b"123".
            assert all(chunk == b"123" for chunk in resp.stream())

        assert self.pool.num_connections == 1
        assert self.pool.num_requests == num_requests

    def test_read_chunked_short_circuit(self):
        """Once the body has been fully read, read_chunked() yields nothing."""
        resp = self.pool.request("GET", "/chunked", preload_content=False)
        resp.read()
        chunk_iter = resp.read_chunked()
        with pytest.raises(StopIteration):
            next(chunk_iter)

    def test_read_chunked_on_closed_response(self):
        """read_chunked() on a closed response yields nothing."""
        resp = self.pool.request("GET", "/chunked", preload_content=False)
        resp.close()
        chunk_iter = resp.read_chunked()
        with pytest.raises(StopIteration):
            next(chunk_iter)

    def test_chunked_gzip(self):
        """A chunked, gzip-encoded body is transparently decoded when
        decode_content=True."""
        resp = self.pool.request(
            "GET",
            "/chunked_gzip",
            preload_content=False,
            decode_content=True,
        )
        expected = b"123" * 4
        assert resp.read() == expected

    def test_cleanup_on_connection_error(self):
        """
        Test that connections are recycled to the pool on
        connection errors where no http response is received.
        """
        poolsize = 3
        with HTTPConnectionPool(self.host,
                                self.port,
                                maxsize=poolsize,
                                block=True) as http:
            assert http.pool.qsize() == poolsize

            # Exhaust the retry budget (retries=0 against a redirecting URL)
            # so the request fails with MaxRetryError; release_conn=False
            # means the conn won't be implicitly returned to the pool — the
            # cleanup-on-error path must return it instead.
            with pytest.raises(MaxRetryError):
                http.request(
                    "GET",
                    "/redirect",
                    fields={"target": "/"},
                    release_conn=False,
                    retries=0,
                )

            # With one retry the redirect is followed and succeeds; the
            # conn is still held (release_conn=False) until we release it.
            r = http.request(
                "GET",
                "/redirect",
                fields={"target": "/"},
                release_conn=False,
                retries=1,
            )
            r.release_conn()

            # the pool should still contain poolsize elements
            assert http.pool.qsize() == http.pool.maxsize

    def test_mixed_case_hostname(self):
        """Hostname matching between the pool and request URL must be
        case-insensitive."""
        pool = HTTPConnectionPool("LoCaLhOsT", self.port)
        self.addCleanup(pool.close)
        url = "http://LoCaLhOsT:%d/" % self.port
        resp = pool.request("GET", url)
        assert resp.status == 200
# --- Scraper artifact (code example #24) ---
# File: test_connectionpool.py, project: ruudud/urllib3
class TestConnectionPool(HTTPDummyServerTestCase):
    """Functional tests for HTTPConnectionPool against the dummy server.

    NOTE(review): this is a Python 2-era copy of the suite (str/bytes
    comparisons, ``urllib.urlencode``, ``u''`` literals) scraped alongside
    the modern version above — presumably from an older urllib3 revision.
    """

    def setUp(self):
        # One shared pool against the dummy server for every test method.
        self.pool = HTTPConnectionPool(self.host, self.port)

    def test_get(self):
        """A simple GET should return 200."""
        r = self.pool.request('GET',
                              '/specific_method',
                              fields={'method': 'GET'})
        self.assertEqual(r.status, 200, r.data)

    def test_post_url(self):
        """A simple POST should return 200."""
        r = self.pool.request('POST',
                              '/specific_method',
                              fields={'method': 'POST'})
        self.assertEqual(r.status, 200, r.data)

    def test_urlopen_put(self):
        """urlopen() supports arbitrary methods like PUT."""
        r = self.pool.urlopen('PUT', '/specific_method?method=PUT')
        self.assertEqual(r.status, 200, r.data)

    def test_wrong_specific_method(self):
        # To make sure the dummy server is actually returning failed responses
        r = self.pool.request('GET',
                              '/specific_method',
                              fields={'method': 'POST'})
        self.assertEqual(r.status, 400, r.data)

        r = self.pool.request('POST',
                              '/specific_method',
                              fields={'method': 'GET'})
        self.assertEqual(r.status, 400, r.data)

    def test_upload(self):
        """Multipart file upload round-trips through the dummy server."""
        data = "I'm in ur multipart form-data, hazing a cheezburgr"
        fields = {
            'upload_param': 'filefield',
            'upload_filename': 'lolcat.txt',
            'upload_size': len(data),
            'filefield': ('lolcat.txt', data),
        }

        r = self.pool.request('POST', '/upload', fields=fields)
        self.assertEqual(r.status, 200, r.data)

    def test_unicode_upload(self):
        """Unicode field names and filenames upload correctly."""
        fieldname = u'myfile'
        filename = u'\xe2\x99\xa5.txt'
        data = u'\xe2\x99\xa5'.encode('utf8')
        size = len(data)

        fields = {
            u'upload_param': fieldname,
            u'upload_filename': filename,
            u'upload_size': size,
            fieldname: (filename, data),
        }

        r = self.pool.request('POST', '/upload', fields=fields)
        self.assertEqual(r.status, 200, r.data)

    def test_timeout(self):
        """A pool-level timeout shorter than the server sleep must raise."""
        pool = HTTPConnectionPool(self.host, self.port, timeout=0.01)
        try:
            pool.request('GET', '/sleep', fields={'seconds': '0.02'})
            self.fail("Failed to raise TimeoutError exception")
        except TimeoutError:
            pass

    def test_redirect(self):
        """redirect=False returns the raw 303; default follows it."""
        r = self.pool.request('GET',
                              '/redirect',
                              fields={'target': '/'},
                              redirect=False)
        self.assertEqual(r.status, 303)

        r = self.pool.request('GET', '/redirect', fields={'target': '/'})
        self.assertEqual(r.status, 200)
        self.assertEqual(r.data, 'Dummy server!')

    def test_maxretry(self):
        """retries=0 against a redirect must raise MaxRetryError."""
        try:
            self.pool.request('GET',
                              '/redirect',
                              fields={'target': '/'},
                              retries=0)
            self.fail("Failed to raise MaxRetryError exception")
        except MaxRetryError:
            pass

    def test_keepalive(self):
        """Two keep-alive requests share one connection."""
        pool = HTTPConnectionPool(self.host, self.port, block=True, maxsize=1)

        r = pool.request('GET', '/keepalive?close=0')
        r = pool.request('GET', '/keepalive?close=0')

        self.assertEqual(r.status, 200)
        self.assertEqual(pool.num_connections, 1)
        self.assertEqual(pool.num_requests, 2)

    def test_keepalive_close(self):
        # NOTE: This used to run against apache.org but it made the test suite
        # really slow and fail half the time. Setting it to skip until we can
        # make this run better locally.
        pool = HTTPConnectionPool(self.host,
                                  self.port,
                                  block=True,
                                  maxsize=1,
                                  timeout=2)

        r = pool.request('GET',
                         '/keepalive?close=1',
                         retries=0,
                         headers={
                             "Connection": "close",
                         })

        self.assertEqual(pool.num_connections, 1)

        # The dummyserver will have responded with Connection:close,
        # and httplib will properly cleanup the socket.

        # We grab the HTTPConnection object straight from the Queue,
        # because _get_conn() is where the check & reset occurs
        # pylint: disable-msg=W0212
        conn = pool.pool.get()
        self.assertEqual(conn.sock, None)
        pool._put_conn(conn)

        # Now with keep-alive
        r = pool.request('GET',
                         '/keepalive?close=0',
                         retries=0,
                         headers={
                             "Connection": "keep-alive",
                         })

        # The dummyserver responded with Connection:keep-alive, the connection
        # persists.
        conn = pool.pool.get()
        self.assertNotEqual(conn.sock, None)
        pool._put_conn(conn)

        # Another request asking the server to close the connection. This one
        # should get cleaned up for the next request.
        r = pool.request('GET',
                         '/keepalive?close=1',
                         retries=0,
                         headers={
                             "Connection": "close",
                         })

        self.assertEqual(r.status, 200)

        conn = pool.pool.get()
        self.assertEqual(conn.sock, None)
        pool._put_conn(conn)

        # Next request
        r = pool.request('GET', '/keepalive?close=0')

    def test_post_with_urlencode(self):
        """POST without multipart urlencodes the fields."""
        data = {'banana': 'hammock', 'lol': 'cat'}
        r = self.pool.request('POST',
                              '/echo',
                              fields=data,
                              encode_multipart=False)
        self.assertEqual(r.data, urllib.urlencode(data))

    def test_post_with_multipart(self):
        """Multipart-encoded POST echoes back an equivalent body."""
        data = {'banana': 'hammock', 'lol': 'cat'}
        r = self.pool.request('POST',
                              '/echo',
                              fields=data,
                              encode_multipart=True)
        body = r.data.split('\r\n')

        encoded_data = encode_multipart_formdata(data)[0]
        expected_body = encoded_data.split('\r\n')

        # TODO: Get rid of extra parsing stuff when you can specify
        # a custom boundary to encode_multipart_formdata
        """
        We need to loop the return lines because a timestamp is attached
        from within encode_multipart_formdata. When the server echos back
        the data, it has the timestamp from when the data was encoded, which
        is not equivalent to when we run encode_multipart_formdata on
        the data again.
        """
        for i, line in enumerate(body):
            if line.startswith('--'):
                continue

            self.assertEqual(body[i], expected_body[i])

    def test_check_gzip(self):
        """Accept-Encoding: gzip is honored and transparently decoded."""
        r = self.pool.request('GET',
                              '/encodingrequest',
                              headers={'accept-encoding': 'gzip'})
        self.assertEqual(r.headers.get('content-encoding'), 'gzip')
        self.assertEqual(r.data, 'hello, world!')

    def test_check_deflate(self):
        """Accept-Encoding: deflate is honored and transparently decoded."""
        r = self.pool.request('GET',
                              '/encodingrequest',
                              headers={'accept-encoding': 'deflate'})
        self.assertEqual(r.headers.get('content-encoding'), 'deflate')
        self.assertEqual(r.data, 'hello, world!')

    def test_connection_count(self):
        """Three sequential requests on maxsize=1 reuse one connection."""
        pool = HTTPConnectionPool(self.host, self.port, maxsize=1)

        pool.request('GET', '/')
        pool.request('GET', '/')
        pool.request('GET', '/')

        self.assertEqual(pool.num_connections, 1)
        self.assertEqual(pool.num_requests, 3)

    def test_partial_response(self):
        """read(n) then read() returns the body split at n."""
        pool = HTTPConnectionPool(self.host, self.port, maxsize=1)

        req_data = {'lol': 'cat'}
        resp_data = urllib.urlencode(req_data)

        r = pool.request('GET',
                         '/echo',
                         fields=req_data,
                         preload_content=False)

        self.assertEqual(r.read(5), resp_data[:5])
        self.assertEqual(r.read(), resp_data[5:])

    def test_lazy_load_twice(self):
        # This test is sad and confusing. Need to figure out what's
        # going on with partial reads and socket reuse.

        pool = HTTPConnectionPool(self.host,
                                  self.port,
                                  block=True,
                                  maxsize=1,
                                  timeout=2)

        payload_size = 1024 * 2
        first_chunk = 512

        boundary = 'foo'

        req_data = {'count': 'a' * payload_size}
        resp_data = encode_multipart_formdata(req_data, boundary=boundary)[0]

        req2_data = {'count': 'b' * payload_size}
        resp2_data = encode_multipart_formdata(req2_data, boundary=boundary)[0]

        r1 = pool.request('POST',
                          '/echo',
                          fields=req_data,
                          multipart_boundary=boundary,
                          preload_content=False)

        self.assertEqual(r1.read(first_chunk), resp_data[:first_chunk])

        try:
            # With block=True and maxsize=1, the single conn is still held
            # by r1, so this second checkout should time out.
            r2 = pool.request('POST',
                              '/echo',
                              fields=req2_data,
                              multipart_boundary=boundary,
                              preload_content=False,
                              pool_timeout=0.001)

            # This branch should generally bail here, but maybe someday it will
            # work? Perhaps by some sort of magic. Consider it a TODO.

            self.assertEqual(r2.read(first_chunk), resp2_data[:first_chunk])

            self.assertEqual(r1.read(), resp_data[first_chunk:])
            self.assertEqual(r2.read(), resp2_data[first_chunk:])
            self.assertEqual(pool.num_requests, 2)

        except EmptyPoolError:
            self.assertEqual(r1.read(), resp_data[first_chunk:])
            self.assertEqual(pool.num_requests, 1)

        self.assertEqual(pool.num_connections, 1)

    def test_for_double_release(self):
        """Connections must never be released back to the pool twice."""
        MAXSIZE = 5

        # Check default state
        pool = HTTPConnectionPool(self.host, self.port, maxsize=MAXSIZE)
        self.assertEqual(pool.num_connections, 0)
        self.assertEqual(pool.pool.qsize(), MAXSIZE)

        # Make an empty slot for testing
        pool.pool.get()
        self.assertEqual(pool.pool.qsize(), MAXSIZE - 1)

        # Check state after simple request
        pool.urlopen('GET', '/')
        self.assertEqual(pool.pool.qsize(), MAXSIZE - 1)

        # Check state without release
        pool.urlopen('GET', '/', preload_content=False)
        self.assertEqual(pool.pool.qsize(), MAXSIZE - 2)

        pool.urlopen('GET', '/')
        self.assertEqual(pool.pool.qsize(), MAXSIZE - 2)

        # Check state after read
        pool.urlopen('GET', '/').data
        self.assertEqual(pool.pool.qsize(), MAXSIZE - 2)

        pool.urlopen('GET', '/')
        self.assertEqual(pool.pool.qsize(), MAXSIZE - 2)

    def test_release_conn_parameter(self):
        """release_conn=False keeps the connection checked out."""
        MAXSIZE = 5
        pool = HTTPConnectionPool(self.host, self.port, maxsize=MAXSIZE)
        self.assertEqual(pool.pool.qsize(), MAXSIZE)

        # Make request without releasing connection
        pool.request('GET', '/', release_conn=False, preload_content=False)
        self.assertEqual(pool.pool.qsize(), MAXSIZE - 1)
# --- Scraper artifact (code example #25) ---
class TestConnectionPool(HTTPDummyServerTestCase):
    def setUp(self):
        # One shared pool against the dummy server for every test method.
        self.pool = HTTPConnectionPool(self.host, self.port)

    def test_get(self):
        """A simple GET against the dummy server should return 200."""
        response = self.pool.request('GET',
                                     '/specific_method',
                                     fields={'method': 'GET'})
        self.assertEqual(response.status, 200, response.data)

    def test_post_url(self):
        """A simple POST against the dummy server should return 200."""
        response = self.pool.request('POST',
                                     '/specific_method',
                                     fields={'method': 'POST'})
        self.assertEqual(response.status, 200, response.data)

    def test_urlopen_put(self):
        """urlopen() supports arbitrary HTTP methods such as PUT."""
        response = self.pool.urlopen('PUT', '/specific_method?method=PUT')
        self.assertEqual(response.status, 200, response.data)

    def test_wrong_specific_method(self):
        """Sanity check: the dummy server really does reject mismatched
        methods with a 400."""
        # GET claiming to be POST, and POST claiming to be GET, must fail.
        mismatched = [('GET', 'POST'), ('POST', 'GET')]
        for method, claimed in mismatched:
            response = self.pool.request(method,
                                         '/specific_method',
                                         fields={'method': claimed})
            self.assertEqual(response.status, 400, response.data)

    def test_upload(self):
        """A multipart file upload round-trips through the dummy server."""
        payload = "I'm in ur multipart form-data, hazing a cheezburgr"
        upload_fields = dict(
            upload_param='filefield',
            upload_filename='lolcat.txt',
            upload_size=len(payload),
            filefield=('lolcat.txt', payload),
        )
        response = self.pool.request('POST', '/upload', fields=upload_fields)
        self.assertEqual(response.status, 200, response.data)

    def test_one_name_multiple_values(self):
        """Repeated field names must survive both encodings."""
        fields = [('foo', 'a'), ('foo', 'b')]

        # urlencode keeps both values, in order
        urlencoded = self.pool.request('GET', '/echo', fields=fields)
        self.assertEqual(urlencoded.data, b'foo=a&foo=b')

        # multipart emits one part per value
        multipart = self.pool.request('POST', '/echo', fields=fields)
        self.assertEqual(multipart.data.count(b'name="foo"'), 2)

    def test_unicode_upload(self):
        """Unicode field names and filenames must upload correctly."""
        field = u('myfile')
        fname = u('\xe2\x99\xa5.txt')
        payload = u('\xe2\x99\xa5').encode('utf8')

        fields = {
            u('upload_param'): field,
            u('upload_filename'): fname,
            u('upload_size'): len(payload),
            field: (fname, payload),
        }

        response = self.pool.request('POST', '/upload', fields=fields)
        self.assertEqual(response.status, 200, response.data)

    def test_timeout_float(self):
        """A bare float pool timeout shorter than the server's sleep must
        raise ReadTimeoutError."""
        pool = HTTPConnectionPool(self.host, self.port, timeout=0.001)
        # Pool-global timeout
        self.assertRaises(ReadTimeoutError, pool.request,
                          'GET', '/sleep?seconds=0.005')

    def test_timeout(self):
        """Read timeouts must fire whether configured pool-wide, per-request
        via a Timeout object, or per-request via a bare number — and a large
        per-request value must override a permissive pool default."""
        url = '/sleep?seconds=0.005'
        timeout = util.Timeout(read=0.001)

        # Pool-global timeout
        pool = HTTPConnectionPool(self.host, self.port, timeout=timeout)

        # Both the low-level _make_request path and the public request()
        # path must honor the pool-wide read timeout.
        conn = pool._get_conn()
        self.assertRaises(ReadTimeoutError, pool._make_request, conn, 'GET',
                          url)
        pool._put_conn(conn)

        self.assertRaises(ReadTimeoutError, pool.request, 'GET', url)

        # Request-specific timeouts should raise errors
        # (pool default of 0.5s would be ample; the per-request 0.001s wins)
        pool = HTTPConnectionPool(self.host, self.port, timeout=0.5)

        conn = pool._get_conn()
        self.assertRaises(ReadTimeoutError,
                          pool._make_request,
                          conn,
                          'GET',
                          url,
                          timeout=timeout)
        pool._put_conn(conn)

        self.assertRaises(ReadTimeoutError,
                          pool.request,
                          'GET',
                          url,
                          timeout=timeout)

        # Timeout int/float passed directly to request and _make_request should
        # raise a request timeout
        self.assertRaises(ReadTimeoutError,
                          pool.request,
                          'GET',
                          url,
                          timeout=0.001)
        conn = pool._new_conn()
        self.assertRaises(ReadTimeoutError,
                          pool._make_request,
                          conn,
                          'GET',
                          url,
                          timeout=0.001)
        pool._put_conn(conn)

        # Timeout int/float passed directly to _make_request should not raise a
        # request timeout if it's a high value
        pool.request('GET', url, timeout=5)

    @timed(0.1)
    def test_connect_timeout(self):
        """Connect timeouts against an unroutable host (TARPIT_HOST) must
        fire quickly, both pool-wide and per-request.

        The @timed(0.1) guard ensures no code path blocks for a full
        TCP connect.
        """
        url = '/sleep'
        timeout = util.Timeout(connect=0.001)

        # Pool-global timeout
        pool = HTTPConnectionPool(TARPIT_HOST, self.port, timeout=timeout)
        conn = pool._get_conn()
        self.assertRaises(ConnectTimeoutError, pool._make_request, conn, 'GET',
                          url)
        pool._put_conn(conn)
        self.assertRaises(ConnectTimeoutError, pool.request, 'GET', url)

        # Request-specific connection timeouts
        # (pool default is generous; the per-request 0.001s connect wins)
        big_timeout = util.Timeout(read=0.5, connect=0.5)
        pool = HTTPConnectionPool(TARPIT_HOST, self.port, timeout=big_timeout)
        conn = pool._get_conn()
        self.assertRaises(ConnectTimeoutError,
                          pool._make_request,
                          conn,
                          'GET',
                          url,
                          timeout=timeout)

        pool._put_conn(conn)
        self.assertRaises(ConnectTimeoutError,
                          pool.request,
                          'GET',
                          url,
                          timeout=timeout)

    def test_timeout_reset(self):
        """A connect-only Timeout must not impose a read timeout: the
        socket timeout should reset after the connection is established."""
        target = '/sleep?seconds=0.005'
        pool = HTTPConnectionPool(self.host, self.port,
                                  timeout=util.Timeout(connect=0.001))
        conn = pool._get_conn()
        try:
            pool._make_request(conn, 'GET', target)
        except ReadTimeoutError:
            self.fail("This request shouldn't trigger a read timeout.")

    @timed(0.1)
    def test_total_timeout(self):
        """The ``total`` budget of a Timeout must cap both the connect and
        the read phases, and connect-only timeouts must still fire when
        ``total`` is None."""
        url = '/sleep?seconds=0.005'

        # total=0.001 expires during connect to the unroutable TARPIT_HOST.
        timeout = util.Timeout(connect=3, read=5, total=0.001)
        pool = HTTPConnectionPool(TARPIT_HOST, self.port, timeout=timeout)
        conn = pool._get_conn()
        self.assertRaises(ConnectTimeoutError, pool._make_request, conn, 'GET',
                          url)

        # This will get the socket to raise an EAGAIN on the read
        timeout = util.Timeout(connect=3, read=0)
        pool = HTTPConnectionPool(self.host, self.port, timeout=timeout)
        conn = pool._get_conn()
        self.assertRaises(ReadTimeoutError, pool._make_request, conn, 'GET',
                          url)

        # The connect should succeed and this should hit the read timeout
        timeout = util.Timeout(connect=3, read=5, total=0.002)
        pool = HTTPConnectionPool(self.host, self.port, timeout=timeout)
        conn = pool._get_conn()
        self.assertRaises(ReadTimeoutError, pool._make_request, conn, 'GET',
                          url)

        # total=None: the connect=0.001 component alone must still apply.
        timeout = util.Timeout(total=None, connect=0.001)
        pool = HTTPConnectionPool(TARPIT_HOST, self.port, timeout=timeout)
        conn = pool._get_conn()
        self.assertRaises(ConnectTimeoutError, pool._make_request, conn, 'GET',
                          url)

    def test_timeout_success(self):
        """Reusing a Timeout across requests and pools must not raise a
        "Timeout already started" error."""
        timeout = util.Timeout(connect=3, read=5, total=None)
        pool = HTTPConnectionPool(self.host, self.port, timeout=timeout)
        pool.request('GET', '/')
        # Second request on the same pool: timeout object is reused cleanly.
        pool.request('GET', '/')

        # Same timeout object shared with a brand new pool.
        pool = HTTPConnectionPool(self.host, self.port, timeout=timeout)
        pool.request('GET', '/')

        # A fully-unset total also works.
        pool = HTTPConnectionPool(self.host, self.port,
                                  timeout=util.Timeout(total=None))
        pool.request('GET', '/')

    def test_tunnel(self):
        """_tunnel() must be invoked exactly once when a tunnel is
        configured, and never when it isn't."""
        # note the actual httplib.py has no tests for this functionality
        timeout = util.Timeout(total=None)
        pool = HTTPConnectionPool(self.host, self.port, timeout=timeout)
        conn = pool._get_conn()
        try:
            conn.set_tunnel(self.host, self.port)
        except AttributeError:  # python 2.6
            conn._set_tunnel(self.host, self.port)

        # Stub out the actual CONNECT handshake; we only care that the
        # request path calls it.
        conn._tunnel = mock.Mock(return_value=None)
        pool._make_request(conn, 'GET', '/')
        conn._tunnel.assert_called_once_with()

        # test that it's not called when tunnel is not set
        timeout = util.Timeout(total=None)
        pool = HTTPConnectionPool(self.host, self.port, timeout=timeout)
        conn = pool._get_conn()

        conn._tunnel = mock.Mock(return_value=None)
        pool._make_request(conn, 'GET', '/')
        self.assertEqual(conn._tunnel.called, False)

    def test_redirect(self):
        """redirect=False surfaces the raw 303; the default follows it."""
        fields = {'target': '/'}

        raw = self.pool.request('GET', '/redirect', fields=fields,
                                redirect=False)
        self.assertEqual(raw.status, 303)

        followed = self.pool.request('GET', '/redirect', fields=fields)
        self.assertEqual(followed.status, 200)
        self.assertEqual(followed.data, b'Dummy server!')

    def test_maxretry(self):
        """retries=0 against a redirecting URL must raise MaxRetryError."""
        try:
            self.pool.request('GET',
                              '/redirect',
                              fields={'target': '/'},
                              retries=0)
        except MaxRetryError:
            return
        self.fail("Failed to raise MaxRetryError exception")

    def test_keepalive(self):
        """Two keep-alive requests on a blocking maxsize=1 pool must share
        a single connection."""
        pool = HTTPConnectionPool(self.host, self.port, block=True, maxsize=1)

        pool.request('GET', '/keepalive?close=0')
        response = pool.request('GET', '/keepalive?close=0')

        self.assertEqual(response.status, 200)
        self.assertEqual(pool.num_connections, 1)
        self.assertEqual(pool.num_requests, 2)

    def test_keepalive_close(self):
        """After a Connection:close response the pooled conn's socket must
        be torn down; after keep-alive it must persist."""
        # NOTE: This used to run against apache.org but it made the test suite
        # really slow and fail half the time. Setting it to skip until we can
        # make this run better locally.
        pool = HTTPConnectionPool(self.host,
                                  self.port,
                                  block=True,
                                  maxsize=1,
                                  timeout=2)

        r = pool.request('GET',
                         '/keepalive?close=1',
                         retries=0,
                         headers={
                             "Connection": "close",
                         })

        self.assertEqual(pool.num_connections, 1)

        # The dummyserver will have responded with Connection:close,
        # and httplib will properly cleanup the socket.

        # We grab the HTTPConnection object straight from the Queue,
        # because _get_conn() is where the check & reset occurs
        # pylint: disable-msg=W0212
        conn = pool.pool.get()
        self.assertEqual(conn.sock, None)
        pool._put_conn(conn)

        # Now with keep-alive
        r = pool.request('GET',
                         '/keepalive?close=0',
                         retries=0,
                         headers={
                             "Connection": "keep-alive",
                         })

        # The dummyserver responded with Connection:keep-alive, the connection
        # persists.
        conn = pool.pool.get()
        self.assertNotEqual(conn.sock, None)
        pool._put_conn(conn)

        # Another request asking the server to close the connection. This one
        # should get cleaned up for the next request.
        r = pool.request('GET',
                         '/keepalive?close=1',
                         retries=0,
                         headers={
                             "Connection": "close",
                         })

        self.assertEqual(r.status, 200)

        conn = pool.pool.get()
        self.assertEqual(conn.sock, None)
        pool._put_conn(conn)

        # Next request: must succeed on a freshly re-established connection.
        r = pool.request('GET', '/keepalive?close=0')

    def test_post_with_urlencode(self):
        """POST with encode_multipart=False urlencodes the fields."""
        fields = {'banana': 'hammock', 'lol': 'cat'}
        response = self.pool.request('POST',
                                     '/echo',
                                     fields=fields,
                                     encode_multipart=False)
        self.assertEqual(response.data.decode('utf-8'), urlencode(fields))

    def test_post_with_multipart(self):
        """Multipart-encoded POST must echo back a body equivalent to a
        locally encoded one (boundary lines excepted)."""
        data = {'banana': 'hammock', 'lol': 'cat'}
        r = self.pool.request('POST',
                              '/echo',
                              fields=data,
                              encode_multipart=True)
        body = r.data.split(b'\r\n')

        encoded_data = encode_multipart_formdata(data)[0]
        expected_body = encoded_data.split(b'\r\n')

        # TODO: Get rid of extra parsing stuff when you can specify
        # a custom boundary to encode_multipart_formdata
        """
        We need to loop the return lines because a timestamp is attached
        from within encode_multipart_formdata. When the server echos back
        the data, it has the timestamp from when the data was encoded, which
        is not equivalent to when we run encode_multipart_formdata on
        the data again.
        """
        # Compare line by line, skipping the boundary lines (they contain
        # the per-encoding random/time-derived boundary and won't match).
        for i, line in enumerate(body):
            if line.startswith(b'--'):
                continue

            self.assertEqual(body[i], expected_body[i])

    def test_check_gzip(self):
        """Accept-Encoding: gzip is honored and the body is decoded."""
        response = self.pool.request('GET',
                                     '/encodingrequest',
                                     headers={'accept-encoding': 'gzip'})
        self.assertEqual(response.headers.get('content-encoding'), 'gzip')
        self.assertEqual(response.data, b'hello, world!')

    def test_check_deflate(self):
        """Accept-Encoding: deflate is honored and the body is decoded."""
        response = self.pool.request('GET',
                                     '/encodingrequest',
                                     headers={'accept-encoding': 'deflate'})
        self.assertEqual(response.headers.get('content-encoding'), 'deflate')
        self.assertEqual(response.data, b'hello, world!')

    def test_bad_decode(self):
        """Unknown/garbage content-encodings must raise DecodeError."""
        for encoding in ('garbage-deflate', 'garbage-gzip'):
            self.assertRaises(DecodeError,
                              self.pool.request,
                              'GET',
                              '/encodingrequest',
                              headers={'accept-encoding': encoding})

    def test_connection_count(self):
        """Three sequential requests on a maxsize=1 pool must reuse a
        single connection."""
        pool = HTTPConnectionPool(self.host, self.port, maxsize=1)

        for _ in range(3):
            pool.request('GET', '/')

        self.assertEqual(pool.num_connections, 1)
        self.assertEqual(pool.num_requests, 3)

    def test_connection_count_bigpool(self):
        """Even with room for 16 connections, sequential requests must
        reuse a single one."""
        http_pool = HTTPConnectionPool(self.host, self.port, maxsize=16)

        for _ in range(3):
            http_pool.request('GET', '/')

        self.assertEqual(http_pool.num_connections, 1)
        self.assertEqual(http_pool.num_requests, 3)

    def test_partial_response(self):
        """A non-preloaded response can be read incrementally with read(n)."""
        pool = HTTPConnectionPool(self.host, self.port, maxsize=1)
        # Close the pool on teardown so its socket is not leaked.
        self.addCleanup(pool.close)

        req_data = {'lol': 'cat'}
        # /echo sends the urlencoded request fields back as the body.
        resp_data = urlencode(req_data).encode('utf-8')

        r = pool.request('GET',
                         '/echo',
                         fields=req_data,
                         preload_content=False)

        # First a partial read, then the remainder of the body.
        self.assertEqual(r.read(5), resp_data[:5])
        self.assertEqual(r.read(), resp_data[5:])

    def test_lazy_load_twice(self):
        # This test is sad and confusing. Need to figure out what's
        # going on with partial reads and socket reuse.

        # block=True + maxsize=1: a second request must wait for the single
        # connection, so pool_timeout below can trip EmptyPoolError.
        pool = HTTPConnectionPool(self.host,
                                  self.port,
                                  block=True,
                                  maxsize=1,
                                  timeout=2)

        payload_size = 1024 * 2
        first_chunk = 512

        # Fixed boundary so the echoed body can be reproduced locally.
        boundary = 'foo'

        req_data = {'count': 'a' * payload_size}
        resp_data = encode_multipart_formdata(req_data, boundary=boundary)[0]

        req2_data = {'count': 'b' * payload_size}
        resp2_data = encode_multipart_formdata(req2_data, boundary=boundary)[0]

        # First request: read only part of the body, keeping the single
        # pooled connection checked out.
        r1 = pool.request('POST',
                          '/echo',
                          fields=req_data,
                          multipart_boundary=boundary,
                          preload_content=False)

        self.assertEqual(r1.read(first_chunk), resp_data[:first_chunk])

        try:
            # Second request while r1 still holds the only connection; the
            # tiny pool_timeout should normally raise EmptyPoolError.
            r2 = pool.request('POST',
                              '/echo',
                              fields=req2_data,
                              multipart_boundary=boundary,
                              preload_content=False,
                              pool_timeout=0.001)

            # This branch should generally bail here, but maybe someday it will
            # work? Perhaps by some sort of magic. Consider it a TODO.

            self.assertEqual(r2.read(first_chunk), resp2_data[:first_chunk])

            self.assertEqual(r1.read(), resp_data[first_chunk:])
            self.assertEqual(r2.read(), resp2_data[first_chunk:])
            self.assertEqual(pool.num_requests, 2)

        except EmptyPoolError:
            # Expected path: the pool was exhausted; only the first request
            # went through, and its remaining bytes are still readable.
            self.assertEqual(r1.read(), resp_data[first_chunk:])
            self.assertEqual(pool.num_requests, 1)

        # Either way only one connection was ever created.
        self.assertEqual(pool.num_connections, 1)

    def test_for_double_release(self):
        """Releasing a connection twice must not inflate the pool's queue."""
        MAXSIZE = 5

        # Check default state
        pool = HTTPConnectionPool(self.host, self.port, maxsize=MAXSIZE)
        # Close the pool on teardown so its sockets are not leaked; the later
        # copy of this test in this file uses the same cleanup convention.
        self.addCleanup(pool.close)
        self.assertEqual(pool.num_connections, 0)
        self.assertEqual(pool.pool.qsize(), MAXSIZE)

        # Make an empty slot for testing: the slot is deliberately never
        # returned, so the expected qsize is one short from here on.
        pool.pool.get()
        self.assertEqual(pool.pool.qsize(), MAXSIZE - 1)

        # Check state after simple request: the connection is released back.
        pool.urlopen('GET', '/')
        self.assertEqual(pool.pool.qsize(), MAXSIZE - 1)

        # Check state without release: preload_content=False keeps the
        # connection checked out until the body is consumed.
        pool.urlopen('GET', '/', preload_content=False)
        self.assertEqual(pool.pool.qsize(), MAXSIZE - 2)

        # A further simple request must not release that held connection twice.
        pool.urlopen('GET', '/')
        self.assertEqual(pool.pool.qsize(), MAXSIZE - 2)

        # Check state after read: accessing .data consumes and releases,
        # but the qsize must not grow past what was checked out.
        pool.urlopen('GET', '/').data
        self.assertEqual(pool.pool.qsize(), MAXSIZE - 2)

        pool.urlopen('GET', '/')
        self.assertEqual(pool.pool.qsize(), MAXSIZE - 2)

    def test_release_conn_parameter(self):
        """release_conn=False must keep the connection checked out of the pool."""
        MAXSIZE = 5
        pool = HTTPConnectionPool(self.host, self.port, maxsize=MAXSIZE)
        # Close the pool on teardown; the held connection below would
        # otherwise leak its socket past the end of the test.
        self.addCleanup(pool.close)
        self.assertEqual(pool.pool.qsize(), MAXSIZE)

        # Make request without releasing connection
        pool.request('GET', '/', release_conn=False, preload_content=False)
        self.assertEqual(pool.pool.qsize(), MAXSIZE - 1)

    @SkipTest
    def test_dns_error(self):
        # This fails on everything except Py27. Not sure why...
        # An unresolvable hostname should exhaust the retries and surface
        # as MaxRetryError rather than a raw socket error.
        pool = HTTPConnectionPool('thishostdoesnotexist.invalid',
                                  self.port,
                                  timeout=0.001)
        self.assertRaises(MaxRetryError,
                          pool.request,
                          'GET',
                          '/test',
                          retries=2)
コード例 #26
0
class TestConnectionPool(HTTPDummyServerTestCase):

    def setUp(self):
        """Create a pool against the dummy server; close it on teardown."""
        pool = HTTPConnectionPool(self.host, self.port)
        self.addCleanup(pool.close)
        self.pool = pool

    def test_get(self):
        """A plain GET with urlencoded fields should succeed."""
        fields = {'method': 'GET'}
        response = self.pool.request('GET', '/specific_method', fields=fields)
        # Pass the body along so a failure shows the server's explanation.
        self.assertEqual(response.status, 200, response.data)

    @pytest.mark.skip
    def test_post_url(self):
        """A POST with urlencoded fields should succeed (currently skipped)."""
        fields = {'method': 'POST'}
        response = self.pool.request('POST', '/specific_method', fields=fields)
        # Pass the body along so a failure shows the server's explanation.
        self.assertEqual(response.status, 200, response.data)

    def test_urlopen_put(self):
        """urlopen() should pass a query string through untouched."""
        response = self.pool.urlopen('PUT', '/specific_method?method=PUT')
        self.assertEqual(response.status, 200, response.data)

    @pytest.mark.skip
    def test_wrong_specific_method(self):
        # To make sure the dummy server is actually returning failed responses
        r = self.pool.request('GET', '/specific_method',
                              fields={'method': 'POST'})
        self.assertEqual(r.status, 400, r.data)

        r = self.pool.request('POST', '/specific_method',
                              fields={'method': 'GET'})
        self.assertEqual(r.status, 400, r.data)

    @pytest.mark.skip
    def test_upload(self):
        data = "I'm in ur multipart form-data, hazing a cheezburgr"
        fields = {
            'upload_param': 'filefield',
            'upload_filename': 'lolcat.txt',
            'upload_size': len(data),
            'filefield': ('lolcat.txt', data),
        }

        r = self.pool.request('POST', '/upload', fields=fields)
        self.assertEqual(r.status, 200, r.data)

    @pytest.mark.skip
    def test_one_name_multiple_values(self):
        fields = [
            ('foo', 'a'),
            ('foo', 'b'),
        ]

        # urlencode
        r = self.pool.request('GET', '/echo', fields=fields)
        self.assertEqual(r.data, b'foo=a&foo=b')

        # multipart
        r = self.pool.request('POST', '/echo', fields=fields)
        self.assertEqual(r.data.count(b'name="foo"'), 2)

    @pytest.mark.skip
    def test_request_method_body(self):
        body = b'hi'
        r = self.pool.request('POST', '/echo', body=body)
        self.assertEqual(r.data, body)

        fields = [('hi', 'hello')]
        self.assertRaises(TypeError, self.pool.request, 'POST', '/echo', body=body, fields=fields)

    @pytest.mark.skip
    def test_unicode_upload(self):
        fieldname = u('myfile')
        filename = u('\xe2\x99\xa5.txt')
        data = u('\xe2\x99\xa5').encode('utf8')
        size = len(data)

        fields = {
            u('upload_param'): fieldname,
            u('upload_filename'): filename,
            u('upload_size'): size,
            fieldname: (filename, data),
        }

        r = self.pool.request('POST', '/upload', fields=fields)
        self.assertEqual(r.status, 200, r.data)

    @pytest.mark.xfail
    def test_nagle(self):
        """ Test that connections have TCP_NODELAY turned on """
        # This test needs to be here in order to be run. socket.create_connection actually tries
        # to connect to the host provided so we need a dummyserver to be running.
        pool = HTTPConnectionPool(self.host, self.port)
        self.addCleanup(pool.close)
        conn = pool._get_conn()
        self.addCleanup(conn.close)
        pool._make_request(conn, 'GET', '/')
        tcp_nodelay_setting = conn._sock.getsockopt(socket.IPPROTO_TCP, socket.TCP_NODELAY)
        self.assertTrue(tcp_nodelay_setting)

    @pytest.mark.xfail
    def test_socket_options(self):
        """Test that connections accept socket options."""
        # This test needs to be here in order to be run. socket.create_connection actually tries to
        # connect to the host provided so we need a dummyserver to be running.
        pool = HTTPConnectionPool(self.host, self.port, socket_options=[
            (socket.SOL_SOCKET, socket.SO_KEEPALIVE, 1)
        ])
        conn = pool._new_conn()
        conn.connect()
        s = conn._sock
        using_keepalive = s.getsockopt(socket.SOL_SOCKET, socket.SO_KEEPALIVE) > 0
        self.assertTrue(using_keepalive)
        s.close()

    @pytest.mark.xfail
    def test_disable_default_socket_options(self):
        """Test that passing None disables all socket options."""
        # This test needs to be here in order to be run. socket.create_connection actually tries
        # to connect to the host provided so we need a dummyserver to be running.
        pool = HTTPConnectionPool(self.host, self.port, socket_options=None)
        conn = pool._new_conn()
        conn.connect()
        s = conn._sock
        using_nagle = s.getsockopt(socket.IPPROTO_TCP, socket.TCP_NODELAY) == 0
        self.assertTrue(using_nagle)
        s.close()

    @pytest.mark.xfail
    def test_defaults_are_applied(self):
        """Test that modifying the default socket options works."""
        # This test needs to be here in order to be run. socket.create_connection actually tries
        # to connect to the host provided so we need a dummyserver to be running.
        pool = HTTPConnectionPool(self.host, self.port)
        self.addCleanup(pool.close)
        # Get the HTTPConnection instance
        conn = pool._new_conn()
        self.addCleanup(conn.close)
        # Update the default socket options
        conn.default_socket_options += [(socket.SOL_SOCKET, socket.SO_KEEPALIVE, 1)]
        conn.connect()
        s = conn._sock
        self.addCleanup(s.close)
        nagle_disabled = s.getsockopt(socket.IPPROTO_TCP, socket.TCP_NODELAY) > 0
        using_keepalive = s.getsockopt(socket.SOL_SOCKET, socket.SO_KEEPALIVE) > 0
        self.assertTrue(nagle_disabled)
        self.assertTrue(using_keepalive)

    def test_connection_error_retries(self):
        """ ECONNREFUSED error should raise a connection error, with retries """
        port = find_unused_port()
        pool = HTTPConnectionPool(self.host, port)
        # Close the pool on teardown, matching the cleanup convention used by
        # the other tests in this class.
        self.addCleanup(pool.close)
        try:
            pool.request('GET', '/', retries=Retry(connect=3))
            self.fail("Should have failed with a connection error.")
        except MaxRetryError as e:
            # After the retries are exhausted the last failure is wrapped.
            self.assertEqual(type(e.reason), NewConnectionError)

    def test_timeout_success(self):
        timeout = Timeout(connect=3, read=5, total=None)
        pool = HTTPConnectionPool(self.host, self.port, timeout=timeout)
        self.addCleanup(pool.close)
        pool.request('GET', '/')
        # This should not raise a "Timeout already started" error
        pool.request('GET', '/')

        pool = HTTPConnectionPool(self.host, self.port, timeout=timeout)
        self.addCleanup(pool.close)
        # This should also not raise a "Timeout already started" error
        pool.request('GET', '/')

        timeout = Timeout(total=None)
        pool = HTTPConnectionPool(self.host, self.port, timeout=timeout)
        self.addCleanup(pool.close)
        pool.request('GET', '/')

    @pytest.mark.xfail
    def test_redirect(self):
        r = self.pool.request('GET', '/redirect', fields={'target': '/'}, redirect=False)
        self.assertEqual(r.status, 303)

        r = self.pool.request('GET', '/redirect', fields={'target': '/'})
        self.assertEqual(r.status, 200)
        self.assertEqual(r.data, b'Dummy server!')

    def test_bad_connect(self):
        """Connecting to an invalid host must fail after the retries run out."""
        pool = HTTPConnectionPool('badhost.invalid', self.port)
        # Close the pool on teardown, matching the cleanup convention used by
        # the other tests in this class.
        self.addCleanup(pool.close)
        try:
            pool.request('GET', '/', retries=5)
            self.fail("should raise timeout exception here")
        except MaxRetryError as e:
            # The final connection failure is surfaced as the retry reason.
            self.assertEqual(type(e.reason), NewConnectionError)

    @pytest.mark.skip
    def test_keepalive(self):
        pool = HTTPConnectionPool(self.host, self.port, block=True, maxsize=1)
        self.addCleanup(pool.close)

        r = pool.request('GET', '/keepalive?close=0')
        r = pool.request('GET', '/keepalive?close=0')

        self.assertEqual(r.status, 200)
        self.assertEqual(pool.num_connections, 1)
        self.assertEqual(pool.num_requests, 2)

    @pytest.mark.skip
    def test_keepalive_close(self):
        pool = HTTPConnectionPool(self.host, self.port,
                                  block=True, maxsize=1, timeout=2)
        self.addCleanup(pool.close)

        r = pool.request('GET', '/keepalive?close=1', retries=0,
                         headers={
                             "Connection": "close",
                         })

        self.assertEqual(pool.num_connections, 1)

        # The dummyserver will have responded with Connection:close,
        # and httplib will properly cleanup the socket.

        # We grab the HTTPConnection object straight from the Queue,
        # because _get_conn() is where the check & reset occurs
        # pylint: disable-msg=W0212
        conn = pool.pool.get()
        self.assertEqual(conn._sock, None)
        pool._put_conn(conn)

        # Now with keep-alive
        r = pool.request('GET', '/keepalive?close=0', retries=0,
                         headers={
                             "Connection": "keep-alive",
                         })

        # The dummyserver responded with Connection:keep-alive, the connection
        # persists.
        conn = pool.pool.get()
        self.assertNotEqual(conn._sock, None)
        pool._put_conn(conn)

        # Another request asking the server to close the connection. This one
        # should get cleaned up for the next request.
        r = pool.request('GET', '/keepalive?close=1', retries=0,
                         headers={
                             "Connection": "close",
                         })

        self.assertEqual(r.status, 200)

        conn = pool.pool.get()
        self.assertEqual(conn._sock, None)
        pool._put_conn(conn)

        # Next request
        r = pool.request('GET', '/keepalive?close=0')

    @pytest.mark.skip
    def test_post_with_urlencode(self):
        data = {'banana': 'hammock', 'lol': 'cat'}
        r = self.pool.request('POST', '/echo', fields=data, encode_multipart=False)
        self.assertEqual(r.data.decode('utf-8'), urlencode(data))

    @pytest.mark.skip
    def test_post_with_multipart(self):
        data = {'banana': 'hammock', 'lol': 'cat'}
        r = self.pool.request('POST', '/echo',
                              fields=data,
                              encode_multipart=True)
        body = r.data.split(b'\r\n')

        encoded_data = encode_multipart_formdata(data)[0]
        expected_body = encoded_data.split(b'\r\n')

        # TODO: Get rid of extra parsing stuff when you can specify
        # a custom boundary to encode_multipart_formdata
        """
        We need to loop the return lines because a timestamp is attached
        from within encode_multipart_formdata. When the server echos back
        the data, it has the timestamp from when the data was encoded, which
        is not equivalent to when we run encode_multipart_formdata on
        the data again.
        """
        for i, line in enumerate(body):
            if line.startswith(b'--'):
                continue

            self.assertEqual(body[i], expected_body[i])

    def test_check_gzip(self):
        r = self.pool.request('GET', '/encodingrequest',
                              headers={'accept-encoding': 'gzip'})
        self.assertEqual(r.headers.get('content-encoding'), 'gzip')
        self.assertEqual(r.data, b'hello, world!')

    def test_check_deflate(self):
        r = self.pool.request('GET', '/encodingrequest',
                              headers={'accept-encoding': 'deflate'})
        self.assertEqual(r.headers.get('content-encoding'), 'deflate')
        self.assertEqual(r.data, b'hello, world!')

    def test_bad_decode(self):
        self.assertRaises(DecodeError, self.pool.request,
                          'GET', '/encodingrequest',
                          headers={'accept-encoding': 'garbage-deflate'})

        self.assertRaises(DecodeError, self.pool.request,
                          'GET', '/encodingrequest',
                          headers={'accept-encoding': 'garbage-gzip'})

    @pytest.mark.xfail
    def test_connection_count(self):
        pool = HTTPConnectionPool(self.host, self.port, maxsize=1)
        self.addCleanup(pool.close)

        pool.request('GET', '/')
        pool.request('GET', '/')
        pool.request('GET', '/')

        self.assertEqual(pool.num_connections, 1)
        self.assertEqual(pool.num_requests, 3)

    @pytest.mark.xfail
    def test_connection_count_bigpool(self):
        http_pool = HTTPConnectionPool(self.host, self.port, maxsize=16)
        self.addCleanup(http_pool.close)

        http_pool.request('GET', '/')
        http_pool.request('GET', '/')
        http_pool.request('GET', '/')

        self.assertEqual(http_pool.num_connections, 1)
        self.assertEqual(http_pool.num_requests, 3)

    def test_partial_response(self):
        pool = HTTPConnectionPool(self.host, self.port, maxsize=1)
        self.addCleanup(pool.close)

        req_data = {'lol': 'cat'}
        resp_data = urlencode(req_data).encode('utf-8')

        r = pool.request('GET', '/echo', fields=req_data, preload_content=False)

        self.assertEqual(r.read(5), resp_data[:5])
        self.assertEqual(r.read(), resp_data[5:])

    @pytest.mark.skip
    def test_lazy_load_twice(self):
        # This test is sad and confusing. Need to figure out what's
        # going on with partial reads and socket reuse.

        pool = HTTPConnectionPool(self.host, self.port, block=True, maxsize=1, timeout=2)

        payload_size = 1024 * 2
        first_chunk = 512

        boundary = 'foo'

        req_data = {'count': 'a' * payload_size}
        resp_data = encode_multipart_formdata(req_data, boundary=boundary)[0]

        req2_data = {'count': 'b' * payload_size}
        resp2_data = encode_multipart_formdata(req2_data, boundary=boundary)[0]

        r1 = pool.request('POST', '/echo',
                          fields=req_data,
                          multipart_boundary=boundary,
                          preload_content=False)

        first_data = r1.read(first_chunk)
        self.assertGreater(len(first_data), 0)
        self.assertEqual(first_data, resp_data[:len(first_data)])

        try:
            r2 = pool.request('POST', '/echo', fields=req2_data, multipart_boundary=boundary,
                              preload_content=False, pool_timeout=0.001)

            # This branch should generally bail here, but maybe someday it will
            # work? Perhaps by some sort of magic. Consider it a TODO.

            second_data = r2.read(first_chunk)
            self.assertGreater(len(second_data), 0)
            self.assertEqual(second_data, resp2_data[:len(second_data)])

            self.assertEqual(r1.read(), resp_data[len(first_data):])
            self.assertEqual(r2.read(), resp2_data[len(second_data):])
            self.assertEqual(pool.num_requests, 2)

        except EmptyPoolError:
            self.assertEqual(r1.read(), resp_data[len(first_data):])
            self.assertEqual(pool.num_requests, 1)

        self.assertEqual(pool.num_connections, 1)

    @pytest.mark.xfail
    def test_for_double_release(self):
        MAXSIZE = 5

        # Check default state
        pool = HTTPConnectionPool(self.host, self.port, maxsize=MAXSIZE)
        self.addCleanup(pool.close)
        self.assertEqual(pool.num_connections, 0)
        self.assertEqual(pool.pool.qsize(), MAXSIZE)

        # Make an empty slot for testing
        pool.pool.get()
        self.assertEqual(pool.pool.qsize(), MAXSIZE-1)

        # Check state after simple request
        pool.urlopen('GET', '/')
        self.assertEqual(pool.pool.qsize(), MAXSIZE-1)

        # Check state without release
        pool.urlopen('GET', '/', preload_content=False)
        self.assertEqual(pool.pool.qsize(), MAXSIZE-2)

        pool.urlopen('GET', '/')
        self.assertEqual(pool.pool.qsize(), MAXSIZE-2)

        # Check state after read
        pool.urlopen('GET', '/').data
        self.assertEqual(pool.pool.qsize(), MAXSIZE-2)

        pool.urlopen('GET', '/')
        self.assertEqual(pool.pool.qsize(), MAXSIZE-2)

    def test_connections_arent_released(self):
        """An unread preload_content=False response pins its connection."""
        MAXSIZE = 5
        pool = HTTPConnectionPool(self.host, self.port, maxsize=MAXSIZE)
        # Close the pool on teardown; the pinned connection below would
        # otherwise leak its socket past the end of the test.
        self.addCleanup(pool.close)
        self.assertEqual(pool.pool.qsize(), MAXSIZE)

        pool.request('GET', '/', preload_content=False)
        # The connection stays checked out until the body is consumed.
        self.assertEqual(pool.pool.qsize(), MAXSIZE - 1)

    def test_dns_error(self):
        """An unresolvable host should exhaust retries and raise MaxRetryError."""
        pool = HTTPConnectionPool('thishostdoesnotexist.invalid', self.port, timeout=0.001)
        # Close the pool on teardown, matching the class's cleanup convention.
        self.addCleanup(pool.close)
        self.assertRaises(MaxRetryError, pool.request, 'GET', '/test', retries=2)

    def test_source_address(self):
        for addr, is_ipv6 in VALID_SOURCE_ADDRESSES:
            if is_ipv6 and not HAS_IPV6_AND_DNS:
                warnings.warn("No IPv6 support: skipping.",
                              NoIPv6Warning)
                continue
            pool = HTTPConnectionPool(self.host, self.port,
                                      source_address=addr, retries=False)
            self.addCleanup(pool.close)
            r = pool.request('GET', '/source_address')
            self.assertEqual(r.data, b(addr[0]))

    def test_source_address_error(self):
        """Binding to an invalid source address must fail to connect."""
        for addr in INVALID_SOURCE_ADDRESSES:
            pool = HTTPConnectionPool(self.host, self.port, source_address=addr, retries=False)
            # Close each pool on teardown, as test_source_address above does.
            self.addCleanup(pool.close)
            # FIXME: This assert flakes sometimes. Not sure why.
            self.assertRaises(NewConnectionError,
                              pool.request,
                              'GET', '/source_address?{0}'.format(addr))

    @pytest.mark.xfail
    def test_stream_keepalive(self):
        x = 2

        for _ in range(x):
            response = self.pool.request(
                    'GET',
                    '/chunked',
                    headers={
                        'Connection': 'keep-alive',
                        },
                    preload_content=False,
                    retries=False,
                    )
            for chunk in response.stream(3):
                self.assertEqual(chunk, b'123')

        self.assertEqual(self.pool.num_connections, 1)
        self.assertEqual(self.pool.num_requests, x)

    def test_chunked_gzip(self):
        """A chunked, gzipped response should be decoded while streaming."""
        response = self.pool.request('GET',
                                     '/chunked_gzip',
                                     preload_content=False,
                                     decode_content=True)

        # The server sends b'123' four times; read() must return it decoded.
        self.assertEqual(b'123' * 4, response.read())

    def test_mixed_case_hostname(self):
        """Hostname matching must be case-insensitive."""
        pool = HTTPConnectionPool("LoCaLhOsT", self.port)
        self.addCleanup(pool.close)
        url = "http://LoCaLhOsT:%d/" % self.port
        response = pool.request('GET', url)
        self.assertEqual(response.status, 200)
コード例 #27
0
ファイル: zmq2http.py プロジェクト: mhennipman/Koppelvlakken
# Python 2 script: subscribe to all messages on a ZMQ socket and forward
# each multipart message as an HTTP POST.  `sub` and `zmq` are set up
# above this chunk.
sub.setsockopt(zmq.SUBSCRIBE, '')
sub.setsockopt(zmq.HWM, 255)

# Target HTTP endpoint is given on the command line as "host:port".
host, port = sys.argv[2].split(':')
pool = HTTPConnectionPool(host, port=int(port), maxsize=4)

while True:
    # First frame is the request path; the remaining frames form the body.
    multipart = sub.recv_multipart()
    path = multipart[0]
    content = ''.join(multipart[1:])
    try:
        r = pool.urlopen('POST',
                         path,
                         headers={
                             'Content-Type': 'application/gzip',
                             'Connection': 'Keep-Alive',
                             'User-Agent': 'ZMQ2http 0.1'
                         },
                         body=content,
                         release_conn=True)
        print str(r.status) + ' ' + str(len(content))
        if r.status != 200:
            # On failure, log the response and dump the payload to a
            # timestamped .gz file for later inspection.
            print r.data
            f = open("%.4f" % time() + '.gz', 'wb')
            f.write(''.join(multipart[1:]))
            f.close()
    except Exception as e:
        # Best-effort forwarding: report the error and keep consuming.
        print e
        pass
コード例 #28
0
class TestConnectionPool(HTTPDummyServerTestCase):
    def setUp(self):
        self.pool = HTTPConnectionPool(self.host, self.port)

    def test_get(self):
        r = self.pool.request("GET", "/specific_method", fields={"method": "GET"})
        self.assertEqual(r.status, 200, r.data)

    def test_post_url(self):
        r = self.pool.request("POST", "/specific_method", fields={"method": "POST"})
        self.assertEqual(r.status, 200, r.data)

    def test_urlopen_put(self):
        r = self.pool.urlopen("PUT", "/specific_method?method=PUT")
        self.assertEqual(r.status, 200, r.data)

    def test_wrong_specific_method(self):
        # To make sure the dummy server is actually returning failed responses
        r = self.pool.request("GET", "/specific_method", fields={"method": "POST"})
        self.assertEqual(r.status, 400, r.data)

        r = self.pool.request("POST", "/specific_method", fields={"method": "GET"})
        self.assertEqual(r.status, 400, r.data)

    def test_upload(self):
        data = "I'm in ur multipart form-data, hazing a cheezburgr"
        fields = {
            "upload_param": "filefield",
            "upload_filename": "lolcat.txt",
            "upload_size": len(data),
            "filefield": ("lolcat.txt", data),
        }

        r = self.pool.request("POST", "/upload", fields=fields)
        self.assertEqual(r.status, 200, r.data)

    def test_one_name_multiple_values(self):
        fields = [("foo", "a"), ("foo", "b")]

        # urlencode
        r = self.pool.request("GET", "/echo", fields=fields)
        self.assertEqual(r.data, b"foo=a&foo=b")

        # multipart
        r = self.pool.request("POST", "/echo", fields=fields)
        self.assertEqual(r.data.count(b'name="foo"'), 2)

    def test_request_method_body(self):
        body = b"hi"
        r = self.pool.request("POST", "/echo", body=body)
        self.assertEqual(r.data, body)

        fields = [("hi", "hello")]
        self.assertRaises(TypeError, self.pool.request, "POST", "/echo", body=body, fields=fields)

    def test_unicode_upload(self):
        fieldname = u("myfile")
        filename = u("\xe2\x99\xa5.txt")
        data = u("\xe2\x99\xa5").encode("utf8")
        size = len(data)

        fields = {
            u("upload_param"): fieldname,
            u("upload_filename"): filename,
            u("upload_size"): size,
            fieldname: (filename, data),
        }

        r = self.pool.request("POST", "/upload", fields=fields)
        self.assertEqual(r.status, 200, r.data)

    def test_timeout_float(self):
        url = "/sleep?seconds=0.005"
        # Pool-global timeout
        pool = HTTPConnectionPool(self.host, self.port, timeout=0.001, retries=False)
        self.assertRaises(ReadTimeoutError, pool.request, "GET", url)

    def test_conn_closed(self):
        pool = HTTPConnectionPool(self.host, self.port, timeout=0.001, retries=False)
        conn = pool._get_conn()
        pool._put_conn(conn)
        try:
            url = "/sleep?seconds=0.005"
            pool.urlopen("GET", url)
            self.fail("The request should fail with a timeout error.")
        except ReadTimeoutError:
            if conn.sock:
                self.assertRaises(socket.error, conn.sock.recv, 1024)
        finally:
            pool._put_conn(conn)

    def test_nagle(self):
        """ Test that connections have TCP_NODELAY turned on """
        # This test needs to be here in order to be run. socket.create_connection actually tries to
        # connect to the host provided so we need a dummyserver to be running.
        pool = HTTPConnectionPool(self.host, self.port)
        conn = pool._get_conn()
        pool._make_request(conn, "GET", "/")
        tcp_nodelay_setting = conn.sock.getsockopt(socket.IPPROTO_TCP, socket.TCP_NODELAY)
        assert tcp_nodelay_setting > 0, (
            "Expected TCP_NODELAY to be set on the "
            "socket (with value greater than 0) "
            "but instead was %s" % tcp_nodelay_setting
        )

    def test_socket_options(self):
        """Test that connections accept socket options."""
        # This test needs to be here in order to be run. socket.create_connection actually tries to
        # connect to the host provided so we need a dummyserver to be running.
        pool = HTTPConnectionPool(self.host, self.port, socket_options=[(socket.SOL_SOCKET, socket.SO_KEEPALIVE, 1)])
        s = pool._new_conn()._new_conn()  # Get the socket
        using_keepalive = s.getsockopt(socket.SOL_SOCKET, socket.SO_KEEPALIVE) > 0
        self.assertTrue(using_keepalive)
        s.close()

    def test_disable_default_socket_options(self):
        """Test that passing None disables all socket options."""
        # This test needs to be here in order to be run. socket.create_connection actually tries to
        # connect to the host provided so we need a dummyserver to be running.
        pool = HTTPConnectionPool(self.host, self.port, socket_options=None)
        s = pool._new_conn()._new_conn()
        using_nagle = s.getsockopt(socket.IPPROTO_TCP, socket.TCP_NODELAY) == 0
        self.assertTrue(using_nagle)
        s.close()

    def test_defaults_are_applied(self):
        """Test that modifying the default socket options works."""
        # This test needs to be here in order to be run. socket.create_connection actually tries to
        # connect to the host provided so we need a dummyserver to be running.
        pool = HTTPConnectionPool(self.host, self.port)
        # Get the HTTPConnection instance
        conn = pool._new_conn()
        # Update the default socket options
        conn.default_socket_options += [(socket.SOL_SOCKET, socket.SO_KEEPALIVE, 1)]
        s = conn._new_conn()
        nagle_disabled = s.getsockopt(socket.IPPROTO_TCP, socket.TCP_NODELAY) > 0
        using_keepalive = s.getsockopt(socket.SOL_SOCKET, socket.SO_KEEPALIVE) > 0
        self.assertTrue(nagle_disabled)
        self.assertTrue(using_keepalive)

    @timed(0.5)
    def test_timeout(self):
        """ Requests should time out when expected """
        url = "/sleep?seconds=0.003"
        timeout = Timeout(read=0.001)

        # Pool-global timeout
        pool = HTTPConnectionPool(self.host, self.port, timeout=timeout, retries=False)

        conn = pool._get_conn()
        self.assertRaises(ReadTimeoutError, pool._make_request, conn, "GET", url)
        pool._put_conn(conn)

        time.sleep(0.02)  # Wait for server to start receiving again. :(

        self.assertRaises(ReadTimeoutError, pool.request, "GET", url)

        # Request-specific timeouts should raise errors
        pool = HTTPConnectionPool(self.host, self.port, timeout=0.1, retries=False)

        conn = pool._get_conn()
        self.assertRaises(ReadTimeoutError, pool._make_request, conn, "GET", url, timeout=timeout)
        pool._put_conn(conn)

        time.sleep(0.02)  # Wait for server to start receiving again. :(

        self.assertRaises(ReadTimeoutError, pool.request, "GET", url, timeout=timeout)

        # Timeout int/float passed directly to request and _make_request should
        # raise a request timeout
        self.assertRaises(ReadTimeoutError, pool.request, "GET", url, timeout=0.001)
        conn = pool._new_conn()
        self.assertRaises(ReadTimeoutError, pool._make_request, conn, "GET", url, timeout=0.001)
        pool._put_conn(conn)

        # Timeout int/float passed directly to _make_request should not raise a
        # request timeout if it's a high value
        pool.request("GET", url, timeout=1)

    @requires_network
    @timed(0.5)
    def test_connect_timeout(self):
        # TARPIT_HOST is expected not to complete the TCP handshake, so the
        # 0.001s connect timeout should fire before any request is sent.
        url = "/sleep?seconds=0.005"
        timeout = Timeout(connect=0.001)

        # Pool-global timeout
        pool = HTTPConnectionPool(TARPIT_HOST, self.port, timeout=timeout)
        conn = pool._get_conn()
        self.assertRaises(ConnectTimeoutError, pool._make_request, conn, "GET", url)

        # Retries
        retries = Retry(connect=0)
        self.assertRaises(MaxRetryError, pool.request, "GET", url, retries=retries)

        # Request-specific connection timeouts
        # (the per-request timeout overrides the generous pool default).
        big_timeout = Timeout(read=0.2, connect=0.2)
        pool = HTTPConnectionPool(TARPIT_HOST, self.port, timeout=big_timeout, retries=False)
        conn = pool._get_conn()
        self.assertRaises(ConnectTimeoutError, pool._make_request, conn, "GET", url, timeout=timeout)

        pool._put_conn(conn)
        self.assertRaises(ConnectTimeoutError, pool.request, "GET", url, timeout=timeout)

    def test_connection_error_retries(self):
        """ ECONNREFUSED error should raise a connection error, with retries """
        closed_port = find_unused_port()
        pool = HTTPConnectionPool(self.host, closed_port)
        with self.assertRaises(MaxRetryError) as ctx:
            pool.request("GET", "/", retries=Retry(connect=3))
        reason = ctx.exception.reason
        self.assertTrue(isinstance(reason, ProtocolError))
        self.assertEqual(reason.args[1].errno, errno.ECONNREFUSED)

    def test_timeout_reset(self):
        """ If the read timeout isn't set, socket timeout should reset """
        sleep_url = "/sleep?seconds=0.005"
        connect_only = Timeout(connect=0.001)
        pool = HTTPConnectionPool(self.host, self.port, timeout=connect_only)
        connection = pool._get_conn()
        try:
            # Only a connect timeout is configured; the slow read must pass.
            pool._make_request(connection, "GET", sleep_url)
        except ReadTimeoutError:
            self.fail("This request shouldn't trigger a read timeout.")

    @requires_network
    @timed(5.0)
    def test_total_timeout(self):
        url = "/sleep?seconds=0.005"

        # total (0.001s) is shorter than connect/read, so the connect phase
        # against the tarpit host should time out first.
        timeout = Timeout(connect=3, read=5, total=0.001)
        pool = HTTPConnectionPool(TARPIT_HOST, self.port, timeout=timeout)
        conn = pool._get_conn()
        self.assertRaises(ConnectTimeoutError, pool._make_request, conn, "GET", url)

        # This will get the socket to raise an EAGAIN on the read
        timeout = Timeout(connect=3, read=0)
        pool = HTTPConnectionPool(self.host, self.port, timeout=timeout)
        conn = pool._get_conn()
        self.assertRaises(ReadTimeoutError, pool._make_request, conn, "GET", url)

        # The connect should succeed and this should hit the read timeout
        timeout = Timeout(connect=3, read=5, total=0.002)
        pool = HTTPConnectionPool(self.host, self.port, timeout=timeout)
        conn = pool._get_conn()
        self.assertRaises(ReadTimeoutError, pool._make_request, conn, "GET", url)

    @requires_network
    def test_none_total_applies_connect(self):
        """An explicit total=None must not disable the connect timeout."""
        connect_only = Timeout(total=None, connect=0.001)
        pool = HTTPConnectionPool(TARPIT_HOST, self.port, timeout=connect_only)
        connection = pool._get_conn()
        with self.assertRaises(ConnectTimeoutError):
            pool._make_request(connection, "GET", "/sleep?seconds=0.005")

    def test_timeout_success(self):
        """Generous or absent timeouts never raise on repeated requests."""
        generous = Timeout(connect=3, read=5, total=None)
        pool = HTTPConnectionPool(self.host, self.port, timeout=generous)
        pool.request("GET", "/")
        # A second request on the same pool must not raise a
        # "Timeout already started" error.
        pool.request("GET", "/")

        # Neither should the first request on a brand-new pool.
        HTTPConnectionPool(self.host, self.port, timeout=generous).request("GET", "/")

        # total=None on its own is also fine.
        no_total = Timeout(total=None)
        HTTPConnectionPool(self.host, self.port, timeout=no_total).request("GET", "/")

    def test_tunnel(self):
        # note the actual httplib.py has no tests for this functionality
        timeout = Timeout(total=None)
        pool = HTTPConnectionPool(self.host, self.port, timeout=timeout)
        conn = pool._get_conn()
        try:
            conn.set_tunnel(self.host, self.port)
        except AttributeError:  # python 2.6
            conn._set_tunnel(self.host, self.port)

        # Stub out the tunnel handshake; we only care that it is invoked.
        conn._tunnel = mock.Mock(return_value=None)
        pool._make_request(conn, "GET", "/")
        conn._tunnel.assert_called_once_with()

        # test that it's not called when tunnel is not set
        timeout = Timeout(total=None)
        pool = HTTPConnectionPool(self.host, self.port, timeout=timeout)
        conn = pool._get_conn()

        conn._tunnel = mock.Mock(return_value=None)
        pool._make_request(conn, "GET", "/")
        self.assertEqual(conn._tunnel.called, False)

    def test_redirect(self):
        """/redirect gives 303 when unfollowed and the target body when followed."""
        fields = {"target": "/"}

        unfollowed = self.pool.request("GET", "/redirect", fields=fields, redirect=False)
        self.assertEqual(unfollowed.status, 303)

        followed = self.pool.request("GET", "/redirect", fields=fields)
        self.assertEqual(followed.status, 200)
        self.assertEqual(followed.data, b"Dummy server!")

    def test_bad_connect(self):
        """An unresolvable host surfaces as MaxRetryError wrapping ProtocolError."""
        pool = HTTPConnectionPool("badhost.invalid", self.port)
        with self.assertRaises(MaxRetryError) as ctx:
            pool.request("GET", "/", retries=5)
        self.assertTrue(isinstance(ctx.exception.reason, ProtocolError), ctx.exception.reason)

    def test_keepalive(self):
        """Two keep-alive requests should share one underlying connection."""
        pool = HTTPConnectionPool(self.host, self.port, block=True, maxsize=1)

        for _ in range(2):
            resp = pool.request("GET", "/keepalive?close=0")

        self.assertEqual(resp.status, 200)
        self.assertEqual(pool.num_connections, 1)
        self.assertEqual(pool.num_requests, 2)

    def test_keepalive_close(self):
        """Connection reuse should follow the Connection response header."""
        pool = HTTPConnectionPool(self.host, self.port, block=True, maxsize=1, timeout=2)

        r = pool.request("GET", "/keepalive?close=1", retries=0, headers={"Connection": "close"})

        self.assertEqual(pool.num_connections, 1)

        # The dummyserver will have responded with Connection:close,
        # and httplib will properly cleanup the socket.

        # We grab the HTTPConnection object straight from the Queue,
        # because _get_conn() is where the check & reset occurs
        # pylint: disable-msg=W0212
        conn = pool.pool.get()
        self.assertEqual(conn.sock, None)
        pool._put_conn(conn)

        # Now with keep-alive
        r = pool.request("GET", "/keepalive?close=0", retries=0, headers={"Connection": "keep-alive"})

        # The dummyserver responded with Connection:keep-alive, the connection
        # persists.
        conn = pool.pool.get()
        self.assertNotEqual(conn.sock, None)
        pool._put_conn(conn)

        # Another request asking the server to close the connection. This one
        # should get cleaned up for the next request.
        r = pool.request("GET", "/keepalive?close=1", retries=0, headers={"Connection": "close"})

        self.assertEqual(r.status, 200)

        conn = pool.pool.get()
        self.assertEqual(conn.sock, None)
        pool._put_conn(conn)

        # Next request
        r = pool.request("GET", "/keepalive?close=0")

    def test_post_with_urlencode(self):
        """A urlencoded POST body is echoed back verbatim."""
        fields = {"banana": "hammock", "lol": "cat"}
        resp = self.pool.request("POST", "/echo", fields=fields, encode_multipart=False)
        self.assertEqual(resp.data.decode("utf-8"), urlencode(fields))

    def test_post_with_multipart(self):
        """Multipart-encoded POST fields should be echoed back equivalently."""
        data = {"banana": "hammock", "lol": "cat"}
        r = self.pool.request("POST", "/echo", fields=data, encode_multipart=True)
        body = r.data.split(b"\r\n")

        encoded_data = encode_multipart_formdata(data)[0]
        expected_body = encoded_data.split(b"\r\n")

        # TODO: Get rid of extra parsing stuff when you can specify
        # a custom boundary to encode_multipart_formdata
        #
        # We need to loop the return lines because a timestamp is attached
        # from within encode_multipart_formdata. When the server echos back
        # the data, it has the timestamp from when the data was encoded, which
        # is not equivalent to when we run encode_multipart_formdata on
        # the data again.  Boundary lines (starting with b"--") therefore
        # differ between the two bodies and are skipped, not compared.
        for i, line in enumerate(body):
            if line.startswith(b"--"):
                continue

            self.assertEqual(body[i], expected_body[i])

    def test_check_gzip(self):
        """gzip-encoded responses are decoded transparently."""
        resp = self.pool.request("GET", "/encodingrequest", headers={"accept-encoding": "gzip"})
        self.assertEqual(resp.headers.get("content-encoding"), "gzip")
        self.assertEqual(resp.data, b"hello, world!")

    def test_check_deflate(self):
        """deflate-encoded responses are decoded transparently."""
        resp = self.pool.request("GET", "/encodingrequest", headers={"accept-encoding": "deflate"})
        self.assertEqual(resp.headers.get("content-encoding"), "deflate")
        self.assertEqual(resp.data, b"hello, world!")

    def test_bad_decode(self):
        """Unknown content-encodings from the server raise DecodeError."""
        for bad_encoding in ("garbage-deflate", "garbage-gzip"):
            self.assertRaises(
                DecodeError,
                self.pool.request,
                "GET",
                "/encodingrequest",
                headers={"accept-encoding": bad_encoding},
            )

    def test_connection_count(self):
        """Three requests through a maxsize=1 pool reuse one connection."""
        pool = HTTPConnectionPool(self.host, self.port, maxsize=1)

        for _ in range(3):
            pool.request("GET", "/")

        self.assertEqual(pool.num_connections, 1)
        self.assertEqual(pool.num_requests, 3)

    def test_connection_count_bigpool(self):
        """Even a large pool keeps reusing a single connection when serial."""
        http_pool = HTTPConnectionPool(self.host, self.port, maxsize=16)

        for _ in range(3):
            http_pool.request("GET", "/")

        self.assertEqual(http_pool.num_connections, 1)
        self.assertEqual(http_pool.num_requests, 3)

    def test_partial_response(self):
        """A streamed response can be consumed across several read() calls."""
        pool = HTTPConnectionPool(self.host, self.port, maxsize=1)

        fields = {"lol": "cat"}
        expected = urlencode(fields).encode("utf-8")

        resp = pool.request("GET", "/echo", fields=fields, preload_content=False)

        self.assertEqual(resp.read(5), expected[:5])
        self.assertEqual(resp.read(), expected[5:])

    def test_lazy_load_twice(self):
        """Two concurrently partially-read responses on a maxsize=1 pool."""
        # This test is sad and confusing. Need to figure out what's
        # going on with partial reads and socket reuse.

        pool = HTTPConnectionPool(self.host, self.port, block=True, maxsize=1, timeout=2)

        payload_size = 1024 * 2
        first_chunk = 512

        # Fixed boundary so both encodings produce directly comparable bodies.
        boundary = "foo"

        req_data = {"count": "a" * payload_size}
        resp_data = encode_multipart_formdata(req_data, boundary=boundary)[0]

        req2_data = {"count": "b" * payload_size}
        resp2_data = encode_multipart_formdata(req2_data, boundary=boundary)[0]

        r1 = pool.request("POST", "/echo", fields=req_data, multipart_boundary=boundary, preload_content=False)

        self.assertEqual(r1.read(first_chunk), resp_data[:first_chunk])

        try:
            # The only connection is still checked out by r1, so this is
            # expected to hit the 0.001s pool_timeout and raise.
            r2 = pool.request(
                "POST",
                "/echo",
                fields=req2_data,
                multipart_boundary=boundary,
                preload_content=False,
                pool_timeout=0.001,
            )

            # This branch should generally bail here, but maybe someday it will
            # work? Perhaps by some sort of magic. Consider it a TODO.

            self.assertEqual(r2.read(first_chunk), resp2_data[:first_chunk])

            self.assertEqual(r1.read(), resp_data[first_chunk:])
            self.assertEqual(r2.read(), resp2_data[first_chunk:])
            self.assertEqual(pool.num_requests, 2)

        except EmptyPoolError:
            self.assertEqual(r1.read(), resp_data[first_chunk:])
            self.assertEqual(pool.num_requests, 1)

        self.assertEqual(pool.num_connections, 1)

    def test_for_double_release(self):
        """Releasing a connection twice must not grow the pool queue."""
        MAXSIZE = 5

        # Check default state
        pool = HTTPConnectionPool(self.host, self.port, maxsize=MAXSIZE)
        self.assertEqual(pool.num_connections, 0)
        self.assertEqual(pool.pool.qsize(), MAXSIZE)

        # Make an empty slot for testing
        pool.pool.get()
        self.assertEqual(pool.pool.qsize(), MAXSIZE - 1)

        # Check state after simple request
        pool.urlopen("GET", "/")
        self.assertEqual(pool.pool.qsize(), MAXSIZE - 1)

        # Check state without release
        pool.urlopen("GET", "/", preload_content=False)
        self.assertEqual(pool.pool.qsize(), MAXSIZE - 2)

        pool.urlopen("GET", "/")
        self.assertEqual(pool.pool.qsize(), MAXSIZE - 2)

        # Check state after read
        pool.urlopen("GET", "/").data  # .data forces a full read of the body
        self.assertEqual(pool.pool.qsize(), MAXSIZE - 2)

        pool.urlopen("GET", "/")
        self.assertEqual(pool.pool.qsize(), MAXSIZE - 2)

    def test_release_conn_parameter(self):
        """release_conn=False keeps the connection checked out of the pool."""
        pool_capacity = 5
        pool = HTTPConnectionPool(self.host, self.port, maxsize=pool_capacity)
        self.assertEqual(pool.pool.qsize(), pool_capacity)

        # The connection is not returned, leaving one slot empty.
        pool.request("GET", "/", release_conn=False, preload_content=False)
        self.assertEqual(pool.pool.qsize(), pool_capacity - 1)

    def test_dns_error(self):
        """An unresolvable hostname exhausts retries with MaxRetryError."""
        pool = HTTPConnectionPool("thishostdoesnotexist.invalid", self.port, timeout=0.001)
        with self.assertRaises(MaxRetryError):
            pool.request("GET", "/test", retries=2)

    def test_source_address(self):
        """Requests should originate from the configured source address."""
        for addr, is_ipv6 in VALID_SOURCE_ADDRESSES:
            if is_ipv6 and not HAS_IPV6_AND_DNS:
                warnings.warn("No IPv6 support: skipping.", NoIPv6Warning)
                continue
            pool = HTTPConnectionPool(self.host, self.port, source_address=addr, retries=False)
            r = pool.request("GET", "/source_address")
            # /source_address is expected to echo the client address the
            # server observed.
            assert r.data == b(addr[0]), (
                "expected the response to contain the source address {addr}, "
                "but was {data}".format(data=r.data, addr=b(addr[0]))
            )

    def test_source_address_error(self):
        """Binding to an invalid source address raises ProtocolError."""
        for bad_addr in INVALID_SOURCE_ADDRESSES:
            pool = HTTPConnectionPool(self.host, self.port, source_address=bad_addr, retries=False)
            self.assertRaises(ProtocolError, pool.request, "GET", "/source_address")

    def test_stream_keepalive(self):
        """Streaming chunked responses should still reuse the connection."""
        num_requests = 2

        for _ in range(num_requests):
            resp = self.pool.request(
                "GET", "/chunked", headers={"Connection": "keep-alive"}, preload_content=False, retries=False
            )
            for piece in resp.stream():
                self.assertEqual(piece, b"123")

        self.assertEqual(self.pool.num_connections, 1)
        self.assertEqual(self.pool.num_requests, num_requests)

    def test_chunked_gzip(self):
        """Chunked gzip-encoded bodies are decoded transparently when streamed."""
        resp = self.pool.request("GET", "/chunked_gzip", preload_content=False, decode_content=True)
        self.assertEqual(resp.read(), b"123" * 4)

    def test_cleanup_on_connection_error(self):
        """
        Test that connections are recycled to the pool on
        connection errors where no http response is received.
        """
        poolsize = 3
        with HTTPConnectionPool(self.host, self.port, maxsize=poolsize, block=True) as http:
            self.assertEqual(http.pool.qsize(), poolsize)

            # Force a MaxRetryError by disallowing retries on the redirect.
            # We won't get a final response for this request and so the
            # conn won't be implicitly returned to the pool.
            self.assertRaises(
                MaxRetryError, http.request, "GET", "/redirect", fields={"target": "/"}, release_conn=False, retries=0
            )

            r = http.request("GET", "/redirect", fields={"target": "/"}, release_conn=False, retries=1)
            r.release_conn()

            # the pool should still contain poolsize elements
            self.assertEqual(http.pool.qsize(), http.pool.maxsize)
Code example #29
0
File: test_connectionpool.py Project: gruns/urllib3
class TestConnectionPool(HTTPDummyServerTestCase):

    def setUp(self):
        # One shared pool against the dummy test server, used by most tests.
        self.pool = HTTPConnectionPool(self.host, self.port)

    def test_get(self):
        """A GET to /specific_method succeeds when the method matches."""
        resp = self.pool.request('GET', '/specific_method', fields={'method': 'GET'})
        self.assertEqual(resp.status, 200, resp.data)

    def test_post_url(self):
        """A POST to /specific_method succeeds when the method matches."""
        resp = self.pool.request('POST', '/specific_method', fields={'method': 'POST'})
        self.assertEqual(resp.status, 200, resp.data)

    def test_urlopen_put(self):
        """urlopen() accepts a pre-built path with a query string."""
        resp = self.pool.urlopen('PUT', '/specific_method?method=PUT')
        self.assertEqual(resp.status, 200, resp.data)

    def test_wrong_specific_method(self):
        """Mismatched methods get a 400 from the dummy server."""
        # To make sure the dummy server is actually returning failed responses
        for sent_method, expected_method in (('GET', 'POST'), ('POST', 'GET')):
            resp = self.pool.request(sent_method, '/specific_method',
                                     fields={'method': expected_method})
            self.assertEqual(resp.status, 400, resp.data)

    def test_upload(self):
        """A multipart file upload is accepted by the server."""
        contents = "I'm in ur multipart form-data, hazing a cheezburgr"
        fields = {
            'upload_param': 'filefield',
            'upload_filename': 'lolcat.txt',
            'upload_size': len(contents),
            'filefield': ('lolcat.txt', contents),
        }

        resp = self.pool.request('POST', '/upload', fields=fields)
        self.assertEqual(resp.status, 200, resp.data)

    def test_one_name_multiple_values(self):
        """Repeated field names survive urlencoded and multipart encoding."""
        fields = [('foo', 'a'), ('foo', 'b')]

        # urlencode
        resp = self.pool.request('GET', '/echo', fields=fields)
        self.assertEqual(resp.data, b'foo=a&foo=b')

        # multipart
        resp = self.pool.request('POST', '/echo', fields=fields)
        self.assertEqual(resp.data.count(b'name="foo"'), 2)


    def test_unicode_upload(self):
        """Unicode field names and filenames upload correctly."""
        field = u('myfile')
        fname = u('\xe2\x99\xa5.txt')
        payload = u('\xe2\x99\xa5').encode('utf8')

        fields = {
            u('upload_param'): field,
            u('upload_filename'): fname,
            u('upload_size'): len(payload),
            field: (fname, payload),
        }

        resp = self.pool.request('POST', '/upload', fields=fields)
        self.assertEqual(resp.status, 200, resp.data)

    def test_timeout_float(self):
        """A bare float pool timeout triggers ReadTimeoutError on slow reads."""
        # Pool-global timeout shorter than the server-side sleep.
        pool = HTTPConnectionPool(self.host, self.port, timeout=0.001)
        self.assertRaises(ReadTimeoutError, pool.request, 'GET', '/sleep?seconds=0.005')

    def test_conn_closed(self):
        """A read timeout should leave the connection's socket unusable."""
        pool = HTTPConnectionPool(self.host, self.port, timeout=0.001)
        conn = pool._get_conn()
        # Return the conn so the subsequent urlopen() picks up this exact one.
        pool._put_conn(conn)
        try:
            url = '/sleep?seconds=0.005'
            pool.urlopen('GET', url)
            self.fail("The request should fail with a timeout error.")
        except ReadTimeoutError:
            # If a socket object is still attached, it must be closed:
            # further recv() calls are expected to raise.
            if conn.sock:
                self.assertRaises(socket.error, conn.sock.recv, 1024)
        finally:
            pool._put_conn(conn)

    def test_nagle(self):
        """ Test that connections have TCP_NODELAY turned on """
        pool = HTTPConnectionPool(self.host, self.port)
        connection = pool._get_conn()
        pool._make_request(connection, 'GET', '/')
        nodelay = connection.sock.getsockopt(socket.IPPROTO_TCP, socket.TCP_NODELAY)
        assert nodelay > 0, ("Expected TCP_NODELAY to be set on the "
                             "socket (with value greater than 0) "
                             "but instead was %s" % nodelay)

    def test_timeout(self):
        """Read timeouts apply at pool, per-request, and raw-float level."""
        url = '/sleep?seconds=0.005'
        # The server sleeps longer (0.005s) than the read timeout (0.001s).
        timeout = util.Timeout(read=0.001)

        # Pool-global timeout
        pool = HTTPConnectionPool(self.host, self.port, timeout=timeout)

        conn = pool._get_conn()
        self.assertRaises(ReadTimeoutError, pool._make_request,
                          conn, 'GET', url)
        pool._put_conn(conn)

        self.assertRaises(ReadTimeoutError, pool.request, 'GET', url)

        # Request-specific timeouts should raise errors
        # (the 0.5s pool default alone would let the request succeed).
        pool = HTTPConnectionPool(self.host, self.port, timeout=0.5)

        conn = pool._get_conn()
        self.assertRaises(ReadTimeoutError, pool._make_request,
                          conn, 'GET', url, timeout=timeout)
        pool._put_conn(conn)

        self.assertRaises(ReadTimeoutError, pool.request,
                          'GET', url, timeout=timeout)

        # Timeout int/float passed directly to request and _make_request should
        # raise a request timeout
        self.assertRaises(ReadTimeoutError, pool.request,
                          'GET', url, timeout=0.001)
        conn = pool._new_conn()
        self.assertRaises(ReadTimeoutError, pool._make_request, conn,
                          'GET', url, timeout=0.001)
        pool._put_conn(conn)

        # Timeout int/float passed directly to _make_request should not raise a
        # request timeout if it's a high value
        pool.request('GET', url, timeout=5)

    @requires_network
    @timed(0.1)
    def test_connect_timeout(self):
        # TARPIT_HOST is expected not to complete the TCP handshake, so the
        # 0.001s connect timeout should fire before any request is sent.
        url = '/sleep'
        timeout = util.Timeout(connect=0.001)

        # Pool-global timeout
        pool = HTTPConnectionPool(TARPIT_HOST, self.port, timeout=timeout)
        conn = pool._get_conn()
        self.assertRaises(ConnectTimeoutError, pool._make_request, conn, 'GET', url)
        pool._put_conn(conn)
        self.assertRaises(ConnectTimeoutError, pool.request, 'GET', url)

        # Request-specific connection timeouts
        # (the per-request timeout overrides the generous pool default).
        big_timeout = util.Timeout(read=0.5, connect=0.5)
        pool = HTTPConnectionPool(TARPIT_HOST, self.port, timeout=big_timeout)
        conn = pool._get_conn()
        self.assertRaises(ConnectTimeoutError, pool._make_request, conn, 'GET',
                          url, timeout=timeout)

        pool._put_conn(conn)
        self.assertRaises(ConnectTimeoutError, pool.request, 'GET', url,
                          timeout=timeout)


    def test_timeout_reset(self):
        """ If the read timeout isn't set, socket timeout should reset """
        sleep_url = '/sleep?seconds=0.005'
        connect_only = util.Timeout(connect=0.001)
        pool = HTTPConnectionPool(self.host, self.port, timeout=connect_only)
        connection = pool._get_conn()
        try:
            # Only a connect timeout is configured; the slow read must pass.
            pool._make_request(connection, 'GET', sleep_url)
        except ReadTimeoutError:
            self.fail("This request shouldn't trigger a read timeout.")

    @requires_network
    @timed(2.0)
    def test_total_timeout(self):
        url = '/sleep?seconds=0.005'

        # total (0.001s) is shorter than connect/read, so the connect phase
        # against the tarpit host should time out first.
        timeout = util.Timeout(connect=3, read=5, total=0.001)
        pool = HTTPConnectionPool(TARPIT_HOST, self.port, timeout=timeout)
        conn = pool._get_conn()
        self.assertRaises(ConnectTimeoutError, pool._make_request, conn, 'GET', url)

        # This will get the socket to raise an EAGAIN on the read
        timeout = util.Timeout(connect=3, read=0)
        pool = HTTPConnectionPool(self.host, self.port, timeout=timeout)
        conn = pool._get_conn()
        self.assertRaises(ReadTimeoutError, pool._make_request, conn, 'GET', url)

        # The connect should succeed and this should hit the read timeout
        timeout = util.Timeout(connect=3, read=5, total=0.002)
        pool = HTTPConnectionPool(self.host, self.port, timeout=timeout)
        conn = pool._get_conn()
        self.assertRaises(ReadTimeoutError, pool._make_request, conn, 'GET', url)

        # total=None must not disable an explicitly set connect timeout.
        timeout = util.Timeout(total=None, connect=0.001)
        pool = HTTPConnectionPool(TARPIT_HOST, self.port, timeout=timeout)
        conn = pool._get_conn()
        self.assertRaises(ConnectTimeoutError, pool._make_request, conn, 'GET',
                          url)

    def test_timeout_success(self):
        """Generous or absent timeouts never raise on repeated requests."""
        generous = util.Timeout(connect=3, read=5, total=None)
        pool = HTTPConnectionPool(self.host, self.port, timeout=generous)
        pool.request('GET', '/')
        # This should not raise a "Timeout already started" error
        pool.request('GET', '/')

        # Neither should the first request on a brand-new pool.
        HTTPConnectionPool(self.host, self.port, timeout=generous).request('GET', '/')

        # total=None on its own is also fine.
        no_total = util.Timeout(total=None)
        HTTPConnectionPool(self.host, self.port, timeout=no_total).request('GET', '/')

    def test_tunnel(self):
        # note the actual httplib.py has no tests for this functionality
        timeout = util.Timeout(total=None)
        pool = HTTPConnectionPool(self.host, self.port, timeout=timeout)
        conn = pool._get_conn()
        try:
            conn.set_tunnel(self.host, self.port)
        except AttributeError: # python 2.6
            conn._set_tunnel(self.host, self.port)

        # Stub out the tunnel handshake; we only care that it is invoked.
        conn._tunnel = mock.Mock(return_value=None)
        pool._make_request(conn, 'GET', '/')
        conn._tunnel.assert_called_once_with()

        # test that it's not called when tunnel is not set
        timeout = util.Timeout(total=None)
        pool = HTTPConnectionPool(self.host, self.port, timeout=timeout)
        conn = pool._get_conn()

        conn._tunnel = mock.Mock(return_value=None)
        pool._make_request(conn, 'GET', '/')
        self.assertEqual(conn._tunnel.called, False)


    def test_redirect(self):
        """/redirect gives 303 when unfollowed and the target body when followed."""
        fields = {'target': '/'}

        unfollowed = self.pool.request('GET', '/redirect', fields=fields, redirect=False)
        self.assertEqual(unfollowed.status, 303)

        followed = self.pool.request('GET', '/redirect', fields=fields)
        self.assertEqual(followed.status, 200)
        self.assertEqual(followed.data, b'Dummy server!')

    def test_max_retry(self):
        """Following a redirect with retries=0 raises MaxRetryError."""
        with self.assertRaises(MaxRetryError):
            self.pool.request('GET', '/redirect',
                              fields={'target': '/'},
                              retries=0)

    def test_disabled_retry(self):
        """retries=False disables both retrying and redirect handling."""
        # Disabled retries will disable redirect handling:
        resp = self.pool.request('GET', '/redirect',
                                 fields={'target': '/'},
                                 retries=False)
        self.assertEqual(resp.status, 303)

        bad_pool = HTTPConnectionPool('thishostdoesnotexist.invalid', self.port, timeout=0.001)
        self.assertRaises(ConnectionError, bad_pool.request, 'GET', '/test', retries=False)


    def test_keepalive(self):
        """Two keep-alive requests should share one underlying connection."""
        pool = HTTPConnectionPool(self.host, self.port, block=True, maxsize=1)

        for _ in range(2):
            resp = pool.request('GET', '/keepalive?close=0')

        self.assertEqual(resp.status, 200)
        self.assertEqual(pool.num_connections, 1)
        self.assertEqual(pool.num_requests, 2)

    def test_keepalive_close(self):
        """Connection reuse should follow the Connection response header."""
        pool = HTTPConnectionPool(self.host, self.port,
                                  block=True, maxsize=1, timeout=2)

        r = pool.request('GET', '/keepalive?close=1', retries=0,
                         headers={
                             "Connection": "close",
                         })

        self.assertEqual(pool.num_connections, 1)

        # The dummyserver will have responded with Connection:close,
        # and httplib will properly cleanup the socket.

        # We grab the HTTPConnection object straight from the Queue,
        # because _get_conn() is where the check & reset occurs
        # pylint: disable-msg=W0212
        conn = pool.pool.get()
        self.assertEqual(conn.sock, None)
        pool._put_conn(conn)

        # Now with keep-alive
        r = pool.request('GET', '/keepalive?close=0', retries=0,
                         headers={
                             "Connection": "keep-alive",
                         })

        # The dummyserver responded with Connection:keep-alive, the connection
        # persists.
        conn = pool.pool.get()
        self.assertNotEqual(conn.sock, None)
        pool._put_conn(conn)

        # Another request asking the server to close the connection. This one
        # should get cleaned up for the next request.
        r = pool.request('GET', '/keepalive?close=1', retries=0,
                         headers={
                             "Connection": "close",
                         })

        self.assertEqual(r.status, 200)

        conn = pool.pool.get()
        self.assertEqual(conn.sock, None)
        pool._put_conn(conn)

        # Next request
        r = pool.request('GET', '/keepalive?close=0')

    def test_post_with_urlencode(self):
        """A urlencoded POST body is echoed back verbatim."""
        fields = {'banana': 'hammock', 'lol': 'cat'}
        resp = self.pool.request('POST', '/echo', fields=fields, encode_multipart=False)
        self.assertEqual(resp.data.decode('utf-8'), urlencode(fields))

    def test_post_with_multipart(self):
        """Multipart-encoded POST fields should be echoed back equivalently."""
        data = {'banana': 'hammock', 'lol': 'cat'}
        r = self.pool.request('POST', '/echo',
                                    fields=data,
                                    encode_multipart=True)
        body = r.data.split(b'\r\n')

        encoded_data = encode_multipart_formdata(data)[0]
        expected_body = encoded_data.split(b'\r\n')

        # TODO: Get rid of extra parsing stuff when you can specify
        # a custom boundary to encode_multipart_formdata
        #
        # We need to loop the return lines because a timestamp is attached
        # from within encode_multipart_formdata. When the server echos back
        # the data, it has the timestamp from when the data was encoded, which
        # is not equivalent to when we run encode_multipart_formdata on
        # the data again.  Boundary lines (starting with b'--') therefore
        # differ between the two bodies and are skipped, not compared.
        for i, line in enumerate(body):
            if line.startswith(b'--'):
                continue

            self.assertEqual(body[i], expected_body[i])

    def test_check_gzip(self):
        """gzip-encoded responses are decoded transparently."""
        resp = self.pool.request('GET', '/encodingrequest',
                                 headers={'accept-encoding': 'gzip'})
        self.assertEqual(resp.headers.get('content-encoding'), 'gzip')
        self.assertEqual(resp.data, b'hello, world!')

    def test_check_deflate(self):
        """deflate-encoded responses are decoded transparently."""
        resp = self.pool.request('GET', '/encodingrequest',
                                 headers={'accept-encoding': 'deflate'})
        self.assertEqual(resp.headers.get('content-encoding'), 'deflate')
        self.assertEqual(resp.data, b'hello, world!')

    def test_bad_decode(self):
        """Unknown content-encodings from the server raise DecodeError."""
        for bad_encoding in ('garbage-deflate', 'garbage-gzip'):
            self.assertRaises(DecodeError, self.pool.request,
                              'GET', '/encodingrequest',
                              headers={'accept-encoding': bad_encoding})

    def test_connection_count(self):
        """Three requests through a maxsize=1 pool reuse one connection."""
        pool = HTTPConnectionPool(self.host, self.port, maxsize=1)

        for _ in range(3):
            pool.request('GET', '/')

        self.assertEqual(pool.num_connections, 1)
        self.assertEqual(pool.num_requests, 3)

    def test_connection_count_bigpool(self):
        """Even a large pool keeps reusing a single connection when serial."""
        http_pool = HTTPConnectionPool(self.host, self.port, maxsize=16)

        for _ in range(3):
            http_pool.request('GET', '/')

        self.assertEqual(http_pool.num_connections, 1)
        self.assertEqual(http_pool.num_requests, 3)

    def test_partial_response(self):
        """A streamed response can be consumed across several read() calls."""
        pool = HTTPConnectionPool(self.host, self.port, maxsize=1)

        fields = {'lol': 'cat'}
        expected = urlencode(fields).encode('utf-8')

        resp = pool.request('GET', '/echo', fields=fields, preload_content=False)

        self.assertEqual(resp.read(5), expected[:5])
        self.assertEqual(resp.read(), expected[5:])

    def test_lazy_load_twice(self):
        """Two concurrently partially-read responses on a maxsize=1 pool."""
        # This test is sad and confusing. Need to figure out what's
        # going on with partial reads and socket reuse.

        pool = HTTPConnectionPool(self.host, self.port, block=True, maxsize=1, timeout=2)

        payload_size = 1024 * 2
        first_chunk = 512

        # Fixed boundary so both encodings produce directly comparable bodies.
        boundary = 'foo'

        req_data = {'count': 'a' * payload_size}
        resp_data = encode_multipart_formdata(req_data, boundary=boundary)[0]

        req2_data = {'count': 'b' * payload_size}
        resp2_data = encode_multipart_formdata(req2_data, boundary=boundary)[0]

        r1 = pool.request('POST', '/echo', fields=req_data, multipart_boundary=boundary, preload_content=False)

        self.assertEqual(r1.read(first_chunk), resp_data[:first_chunk])

        try:
            # The only connection is still checked out by r1, so this is
            # expected to hit the 0.001s pool_timeout and raise.
            r2 = pool.request('POST', '/echo', fields=req2_data, multipart_boundary=boundary,
                                    preload_content=False, pool_timeout=0.001)

            # This branch should generally bail here, but maybe someday it will
            # work? Perhaps by some sort of magic. Consider it a TODO.

            self.assertEqual(r2.read(first_chunk), resp2_data[:first_chunk])

            self.assertEqual(r1.read(), resp_data[first_chunk:])
            self.assertEqual(r2.read(), resp2_data[first_chunk:])
            self.assertEqual(pool.num_requests, 2)

        except EmptyPoolError:
            self.assertEqual(r1.read(), resp_data[first_chunk:])
            self.assertEqual(pool.num_requests, 1)

        self.assertEqual(pool.num_connections, 1)

    def test_for_double_release(self):
        """A connection must never be released back to the pool twice.

        Tracks pool.pool.qsize() step by step: after removing one slot and
        leaving one response un-consumed, the queue size must stay at
        MAXSIZE-2 no matter how many further requests complete.
        """
        MAXSIZE=5

        # Check default state
        pool = HTTPConnectionPool(self.host, self.port, maxsize=MAXSIZE)
        self.assertEqual(pool.num_connections, 0)
        self.assertEqual(pool.pool.qsize(), MAXSIZE)

        # Make an empty slot for testing
        pool.pool.get()
        self.assertEqual(pool.pool.qsize(), MAXSIZE-1)

        # Check state after simple request
        pool.urlopen('GET', '/')
        self.assertEqual(pool.pool.qsize(), MAXSIZE-1)

        # Check state without release
        pool.urlopen('GET', '/', preload_content=False)
        self.assertEqual(pool.pool.qsize(), MAXSIZE-2)

        pool.urlopen('GET', '/')
        self.assertEqual(pool.pool.qsize(), MAXSIZE-2)

        # Check state after read
        pool.urlopen('GET', '/').data
        self.assertEqual(pool.pool.qsize(), MAXSIZE-2)

        pool.urlopen('GET', '/')
        self.assertEqual(pool.pool.qsize(), MAXSIZE-2)

    def test_release_conn_parameter(self):
        """release_conn=False must keep the connection checked out of the pool."""
        pool_size = 5
        pool = HTTPConnectionPool(self.host, self.port, maxsize=pool_size)
        self.assertEqual(pool.pool.qsize(), pool_size)

        # The connection is never returned, so one pool slot stays empty.
        pool.request('GET', '/', release_conn=False, preload_content=False)
        self.assertEqual(pool.pool.qsize(), pool_size - 1)

    def test_dns_error(self):
        """A hostname that cannot resolve must surface as MaxRetryError."""
        bad_host_pool = HTTPConnectionPool(
            'thishostdoesnotexist.invalid', self.port, timeout=0.001)
        self.assertRaises(
            MaxRetryError, bad_host_pool.request, 'GET', '/test', retries=2)

    @onlyPy26OrOlder
    def test_source_address_ignored(self):
        """On Python <= 2.6, source_address is dropped with a PythonVersionWarning."""
        # No warning is issued if source_address is omitted.
        with warnings.catch_warnings(record=True) as w:
            pool = HTTPConnectionPool(self.host, self.port)
            assert pool.request('GET', '/source_address').status == 200
            assert (
                not w or not issubclass(w[-1].category, PythonVersionWarning))

        # source_address is ignored in Python 2.6 and older. Warning issued.
        with warnings.catch_warnings(record=True) as w:
            for addr in INVALID_SOURCE_ADDRESSES:
                pool = HTTPConnectionPool(
                    self.host, self.port, source_address=addr)
                # Even invalid addresses succeed because the option is dropped.
                assert pool.request('GET', '/source_address').status == 200
            assert issubclass(w[-1].category, PythonVersionWarning)

    @onlyPy27OrNewer
    def test_source_address(self):
        """Each valid source address should be echoed back by the server."""
        for source in VALID_SOURCE_ADDRESSES:
            bound_pool = HTTPConnectionPool(
                self.host, self.port, source_address=source)
            response = bound_pool.request('GET', '/source_address')
            assert response.data == b(source[0])

    @onlyPy27OrNewer
    def test_source_address_error(self):
        """Binding to an invalid source address must exhaust retries."""
        for source in INVALID_SOURCE_ADDRESSES:
            broken_pool = HTTPConnectionPool(
                self.host, self.port, source_address=source)
            self.assertRaises(
                MaxRetryError, broken_pool.request, 'GET', '/source_address')

    @onlyPy3
    def test_httplib_headers_case_insensitive(self):
        """Response headers should round-trip with their original casing."""
        expected_headers = {'Content-Length': '0',
                            'Content-type': 'text/plain',
                            'Server': 'TornadoServer/%s' % tornado.version}
        response = self.pool.request('GET', '/specific_method',
                                     fields={'method': 'GET'})
        # Compare the whole dict so header-name casing is preserved exactly.
        self.assertEqual(expected_headers, dict(response.headers.items()))
# ---- Code example #30 (0 votes) ----
if len(sys.argv) != 3 or len(sys.argv[2].split(':')) != 2:
    print 'usage: tcp://pubsub-server:port host:port'
    sys.exit(1)

context = zmq.Context()
sub = context.socket(zmq.SUB)
sub.connect(sys.argv[1])
sub.setsockopt(zmq.SUBSCRIBE, '')
sub.setsockopt(zmq.HWM, 255)

host,port = sys.argv[2].split(':')
pool = HTTPConnectionPool(host,port=int(port),maxsize=4)

while True:
    multipart = sub.recv_multipart()
    path = multipart[0]
    content = ''.join(multipart[1:])
    try:
        r = pool.urlopen('POST',path,headers={'Content-Type':'application/gzip','Connection':'Keep-Alive','User-Agent' : 'ZMQ2http 0.1'},body=content,release_conn=True)
        print str(r.status) + ' ' + str(len(content))
        if r.status != 200:
           #Debug errors
           print r.data
           f = open("%.4f" % time() + '.gz', 'wb') 
           f.write(''.join(multipart[1:]))
           f.close()
    except Exception as e:
        print e
        pass
# ---- Code example #31 (0 votes) ----
class TestConnectionPool(HTTPDummyServerTestCase):
    """Integration tests for HTTPConnectionPool against the dummy HTTP server."""
    def setUp(self):
        # One shared pool pointing at the dummy server started by the base class.
        self.pool = HTTPConnectionPool(self.host, self.port)

    def test_get(self):
        """Basic GET should succeed."""
        r = self.pool.request("GET", "/specific_method", fields={"method": "GET"})
        self.assertEqual(r.status, 200, r.data)

    def test_post_url(self):
        """Basic POST should succeed."""
        r = self.pool.request("POST", "/specific_method", fields={"method": "POST"})
        self.assertEqual(r.status, 200, r.data)

    def test_urlopen_put(self):
        """urlopen() accepts a raw path including the query string."""
        r = self.pool.urlopen("PUT", "/specific_method?method=PUT")
        self.assertEqual(r.status, 200, r.data)

    def test_wrong_specific_method(self):
        """Sanity check: the dummy server rejects mismatched methods with 400."""
        # To make sure the dummy server is actually returning failed responses
        r = self.pool.request("GET", "/specific_method", fields={"method": "POST"})
        self.assertEqual(r.status, 400, r.data)

        r = self.pool.request("POST", "/specific_method", fields={"method": "GET"})
        self.assertEqual(r.status, 400, r.data)

    def test_upload(self):
        """Multipart file upload round-trips through the server."""
        data = "I'm in ur multipart form-data, hazing a cheezburgr"
        fields = {
            "upload_param": "filefield",
            "upload_filename": "lolcat.txt",
            "upload_size": len(data),
            "filefield": ("lolcat.txt", data),
        }

        r = self.pool.request("POST", "/upload", fields=fields)
        self.assertEqual(r.status, 200, r.data)

    def test_one_name_multiple_values(self):
        """Repeated field names are preserved in both body encodings."""
        fields = [("foo", "a"), ("foo", "b")]

        # urlencode
        r = self.pool.request("GET", "/echo", fields=fields)
        self.assertEqual(r.data, b"foo=a&foo=b")

        # multipart
        r = self.pool.request("POST", "/echo", fields=fields)
        self.assertEqual(r.data.count(b'name="foo"'), 2)

    def test_unicode_upload(self):
        """Uploads with non-ASCII field and file names should succeed."""
        fieldname = u("myfile")
        filename = u("\xe2\x99\xa5.txt")
        data = u("\xe2\x99\xa5").encode("utf8")
        size = len(data)

        fields = {
            u("upload_param"): fieldname,
            u("upload_filename"): filename,
            u("upload_size"): size,
            fieldname: (filename, data),
        }

        r = self.pool.request("POST", "/upload", fields=fields)
        self.assertEqual(r.status, 200, r.data)

    def test_timeout(self):
        """Both pool-global and per-request timeouts must fire."""
        url = "/sleep?seconds=0.005"
        timeout = 0.001

        # Pool-global timeout
        pool = HTTPConnectionPool(self.host, self.port, timeout=timeout)

        conn = pool._get_conn()
        self.assertRaises(SocketTimeout, pool._make_request, conn, "GET", url)
        pool._put_conn(conn)

        self.assertRaises(TimeoutError, pool.request, "GET", url)

        # Request-specific timeout
        pool = HTTPConnectionPool(self.host, self.port, timeout=0.5)

        conn = pool._get_conn()
        self.assertRaises(SocketTimeout, pool._make_request, conn, "GET", url, timeout=timeout)
        pool._put_conn(conn)

        self.assertRaises(TimeoutError, pool.request, "GET", url, timeout=timeout)

    def test_redirect(self):
        """redirect=False surfaces the 303; the default follows it to 200."""
        r = self.pool.request("GET", "/redirect", fields={"target": "/"}, redirect=False)
        self.assertEqual(r.status, 303)

        r = self.pool.request("GET", "/redirect", fields={"target": "/"})
        self.assertEqual(r.status, 200)
        self.assertEqual(r.data, b"Dummy server!")

    def test_maxretry(self):
        """retries=0 turns a redirect into MaxRetryError."""
        try:
            self.pool.request("GET", "/redirect", fields={"target": "/"}, retries=0)
            self.fail("Failed to raise MaxRetryError exception")
        except MaxRetryError:
            pass

    def test_keepalive(self):
        """Two keep-alive requests reuse a single pooled connection."""
        pool = HTTPConnectionPool(self.host, self.port, block=True, maxsize=1)

        r = pool.request("GET", "/keepalive?close=0")
        r = pool.request("GET", "/keepalive?close=0")

        self.assertEqual(r.status, 200)
        self.assertEqual(pool.num_connections, 1)
        self.assertEqual(pool.num_requests, 2)

    def test_keepalive_close(self):
        """Connection: close drops the socket; keep-alive preserves it."""
        # NOTE: This used to run against apache.org but it made the test suite
        # really slow and fail half the time. Setting it to skip until we can
        # make this run better locally.
        pool = HTTPConnectionPool(self.host, self.port, block=True, maxsize=1, timeout=2)

        r = pool.request("GET", "/keepalive?close=1", retries=0, headers={"Connection": "close"})

        self.assertEqual(pool.num_connections, 1)

        # The dummyserver will have responded with Connection:close,
        # and httplib will properly cleanup the socket.

        # We grab the HTTPConnection object straight from the Queue,
        # because _get_conn() is where the check & reset occurs
        # pylint: disable-msg=W0212
        conn = pool.pool.get()
        self.assertEqual(conn.sock, None)
        pool._put_conn(conn)

        # Now with keep-alive
        r = pool.request("GET", "/keepalive?close=0", retries=0, headers={"Connection": "keep-alive"})

        # The dummyserver responded with Connection:keep-alive, the connection
        # persists.
        conn = pool.pool.get()
        self.assertNotEqual(conn.sock, None)
        pool._put_conn(conn)

        # Another request asking the server to close the connection. This one
        # should get cleaned up for the next request.
        r = pool.request("GET", "/keepalive?close=1", retries=0, headers={"Connection": "close"})

        self.assertEqual(r.status, 200)

        conn = pool.pool.get()
        self.assertEqual(conn.sock, None)
        pool._put_conn(conn)

        # Next request
        r = pool.request("GET", "/keepalive?close=0")

    def test_post_with_urlencode(self):
        """encode_multipart=False sends an urlencoded body."""
        data = {"banana": "hammock", "lol": "cat"}
        r = self.pool.request("POST", "/echo", fields=data, encode_multipart=False)
        self.assertEqual(r.data.decode("utf-8"), urlencode(data))

    def test_post_with_multipart(self):
        """encode_multipart=True sends a multipart/form-data body."""
        data = {"banana": "hammock", "lol": "cat"}
        r = self.pool.request("POST", "/echo", fields=data, encode_multipart=True)
        body = r.data.split(b"\r\n")

        encoded_data = encode_multipart_formdata(data)[0]
        expected_body = encoded_data.split(b"\r\n")

        # TODO: Get rid of extra parsing stuff when you can specify
        # a custom boundary to encode_multipart_formdata
        """
        We need to loop the return lines because a timestamp is attached
        from within encode_multipart_formdata. When the server echos back
        the data, it has the timestamp from when the data was encoded, which
        is not equivalent to when we run encode_multipart_formdata on
        the data again.
        """
        for i, line in enumerate(body):
            if line.startswith(b"--"):
                continue

            self.assertEqual(body[i], expected_body[i])

    def test_check_gzip(self):
        """gzip-encoded responses are transparently decoded."""
        r = self.pool.request("GET", "/encodingrequest", headers={"accept-encoding": "gzip"})
        self.assertEqual(r.headers.get("content-encoding"), "gzip")
        self.assertEqual(r.data, b"hello, world!")

    def test_check_deflate(self):
        """deflate-encoded responses are transparently decoded."""
        r = self.pool.request("GET", "/encodingrequest", headers={"accept-encoding": "deflate"})
        self.assertEqual(r.headers.get("content-encoding"), "deflate")
        self.assertEqual(r.data, b"hello, world!")

    def test_bad_decode(self):
        """Garbage content-encodings raise DecodeError."""
        self.assertRaises(
            DecodeError, self.pool.request, "GET", "/encodingrequest", headers={"accept-encoding": "garbage-deflate"}
        )

        self.assertRaises(
            DecodeError, self.pool.request, "GET", "/encodingrequest", headers={"accept-encoding": "garbage-gzip"}
        )

    def test_connection_count(self):
        """A maxsize=1 pool reuses one connection across three requests."""
        pool = HTTPConnectionPool(self.host, self.port, maxsize=1)

        pool.request("GET", "/")
        pool.request("GET", "/")
        pool.request("GET", "/")

        self.assertEqual(pool.num_connections, 1)
        self.assertEqual(pool.num_requests, 3)

    def test_connection_count_bigpool(self):
        """Even a large pool opens only one connection for serial requests."""
        http_pool = HTTPConnectionPool(self.host, self.port, maxsize=16)

        http_pool.request("GET", "/")
        http_pool.request("GET", "/")
        http_pool.request("GET", "/")

        self.assertEqual(http_pool.num_connections, 1)
        self.assertEqual(http_pool.num_requests, 3)

    def test_partial_response(self):
        """A non-preloaded response body can be read incrementally."""
        pool = HTTPConnectionPool(self.host, self.port, maxsize=1)

        req_data = {"lol": "cat"}
        resp_data = urlencode(req_data).encode("utf-8")

        r = pool.request("GET", "/echo", fields=req_data, preload_content=False)

        self.assertEqual(r.read(5), resp_data[:5])
        self.assertEqual(r.read(), resp_data[5:])

    def test_lazy_load_twice(self):
        """Overlapping partial reads on a block=True, maxsize=1 pool."""
        # This test is sad and confusing. Need to figure out what's
        # going on with partial reads and socket reuse.

        pool = HTTPConnectionPool(self.host, self.port, block=True, maxsize=1, timeout=2)

        payload_size = 1024 * 2
        first_chunk = 512

        boundary = "foo"

        # Fixed boundary so the echoed multipart bodies are byte-predictable.
        req_data = {"count": "a" * payload_size}
        resp_data = encode_multipart_formdata(req_data, boundary=boundary)[0]

        req2_data = {"count": "b" * payload_size}
        resp2_data = encode_multipart_formdata(req2_data, boundary=boundary)[0]

        r1 = pool.request("POST", "/echo", fields=req_data, multipart_boundary=boundary, preload_content=False)

        self.assertEqual(r1.read(first_chunk), resp_data[:first_chunk])

        try:
            r2 = pool.request(
                "POST",
                "/echo",
                fields=req2_data,
                multipart_boundary=boundary,
                preload_content=False,
                pool_timeout=0.001,
            )

            # This branch should generally bail here, but maybe someday it will
            # work? Perhaps by some sort of magic. Consider it a TODO.

            self.assertEqual(r2.read(first_chunk), resp2_data[:first_chunk])

            self.assertEqual(r1.read(), resp_data[first_chunk:])
            self.assertEqual(r2.read(), resp2_data[first_chunk:])
            self.assertEqual(pool.num_requests, 2)

        except EmptyPoolError:
            # Expected path: the only connection is still checked out by r1.
            self.assertEqual(r1.read(), resp_data[first_chunk:])
            self.assertEqual(pool.num_requests, 1)

        self.assertEqual(pool.num_connections, 1)

    def test_for_double_release(self):
        """A connection must never be released back to the pool twice."""
        MAXSIZE = 5

        # Check default state
        pool = HTTPConnectionPool(self.host, self.port, maxsize=MAXSIZE)
        self.assertEqual(pool.num_connections, 0)
        self.assertEqual(pool.pool.qsize(), MAXSIZE)

        # Make an empty slot for testing
        pool.pool.get()
        self.assertEqual(pool.pool.qsize(), MAXSIZE - 1)

        # Check state after simple request
        pool.urlopen("GET", "/")
        self.assertEqual(pool.pool.qsize(), MAXSIZE - 1)

        # Check state without release
        pool.urlopen("GET", "/", preload_content=False)
        self.assertEqual(pool.pool.qsize(), MAXSIZE - 2)

        pool.urlopen("GET", "/")
        self.assertEqual(pool.pool.qsize(), MAXSIZE - 2)

        # Check state after read
        pool.urlopen("GET", "/").data
        self.assertEqual(pool.pool.qsize(), MAXSIZE - 2)

        pool.urlopen("GET", "/")
        self.assertEqual(pool.pool.qsize(), MAXSIZE - 2)

    def test_release_conn_parameter(self):
        """release_conn=False keeps the connection checked out of the pool."""
        MAXSIZE = 5
        pool = HTTPConnectionPool(self.host, self.port, maxsize=MAXSIZE)
        self.assertEqual(pool.pool.qsize(), MAXSIZE)

        # Make request without releasing connection
        pool.request("GET", "/", release_conn=False, preload_content=False)
        self.assertEqual(pool.pool.qsize(), MAXSIZE - 1)
# ---- Code example #32 (0 votes) ----
class TestFileBodiesOnRetryOrRedirect(HTTPDummyServerTestCase):
    def setUp(self):
        """Create a short-timeout pool and ensure it is closed after each test."""
        self.pool = HTTPConnectionPool(self.host, self.port, timeout=0.1)
        # Close the pool's sockets between tests so they don't leak; this
        # mirrors the other setUp variants in this file that register the
        # same cleanup.
        self.addCleanup(self.pool.close)

    def test_retries_put_filehandle(self):
        """HTTP PUT retry with a file-like object should not timeout"""
        content_length = 65535  # httplib reads in 8k chunks; use a larger body
        payload = io.BytesIO(b'A' * content_length)
        resp = self.pool.urlopen(
            'PUT',
            '/successful_retry',
            headers={
                'test-name': 'test_retries_put_filehandle',
                'Content-Length': str(content_length),
            },
            retries=Retry(total=3, status_forcelist=[418]),
            body=payload,
            assert_same_host=False,
            redirect=False,
        )
        self.assertEqual(resp.status, 200)

    def test_redirect_put_file(self):
        """PUT with file object should work with a redirection response"""
        content_length = 65535  # httplib reads in 8k chunks; use a larger body
        payload = b'A' * content_length
        resp = self.pool.urlopen(
            'PUT',
            '/redirect?target=/echo&status=307',
            headers={
                'test-name': 'test_redirect_put_file',
                'Content-Length': str(content_length),
            },
            retries=Retry(total=3, status_forcelist=[418]),
            body=io.BytesIO(payload),
            assert_same_host=False,
            redirect=True,
        )
        self.assertEqual(resp.status, 200)
        # The 307 redirect must re-send the rewound body to /echo intact.
        self.assertEqual(resp.data, payload)

    def test_redirect_with_failed_tell(self):
        """Abort request if failed to get a position from tell()"""
        class BadTellObject(io.BytesIO):
            # Simulates a body whose position cannot be recorded, so the
            # redirect machinery has no way to rewind it for the next request.
            def tell(self):
                raise IOError

        body = BadTellObject(b'the data')
        url = '/redirect?target=/successful_retry'
        # httplib uses fileno if Content-Length isn't supplied,
        # which is unsupported by BytesIO.
        headers = {'Content-Length': '8'}
        try:
            self.pool.urlopen('PUT', url, headers=headers, body=body)
            self.fail('PUT successful despite failed rewind.')
        except UnrewindableBodyError as e:
            self.assertTrue('Unable to record file position for' in str(e))