def testInterleaved4(self):
    """tests that the http server returns pipelined responses in request order"""
    cnn = HTTPConnection()
    cnn.connect(('localhost', SERVER_PORT))
    #we pipeline 2 requests on a single connection: the first sleeps 3 seconds,
    #the second sleeps 1 second. without pipelining, fetching the 2 urls one after
    #another would take about 4 seconds; with pipelining the total should be about
    #3 seconds, and the responses must come back in the order the requests were sent
    start = time.time()
    cnn.send(cnn.get('/sleep?3'))
    cnn.send(cnn.get('/sleep?1'))
    response1 = cnn.receive()
    response2 = cnn.receive()
    #we expect response1 to be returned first, because its request was sent first,
    #even though it takes longer
    self.assertEquals('slept 3', response1.body)
    self.assertEquals('slept 1', response2.body)
    end = time.time()
    self.assertAlmostEqual(3, end - start, places=1)
    cnn.close()
def thermometers():
    #ask the hardware server for the current thermometer state and return the decoded JSON
    cnn = HTTPConnection()
    addr = (hwserver, hwport)
    cnn.connect(addr)
    req = cnn.get("/temperature")
    req.add_header("Connection", "close")
    res = cnn.perform(req)
    if res.status_code != 200:
        raise RuntimeError("Could not query thermometers state: error %s" % res.status_code)
    return json.loads(res.body)
def relay_set(relay, val):
    # query hwserver
    cnn = HTTPConnection()
    addr = (hwserver, hwport)
    cnn.connect(addr)
    req = cnn.get(str("/relay/%s/%d" % ("on" if val else "off", relay)))
    req.add_header("Content-type", "application/x-www-form-urlencoded")
    req.add_header("Connection", "close")
    res = cnn.perform(req)
    if res.status_code != 200:
        print "Could not query relay: error %s" % res.status_code
def main():
    cnn = HTTPConnection()
    cnn.connect(('www.google.com', 80))
    request = cnn.get('/')
    response = cnn.perform(request)
    print response.status
    print response.headers
    print response.body
    cnn.close()
def testSimple(self):
    cnn = HTTPConnection()
    cnn.connect(('localhost', SERVER_PORT))
    request = cnn.get('/hello/1')
    response = cnn.perform(request)
    self.assertEquals(200, response.status_code)
    chunks = list(response.iter)
    self.assertEquals('Hello World 1', chunks[0])
    request = cnn.get('/hello/2')
    response = cnn.perform(request)
    self.assertEquals(200, response.status_code)
    body = response.body
    self.assertEquals('Hello World 2', body)
    request = cnn.get('/xxx')
    response = cnn.perform(request)
    self.assertEquals(404, response.status_code)
    chunks = list(response)
    self.assertEquals('Not Found', chunks[0])
    cnn.close()
def testHTTPReadTimeout(self):
    self.server.read_timeout = 2
    cnn = HTTPConnection()
    try:
        cnn.connect(('localhost', SERVER_PORT))
        Tasklet.sleep(1)
        response = cnn.perform(cnn.get('/hello/1'))
        self.assertEquals('HTTP/1.1 200 OK', response.status)
        self.assertEquals('Hello World 1', response.body)
        #wait past the server's read timeout; the next request should fail with an HTTPError
        Tasklet.sleep(3)
        try:
            list(cnn.perform(cnn.get('/hello/2')))
            self.fail('expected eof')
        except HTTPError, e:
            pass
    except:
        self.fail('expected http error')
def fetch_post(target, id):
    cnn = HTTPConnection()
    try:
        cnn.connect((target["host"], 80))
        try:
            start = time.time()
            req = cnn.get("/posts/%s" % id)
            res = cnn.perform(req)
            if res.status_code != 200:
                store_error(target, "fetch_post: %d" % res.status_code)
            print "get time=%f" % (time.time() - start)
        finally:
            cnn.close()
    except IOError as e:
        store_error(target, e)
def testInterleaved1(self):
    cnn = HTTPConnection()
    cnn.connect(('localhost', SERVER_PORT))
    cnn.send(cnn.get('/hello/1'))
    cnn.send(cnn.get('/hello/2'))
    response = cnn.receive()
    self.assertEquals(200, response.status_code)
    chunks = list(response)
    self.assertEquals('Hello World 1', chunks[0])
    response = cnn.receive()
    self.assertEquals(200, response.status_code)
    chunks = list(response)
    self.assertEquals('Hello World 2', chunks[0])
    cnn.close()
def monitor(state):
    # prepare url
    url = '/monitor'
    first = True
    for key, val in state.iteritems():
        if first:
            url += '?'
            first = False
        else:
            url += '&'
        url += '%s=%s' % (key, val)
    # query hwserver
    cnn = HTTPConnection()
    addr = (hwserver, hwport)
    cnn.connect(addr)
    req = cnn.get(str(url))
    req.add_header("Content-type", "application/x-www-form-urlencoded")
    req.add_header("Connection", "close")
    res = cnn.perform(req)
    if res.status_code != 200:
        raise RuntimeError("Could not query monitor: error %s" % res.status_code)
    return json.loads(res.body)
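#A hedged alternative sketch, not part of the original code: the query string that
#monitor() above builds by hand could also be assembled with the standard-library
#urllib.urlencode, which escapes keys and values. The monitor_urlencoded name is
#hypothetical; hwserver, hwport and the /monitor endpoint are the same assumed
#module-level names used above.
import urllib

def monitor_urlencoded(state):
    url = '/monitor'
    if state:
        #urlencode escapes each key=value pair and joins them with '&'
        url += '?' + urllib.urlencode(state)
    cnn = HTTPConnection()
    cnn.connect((hwserver, hwport))
    req = cnn.get(url)
    req.add_header("Connection", "close")
    res = cnn.perform(req)
    if res.status_code != 200:
        raise RuntimeError("Could not query monitor: error %s" % res.status_code)
    return json.loads(res.body)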
def download(self, url):
    "Downloads given URL and returns it"
    if url is None:
        raise DownloadError()
    if type(url) == unicode:
        url = url.encode("utf-8")
    url_obj = urlparse.urlparse(url, "http", False)
    if url_obj.scheme != "http":
        self.error("Scheme '%s' is not supported", url_obj.scheme)
    elif url_obj.hostname is None:
        self.error("Empty hostname: %s", url)
    else:
        cnn = HTTPConnection()
        try:
            with Timeout.push(50):
                cnn.set_limit(20000000)
                port = url_obj.port
                if port is None:
                    port = 80
                cnn.connect((url_obj.hostname, port))
                #rebuild the request path; the query string (if any) must be re-attached with '?'
                path = url_obj.path
                if url_obj.query:
                    path += "?" + url_obj.query
                request = cnn.get(path)
                request.add_header("Connection", "close")
                response = cnn.perform(request)
                if response.status_code != 200:
                    self.error("Error downloading %s: %s %s", url, response.status_code, response.status)
                    return ""
                return response.body
        except TimeoutError:
            self.error("Timeout downloading %s", url)
        except Exception as e:
            self.error("Error downloading %s: %s", url, str(e))
        finally:
            try:
                cnn.close()
            except Exception:
                pass
    raise DownloadError()
def testInterleaved3(self):
    """tests that http client and server really support pipelining"""
    cnn = HTTPConnection()
    cnn.connect(('localhost', SERVER_PORT))
    #we do 2 requests that should take 2 seconds to complete each.
    #if server/client pipelining was not working, fetching the 2 urls
    #would take 4 seconds on a single connection
    #if pipelining works, it should take just 2 seconds
    start = time.time()
    cnn.send(cnn.get('/sleep?2'))
    cnn.send(cnn.get('/sleep?2'))
    list(cnn.receive())
    list(cnn.receive())
    end = time.time()
    self.assertAlmostEqual(2, end - start, places=1)
    cnn.close()
def main():
    cnn = HTTPConnection()
    cnn.connect(('www.google.com', 80))
    request = cnn.get('/')
    #you can send multiple http requests on the same connection:
    cnn.send(request)  #request 1
    cnn.send(request)  #request 2
    #and receive the corresponding responses
    response1 = cnn.receive()
    response2 = cnn.receive()
    print response1.status
    print response1.headers
    print response1.body
    print response2.status
    print response2.headers
    print response2.body
    cnn.close()
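#A minimal sketch, not taken from the snippets above: a hypothetical convenience helper
#that wraps the connect/get/perform/close pattern these examples repeat. The name fetch()
#and its parameters are assumptions for illustration only; it uses just the HTTPConnection
#calls already shown (connect, get, add_header, perform, close).
def fetch(host, path, port=80):
    cnn = HTTPConnection()
    cnn.connect((host, port))
    try:
        request = cnn.get(path)
        request.add_header("Connection", "close")  #ask the server to close after this response
        response = cnn.perform(request)
        if response.status_code != 200:
            raise RuntimeError("GET %s failed: %s" % (path, response.status_code))
        return response.body
    finally:
        cnn.close()

#usage, following the google example above:
#body = fetch('www.google.com', '/')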