def readhttp(address):
    """Fetch the given URL with a browser-like User-Agent and return the decoded body."""
    address = xutils.quote_unicode(address)
    req = urllib.request.Request(
        address,
        data=None,
        headers={
            'User-Agent': 'Mozilla/5.0 (Macintosh; Intel Mac OS X 10_9_3) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/35.0.1916.47 Safari/537.36'
        })
    # avoid shadowing the built-in `bytes` name
    content = xutils.urlopen(req).read()
    return xutils.decode_bytes(content)
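# Illustrative usage only (not part of the original module); the URL is a placeholder:
#   html = readhttp("https://example.com/some page")
#   # quote_unicode escapes spaces / non-ASCII characters before the request is sent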
def GET(self):
    self.response_headers = []
    url = xutils.get_argument("url")
    body = xutils.get_argument("body")
    method = xutils.get_argument("method")
    content_type = xutils.get_argument("content_type")
    cookie = xutils.get_argument("cookie") or ""

    # No URL yet: just show the empty form
    if url is None:
        return xtemplate.render("tools/curl.html")
    if not url.startswith("http"):
        url = "http://" + url
    url = xutils.quote_unicode(url)
    host = get_host(url)

    # Forward a browser-like set of request headers to the target host
    headers = OrderedDict()
    headers["Connection"] = "Keep-Alive"
    headers["Cache-Control"] = "max-age=0"
    headers["Content-Type"] = content_type
    headers["Host"] = host
    headers["Cookie"] = cookie
    putheader(headers, "User-Agent", "HTTP_USER_AGENT")
    putheader(headers, "Accept", "HTTP_ACCEPT")
    putheader(headers, "Accept-Encoding", "HTTP_ACCEPT_ENCODING")
    putheader(headers, "Accept-Language", "HTTP_ACCEPT_LANGUAGE")

    try:
        buf = self.do_http(method, host, url, headers, data=body)
        if isinstance(buf, bytes):
            # byte 0x8b in position 1 usually signals that the data stream is gzipped
            response = xutils.decode_bytes(buf)
        else:
            response = buf
    except Exception as e:
        xutils.print_exc()
        response = str(e)

    return xtemplate.render("tools/curl.html",
        url=url,
        status=self.status,
        method=method,
        body=body,
        response=response,
        cookie=cookie,
        response_headers=self.response_headers)
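# The helpers used above are not defined in this section. The sketches below are
# assumptions about their behavior, inferred from how GET() calls them; the real
# implementations elsewhere in the project may differ.
#
# def get_host(url):
#     # Extract the "host[:port]" part of the URL for the Host header.
#     from urllib.parse import urlparse
#     return urlparse(url).netloc
#
# def putheader(headers, name, env_key):
#     # Copy a header from the incoming WSGI environ into the outgoing header
#     # dict, skipping it when the client did not send one.
#     value = web.ctx.environ.get(env_key)
#     if value:
#         headers[name] = value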