def do_open(self, http_class, request):
    """Return the response object for the given request.

    Overrides the HTTPHandler method of the same name to return a
    FakeResponse instead of creating any network connections.

    Args:
        http_class: The http protocol being used.
        request: A urllib2.Request object.

    Returns:
        A FakeResponse object.
    """
    # Store the most recent request object so tests can inspect it.
    self.__class__.request = request
    if self._response_map:
        key = create_response_key(
            request.get_method(), request.get_full_url(), request.headers)
        if str(key) in self._response_map:
            (code, body, response_headers) = self._response_map[str(key)]
            return FakeResponse(code, body, response_headers)
        else:
            # pformat a concrete list, not an iterator, so the error message
            # actually shows the known keys instead of an iterator repr.
            raise Error('Unknown request %s %s'
                        '\nrequest:%s\nresponse_map:%s' % (
                            request.get_method(), request.get_full_url(),
                            str(key), pformat(list(self._response_map.keys()))))
    elif isinstance(self._response, Exception):
        # Raise the configured exception object itself; the original
        # `raise self` tried to raise the handler instance, which is a
        # TypeError at runtime.
        raise self._response
    else:
        return self._response
def post(self, filename, ccd_data, use_base64=False):
    """Post CCD XML documents to self._url as multipart attachments.

    filename   : The name of the file attachment
    ccd_data   : Iterable of XML documents to post (send)
    use_base64 : If true, encode the encrypted XML as base64.
                 Maybe necessary for 3DES

    Returns a (success, message) tuple.
    """
    attachments = ""
    payload = """--%s
Content-Disposition: attachment; name="%s"; filename="%s.xml"
Content-Type: text/xml

%s"""
    payload_uuid = str(uuid.uuid4())
    try:
        for data in ccd_data:
            if len(attachments) > 0:
                attachments += "\r\n\r\n"
            if use_base64:
                # b64encode returns bytes in Python 3; decode so the
                # %-format does not embed a b'...' repr in the body.
                attachments += payload % (payload_uuid, filename, filename,
                                          base64.b64encode(data).decode("ascii"))
            else:
                attachments += payload % (payload_uuid, filename, filename, data)
        if self._url.find("https") == 0:
            https = urllib.request.HTTPSHandler(debuglevel=1)
            opener = urllib.request.build_opener(https)
            urllib.request.install_opener(opener)
        else:
            print("**** POSTING TO LOCAL SERVER ****")
        request = urllib.request.Request(self._url)
        request.add_header("Content-Type",
                           "multipart/form-data; boundary=%s" % payload_uuid)
        request.add_header("User-Agent", "synthesis")
        # Request.add_data() was removed in Python 3; assign .data instead.
        request.data = attachments.encode("utf-8")
        if PRINT_HTTP_POST:
            print(request.header_items())
            print(request.data)
            print(request.get_method())
            print(request.host)
            return (True, "True")
        # The original referenced the Python 2 'urllib2' module, which is
        # undefined here; use urllib.request and decode the byte response
        # so the "202:" prefix comparison below can ever succeed.
        response = urllib.request.urlopen(request).read().decode("utf-8", "replace")
        # check for some sign of success within the response
        if response[0:4] == "202:":
            return (True, response)
        else:
            return (False, response)
    except Exception as err:
        return (False, "An error occurred while performing an HTTP-POST or receiving the response: (%s)" % str(err))
def testSetMethod(self):
    """setMethod() must be reflected by the generated request's HTTP verb."""
    for verb, expected in ((POST, "POST"), (GET, "GET")):
        self.wrapper.setMethod(verb)
        built = self._get_request(self.wrapper)
        self.assertEqual(expected, built.get_method())
def __getRepositoryItems__(self, item, state='all', page='0', direction='desc'):
    """Fetch and cache a repository collection (issues, pulls, ...).

    Returns the (response, parsed-JSON content) pair, served from the
    in-memory cache when the same method+URL was requested before.
    """
    if not self.repository:
        raise Exception("Github", "No repository set")
    request = self.__getRequest__("repos/" + self.repository + "/" + item + "?" +
                                  "page=" + page +
                                  ("&state=" + state if state else "") +
                                  ("&direction=" + direction if direction else ""))
    key = request.get_method() + ":" + request.get_full_url()
    if key not in self.__data_caching__:
        response = urllib.request.urlopen(request)
        # Reuse the computed cache key instead of rebuilding it from the
        # request a second time.
        self.__data_caching__[key] = {
            "response": response,
            "content": json.loads(response.read().decode("utf-8"))
        }
    return self.__data_caching__[key]["response"], self.__data_caching__[key]["content"]
def call(
    self,
    method_name: str,
    payload: Any,
    service_annotations: Mapping[str, Union[str, int, None]],
    method_annotations: Mapping[str, Union[str, int, None]],
    parameter_annotations: Mapping[str, Mapping[str, Union[str, int, None]]]
) -> Tuple[bool, Any]:
    """Invoke *method_name* on the remote service with a signed JOSE payload.

    Returns (successful, content) where *successful* is True for any
    2xx/3xx HTTP status and *content* is the parsed JSON response body.
    The annotation mappings are accepted but unused in this implementation.
    """
    logger = self.logger.getChild('call')
    # The method name is carried inside the signed payload under '_method'.
    signed_payload = sign({
        '_method': method_name,
        **payload
    }, self.secret, algorithm=self.algorithm)
    request = urllib.request.Request(
        self.url,
        data=signed_payload,
        headers={
            'Accept': 'application/json',
            'Content-Type': 'application/jose',
        },
    )
    logger.debug('An HTTP request for %s():\n%s %s\n%s\n\n%s',
                 method_name, request.get_method(), request.full_url,
                 request.headers, request.data)
    response = urllib.request.urlopen(request)
    try:
        content = json.load(
            io.TextIOWrapper(response, 'utf-8')  # type: ignore
        )
    except ValueError:
        # NOTE(review): HTTPResponse objects are generally not seekable, so
        # this seek(0) may itself fail and the raw body read below could be
        # partially consumed — verify against the actual response type.
        response.seek(0)
        raise UnexpectedNirumResponseError(response.read().decode())
    status = response.status  # type: ignore
    return 200 <= status < 400, content
def _make_http_response(self, endpoint, request, specific_signer=None):
    """Build an HttpResponse object for *request* targeted at *endpoint*.

    Serializes form body params, signs the request, and attaches the SDK
    headers.  *specific_signer* overrides the client's default signer.
    """
    body = None
    body_params = request.get_body_params()
    if body_params:
        body = urllib.parse.urlencode(body_params)
        request.set_content(body)
        request.set_content_type(format_type.APPLICATION_FORM)
    elif request.get_content() and "Content-Type" not in request.get_headers():
        request.set_content_type(format_type.APPLICATION_OCTET_STREAM)
    method = request.get_method()
    signer = self._signer if specific_signer is None else specific_signer
    header, url = signer.sign(self.__region_id, request)
    if self.get_user_agent() is not None:
        header['User-Agent'] = self.get_user_agent()
    if header is None:
        header = {}
    header['x-sdk-client'] = 'python/2.0.0'
    protocol = request.get_protocol_type()
    response = HttpResponse(
        endpoint, url, method, header, protocol, request.get_content(),
        self._port, timeout=self._timeout)
    if body_params:
        # Reuse the body encoded above instead of url-encoding the same
        # parameters a second time.
        response.set_content(body, "utf-8", format_type.APPLICATION_FORM)
    return response
def get_html(url):
    """Fetch *url* with browser-like headers; return 'ok' on HTTP 200, else None."""
    headers = {
        'Accept': '*/*',
        'Accept-Encoding': 'gzip, deflate, sdch',
        'Accept-Language': 'zh-CN,zh;q=0.8',
        'Host': 'www.360kan.com',
        'Proxy-Connection': 'keep-alive',
        'User-Agent': 'Mozilla/5.0 (Windows NT 6.1; WOW64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/52.0.2743.116 Safari/537.36'
    }
    request = urllib.request.Request(url, headers=headers)
    response = urllib.request.urlopen(request)
    # Removed `response.encoding = 'utf-8'`: http.client responses have no
    # such attribute — the assignment was a no-op borrowed from `requests`.
    print(request.get_method())
    # response.read()
    if response.code == 200:
        return 'ok'
    return None
def default_open(self, request):
    """Serve GET requests from the cache when possible, else fetch directly."""
    url = request.get_full_url()
    is_cached_get = (request.get_method() == "GET"
                     and CachedResponse.ExistsInCache(self.cacheLocation, url))
    if is_cached_get:
        # print "CacheHandler: Returning CACHED response for %s" % url
        return CachedResponse(self.cacheLocation, url, setCacheHeader=True)
    return urllib.request.urlopen(url)
def open(self, request):
    """Issue *request* and dump method, final URL, status, headers and body."""
    with urllib.request.urlopen(request) as reply:
        method, final_url = request.get_method(), reply.geturl()
        print('请求方法: {},请求URL: {}'.format(method, final_url))
        status, message = reply.getcode(), reply.msg
        print('响应状态码: {}, 消息: {}'.format(status, message))
        print(reply.info())
        body = reply.read()
        print(body.decode())
def _get_parameters_from_request(request): if request.get_method() == 'GET': pieces_str = urlparse(request.get_full_url()).query else: pieces_str = request.data.decode('ascii') return parse_qs(pieces_str)
def glsrequest(uri, method, data=None):
    '''
    Returns xml node tree as Element instance.

    'uri' may be absolute or relative to _BASEURI.
    'method' in ('GET', 'POST', 'PUT')
    'data' can be a string or Element instance
    '''
    if method not in {'GET', 'POST', 'PUT'}:
        raise GlslibException(MSGUNSUPPORTEDMETHOD % method)
    if not uri.startswith(_BASEURI):
        uri = _BASEURI.rstrip('/') + '/' + uri.lstrip('/')
    request = urllib.request.Request(uri)
    request.add_header("Authorization", "Basic %s" % _AUTHSTR)
    if etree.iselement(data):
        # tostring generates bytestring (as required for data)
        data = etree.tostring(data)
        request.add_header('Content-Type', 'application/xml')
        # Request.add_data() was removed in Python 3; assign .data instead.
        request.data = data
    request.get_method = lambda: method
    msg = '%s %s\n%s\n%s' % (request.get_method(),
                             request.get_full_url(),
                             request.headers,
                             data.decode('utf-8') if data else '')
    logger.debug(msg)
    try:
        r = urllib.request.urlopen(request)
        return etree.XML(r.read())
    except urllib.error.HTTPError as httperr:
        logger.error(httperr.read())
        raise
    except urllib.error.URLError as urlerr:
        logger.error(request.get_full_url())
        raise
def xmlrpc(request, func):
    """xmlrpc(request:Request, func:callable) : string

    Processes the body of an XML-RPC request, and calls 'func' with
    two arguments, a string containing the method name and a tuple of
    parameters.
    """
    # This function shadows the stdlib 'xmlrpc' package name at module
    # level, so 'xmlrpc.client' must be imported under an alias — the
    # original 'xmlrpc.client.loads' resolved against this function and
    # raised AttributeError.
    from xmlrpc import client as xmlrpc_client

    # Get contents of POST body
    if request.get_method() != 'POST':
        request.response.set_status(405, "Only the POST method is accepted")
        return "XML-RPC handlers only accept the POST method."
    length = int(request.environ['CONTENT_LENGTH'])
    data = request.stdin.read(length)

    # Parse arguments
    params, method = xmlrpc_client.loads(data)

    try:
        result = func(method, params)
    except xmlrpc_client.Fault as exc:
        result = exc
    except:
        # report exception back to client
        result = xmlrpc_client.dumps(
            xmlrpc_client.Fault(
                1, "%s:%s" % (sys.exc_info()[0], sys.exc_info()[1])))
    else:
        result = (result, )
        result = xmlrpc_client.dumps(result, methodresponse=1)
    request.response.set_content_type('text/xml')
    return result
def start_request(self, request):
    """Send *request*'s request line and headers over the pooled connection."""
    with self._setup_request(request):
        self._check_reusable()
        conn = self._connection
        conn.putrequest(request.get_method(), request.selector)
        for name, value in request.header_items():
            conn.putheader(name, value)
        conn.endheaders()
def _http_response(self, request, response): txn = { 'url': request.full_url, 'method': request.get_method(), 'status_code': response.code, 'response_headers': response.headers, } self.transactions.append(txn) return response
def _http_response(self, request, response): fetch = { 'url': request.full_url, 'method': request.get_method(), 'response_code': response.code, 'response_headers': response.headers, } self.fetches.append(fetch) return response
def fake_urlopen(request):
    """Test stand-in for urlopen, yielding a canned response.

    Relies on enclosing-scope variables: request_hook, empty_methods,
    http_exc and response.
    """
    if request_hook is not None:
        request_hook(request)
    # HTTP methods configured to yield an empty body short-circuit here.
    if request.get_method() in empty_methods:
        yield io.StringIO("")
        return
    if http_exc is not None:
        # HTTPError is urllib.error.HTTPError re-exported by urllib.request.
        # NOTE(review): presumably http_exc holds (url, code, msg, hdrs) and
        # the canned response is appended as the fp argument — TODO confirm
        # against the enclosing test helper.
        raise urllib.request.HTTPError(*(list(http_exc) + [response]))
    yield response
def test_create_post_request(self):
    """Test of the 'create_post_request' method."""
    built = self._http_core_foctory.create_post_request("http://url", "data")
    self._assert_request(built)
    self.assertEqual("data", built.get_data())
    self.assertEqual("POST", built.get_method())
def http_response(self, request, response):
    """Cache GET responses; non-GET responses pass through untouched."""
    if request.get_method() != "GET":
        return response
    url = request.get_full_url()
    if 'd-cache' in response.info():
        # Already served from cache: re-wrap with the cache marker header.
        return CachedResponse(self.cacheLocation, url, setCacheHeader=True)
    CachedResponse.StoreInCache(self.cacheLocation, url, response)
    return CachedResponse(self.cacheLocation, url, setCacheHeader=False)
def statuses_mentions(query=None):
    """Fetch the authenticated user's mentions timeline (Twitter API v1).

    query: optional list of extra [name, value] parameter pairs.
    Returns the open HTTP response; also prints the OAuth header,
    method and URL for debugging.
    """
    base_url = "http://api.twitter.com/1/statuses/mentions.json"
    parameters = generate_base_data()
    query_string = ""
    # 'query is not None' instead of 'not query == None' (PEP 8).
    if query is not None:
        query_string = add_params(query)
        parameters.extend(query)
    request = urllib.request.Request(base_url + query_string)
    signature = myoauth.oauth_sign(
        request.get_method(), base_url, parameters,
        OAUTH_CONSUMER_SECRET, OAUTH_TOKEN_SECRET)
    header_string = generate_header_string(parameters,
                                           [["oauth_signature", signature]])
    request.add_header("Authorization", header_string)
    print(request.get_header("Authorization"))
    print(request.get_method())
    print(request.get_full_url())
    return urllib.request.urlopen(request)
def default_open(self, request):
    '''Respond to the request by first checking if there is a cached
    response otherwise defer to http handler'''
    url = request.get_full_url()
    cacheable = request.get_method() == "GET"
    if cacheable and CachedResponse.ExistsInCache(self.cacheLocation, url):
        # print "CacheHandler: Returning CACHED response for %s" % url
        return CachedResponse(self.cacheLocation, url, setCacheHeader=True)
    return None  # let the next handler try to handle the request
def test_make_request(self):
    """v3 requests have correct URL and Auth header."""
    getter = GetMazaDataAPI3('foo', 'bar')
    built = getter.make_request()
    token = base64.encodebytes(b'foo:bar').decode('ascii')
    self.assertEqual('GET', built.get_method())
    self.assertEqual('https://uccs.landscape.canonical.com/api/3/foo',
                     built.get_full_url())
    self.assertEqual('Basic %s' % token, built.headers['Authorization'])
def __getRequest__(self, endpoint, full_url=False): if full_url: request = urllib.request.Request(endpoint) else: url = urllib.parse.urljoin(self.API_URL, endpoint) request = urllib.request.Request(url) if self.verbose: print("[Github] " + request.get_method() + " " + request.get_full_url()) if self.loginBase64: request.add_header('Authorization', "Basic " + self.loginBase64) return request
def http_request(self, request):
    """Log the outgoing request line and its headers, then pass it on."""
    full_url = request.get_full_url()
    # Strip scheme+host to get the path portion for the request line.
    host = request.host
    url_path = full_url[full_url.find(host) + len(host):]
    log_url(self.log, "Requesting: ", full_url, TRACE_LEVEL)
    self.log.log(self.log_level, "%s %s" % (request.get_method(), url_path))
    for name, value in request.header_items():
        self.log.log(self.log_level, " . %s: %s" % (name, value))
    return request
def _createRequest(self):
    """Create the base request, then OAuth-sign its URL, headers and body."""
    request = super()._createRequest()
    signed_url, signed_headers, signed_body = self.client.sign(
        request.get_full_url(), request.get_method(),
        request.data, request.headers)
    request.full_url = signed_url
    request.headers = signed_headers
    request.data = signed_body
    return request
def requestB(opener, url, headers, data, method='POST'):
    """Send *data* to *url* through *opener*; return (body, status_code).

    GET requests append the urlencoded data to the URL; POST requests
    send it as the body with a Content-Length header.  HTTP errors are
    reported via printAnswer and their body/code are still returned.
    """
    answer = ''
    retcode = None
    additionalInfo = '[None]'
    contentLenght = None  # NOTE(review): typo for "contentLength"; only used locally
    if ImportCookie:
        # Flatten the cookie dict in *headers* into a Cookie header string.
        headers = processingCookies(headers)
    data = urllib.parse.urlencode(data)
    if method == 'GET':
        if data:
            url = url + '?' + data
            data = None
    elif method == 'POST':
        headers['Content-Length'] = len(data)
        data = data.encode()
    request = urllib.request.Request(url, data, headers)
    try:
        printRequest(request.get_method(), request.get_full_url())
        if data:
            printData(data)
        f = opener.open(request)
        headers = f.getheaders()
        code = f.code
        retcode = code
        answer = f.read()
        # The digest is computed but never used; kept for parity.
        m = hashlib.md5()
        m.update(answer)
        for h in headers:
            if h[0].lower() == 'content-length':
                contentLenght = h[1]
                additionalInfo = '[' + str(h[1]) + ']'
        printAnswer(code, additionalInfo)
        printHeaders(headers, 'Set-Cookie')
    except urllib.error.HTTPError as error:
        # NOTE(review): this prints once per response header rather than
        # once per response — likely unintended, preserved as-is.
        for h in error.headers:
            if h.lower() == 'content-length':
                printAnswer(str(error.code), ' [' + str(error.headers[h]) + ']')
            else:
                printAnswer(str(error.code), ' [-1]')
        retcode = error.code
        answer = error.read()
    except urllib.error.URLError as error:
        printAnswer(str(error))
    return answer, retcode
def requestB(opener,url, headers, data, method = 'POST'):
    """Send *data* to *url* through *opener*; return (body, status_code).

    NOTE(review): this appears to be a duplicate of another requestB
    definition in this file — the later definition wins at import time.
    """
    answer = ''
    retcode = None
    additionalInfo = '[None]'
    contentLenght = None  # NOTE(review): typo for "contentLength"; only used locally
    if ImportCookie:
        # Flatten the cookie dict in *headers* into a Cookie header string.
        headers = processingCookies(headers)
    data = urllib.parse.urlencode(data)
    if method == 'GET':
        if data:
            url = url + '?' + data
            data = None
    elif method == 'POST':
        headers['Content-Length'] = len(data)
        data = data.encode()
    request = urllib.request.Request(url,data,headers)
    try:
        printRequest(request.get_method(), request.get_full_url())
        if data:
            printData(data)
        f = opener.open(request)
        headers = f.getheaders()
        code = f.code
        retcode = code
        answer = f.read()
        # The digest is computed but never used; kept for parity.
        m = hashlib.md5()
        m.update(answer)
        for h in headers:
            if h[0].lower() == 'content-length':
                contentLenght = h[1]
                additionalInfo = '[' + str(h[1]) + ']'
        printAnswer(code,additionalInfo)
        printHeaders(headers,'Set-Cookie')
    except urllib.error.HTTPError as error:
        # NOTE(review): prints once per response header, not once per response.
        for h in error.headers:
            if h.lower() == 'content-length':
                printAnswer(str(error.code) , ' [' + str(error.headers[h]) + ']')
            else:
                printAnswer(str(error.code) , ' [-1]')
        retcode = error.code
        answer = error.read()
    except urllib.error.URLError as error:
        printAnswer(str(error))
    return answer, retcode
def generic_request(base_url, query=None):
    """Issue an OAuth-signed GET against *base_url* and return the response.

    query: optional list of extra [name, value] parameter pairs.
    """
    parameters = generate_base_data()
    query_string = ""
    # 'query is not None' instead of 'not query == None' (PEP 8).
    if query is not None:
        query_string = add_params(query)
        parameters.extend(query)
    request = urllib.request.Request(base_url + query_string)
    signature = myoauth.oauth_sign(
        request.get_method(), base_url, parameters,
        OAUTH_CONSUMER_SECRET, OAUTH_TOKEN_SECRET)
    header_string = generate_header_string(parameters,
                                           [["oauth_signature", signature]])
    request.add_header("Authorization", header_string)
    return urllib.request.urlopen(request)
def downloadasAndroid(url):
    """Fetch *url* with a mobile (UC browser) User-Agent and dump diagnostics."""
    #header={"Android QQ":"User-Agent: MQQBrowser/26 Mozilla/5.0 (Linux; U; Android 2.3.7; zh-cn; MB200 Build/GRJ22; CyanogenMod-7) AppleWebKit/533.1 (KHTML, like Gecko) Version/4.0 Mobile Safari/533.1"}
    header = {"UC": "User-Agent: UCWEB7.0.2.37/28/999"}
    request = urllib.request.Request(url, headers=header)
    #request.add_header("Connection","keep-live")
    print(request.get_full_url())   # the full page URL
    print(request.host)             # the server host name
    print(request.get_method())     # GET or POST
    print(request.type)             # scheme: http, ftp or https
    response = urllib.request.urlopen(request)
    print(response.code)            # status code: 200, 404, 403, ...
    print(response.info())          # detailed response headers
    content = response.read()
    print(content.decode('utf-8'))  # page source
def http_request(self, request):
    """Write the outgoing request (URL, request line, headers) to self.httpout."""
    if __debug__:
        # Request.get_host() was removed in Python 3.4; use the .host attribute.
        host, full_url = request.host, request.get_full_url()
        url_path = full_url[full_url.find(host) + len(host):]
        self.httpout.write("%s\n" % full_url)
        self.httpout.write("\n")
        self.httpout.write("%s %s\n" % (request.get_method(), url_path))
        for header in request.header_items():
            self.httpout.write("%s: %s\n" % header[:])
        self.httpout.write("\n")
    return request
def DeleteProjectTW(project_id):
    """Delete Teamwork project *project_id* via the JSON API and print the reply."""
    company = "project36"
    # SECURITY: hard-coded API token ("token:xxx") — should come from
    # configuration, not source code.
    key = base64.b64encode(b'twp_TRKx81UCnv4deufBFU2b85350cXo:xxx')
    # Use the Request 'method' parameter (Python 3.3+) instead of patching
    # get_method with a lambda; also drop the no-op project_id assignment.
    request = urllib.request.Request(
        "https://{0}.teamwork.com/projects/{1}.json".format(company, project_id),
        method="DELETE")
    print(request)
    request.add_header("Authorization", "BASIC " + key.decode())
    request.add_header("Content-type", "application/json")
    print(request.header_items())
    response = urllib.request.urlopen(request)
    data = response.read()
    print(request.get_method())
    print(data)
def default_open(self, request):
    """Handles GET requests, if the response is cached it returns it
    """
    # Compare strings with !=, not 'is not': identity comparison with a
    # string literal is unreliable and raises SyntaxWarning on modern Python.
    if request.get_method() != "GET":
        return None  # let the next handler try to handle the request
    if exists_in_cache(
        self.cache_location, request.get_full_url(), self.max_age
    ):
        return CachedResponse(
            self.cache_location,
            request.get_full_url(),
            set_cache_header=True
        )
    else:
        return None
def http_response(self, request, response):
    '''Post process the response object by seeing if its from the cache
    or live if live, store a copy then pull that same copy (without the
    cache-header) to return, if from cache, pull cached copy again (add
    a cache-header) and return
    '''
    if not (self._checkResponseCode(response) and request.get_method() == "GET"):
        return response
    url = request.get_full_url()
    if CACHE_HEADER in response.info():
        return CachedResponse(self.cacheLocation, url, setCacheHeader=True)
    CachedResponse.StoreInCache(self.cacheLocation, url, response)
    return CachedResponse(self.cacheLocation, url, setCacheHeader=False)
def get_token(self):
    """Log in and return the access token from the server's JSON reply."""
    login = '******'
    credentials = {"name": "doni", "password": "******"}
    encoded = json.dumps(credentials).encode('utf8')
    request = urllib.request.Request(url=login, method='POST',
                                     headers=self.headers, data=encoded)
    print(request.full_url)
    print(request.data, request.get_method())
    response = urllib.request.urlopen(request)
    # print(data)
    body = str(response.read(), encoding='utf-8')
    parsed = json.loads(body)
    tokens = parsed['data']['accessToken']
    response.close()
    print(tokens)
    return tokens
def retweeted_by_me(query=None):
    """Fetch the authenticated user's retweets (Twitter API v1).

    query: optional list of extra [name, value] parameter pairs.
    """
    base_url = "http://api.twitter.com/1/statuses/retweeted_by_me.json"
    parameters = generate_base_data()
    query_string = ""
    # 'query is not None' instead of 'not query == None' (PEP 8);
    # dead commented-out experimentation removed.
    if query is not None:
        query_string = add_params(query)
        parameters.extend(query)
    request = urllib.request.Request(base_url + query_string)
    signature = myoauth.oauth_sign(
        request.get_method(), base_url, parameters,
        OAUTH_CONSUMER_SECRET, OAUTH_TOKEN_SECRET)
    header_string = generate_header_string(parameters,
                                           [["oauth_signature", signature]])
    request.add_header("Authorization", header_string)
    return urllib.request.urlopen(request)
def log_curl_request(request, cookie_jar):
    """Print an equivalent `curl` command for *request* when CURL_CMD is set.

    cookie_jar is currently unused.  Only headers and the
    (ampersand-escaped) URL are emitted; no HTTP verb flag is added.
    """
    if not os.getenv("CURL_CMD"):
        return
    # Build the command as a string: the original accumulated into a list
    # with `cmd += "..."`, which appends one *character* at a time.
    cmd = "curl -v"
    if request.get_method() == "GET":
        # No verb to add here
        pass
    for header_name, header_val in request.header_items():
        cmd += ' -H "%s: %s"' % (header_name, header_val)
    # Escape '&' so a shell does not background the command.
    cmd += (" " + request.full_url).replace("&", "\\&")
    print(cmd)
def get_response(app: str, api_key: str, endpoint: str, payload: Optional[dict] = None):
    """Call a Heroku API endpoint for *app* and return the parsed JSON reply."""
    body = None if payload is None else json.dumps(payload).encode()
    request = urllib.request.Request(
        f"https://api.heroku.com/apps/{app}/{endpoint}",
        headers={
            "Accept": "application/vnd.heroku+json; version=3",
            "Authorization": f"Bearer {api_key}",
            "Content-Type": "application/json",
            "Range": "version ..; order=desc,max=10;",
        },
        data=body,
    )
    with urllib.request.urlopen(request) as response:
        allowed = get_status_codes(request.get_method())
        assert response.getcode() in allowed, response.getcode()
        return json.loads(response.read())
def totale(self, token):
    """POST a paging query to self.baseurl and return the total element count."""
    page_query = {"number": 0, "size": 10}
    self.headers['token'] = token
    serialized = json.dumps(page_query)
    body = serialized.encode('utf8')
    print(serialized)
    request = urllib.request.Request(url=self.baseurl, method='POST',
                                     headers=self.headers, data=body)
    print(request.full_url)
    print(request.data, request.get_method())
    response = urllib.request.urlopen(request)
    # print(data)
    decoded = str(response.read(), encoding='utf-8')
    parsed = json.loads(decoded)
    print(parsed)
    response.close()
    totalcount = parsed['data']['totalElements']
    print(totalcount)
    return totalcount
def http_response(self, request, response):
    """Gets a HTTP response, if it was a GET request and the status code
    starts with 2 (200 OK etc) it caches it and returns a CachedResponse
    """
    cacheable = (request.get_method() == "GET"
                 and str(response.code).startswith("2"))
    if not cacheable:
        return response
    url = request.get_full_url()
    if 'x-local-cache' in response.info():
        cache_header = True
    else:
        # Response is not cached yet — store it now.
        cache_header = store_in_cache(self.cache_location, url, response)
    return CachedResponse(self.cache_location, url,
                          set_cache_header=cache_header)
def encodeASP(text):
    """ base64 encode function for (ASP).NET """
    # Track whether the input was bytes so the return type matches it.
    isbytes = True
    if not isinstance(text, bytes):
        text = base64.urlsafe_b64encode(text.encode())
        isbytes = False
    else:
        text = base64.urlsafe_b64encode(text)
    # ASP.NET-style padding: strip trailing '=' characters and append
    # their count as a digit.
    count = len(re.findall(b'=',text))
    for i in range(count):
        text = text[:-1]
    text = text + str(count).encode()
    if isbytes:
        return text
    else:
        return text.decode()


class SmartRedirectHandler(urllib.request.HTTPRedirectHandler):
    """Redirect handler that logs 301/302 redirects and fixes the Host header."""

    def http_error_301(self, req, fp, code, msg, headers):
        # Log the redirect, let the default handler follow it, then log
        # the resulting GET.
        self.preProcessingRedirection(req, fp, code, msg, headers)
        result = super(SmartRedirectHandler, self).http_error_301(req, fp, code, msg, headers)
        self.postProcessingRedirection(result)
        return result

    def http_error_302(self, req, fp, code, msg, headers):
        self.preProcessingRedirection(req, fp, code, msg, headers)
        result = super(SmartRedirectHandler, self).http_error_302(req, fp, code, msg, headers)
        self.postProcessingRedirection(result)
        return result

    def preProcessingRedirection(self, req, fp, code, msg, headers):
        # Point the Host header at the redirect target before it is followed.
        location = ''
        for i in headers._headers:
            if i[0] == 'Location':
                location = i[1].strip()
        req.add_header('Host',urlparse(location).netloc)
        printAnswer(code, str(msg) + " " + location)
        printHeaders(headers._headers,'Set-Cookie')

    def postProcessingRedirection(self, result):
        printRequest("GET", result.geturl())


def stringToHexCSV(s):
    """Return *s* as space-separated hex byte pairs."""
    # NOTE(review): str.encode('hex') is Python 2 only — under Python 3 this
    # raises LookupError.  The Python 3 equivalent is s.encode().hex().
    hexs = s.encode('hex')
    ret = ' '.join(hexs[i:i+2] for i in range(0, len(hexs), 2))
    return ret


def defaultCreateOpener(withCookieJar = True, withBurpProxy = True):
    """Build a urllib opener with optional cookie jar and Burp proxy support."""
    global cookieJar
    if withCookieJar:
        cookieJar = urllib.request.HTTPCookieProcessor(http.cookiejar.CookieJar())
    proxy_handler = None
    if withBurpProxy:
        proxy_handler = urllib.request.ProxyHandler({'https': 'https://127.0.0.1:8080/', 'http': 'http://127.0.0.1:8080/'})
    ret = None
    if withCookieJar and withBurpProxy:
        ret = urllib.request.build_opener(proxy_handler, SmartRedirectHandler(), cookieJar)
    elif withCookieJar:
        ret = urllib.request.build_opener(SmartRedirectHandler(), cookieJar)
    elif withBurpProxy:
        ret = urllib.request.build_opener(proxy_handler, SmartRedirectHandler())
    # NOTE(review): with both flags False this returns None, not a plain
    # opener — verify callers never pass (False, False).
    return ret


def processingCookies(headers):
    """Flatten a {name: value} cookie dict in headers['Cookie'] into a header string."""
    cookies = headers['Cookie']
    final = ''
    if type(cookies) == type(""):
        # Already a string — nothing to flatten.
        # NOTE(review): returns None rather than the headers dict; callers
        # assigning the result will lose their headers.  Verify intent.
        return
    for c in cookies.keys():
        final += " " + c + "=" + cookies[c] + ";"
    headers['Cookie'] = final
    return headers


# Hook allowing callers to swap in a different opener factory.
createOpener = defaultCreateOpener


def requestC(opener,url, headers, data, method = 'POST'):
    """Like requestB but returns only the response body."""
    [answer, code] = requestB(opener,url, headers, data, method)
    return answer


def requestB(opener,url, headers, data, method = 'POST'):
    """Send *data* to *url* through *opener*; return (body, status_code)."""
    answer = ''
    retcode = None
    additionalInfo = '[None]'
    contentLenght = None  # NOTE(review): typo for "contentLength"; only used locally
    if ImportCookie:
        # Flatten the cookie dict in *headers* into a Cookie header string.
        headers = processingCookies(headers)
    data = urllib.parse.urlencode(data)
    if method == 'GET':
        if data:
            url = url + '?' + data
            data = None
    elif method == 'POST':
        headers['Content-Length'] = len(data)
        data = data.encode()
    request = urllib.request.Request(url,data,headers)
    try:
        printRequest(request.get_method(), request.get_full_url())
        if data:
            printData(data)
        f = opener.open(request)
        headers = f.getheaders()
        code = f.code
        retcode = code
        answer = f.read()
        # The digest is computed but never used; kept for parity.
        m = hashlib.md5()
        m.update(answer)
        for h in headers:
            if h[0].lower() == 'content-length':
                contentLenght = h[1]
                additionalInfo = '[' + str(h[1]) + ']'
        printAnswer(code,additionalInfo)
        printHeaders(headers,'Set-Cookie')
    except urllib.error.HTTPError as error:
        # NOTE(review): prints once per response header, not once per response.
        for h in error.headers:
            if h.lower() == 'content-length':
                printAnswer(str(error.code) , ' [' + str(error.headers[h]) + ']')
            else:
                printAnswer(str(error.code) , ' [-1]')
        retcode = error.code
        answer = error.read()
    except urllib.error.URLError as error:
        printAnswer(str(error))
    return answer, retcode


def parseBurpData(fileName):
    """Parse a Burp-saved raw HTTP request file.

    Returns (url, headers, data, method).  GET query strings and
    urlencoded POST bodies are parsed into dicts; XML bodies into a DOM.
    """
    global Protocol
    url = ''
    host = ''
    data = None
    contentType = 'None'
    headers = {}
    indata = None
    try:
        indata = open(fileName,"r")
    except IOError as error:
        print(str(error))
        sys.exit(1)
    # First line: "<METHOD> <URI> <HTTP-version>"
    line = indata.readline()
    res = line.partition(' ')
    method = res[0]
    printDebug('method ' + method)
    res = res[2].rpartition(' ')
    uri = res[0]
    printDebug('URI: ' + str(uri))
    if Protocol == None:
        rulo = urlparse(uri)
        printOut('Scheme not given, trying to guess it from burp request.')
        # NOTE(review): this condition is always true ('or' should be 'and',
        # i.e. scheme not in ('http', 'https')), so when Protocol is unset
        # this always exits — verify intent.
        if rulo.scheme != 'http' or rulo.scheme != 'https':
            printOut('** Could not determine the scheme from the HTTP request, please configure one **')
            sys.exit(1)
        else:
            Protocol = rulo.scheme
    # Header section: read until the blank line separating headers and body.
    line = indata.readline()
    while line.strip():
        res = line.partition(':')
        if res[0] == 'Host':
            host = res[2].strip()
        if res[0] == 'Content-Type':
            contentType = res[2].strip()
        if res[0] == 'Cookie':
            if ImportCookie:
                # Store cookies as a dict under the 'Cookie' header.
                cookies = res[2].split(';')
                for c in cookies:
                    tm = c.strip().partition('=')
                    if res[0] not in headers:
                        headers[res[0]] = {tm[0]:tm[2]}
                    else:
                        headers[res[0]][tm[0]] = tm[2]
                line = indata.readline()
                continue
        headers[res[0]] = res[2].strip()
        line = indata.readline()
    if method == 'POST':
        url = Protocol + '://' + host + uri
        data = indata.read().strip()
        if len(data) == 0:
            data = None
        else:
            urlencodedcontenttype = re.compile('application\/x-www-form-urlencoded')
            if urlencodedcontenttype.match(contentType):
                data = urllib.parse.parse_qs(data)
                for d in list(data.keys()):
                    if len(data[d]) > 1:
                        printOut("Multiple value for the same field. Odd... taking the first one")
                        data[d] = data[d][0]
            elif contentType == 'text/xml; charset=UTF-8':
                data = parseString(data)
            else:
                printOut('Unknown Content type: ' + str(contentType))
    elif method == 'GET':
        res = uri.rpartition('?')
        uri = res[0]
        # NOTE(review): rpartition always returns a 3-tuple, so this check
        # is always true — verify intent.
        if len(res) == 3:
            if uri == '':
                uri = res[2]
            data = urllib.parse.parse_qs(res[2])
            for d in list(data.keys()):
                if len(data[d]) > 1:
                    printOut("Multiple value for the same field. Odd... taking the first one")
                    data[d] = data[d][0]
        url = Protocol + '://' + host + uri
    indata.close()
    return url, headers,data, method
def request(self, path, parameters=None, data=None, method=None, auto_login=True, json_answer=True,
            filename=None):
    """
    Send a request to the Nuclos server.

    :param path: The path to open.
    :param parameters: A dictionary of parameters to add to the request URL.
    :param data: The data to add. If this is given the request will automatically be a POST request.
    :param method: The HTTP method to use. If not set this will be GET or POST, depending on the data.
    :param auto_login: Try to log in automatically in case of a 401 error.
    :param json_answer: Parse the servers answer as JSON.
    :param filename: A file to save the downloaded data in.
    :return: The answer of the server. None in case of an error.
    :raise: URLError in case of an HTTP error. Returns None instead if the
            'handle_http_errors' option is set.
    """
    if not self.session_id and auto_login:
        self.login()
    url = path
    if not url.startswith("http"):
        url = self._build_url(path, parameters)
    request = urllib.request.Request(url)
    if json_answer:
        request.add_header("Accept", "application/json")
    if data:
        # Attaching data implicitly makes this a POST request.
        request.data = json.dumps(data).encode("utf-8")
        request.add_header("Content-Type", "application/json")
    if method:
        request.method = method
    if method and request.data and method not in ["POST", "PUT"]:
        logging.warning("Overriding the POST method while sending data!")
    if self.session_id:
        request.add_header("Cookie", "JSESSIONID=" + str(self.session_id))
    logging.debug("Sending {} request to {}.".format(request.get_method(), request.get_full_url()))
    if request.data:
        logging.debug("Sending data {}.".format(request.data))
    try:
        result = urllib.request.urlopen(request)
        if filename is not None:
            # Stream the download straight to disk; no parsed answer then.
            with open(filename, "wb") as f:
                shutil.copyfileobj(result, f)
            return None
        answer = result.read().decode()
        if answer:
            logging.debug("Received answer {}".format(answer))
        if not json_answer:
            return answer
        try:
            return json.loads(answer)
        except ValueError:
            logging.error("Invalid JSON in '{}'.".format(answer))
            return None
    except urllib.request.HTTPError as e:
        if e.code == 401 and auto_login:
            # Session probably expired — log in again and retry once.
            # NOTE(review): the retry drops 'parameters' and 'filename' —
            # verify this is intentional.
            logging.info("Unauthorized. Trying to log in again.")
            self.session_id = None
            self.login()
            return self.request(path, data=data, method=method, auto_login=False, json_answer=json_answer)
        elif e.code == 403:
            raise NuclosAuthenticationException()
        else:
            logging.error("HTTP Error {}: {}".format(e.code, e.reason))
            raise NuclosHTTPException(e)
def retrieveScore(targetFinderData):
    """
    Given property characteristics in XML format access Portfolio Manager
    targetFinder webservices and retrieve the PM designScore.

    **Args:**

    - *targetFinderData*, XML-format string of the characteristics of a
      property or building.

    **Returns:**

    - *PMMetrics*, dictionary of PM metrics retrieved from targetFinder web
      services. The values returned is a list with the calculated value and
      the unit of measurement. On failure the dict carries a "status" and
      "reason" entry instead of metric values.

    **Notes:**

    - The purpose of targetFinder is having to do a single PUSH call instead
      of multiple calls with different XML files. This web service does not
      save property data.
    - targetFinder does not require login information.
    - Missing values in `propertyUses` tag are filled with ENERGY STAR
      default values. The only propertyUse information in this code is the
      `totalGrossFloorArea`.
    """
    # NOTE: When testing new features in OpenEIS or in Energy Star use the
    #   URL for the test environment. Production versions of the code
    #   should use live environment.
    url = "https://portfoliomanager.energystar.gov/wstest/targetFinder"  # Test Environment
    # url = 'https://portfoliomanager.energystar.gov/ws/targetFinder'  # Live Environment

    # --- Assemble opener.
    # BUG FIX: these two lines were commented out, leaving `opener`
    # undefined so every call raised NameError (uncaught by the handlers
    # below). Raise debuglevel to 1 to trace the HTTPS exchange.
    debugHandler = urllib.request.HTTPSHandler(debuglevel=0)
    opener = urllib.request.build_opener(debugHandler)

    # --- Assemble request. Supplying a body makes this a POST.
    targetFinderData_bin = targetFinderData.encode("utf-8")
    request = urllib.request.Request(url, targetFinderData_bin,
                                     headers={"Content-Type": "application/xml"})
    assert request.get_method() == "POST"

    metrics = dict()
    try:
        response = opener.open(request)
        xmlRoot = ET.fromstring(response.read())
        metrics["status"] = "success"
        # Each <metric name=... uom=...><value>...</value></metric> becomes
        # a (value, unit-of-measure) pair keyed by the metric name.
        for val in xmlRoot.iter("metric"):
            metrics[val.get("name")] = (val.findtext("value"), val.get("uom"))
    except urllib.error.HTTPError as err:
        print("http error")
        print("code is ", err.code)
        print("reason is", err.reason)
        print(str(err))
        print(err.read())
        metrics["status"] = "HTTP Error"
        metrics["reason"] = err.reason
    except urllib.error.URLError as err:
        # HTTPError subclasses URLError, so this must come second.
        print("url error")
        print(err.args)
        print(err.reason)
        metrics["status"] = "URL Error"
        metrics["reason"] = err.reason

    return metrics
def __write_capture(self, request, response):
    """Serialize one request/response pair as a <capture> XML record.

    Writes the record to self.ofhandle (encoded UTF-8) and rotates the
    output file every self.cut_count captures. Any exception raised while
    building the record is swallowed and reported to stderr, so capture
    failures never break the caller's request flow.

    Args:
        request: the urllib request object that was sent. Both the modern
            attribute API (request.host / request.data) and the older
            accessor API (get_host() / get_data()) are supported.
        response: the response object returned for that request.

    Returns:
        The response body bytes that were read (b'' when the response has
        no body, e.g. HEAD or 204/304), so the caller can reuse the body
        even though the response stream has been consumed here.

    Raises:
        Re-raises an IncompleteRead encountered while reading the body,
        but only after the (partial) capture has been written.
    """
    ohandle = io.StringIO()
    response_body = b''
    saved_exception = None
    try:
        ohandle.write('<capture>\n')
        ohandle.write('<request>\n')
        method = request.get_method()
        url = request.get_full_url()
        # Rebuild the request-line target (path[?query][#fragment]) from
        # the absolute URL.
        parsed = urlparse.urlsplit(url)
        relative_url = parsed.path
        if parsed.query:
            relative_url += '?' + parsed.query
        if parsed.fragment:
            # TODO: will this ever happen?
            relative_url += '#' + parsed.fragment
        host = None
        request_body = None
        if hasattr(request, 'get_host'):
            host = request.get_host()
            # support 3.3
            if request.has_data():
                request_body = request.get_data()
        else:
            # Newer urllib Request objects expose plain attributes instead
            # of the get_* accessors.
            host = request.host
            request_body = request.data
        ohandle.write('<method>%s</method>\n' % escape(method))
        ohandle.write('<url>%s</url>\n' % escape(url))
        ohandle.write('<host>%s</host>\n' % escape(host))
        try:
            # ghetto
            # Best-effort peer-IP extraction by digging through the
            # response's private socket internals; silently skipped when
            # the attribute chain is absent.
            addr = response.fp.raw._sock.getpeername()
            if addr:
                ohandle.write('<hostip>%s</hostip>\n' % escape(addr[0]))
        except Exception as error:
            pass
        ohandle.write('<datetime>%s</datetime>\n' % escape(time.asctime(time.gmtime())+' GMT'))
        # TODO: can we calculate request time and elapsed?
        # Reconstruct the raw request-head text; headers containing
        # non-printable characters are stored base64-encoded instead.
        request_headers = '%s %s HTTP/1.1\r\n' % (method, relative_url)
        # TODO: is there access to the HTTP version?
        for item in request.header_items():
            request_headers += item[0] + ': ' + '\r\n\t'.join(item[1:]) + '\r\n'
        if self.re_nonprintable_str.search(request_headers):
            ohandle.write('<headers encoding="base64">%s</headers>\n' % base64.b64encode(request_headers.encode('utf-8')).decode('ascii'))
        else:
            ohandle.write('<headers>%s</headers>\n' % escape(request_headers))
        if request_body is not None:
            if self.re_nonprintable.search(request_body):
                ohandle.write('<body encoding="base64">%s</body>\n' % base64.b64encode(request_body).decode('ascii'))
            else:
                ohandle.write('<body>%s</body>\n' % escape(request_body.decode('ascii')))
        ohandle.write('</request>\n')
        ohandle.write('<response>\n')
        status = int(response.getcode())
        ohandle.write('<status>%d</status>\n' % status)
        headers = response.info()
        # Per RFC 7230 these responses carry no body: HEAD, 1xx, 204, 304.
        if 'HEAD' == method or status < 200 or status in (204, 304,):
            response_body = b''
        else:
            try:
                response_body = response.read()
            except urllib2.IncompleteRead as e:
                # Keep the partial body and finish the capture; the
                # exception is re-raised after the record is written.
                saved_exception = e
        response_headers = 'HTTP/1.1 %d %s\r\n' % (status, response.msg)
        # TODO: is there access to the HTTP version?
        response_headers += headers.as_string()
        content_type = headers.get('Content-Type')
        content_length = headers.get('Content-Length')
        if content_type:
            ohandle.write('<content_type>%s</content_type>\n' % escape(content_type))
        if content_length:
            ohandle.write('<content_length>%d</content_length>\n' % int(content_length))
        if self.re_nonprintable_str.search(response_headers):
            ohandle.write('<headers encoding="base64">%s</headers>\n' % base64.b64encode(response_headers.encode('utf-8')).decode('ascii'))
        else:
            ohandle.write('<headers>%s</headers>\n' % escape(response_headers))
        if response_body:
            if self.re_nonprintable.search(response_body):
                ohandle.write('<body encoding="base64">%s</body>\n' % base64.b64encode(response_body).decode('ascii'))
            else:
                ohandle.write('<body>%s</body>\n' % escape(response_body.decode('ascii')))
        ohandle.write('</response>\n')
        ohandle.write('</capture>\n')
        self.ofhandle.write(ohandle.getvalue().encode('utf-8'))
        ohandle.close()
        # Rotate the output file every cut_count records.
        self.write_count += 1
        if 0 == (self.write_count % self.cut_count):
            self.close()
            self.open_file()
    except Exception as e:
        # Deliberate best-effort: a capture failure must not abort the
        # request being processed.
        sys.stderr.write('*** unhandled error in RaftCaptureProcessor: %s\n' % (e))
    if saved_exception:
        raise(saved_exception)
    return response_body