def request(self, host, handler, data, verbose=False):
    # set the headers, including the user-agent
    headers = {
        "User-Agent": "my-user-agent",
        "Content-Type": "text/xml",
        "Accept-Encoding": "gzip",
    }
    url = "http://%s%s" % (host, handler)
    response = None
    try:
        response = requests.post(url, data=data, headers=headers,
                                 timeout=SOCKET_TIMEOUT)
        response.raise_for_status()
    except requests.RequestException as e:
        if response is None:
            exception(ProtocolError(url, 500, str(e), ""))
        else:
            exception(ProtocolError(url, response.status_code, str(e),
                                    response.headers))
    if response is not None:
        return self.parse_response(response)
    return response
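# A minimal usage sketch for a requests-based transport like the one above.
# It assumes that request() is defined on a hypothetical
# xmlrpc.client.Transport subclass named RequestsTransport; the class name,
# endpoint URL, and method call are placeholders, not part of the original code.
import xmlrpc.client

class RequestsTransport(xmlrpc.client.Transport):
    # Assume the request() method shown above is defined here.
    ...

def fetch_version(endpoint="http://bugzilla.example.com/xmlrpc.cgi"):
    proxy = xmlrpc.client.ServerProxy(endpoint, transport=RequestsTransport())
    try:
        # A non-2xx HTTP response surfaces to the caller as ProtocolError.
        return proxy.Bugzilla.version()
    except xmlrpc.client.ProtocolError as err:
        print("XML-RPC HTTP error %s from %s" % (err.errcode, err.url))
        raise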
def single_request(self, host, handler, request_body, verbose=0):
    # issue XML-RPC request
    h = self.make_connection(host)
    if verbose:
        h.set_debuglevel(1)
    try:
        self.send_request(h, handler, request_body)
        self.send_host(h, host)
        self.send_user_agent(h)
        self.send_auth(h)
        self.send_content(h, request_body)

        response = h.getresponse(buffering=True)
        if response.status == 200:
            self.verbose = verbose
            return self.parse_response(response)
    except Fault:
        raise
    except Exception:
        self.close()
        raise

    # discard any response data and raise exception
    if response.getheader("content-length", 0):
        response.read()
    raise ProtocolError(
        host + handler,
        response.status, response.reason,
        response.msg,
    )
def __request_helper(self, url, request_body):
    """
    A helper method to assist in making a request and parsing the response.
    """
    response = None
    # pylint: disable=try-except-raise
    # pylint: disable=raise-missing-from
    try:
        response = self.__bugzillasession.request("POST", url,
                                                  data=request_body)
        return self.parse_response(response)
    except RequestException as e:
        if not response:
            raise
        raise ProtocolError(  # pragma: no cover
            url, response.status_code, str(e), response.headers)
    except Fault:
        raise
    except Exception:
        msg = str(sys.exc_info()[1])
        if not self.__seen_valid_xml:
            msg += "\nThe URL may not be an XMLRPC URL: %s" % url
        e = BugzillaError(msg)
        # pylint: disable=attribute-defined-outside-init
        e.__traceback__ = sys.exc_info()[2]
        # pylint: enable=attribute-defined-outside-init
        raise e
def request(self, host, handler, request_body, verbose=0):
    self.verbose = verbose
    url = self._scheme + "://" + host + handler
    transport = get_transport(
        url, possible_transports=self._possible_bzr_transports)
    response = transport.request("POST", url, body=request_body,
                                 headers={"Content-Type": "text/xml"})
    if response.status != 200:
        raise ProtocolError(url, response.status, response.text,
                            response.headers)
    return self.parse_response(BytesIO(response.data))
def request(self, host, handler, request_body, verbose=0):
    self.verbose = verbose
    url = self._scheme + "://" + host + handler
    request = _urllib2_wrappers.Request("POST", url, request_body)
    request.add_header("User-Agent", self.user_agent)
    request.add_header("Content-Type", "text/xml")
    response = self._opener.open(request)
    if response.code != 200:
        raise ProtocolError(host + handler, response.code,
                            response.msg, response.info())
    return self.parse_response(response)
def raise_error(self, request):
    import socket
    try:
        from xmlrpc.client import ProtocolError
    except ImportError:  # PY2
        from xmlrpclib import ProtocolError

    if request.param == "protocol":
        exc = ProtocolError("http://pypi.python.org/pypi", 503, "", {})
    else:
        exc = socket.error(111)

    def raise_error():
        raise exc

    return raise_error
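# A sketch of how a parametrized fixture in the style of raise_error above
# might be declared and consumed in pytest. The params list and the test body
# are illustrative assumptions, not taken from the original test suite.
import socket
import pytest
from xmlrpc.client import ProtocolError

@pytest.fixture(params=["protocol", "socket"])
def raise_error(request):
    if request.param == "protocol":
        exc = ProtocolError("http://pypi.python.org/pypi", 503, "", {})
    else:
        exc = socket.error(111)

    def _raise():
        raise exc

    return _raise

def test_error_is_raised(raise_error):
    # Either failure mode should surface as an exception the caller can catch
    # (socket.error is an alias of OSError on Python 3).
    with pytest.raises((ProtocolError, OSError)):
        raise_error()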
def _request_helper(self, url, request_body):
    """
    A helper method to assist in making a request and provide a parsed
    response.
    """
    response = None
    try:
        response = self.session.post(url, data=request_body,
                                     **self.request_defaults)

        # We expect utf-8 from the server
        response.encoding = 'UTF-8'

        # escape illegal utf-8 characters
        import re
        response._content = re.sub(b'[\x01-\x1f]+', b'', response.content)
        # NOTE: the second substitution appears garbled in the source (it read
        # as a no-op, b'&' -> b'&'); escaping bare ampersands to b'&amp;' is
        # the likely original intent and is reconstructed here.
        response._content = re.sub(b'&', b'&amp;', response.content)

        # update/set any cookies
        if self._cookiejar is not None:
            for cookie in response.cookies:
                self._cookiejar.set_cookie(cookie)

            if self._cookiejar.filename is not None:
                # Save is required only if we have a filename
                self._cookiejar.save()

        response.raise_for_status()
        return self.parse_response(response)
    except requests.RequestException:
        e = sys.exc_info()[1]
        if not response:
            raise
        raise ProtocolError(url, response.status_code,
                            str(e), response.headers)
    except Fault:
        raise sys.exc_info()[1]
    except Exception:
        # pylint: disable=W0201
        e = BugzillaError(str(sys.exc_info()[1]))
        e.__traceback__ = sys.exc_info()[2]
        raise e
def request(self, host, handler, request_body, verbose):
    """
    Make an xmlrpc request.
    """
    headers = {
        'User-Agent': self.user_agent,
        'Content-Type': 'text/xml',
    }
    url = self._build_url(host, handler)
    try:
        resp = self.session.post(url, data=request_body, headers=headers,
                                 proxies=self.session.proxies)
        resp.raise_for_status()
    except requests.exceptions.HTTPError as e:
        if e.response.status_code == 407:
            # Proxy Authentication Required
            handle_proxy_407(url, self.session)
            # Try again
            return self.request(host, handler, request_body, verbose)
        else:
            raise
    except requests.exceptions.ConnectionError as e:
        # requests isn't so nice here. For whatever reason, https gives this
        # error and http gives the above error. Also, there is no status_code
        # attribute here. We have to just check if it looks like 407. See
        # https://github.com/kennethreitz/requests/issues/2061.
        if "407" in str(e):
            # Proxy Authentication Required
            handle_proxy_407(url, self.session)
            # Try again
            return self.request(host, handler, request_body, verbose)
        else:
            raise
    except requests.RequestException as e:
        raise ProtocolError(url, resp.status_code, str(e), resp.headers)
    else:
        return self.parse_response(resp)
def single_request(self, host, handler, request_body, verbose=False):
    try:
        proxy = urllib.request.ProxyHandler(
            {"http": "http://{}:{}@{}:{}".format(*self.proxy)})
        opener = urllib.request.build_opener(proxy)
        URL = self.host + "/RPC2"
        resp = opener.open(URL, request_body)
        if resp.status == 200:
            self.verbose = verbose
            return self.parse_response(resp)
    except Fault as f:
        print(f)
        raise
    except Exception:
        self.close()
        raise

    if resp.getheader("content-length", ""):
        resp.read()
    raise ProtocolError(host + handler, resp.status, resp.reason,
                        dict(resp.getheaders()))
def _request_helper(self, url, request_body):
    """
    A helper method to assist in making a request and provide a parsed
    response.
    """
    response = None
    # pylint: disable=try-except-raise
    try:
        response = self.session.post(url, data=request_body,
                                     **self.request_defaults)

        # We expect utf-8 from the server
        response.encoding = 'UTF-8'

        # update/set any cookies
        if self._cookiejar is not None:
            for cookie in response.cookies:
                self._cookiejar.set_cookie(cookie)

            if self._cookiejar.filename is not None:
                # Save is required only if we have a filename
                self._cookiejar.save()

        response.raise_for_status()
        return self.parse_response(response)
    except requests.RequestException as e:
        if not response:
            raise
        raise ProtocolError(url, response.status_code,
                            str(e), response.headers)
    except Fault:
        raise
    except Exception:
        e = BugzillaError(str(sys.exc_info()[1]))
        # pylint: disable=attribute-defined-outside-init
        e.__traceback__ = sys.exc_info()[2]
        # pylint: enable=attribute-defined-outside-init
        raise e
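# A sketch of the error contract that helpers like the two _request_helper
# variants above expose to callers: xmlrpc Fault for server-reported faults,
# ProtocolError for HTTP-level failures, and BugzillaError as the catch-all.
# The bz client object, its _proxy attribute, and the Bug.get call are
# placeholders for illustration; the BugzillaError import path is assumed.
from xmlrpc.client import Fault, ProtocolError
from bugzilla import BugzillaError

def get_bug_safely(bz, bug_id):
    try:
        return bz._proxy.Bug.get({"ids": [bug_id]})
    except Fault as f:
        # The server processed the request but returned an XML-RPC fault.
        print("server fault %s: %s" % (f.faultCode, f.faultString))
    except ProtocolError as p:
        # The HTTP layer failed before a usable XML-RPC response arrived.
        print("HTTP %s from %s" % (p.errcode, p.url))
    except BugzillaError as b:
        # Anything else (e.g. a non-XMLRPC URL) is wrapped by the helper.
        print("bugzilla client error: %s" % b)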
def request(self, host, handler, request_body, verbose=0):
    # issue XML-RPC request
    h = self.make_connection(host)
    if verbose:
        h.set_debuglevel(1)

    self.send_request(h, handler, request_body)
    self.send_host(h, host)
    self.send_user_agent(h)
    self.send_content(h, request_body)

    response = h.getresponse()
    if response.status != 200:
        raise ProtocolError(host + handler, response.status,
                            response.reason, response.msg.headers)

    payload = response.read()
    parser, unmarshaller = self.getparser()
    parser.feed(payload)
    parser.close()
    return unmarshaller.close()
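# The tail of the method above inlines what parse_response normally does:
# feed the raw body to the XML-RPC parser and close the unmarshaller to get
# the decoded result tuple. A standalone sketch of that step, using a
# hand-written methodResponse payload purely for illustration:
import xmlrpc.client

payload = b"""<?xml version="1.0"?>
<methodResponse>
  <params>
    <param><value><int>42</int></value></param>
  </params>
</methodResponse>"""

parser, unmarshaller = xmlrpc.client.getparser()
parser.feed(payload)
parser.close()
print(unmarshaller.close())  # -> (42,); a <fault> payload would raise Fault here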