def perform_request(self, method, url, params=None, body=None, timeout=None, ignore=()):
    """Issue a blocking HTTP request through App Engine's urlfetch.

    Returns ``(status_code, headers, body)``.  Transport failures are
    logged and re-raised as ``ConnectionError``; HTTP error statuses not
    listed in ``ignore`` are routed through ``self._raise_error``.
    """
    began = time.time()
    path = self.url_prefix + url
    if params:
        path = '{0}?{1}'.format(path, urlencode(params))
    target = self.host + path
    try:
        response = urlfetch.fetch(
            url=target,
            payload=body,
            method=method,
            headers=self.headers,
            deadline=timeout,
            validate_certificate=self.validate_certificate,
        )
    except Exception as exc:
        # Transport-level failure: log it, then wrap in the client's error type.
        self.log_request_fail(method, target, path, json.dumps(body),
                              time.time() - began, exception=exc)
        raise ConnectionError('N/A', str(exc), exc)
    elapsed = time.time() - began
    content = response.content
    status = response.status_code

    # Raise errors based on http status codes; let the client handle
    # any codes it explicitly asked us to ignore.
    if status not in ignore and not (200 <= status < 300):
        self.log_request_fail(method, target, path, json.dumps(body), elapsed, status)
        self._raise_error(status, content)

    self.log_request_success(method, target, path, json.dumps(body), status, content, elapsed)
    return status, response.headers, content
def perform_request(self, method, url, params=None, body=None, timeout=None, ignore=()):
    """Coroutine: issue an HTTP request via the shared aiohttp session.

    Returns ``(status, headers, body_text)``.  A timeout raises
    ``ConnectionTimeout``; any other transport failure raises
    ``ConnectionError``.  HTTP error statuses not listed in ``ignore``
    are routed through ``self._raise_error``.
    """
    path = '%s?%s' % (url, urlencode(params or {})) if params else url
    target = self.base_url + path
    began = self.loop.time()
    response = None
    try:
        # Bound the whole request (connect + read) by a single deadline.
        with aiohttp.Timeout(timeout or self.timeout):
            response = yield from self.session.request(method, target, data=body)
            text = yield from response.text()
            took = self.loop.time() - began
    except Exception as exc:
        self.log_request_fail(method, target, path, body,
                              self.loop.time() - began, exception=exc)
        if isinstance(exc, asyncio.TimeoutError):
            raise ConnectionTimeout('TIMEOUT', str(exc), exc)
        raise ConnectionError('N/A', str(exc), exc)
    finally:
        # Always hand the connection back to the pool, success or failure.
        if response is not None:
            yield from response.release()

    status = response.status
    # Raise errors based on http status codes; honor the caller's ignore list.
    if status not in ignore and not (200 <= status < 300):
        self.log_request_fail(method, target, path, body, took,
                              status_code=status, response=text)
        self._raise_error(status, text)

    self.log_request_success(method, target, path, body, status, text, took)
    return status, response.headers, text
def perform_request(self, method, url, params=None, body=None, headers={}, ignore=(), future=None):
    """Perform an HTTP request through pycurl.

    When ``future`` is given, the configured curl handle is enqueued on
    ``self.multi_handle`` for asynchronous completion and that call's
    result is returned.  Otherwise the request is performed synchronously
    and ``(status, headers_dict, decoded_body)`` is returned; non-2xx
    statuses not in ``ignore`` raise via ``return_error``.

    NOTE(review): ``headers={}`` is a mutable default argument — if
    ``_get_curl_handler`` ever mutates it, state leaks across calls;
    consider ``headers=None``.  Also ``decode_body(curl_handle)`` runs
    after ``curl_handle.close()`` — presumably it only reads the Python
    attributes (``buffer``/``headers``) set below, not the live handle;
    verify before reordering anything here.
    """
    url = self.url_prefix + url
    if params is not None:
        url = '%s?%s' % (url, urlencode(params))
    full_url = self.host + url
    curl_handle = self._get_curl_handler(headers)
    curl_handle.setopt(pycurl.URL, full_url)
    # HEAD must suppress the response body or curl will hang waiting for one.
    if method == 'HEAD':
        curl_handle.setopt(pycurl.NOBODY, True)
    # Prepare the headers callback
    curl_handle.headers = {}
    curl_handle.setopt(pycurl.HEADERFUNCTION, get_header_function(curl_handle.headers))
    # Prepare the body buffer
    curl_handle.buffer = BytesIO()
    curl_handle.setopt(pycurl.WRITEDATA, curl_handle.buffer)
    # The possible body of a request
    if body is not None:
        curl_handle.setopt(pycurl.POSTFIELDS, body)
    # Set after pycurl.POSTFIELDS to ensure that the request is the wanted one
    curl_handle.setopt(pycurl.CUSTOMREQUEST, method)
    if future is not None:
        # Async path: the multi handle drives the transfer and resolves `future`.
        curl_handle.connection = self
        return self.multi_handle.query(curl_handle, future)
    else:
        # Sync path: perform inline and time the transfer for logging.
        start = time.time()
        curl_handle.perform()
        duration = time.time() - start
        status = curl_handle.getinfo(pycurl.RESPONSE_CODE)
        curl_handle.close()
        # `body` is rebound here: from request payload to decoded response body.
        (content_type, body) = decode_body(curl_handle)
        # raise errors based on http status codes, let the client handle those if needed
        if not (200 <= status < 300) and status not in ignore:
            self.log_request_fail(method, full_url, url, body, duration, status)
            # '__STATUS__' holds the raw HTTP status line captured by the
            # header callback; popped so it doesn't appear as a real header.
            http_message = curl_handle.headers.pop('__STATUS__')
            raise return_error(status, body, content_type, http_message)
        self.log_request_success(method, full_url, url, curl_handle.buffer.getvalue(), status, body, duration)
        return status, curl_handle.headers, body
def perform_request(self, method, url, params=None, body=None, timeout=None, ignore=(), headers=None):
    """Perform an HTTP request through the shared ``requests`` session.

    :arg method: HTTP method
    :arg url: request path, appended to ``self.base_url``
    :arg params: optional query parameters, urlencoded onto the url
    :arg body: optional request payload
    :arg timeout: per-request timeout; defaults to ``self.timeout``
    :arg ignore: HTTP status codes that should not raise
    :arg headers: optional extra request headers

    Returns ``(status_code, headers, body_text)``.  Raises ``SSLError``
    on TLS failures, ``ConnectionTimeout`` on timeouts and
    ``ConnectionError`` for any other transport failure.
    """
    url = self.base_url + url
    if params:
        url = '%s?%s' % (url, urlencode(params))

    start = time.time()
    request = requests.Request(method=method, headers=headers, url=url, data=body)
    prepared_request = self.session.prepare_request(request)
    # Honor environment settings (proxies, verify, cert, stream) the same
    # way Session.request would.
    settings = self.session.merge_environment_settings(
        prepared_request.url, {}, None, None, None)
    send_kwargs = {'timeout': timeout or self.timeout}
    send_kwargs.update(settings)
    try:
        # BUG FIX: previously this called self.session.request(...) with the
        # already-prepared method/url/body/headers, which re-prepared the
        # request and re-merged session headers and environment settings a
        # second time.  Send the PreparedRequest we built instead — that is
        # the documented flow for prepared requests.
        response = self.session.send(prepared_request, **send_kwargs)
        duration = time.time() - start
        raw_data = response.text
    except Exception as e:
        self.log_request_fail(method, url, prepared_request.path_url, body,
                              time.time() - start, exception=e)
        if isinstance(e, requests.exceptions.SSLError):
            raise SSLError('N/A', str(e), e)
        if isinstance(e, requests.Timeout):
            raise ConnectionTimeout('TIMEOUT', str(e), e)
        raise ConnectionError('N/A', str(e), e)

    # raise errors based on http status codes, let the client handle those if needed
    if not (200 <= response.status_code < 300) and response.status_code not in ignore:
        self.log_request_fail(method, url, response.request.path_url, body,
                              duration, response.status_code, raw_data)
        self._raise_error(response.status_code, raw_data)

    self.log_request_success(method, url, response.request.path_url, body,
                             response.status_code, raw_data, duration)
    return response.status_code, response.headers, raw_data
def perform_request(self, method, url, params=None, body=None, timeout=None, ignore=(), headers=None):
    """Coroutine: perform an HTTP request via aiohttp using a pre-built
    :class:`yarl.URL`.

    Returns ``(status, headers, body_text)``.  ``ServerFingerprintMismatch``
    becomes ``SSLError``, ``asyncio.TimeoutError`` becomes
    ``ConnectionTimeout``, anything else becomes ``ConnectionError``;
    task cancellation is propagated untouched.  Non-2xx statuses not in
    ``ignore`` are routed through ``self._raise_error``.
    """
    url_path = url
    if params:
        query_string = urlencode(params)
    else:
        query_string = ""
    # Provide correct URL object to avoid string parsing in low-level code
    url = yarl.URL.build(scheme=self.scheme, host=self.hostname, port=self.port,
                         path=url, query_string=query_string, encoded=True)
    start = self.loop.time()
    response = None
    try:
        # NOTE(review): self.timeout appears to be an aiohttp ClientTimeout
        # (it has .total) — confirm against the constructor.
        request_timeout = timeout or self.timeout.total
        with async_timeout.timeout(request_timeout, loop=self.loop):
            # override the default session timeout explicitly
            response = yield from self.session.request(
                method, url, data=body, headers=headers, timeout=request_timeout)
            raw_data = yield from response.text()
            duration = self.loop.time() - start
    except asyncio.CancelledError:
        # Never convert cancellation into a connection error.
        raise
    except Exception as e:
        self.log_request_fail(method, url, url_path, body,
                              self.loop.time() - start, exception=e)
        if isinstance(e, ServerFingerprintMismatch):
            raise SSLError('N/A', str(e), e)
        if isinstance(e, asyncio.TimeoutError):
            raise ConnectionTimeout('TIMEOUT', str(e), e)
        raise ConnectionError('N/A', str(e), e)
    finally:
        # Return the connection to the pool on every path.
        if response is not None:
            yield from response.release()

    # raise errors based on http status codes, let the client handle those if needed
    if not (200 <= response.status < 300) and response.status not in ignore:
        self.log_request_fail(method, url, url_path, body, duration,
                              status_code=response.status, response=raw_data)
        self._raise_error(response.status, raw_data)

    self.log_request_success(method, url, url_path, body, response.status,
                             raw_data, duration)
    return response.status, response.headers, raw_data
def perform_request(self, method, url, params=None, body=None, timeout=None, ignore=()):
    """Coroutine: perform an HTTP request via the shared aiohttp session.

    Returns ``(status, headers, body_text)``.  Timeouts raise
    ``ConnectionTimeout``, fingerprint mismatches ``SSLError`` and other
    client errors ``ConnectionError``; non-2xx statuses not in ``ignore``
    are routed through ``self._raise_error``.

    NOTE(review): the ``log_request_fail`` calls below pass
    ``(method, url, body, ...)`` while ``log_request_success`` passes
    ``(method, url, url_path, body, ...)`` — and the sibling aiohttp
    variant in this file includes ``url_path`` in both.  Verify against
    the actual ``log_request_fail`` signature; the failure calls may be
    dropping an argument.
    """
    url_path = url
    if params:
        url_path = '%s?%s' % (url, urlencode(params or {}))
    url = self.base_url + url_path
    start = self.loop.time()
    response = None
    try:
        # Single deadline covering connect and body read.
        with aiohttp.Timeout(timeout or self.timeout):
            response = yield from self.session.request(method, url, data=body)
            raw_data = yield from response.text()
            duration = self.loop.time() - start
    except asyncio.TimeoutError as e:
        self.log_request_fail(method, url, body, self.loop.time() - start, exception=e)
        raise ConnectionTimeout('TIMEOUT', str(e), e)
    except FingerprintMismatch as e:
        self.log_request_fail(method, url, body, self.loop.time() - start, exception=e)
        raise SSLError('N/A', str(e), e)
    except ClientError as e:
        self.log_request_fail(method, url, body, self.loop.time() - start, exception=e)
        raise ConnectionError('N/A', str(e), e)
    finally:
        # Return the connection to the pool on every path.
        if response is not None:
            yield from response.release()

    # raise errors based on http status codes, let the client handle those if needed
    if not (200 <= response.status < 300) and response.status not in ignore:
        self.log_request_fail(method, url, body, duration, response.status, raw_data)
        self._raise_error(response.status, raw_data)

    self.log_request_success(method, url, url_path, body, response.status, raw_data, duration)
    return response.status, response.headers, raw_data
def perform_request(self, method, url, params=None, body=None, timeout=None, ignore=()):
    """Perform an HTTP request with App Engine's ``urlfetch.Fetch``.

    :arg method: HTTP method
    :arg url: request path, appended to ``self.base_url``
    :arg params: optional query parameters, urlencoded onto the url
    :arg body: optional request payload
    :arg timeout: urlfetch deadline in seconds
    :arg ignore: HTTP status codes that should not raise

    Returns ``(status_code, headers, body)``.  Raises ``SSLError`` on
    certificate validation failure, ``ConnectionTimeout`` when the
    deadline is exceeded and ``ConnectionError`` for any other
    transport failure.
    """
    url = self.base_url + url
    if params:
        url = '%s?%s' % (url, urlencode(params))
    start = time.time()
    # Copy so per-request mutation by urlfetch can't leak into self.headers.
    headers = self.headers.copy()
    try:
        response = urlfetch.Fetch(url,
                                  payload=body,
                                  method=method,
                                  headers=headers,
                                  allow_truncated=False,
                                  follow_redirects=True,
                                  deadline=timeout,
                                  validate_certificate=self.verify_certs)
        duration = time.time() - start
        raw_data = response.content
    except Exception as e:
        self.log_request_fail(method, url, url, body, time.time() - start, exception=e)
        if isinstance(e, urlfetch_errors.SSLCertificateError):
            raise SSLError('N/A', str(e), e)
        if isinstance(e, urlfetch_errors.DeadlineExceededError):
            raise ConnectionTimeout('TIMEOUT', str(e), e)
        raise ConnectionError('N/A', str(e), e)

    # raise errors based on http status codes, let the client handle those if needed
    if not (200 <= response.status_code < 300) and response.status_code not in ignore:
        # BUG FIX: the failing status code was previously dropped from this
        # log call, unlike the sibling urlfetch transport in this file and
        # this function's own success path below.
        self.log_request_fail(method, url, url, body, duration, response.status_code)
        self._raise_error(response.status_code, raw_data)

    self.log_request_success(method, url, url, body, response.status_code, raw_data, duration)
    return response.status_code, response.headers, raw_data
def perform_request(  # noqa: C901
    self, method, url, params=None, body=None, timeout=None, ignore=(), headers=None
):
    """Perform an HTTP request through the urllib3 connection pool.

    :arg method: HTTP method
    :arg url: request path, appended to ``self.url_prefix``
    :arg params: optional query parameters, urlencoded onto the url
    :arg body: optional request payload (gzipped when ``self.http_compress``)
    :arg timeout: per-request timeout passed to urllib3
    :arg ignore: HTTP status codes that should not raise
    :arg headers: optional extra request headers merged over ``self.headers``

    Returns ``(status, headers, body_text)``.  Raises ``SSLError`` on TLS
    failures, ``ConnectionTimeout`` on read timeouts and
    ``ConnectionError`` for any other transport failure.
    """
    url = self.url_prefix + url
    if params:
        url = "%s?%s" % (url, urlencode(params))
    full_url = self.host + url

    start = time.time()
    try:
        kw = {}
        if timeout:
            kw["timeout"] = timeout

        # in python2 we need to make sure the url and method are not
        # unicode. Otherwise the body will be decoded into unicode too and
        # that will fail (#133, #201).
        if not isinstance(url, str):
            url = url.encode("utf-8")
        if not isinstance(method, str):
            method = method.encode("utf-8")

        request_headers = self.headers
        if headers:
            # Copy before merging so per-request headers never mutate the
            # connection-wide defaults.
            request_headers = request_headers.copy()
            request_headers.update(headers)

        if self.http_compress and body:
            try:
                body = gzip.compress(body)
            except AttributeError:
                # oops, Python2.7 doesn't have `gzip.compress` let's try
                # again
                body = gzip.zlib.compress(body)

        # BUG FIX: a failed connection previously hit a special-case
        # `except NewConnectionError: self.log.error(...); sys.exit(-1)`,
        # terminating the whole host process from library code.  Let the
        # generic handler below convert it into ConnectionError instead,
        # consistent with every other transport in this file.
        response = self.pool.urlopen(
            method, url, body, retries=Retry(False), headers=request_headers, **kw
        )
        duration = time.time() - start
        raw_data = response.data.decode("utf-8")
    except Exception as e:
        self.log_request_fail(
            method, full_url, url, body, time.time() - start, exception=e
        )
        if isinstance(e, UrllibSSLError):
            raise SSLError("N/A", str(e), e)
        if isinstance(e, ReadTimeoutError):
            raise ConnectionTimeout("TIMEOUT", str(e), e)
        raise ConnectionError("N/A", str(e), e)

    # raise errors based on http status codes,
    # let the client handle those if needed
    if not (200 <= response.status < 300) and response.status not in ignore:
        self.log_request_fail(
            method, full_url, url, body, duration, response.status, raw_data
        )
        self._raise_error(response.status, raw_data)

    self.log_request_success(
        method, full_url, url, body, response.status, raw_data, duration
    )
    return response.status, response.getheaders(), raw_data