def do_open(self, req): """ Called by handler's url_open method. """ host = req.get_host() if not host: raise urllib2.URLError("no host given") try: resp_statuses = self._hostresp.setdefault(host, self._get_tail_filter()) # Check if all our last 'resp_statuses' were timeouts and raise # a w3afMustStopException if this is the case. if len(resp_statuses) == self._curr_check_failures and all(st == RESP_TIMEOUT for st in resp_statuses): msg = ( "w3af found too much consecutive timeouts. The remote " "webserver seems to be unresponsive; please verify manually." ) raise w3afMustStopException(msg) conn_factory = self._get_connection conn = self._cm.get_available_connection(host, conn_factory) if conn.is_fresh: # First of all, call the request method. This is needed for # HTTPS Proxy if isinstance(conn, ProxyHTTPConnection): conn.proxy_setup(req.get_full_url()) conn.is_fresh = False self._start_transaction(conn, req) resp = conn.getresponse() else: # We'll try to use a previously created connection resp = self._reuse_connection(conn, req, host) # If the resp is None it means that connection is bad. It was # possibly closed by the server. Replace it with a new one. if resp is None: conn.close() conn = self._cm.replace_connection(conn, host, conn_factory) # First of all, call the request method. This is needed for # HTTPS Proxy if isinstance(conn, ProxyHTTPConnection): conn.proxy_setup(req.get_full_url()) # Try again with the fresh one conn.is_fresh = False self._start_transaction(conn, req) resp = conn.getresponse() except (socket.error, httplib.HTTPException), err: # We better discard this connection self._cm.remove_connection(conn, host) if isinstance(err, socket.timeout): resp_statuses.append(RESP_TIMEOUT) _err = URLTimeoutError() else: resp_statuses.append(RESP_BAD) _err = urllib2.URLError(err) raise _err
def _incrementGlobalErrorCount(self, error):
    """
    Track one more failed request and abort the scan when too many
    consecutive failures pile up.

    :param error: The exception (or error description) for this failure;
                  its str() is stored in self._last_errors.
    :raise w3afMustStopException: Once ten consecutive errors have been
                                  recorded and we are not already stopping.
    """
    # The user explicitly asked us to keep going no matter what.
    if self._ignore_errors_conf:
        return

    # The very first failure only flips the flag; errors are accumulated
    # from the second consecutive failure onwards.
    if not self._lastRequestFailed:
        self._lastRequestFailed = True
    else:
        self._last_errors.append(str(error))

    error_count = len(self._last_errors)
    om.out.debug('Incrementing global error count. GEC: %s' % error_count)

    if error_count < 10 or self._mustStop:
        return

    msg = 'The xUrllib found too much consecutive errors. The remote' \
          ' webserver doesn\'t seem to be reachable anymore; please verify' \
          ' manually.'
    self.stop()
    raise w3afMustStopException(msg, self._last_errors)
# Log the errors om.out.debug(msg) om.out.debug('Traceback for this error: %s' % traceback.format_exc()) req._Request__original = original_url # Then retry! return self._retry(req, e, useCache) except KeyboardInterrupt: # Correct control+c handling... raise except sqlite3.Error, e: msg = 'A sqlite3 error was raised: "%s".' % e if 'disk' in str(e).lower(): msg += ' Please check if your disk is full.' raise w3afMustStopException( msg ) except w3afMustStopException: raise except Exception, e: # This except clause will catch unexpected errors # For the first N errors, return an empty response... # Then a w3afMustStopException will be raised msg = ('%s %s returned HTTP code "%s"' % (req.get_method(), original_url, NO_CONTENT)) om.out.debug(msg) om.out.debug('Unhandled exception in xUrllib._send(): %s' % e) om.out.debug(traceback.format_exc()) # Clear the log of failed requests; this request is done! req_id = id(req) if req_id in self._errorCount: