def execute_plugin(register_plugins, m_resource):
    """
    Run every registered plugin against a single resource, spawning one
    greenlet per (plugin, parameter) pair on a shared pool of 30.

    :param register_plugins: Mapping of plugin key -> plugin instance.
    :type register_plugins: dict

    :param m_resource: Resource to audit. Only Resource instances that
        carry URL or POST parameters are dispatched.
    """
    pluginPool = MyGeventPool(30)
    for key, plugin in register_plugins.iteritems():
        if not isinstance(m_resource, Resource):
            continue

        # Pick the parameter set to fuzz. POST parameters take precedence
        # over URL parameters (same precedence as the original code).
        param_dict = None
        method = None
        if m_resource.has_url_params:
            param_dict = m_resource.url_params
            method = 'GET'
        if m_resource.has_post_params:
            param_dict = m_resource.post_params
            method = 'POST'

        # Fix: the original raised NameError when the resource had neither
        # URL nor POST parameters (param_dict was never bound).
        if not param_dict:
            continue

        for k, v in param_dict.iteritems():
            param_key = to_utf8(k)
            param_value = to_utf8(v)
            pluginPool.spawn(plugin_run_thread, key, plugin, m_resource,
                             method=method,
                             param_key=param_key,
                             param_value=param_value)

    # Wait for all spawned plugin greenlets to finish.
    pluginPool.join()
def run(self, info): #if not info.has_url_params and not info.has_post_params: # return m_return = [] if info.has_url_params: ''' cookie_dict = Config.audit_config.cookie print cookie_dict if hasattr(cookie_dict, "iteritems"): cookie_params = { to_utf8(k): to_utf8(v) for k, v in cookie_dict.iteritems() } cookie_param = ';'.join( '%s=%s' % (k ,v) for (k, v) in sorted(cookie_params.iteritems()) ) print cookie_param print "GET" ''' param_dict = info.url_params for k, v in param_dict.iteritems(): key = to_utf8(k) value = to_utf8(v) return m_return if info.has_post_params: print 'POST' # Send the results return m_return
def run(self, info):
    """
    Command-injection audit plugin entry point.

    Mutates each GET/POST parameter with every payload in
    ``cmd_inject_detect_test_cases`` and greps the HTTP response body for
    the test case's expected output pattern, logging on a match.

    :param info: Resource under audit; must expose has_url_params /
        has_post_params, url_params / post_params and method.
    :returns: List of findings. NOTE(review): the original never appends
        to it — matches are only logged; behavior preserved.
    :rtype: list
    """
    m_return = []

    def _scan_params(param_dict):
        # Shared scan loop: the original duplicated this verbatim for the
        # GET and POST branches.
        for k, v in param_dict.iteritems():
            key = to_utf8(k)
            value = to_utf8(v)
            for cmd_inject_case in cmd_inject_detect_test_cases:
                p = payload_muntants(info,
                                     payload={'k': k,
                                              'pos': 1,
                                              'payload': cmd_inject_case['input'],
                                              'type': 0},
                                     bmethod=info.method,
                                     timeout=30.0)
                # Only check responses when the case defines a target
                # pattern and the mutated request actually succeeded.
                if cmd_inject_case['target'] is not None and p is not None:
                    if re.search(cmd_inject_case['target'], p.data) is not None:
                        logger.log_verbose('[+] found cmd inject!')

    if info.has_url_params:
        _scan_params(info.url_params)
        return m_return

    if info.has_post_params:
        _scan_params(info.post_params)
        return m_return

    # Send the results
    return m_return
def __init__(self, raw_request):
    """
    Wrap a raw HTTP request.

    :param raw_request: Raw HTTP request.
    :type raw_request: str
    """
    # Normalise the request bytes to UTF-8 before storing them.
    encoded_request = to_utf8(raw_request)
    self.__raw_request = encoded_request
    super(HTTP_Raw_Request, self).__init__()
def __init__(self, raw_headers):
    """
    Parse raw HTTP headers on construction.

    :param raw_headers: Raw headers to parse.
    :type raw_headers: str
    """
    # Store the UTF-8 normalised raw form, then build both the ordered
    # and the lowercase-keyed lookup forms from the original input.
    self.__raw_headers = to_utf8(raw_headers)
    ordered, lookup = self.parse_headers(raw_headers)
    self.__headers = ordered
    self.__cache = lookup
def spider_task():
    """
    Crawl the configured target with the external web spider and append
    every discovered resource (GET, POST and multipart upload) to
    ``conf.targets``.

    Reads the global ``conf`` (url, optional cookie dict) and logs each
    discovered URL through ``logger``.
    """
    cookie_param = None
    cookie_dict = conf.cookie if conf is not None and conf.has_key(
        'cookie') else None
    m_url = conf.url

    # Serialize the cookie dict into a "k=v;k=v" header value, sorted
    # for a deterministic order.
    if cookie_dict is not None:   # fix: identity check instead of "!= None"
        if hasattr(cookie_dict, "iteritems"):
            cookie_params = {
                to_utf8(k): to_utf8(v)
                for k, v in cookie_dict.iteritems()
            }
            cookie_param = ';'.join(
                '%s=%s' % (k, v)
                for (k, v) in sorted(cookie_params.iteritems()))

    # Dispatch the spider and parse its JSON result.
    __ = start_wvs_spider_dispatch(m_url, cookie_param, logger)
    json_content = json.loads(__)

    for urls in json_content['info']:
        logger.log_verbose("Web Spider:found url %s" % urls['fullurl'])
        m_resource = URL(url=urls['fullurl'])
        conf.targets.append(m_resource)

        for item_url in urls['content']:
            post_param = item_url['param_data']
            if "AcunetixBoundary_" in post_param:
                # multipart/form-data upload, detected by the Acunetix
                # boundary marker embedded in the parameter data.
                method = 'FILE_UPLOAD'
            else:
                method = item_url['method']

            if method == "POST":
                post_param_dict = argument_query(item_url['param_data'])
                m_resource = URL(url=item_url['url'], method="POST",
                                 post_params=post_param_dict,
                                 referer=urls['fullurl'])
            else:
                m_resource = URL(url=item_url['url'], method=method,
                                 referer=urls['fullurl'])
            logger.log_verbose("Web Spider:found url %s" % item_url['url'])
            conf.targets.append(m_resource)
def parse_headers(raw_headers):
    """
    Parse HTTP headers.

    :param raw_headers: Raw headers to parse.
    :type raw_headers: str

    :returns: Parsed headers in original and simplified forms.
    :rtype: tuple( tuple(tuple(str, str)), dict(str -> str) )
    """
    original = []      # headers in wire order, original case preserved
    parsed = {}        # lowercase name -> merged value
    last_name = None   # lowercase name of the most recent header line

    for line in to_utf8(raw_headers).split("\r\n"):

        # An empty line terminates the header section.
        if not line:
            break

        # A line starting with whitespace continues the previous header.
        if line[0] in " \t":
            if last_name is None:
                break  # continuation with no preceding header: broken input
            fragment = line.strip()
            parsed[last_name] += " " + fragment
            prev_name, prev_value = original[-1]
            original[-1] = (prev_name, prev_value + " " + fragment)
            continue

        # Regular "Name: value" line.
        name, value = line.split(":", 1)
        name, value = name.strip(), value.strip()
        name_lower = name.lower()
        last_name = name_lower

        # Duplicate headers are merged with ", " in the simplified form.
        if name_lower in parsed:
            parsed[name_lower] += ", " + value
        else:
            parsed[name_lower] = value

        original.append((name, value))

    # Freeze the ordered form so callers cannot mutate it.
    return tuple(original), parsed
def __init__(self, data):
    """
    :param data: Raw HTML content.
    :type data: str
    """
    # Keep the UTF-8 normalised markup for later parsing.
    raw_html = to_utf8(data)
    self.__raw_data = raw_html

    # Parent constructor.
    super(HTML, self).__init__()
def from_items(items):
    """
    Get HTTP headers in pre-parsed form.

    This is useful for integrating with other libraries that have
    already parsed the HTTP headers in their own way.

    :param items: Iterable of key/value pairs.
    :type items: iterable( tuple(str, str) )
    """
    # Rebuild the raw header text the best we can, one CRLF-terminated
    # line per pair, then let the regular constructor parse it.
    pieces = []
    for header_name, header_value in items:
        # The Cookie header may arrive as a dict: flatten it to the
        # standard "k=v;k=v" wire format first.
        if header_name == 'Cookie':
            header_value = ";".join(
                "%s=%s" % (k, v) for k, v in header_value.items())
        if not header_value.endswith("\r\n"):
            header_value = header_value + "\r\n"
        pieces.append("%s: %s" % (to_utf8(header_name), to_utf8(header_value)))
    pieces.append("\r\n")

    # Return an HTTP_Headers object using the reconstructed raw headers.
    return HTTP_Headers("".join(pieces))
def cookie_query(cookie_query):
    """
    Parse a "name1=value1;name2=value2" cookie string into a dict.

    :param cookie_query: Raw cookie string.
    :type cookie_query: str

    :returns: Mapping of unquoted cookie names to values; empty dict on
        any parse error or when the input held no real pairs.
    :rtype: dict
    """
    try:
        # Appending '=' guarantees split() yields at least two fields;
        # much faster than parse_qsl().
        cookie_params = dict((map(unquote_plus, (to_utf8(token) + '=').split('=', 2)[:2])
                              for token in cookie_query.split(';')))
        # A single entry with an empty value means the string held no
        # real name/value pair at all.
        if len(cookie_params) == 1 and not cookie_params.values()[0]:
            cookie_params = {}
        # fix: dropped the dead "else: query = None" from the original —
        # that local was never read.
    except Exception:
        ##raise # XXX DEBUG
        # Deliberate best-effort: any malformed cookie string yields {}.
        cookie_params = {}
    return cookie_params
def has_key(self, name):
    """
    Test the presence of a header.
    Comparisons are case-insensitive.

    :param name: Header name.
    :type name: str

    :returns: True if present, False otherwise.
    :rtype: bool
    """
    try:
        normalized = to_utf8(name)
        # Accept a full "Name: value" line as well as a bare name.
        if ":" in normalized:
            normalized = normalized.split(":", 1)[0]
        normalized = normalized.strip().lower()
    except AttributeError:
        # Non-string input lacks the string methods used above.
        raise TypeError("Expected str, got %s" % type(name))
    return normalized in self.__cache
def get(self, name, default = None):
    """
    Get a header by name.
    Comparisons are case-insensitive.

    When more than one header has the requested name,
    only the first one is returned.

    :param name: Header name.
    :type name: str

    :returns: Header value.
    :rtype: str
    """
    try:
        lookup = to_utf8(name)
        # Accept a full "Name: value" line as well as a bare name.
        if ":" in lookup:
            lookup = lookup.split(":", 1)[0]
        lookup = lookup.strip().lower()
    except AttributeError:
        # Non-string input lacks the string methods used above.
        raise TypeError("Expected str, got %s" % type(name))
    # The cache maps lowercase header names to merged values.
    return self.__cache.get(lookup, default)
def __init__(self, request, **kwargs):
    """
    All optional arguments must be passed as keywords.

    :param request: HTTP request that originated this response.
    :type request: HTTP_Request | HTTP_Raw_Request

    :param raw_response: (Optional) Raw bytes received from the server.
    :type raw_response: str

    :param status: (Optional) HTTP status code. Defaults to "200".
    :type status: str

    :param reason: (Optional) HTTP reason message.
    :type reason: str

    :param protocol: (Optional) Protocol name. Defaults to "HTTP".
    :type protocol: str

    :param version: (Optional) Protocol version. Defaults to "1.1".
    :type version: str

    :param raw_headers: (Optional) Raw HTTP headers.
    :type raw_headers: str

    :param headers: (Optional) Parsed HTTP headers.
    :type headers: HTTP_Headers | dict(str -> str) | tuple( tuple(str, str) )

    :param data: (Optional) Raw data that followed the response headers.
    :type data: str

    :param elapsed: (Optional) Time elapsed in milliseconds since
        the request was sent until the response was received.
    :type elapsed: int
    """

    # Initialize everything.
    # Protocol and version default to the originating request's values.
    self.__request = request
    self.__raw_response = None
    self.__raw_headers = None
    self.__status = None
    self.__reason = None
    self.__protocol = getattr(request, "protocol", "HTTP")
    self.__version = getattr(request, "version", "1.1")
    self.__headers = None
    self.__data = None
    self.__elapsed = None

    # Raw response bytes.
    # If given, parse them now; this may populate status, headers, etc.
    self.__raw_response = kwargs.get("raw_response", None)
    if self.__raw_response:
        self.__parse_raw_response(request)

    # Status line.
    # Keyword arguments override whatever the raw-response parse found.
    self.__status = to_utf8( kwargs.get("status", self.__status) )
    self.__reason = to_utf8( kwargs.get("reason", self.__reason) )
    self.__protocol = to_utf8( kwargs.get("protocol", self.__protocol) )
    self.__version = to_utf8( kwargs.get("version", self.__version) )

    # Fill in whichever of status/reason is missing from the other.
    if self.__status and not self.__reason:
        try:
            # NOTE(review): httplib.responses is keyed by int but
            # self.__status is a str here, so this lookup presumably
            # always raises KeyError and the reason stays unset —
            # consider int(self.__status). TODO confirm.
            self.__reason = httplib.responses[self.__status]
        except Exception:
            pass
    elif not self.__status and self.__reason:
        # Reverse lookup: find the status code matching the reason text.
        lower_reason = self.__reason.strip().lower()
        for code, text in httplib.responses.iteritems():
            if text.lower() == lower_reason:
                self.__status = str(code)
                break
    elif not self.__status:
        # Neither given: default to 200 OK.
        self.__status = "200"
        self.__reason = "OK"

    # HTTP headers.
    # Accept raw text and/or any pre-parsed form; normalise to
    # HTTP_Headers and keep raw and parsed forms in sync.
    self.__raw_headers = to_utf8( kwargs.get("raw_headers", self.__raw_headers) )
    self.__headers = kwargs.get("headers", self.__headers)
    if self.__headers:
        if not isinstance(self.__headers, HTTP_Headers):
            if hasattr(self.__headers, "items"):
                # Dictionary form.
                self.__headers = HTTP_Headers.from_items(sorted(self.__headers.items()))
            else:
                # Iterable of (name, value) pairs.
                self.__headers = HTTP_Headers.from_items(sorted(self.__headers))
        if not self.__raw_headers:
            self.__reconstruct_raw_headers()
    elif self.__raw_headers and not self.__headers:
        self.__parse_raw_headers()

    # Data.
    self.__data = to_utf8( kwargs.get("data", self.__data) )

    # Reconstruct the raw response if needed.
    if not self.__raw_response:
        self.__reconstruct_raw_response()

    # Response time.
    self.elapsed = kwargs.get("elapsed", None)

    # Call the parent constructor.
    super(HTTP_Response, self).__init__()
def __init__(self, url, headers = None, post_data = None, method = None,
             protocol = "HTTP", version = "1.1", referer = None,
             user_agent = None):
    """
    :param url: Absolute URL to connect to.
    :type url: str

    :param headers: HTTP headers, in raw or parsed form.
        Defaults to DEFAULT_HEADERS.
    :type headers:
        HTTP_Headers | dict(str -> str) | tuple( tuple(str, str) ) | str | None

    :param post_data: Optional POST data. If used, the Content-Type
        and Content-Length headers are populated automatically, unless
        already present in "headers".
    :type post_data: str | None

    :param method: HTTP method. Defaults to POST if post_data is used,
        or to GET if no post_data is used.
    :type method: str

    :param protocol: Protocol name.
    :type protocol: str

    :param version: Protocol version.
    :type version: str

    :param referer: Optional referer. Ignored if already present
        in "headers".
    :type referer: str

    :param user_agent: Optional user-agent string. Ignored if already
        present in "headers". Defaults to DEFAULT_USER_AGENT.
    :type user_agent: str | None
    """

    # Default method.
    if not method:
        method = "POST" if post_data else "GET"

    # HTTP method, protocol and version.
    self.__method = to_utf8(method.upper())     # Not sure about upper() here...
    self.__protocol = to_utf8(protocol.upper()) # Not sure about upper() here...
    self.__version = to_utf8(version)

    # POST data.
    self.__post_data = post_data

    # URL.
    self.__parsed_url = ParsedURL(url)
    self.__url = self.__parsed_url.url

    # Cookie header value.
    # Pulled from the global audit configuration when available;
    # best-effort: any failure simply means "no cookie".
    try:
        if conf is not None and conf.has_key('cookie'):
            cookie = conf.cookie
        else:
            cookie = None
    except Exception:
        cookie = None

    # User-Agent header value.
    # The literal string "random" (any case) selects a generated one.
    if user_agent:
        if user_agent.lower() == "random":
            user_agent = generate_user_agent()
        else:
            user_agent = to_utf8(user_agent)
    else:
        user_agent = self.DEFAULT_USER_AGENT

    # Referer header value.
    if referer:
        referer = to_utf8(referer)
    else:
        referer = None

    # HTTP headers.
    # Three cases: build defaults from scratch, parse a raw/dict/pairs
    # form, or accept an HTTP_Headers object as-is.
    if headers is None:
        headers = self.DEFAULT_HEADERS
        if version == "1.1":
            # HTTP/1.1 requires a Host header.
            headers = (("Host", self.__parsed_url.host),) + headers
        if post_data:
            headers = headers + (("Content-Type", "application/x-www-form-urlencoded"),
                                 ("Content-Length", str(len(post_data))))
        if cookie:
            headers = headers + (("Cookie", cookie),)
        if referer:
            headers = headers + (("Referer", referer),)
        if user_agent:
            headers = headers + (("User-Agent", user_agent),)
        headers = headers + (("Connection" , "close"),)  #ADD By BlackYe. Deal Caused by NewConnectionError
        headers = HTTP_Headers.from_items(headers)
    elif not isinstance(headers, HTTP_Headers):
        headers = to_utf8(headers)
        if type(headers) == str:    # raw headers
            headers = HTTP_Headers(headers)
        elif hasattr(headers, "items"):  # dictionary
            headers = HTTP_Headers.from_items(sorted(headers.items()))
        else:                       # dictionary items
            headers = HTTP_Headers.from_items(sorted(headers))
        # Inject cookie/referer/user-agent only when the caller's
        # headers did not already define them.
        if cookie or referer or user_agent:
            headers = headers.to_tuple()
            if cookie and not any(x[0].lower() == "cookie" for x in headers):
                headers = headers + (("Cookie", cookie),)
            if referer and not any(x[0].lower() == "referer" for x in headers):
                headers = headers + (("Referer", referer),)
            if user_agent and not any(x[0].lower() == "user-agent" for x in headers):
                headers = headers + (("User-Agent", user_agent),)
            headers = HTTP_Headers.from_items(headers)
    self.__headers = headers

    # Call the parent constructor.
    super(HTTP_Request, self).__init__()