def _inject_to(r, value, payloads, pre_func=None):
    pds = _get_payload(payloads)
    if not pre_func:
        pre_func = encode
    rqs = RequestSet(_inject_query(r, value, pds, pre_func))
    if r.method in ("POST", "PUT"):
        rqs += RequestSet(_inject_post(r, value, pds, pre_func))
    if r.has_header("Cookie"):
        rqs += RequestSet(_inject_cookie(r, value, pds, pre_func))
    rqs += RequestSet(_inject_json(r, value, pds, pre_func))
    if not rqs:
        raise NoInjectionPointFound()
    return rqs
def _inject_multi(r, method, target, payloads, **kwds):
    if isinstance(r, Request):
        return method(r, target, payloads, **kwds)
    elif isinstance(r, RequestSet):
        return RequestSet(reduce(lambda x, y: x + y,
                                 [method(ro, target, payloads, **kwds)
                                  for ro in r]))
    else:
        raise TypeError("r must be a Request or a RequestSet")
def inject(r, to=None, at=None, payloads="default", **kwds):
    """Inject a request.

    This function creates a RequestSet from a Request where a part of the
    request is replaced with some payload. There are two ways to use this
    function: either to inject the value of a parameter or to inject at a
    specific location.

    When used with the 'to' parameter, Abrupt will look up the value in the
    query string, the request content and the cookies. It will then replace
    the value of the parameter with the payloads. If no valid injection
    point is found, an error is raised.

    When used with the 'at' parameter, Abrupt will look up the string in the
    whole request text and replace it with the payloads. If no valid
    injection point is found, an error is raised. If the string is found
    more than once, the function will suggest providing the 'choice' integer
    keyword.

    payloads can either be a list of the payloads to inject or a key of the
    global dictionary 'payloads'. Before being injected, each payload passes
    through the pre_func function, which is 'encode' by default.

    See also: payloads, inject_all, find_injection_points
    """
    rqs = RequestSet()
    if not to and not at:
        print error("I need some help here. Where should I inject? "
                    "Try 'help(inject)'")
    elif to and at:
        print error("Wow, too many parameters. It is either 'to' or 'at'.")
    elif to:
        if isinstance(to, (list, tuple)):
            for t in to:
                rqs.extend(_inject_multi(r, _inject_to, t, payloads, **kwds))
        else:
            rqs.extend(_inject_multi(r, _inject_to, to, payloads, **kwds))
    elif at:
        if isinstance(at, (list, tuple)):
            for a in at:
                rqs.extend(_inject_multi(r, _inject_at, a, payloads, **kwds))
        else:
            rqs.extend(_inject_multi(r, _inject_at, at, payloads, **kwds))
    return rqs
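# A minimal usage sketch for inject(). The URL, the "id" parameter and the
# "sqli" payload key are hypothetical; create() builds a Request from a URL,
# as used by _get_links() below.
#
#   r = create("http://target.example/item?id=1")
#   rqs = inject(r, to="id", payloads="sqli")  # one Request per payload
#   rqs()  # assuming a RequestSet executes its requests when called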
def spider(init, max=-1, ignore_qs=False, post_func=None, hosts=None):
    """Spider a request by following links.

    init      - the initial Request or RequestSet
    max       - the maximum number of requests to execute (-1 for no limit)
    ignore_qs - if True, treat URLs differing only by their query string
                as the same page
    post_func - a hook executed after each new page fetched
    hosts     - a list of authorised hosts to spider on. By default, only
                the hostnames of the initial request(s) are allowed.
    """
    nb = 0
    checked = []
    if isinstance(init, Request):
        q = deque([init])
        hs = [init.hostname]
    elif isinstance(init, RequestSet):
        q = deque(init)
        hs = list(set(init.extract("hostname")))
    else:
        raise TypeError("init must be a Request or a RequestSet")
    if hosts:
        hs += hosts
    try:
        while nb != max and q:
            to_add = []
            r = q.popleft()
            print str(len(checked)) + "/" + str(len(q)),
            clear_line()
            if not r.response:
                r()
            if r.response.content_type:
                if re.match(r'text/html', r.response.content_type):
                    to_add += _follow_redirect(r)
                    to_add += _get_links(r)
                else:
                    print "\nIgnoring", r.response.content_type
            checked.append(r)
            if post_func:
                post_func(r)
            for nr in to_add:
                if nr.hostname not in hs:
                    continue
                if not ignore_qs and any(nr == rc for rc in checked + list(q)):
                    continue
                if ignore_qs and any(nr.similar(rc)
                                     for rc in checked + list(q)):
                    continue
                q.append(nr)
            nb += 1
    except KeyboardInterrupt:
        print str(len(checked)) + "/" + str(len(q))
    return RequestSet(checked)
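# A hedged example of driving the spider. The URLs and the extra host are
# hypothetical; log_page stands for any callable taking the fetched Request.
#
#   def log_page(r):
#       print r.url
#
#   crawled = spider(create("http://target.example/"), max=50,
#                    hosts=["static.target.example"], post_func=log_page)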
def proxy(ip=None, port=None, rules=(ru_bypass_ssl, ru_forward_images,),
          alerter=None, persistent=True, pre_func=None, decode_func=None,
          forward_chunked=False, verbose=False):
    """Intercept all HTTP(S) requests on a port. Return a RequestSet of all
    the answered requests.

    ip              -- IP to listen on, by default conf.ip
    port            -- port to listen on, by default conf.port
    alerter         -- alerter triggered on each response, by default
                       GenericAlerter
    rules           -- set of rules for automated actions over requests
    pre_func        -- callback used before processing a request
    decode_func     -- callback used when (de)coding a request/response
                       content, by default decode()
    forward_chunked -- forward chunked responses without waiting for
                       their end
    persistent      -- keep the connection with your client persistent
    verbose         -- degree of verbosity:
                       False  -- only display requests undergoing the
                                 default action
                       1/True -- display all requests, including
                                 automated ones
                       2      -- display all requests with their full
                                 content
                       3      -- display all requests and responses with
                                 their full content

    See also: conf
    """
    if not ip:
        ip = conf.ip
    if not port:
        port = conf.port
    if not alerter:
        alerter = alert.GenericAlerter()
    if not rules:
        rules = []
    if not decode_func:
        decode_func = decode
    if not pre_func:
        pre_func = lambda x: x
    print "Running on", ip + ":" + str(port)
    print "Ctrl-C to interrupt the proxy..."
    httpd = ProxyHTTPServer((ip, port), ProxyHTTPRequestHandler)
    httpd.rules = rules
    httpd.auto = False
    httpd.pre_func = pre_func
    httpd.decode_func = decode_func
    httpd.alerter = alerter
    httpd.reqs = []
    httpd.forward_chunked = forward_chunked
    httpd.verbose = verbose
    httpd.persistent = persistent
    while True:
        try:
            httpd.serve_forever()
        except select.error:
            # The select syscall got interrupted by a window resize.
            pass
        except KeyboardInterrupt:
            print "Waiting for the threads to stop"
            httpd.shutdown()
            for t in threading.enumerate():
                if t.name.startswith("proxy"):
                    t.join()
            break
    return RequestSet(httpd.reqs)
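# A sketch of a typical proxy session. Port 8080 is arbitrary; everything
# else uses the defaults documented above.
#
#   reqs = proxy(port=8080, verbose=1)
#   # ... browse through the proxy, then Ctrl-C to stop it ...
#   # reqs now holds the answered requests as a RequestSet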
def fuzz_headers(r, payloads="default"):
    # TODO: adapt the payloads to each header tested
    rs = []
    for i, (k, v) in enumerate(r.headers):
        pds = _get_payload(payloads)
        for p in pds:
            r_new = r.copy()
            r_new.headers[i] = (k, p)
            r_new.injection_point = k
            r_new.payload = p
            rs.append(r_new)
    return RequestSet(rs)
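# Example run of fuzz_headers() with the default payloads; the URL is
# hypothetical. Each returned request records which header was fuzzed:
#
#   rs = fuzz_headers(create("http://target.example/"))
#   # rs[0].injection_point -> name of the fuzzed header
#   # rs[0].payload         -> payload injected into its value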
def _get_links(r):
    new_reqs = []
    if not has_lxml:
        raise Exception("To use the spider, you need lxml")
    try:
        root = lxml.html.fromstring(r.response.content)
        base_tag = root.xpath('//base')
        if base_tag and base_tag[0].get('href'):
            base = base_tag[0].get('href')
        else:
            base = r.url
        links = [x.get("href") for x in root.xpath("//a|//area")
                 if x.get('href')]
        for l in links:
            try:
                l.encode('ascii')
            except UnicodeEncodeError:
                # URL-encode non-ASCII links before parsing them.
                l = e(l.encode('utf-8'), safe='/')
            url_p = urlparse.urlparse(l)
            if url_p.scheme in ('http', 'https'):
                try:
                    new_reqs.append(create(l))
                except Exception:
                    print "Invalid link:", l
                    continue
            elif url_p.scheme in ('javascript', 'mailto') or l.startswith("#"):
                continue
            elif url_p.scheme == '' and url_p.path:
                # Relative link: resolve it against the base URL while
                # keeping the scheme and host of the original request.
                nr = r.copy()
                n_path = urlparse.urljoin(base, l)
                nr.url = urlparse.urlunparse(
                    urlparse.urlparse(r.url)[:2] +
                    urlparse.urlparse(n_path)[2:])
                new_reqs.append(nr)
            else:
                if url_p.scheme not in ("ftp", "irc", "xmpp", "mms"):
                    print "Unknown protocol:", l, url_p.scheme
    except lxml.etree.XMLSyntaxError:
        pass
    return RequestSet(new_reqs)
def inject_all(r, payloads="default"):
    ips = find_injection_points(r)
    if ips:
        return reduce(lambda x, y: x + y,
                      [inject(r, to=ip, payloads=payloads) for ip in ips])
    return RequestSet()
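# inject_all() simply chains find_injection_points() and inject(). A quick
# sketch, with a hypothetical URL:
#
#   rqs = inject_all(create("http://target.example/search?q=a&page=2"))
#   # -> every discovered parameter injected with every default payload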