def analyzePost(siteparams, victim2, verbose, depth, file, authcookie, gui=None):
    """
    attack each POST parameter found for each target URL

    siteparams maps a victim URL to the list of POST parameters discovered
    for it.  Results are cached as JSON and returned as
    {victim: {param: ([payloads], [nullbytes])}}.
    """
    result = {}
    subdir = parseUrl(viclist[0])
    with Pool(processes=processes) as pool:
        for victim, paramlist in siteparams.items():
            sub = {}
            print("\n{0}[INFO]{1} post{4}|{2} Attacking {3}".format(color.RD, color.END + color.O, color.END, victim, color.END+color.RD))
            # mirror the GUI feedback the other attack vectors give
            if gui:
                gui.crawlerResultDisplay.append("\n[Info] post| Attacking {}".format(victim))
                gui.show()
            time.sleep(0.5)
            for param in paramlist:
                payloads = []
                nullbytes = []
                print("\n{0}[INFO]{1} post{4}|{2} Using {3}\n".format(color.RD, color.END + color.O, color.END, param, color.END+color.RD))
                if gui:
                    gui.crawlerResultDisplay.append("[Info] post| Using {}".format(param))
                    gui.show()
                time.sleep(1.0)
                # split the payload list so every worker process gets a chunk
                paysplit = listsplit(payloadlist, round(len(payloadlist)/processes))
                resetCounter()
                # mode 4 == POST vector; the parameter under test is passed
                # as "param=INJECT" for phase1 to substitute payloads into
                res = [pool.apply_async(phase1, args=(4, victim, victim2, "", None, "", verbose, depth, l, file, authcookie, param+"=INJECT", gui,)) for l in paysplit]
                for i in res:  # fetch results
                    tuples = i.get()
                    payloads += tuples[0]
                    nullbytes += tuples[1]
                # deduplicate hits collected across workers
                payloads = list(set(payloads))
                nullbytes = list(set(nullbytes))
                sub[param] = (payloads, nullbytes)
                if payloads and gui:
                    gui.crawlerResultDisplay.append("[+] Vulnerable!")
                    gui.crawlerResultDisplay.append("Payloads: {}\nNullbytes: {}".format(payloads, nullbytes))
                    gui.show()
            result[victim] = sub
    if not os.path.exists(cachedir+subdir):
        os.makedirs(cachedir+subdir)
    # was "spider-phase2.json", which clobbered analyzeParam's cache file;
    # phase numbering elsewhere: param=2, path=3, cookie=4, so post=5
    with open(cachedir+subdir+"spider-phase5.json", "w+") as f:
        json.dump(result, f, sort_keys=True, indent=4)
    return result
def analyzeCookie(victim2, verbose, depth, file, authcookie, gui=None):
    """
    attack each cookie set by each target URL

    Results are cached as JSON and returned as
    {victim: {cookie_key: ([payloads], [nullbytes])}}.
    Targets that set no cookies are skipped.
    """
    result = {}
    subdir = parseUrl(viclist[0])
    with Pool(processes=processes) as pool:
        for victim in viclist:
            sub = {}
            cookie = getCookie(victim)
            # nothing to inject into if the target never set a cookie
            if not cookie:
                print("\n{0}[INFO]{1} cookie{4}|{2} No cookies available for {3}.\n".format(color.RD, color.END + color.O, color.END, victim, color.END+color.RD))
                continue
            print("\n{0}[INFO]{1} cookie{4}|{2} Attacking {3}\n".format(color.RD, color.END + color.O, color.END, victim, color.END+color.RD))
            # mirror the GUI feedback the other attack vectors give
            if gui:
                gui.crawlerResultDisplay.append("\n[Info] cookie| Attacking {}".format(victim))
                gui.show()
            time.sleep(0.5)
            for key in cookie.keys():
                payloads = []
                nullbytes = []
                print("\n{0}[INFO]{1} cookie{4}|{2} Using {3}\n".format(color.RD, color.END + color.O, color.END, key, color.END+color.RD))
                if gui:
                    gui.crawlerResultDisplay.append("[Info] cookie| Using {}".format(key))
                    gui.show()
                time.sleep(1.0)
                # split the payload list so every worker process gets a chunk
                paysplit = listsplit(payloadlist, round(len(payloadlist)/processes))
                resetCounter()
                # mode 3 == cookie vector; the cookie dict and the key under
                # test are handed to phase1
                res = [pool.apply_async(phase1, args=(3, victim, victim2, "", cookie, key, verbose, depth, l, file, authcookie, "", gui,)) for l in paysplit]
                for i in res:  # fetch results
                    tuples = i.get()
                    payloads += tuples[0]
                    nullbytes += tuples[1]
                # deduplicate hits collected across workers
                payloads = list(set(payloads))
                nullbytes = list(set(nullbytes))
                sub[key] = (payloads, nullbytes)
                if payloads and gui:
                    gui.crawlerResultDisplay.append("[+] Vulnerable!")
                    gui.crawlerResultDisplay.append("Payloads: {}\nNullbytes: {}".format(payloads, nullbytes))
                    gui.show()
            result[victim] = sub
    if not os.path.exists(cachedir+subdir):
        os.makedirs(cachedir+subdir)
    with open(cachedir+subdir+"spider-phase4.json", "w+") as f:
        json.dump(result, f, sort_keys=True, indent=4)
    return result
def analyzePath(victim2, verbose, depth, file, authcookie, gui=None):
    """
    attack each URL using the path vector

    Targets are first reduced to their directory URLs (deduplicated), then
    each is attacked with every payload chunk in parallel.  Returns
    {victim: ([payloads], [nullbytes])} and caches the same dict as JSON.
    """
    result = {}
    subdir = parseUrl(viclist[0])
    with Pool(processes=processes) as pool:
        # collapse every victim down to its root directory; attacking the
        # file component over the path vector only yields false positives
        targets = []
        for url in viclist:
            pieces = url.split("://")
            segments = pieces[1].split("/")
            tail = segments[-1]
            # drop a trailing file name, but keep hidden directories
            # (".foo") and never touch the bare host itself
            if "." in tail and not tail.startswith(".") and tail != segments[0]:
                segments = segments[:-1]
            rebuilt = "{0}://{1}".format(pieces[0], "/".join(segments))
            if rebuilt not in targets:
                targets.append(rebuilt)
        for victim in targets:
            hits = []
            nulls = []
            print("\n{0}[INFO]{1} path{4}|{2} Attacking {3}\n".format(color.RD, color.END + color.O, color.END, victim, color.END+color.RD))
            if gui:
                gui.crawlerResultDisplay.append("\n[Info] path| Attacking {}".format(victim))
                gui.show()
            time.sleep(1.0)
            # one payload chunk per worker process
            chunks = listsplit(payloadlist, round(len(payloadlist)/processes))
            resetCounter()
            jobs = [pool.apply_async(phase1, args=(2, victim, victim2, "", None, "", verbose, depth, chunk, file, authcookie, "", gui,)) for chunk in chunks]
            for job in jobs:  # collect worker output
                outcome = job.get()
                hits.extend(outcome[0])
                nulls.extend(outcome[1])
            # deduplicate across workers
            hits = list(set(hits))
            nulls = list(set(nulls))
            result[victim] = (hits, nulls)
            if hits and gui:
                gui.crawlerResultDisplay.append("[+] Vulnerable!")
                gui.crawlerResultDisplay.append("Payloads: {}\nNullbytes: {}".format(hits, nulls))
                gui.show()
    if not os.path.exists(cachedir + subdir):
        os.makedirs(cachedir + subdir)
    with open(cachedir + subdir + "spider-phase3.json", "w+") as f:
        json.dump(result, f, sort_keys=True, indent=4)
    return result
def analyzeParam(siteparams, victim2, verbose, depth, file, authcookie, gui=None):
    """
    attack each GET parameter found for each target URL

    siteparams maps a victim URL to its discovered GET parameters.  Returns
    {victim: {param: ([payloads], [nullbytes])}} and caches the same dict
    as JSON.
    """
    result = {}
    subdir = parseUrl(viclist[0])
    with Pool(processes=processes) as pool:
        for victim, paramlist in siteparams.items():
            per_param = {}
            print("\n{0}[INFO]{1} param{4}|{2} Attacking {3}".format(color.RD, color.END + color.O, color.END, victim, color.END+color.RD))
            if gui:
                gui.crawlerResultDisplay.append("\n[Info] param| Attacking {}".format(victim))
                gui.show()
            time.sleep(0.5)
            for param in paramlist:
                # one payload chunk per worker process
                chunks = listsplit(payloadlist, round(len(payloadlist)/processes))
                print("\n{0}[INFO]{1} param{4}|{2} Using {3}\n".format(color.RD, color.END + color.O, color.END, param, color.END+color.RD))
                if gui:
                    gui.crawlerResultDisplay.append("[Info] param| Using {}".format(param))
                    gui.show()
                time.sleep(1.0)
                resetCounter()
                jobs = [pool.apply_async(phase1, args=(1, victim, victim2, param, None, "", verbose, depth, chunk, file, authcookie, "", gui,)) for chunk in chunks]
                hits = []
                nulls = []
                for job in jobs:  # collect worker output
                    outcome = job.get()
                    hits.extend(outcome[0])
                    nulls.extend(outcome[1])
                # deduplicate across workers
                hits = list(set(hits))
                nulls = list(set(nulls))
                per_param[param] = (hits, nulls)
                if hits and gui:
                    gui.crawlerResultDisplay.append("[+] Vulnerable!")
                    gui.crawlerResultDisplay.append("Payloads: {}\nNullbytes: {}".format(hits, nulls))
                    gui.show()
            result[victim] = per_param
    if not os.path.exists(cachedir+subdir):
        os.makedirs(cachedir+subdir)
    with open(cachedir+subdir+"spider-phase2.json", "w+") as f:
        json.dump(result, f, sort_keys=True, indent=4)
    return result