def gen(self, cwd, urls, proxy, headers, timeout, cookies, postdata, module, datalist):
    requestList = []
    domain = ''
    domains = []
    common = FileOp(cwd + '/lists/vhost-list.txt').reader()
    try:
        if len(datalist) > 1:
            domain = datalist[0]
            domains = FileOp(datalist[1]).reader()
        elif len(datalist) == 1:
            domain = datalist[0]
    except:
        print('vhost module: -dl [domain] [list of subdomains]')
        sys.exit(0)
    for url in urls:
        for i in common:
            new_h = headers.copy()
            new_h['Host'] = i
            req_get = RequestObject('reqID', "GET", proxy, new_h, timeout, cookies, url, postdata, module)
            requestList.append(req_get)
            if i != '':
                subd = i + '.' + domain
                new_h = headers.copy()
                new_h['Host'] = subd
                req_get = RequestObject('reqID', "GET", proxy, new_h, timeout, cookies, url, postdata, module)
                requestList.append(req_get)
        if len(datalist) > 1:
            for i in domains:
                new_h = headers.copy()
                new_h['Host'] = i
                req_get = RequestObject('reqID', "GET", proxy, new_h, timeout, cookies, url, postdata, module)
                requestList.append(req_get)
                for j in common:
                    new_h = headers.copy()
                    new_h['Host'] = j + '.' + i
                    req_get = RequestObject('reqID', "GET", proxy, new_h, timeout, cookies, url, postdata, module)
                    requestList.append(req_get)
                    new_h = headers.copy()
                    new_h['Host'] = j + '-' + i
                    req_get = RequestObject('reqID', "GET", proxy, new_h, timeout, cookies, url, postdata, module)
                    requestList.append(req_get)
    return requestList
def gen(self, reqs, module, rules):
    requestList = []  #Store generated request objects
    common = []
    domain = ''
    domains = []
    try:
        common = FileOp(rules['cwd'] + '/lists/vhost-list.txt').reader()
        if len(rules['datalist']) > 1:
            domain = rules['datalist'][0]
            domains = FileOp(rules['datalist'][1]).reader()
        elif len(rules['datalist']) == 1:
            domains = FileOp(rules['datalist'][0]).reader()
    except:
        print('vhost module: -dl [domain] [subs.txt] or -dl [subs.txt]')
    if len(rules['datalist']) > 1:
        for req in reqs:
            for dom in common:
                req_get = copy.deepcopy(req)
                req_get.update_reqID('reqID')
                req_get.update_module(module)
                head_get = req.headers.copy()
                head_get['Host'] = dom
                req_get.update_headers(head_get)
                requestList.append(req_get)
                req_get = copy.deepcopy(req)
                req_get.update_reqID('reqID')
                req_get.update_module(module)
                head_get = req.headers.copy()
                head_get['Host'] = dom + '.' + domain
                req_get.update_headers(head_get)
                requestList.append(req_get)
                req_get = copy.deepcopy(req)
                req_get.update_reqID('reqID')
                req_get.update_module(module)
                head_get = req.headers.copy()
                head_get['Host'] = dom + '-' + domain
                req_get.update_headers(head_get)
                requestList.append(req_get)
    if len(domains) > 0:
        for req in reqs:
            for dom in domains:
                req_get = copy.deepcopy(req)
                req_get.update_reqID('reqID')
                req_get.update_module(module)
                head_get = req.headers.copy()
                head_get['Host'] = dom
                req_get.update_headers(head_get)
                requestList.append(req_get)
    return requestList
def gen(self, reqs, module, rules):
    requestList = []  #Store generated request objects
    params = FileOp(rules['cwd'] + '/lists/parameters.txt').reader()
    headers = FileOp(rules['cwd'] + '/lists/all-headers.txt').reader()
    paramValue = 'discobiscuits'
    headerValue = '127.0.0.1'
    db = ''
    for req in reqs:
        u = UrlObject(req.url)
        #Brute GET and POST Parameters
        for param in params:
            req_get = copy.deepcopy(req)
            ndata = req.data.copy()
            ndata[param] = paramValue
            haxgod = u.u_q + '?' + self.getParamStr(ndata)
            req_get.update_url(haxgod)
            req_get.update_reqID('reqID')
            req_get.update_module(module)
            requestList.append(req_get)
            req_post = copy.deepcopy(req_get)
            req_post.update_url(req.url)
            req_post.update_data(ndata)
            req_post.update_method('POST')
            requestList.append(req_post)
            del ndata[param]
        #Brute Request Headers
        for head in headers:
            req_head = copy.deepcopy(req)
            heads = req.headers.copy()
            heads[head] = headerValue
            req_head.update_headers(heads)
            req_head.update_reqID('reqID')
            req_head.update_module(module)
            requestList.append(req_head)
            del heads[head]
        #Brute Cookies
        for cookie in params:
            req_c = copy.deepcopy(req)
            monster = req.cookies.copy()
            monster[cookie] = paramValue
            req_c.update_cookies(monster)
            req_c.update_reqID('reqID')
            req_c.update_module(module)
            requestList.append(req_c)
            del monster[cookie]
    return requestList
def gen(self, reqs, module, rules):
    requestList = []  #Store generated request objects
    data2 = FileOp(rules['cwd'] + '/lists/kiterunner-routes-small.txt').reader()
    #Use some different contexts
    api_context = [
        '1', '-1', '0',
        '33e2c9de-991a-11eb-a8b3-0242ac130003',
        '064ad4e2-31a1-4559-8d96-83d17feca4e4',
        '00000000-0000-0000-0000-000000000000',
        '1.0', '%00', 'NULL'
    ]
    #Build URL lists
    data = []
    for c in api_context:
        for route in data2:
            if route.startswith('/'):
                route = route[1:]
            newpath = route.replace('{kr}', c)
            data.append(newpath)
    for req in reqs:
        u = UrlObject(req.url)
        for directory in data:
            newurl = u.u_d + directory
            req_get = copy.deepcopy(req)
            req_get.update_url(newurl)
            req_get.update_reqID('reqID')
            req_get.update_module(module)
            requestList.append(req_get)
    return requestList
def gen(self, reqs, module, rules):
    requestList = []  #Store generated request objects
    usernames = rules['datalist']
    if len(usernames) == 0:
        print('basic: Use -dl [usernames]')
    passwords = FileOp(rules['cwd'] + '/lists/password.txt').reader()
    common = ['root', 'admin', 'test', 'demo']
    users = set(common + usernames)
    for req in reqs:
        for user in users:
            for passwd in passwords:
                data = user + ':' + passwd
                auth = base64.b64encode(data.encode())
                data = 'Basic ' + auth.decode()
                req_head = copy.deepcopy(req)
                heads = req.headers.copy()
                heads['Authorization'] = data
                req_head.update_headers(heads)
                req_head.update_reqID('reqID')
                req_head.update_module(module)
                requestList.append(req_head)
                del heads['Authorization']
    return requestList
def gen(self, reqs, module, rules):
    requestList = []  #Store generated request objects
    url = '.s3.amazonaws.com/'
    separators = ['.', '-', '_']
    if rules['datalist'] == None:
        print('s3bucket module: -dl [companyname] [related words]...')
        sys.exit(0)
    else:
        data = FileOp(rules['cwd'] + '/lists/buckets.txt').reader()
        data = data + rules['datalist']
        tempdata = []
        for a in rules['datalist']:
            for b in data:
                tempdata.append(a + b)
                tempdata.append(b + a)
                for c in separators:
                    tempdata.append(a + c + b)
                    tempdata.append(b + c + a)
        tempdata = set(tempdata)
        for req in reqs:
            for permutation in tempdata:
                req_get = copy.deepcopy(req)
                req_get.update_url('https://' + permutation + url)
                req_get.update_reqID('reqID')
                req_get.update_module(module)
                requestList.append(req_get)
    return requestList
def gen(self, reqs, module, rules):
    requestList = []  #Store generated request objects
    if rules['datalist'] == None:
        print('dirb-files: Provide a list of file extensions -dl html php')
        sys.exit(0)
    else:
        filetypes = rules['datalist']
        try:
            files = FileOp(rules['cwd'] + '/lists/files.xtcz').reader()
        except:
            files = ['fail.xtcz']
        print('File Extensions: ' + str(filetypes))
        for req in reqs:
            u = UrlObject(req.url)
            for ftype in filetypes:
                for directory in files:
                    newurl = u.u_d + directory.replace('xtcz', ftype)
                    req_get = copy.deepcopy(req)
                    req_get.update_url(newurl)
                    req_get.update_reqID('reqID')
                    req_get.update_module(module)
                    requestList.append(req_get)
    return requestList
def parse(self, filename):
    data = FileOp(filename).reader_s()
    databytes = bytearray()
    databytes.extend(data.encode())
    parsed_req = HTTPRequest(databytes)
    #If no error parsing then populate the object with data from file
    if (parsed_req.error_code == None):
        #========================
        #Parse headers and method
        #========================
        self.headers = dict(parsed_req.headers)
        self.method = parsed_req.command
        #==================
        #Parse request body
        #==================
        try:
            content_len = int(self.headers['Content-Length'])
        except:
            content_len = 0
        if content_len > 0:
            self.data = str(parsed_req.rfile.read(content_len).decode())
            self.data = ParseArguments().parseData(self.data)
        else:
            self.data = {}
        #===========================
        #Parse full url from request
        #===========================
        try:
            host = self.headers['Host']
        except:
            host = ''
        self.url = self.url + host
        if self.url.endswith('/') == False:
            self.url = self.url + parsed_req.path
        #===================================
        #Parse Cookies from provided headers
        #===================================
        try:
            cookiestr = self.headers['Cookie']
            del self.headers['Cookie']
            self.cookies = ParseArguments.parseCookies(cookiestr)
        except:
            self.cookies = {}
def __init__(self):
    self.requestList = []  #Store generated request objects
    self.data = FileOp('/root/scanomalie/lists/archive-file.txt').reader()
    self.prepend = [
        '_', '.', '~', '%20', '-', '.~', 'Copy%20of%20',
        'copy%20of%20', 'Copy_', 'Copy%20', 'Copy_of_',
        'Copy_(1)_of_', '%24',
    ]
    self.common = [
        'www', 'html', 'public', 'public_html', 'web', 'wwwroot',
        'website', 'root', 'src', 'source', 'data', 'dir', 'site',
        'index', 'htdoc', 'htdocs'
    ]
def __init__(self, cwd):
    self.rs = attr('reset')
    self.logo = FileOp(cwd + '/img/scanomaly.ico').reader()
    self.author = fg(8) + 'Author: ' + self.rs
    self.author += "© " + fg(2) + "Ciar" + fg(15) + "án McN" + fg(3)
    self.author += 'ally' + self.rs + " ~ " + fg(12) + 'www.securit.ie' + self.rs
    self.software = fg(8) + 'Software: ' + self.rs
    self.software += 'https://github.com/mak-/scanomaly.git'
    self.software += self.rs
    self.breaker = fg(8) + '================================================'
    self.breaker += self.rs
def gen(self, cwd, urls, proxy, headers, timeout, cookies, postdata, module):
    requestList = []  #Store generated request objects
    data = FileOp(cwd + '/lists/files.txt').reader()
    random.shuffle(data)  #Randomize our list (shuffles in place)
    newurl = ''
    for url in urls:
        u = UrlObject(url)
        for directory in data:
            newurl = u.u_d + directory
            req_get = RequestObject('reqID', 'GET', proxy, headers, timeout, cookies, newurl, postdata, module)
            requestList.append(req_get)
    return requestList
def gen(self, reqs, module, rules):
    requestList = []  #Store generated request objects
    data = FileOp(rules['cwd'] + '/lists/dirs.txt').reader()
    random.shuffle(data)  #Randomize our list (shuffles in place)
    newurl = ''
    for req in reqs:
        u = UrlObject(req.url)
        for directory in data:
            newurl = u.u_d + directory
            req_get = copy.deepcopy(req)
            req_get.update_url(newurl)
            req_get.update_reqID('reqID')
            req_get.update_module(module)
            requestList.append(req_get)
    return requestList
def gen(self, cwd, urls, proxy, headers, timeout, cookies, postdata, module, datalist):
    requestList = []  #Store generated request objects
    data = FileOp(cwd + '/lists/files.xtcz').reader()
    db = ''
    print('file extension: ' + str(datalist))
    for url in urls:
        u = UrlObject(url)
        for ftype in datalist:
            for directory in data:
                newurl = u.u_d + directory.replace('xtcz', ftype)
                req_get = RequestObject('reqID', "GET", proxy, headers, timeout, cookies, newurl, postdata, module)
                requestList.append(req_get)
    return requestList
def gen(self, cwd, urls, proxy, headers, timeout, cookies, postdata, module, datalist):
    requestList = []  #Store generated request objects
    try:
        data = FileOp(datalist[0]).reader()
    except:
        print('dirb-custom: Provide a wordlist using -dl [wordlist]')
        sys.exit(0)
    for url in urls:
        u = UrlObject(url)
        for directory in data:
            newurl = u.u_d + directory
            req_get = RequestObject('reqID', "GET", proxy, headers, timeout, cookies, newurl, postdata, module)
            requestList.append(req_get)
    return requestList
def gen(self, reqs, module, rules):
    requestList = []  #Store generated request objects
    try:
        custom_list = FileOp(rules['datalist'][0]).reader()
    except:
        print('dirb-custom: Provide a wordlist using -dl [wordlist]')
        sys.exit(0)
    for req in reqs:
        u = UrlObject(req.url)
        #For each item in custom list append to current url
        for directory in custom_list:
            newurl = u.u_d + directory
            req_get = copy.deepcopy(req)
            req_get.update_url(newurl)
            req_get.update_reqID('reqID')
            req_get.update_module(module)
            requestList.append(req_get)
    return requestList
def gen(self, cwd, urls, proxy, headers, timeout, cookies, postdata, module):
    requestList = []  #Store generated request objects
    data = FileOp(cwd + '/lists/parameters.txt').reader()
    paramValue = 'discobiscuits'
    db = ''
    for url in urls:
        u = UrlObject(url)
        for param in data:
            ndata = postdata.copy()
            ndata[param] = paramValue
            db = u.u_q + '?' + self.getParamStr(ndata)
            req_get = RequestObject('reqID', "GET", proxy, headers, timeout, cookies, db, ndata, module)
            requestList.append(req_get)
            req_post = RequestObject('reqID', "POST", proxy, headers, timeout, cookies, u.u_q, ndata, module)
            requestList.append(req_post)
            del ndata[param]
    return requestList
                iprange = ipaddr.pop()
                ipaddr = '.'.join(ipaddr)
                first = int(iprange.split('-').pop(0))
                second = int(iprange.split('-').pop()) + 1
                for x in range(first, second):
                    iplist.append(ipaddr + '.' + str(x))
            except Exception as e:
                print(e)
                print('Error: Bad IP range provided (127.0.0.20-30)')
        else:
            try:
                ip = str(ipaddress.IPv4Address(i))
                iplist.append(ip)
            except Exception as e:
                print(e)
                print('Error: Bad IP address provided (127.0.0.1)')
    for i in iplist:
        print('http://' + i + '/')
        print('https://' + i + '/')
elif args.IPList != None:
    iplist = FileOp(args.IPList).reader()
    for i in iplist:
        print('http://' + i + '/')
        print('https://' + i + '/')
else:
    print('Specify -a or -p')
#Get URL from CLI
if args.url != None:
    urldata = ParseArguments()
    newdata = urldata.parseUrlData(args.url)
    if len(newdata) > 0:
        for i in newdata:
            data.update({i: newdata[i]})
        if '?' in args.url:
            args.url = args.url.split('?')[0]
        args.url += urldata.parseUrlfromData(data)
    urls = [args.url]

#GET URLS from file
if args.urlist != None:
    urlist = FileOp(args.urlist).reader()
    urls = urls + urlist

print('Urls: ' + fg(1) + str(len(urls)) + rs)
print(fg(8) + '---' + rs + 'Loading Modules' + fg(8) + '---' + rs)

#Add RequestObjects to list
#New modules need to be added to this section with their arguments
#To add user CLI input use args.datalist (allows a list of arguments)
#Using -dl data1 data2 (specify strings or files etc)
#======================================================================
results = []
for z in run_mods:
    if z != None:
        #If the module name matches - pass the right arguments to gen
        if z.name == 'baseline':
def gen(self, reqs, module, rules):
    requestList = []  #Store generated request objects
    params = FileOp(rules['cwd'] + '/lists/parameters.txt').reader()
    headers = FileOp(rules['cwd'] + '/lists/all-headers.txt').reader()
    if len(rules['datalist']) == 0:
        print('fuzz: -dl [fuzz payload] [fuzz payload] ...')
        sys.exit(0)
    for fuzz in rules['datalist']:
        paramValue = fuzz
        headerValue = fuzz
        db = ''
        for req in reqs:
            u = UrlObject(req.url)
            #Brute all headers with fuzzstrings
            for head in headers:
                req_head = copy.deepcopy(req)
                heads = req.headers.copy()
                heads[head] = headerValue
                req_head.update_headers(heads)
                req_head.update_reqID('reqID')
                req_head.update_module(module)
                requestList.append(req_head)
                del heads[head]
        #Add custom fuzz to path
        for req in reqs:
            u = UrlObject(req.url)
            newurl = u.u_d + paramValue
            req_get = copy.deepcopy(req)
            req_get.update_url(newurl)
            req_get.update_reqID('reqID')
            req_get.update_module(module)
            requestList.append(req_get)
        #Add custom fuzz to path+?
        for req in reqs:
            u = UrlObject(req.url)
            newurl = u.u_d + '?' + paramValue
            req_get = copy.deepcopy(req)
            req_get.update_url(newurl)
            req_get.update_reqID('reqID')
            req_get.update_module(module)
            requestList.append(req_get)
        #Included headers coverage (like user agent etc)
        for req in reqs:
            req_head = copy.deepcopy(req)
            heads = req.headers.copy()
            newhead = copy.deepcopy(heads)
            for h, value in heads.items():
                req_head2 = copy.deepcopy(req)
                newhead[h] = headerValue
                req_head2.update_headers(newhead)  #send the copy with the fuzzed header
                req_head2.update_reqID('reqID')
                req_head2.update_module(module)
                requestList.append(req_head2)
                newhead[h] = value  #restore the original header value
    return requestList
def gen(self, reqs, module, rules):
    requestList = []  #Store generated request objects
    params = FileOp(rules['cwd'] + '/lists/parameters.txt').reader()
    headers = FileOp(rules['cwd'] + '/lists/all-headers.txt').reader()
    mini = FileOp(rules['cwd'] + '/lists/mini-fuzz.txt').reader()
    for fuzz in mini:
        paramValue = fuzz
        headerValue = fuzz
        db = ''
        for req in reqs:
            u = UrlObject(req.url)
            #Brute GET and POST Parameters
            for param in params:
                req_get = copy.deepcopy(req)
                ndata = req.data.copy()
                ndata[param] = paramValue
                haxgod = u.u_q + '?' + self.getParamStr(ndata)
                req_get.update_url(haxgod)
                req_get.update_reqID('reqID')
                req_get.update_module(module)
                requestList.append(req_get)
                req_post = copy.deepcopy(req_get)
                req_post.update_url(req.url)
                req_post.update_data(ndata)
                req_post.update_method('POST')
                requestList.append(req_post)
                del ndata[param]
            #Fuzz with param[]=value if PHP
            if u.last_ext == 'php':
                for param in params:
                    req_get = copy.deepcopy(req)
                    ndata = req.data.copy()
                    weird = param + '[]'
                    ndata[weird] = paramValue
                    haxgod = u.u_q + '?' + self.getParamStr(ndata)
                    req_get.update_url(haxgod)
                    req_get.update_reqID('reqID')
                    req_get.update_module(module)
                    requestList.append(req_get)
                    req_post = copy.deepcopy(req_get)
                    req_post.update_url(req.url)
                    req_post.update_data(ndata)
                    req_post.update_method('POST')
                    requestList.append(req_post)
                    del ndata[weird]
            #Brute Request Headers
            for head in headers:
                req_head = copy.deepcopy(req)
                heads = req.headers.copy()
                heads[head] = headerValue
                req_head.update_headers(heads)
                req_head.update_reqID('reqID')
                req_head.update_module(module)
                requestList.append(req_head)
                del heads[head]
            #Brute Cookies
            for cookie in params:
                req_c = copy.deepcopy(req)
                monster = req.cookies.copy()
                monster[cookie] = paramValue
                req_c.update_cookies(monster)
                req_c.update_reqID('reqID')
                req_c.update_module(module)
                requestList.append(req_c)
                del monster[cookie]
    return requestList
def __init__(self, reqList, cwd):
    self.agents = FileOp(cwd + '/lists/user-agents.txt').reader()
    self.reqList = reqList.copy()
def gen(self, reqs, module, rules):
    requestList = []  #Store generated request objects
    params = FileOp(rules['cwd'] + '/lists/parameters.txt').reader()
    headers = FileOp(rules['cwd'] + '/lists/all-headers.txt').reader()
    try:
        custom_list = FileOp(rules['datalist'][0]).reader()
    except:
        print('dirb-custom: Provide a wordlist using -dl [wordlist]')
        sys.exit(0)
    for fuzz in custom_list:
        paramValue = fuzz
        headerValue = fuzz
        db = ''
        for req in reqs:
            u = UrlObject(req.url)
            #Brute all headers with fuzzstrings
            for head in headers:
                req_head = copy.deepcopy(req)
                heads = req.headers.copy()
                heads[head] = headerValue
                req_head.update_headers(heads)
                req_head.update_reqID('reqID')
                req_head.update_module(module)
                requestList.append(req_head)
                del heads[head]
            #Brute GET and POST Parameters
            for param in params:
                req_get = copy.deepcopy(req)
                ndata = req.data.copy()
                ndata[param] = paramValue
                haxgod = u.u_q + '?' + self.getParamStr(ndata)
                req_get.update_url(haxgod)
                req_get.update_reqID('reqID')
                req_get.update_module(module)
                requestList.append(req_get)
                req_post = copy.deepcopy(req_get)
                req_post.update_url(req.url)
                req_post.update_data(ndata)
                req_post.update_method('POST')
                requestList.append(req_post)
                del ndata[param]
        #Add custom fuzz to path
        for req in reqs:
            u = UrlObject(req.url)
            newurl = u.u_d + paramValue
            req_get = copy.deepcopy(req)
            req_get.update_url(newurl)
            req_get.update_reqID('reqID')
            req_get.update_module(module)
            requestList.append(req_get)
        #Add custom fuzz to path+?
        for req in reqs:
            u = UrlObject(req.url)
            newurl = u.u_d + '?' + paramValue
            req_get = copy.deepcopy(req)
            req_get.update_url(newurl)
            req_get.update_reqID('reqID')
            req_get.update_module(module)
            requestList.append(req_get)
        #Included headers coverage (like user agent etc)
        for req in reqs:
            req_head = copy.deepcopy(req)
            heads = req.headers.copy()
            newhead = copy.deepcopy(heads)
            for h, value in heads.items():
                req_head2 = copy.deepcopy(req)
                newhead[h] = headerValue
                req_head2.update_headers(newhead)  #send the copy with the fuzzed header
                req_head2.update_reqID('reqID')
                req_head2.update_module(module)
                requestList.append(req_head2)
                newhead[h] = value  #restore the original header value
    return requestList
def gen(self, reqs, module, rules):
    requestList = []  #Store generated request objects
    params = FileOp(rules['cwd'] + '/lists/parameters.txt').reader()
    headers = FileOp(rules['cwd'] + '/lists/all-headers.txt').reader()
    if len(rules['datalist']) == 0:
        print('fuzz: -dl [fuzz payload] [fuzz payload] ...')
        sys.exit(0)
    for fuzz in rules['datalist']:
        paramValue = fuzz
        headerValue = fuzz
        db = ''
        #Add custom fuzz to path
        for req in reqs:
            u = UrlObject(req.url)
            newurl = u.u_d + paramValue
            req_get = copy.deepcopy(req)
            req_get.update_url(newurl)
            req_get.update_reqID('reqID')
            req_get.update_module(module)
            requestList.append(req_get)
        #Add custom fuzz to path+?
        for req in reqs:
            u = UrlObject(req.url)
            newurl = u.u_d + '?' + paramValue
            req_get = copy.deepcopy(req)
            req_get.update_url(newurl)
            req_get.update_reqID('reqID')
            req_get.update_module(module)
            requestList.append(req_get)
        #Included headers coverage (like user agent etc)
        for req in reqs:
            req_head = copy.deepcopy(req)
            heads = req.headers.copy()
            newhead = copy.deepcopy(heads)
            for h, value in heads.items():
                req_head2 = copy.deepcopy(req)
                newhead[h] = headerValue
                req_head2.update_headers(newhead)  #send the copy with the fuzzed header
                req_head2.update_reqID('reqID')
                req_head2.update_module(module)
                requestList.append(req_head2)
                newhead[h] = value  #restore the original header value
        mini_headers = [
            'X-Api-Version', 'User-Agent', 'Cookie', 'Referer',
            'Accept-Language', 'Accept-Encoding',
            'Upgrade-Insecure-Requests', 'Accept', 'Pragma',
            'X-Requested-With', 'X-CSRF-Token', 'Dnt',
            'Content-Length', 'Access-Control-Request-Method',
            'Access-Control-Request-Headers', 'Warning',
            'Authorization', 'TE', 'Accept-Charset', 'Accept-Datetime',
            'Expect', 'Forwarded', 'From', 'Max-Forwards',
            'Proxy-Authorization', 'X-Forwarded-For', 'Range',
            'Content-Deposition', 'X-Amz-Target', 'Content-Type',
            'Username', 'IP', 'IPaddress', 'Hostname'
        ]
        #Generate requests using mini_headers and value provided
        for req in reqs:
            req_head = copy.deepcopy(req)
            heads = req.headers.copy()
            for h in mini_headers:
                newheaders = copy.deepcopy(heads)
                newheaders[h] = headerValue
                req_new = copy.deepcopy(req_head)
                req_new.update_headers(newheaders)
                req_new.update_reqID('reqID')
                req_new.update_module(module)
                requestList.append(req_new)
    return requestList
        req.update_data(data)
        if args.allmethod:
            for method in all_methods:
                newreq = copy.deepcopy(req)
                newreq.update_method(method)
                urlList.append(newreq)
        else:
            for method in args.methods:
                newreq = copy.deepcopy(req)
                newreq.update_method(method)
                urlList.append(newreq)
        multi_url[urlList[0].url] = urlList

#Generate a list of urls
if args.urlist != None:
    urlist = FileOp(args.urlist).reader()
    if len(multi_url) == 0:
        for url in urlist:
            urlMulti = []
            req = RequestObject(url)
            req.update_headers(total_headers)
            req.update_proxy(proxies)
            req.update_cookies(cookies)
            req.update_data(data)
            if args.allmethod:
                for method in all_methods:
                    newreq = copy.deepcopy(req)
                    newreq.update_method(method)
                    urlMulti.append(newreq)
            else:
                for method in args.methods:
def gen(self, reqs, module, rules):
    d1 = ''
    d2 = ''
    self.data = FileOp(rules['cwd'] + '/lists/archive-file.txt').reader()
    for req in reqs:
        u = UrlObject(req.url)
        #If the URL ends in a file
        if u.lastfile != '':
            for i in self.data:
                d1 = u.u_q + i
                req_get = copy.deepcopy(req)
                req_get.update_url(d1)
                req_get.update_reqID('reqID')
                req_get.update_module(module)
                self.requestList.append(req_get)
                d2 = u.u_d + u.lastfile_ext + i
                req_get = copy.deepcopy(req)
                req_get.update_url(d2)
                req_get.update_reqID('reqID')
                req_get.update_module(module)
                self.requestList.append(req_get)
            for i in self.prepend:
                d3 = u.u_d + i + u.lastfile
                req_get = copy.deepcopy(req)
                req_get.update_url(d3)
                req_get.update_reqID('reqID')
                req_get.update_module(module)
                self.requestList.append(req_get)
                for j in self.data:
                    d4 = u.u_d + i + u.lastfile + j
                    req_get = copy.deepcopy(req)
                    req_get.update_url(d4)
                    req_get.update_reqID('reqID')
                    req_get.update_module(module)
                    self.requestList.append(req_get)
        #If the URL ends in a directory
        if u.lastpath != '':
            for i in self.data:
                d5 = u.u_d + u.lastpath + i
                req_get = copy.deepcopy(req)
                req_get.update_url(d5)
                req_get.update_reqID('reqID')
                req_get.update_module(module)
                self.requestList.append(req_get)
                d6 = u.u_dd + u.lastpath + i
                req_get = copy.deepcopy(req)
                req_get.update_url(d6)
                req_get.update_reqID('reqID')
                req_get.update_module(module)
                self.requestList.append(req_get)
            for i in self.common:
                for j in self.data:
                    d7 = u.u_d + i + j
                    req_get = copy.deepcopy(req)
                    req_get.update_url(d7)
                    req_get.update_reqID('reqID')
                    req_get.update_module(module)
                    self.requestList.append(req_get)
        #Otherwise try common web roots
        else:
            for i in self.common:
                for j in self.data:
                    d8 = u.u_d + i + j
                    req_get = copy.deepcopy(req)
                    req_get.update_url(d8)
                    req_get.update_reqID('reqID')
                    req_get.update_module(module)
                    self.requestList.append(req_get)
    return self.requestList
def gen(self, reqs, module, rules):
    requestList = []  #Store generated request objects
    bypass = FileOp(rules['cwd'] + '/lists/bypass.txt').reader()
    all_methods = [
        "GET", "POST", "OPTIONS", "PUT", "PATCH",
        "HEAD", "DELETE", "TRACE", "DEBUG", "AAA"
    ]
    newurl = ''
    headers = FileOp(rules['cwd'] + '/lists/all-headers.txt').reader()
    for req in reqs:
        u = UrlObject(req.url)
        #Try url/*fuzz*
        for fuzz in bypass:
            newurl = u.u_d + fuzz
            req_get = copy.deepcopy(req)
            req_get.update_url(newurl)
            req_get.update_reqID('reqID')
            req_get.update_module(module)
            requestList.append(req_get)
        #Try url(-1 dir)/*fuzz*
        for fuzz in bypass:
            newurl = u.u_dd + fuzz
            req_get = copy.deepcopy(req)
            req_get.update_url(newurl)
            req_get.update_reqID('reqID')
            req_get.update_module(module)
            requestList.append(req_get)
        #Try url/*fuzz*/PATH
        for fuzz in bypass:
            newurl = u.u_d + fuzz + u.lastpath
            req_get = copy.deepcopy(req)
            req_get.update_url(newurl)
            req_get.update_reqID('reqID')
            req_get.update_module(module)
            requestList.append(req_get)
        #Try url(-1 dir)/*fuzz*/PATH
        for fuzz in bypass:
            newurl = u.u_dd + fuzz + u.lastpath
            req_get = copy.deepcopy(req)
            req_get.update_url(newurl)
            req_get.update_reqID('reqID')
            req_get.update_module(module)
            requestList.append(req_get)
        #Try url/*fuzz**fuzz2*/PATH
        for fuzz in bypass:
            for fuzz2 in bypass:
                newurl = u.u_d + fuzz + fuzz2 + u.lastpath
                req_get = copy.deepcopy(req)
                req_get.update_url(newurl)
                req_get.update_reqID('reqID')
                req_get.update_module(module)
                requestList.append(req_get)
        #Try url/*fuzz*/PATH/*fuzz2*/
        for fuzz in bypass:
            for fuzz2 in bypass:
                newurl = u.u_d + fuzz + u.lastpath + fuzz2
                req_get = copy.deepcopy(req)
                req_get.update_url(newurl)
                req_get.update_reqID('reqID')
                req_get.update_module(module)
                requestList.append(req_get)
        #Try all headers for local ranges
        local_range = ['127.0.0.1', '10.0.0.1', '172.16.0.1', '192.168.0.1']
        for ip in local_range:
            for head in headers:
                req_head = copy.deepcopy(req)
                heads = req.headers.copy()
                heads[head] = ip
                req_head.update_headers(heads)
                req_head.update_reqID('reqID')
                req_head.update_module(module)
                requestList.append(req_head)
                del heads[head]
        #Try all methods
        for method in all_methods:
            req_m = copy.deepcopy(req)
            req_m.update_method(method)
            req_m.update_reqID('reqID')
            req_m.update_module(module)
            requestList.append(req_m)
        #Try known bypasses
        known = ['X-Original-URL', 'X-Rewrite-URL', 'X-Override-URL']
        for head in known:
            newurl = u.u_d
            req_head = copy.deepcopy(req)
            heads = req.headers.copy()
            heads[head] = '/' + u.lastpath
            req_head.update_headers(heads)
            req_head.update_reqID('reqID')
            req_head.update_module(module)
            requestList.append(req_head)
            del heads[head]
            req_head = copy.deepcopy(req)
            heads = req.headers.copy()
            heads[head] = '/' + u.lastpath + '/'
            req_head.update_headers(heads)
            req_head.update_reqID('reqID')
            req_head.update_module(module)
            requestList.append(req_head)
            del heads[head]
    return requestList
def gen(self, cwd, urls, proxy, headers, timeout, cookies, postdata, module):
    d1 = ''
    d2 = ''
    self.data = FileOp(cwd + '/lists/archive-file.txt').reader()
    random.shuffle(self.data)  #Randomize our list (shuffles in place)
    for url in urls:
        u = UrlObject(url)
        #If the URL ends in a file
        if u.lastfile != '':
            for i in self.data:
                d1 = u.u_q + i
                req_get = RequestObject('reqID', "GET", proxy, headers, timeout, cookies, d1, postdata, module)
                self.requestList.append(req_get)
                d2 = u.u_d + u.lastfile_ext + i
                req_get = RequestObject('reqID', "GET", proxy, headers, timeout, cookies, d2, postdata, module)
                self.requestList.append(req_get)
            for i in self.prepend:
                d1 = u.u_d + i + u.lastfile
                req_get = RequestObject('reqID', "GET", proxy, headers, timeout, cookies, d1, postdata, module)
                self.requestList.append(req_get)
                for j in self.data:
                    d2 = u.u_d + i + u.lastfile + j
                    req_get = RequestObject('reqID', "GET", proxy, headers, timeout, cookies, d2, postdata, module)
                    self.requestList.append(req_get)
        #If the URL ends in a directory
        if u.lastpath != '':
            for i in self.data:
                d1 = u.u_d + u.lastpath + i
                req_get = RequestObject('reqID', "GET", proxy, headers, timeout, cookies, d1, postdata, module)
                self.requestList.append(req_get)
                d2 = u.u_dd + u.lastpath + i
                req_get = RequestObject('reqID', "GET", proxy, headers, timeout, cookies, d2, postdata, module)
                self.requestList.append(req_get)
            for i in self.common:
                for j in self.data:
                    d1 = u.u_d + i + j
                    req_get = RequestObject('reqID', "GET", proxy, headers, timeout, cookies, d1, postdata, module)
                    self.requestList.append(req_get)
        #Otherwise try common web roots
        else:
            for i in self.common:
                for j in self.data:
                    d1 = u.u_d + i + j
                    req_get = RequestObject('reqID', "GET", proxy, headers, timeout, cookies, d1, postdata, module)
                    self.requestList.append(req_get)
    return self.requestList