def save_links(self, dork, dname, pnum, links_list):
    # Without an output directory, just print the links and bail out.
    if not self.get_outdir():
        for l in links_list:
            pp.p_log(l)
        return dname

    # Page 1 starts a fresh result directory; later pages append to it.
    append = True
    if pnum == 1:
        append = False
        dname = dutil.create_random_dir(self.get_outdir(), dname)
        dname = re.sub('^{}[/]?'.format(re.escape(self.get_outdir())), '',
                       dname)

    futil.dump_list(
        futil.join_names(dutil.get_dir(self.get_outdir(), dname),
                         "{}.info".format(dname)),
        ["dork: {}".format(dork)], append)
    futil.dump_list(
        futil.join_names(dutil.get_dir(self.get_outdir(), dname),
                         "{}.txt".format(dname)),
        links_list, append)

    if not self.is_output_silent():
        for l in links_list:
            pp.p_log(l)
    return dname
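# On-disk layout produced by save_links (illustrative; the directory name
# is randomized by dutil.create_random_dir):
#
#     <outdir>/<dname>/<dname>.info   # the dork that produced the links
#     <outdir>/<dname>/<dname>.txt    # one fetched URL per line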
def get_dork_path(self):
    dp = ''
    flag = False
    try:
        dp = self.get_conf().get('dork_path')
    except Exception:
        pp.p_error("Dork path does not exist.")
        flag = True
    else:
        if dp == '':
            pp.p_error("Dork path not defined.")
            flag = True
    if flag:
        raise Exception("Error in config file.")

    # Normalize the configured path: expand '~', keep absolute paths,
    # and resolve relative paths against the current working directory.
    if dp.startswith('~'):
        dp = os.path.expanduser(dp)
    elif dp.startswith('/'):
        pass
    elif dp.startswith('./'):
        cwd = os.path.realpath('.')
        dp = dutil.join_names(cwd, dp[2:])
    else:
        cwd = os.path.realpath('.')
        dp = dutil.join_names(cwd, dp)
    return dp
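# The normalization above largely mirrors the standard library; a minimal
# equivalent sketch (assuming dutil.join_names does nothing beyond path
# joining):
#
#     dp = os.path.abspath(os.path.expanduser(dp))
#
# os.path.abspath resolves both './foo' and bare relative paths against
# the current working directory, and expanduser handles the '~' prefix.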
def update_dorks_repo(self):
    pp.p_log("Building dork repo.")
    repo_url = self.get_conf().get('repo_url')
    pp.p_log("Fetching from '{}'".format(repo_url))
    tmpdir = dutil.create_temp_dir('f0x', 'repo_')
    Repo.clone_from(repo_url, tmpdir)
    pp.p_log("Done fetching.")

    # Drop repository metadata before merging into the local dork tree.
    rmdirs = ['.git']
    rmfiles = ['README.md', 'LICENSE']
    for i in rmdirs:
        try:
            g = dutil.get_dir(tmpdir, i)
        except Exception:
            pass
        else:
            dutil.rmdir(g)
    for i in rmfiles:
        try:
            f = futil.get_file(tmpdir, i)
        except Exception:
            pass
        else:
            os.remove(f)

    try:
        dutil.merge_dirs(tmpdir, self.get_dork_path())
    except Exception as e:
        pp.p_error(e)
        quit()
    pp.p_log("Dork repo updated.")
async def _record_proxy(self, proxies):
    while True:
        proxy = await proxies.get()
        if proxy is None:  # sentinel: the producer is done
            break
        proto = 'https' if 'HTTPS' in proxy.types else 'http'
        proxy_url = '%s://%s:%d' % (proto, proxy.host, proxy.port)
        self.add_proxy(proxy_url)
        if fox.is_verbose():
            pp.p_log("Found proxy: {}".format(pp.light_green(proxy_url)))
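# _record_proxy is a plain queue consumer; the proxy.types/.host/.port
# attributes suggest the queue is fed by ProxyBroker. A minimal driver
# sketch under that assumption (the wiring below is illustrative, not
# taken from this file):
#
#     import asyncio
#     from proxybroker import Broker
#
#     proxies = asyncio.Queue()
#     broker = Broker(proxies)
#     loop = asyncio.get_event_loop()
#     loop.run_until_complete(asyncio.gather(
#         broker.find(types=['HTTP', 'HTTPS'], limit=20),
#         self._record_proxy(proxies)))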
def list_dorks(self):
    # Default to every category and the full severity range (1-10).
    cat = self.get_categories() or [""]
    sev = self.get_severities() or range(1, 11)
    for c in cat:
        for d in self.get_dorks(c, sev):
            pp.p_log(d)
def execute(self):
    dorks = []
    if self.get_query():
        dorks += [self.get_query()]
    if self.get_process_dorksdb_flag():
        cat = []
        if self.get_categories():
            cat = self.get_categories()
        for c in cat:
            dorks += self.get_dorks(c, self.get_severities())
    if self.is_verbose():
        pp.p_debug("{} dorks to fetch.".format(len(dorks)))
    with ThreadPoolExecutor(max_workers=self.get_threads()) as executor:
        # Note: map returns a lazy iterator; since it is never consumed,
        # exceptions raised inside process_dork are silently discarded.
        executor.map(self.process_dork, dorks)
    self.make_report()
def get_dorks(self, category, sev_list):
    dorks = []
    dpath = None
    chome = ''
    if not sev_list:
        return dorks
    try:
        dpath = self.get_dork_path()
    except Exception as e:
        pp.p_error(e)
        return []
    if category:
        # A category like 'files.config' maps to the 'files/config' subdir.
        chome = re.sub(r'\.', '/', category)
        dpath = dutil.get_dir(dpath, chome)
    for i in dutil.get_files_list(dpath, True):
        with open(i, 'r') as dfile:
            d = None
            j = None
            for l in dfile:
                if l.lstrip().lower().startswith('googledork:'):
                    d = re.sub('^googledork:', '', l.lstrip().lower())
                    d = d.strip()
                elif l.lstrip().lower().startswith('severity:'):
                    j = re.sub('^severity:', '', l.lstrip().lower())
                    j = j.strip()
                elif (not d) and l.lstrip().lower().startswith('dork:'):
                    # 'dork:' is the fallback when no 'googledork:' is seen.
                    d = re.sub('^dork:', '', l.lstrip().lower())
                    d = d.strip()
                if d and j:
                    break
            if j and int(j) in sev_list and d:
                dorks.append(d)
    return dorks
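# Example of a dork file this parser accepts (the field names are the
# prefixes matched above; the values are illustrative):
#
#     GoogleDork: inurl:admin intitle:login
#     Severity: 8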
def list_repo_categories(self):
    dp = None
    try:
        dp = self.get_dork_path()
    except Exception as e:
        pp.p_error(e)
        quit()
    dl = dutil.get_dir_list(dp, True)
    if len(dl) == 0:
        pp.p_log("No dorks available; update the dork repo.")
        return
    for i in dl:
        # Turn the directory path back into a dotted category name.
        dc = re.sub('^{}[/]?'.format(re.escape(dp)), '', i)
        dc = re.sub('/', '.', dc)
        td = len(dutil.get_files_list(i, True))
        pp.p_log("Category: {}".format(dc))
        pp.p_log("Total dorks: {}\n".format(td), '**')
def process_dork(self, dork):
    if self.is_verbose():
        pp.p_debug("Processing dork: {}".format(dork))
    dname = "dork{}".format(int(random.random() * 1000))
    proxy = self.get_proxy_object()
    for p in range(1, self.get_no_of_pages() + 1):
        if not self.can_fetch_more():
            break
        # Randomized delay between requests to avoid rate limiting.
        time.sleep(
            rand.rand_between(self.get_delay_min(), self.get_delay_max()))
        response = None
        try:
            response = self.fetch_page_response(dork, p, proxy['proxy'])
        except Exception as e:
            self.release_proxy(proxy)
            gsrch.session_cleanup()
            pp.p_error(e)
            return
        if self.is_verbose():
            pp.p_debug("Fetched page: {}".format(p))
        links = gsrch.extract_urls(response)
        if self.is_verbose():
            pp.p_debug("Found {} url(s).".format(len(links)))
        dname = self.save_links(dork, dname, p, links)
        self.update_results_stats(len(links))
        if not gsrch.has_next(response):
            break
    self.release_proxy(proxy)
    gsrch.session_cleanup()
def make_report(self):
    if not self.get_outdir():
        return
    if not (self.get_outmode_json() or self.get_outmode_report()
            or self.get_outmode_xml()):
        return

    fdr = None
    if self.get_outmode_report():
        fdr = open(futil.join_names(self.get_outdir(), 'index.html'), 'w')
        fdr.write('<!DOCTYPE html> <html><head><title>f0x Report: links' +
                  '</title></head><body>')

    for ddir in dutil.get_dir_list(self.get_outdir()):
        dname = re.sub('^{}[/]?'.format(re.escape(self.get_outdir())), '',
                       ddir)
        links = futil.get_file_aslist(
            futil.get_file(ddir, "{}.txt".format(dname)))

        if self.get_outmode_json():
            with open(futil.join_names(ddir, '{}.json'.format(dname)),
                      'w') as fd:
                fd.write(json.dumps({'urls': links}))

        fdh = None
        fdx = None
        try:
            if self.get_outmode_report():
                fdh = open(futil.join_names(ddir, '{}.html'.format(dname)),
                           'w')
            if self.get_outmode_xml():
                fdx = open(futil.join_names(ddir, '{}.xml'.format(dname)),
                           'w')
        except Exception as e:
            pp.p_error(e)
        else:
            if fdh:
                fdh.write('<!DOCTYPE html> <html><head><title>dork urls' +
                          '</title></head><body>')
            if fdx:
                fdx.write('<?xml version="1.0" ?><urls>')
            for link in links:
                # Note: links are written verbatim; URLs containing '&' or
                # '<' would need escaping to keep the HTML/XML well-formed.
                if fdh:
                    fdh.write('<a href="{}">{}</a><br/>'.format(link, link))
                if fdx:
                    fdx.write('<url>{}</url>'.format(link))
            if fdx:
                fdx.write('</urls>')
            if fdh:
                fdh.write('</body></html>')
            if fdr:
                fdr.write(('dork: <a href="{}/{}.html">{}</a> urls ' +
                           'fetched: {}<br/>').format(dname, dname, dname,
                                                      len(links)))
        finally:
            if fdh:
                fdh.close()
            if fdx:
                fdx.close()

    if fdr:
        fdr.write('</body></html>')
        fdr.close()
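# Report layout produced by make_report (illustrative):
#
#     <outdir>/index.html               # links every per-dork HTML report
#     <outdir>/<dname>/<dname>.json     # {"urls": [...]}
#     <outdir>/<dname>/<dname>.html     # one anchor per fetched URL
#     <outdir>/<dname>/<dname>.xml      # <urls><url>...</url></urls>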
def _load_conf(self):
    conf.load('./f0x.config')
    if self.is_verbose():
        pp.p_debug("Loaded config file, keys: {}".format(
            self.get_conf().getKeys()))
def banner():
    print(pp.green(' .o88o. .o o.'))
    print(pp.green(' 888 `" .8\' `8.'))
    print(pp.green(' o888oo .8\' ' + pp.yellow('oooo ooo') + ' `8.'))
    print(pp.green(' 888 88 ' + pp.yellow('`88b..8P') + ' 88'))
    print(pp.green(' 888 88 ' + pp.yellow('Y888') + ' 88'))
    print(pp.green(' 888 `8. ' + pp.yellow('.o8"\'88b') + ' .8\''))
    print(pp.green(' o888o `8. ' + pp.yellow('o88\' 888o') + ' .8\''))
    print()
parser.add_argument('-oR', '--out-report',
                    help='Create HTML report with JSON-format results.',
                    dest='out_report', action='store_true')
parser.add_argument('--silent',
                    help='Do not print fetched links to stdout, ' +
                         'just save them to file.',
                    dest='out_silent', action='store_true')

args = parser.parse_args()

if args.version:
    print("{} v{}".format(pp.as_bold(pp.red(__NAME__)),
                          pp.blue(__VERSION__)))
    quit()

banner()


class F0x:
    def __init__(self, verbose=False):
        self._conn_per_proxy_count = None
        self._proxy_ptr = -1
        # Output-format flags, combined as a bitmask in self._outmode.
        self._out_fmt_json = 1 << 0
        self._out_fmt_xml = 1 << 1
        self._out_report = 1 << 2
        self._outmode = 0
        self._proxy_lock = threading.Lock()
        self._count_lock = threading.Lock()
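        # How the bitmask would typically be set and queried (accessors
        # such as get_outmode_json() presumably test these bits; the
        # lines below are an illustrative sketch, not from this file):
        #
        #     self._outmode |= self._out_fmt_json        # enable JSON
        #     if self._outmode & self._out_fmt_json:     # is JSON on?
        #         ...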