def get_targets(self, args):
    """Collect target URLs and split them into chunk files for scanning.

    Sources (all optional, combined): a flat file of URLs, the project
    database via ``get_urls``, and a folder of previous ``*-dir.txt``
    results from which Status-200 paths are harvested (at most
    ``args.counter_max`` per file).

    Returns a list of ``{'target': <chunk file path>, 'output': <dir>}``
    dicts, one per chunk of ``args.group_size`` URLs.  Also sets
    ``self.path`` as a side effect (a format template with one ``{}``
    slot for the chunk index).
    """
    timestamp = str(int(time()))
    targets = []
    if args.import_file:
        # NOTE(review): the flag is import_file but the path comes from
        # args.file — presumably two related CLI options; confirm.
        with open(args.file) as fh:
            targets += [t for t in fh.read().split('\n') if t]
    if args.import_database:
        if args.rescan:
            targets += get_urls.run(self.db, scope_type="active")
        else:
            # Skip URLs this tool has already processed.
            targets += get_urls.run(self.db, scope_type="active",
                                    tool=self.name)
    if args.scan_folder:
        files = os.listdir(args.scan_folder)
        # BUG FIX: was str(args.counter_max), which made the
        # ``counter >= counter_max`` comparison below an int-vs-str
        # TypeError on Python 3 (and never true on Python 2).
        counter_max = int(args.counter_max)
        for f in files:
            # Expected result-file name shape:
            # <scheme>_<x>_<y>_<domain>_<port>-dir.txt (exactly 4 underscores)
            if f.count('_') == 4:
                counter = 0
                http, _, _, domain, port = f.split('-dir.txt')[0].split('_')
                with open(os.path.join(args.scan_folder, f)) as fh:
                    lines = fh.read().split('\n')
                for data in lines:
                    if '(Status: 200)' in data:
                        targets.append("{}://{}:{}{}".format(
                            http, domain, port, data.split(' ')[0]))
                        counter += 1
                        if counter >= counter_max:
                            break
    if args.output_path[0] == "/":
        # Strip the leading slash so os.path.join treats it as relative.
        self.path = os.path.join(self.base_config['PROJECT']['base_path'],
                                 args.output_path[1:], timestamp,
                                 args.output_path[1:] + "_{}")
    else:
        self.path = os.path.join(self.base_config['PROJECT']['base_path'],
                                 args.output_path, timestamp,
                                 args.output_path + "_{}")
    res = []
    i = 0
    for url_chunk in self.chunks(targets, args.group_size):
        i += 1
        # mkstemp returns an open OS-level fd; close it to avoid leaking
        # one descriptor per chunk (original discarded it).
        fd, file_name = tempfile.mkstemp()
        os.close(fd)
        with open(file_name, 'w') as fh:
            fh.write('\n'.join(url_chunk))
        if not os.path.exists(self.path.format(i)):
            os.makedirs(self.path.format(i))
        res.append({'target': file_name, 'output': self.path.format(i)})
    return res
def get_targets(self, args):
    """Build the per-URL work list for this tool.

    Targets come from ``args.url``, a newline-separated ``args.file``,
    and/or the project database via ``get_urls``.  Each target gets its
    own ``<sanitized-url>-dir.txt`` output file under a timestamped
    directory rooted at the project's base path.

    Returns a list of ``{"target": url, "output": path}`` dicts.
    """
    targets = []
    if args.url:
        targets.append(args.url)
    if args.file:
        # FIX: open via a context manager so the handle is closed
        # (original leaked it); skip blank lines as before.
        with open(args.file) as fh:
            targets += [u for u in fh.read().split("\n") if u]
    if args.import_database:
        if args.rescan:
            targets += get_urls.run(self.db, scope_type="active")
        else:
            # Skip URLs this tool has already processed.
            targets += get_urls.run(self.db, tool=self.name,
                                    scope_type="active")
    # A leading "/" would make os.path.join discard the base path, so
    # strip exactly one leading slash (original behavior).
    sub_path = (args.output_path[1:]
                if args.output_path[0] == "/" else args.output_path)
    output_path = os.path.join(
        self.base_config["PROJECT"]["base_path"],
        sub_path,
        str(int(time.time())),
    )
    if not os.path.exists(output_path):
        os.makedirs(output_path)
    res = []
    for t in targets:
        # Sanitize URL characters that are illegal/awkward in filenames.
        safe_name = (t.replace(":", "_").replace("/", "_")
                      .replace("?", "_").replace("&", "_"))
        res.append({
            "target": t,
            "output": os.path.join(output_path, safe_name + "-dir.txt"),
        })
    return res
def run(self, args):
    """Entry point: resolve the gobuster binary and brute-force targets.

    Requires ``args.wordlist``.  Targets come from ``args.url``,
    ``args.file`` (newline-separated), or the database
    (``args.import_database``) — first match wins.
    """
    if not args.wordlist:
        print("Wordlist is required!")
        return
    if not args.binary:
        self.binary = which.run('gobuster')
    else:
        self.binary = which.run(args.binary)
    if not self.binary:
        print(
            "Gobuster binary not found. Please explicitly provide path with --binary"
        )
        # BUG FIX: previously fell through and continued with
        # self.binary = None; abort, as the message tells the user to
        # re-run with --binary.
        return
    if args.url:
        self.brute_force_domain(args.url, args)
        self.Domain.commit()
    elif args.file:
        # FIX: close the file handle (original leaked it).
        with open(args.file) as fh:
            urls = fh.read().split('\n')
        for u in urls:
            if u:
                self.brute_force_domain(u, args)
        self.Domain.commit()
    elif args.import_database:
        urls = get_urls.run(self.db)
        # NOTE(review): plural brute_force_domains here vs singular
        # brute_force_domain above — verify both helpers exist.
        self.brute_force_domains(urls, args)
def get_targets(self, args):
    """Assemble the list of URLs to scan and their output file paths.

    Sources: a single ``args.url``, a newline-separated ``args.file``,
    and/or the project database via ``get_urls``.  Output files are
    named ``<sanitized-url>-dir.txt`` inside a fresh timestamped
    directory under the project base path.

    Returns a list of ``{'target': url, 'output': path}`` dicts.
    """
    targets = []
    if args.url:
        targets.append(args.url)
    if args.file:
        # FIX: use a context manager so the handle is closed
        # (original leaked it); blank lines are skipped as before.
        with open(args.file) as fh:
            targets += [u for u in fh.read().split('\n') if u]
    if args.import_database:
        if args.rescan:
            targets += get_urls.run(self.db, scope_type="active")
        else:
            # Skip URLs this tool has already processed.
            targets += get_urls.run(self.db, tool=self.name,
                                    scope_type="active")
    # Strip exactly one leading "/" so os.path.join keeps the base path
    # (a leading slash would make join discard everything before it).
    sub_path = (args.output_path[1:]
                if args.output_path[0] == "/" else args.output_path)
    output_path = os.path.join(self.base_config['PROJECT']['base_path'],
                               sub_path, str(int(time.time())))
    if not os.path.exists(output_path):
        os.makedirs(output_path)
    res = []
    for t in targets:
        # Replace URL characters that are unsafe in filenames.
        safe_name = t.replace(':', '_').replace('/', '_').replace(
            '?', '_').replace('&', '_')
        res.append({
            'target': t,
            'output': os.path.join(output_path, safe_name + "-dir.txt")
        })
    return res
def get_targets(self, args):
    """Gather target URLs and write them out in chunk files.

    Targets come from a flat file (``args.import_file`` /
    ``args.file``) and/or the project database via ``get_urls``.
    Each chunk of ``args.group_size`` URLs is written to a temp file,
    paired with a per-chunk output directory.

    Returns a list of ``{'target': <chunk file>, 'output': <dir>}``
    dicts.  Sets ``self.path`` (a template with one ``{}`` slot for the
    chunk index) as a side effect.  NOTE(review): unlike the sibling
    variants, there is no timestamp component, so reruns reuse the same
    output directories — confirm that is intended.
    """
    targets = []
    if args.import_file:
        # NOTE(review): flag is import_file but the path is args.file —
        # presumably two related CLI options; confirm.
        with open(args.file) as fh:
            targets += [t for t in fh.read().split('\n') if t]
    if args.import_database:
        if args.rescan:
            targets += get_urls.run(self.db, scope_type="active")
        else:
            # Skip URLs this tool has already processed.
            targets += get_urls.run(self.db, scope_type="active",
                                    tool=self.name)
    if args.output_path[0] == "/":
        # Strip the leading slash so os.path.join keeps the base path.
        self.path = os.path.join(self.base_config['PROJECT']['base_path'],
                                 args.output_path[1:],
                                 args.output_path[1:] + "_{}")
    else:
        self.path = os.path.join(self.base_config['PROJECT']['base_path'],
                                 args.output_path,
                                 args.output_path + "_{}")
    res = []
    i = 0
    for url_chunk in self.chunks(targets, args.group_size):
        i += 1
        # FIX: close the OS-level fd mkstemp returns (original leaked
        # one descriptor per chunk) and close the writer handle.
        fd, file_name = tempfile.mkstemp()
        os.close(fd)
        with open(file_name, 'w') as fh:
            fh.write('\n'.join(url_chunk))
        if not os.path.exists(self.path.format(i)):
            os.makedirs(self.path.format(i))
        res.append({'target': file_name, 'output': self.path.format(i)})
    return res
def run(self, args):
    """Entry point: resolve the gowitness binary, then process URLs.

    URLs come from a newline-separated file (``args.import_file`` /
    ``args.file``) or the database (``args.import_database``) — first
    match wins.  Nothing is processed when the binary cannot be found.
    """
    if not args.binary:
        self.binary = which.run('gowitness')
    else:
        self.binary = which.run(args.binary)
    if not self.binary:
        print(
            "Gowitness binary not found. Please explicitly provide path with --binary"
        )
        # Explicit abort; the original achieved the same skip implicitly
        # by chaining the source checks as elif branches.
        return
    if args.import_file:
        # FIX: close the file handle (original leaked it).  Blank lines
        # are passed through unchanged, as before.
        with open(args.file) as fh:
            urls = fh.read().split('\n')
        self.process_urls(urls, args)
    elif args.import_database:
        urls = get_urls.run(self.db)
        self.process_urls(urls, args)
def get_targets(self, args):
    """Collect target URLs and split them into chunk files.

    Sources (optional, combined): a flat URL file, the project database
    via ``get_urls``, and a folder of previous ``*-dir.txt`` results
    from which Status-200 paths are harvested (at most
    ``args.counter_max`` per file).  ``args.group_size == 0`` means
    "everything in one chunk".

    Returns a list of ``{"target": <chunk file>, "output": <results
    file path>}`` dicts.  Sets ``self.path`` (the timestamped output
    directory, created here) as a side effect.
    """
    timestamp = str(int(time()))
    targets = []
    if args.import_file:
        # NOTE(review): flag is import_file but the path is args.file —
        # presumably two related CLI options; confirm.
        with open(args.file) as fh:
            targets += [t for t in fh.read().split("\n") if t]
    if args.import_database:
        if args.rescan:
            targets += get_urls.run(self.db, scope_type="active")
        else:
            # Skip URLs this tool has already processed.
            targets += get_urls.run(self.db, scope_type="active",
                                    tool=self.name)
    if args.scan_folder:
        files = os.listdir(args.scan_folder)
        # BUG FIX: was str(args.counter_max), which made the
        # ``counter >= counter_max`` comparison below an int-vs-str
        # TypeError on Python 3.
        counter_max = int(args.counter_max)
        for f in files:
            # Expected result-file name shape:
            # <scheme>_<x>_<y>_<domain>_<port>-dir.txt (4 underscores)
            if f.count("_") == 4:
                counter = 0
                http, _, _, domain, port = f.split("-dir.txt")[0].split("_")
                with open(os.path.join(args.scan_folder, f)) as fh:
                    lines = fh.read().split("\n")
                for data in lines:
                    if "(Status: 200)" in data:
                        targets.append("{}://{}:{}{}".format(
                            http, domain, port, data.split(" ")[0]))
                        counter += 1
                        if counter >= counter_max:
                            break
    if args.output_path[0] == "/":
        # Strip the leading slash so os.path.join keeps the base path.
        self.path = os.path.join(
            self.base_config["PROJECT"]["base_path"],
            args.output_path[1:],
            timestamp,
        )
    else:
        self.path = os.path.join(self.base_config["PROJECT"]["base_path"],
                                 args.output_path, timestamp)
    if not os.path.exists(self.path):
        os.makedirs(self.path)
    res = []
    i = 0
    if args.group_size == 0:
        # One chunk containing everything.  ``or 1`` guards the empty
        # case: a chunk size of 0 could otherwise break self.chunks.
        args.group_size = len(targets) or 1
    for url_chunk in self.chunks(targets, args.group_size):
        i += 1
        # FIX: close the OS-level fd mkstemp returns (original leaked
        # one per chunk) and close the writer handle.
        fd, file_name = tempfile.mkstemp()
        os.close(fd)
        with open(file_name, "w") as fh:
            fh.write("\n".join(url_chunk))
        res.append({
            "target": file_name,
            "output": self.path + "-results-{}.txt".format(i)
        })
    return res