def main():
    """Entry point: build the payload URL list, then probe every payload
    concurrently and tell the user to watch the callback-server logs."""
    if argv.server:
        # User supplied their own callback server URL.
        payload_urls = Payloader.generate_payloads(input_wordlist,
                                                   urler(argv.server))
    elif argv.auto:
        # Auto mode: expose a local PHP server through an ngrok tunnel.
        if ',' in argv.auto:
            # "serve_dir,public_path" form — append the public path to the tunnel URL.
            server_path, public_path = argv.auto.split(',')
            public_url = unender(
                ender(ngrok.connect(port=port), '/') +
                unstarter(public_path, '/'), '/')
        else:
            server_path = argv.auto
            public_url = unender(ngrok.connect(port=port), '/')
        # Free the port if occupied, then start PHP's built-in server in the
        # background, silencing all of its output.
        system(
            f"(cd {server_path}; fuser -k {port}/tcp 1>/dev/null 2>/dev/null; "
            f"php -S 0.0.0.0:{port} 1>/dev/null 2>/dev/null &)")
        payload_urls = Payloader.generate_payloads(input_wordlist,
                                                   urler(public_url))
    # Fire every payload from a thread pool sized by the CLI option.
    with ThreadPoolExecutor(max_workers=argv.threads) as pool:
        pool.map(try_payload, payload_urls)
    print(f"{Color.good} Success. Check your server logs for bounty!")
def netloc_generator(self, parsed_url, payloads: list) -> list:
    """Build netloc-based payload URLs for *parsed_url*.

    Hosts that look bogus (too many dots / too long), were already seen,
    or do not respond produce an empty list instead.
    """
    host = parsed_url.netloc
    skip_msg = f"{Color.bad} Skipping URL {colored(host, color='cyan')}!"

    def report(reason):
        # Shared notice for any connectivity failure below.
        print(f"{Color.bad} Skipping payload generation due to: {reason}")

    # Reject suspiciously deep or over-long hostnames outright.
    if host.count('.') >= 5 or len(host) > 40:
        print(skip_msg)
        return []
    # Process each netloc at most once per run.
    if self.Skipper.check_netloc(host):
        print(skip_msg)
        return []
    self.Skipper.add_netloc(host)
    # Probe the host; any failure means we generate nothing for it.
    try:
        get(urler(host))
    except ConnectionError:
        report("Connection Error")
        return []
    except Timeout:
        report("Request Timeout")
        return []
    except Exception as exc:
        report(str(exc.__class__.__name__))
        return []
    # Merge every non-empty payload into the live host.
    return [merge(host, payload) for payload in payloads if payload]
def main():
    """Interactive prompt loop: fetch stored rows, or insert/update one
    credential row built from the module-level `sql_dict`."""
    # Normalise the host once up front (urler adds a scheme, ender a slash).
    sql_dict['HOST'] = ender(urler(sql_dict['HOST']), "/")
    const = input(f"{ColorObj.information} Fetch data or edit (F/E)? ")
    # FIX: accept 'f' as well as 'F' — the Y/N/U prompt below already
    # compares case-insensitively via .upper(); this branch now matches.
    if const.upper() == 'F':
        print(f"{ColorObj.good} Fetching data ..")
        query_res = fetch_mysql()
        for i in query_res:
            print(i)
        print(f"{ColorObj.good} Fetched!")
        main()  # restart the prompt loop, then terminate this frame
        exit()
    # Edit path: show current values before asking what to do with them.
    for keys, values in sql_dict.items():
        print("{} ::: {}".format(keys, colored(values, color='green')))
    print("")
    cont = input(
        f"{ColorObj.information} Continue with above value (Y/N) or update value (U): "
    )
    if cont.upper() == 'Y':
        try:
            insert_mysql()
        except Exception as E:
            print(f"Error {E,E.__class__} occured while inserting data")
            exit(0)
    elif cont.upper() == 'U':
        up = input(f"{ColorObj.information} Enter hostname to update: ")
        update_mysql(up)
    else:
        # Anything else (including 'N') restarts the loop.
        main()
def async_generator(url: str):
    """Append payloaded variants of *url* to the global `to_try` list.

    Picks query-, path-, or domain-based generation depending on which
    components the parsed URL actually has.
    """
    global to_try
    if not url:
        return []
    parsed_url = urlparse(urler(url))

    def announce(kind):
        # Progress line shared by every generation branch.
        print(
            f"{Color.information} Generating {kind} for: {colored(url, color='cyan')}"
        )

    def collect(generator_fn):
        # FIX(dedup): the query branch previously repeated the path-generator
        # try/except verbatim; one helper now serves every guarded branch.
        # Failures are printed but never abort the remaining branches.
        try:
            for payloaded_url in generator_fn(parsed_url, payloads):
                to_try.append(payloaded_url)
        except Exception as E:
            print(E)

    if parsed_url.query:
        # A query string gets both query and path payloads.
        announce('query payload')
        collect(Payloader.query_generator)
        announce('path payload')
        collect(Payloader.path_generator)
    elif parsed_url.path:
        announce('path payload')
        collect(Payloader.path_generator)
    elif parsed_url.netloc:
        announce('domain payload')
        # NOTE: the original netloc branch had no try/except; kept unguarded.
        for payloaded_url in Payloader.netloc_generator(parsed_url, payloads):
            to_try.append(payloaded_url)
def insert_mysql():
    """Insert the current `sql_dict` values as one new row of `password`."""
    # Force https and a trailing slash on the stored host.
    host = ender(
        urler(sql_dict['HOST']).replace('http://', 'https://'), "/")
    statement = (
        "INSERT INTO password (HOST, USERNAME, EMAIL, PASSWORD, 2FA, BACKUP, OTHER) "
        "VALUES (%s,%s,%s,%s,%s,%s,%s)")
    # Parameterised values keep the driver responsible for escaping.
    row = (host, sql_dict['USERNAME'], sql_dict['EMAIL'],
           sql_dict['PASSWORD'], sql_dict['2FA'], sql_dict['BACKUP'],
           sql_dict['OTHER'])
    cursor.execute(statement, row)
    conn.commit()
def update_mysql(host):
    """Rewrite the `password` row whose HOST equals *host* using the
    current `sql_dict` values.

    The row is located by its stored host string; its primary-key ID is
    then used in the UPDATE's WHERE clause.
    """
    row_id = None
    for row in fetch_mysql():
        # Row layout observed from usage: (ID, HOST, ...).
        if row[1] == host:
            row_id = row[0]
            break
    if row_id is None:
        # FIX: previously fell through with where == "" and crashed on
        # int("") with an opaque ValueError; fail with a clear message.
        print(f"No stored entry found for host: {host}")
        return
    statement = ("UPDATE password SET HOST = %s, USERNAME = %s, EMAIL = %s, "
                 "PASSWORD = %s, 2FA = %s, BACKUP = %s, OTHER = %s "
                 "WHERE ID = %s")
    values = (ender(
        urler(sql_dict['HOST']).replace('http://', 'https://'),
        "/"), sql_dict['USERNAME'], sql_dict['EMAIL'], sql_dict['PASSWORD'],
              sql_dict['2FA'], sql_dict['BACKUP'], sql_dict['OTHER'],
              int(row_id))
    cursor.execute(statement, values)
    conn.commit()
def extract_from_url(self, url: str) -> bool:
    """Fetch *url*, run every extractor over its JavaScript, and return the
    collected findings as a tuple with falsy entries removed.

    On any failure the traceback is printed and None is returned (the
    outer try/except is the method's only error boundary).
    """
    try:
        output_list = []
        jsurl = urler(url)
        parsed_url = urlparse(jsurl)
        print(
            f"{Color.information} Getting data from {colored(jsurl, color='yellow', attrs=['bold'])}"
        )
        output_list.append(
            (f"URL: {colored(jsurl, color='yellow', attrs=['bold'])}\n\n"))
        # FIX: was an obfuscated lambda/comprehension (decompiler artifact)
        # performing exactly this assignment — default the scan domain to
        # this URL's host when the user supplied none.
        if parsed_url.netloc and not self.argv.domain:
            self.argv.domain = parsed_url.netloc
        if parsed_url.path.endswith('.js'):
            # Raw JS file: no HTML-derived artefacts to inspect.
            jstext = JSE.returnjs_fromjs(jsurl)
            jscomments, js_exlines, js_hidden, js_links, js_imgsrc = (
                None, None, None, None, None)
        else:
            jstext, js_other = JSE.returnjs_fromhtml(jsurl)
            jscomments, js_exlines, js_hidden, js_links, js_imgsrc = js_other
        # FIX: the original guarded with `if js_links or js_imgsrc` and then
        # iterated BOTH, so one of them being None while the other was
        # truthy raised TypeError; guard each collection independently.
        if js_links:
            for js_link in js_links:
                output_list.append(self.link_extract(js_link))
        if js_imgsrc:
            for js_src in js_imgsrc:
                output_list.append(self.link_extract(js_src, is_src=True))
        if jscomments:
            for jscomment in jscomments:
                jscomment = '"{}"'.format(jscomment.strip(' '))
                print(
                    f"{Color.good} Comments: {colored(jscomment, color='red', attrs=['bold'])}"
                )
                output_list.append([
                    manage_output(f"{jscomment} <--- Comments\n"), 'Comments'
                ])
        if js_exlines:
            for exline in js_exlines:
                output_list.append(self.exline_extract(exline['src']))
        if js_hidden:
            print(
                f"{Color.good} Hidden input parameters: {colored(js_hidden, color='red', attrs=['bold'])}"
            )
            output_list.append([
                manage_output(f"{js_hidden} <--- Hidden parameters\n"),
                'Hidden'
            ])
        # Each extractor appends its own finding; it may also set
        # self.jstext_continuer to signal "line consumed — skip the rest".
        # This loop replaces seven copy-pasted append/reset/continue stanzas.
        extractors = (self.domsource_extract, self.domsink_extract,
                      self.url_extract, self.path_extract,
                      self.subdomain_extract, self.custom_extract,
                      self.shannon_extract)
        for line in jstext:
            line = line.strip(' ').rstrip('{').rstrip(' ').lstrip(
                '}').lstrip(' ')
            for extractor in extractors:
                output_list.append(extractor(line))
                if self.jstext_continuer:
                    self.jstext_continuer = 0
                    break  # same effect as the original `continue`
        return tuple(filter(None, output_list))
    except Exception:
        print_exc()
# NOTE(review): this chunk is truncated and corrupted — the leading `help=`
# continues a `group.add_argument(` call whose opening is outside this view,
# and two input() spans were secret-redacted to `******` (they presumably
# read USERNAME/EMAIL and PASSWORD/2FA prompts — recover from the original
# repository; cannot be reconstructed safely here). Code left byte-identical.
help="Fetch from config file (optional, not implemented)") group.add_argument('-a', '--argv', action="store_true", help="Fetch from command line (optional, not implemented)") group.add_argument('-i', '--input', action="store_true", help="Fetch from input (optional, not implemented)") argv = parser.parse_args() mode = starter(argv) if mode == 'argv': sql_dict['HOST'] = ender( urler(argv.host).replace('http://', 'https://'), "/") sql_dict['USERNAME'] = argv.username sql_dict['EMAIL'] = argv.email sql_dict['PASSWORD'] = argv.password sql_dict['2FA'] = argv.two_factor sql_dict['BACKUP'] = argv.backup sql_dict['OTHER'] = argv.other elif mode == 'input': sql_dict['HOST'] = ender( (urler(input(f"{ColorObj.information} Enter Host: "))).replace( 'http://', 'https://'), "/") sql_dict['USERNAME'] = input(f"{ColorObj.information} Enter Username: "******"{ColorObj.information} Enter Email: ") sql_dict['PASSWORD'] = input(f"{ColorObj.information} Enter Password: "******"{ColorObj.information} Enter 2FA: ") sql_dict['BACKUP'] = input(f"{ColorObj.information} Enter Backup: ")