def dig_info(self):
    """Run `dig all` for every final subdomain and archive each result.

    Reads the final subdomain list, runs one dig command per domain
    (teeing into $WORKSPACE/screenshot/digs/), and submits an activity
    log describing every command issued. Commands are dispatched
    directly because there are too many to route through a module.
    """
    utils.print_good('Starting basic Dig')
    utils.make_directory(self.options['WORKSPACE'] + '/screenshot/digs')
    final_subdomains = utils.replace_argument(
        self.options, '$WORKSPACE/subdomain/final-$OUTPUT.txt')
    # run command directly instead of via a module because there are a lot of commands to run
    all_domains = utils.just_read(final_subdomains).splitlines()
    if self.options['DEBUG'] == 'True':
        # debug mode: only probe the first few hosts
        all_domains = all_domains[:10]
    custom_logs = {"module": self.module_name, "content": []}
    for part in list(utils.chunks(all_domains, 5)):
        for domain in part:
            cmd = utils.replace_argument(
                self.options,
                'dig all {0} | tee $WORKSPACE/screenshot/digs/{0}.txt'.format(domain))
            # bugfix: the recorded output path used to begin with a stray
            # 'tee ' prefix copied from the command template
            output_path = utils.replace_argument(
                self.options,
                '$WORKSPACE/screenshot/digs/{0}.txt'.format(domain))
            execute.send_cmd(self.options, cmd, '', '', self.module_name, True)
            custom_logs['content'].append(
                {"cmd": cmd, "std_path": '', "output_path": output_path,
                 "status": "Done"})
        # just wait a couple of seconds between batches without completely stopping the routine
        time.sleep(5)
    print(custom_logs)
    # submit a log
    utils.print_info('Update activities log')
    utils.update_activities(self.options, str(custom_logs))
def technology_detection(self):
    """Fingerprint web technologies on every final subdomain with webanalyze.

    Writes an https-prefixed host list, runs webanalyze (JSONL output),
    parses the result into a subdomain -> [technology] map, and merges
    that map into the workspace's main $COMPANY.json.
    """
    all_subdomain_path = utils.replace_argument(
        self.options, '$WORKSPACE/subdomain/final-$OUTPUT.txt')
    if not utils.not_empty_file(all_subdomain_path):
        return
    # add https:// prefix for all domains (keep the bare entries too)
    domains = utils.just_read(all_subdomain_path).splitlines()
    scheme_path = utils.replace_argument(
        self.options, '$WORKSPACE/recon/all-scheme-$OUTPUT.txt')
    utils.just_write(scheme_path, "\n".join(
        domains + [("https://" + x.strip()) for x in domains]))
    # really execute the command
    cmd = '$GO_PATH/webanalyze -apps $PLUGINS_PATH/apps.json -hosts $WORKSPACE/recon/all-scheme-$OUTPUT.txt -output json -worker 20 | tee $WORKSPACE/recon/$OUTPUT-technology.json'
    cmd = utils.replace_argument(self.options, cmd)
    output_path = utils.replace_argument(
        self.options, '$WORKSPACE/recon/$OUTPUT-technology.json')
    execute.send_cmd(self.options, cmd, output_path, '', self.module_name)
    utils.just_waiting(self.options, self.module_name, seconds=10, times=20)
    with open(output_path, encoding='utf-8') as o:
        data = o.read().splitlines()
    # parse the JSONL output: one record per line, mapping subdomain -> technologies
    techs = {}
    for line in data:
        try:
            jsonl = json.loads(line)
        except ValueError:
            # bugfix: narrowed from a bare `except: pass` that hid every error;
            # only malformed JSON lines are skipped now
            continue
        if not jsonl.get('matches'):
            continue
        hostname = jsonl.get('hostname')
        if not hostname:
            # bugfix: a record without a hostname used to raise AttributeError
            continue
        subdomain = hostname.replace('https://', '')
        apps = [x.get('app_name') for x in jsonl.get('matches')]
        if techs.get(subdomain):
            techs[subdomain] += apps
        else:
            techs[subdomain] = apps
    # update the main json and rewrite it
    main_json = utils.reading_json(utils.replace_argument(
        self.options, '$WORKSPACE/$COMPANY.json'))
    for i in range(len(main_json['Subdomains'])):
        sub = main_json['Subdomains'][i].get('Domain')
        if techs.get(sub):
            main_json['Subdomains'][i]["Technology"] = techs.get(sub)
    utils.just_write(utils.replace_argument(
        self.options, '$WORKSPACE/$COMPANY.json'), main_json, is_json=True)
def dirsearch(self):
    """Run dirsearch (quick wordlist) against every target domain.

    Targets come either from the direct-input file or from the main
    $COMPANY.json. Commands are dispatched in batches of three with a
    short pause between batches; executed commands are persisted to the
    workspace log afterwards.
    """
    utils.print_good('Starting dirsearch')
    if self.is_direct:
        targets = utils.just_read(self.is_direct).splitlines()
    else:
        # pull the domain list out of the main workspace JSON
        main_json = utils.reading_json(utils.replace_argument(
            self.options, '$WORKSPACE/$COMPANY.json'))
        targets = [item.get('Domain') for item in main_json['Subdomains']]
    if self.options['DEBUG'] == 'True':
        targets = targets[:5]
    custom_logs = {"module": self.module_name, "content": []}
    for batch in utils.chunks(targets, 3):
        for target in batch:
            # strip scheme and slashes so the local file name is safe;
            # this does not affect the scanned URL itself
            safe_name = target.replace('http://', '').replace('https://', '').replace('/', '-')
            scan_cmd = utils.replace_argument(
                self.options,
                "python3 $PLUGINS_PATH/dirsearch/dirsearch.py -b -e php,zip,aspx,js --wordlist=$PLUGINS_PATH/wordlists/really-quick.txt --simple-report=$WORKSPACE/directory/quick/{1}-dirsearch.txt -t 50 -u {0}".format(target, safe_name))
            report_path = utils.replace_argument(
                self.options,
                '$WORKSPACE/directory/quick/{0}-dirsearch.txt'.format(safe_name))
            stream_path = utils.replace_argument(
                self.options,
                '$WORKSPACE/directory/quick/std-{0}-dirsearch.std'.format(safe_name))
            execute.send_cmd(self.options, scan_cmd, report_path, stream_path, self.module_name)
            # mark as Done up front; the log is only submitted once every command has run
            custom_logs['content'].append({
                "cmd": scan_cmd,
                "std_path": stream_path,
                "output_path": report_path,
                "status": "Done",
            })
        # brief pause between batches rather than fully blocking the routine
        time.sleep(20)
    # submit a log
    utils.print_info('Update activities log')
    utils.force_done(self.options, self.module_name)
    # persist every executed command
    logfile = utils.replace_argument(self.options, '$WORKSPACE/log.json')
    utils.save_all_cmd(self.options, logfile)
def quick_gobuster(self):
    """Run gobuster with the short wordlist against every target domain.

    Targets come either from the direct-input file or from the main
    $COMPANY.json; commands run in batches of three with a pause
    between batches, then all executed commands are saved to the log.
    """
    utils.print_good('Starting gobuster for short wordlist')
    if self.is_direct:
        domains = utils.just_read(self.is_direct).splitlines()
    else:
        # pull the domain list out of the main workspace JSON
        main_json = utils.reading_json(utils.replace_argument(
            self.options, '$WORKSPACE/$COMPANY.json'))
        domains = [x.get('Domain') for x in main_json['Subdomains']]
    if self.options['DEBUG'] == 'True':
        domains = domains[:5]
    custom_logs = {"module": self.module_name, "content": []}
    for part in utils.chunks(domains, 3):
        for domain in part:
            # strip scheme and slashes so the local file name is safe;
            # this does not affect the scanned URL itself
            strip_domain = domain.replace(
                'http://', '').replace('https://', '').replace('/', '-')
            # bugfix: -o used to write to $WORKSPACE/directory/ while the
            # recorded output_path pointed at $WORKSPACE/directory/quick/ —
            # both now agree on the quick/ subdirectory
            cmd = '$GO_PATH/gobuster -k -q -e -x php,jsp,aspx,html,json -w $PLUGINS_PATH/wordlists/quick-content-discovery.txt -t 100 -o $WORKSPACE/directory/quick/{1}-gobuster.txt -s 200,301,307 -u "{0}" '.format(
                domain.strip(), strip_domain)
            cmd = utils.replace_argument(self.options, cmd)
            output_path = utils.replace_argument(
                self.options,
                '$WORKSPACE/directory/quick/{0}-gobuster.txt'.format(strip_domain))
            std_path = utils.replace_argument(
                self.options,
                '$WORKSPACE/directory/quick/std-{0}-gobuster.std'.format(strip_domain))
            execute.send_cmd(self.options, cmd, output_path, std_path, self.module_name)
            # set status to Done up front; the log is submitted once all commands were issued
            custom_logs['content'].append({"cmd": cmd, "std_path": std_path,
                                           "output_path": output_path, "status": "Done"})
        # just wait a couple of seconds between batches without completely stopping the routine
        time.sleep(20)
    # submit a log
    utils.print_info('Update activities log')
    utils.force_done(self.options, self.module_name)
    # just save commands
    logfile = utils.replace_argument(self.options, '$WORKSPACE/log.json')
    utils.save_all_cmd(self.options, logfile)
def proxy_parsing(options):
    """Prepare and sanity-check a proxychains configuration.

    If PROXY_FILE is given, use it as-is. Otherwise, if PROXY is a URL,
    inject a matching "<scheme> <host> <port>" entry into the bundled
    core/proxychains.conf (deduplicated), then verify the proxy command
    exists and actually changes the egress IP.

    Args:
        options: global options dict; reads PROXY, PROXY_FILE, PROXY_CMD, CWD.

    Exits the process when the proxy tooling is missing or the proxy
    does not alter the observed public IP.
    """
    # return early if a ready-made proxychains config was supplied
    if options['PROXY_FILE'] != "None":
        proxy_file = options['PROXY_FILE']
        utils.print_info("Detected proxychains file: {0}".format(proxy_file))
        return
    elif options['PROXY'] != "None":
        proxy_file = options['CWD'] + '/core/proxychains.conf'
        utils.print_info("Detected proxychains file: {0}".format(proxy_file))

    if options['PROXY'] != "None":
        proxy_parsed = urllib.parse.urlsplit(options['PROXY'])
        scheme = proxy_parsed.scheme
        netloc_parts = proxy_parsed.netloc.split(':')
        host = netloc_parts[0]
        # bugfix: a proxy URL without an explicit port used to raise IndexError
        port = netloc_parts[1] if len(netloc_parts) > 1 else ''
        proxy_element = "\n" + scheme + " " + host + " " + port

        raw_data = utils.just_read(proxy_file).splitlines()
        # bugfix: init_part/proxy_part were unbound (NameError) when the
        # config had no [ProxyList] marker; default to "whole file, no list"
        init_part, proxy_part = raw_data, []
        for i in range(len(raw_data)):
            if '[ProxyList]' in raw_data[i]:
                init_part = raw_data[:i]
                proxy_part = raw_data[i:]
                break

        # only append the proxy entry if it is not already present
        check_duplicate = False
        for item in proxy_part:
            if proxy_element.strip() in item.strip():
                check_duplicate = True
        if not check_duplicate:
            proxy_part.append(proxy_element)

        real_proxy_data = "\n".join(init_part + proxy_part)
        utils.just_write(proxy_file, real_proxy_data)

    if options['PROXY'] != "None" or options['PROXY_FILE'] != "None":
        if not shutil.which(options['PROXY_CMD'].split(' ')[0]):
            utils.print_bad("Look like proxy mode doesn't support your OS")
            sys.exit(0)
        else:
            # simple check that the proxy actually works: the proxied egress
            # IP must differ from the direct one
            utils.print_info("Testing proxy with simple curl command")
            if execute.run(options['PROXY_CMD'] + " curl -s ipinfo.io/ip") == execute.run("curl -s ipinfo.io/ip"):
                utils.print_bad("Look like your proxy not work properly")
                sys.exit(0)
def masscan(self):
    """Kick off a full-range masscan over the resolved IP list.

    Collects target IPs either from the direct-input IP file or from the
    main $COMPANY.json (IP Space included only in slow mode), writes the
    deduplicated list to the workspace, and dispatches one masscan
    command producing XML output.
    """
    utils.print_good('Starting masscan')
    time.sleep(1)
    if self.is_direct:
        ip_file = utils.replace_argument(
            self.options, '$WORKSPACE/subdomain/final-IP-$OUTPUT.txt')
        raw_ips = utils.just_read(ip_file).splitlines()
        # drop unresolved placeholders and deduplicate
        ip_list = list({addr for addr in raw_ips if addr != 'N/A'})
    else:
        main_json = utils.reading_json(utils.replace_argument(
            self.options, '$WORKSPACE/$COMPANY.json'))
        main_json['Modules'][self.module_name] = []
        resolved = [entry.get("IP") for entry in main_json['Subdomains']
                    if entry.get("IP") is not None]
        if self.options['SPEED'] == 'slow':
            # slow mode also sweeps the discovered IP space
            ip_list = resolved + main_json['IP Space']
        elif self.options['SPEED'] == 'quick':
            ip_list = resolved
        ip_list = {addr for addr in ip_list if addr != 'N/A'}
    if self.options['DEBUG'] == "True":
        utils.print_info("just testing 5 first host")
        ip_list = list(ip_list)[:5]
    utils.just_write(
        utils.replace_argument(self.options, '$WORKSPACE/subdomain/IP-$TARGET.txt'),
        "\n".join(ip_list))
    time.sleep(1)
    cmd = utils.replace_argument(
        self.options,
        "sudo masscan --rate 1000 -p0-65535 -iL $WORKSPACE/subdomain/IP-$TARGET.txt -oX $WORKSPACE/portscan/$OUTPUT-masscan.xml --wait 0")
    output_path = utils.replace_argument(
        self.options, '$WORKSPACE/portscan/$OUTPUT-masscan.xml')
    std_path = utils.replace_argument(
        self.options, '$WORKSPACE/portscan/std-$OUTPUT-masscan.std')
    execute.send_cmd(self.options, cmd, output_path, std_path, self.module_name)
def nmap_vuln(self):
    """Run an nmap service/vulnerability scan (vulners.nse) per target IP.

    IPs come from the direct-input file or from the main $COMPANY.json
    (slow mode also includes the IP Space ranges). Scans are dispatched
    two hosts at a time; after each pair the method blocks until the
    module reports done, then saves all executed commands to the log.
    """
    utils.print_good('Starting Nmap VulnScan')
    if self.is_direct:
        ip_list = utils.just_read(self.is_direct).splitlines()
        # drop unresolved 'N/A' placeholders and deduplicate
        ip_list = list(set([ip for ip in ip_list if ip != 'N/A']))
    else:
        main_json = utils.reading_json(utils.replace_argument(
            self.options, '$WORKSPACE/$COMPANY.json'))
        main_json['Modules'][self.module_name] = []
        if self.options['SPEED'] == 'slow':
            # slow mode additionally scans the whole discovered IP space
            ip_list = [x.get("IP") for x in main_json['Subdomains']
                       if x.get("IP") is not None] + main_json['IP Space']
        elif self.options['SPEED'] == 'quick':
            ip_list = [x.get("IP") for x in main_json['Subdomains']
                       if x.get("IP") is not None]
        ip_list = list(set([ip for ip in ip_list if ip != 'N/A']))
    if self.options['DEBUG'] == 'True':
        ip_list = list(ip_list)[:5]
    # scan 2 IPs at a time; raise the chunk size to parallelize more
    # (NOTE(review): an earlier comment said 5, but the code chunks by 2)
    for part in utils.chunks(ip_list, 2):
        for ip in part:
            cmd = 'sudo nmap --open -T4 -Pn -n -sSV -p- {0} --script $PLUGINS_PATH/vulners.nse --oA $WORKSPACE/vulnscan/{0}-nmap'.format(
                ip.strip())
            cmd = utils.replace_argument(self.options, cmd)
            output_path = utils.replace_argument(
                self.options, '$WORKSPACE/vulnscan/{0}-nmap.nmap'.format(ip.strip()))
            std_path = utils.replace_argument(
                self.options, '$WORKSPACE/vulnscan/std-{0}-nmap.std'.format(ip.strip()))
            execute.send_cmd(self.options, cmd, output_path,
                             std_path, self.module_name)
        # block until the previous batch is done, polling every 60 seconds
        while not utils.checking_done(self.options, module=self.module_name):
            time.sleep(60)
    # just save commands
    logfile = utils.replace_argument(self.options, '$WORKSPACE/log.json')
    utils.save_all_cmd(self.options, logfile)
def dig_info(self):
    """Run `dig all` for every final subdomain and archive each result.

    Reads the final subdomain list, dispatches one dig command per
    domain (teeing output into $WORKSPACE/screenshot/digs/), and submits
    an activity log describing every command issued.
    """
    utils.print_good('Starting basic Dig')
    utils.make_directory(self.options['WORKSPACE'] + '/screenshot/digs')
    final_subdomains = utils.replace_argument(
        self.options, '$WORKSPACE/subdomain/final-$OUTPUT.txt')
    # run commands directly instead of via a module because there are a lot of commands to run
    all_domains = utils.just_read(final_subdomains).splitlines()
    if self.options['DEBUG'] == 'True':
        # debug mode: only probe the first few hosts
        all_domains = all_domains[:10]
    custom_logs = {"module": self.module_name, "content": []}
    for part in list(utils.chunks(all_domains, 5)):
        for domain in part:
            cmd = utils.replace_argument(
                self.options,
                'dig all {0} | tee $WORKSPACE/screenshot/digs/{0}.txt'.
                format(domain))
            # NOTE(review): this recorded path keeps a stray 'tee ' prefix
            # copied from the command template — looks wrong; confirm before
            # relying on output_path downstream
            output_path = utils.replace_argument(
                self.options,
                'tee $WORKSPACE/screenshot/digs/{0}.txt'.format(domain))
            execute.send_cmd(self.options, cmd, '', '', self.module_name, True)
            # time.sleep(0.5)
            custom_logs['content'].append({
                "cmd": cmd,
                "std_path": '',
                "output_path": output_path,
                "status": "Done"
            })
        # just wait a couple of seconds between batches without completely stopping the routine
        time.sleep(5)
    print(custom_logs)
    # submit a log
    utils.print_info('Update activities log')
    utils.update_activities(self.options, str(custom_logs))
def parsing_argument(args):
    """Fold CLI arguments into the global options and dispatch the queries.

    Copies any provided config/query/query_list flags into the global
    options, lets config.config() finalize them, then runs either one
    query per line of the query-list file or the single query.
    """
    global options
    # only carry over flags that were actually supplied
    if args.config:
        options['config'] = args.config
    if args.query:
        options['query'] = args.query
    if args.query_list:
        options['query_list'] = args.query_list
    options = config.config(options, args)

    query_list_path = options.get('query_list')
    if query_list_path:
        # one query per line of the list file
        for current_query in utils.just_read(query_list_path).splitlines():
            options['query'] = current_query
            single_query(options)
    else:
        single_query(options)
def get_ipsace(self):
    """Extract IPs and CIDR ranges from the amass JSONL output.

    Reads amass.json line by line, collects every address/cidr pair into
    the workspace ipspace file, and deduplicates it via utils.clean_up.
    Returns early when the amass output is missing or empty.
    """
    amass_output = utils.replace_argument(
        self.options, '$WORKSPACE/subdomain/amass-$OUTPUT/amass.json')
    if not utils.not_empty_file(amass_output):
        return
    ips_file = utils.replace_argument(
        self.options, '$WORKSPACE/ipspace/$OUTPUT-ipspace.txt')
    data = []
    jsonl = utils.just_read(amass_output).splitlines()
    for line in jsonl:
        try:
            json_data = json.loads(line)
        except ValueError:
            # bugfix: a malformed JSONL line used to crash the whole module
            continue
        # bugfix: records without an 'addresses' key used to raise TypeError
        for item in json_data.get('addresses') or []:
            ip = item.get('ip')
            cidr = item.get('cidr')
            asn = item.get('asn')
            utils.print_info("Found ASN for {0} on CIDR {1}".format(
                asn, cidr))
            data.extend([ip, cidr])
    utils.just_append(ips_file, data)
    # deduplicate / tidy the collected list
    utils.clean_up(ips_file)
def parsing_to_csv(self):
    """Convert nmap XML results to CSV, merge them, and render a summary.

    For each XML file under vulnscan/details, runs the nmap XML parser to
    produce a CSV; then merges all per-host CSVs under a single header,
    writes the summary CSV, pretty-prints it with csvcut/csvlook, and
    hands the merged CSV to self.screenshots().
    """
    nmap_detail_path = utils.replace_argument(
        self.options, '$WORKSPACE/vulnscan/details')
    # create a csv for every nmap xml result
    for file in utils.list_files(nmap_detail_path, ext='xml'):
        cmd = "python3 $PLUGINS_PATH/nmap-stuff/nmap_xml_parser.py -f {0} -csv $WORKSPACE/vulnscan/details/$OUTPUT-nmap.csv".format(
            file)
        cmd = utils.replace_argument(self.options, cmd)
        csv_output = utils.replace_argument(
            self.options, '$WORKSPACE/vulnscan/details/$OUTPUT-nmap.csv')
        execute.send_cmd(self.options, cmd, csv_output, '', self.module_name)
        # give the parser a moment to write the csv before moving on
        time.sleep(5)
    # loop through all csv files and merge them under one header
    all_csv = "IP,Host,OS,Proto,Port,Service,Product,Service FP,NSE Script ID,NSE Script Output,Notes\n"
    for file in utils.list_files(nmap_detail_path, ext='csv'):
        all_csv += "\n".join(utils.just_read(file).splitlines()[1:])
    csv_summary_path = utils.replace_argument(
        self.options, '$WORKSPACE/vulnscan/summary-$OUTPUT.csv')
    utils.just_write(csv_summary_path, all_csv)
    # beautiful csv look
    cmd = "csvcut -c 1-7 $WORKSPACE/vulnscan/summary-$OUTPUT.csv | csvlook --no-inference | tee $WORKSPACE/vulnscan/std-$OUTPUT-summary.std"
    cmd = utils.replace_argument(self.options, cmd)
    # bugfix: the recorded path used to say portscan/ although the tee in the
    # command writes under vulnscan/
    output_path = utils.replace_argument(
        self.options, '$WORKSPACE/vulnscan/std-$OUTPUT-summary.std')
    execute.send_cmd(self.options, cmd, output_path, '', self.module_name)
    self.screenshots(all_csv)
def technology_detection(self):
    """Fingerprint web technologies on every final subdomain with webanalyze.

    Builds an https-prefixed host list, runs webanalyze with JSONL
    output, parses it into a subdomain -> [technology] map, and merges
    that map back into the workspace's main $COMPANY.json.
    """
    all_subdomain_path = utils.replace_argument(
        self.options, '$WORKSPACE/subdomain/final-$OUTPUT.txt')
    if not utils.not_empty_file(all_subdomain_path):
        return
    # add https:// prefix for all domains (keep the bare entries too)
    domains = utils.just_read(all_subdomain_path).splitlines()
    scheme_path = utils.replace_argument(
        self.options, '$WORKSPACE/recon/all-scheme-$OUTPUT.txt')
    utils.just_write(
        scheme_path,
        "\n".join(domains + [("https://" + x.strip()) for x in domains]))
    # really execute the command
    cmd = '$GO_PATH/webanalyze -apps $PLUGINS_PATH/apps.json -hosts $WORKSPACE/recon/all-scheme-$OUTPUT.txt -output json -worker 20 | tee $WORKSPACE/recon/$OUTPUT-technology.json'
    cmd = utils.replace_argument(self.options, cmd)
    output_path = utils.replace_argument(
        self.options, '$WORKSPACE/recon/$OUTPUT-technology.json')
    execute.send_cmd(self.options, cmd, output_path, '', self.module_name)
    utils.just_waiting(self.options, self.module_name, seconds=10, times=20)
    with open(output_path, encoding='utf-8') as o:
        data = o.read().splitlines()
    # parse the JSONL output to get the technologies per subdomain
    techs = {}
    for line in data:
        try:
            jsonl = json.loads(line)
        except ValueError:
            # bugfix: replaced a bare `except: pass` that silently swallowed
            # every error; only malformed JSON lines are skipped now
            continue
        if not jsonl.get('matches'):
            continue
        hostname = jsonl.get('hostname')
        if not hostname:
            # bugfix: a record without a hostname used to raise AttributeError
            continue
        subdomain = hostname.replace('https://', '')
        apps = [x.get('app_name') for x in jsonl.get('matches')]
        if techs.get(subdomain):
            techs[subdomain] += apps
        else:
            techs[subdomain] = apps
    # update the main json and rewrite it
    main_json = utils.reading_json(
        utils.replace_argument(self.options, '$WORKSPACE/$COMPANY.json'))
    for i in range(len(main_json['Subdomains'])):
        sub = main_json['Subdomains'][i].get('Domain')
        if techs.get(sub):
            main_json['Subdomains'][i]["Technology"] = techs.get(sub)
    utils.just_write(utils.replace_argument(self.options, '$WORKSPACE/$COMPANY.json'),
                     main_json,
                     is_json=True)
def linkfinder(self):
    """Run LinkFinder to extract endpoints from the JS of live hosts.

    Direct mode accepts either a file of hosts or a single domain string
    (with an https retry when the http probe yields nothing). Otherwise,
    in slow mode only, it walks the workspace's live-HTTP host list.
    """
    utils.print_good('Starting linkfinder')
    utils.make_directory(self.options['WORKSPACE'] + '/assets/linkfinder')

    def _scan(domain, strip_domain):
        # build and dispatch one linkfinder command; returns the report path
        cmd = 'python3 $PLUGINS_PATH/LinkFinder/linkfinder.py -i {0} -d -o cli | tee $WORKSPACE/assets/linkfinder/{1}-linkfinder.txt'.format(
            domain, strip_domain)
        cmd = utils.replace_argument(self.options, cmd)
        output_path = utils.replace_argument(
            self.options,
            '$WORKSPACE/assets/linkfinder/{0}-linkfinder.txt'.format(strip_domain))
        std_path = utils.replace_argument(
            self.options,
            '$WORKSPACE/assets/linkfinder/{0}-linkfinder.std'.format(strip_domain))
        execute.send_cmd(self.options, cmd, output_path, std_path, self.module_name)
        return output_path

    if self.is_direct:
        if utils.not_empty_file(self.is_direct):
            # direct input is a file of hosts: keep the PATH.
            # bugfix: the original stored the file CONTENT here and later
            # passed it to utils.not_empty_file, which takes a path
            http_domains = self.is_direct
        else:
            # direct input is a single domain string
            domain = self.is_direct
            strip_domain = utils.get_domain(domain)
            if strip_domain == domain:
                domain = 'http://' + domain
            output_path = _scan(domain, strip_domain)
            # if http is not accepted, just try https
            if not utils.not_empty_file(output_path):
                strip_domain = utils.get_domain(domain)
                # bugfix: original produced 'https://http://...' by prefixing
                # the already-schemed domain; rebuild from the bare host
                # (assumes utils.get_domain strips the scheme — TODO confirm)
                domain = 'https://' + strip_domain
                _scan(domain, strip_domain)
            return None
    else:
        if self.options['SPEED'] != 'slow':
            utils.print_good("Skipping linkfinder in quick mode")
            return None
        http_domains = utils.replace_argument(
            self.options, '$WORKSPACE/assets/http-$OUTPUT.txt')

    if utils.not_empty_file(http_domains):
        domains = utils.just_read(http_domains)
        for domain in domains.splitlines():
            strip_domain = utils.get_domain(domain)
            _scan(domain, strip_domain)
            # pace the scans instead of firing everything at once
            time.sleep(5)