def conclude(self):
    """Parse the masscan XML report and merge each host's open ports into $COMPANY.json."""
    output_path = utils.replace_argument(
        self.options, '$WORKSPACE/portscan/$OUTPUT-masscan.xml')
    # no scan output yet -> just render the HTML report
    if not utils.not_empty_file(output_path):
        return self.create_html_report()

    # parsing masscan xml
    root = ET.parse(output_path).getroot()
    masscan_json = {}
    for host in root.iter('host'):
        address = host[0].get('addr')
        open_ports = []
        for port in host[1]:
            open_ports.append(
                str(port.get('portid')) + "/" + str(port.get('protocol')))
        masscan_json[address] = open_ports

    company_path = utils.replace_argument(
        self.options, '$WORKSPACE/$COMPANY.json')
    main_json = utils.reading_json(company_path)
    # update the main json
    for record in main_json['Subdomains']:
        address = record.get('IP')
        if address != "N/A" and address in masscan_json:
            record['Ports'] = masscan_json.get(address)

    utils.just_write(company_path, main_json, is_json=True)
def result_parsing(self):
    """Parse per-IP masscan XML reports under portscan/ and merge discovered
    ports into each subdomain's Network.Ports in $COMPANY.json."""
    utils.print_good('Parsing XML for masscan report')
    utils.make_directory(self.options['WORKSPACE'] + '/portscan/parsed')
    result_path = utils.replace_argument(self.options, '$WORKSPACE/portscan')
    main_json = utils.reading_json(utils.replace_argument(
        self.options, '$WORKSPACE/$COMPANY.json'))

    for filename in glob.iglob(result_path + '/**/*.xml'):
        ip = filename.split('/')[-1].split('-masscan.xml')[0]
        masscan_report = NmapParser.parse_fromfile(filename)
        # Serialize the parser objects into plain dicts; the report object is
        # not directly JSON serializable (assumes attributes live in __dict__
        # -- TODO confirm against libnmap's object layout).
        report_json = json.loads(
            json.dumps(masscan_report, default=lambda o: o.__dict__))

        # store the raw json
        utils.just_write(utils.replace_argument(
            self.options,
            '$WORKSPACE/portscan/parsed/{0}.json'.format(ip)),
            report_json, is_json=True)

        # Bug fix: the original subscripted the *string* returned by
        # json.dumps() (masscan_report_json['_host']) which raises TypeError;
        # walk the parsed dict instead, keeping the original key layout.
        services = [x['__NmapHost__']['_services']
                    for x in report_json['_host']]
        ports = []
        for service in services:
            for element in service:
                ports.append({
                    "port": str(element['_portid']),
                    "service": str(element['_protocol']),
                })

        for i in range(len(main_json['Subdomains'])):
            if main_json['Subdomains'][i]['IP'] == ip:
                main_json['Subdomains'][i]['Network']['Ports'] = ports

    utils.just_write(utils.replace_argument(
        self.options, '$WORKSPACE/$COMPANY.json'), main_json, is_json=True)
def analyze(self, soup):
    """Extract IP/title entries from a search-result page and append one
    value per hit to self.output."""
    result = []
    # custom here
    divs = soup.findAll(True, {'class': ['SearchResult', 'result']})
    for div in divs:
        element = {
            'raw_ip': 'N/A',
            'result_title': 'N/A',
            'external_url': 'N/A',
        }
        # getting summary div
        link_sum = div.find_all("a", "SearchResult__title-text")[0]
        element['raw_ip'] = link_sum.get('href').replace('/ipv4/', '')
        # ip
        element['external_url'] = link_sum.get('href').replace('/ipv4/', '')
        element['result_title'] = link_sum.span.text.replace(
            '(', '').replace(')', '')
        utils.print_debug(self.options, element)
        result.append(element)

    output = []
    for item in result:
        # Bug fix: the keys were cross-matched (checked 'raw_ip' but
        # appended 'external_url' and vice versa); append the value that
        # was actually checked.
        if item.get('raw_ip'):
            output.append(item.get('raw_ip'))
        elif item.get('external_url'):
            output.append(item.get('external_url'))
        elif item.get('result_title'):
            output.append(item.get('result_title'))

    really_data = "\n".join(output)
    print(really_data)
    utils.just_write(self.output, really_data + "\n")
def create_skeleton_json(self):
    """Write the initial $COMPANY.json skeleton and announce it on Slack."""
    main_json = {
        "Company": utils.replace_argument(self.options, '$COMPANY'),
        "Main_domain": utils.replace_argument(self.options, '$TARGET'),
        "Whois": {
            "path": utils.replace_argument(
                self.options, '$WORKSPACE/info/$OUTPUT-whois.txt')
        },
        "Subdomains": [],
        "Modules": {},
        "IP Space": [],
    }
    outout = utils.replace_argument(self.options, '$WORKSPACE/$COMPANY.json')
    utils.just_write(outout, main_json, is_json=True)
    utils.check_output(outout)

    notification = {
        'title': "{0} | {1}".format(self.options['TARGET'], self.module_name),
        'content': 'Create skeleton json',
    }
    slack.slack_noti('status', self.options, mess=notification)
def sending(self, url):
    """Send a GET request, optionally store the raw HTML, analyze the page,
    and follow pagination when logged in."""
    # sending request and return the response
    utils.print_debug(self.options, url)
    # catch error when session timeout
    try:
        r = sender.send_get(self.options, url, self.cookies)
    except Exception:  # fix: narrowed from bare except; still best-effort
        r = False

    if r:
        response = r.text
        if self.options['store_content']:
            ts = str(int(time.time()))
            raw_file = self.options['raw'] + \
                "/fofa/{0}_{1}".format(utils.url_encode(
                    url.replace(self.base_url, '')).replace('/', '_'), ts)
            utils.just_write(raw_file, response)

        soup = utils.soup(response)
        self.analyze(soup)

        # checking if there is many pages or not
        page_num = self.check_pages(soup)
        # if you're logged in and have many results
        if page_num and self.logged_in and not self.options['disable_pages']:
            utils.print_info("Continue grab more pages")
            self.pages(page_num)
def analyze(self, response):
    """Normalize a sploitus JSON response into a list of exploit records.

    Returns False when nothing was found.
    """
    exploits = response.get('exploits')
    # Bug fix: 'exploits' may be missing entirely (None) -- the old code
    # crashed on len(None) before it could report "no exploit found".
    if not exploits:
        utils.print_info("No exploit found for {0}".format(self.query))
        return False
    utils.print_debug(self.options, len(exploits))

    # store raw json
    raw_file_path = self.options['raw'] + '/sploitus_{0}.json'.format(
        self.query.replace(' ', '_'))
    if self.options.get('store_content'):
        utils.just_write(raw_file_path, response, is_json=True)
        utils.print_debug(
            self.options,
            "Writing raw response to: {0}".format(raw_file_path))

    results = []
    for exploit in exploits:
        item = {
            'Query': self.query,
            'Title': exploit.get('title'),
            'Score': str(exploit.get('score')),
            'External_url': exploit.get('href'),
            'CVE': str(utils.get_cve(exploit.get('source'))),
            'ID': exploit.get('id'),
            'Published': exploit.get('published'),
            'Source': self.base_url + 'exploit?id=' + exploit.get('id'),
            'Warning': 'High',
            'Raw': raw_file_path,
        }
        utils.print_debug(self.options, item)
        results.append(item)
    return results
def prepare_input(self):
    """Resolve the target domain list and the file that stores it.

    Returns:
        (domains, http_domains_path) -- list of domains and the path of the
        file holding them.
    """
    if self.is_direct:
        # if direct input was file just read it
        if utils.not_empty_file(self.is_direct):
            domains = utils.just_read(self.is_direct).splitlines()
            http_domains_path = self.is_direct
        # get input string
        else:
            domains = [self.is_direct.strip()]
            # Bug fix: this is a destination *path*, not JSON content; the
            # original called utils.reading_json() here and then passed its
            # return value to just_write() as a filename.
            http_domains_path = utils.replace_argument(
                self.options, '$WORKSPACE/directory/domain-lists.txt')
            utils.just_write(http_domains_path, "\n".join(domains))
    else:
        http_domains_path = utils.replace_argument(
            self.options, '$WORKSPACE/assets/http-$OUTPUT.txt')
        # if assets module done return it
        if utils.not_empty_file(http_domains_path):
            domains = utils.just_read(http_domains_path).splitlines()
            return domains, http_domains_path

        # matching IP with subdomain
        main_json = utils.reading_json(
            utils.replace_argument(self.options, '$WORKSPACE/$COMPANY.json'))
        domains = [x.get('Domain') for x in main_json['Subdomains']]
        # same fix as above: build the path, don't parse it as JSON
        http_domains_path = utils.replace_argument(
            self.options, '$WORKSPACE/directory/domain-lists.txt')
        utils.just_write(http_domains_path, "\n".join(domains))

    return domains, http_domains_path
def conclude(self):
    """Record module status and the discovered IP space into $COMPANY.json."""
    utils.print_banner("Conclusion for {0}".format(self.module_name))
    company_path = utils.replace_argument(
        self.options, '$WORKSPACE/$COMPANY.json')
    main_json = utils.reading_json(company_path)
    main_json['Modules'][self.module_name] = utils.checking_done(
        module=self.module_name, get_json=True)

    ips_file = utils.replace_argument(
        self.options, '$WORKSPACE/ipspace/$OUTPUT-ipspace.txt')
    with open(ips_file, 'r') as stream:
        main_json['IP Space'] = stream.read().splitlines()

    # write that json again
    utils.just_write(company_path, main_json, is_json=True)

    # logging
    logfile = utils.replace_argument(self.options, '$WORKSPACE/log.json')
    utils.save_all_cmd(logfile)
    utils.print_banner("{0} Done".format(self.module_name))
def conclude(self):
    """Mark this module as done inside $COMPANY.json."""
    company_path = utils.replace_argument(
        self.options, '$WORKSPACE/$COMPANY.json')
    main_json = utils.reading_json(company_path)
    status = utils.checking_done(module=self.module_name, get_json=True)
    main_json['Modules'][self.module_name] = status
    # write that json again
    utils.just_write(company_path, main_json, is_json=True)
def create_skeleton_json(self):
    """Create the initial $COMPANY.json skeleton unless it already exists."""
    outout = utils.replace_argument(self.options, '$WORKSPACE/$COMPANY.json')
    if utils.not_empty_file(outout):
        utils.print_info("Modules is already done")
        return

    skeleton = {
        "Company": utils.replace_argument(self.options, '$COMPANY'),
        "Main_domain": utils.replace_argument(self.options, '$TARGET'),
        "Info": {},
        "Subdomains": [],
        "Modules": {},
        "IP Space": [],
    }
    utils.just_write(outout, skeleton, is_json=True)
    utils.check_output(outout)

    slack.slack_noti(
        'status',
        self.options,
        mess={
            'title': "{0} | {1}".format(
                self.options['TARGET'], self.module_name),
            'content': 'Create skeleton json',
        })
def run(self):
    """Execute every routine command for this module, wait for completion,
    then record Whois/Dig output paths in $COMPANY.json."""
    commands = execute.get_commands(
        self.options, self.module_name).get('routines')
    for command in commands:
        utils.print_good('Starting {0}'.format(command.get('banner')))
        # really execute it
        execute.send_cmd(self.options, command.get('cmd'),
                         command.get('output_path'),
                         command.get('std_path'), self.module_name)
        time.sleep(1)

    utils.just_waiting(self.options, self.module_name, seconds=10, times=5)

    company_path = utils.replace_argument(
        self.options, '$WORKSPACE/$COMPANY.json')
    main_json = utils.reading_json(company_path)
    for command in commands:
        cmd_text = command.get('cmd')
        if "Whois" in cmd_text:
            main_json["Info"]["Whois"] = {"path": command.get('output_path')}
        if "Dig" in cmd_text:
            main_json["Info"]["Dig"] = {"path": command.get('output_path')}

    utils.just_write(company_path, main_json, is_json=True)
def github(self, url):
    """Scrape a GitHub write-up page for links whose text matches self.query.

    Returns a list of result dicts (empty on request failure).
    """
    result = []
    r = sender.send_get(self.options, url, cookies=None)
    # Robustness fix: sender.send_get can fail and return a falsy value --
    # guard before touching r.status_code.
    if not r or r.status_code != 200:
        return result
    response = r.text

    # store raw html
    raw_file_path = self.options[
        'raw'] + '/write_up_github_{0}.html'.format(
            self.query.replace(' ', '_'))
    if self.options.get('store_content'):
        utils.just_write(raw_file_path, response)
        utils.print_debug(
            self.options,
            "Writing raw response to: {0}".format(raw_file_path))

    soup = utils.soup(response)
    # Custom here
    body = soup.find_all('article', 'markdown-body')[0]
    links = body.findChildren('a')
    for link in links:
        if self.query.lower() in link.text.lower():
            item = {
                'Query': self.query,
                'Title': link.text,
                'Content': link.text,
                'External_url': link.get('href'),
                'Source': url,
                'Warning': 'Write-Up',
                'Raw': raw_file_path,
            }
            utils.print_debug(self.options, item)
            result.append(item)
    return result
def conclude(self):
    """Merge masscan XML results into $COMPANY.json and save the command log."""
    output_path = utils.replace_argument(
        self.options, '$WORKSPACE/portscan/$OUTPUT-masscan.xml')
    # no scan output -> fall back to just rendering the HTML report
    if not utils.not_empty_file(output_path):
        return self.create_html_report()

    # parsing masscan xml
    root = ET.parse(output_path).getroot()
    masscan_json = {}
    for host in root.iter('host'):
        address = host[0].get('addr')
        masscan_json[address] = [
            str(entry.get('portid')) + "/" + str(entry.get('protocol'))
            for entry in host[1]
        ]

    company_path = utils.replace_argument(
        self.options, '$WORKSPACE/$COMPANY.json')
    main_json = utils.reading_json(company_path)
    # update the main json
    for subdomain in main_json['Subdomains']:
        address = subdomain.get('IP')
        if address != "N/A" and address in masscan_json:
            subdomain['Ports'] = masscan_json.get(address)

    # just save commands
    logfile = utils.replace_argument(self.options, '$WORKSPACE/log.json')
    utils.save_all_cmd(self.options, logfile)

    utils.just_write(company_path, main_json, is_json=True)
def conclude(self):
    """Match massdns A records to subdomains in $COMPANY.json and write the
    final IP list to final-IP-$OUTPUT.txt."""
    output_path = utils.replace_argument(
        self.options, '$WORKSPACE/subdomain/massdns-IP-$OUTPUT.txt')
    # Robustness fix: massdns may have produced nothing -- don't crash on
    # open() (the sibling create_ip_result guards the same way).
    if not utils.not_empty_file(output_path):
        return

    # matching IP with subdomain
    main_json = utils.reading_json(
        utils.replace_argument(self.options, '$WORKSPACE/$COMPANY.json'))
    with open(output_path, 'r') as handle:
        data = handle.read().splitlines()

    ips = []
    for line in data:
        # massdns "-o S" output: "<name>. A <ip>"
        if " A " in line:
            subdomain = line.split('. A ')[0]
            ip = line.split('. A ')[1]
            ips.append(ip)
            for i in range(len(main_json['Subdomains'])):
                if subdomain == main_json['Subdomains'][i]['Domain']:
                    main_json['Subdomains'][i]['IP'] = ip

    final_ip = utils.replace_argument(
        self.options, '$WORKSPACE/subdomain/final-IP-$OUTPUT.txt')
    with open(final_ip, 'w+') as fip:
        fip.write("\n".join(str(ip) for ip in ips))

    utils.just_write(utils.replace_argument(
        self.options, '$WORKSPACE/$COMPANY.json'), main_json, is_json=True)
def analyze(self, response):
    """Normalize a vulners search response into a list of result records.

    Returns False when the search had no hits.
    """
    warns = response.get('data').get('search')
    total = response.get('total')
    if total == 0:
        utils.print_info("No exploit found for {0}".format(self.query))
        return False

    # store raw json
    raw_file_path = self.options['raw'] + '/vulners_{0}.json'.format(
        self.query.replace(' ', '_'))
    if self.options.get('store_content'):
        utils.just_write(raw_file_path, response, is_json=True)
        utils.print_debug(
            self.options,
            "Writing raw response to: {0}".format(raw_file_path))

    results = []
    for warn in warns:
        source = warn.get('_source')
        # Robustness fix: some records carry no 'cvss' object at all; the
        # old chained .get() crashed on None.
        cvss = source.get('cvss') or {}
        item = {
            'Query': self.query,
            'Title': source.get('title'),
            'Score': cvss.get('score'),
            'External_url': source.get('href'),
            'CVE': source.get('id'),
            'ID': warn.get('_id'),
            'Published': source.get('published'),
            'Source': "https://vulners.com/cve/" + warn.get('_id'),
            'Warning': 'Info',
            'Raw': raw_file_path,
        }
        utils.print_debug(self.options, item)
        results.append(item)
    return results
def conclude(self):
    """Append discovered subdomains to $COMPANY.json and mark the module done."""
    self.unique_result()
    utils.print_banner("Conclusion for {0}".format(self.module_name))
    company_path = utils.replace_argument(
        self.options, '$WORKSPACE/$COMPANY.json')
    main_json = utils.reading_json(company_path)

    all_subdomain = utils.replace_argument(
        self.options, '$WORKSPACE/subdomain/final-$OUTPUT.txt')
    with open(all_subdomain, 'r') as s:
        subdomains = s.read().splitlines()
    main_json['Subdomains'].extend(
        {"Domain": subdomain} for subdomain in subdomains)

    main_json['Modules'][self.module_name] = utils.checking_done(
        module=self.module_name, get_json=True)
    utils.just_write(company_path, main_json, is_json=True)

    # logging
    logfile = utils.replace_argument(self.options, '$WORKSPACE/log.json')
    utils.save_all_cmd(logfile)
    utils.print_banner("{0}".format(self.module_name))
def gowithness(self, data):
    """Screenshot every host with gowitness and generate the HTML report."""
    # add http:// and https:// prefix to domain
    domains = []
    utils.make_directory(
        self.options['WORKSPACE'] + '/screenshot/screenshoots-gowitness')
    for item in data:
        host = utils.get_domain(item)
        domains.append("http://" + host)
        domains.append("https://" + host)

    http_file = utils.replace_argument(
        self.options, '$WORKSPACE/screenshot/$OUTPUT-hosts.txt')
    utils.just_write(http_file, "\n".join(domains))
    utils.clean_up(http_file)
    time.sleep(2)

    # screenshots with gowitness
    cmd = "$GO_PATH/gowitness file -s $WORKSPACE/screenshot/$OUTPUT-hosts.txt -t 30 --log-level fatal --destination $WORKSPACE/screenshot/screenshoots-gowitness/ --db $WORKSPACE/screenshot/screenshoots-gowitness/gowitness.db"
    execute.send_cmd(self.options, utils.replace_argument(self.options, cmd),
                     '', '', self.module_name)
    utils.just_waiting(self.options, self.module_name, seconds=10)

    cmd = "$GO_PATH/gowitness generate -n $WORKSPACE/screenshot/$OUTPUT-gowitness-screenshots.html --destination $WORKSPACE/screenshot/screenshoots-gowitness/ --db $WORKSPACE/screenshot/screenshoots-gowitness/gowitness.db"
    # Bug fix: the report is generated under screenshot/ (see -n flag above),
    # but the tracked path pointed at portscan/ -- track the real location.
    html_path = utils.replace_argument(
        self.options,
        "$WORKSPACE/screenshot/$OUTPUT-gowitness-screenshots.html")
    execute.send_cmd(self.options, utils.replace_argument(self.options, cmd),
                     html_path, '', self.module_name)
def run(self):
    """Run all routine commands for this module and record the Whois/Dig
    output locations in $COMPANY.json."""
    routines = execute.get_commands(
        self.options, self.module_name).get('routines')
    for routine in routines:
        utils.print_good('Starting {0}'.format(routine.get('banner')))
        # really execute it
        execute.send_cmd(self.options, routine.get('cmd'),
                         routine.get('output_path'), routine.get('std_path'),
                         self.module_name)
        time.sleep(1)

    utils.just_waiting(self.options, self.module_name, seconds=10, times=5)

    company_json = utils.replace_argument(
        self.options, '$WORKSPACE/$COMPANY.json')
    main_json = utils.reading_json(company_json)
    for routine in routines:
        for keyword in ("Whois", "Dig"):
            if keyword in routine.get('cmd'):
                main_json["Info"][keyword] = {
                    "path": routine.get('output_path')}
    utils.just_write(company_json, main_json, is_json=True)
def masscan(self):
    """Build the masscan command for direct (list or single host) or
    workspace input and launch it."""
    utils.print_good('Starting masscan')
    time.sleep(1)
    ip_list = self.prepare_input()

    if self.is_direct:
        if type(ip_list) == list:
            if self.options['DEBUG'] == "True":
                utils.print_info("just testing 5 first host")
                ip_list = list(ip_list)[:5]
            utils.just_write(
                utils.replace_argument(
                    self.options, '$WORKSPACE/subdomain/IP-$TARGET.txt'),
                "\n".join(ip_list))
            time.sleep(1)
            cmd = "sudo masscan --rate 10000 -p0-65535 -iL $WORKSPACE/subdomain/IP-$TARGET.txt -oX $WORKSPACE/portscan/$OUTPUT-masscan.xml --wait 0"
        else:
            # direct input was a single host string: scan it inline
            cmd = "sudo masscan --rate 10000 -p0-65535 {0} -oX $WORKSPACE/portscan/$OUTPUT-masscan.xml --wait 0".format(
                ip_list)
    else:
        cmd = "sudo masscan --rate 10000 -p0-65535 -iL $WORKSPACE/subdomain/final-IP-$TARGET.txt -oX $WORKSPACE/portscan/$OUTPUT-masscan.xml --wait 0"

    cmd = utils.replace_argument(self.options, cmd)
    output_path = utils.replace_argument(
        self.options, '$WORKSPACE/portscan/$OUTPUT-masscan.xml')
    std_path = utils.replace_argument(
        self.options, '$WORKSPACE/portscan/std-$OUTPUT-masscan.std')
    execute.send_cmd(self.options, cmd, output_path, std_path,
                     self.module_name)
def screenshots(self, csv_data):
    """Screenshot every host:port pair found in the masscan CSV output."""
    # add http:// and https:// prefix to domain
    if not csv_data:
        return
    targets = []
    for row in csv_data.splitlines()[1:]:
        columns = row.split(',')
        host, port = columns[0], columns[3]
        targets.append("http://" + host + ":" + port)
        targets.append("https://" + host + ":" + port)

    utils.just_write(
        utils.replace_argument(
            self.options, '$WORKSPACE/portscan/$OUTPUT-hosts.txt'),
        "\n".join(targets))

    # screenshots with gowitness
    utils.make_directory(
        self.options['WORKSPACE'] + '/portscan/screenshoots-massscan/')
    cmd = "$GO_PATH/gowitness file -s $WORKSPACE/portscan/$OUTPUT-hosts.txt -t 30 --log-level fatal --destination $WORKSPACE/portscan/screenshoots-massscan/ --db $WORKSPACE/portscan/screenshoots-massscan/gowitness.db"
    execute.send_cmd(self.options, utils.replace_argument(self.options, cmd),
                     '', '', self.module_name)
    utils.just_waiting(self.options, self.module_name, seconds=10)

    cmd = "$GO_PATH/gowitness generate -n $WORKSPACE/portscan/$OUTPUT-masscan-screenshots.html --destination $WORKSPACE/portscan/screenshoots-massscan/ --db $WORKSPACE/portscan/screenshoots-massscan/gowitness.db"
    html_path = utils.replace_argument(
        self.options,
        "$WORKSPACE/portscan/$OUTPUT-masscan-screenshots.html")
    execute.send_cmd(self.options, utils.replace_argument(self.options, cmd),
                     html_path, '', self.module_name)
def conclude(self):
    """Register each freshly found subdomain (with empty metadata) in
    $COMPANY.json."""
    self.unique_result()
    utils.print_banner("Conclusion for {0}".format(self.module_name))
    company_path = utils.replace_argument(
        self.options, '$WORKSPACE/$COMPANY.json')
    main_json = utils.reading_json(company_path)

    all_subdomain = utils.replace_argument(
        self.options, '$WORKSPACE/subdomain/final-$OUTPUT.txt')
    with open(all_subdomain, 'r+') as s:
        subdomains = s.read().splitlines()

    for subdomain in subdomains:
        record = {
            "Domain": subdomain,
            "IP": "N/A",
            "Technology": [],
            "Ports": [],
        }
        main_json['Subdomains'].append(record)

    utils.just_write(company_path, main_json, is_json=True)
    utils.print_banner("Done for {0}".format(self.module_name))
def analyze(self, soup):
    """Collect result links from the page and append them to self.output."""
    result = []
    # custom here
    divs = soup.find_all("div", "list_mod_t")
    for div in divs:
        # Robustness fix: skip entries that carry no <a> tag instead of
        # crashing with AttributeError on div.a.get(...).
        if div.a is None:
            continue
        print(div.a.get('href'))
        result.append(div.a.get('href'))
    utils.just_write(self.output, "\n".join(result) + "\n")
def technology_detection(self):
    """Run webanalyze over every subdomain and merge the detected technology
    stack into each subdomain entry of $COMPANY.json."""
    all_subdomain_path = utils.replace_argument(
        self.options, '$WORKSPACE/subdomain/final-$OUTPUT.txt')
    if not utils.not_empty_file(all_subdomain_path):
        return

    # add https:// prefix for all domain
    domains = utils.just_read(all_subdomain_path).splitlines()
    scheme_path = utils.replace_argument(
        self.options, '$WORKSPACE/recon/all-scheme-$OUTPUT.txt')
    utils.just_write(
        scheme_path,
        "\n".join(domains + [("https://" + x.strip()) for x in domains]))

    # really execute command
    cmd = '$GO_PATH/webanalyze -apps $PLUGINS_PATH/apps.json -hosts $WORKSPACE/recon/all-scheme-$OUTPUT.txt -output json -worker 20 | tee $WORKSPACE/recon/$OUTPUT-technology.json'
    cmd = utils.replace_argument(self.options, cmd)
    output_path = utils.replace_argument(
        self.options, '$WORKSPACE/recon/$OUTPUT-technology.json')
    # Consistency fix: register the real output path with the command
    # tracker (the sibling implementation already does this).
    execute.send_cmd(self.options, cmd, output_path, '', self.module_name)
    utils.just_waiting(self.options, self.module_name, seconds=10)

    with open(output_path, encoding='utf-8') as o:
        data = o.read().splitlines()

    # parsing output to get technology
    techs = {}
    for line in data:
        # Robustness fix: webanalyze can emit truncated/invalid JSON lines;
        # skip them instead of aborting the whole merge.
        try:
            jsonl = json.loads(line)
        except ValueError:
            continue
        if jsonl.get('matches'):
            subdomain = jsonl.get('hostname').replace('https://', '')
            apps = [x.get('app_name') for x in jsonl.get('matches')]
            if techs.get(subdomain):
                techs[subdomain] += apps
            else:
                techs[subdomain] = apps

    # update the main json and rewrite that
    main_json = utils.reading_json(
        utils.replace_argument(self.options, '$WORKSPACE/$COMPANY.json'))
    for i in range(len(main_json['Subdomains'])):
        sub = main_json['Subdomains'][i].get('Domain')
        if techs.get(sub):
            main_json['Subdomains'][i]["Technology"] = techs.get(sub)

    utils.just_write(utils.replace_argument(
        self.options, '$WORKSPACE/$COMPANY.json'), main_json, is_json=True)
def conclude(self, output):
    """Serialize a list of result dicts to CSV-like text in self.output.

    Commas inside values are escaped as %2C so each record stays one line.
    """
    # Robustness fix: an empty result list crashed on output[0].
    if not output:
        return
    head = ','.join([str(x) for x in output[0].keys()]) + "\n"
    body = ''
    for item in output:
        clean_body = [str(x).replace(',', '%2C') for x in item.values()]
        body += ','.join(clean_body) + "\n"
    utils.check_output(self.output)
    utils.just_write(self.output, head + body)
def unique_result(self):
    """Normalize per-tool subdomain outputs (gobuster, massdns), join and
    de-duplicate them into final-$OUTPUT.txt, then post the file to Slack."""
    # gobuster clean up
    utils.print_good('Unique result')
    go_raw = utils.replace_argument(
        self.options, '$WORKSPACE/subdomain/raw-$OUTPUT-gobuster.txt')
    if utils.not_empty_file(go_raw):
        # gobuster lines look like "Found: <domain>" -- keep the 2nd field
        go_clean = [
            x.split(' ')[1] for x in utils.just_read(go_raw).splitlines()
        ]
        go_output = utils.replace_argument(
            self.options, '$WORKSPACE/subdomain/$OUTPUT-gobuster.txt')
        utils.just_write(go_output, "\n".join(go_clean))

    # massdns clean up
    massdns_raw = utils.replace_argument(
        self.options, '$WORKSPACE/subdomain/raw-massdns.txt')
    if utils.not_empty_file(massdns_raw):
        massdns_output = utils.replace_argument(
            self.options, '$WORKSPACE/subdomain/$OUTPUT-massdns.txt')
        # Bug fix: the old guard was `if not os.path.exists(massdns_raw)`,
        # which is always False here (we just verified the file is
        # non-empty), so the massdns output was never parsed at all.
        with open(massdns_raw, 'r+') as d:
            ds = d.read().splitlines()
        for line in ds:
            # strip the trailing '.' from "<name>. <type> <answer>"
            newline = line.split(' ')[0][:-1]
            with open(massdns_output, 'a+') as m:
                m.write(newline + "\n")
        utils.check_output(
            utils.replace_argument(
                self.options, '$WORKSPACE/subdomain/$OUTPUT-massdns.txt'))

    # joining the output
    all_output = glob.glob(
        utils.replace_argument(self.options,
                               '$WORKSPACE/subdomain/$OUTPUT-*.txt'))
    domains = []
    for file in all_output:
        domains += utils.just_read(file).splitlines()

    output_path = utils.replace_argument(
        self.options, '$WORKSPACE/subdomain/final-$OUTPUT.txt')
    utils.just_write(output_path,
                     "\n".join(set([x.strip() for x in domains])))
    time.sleep(1)
    slack.slack_file('report', self.options, mess={
        'title': "{0} | {1} | Output".format(
            self.options['TARGET'], self.module_name),
        'filename': '{0}'.format(output_path),
    })
def technology_detection(self):
    """Run webanalyze over every known subdomain and merge the detected
    technologies into each subdomain entry of $COMPANY.json."""
    all_subdomain_path = utils.replace_argument(
        self.options, '$WORKSPACE/subdomain/final-$OUTPUT.txt')
    # nothing to scan until the subdomain module has produced its output
    if not utils.not_empty_file(all_subdomain_path):
        return
    #add https:// prefix for all domain
    domains = utils.just_read(all_subdomain_path).splitlines()
    scheme_path = utils.replace_argument(
        self.options, '$WORKSPACE/recon/all-scheme-$OUTPUT.txt')
    # feed webanalyze both the bare domains and their https:// variants
    utils.just_write(scheme_path, "\n".join(
        domains + [("https://" + x.strip()) for x in domains]))
    #really execute command
    cmd = '$GO_PATH/webanalyze -apps $PLUGINS_PATH/apps.json -hosts $WORKSPACE/recon/all-scheme-$OUTPUT.txt -output json -worker 20 | tee $WORKSPACE/recon/$OUTPUT-technology.json'
    cmd = utils.replace_argument(self.options, cmd)
    output_path = utils.replace_argument(
        self.options, '$WORKSPACE/recon/$OUTPUT-technology.json')
    execute.send_cmd(self.options, cmd, output_path, '', self.module_name)
    utils.just_waiting(self.options, self.module_name, seconds=10, times=20)
    # webanalyze emits one JSON object per line (JSONL)
    with open(output_path, encoding='utf-8') as o:
        data = o.read().splitlines()
    #parsing output to get technology
    techs = {}
    for line in data:
        # skip truncated/invalid JSON lines instead of aborting the merge
        try:
            jsonl = json.loads(line)
            if jsonl.get('matches'):
                subdomain = jsonl.get('hostname').replace('https://', '')
                if techs.get(subdomain):
                    techs[subdomain] += [x.get('app_name')
                                         for x in jsonl.get('matches')]
                else:
                    techs[subdomain] = [x.get('app_name')
                                        for x in jsonl.get('matches')]
        except:
            pass
    # print(techs)
    #update the main json and rewrite that
    main_json = utils.reading_json(utils.replace_argument(
        self.options, '$WORKSPACE/$COMPANY.json'))
    for i in range(len(main_json['Subdomains'])):
        sub = main_json['Subdomains'][i].get('Domain')
        if techs.get(sub):
            main_json['Subdomains'][i]["Technology"] = techs.get(sub)
    utils.just_write(utils.replace_argument(
        self.options, '$WORKSPACE/$COMPANY.json'), main_json, is_json=True)
def parse_query(self):
    """Parse a 'name|value' query; for ASN queries, resolve the value to an
    IP list and store it in the output file."""
    if not self.query:
        return None
    if '|' in self.query:
        parts = self.query.split("|")
        name, value = parts[0], parts[1]
        if 'asn' in name:
            ips = self.get_asn_ip(value)
            utils.just_write(self.options['output'], "\n".join(ips))
            utils.just_cleanup(self.options['output'])
def create_ip_result(self): utils.print_good('Create IP for list of domain result') # check if direct input is file or just single string if self.is_direct: if utils.not_empty_file(self.is_direct): cmd = '$PLUGINS_PATH/massdns/bin/massdns -r $PLUGINS_PATH/massdns/lists/resolvers.txt -t A -o S -w $WORKSPACE/subdomain/massdns-IP-$OUTPUT.txt $INPUT_LIST' # just return if direct input is just a string else: return else: final_ip = utils.replace_argument( self.options, '$WORKSPACE/subdomain/final-IP-$OUTPUT.txt') if utils.not_empty_file(final_ip): return cmd = '$PLUGINS_PATH/massdns/bin/massdns -r $PLUGINS_PATH/massdns/lists/resolvers.txt -t A -o S -w $WORKSPACE/subdomain/massdns-IP-$OUTPUT.txt $WORKSPACE/subdomain/final-$OUTPUT.txt' cmd = utils.replace_argument(self.options, cmd) output_path = utils.replace_argument( self.options, '$WORKSPACE/subdomain/massdns-IP-$OUTPUT.txt') execute.send_cmd(self.options, cmd, '', '', self.module_name) utils.just_waiting(self.options, self.module_name, seconds=5) # matching IP with subdomain main_json = utils.reading_json( utils.replace_argument(self.options, '$WORKSPACE/$COMPANY.json')) # get ips from amass stuff ips = [] if self.is_direct: if self.options.get("INPUT_LIST"): ips.extend(utils.extract_ip(self.options.get('INPUT_LIST'))) if utils.not_empty_file(output_path): data = utils.just_read(output_path).splitlines() for line in data: if " A " in line: subdomain = line.split('. A ')[0] ip = line.split('. A ')[1] ips.append(str(ip)) for i in range(len(main_json['Subdomains'])): if subdomain == main_json['Subdomains'][i]['Domain']: main_json['Subdomains'][i]['IP'] = ip final_ip = utils.replace_argument( self.options, '$WORKSPACE/subdomain/final-IP-$OUTPUT.txt') utils.just_write(final_ip, "\n".join(ips)) utils.just_write(utils.replace_argument(self.options, '$WORKSPACE/$COMPANY.json'), main_json, is_json=True)
def conclude(self):
    """Mark the module as done, save the command log and print a closing banner."""
    company_path = utils.replace_argument(
        self.options, '$WORKSPACE/$COMPANY.json')
    main_json = utils.reading_json(company_path)
    main_json['Modules'][self.module_name] = utils.checking_done(
        module=self.module_name, get_json=True)
    # write that json again
    utils.just_write(company_path, main_json, is_json=True)
    # logging
    logfile = utils.replace_argument(self.options, '$WORKSPACE/log.json')
    utils.save_all_cmd(logfile)
    utils.print_banner("{0} Done".format(self.module_name))
def run_single(command):
    """Run one command dict unless its output already exists (and the
    command is not forced); store stdout and register the output path."""
    if not command.get('forced', False) and utils.not_empty_file(
            command.get('output_path')):
        return True

    std_out = run(command.get('cmd'))
    # store std and output
    std_path = command.get('std_path')
    if std_path != '':
        utils.just_write(std_path, std_out)
    output_path = command.get('output_path')
    if output_path != '':
        utils.check_output(output_path)
    return True
def asnlookup(self, company):
    """Query asnlookup.com for a company's announced IP ranges and store them."""
    utils.print_banner(f"Starting scraping {company} from asnlookup.com")
    url = f'http://asnlookup.com/api/lookup?org={company}'
    data = sender.send_get(self.options, url, None).json()
    if not data:
        utils.print_bad('No IP found')
        return
    content = "\n".join(data)
    print(content)
    utils.just_write(self.options['output'], content)
    utils.just_cleanup(self.options['output'])
def masscan(self):
    """Build the IP target list (from direct input or $COMPANY.json) and
    launch a full-port masscan over it."""
    utils.print_good('Starting masscan')
    time.sleep(1)
    if self.is_direct:
        ip_file = utils.replace_argument(
            self.options, '$WORKSPACE/subdomain/final-IP-$OUTPUT.txt')
        # print(ip_file)
        # print(utils.just_read(ip_file))
        ip_list = utils.just_read(ip_file).splitlines()
        # drop unresolved entries and duplicates
        ip_list = list(set([ip for ip in ip_list if ip != 'N/A']))
    else:
        main_json = utils.reading_json(
            utils.replace_argument(self.options, '$WORKSPACE/$COMPANY.json'))
        main_json['Modules'][self.module_name] = []
        if self.options['SPEED'] == 'slow':
            # slow mode also scans the whole announced IP space
            ip_list = [
                x.get("IP") for x in main_json['Subdomains']
                if x.get("IP") is not None
            ] + main_json['IP Space']
        elif self.options['SPEED'] == 'quick':
            # quick mode only scans IPs resolved from subdomains
            ip_list = [
                x.get("IP") for x in main_json['Subdomains']
                if x.get("IP") is not None
            ]
        # NOTE(review): if SPEED is neither 'slow' nor 'quick', ip_list is
        # never bound and the next line raises NameError -- confirm the
        # allowed SPEED values upstream
        ip_list = set([ip for ip in ip_list if ip != 'N/A'])
    if self.options['DEBUG'] == "True":
        utils.print_info("just testing 5 first host")
        ip_list = list(ip_list)[:5]
    utils.just_write(
        utils.replace_argument(self.options,
                               '$WORKSPACE/subdomain/IP-$TARGET.txt'),
        "\n".join(ip_list))
    # print(ip_list)
    time.sleep(1)
    cmd = "sudo masscan --rate 1000 -p0-65535 -iL $WORKSPACE/subdomain/IP-$TARGET.txt -oX $WORKSPACE/portscan/$OUTPUT-masscan.xml --wait 0"
    cmd = utils.replace_argument(self.options, cmd)
    output_path = utils.replace_argument(
        self.options, '$WORKSPACE/portscan/$OUTPUT-masscan.xml')
    std_path = utils.replace_argument(
        self.options, '$WORKSPACE/portscan/std-$OUTPUT-masscan.std')
    execute.send_cmd(self.options, cmd, output_path, std_path,
                     self.module_name)
def conclude(self):
    """Store the gathered IP space in $COMPANY.json and close the module."""
    utils.print_banner("Conclusion for {0}".format(self.module_name))
    company_path = utils.replace_argument(
        self.options, '$WORKSPACE/$COMPANY.json')
    main_json = utils.reading_json(company_path)
    ips_file = utils.replace_argument(
        self.options, '$WORKSPACE/ipspace/$OUTPUT-ipspace.txt')
    with open(ips_file, 'r') as s:
        main_json['IP Space'] = s.read().splitlines()
    # write that json again
    utils.just_write(company_path, main_json, is_json=True)
    utils.print_banner("{0} Done".format(self.module_name))
def conclude(self):
    """Read the ipspace output file and persist it under 'IP Space' in
    $COMPANY.json."""
    utils.print_banner("Conclusion for {0}".format(self.module_name))
    main_json = utils.reading_json(utils.replace_argument(
        self.options, '$WORKSPACE/$COMPANY.json'))
    ips_file = utils.replace_argument(
        self.options, '$WORKSPACE/ipspace/$OUTPUT-ipspace.txt')
    with open(ips_file, 'r') as handle:
        ip_lines = handle.read().splitlines()
    main_json['IP Space'] = ip_lines
    # write that json again
    utils.just_write(utils.replace_argument(
        self.options, '$WORKSPACE/$COMPANY.json'), main_json, is_json=True)
    utils.print_banner("{0} Done".format(self.module_name))
def proxy_parsing(options):
    """Prepare a proxychains configuration from the options and sanity-check
    that the proxy command actually works (exits on failure)."""
    # return if proxy config file found
    if options['PROXY_FILE'] != "None":
        proxy_file = options['PROXY_FILE']
        utils.print_info("Detected proxychains file: {0}".format(proxy_file))
        return
    elif options['PROXY'] != "None":
        # fall back to the bundled proxychains config
        proxy_file = options['CWD'] + '/core/proxychains.conf'
        utils.print_info("Detected proxychains file: {0}".format(proxy_file))
    if options['PROXY'] != "None":
        # turn the PROXY url into proxychains "<scheme> <host> <port>" syntax
        proxy_parsed = urllib.parse.urlsplit(options['PROXY'])
        scheme = proxy_parsed.scheme
        host = proxy_parsed.netloc.split(':')[0]
        port = proxy_parsed.netloc.split(':')[1]
        proxy_element = "\n" + scheme + " " + host + " " + port
        raw_data = utils.just_read(proxy_file).splitlines()
        # split the config at the [ProxyList] section header
        for i in range(len(raw_data)):
            if '[ProxyList]' in raw_data[i]:
                init_part = raw_data[:i]
                proxy_part = raw_data[i:]
        # check if this proxy is exist or not
        check_duplicate = False
        for item in proxy_part:
            if proxy_element.strip() in item.strip():
                check_duplicate = True
        if not check_duplicate:
            proxy_part.append(proxy_element)
        # rewrite the config with the proxy appended to the list section
        real_proxy_data = "\n".join(init_part + proxy_part)
        utils.just_write(proxy_file, real_proxy_data)
    if options['PROXY'] != "None" or options['PROXY_FILE'] != "None":
        # the proxy wrapper binary (first token of PROXY_CMD) must be on PATH
        if not shutil.which(options['PROXY_CMD'].split(' ')[0]):
            utils.print_bad("Look like proxy mode doesn't support your OS")
            sys.exit(0)
        else:
            #simple check for proxy is good
            utils.print_info("Testing proxy with simple curl command")
            # identical public IP with and without the proxy means the proxy
            # isn't actually routing traffic
            if execute.run(options['PROXY_CMD'] + " curl -s ipinfo.io/ip") == execute.run("curl -s ipinfo.io/ip"):
                utils.print_bad("Look like your proxy not work properly")
                sys.exit(0)
def create_ip_result(self):
    """Resolve collected domains with massdns and map each subdomain to its IP."""
    utils.print_good('Create IP for list of domain result')

    if self.is_direct:
        cmd = ('$PLUGINS_PATH/massdns/bin/massdns -r '
               '$PLUGINS_PATH/massdns/lists/resolvers.txt -t A -o S '
               '-w $WORKSPACE/subdomain/massdns-IP-$OUTPUT.txt $INPUT')
    else:
        final_ip = utils.replace_argument(
            self.options, '$WORKSPACE/subdomain/final-IP-$OUTPUT.txt')
        # a previous run already produced the final IP list: nothing to do
        if utils.not_empty_file(final_ip):
            return
        cmd = ('$PLUGINS_PATH/massdns/bin/massdns -r '
               '$PLUGINS_PATH/massdns/lists/resolvers.txt -t A -o S '
               '-w $WORKSPACE/subdomain/massdns-IP-$OUTPUT.txt '
               '$WORKSPACE/subdomain/final-$OUTPUT.txt')

    cmd = utils.replace_argument(self.options, cmd)
    output_path = utils.replace_argument(
        self.options, '$WORKSPACE/subdomain/massdns-IP-$OUTPUT.txt')
    execute.send_cmd(self.options, cmd, '', '', self.module_name)
    utils.just_waiting(self.options, self.module_name, seconds=5)

    # match the resolved IPs back onto the subdomain records
    main_json = utils.reading_json(utils.replace_argument(
        self.options, '$WORKSPACE/$COMPANY.json'))

    with open(output_path, 'r') as source:
        raw_lines = source.read().splitlines()

    resolved_ips = []
    for line in raw_lines:
        if " A " not in line:
            continue
        subdomain = line.split('. A ')[0]
        ip = line.split('. A ')[1]
        resolved_ips.append(ip)
        for record in main_json['Subdomains']:
            if record['Domain'] == subdomain:
                record['IP'] = ip

    final_ip = utils.replace_argument(
        self.options, '$WORKSPACE/subdomain/final-IP-$OUTPUT.txt')
    with open(final_ip, 'w+') as fip:
        fip.write("\n".join(str(ip) for ip in resolved_ips))

    utils.just_write(utils.replace_argument(
        self.options, '$WORKSPACE/$COMPANY.json'), main_json, is_json=True)
def masscan(self):
    """Collect target IPs and launch a full-range masscan port scan."""
    utils.print_good('Starting masscan')
    time.sleep(1)

    if self.is_direct:
        ip_file = utils.replace_argument(
            self.options, '$WORKSPACE/subdomain/final-IP-$OUTPUT.txt')
        raw_ips = utils.just_read(ip_file).splitlines()
        # drop unresolved entries and deduplicate
        targets = list({entry for entry in raw_ips if entry != 'N/A'})
    else:
        main_json = utils.reading_json(utils.replace_argument(
            self.options, '$WORKSPACE/$COMPANY.json'))
        main_json['Modules'][self.module_name] = []
        resolved = [record.get("IP") for record in main_json['Subdomains']
                    if record.get("IP") is not None]
        if self.options['SPEED'] == 'slow':
            # slow mode also sweeps the whole discovered IP space
            candidates = resolved + main_json['IP Space']
        elif self.options['SPEED'] == 'quick':
            candidates = resolved
        targets = {entry for entry in candidates if entry != 'N/A'}

    if self.options['DEBUG'] == "True":
        utils.print_info("just testing 5 first host")
        targets = list(targets)[:5]

    utils.just_write(utils.replace_argument(
        self.options, '$WORKSPACE/subdomain/IP-$TARGET.txt'),
        "\n".join(targets))
    time.sleep(1)

    cmd = utils.replace_argument(
        self.options,
        "sudo masscan --rate 1000 -p0-65535 -iL $WORKSPACE/subdomain/IP-$TARGET.txt -oX $WORKSPACE/portscan/$OUTPUT-masscan.xml --wait 0")
    output_path = utils.replace_argument(
        self.options, '$WORKSPACE/portscan/$OUTPUT-masscan.xml')
    std_path = utils.replace_argument(
        self.options, '$WORKSPACE/portscan/std-$OUTPUT-masscan.std')
    execute.send_cmd(self.options, cmd, output_path, std_path,
                     self.module_name)
def create_skeleton_json(self):
    """Create the initial $COMPANY.json report skeleton if it does not exist."""
    report_path = utils.replace_argument(
        self.options, '$WORKSPACE/$COMPANY.json')
    if utils.not_empty_file(report_path):
        utils.print_info("Modules is already done")
        return

    skeleton = {
        "Company": utils.replace_argument(self.options, '$COMPANY'),
        "Main_domain": utils.replace_argument(self.options, '$TARGET'),
        "Info": {},
        "Subdomains": [],
        "Modules": {},
        "IP Space": [],
    }
    utils.just_write(report_path, skeleton, is_json=True)
    utils.check_output(report_path)

    slack.slack_noti('status', self.options, mess={
        'title': "{0} | {1}".format(self.options['TARGET'], self.module_name),
        'content': 'Create skeleton json'
    })
def conclude(self):
    """Record this module's completion status in the company JSON."""
    report_path = utils.replace_argument(
        self.options, '$WORKSPACE/$COMPANY.json')
    main_json = utils.reading_json(report_path)
    main_json['Modules'][self.module_name] = utils.checking_done(
        module=self.module_name, get_json=True)
    # write the report back with the module status attached
    utils.just_write(report_path, main_json, is_json=True)