def parsing_to_csv(self):
    """Convert the masscan XML output to CSV, render a readable summary
    table, and re-run screenshots against the hosts with open ports.
    """
    xml_path = utils.replace_argument(
        self.options, '$WORKSPACE/portscan/$OUTPUT-masscan.xml')
    # no masscan result -> nothing to parse
    if not utils.not_empty_file(xml_path):
        return
    parse_cmd = utils.replace_argument(
        self.options,
        "python3 $PLUGINS_PATH/nmap-stuff/masscan_xml_parser.py -f $WORKSPACE/portscan/$OUTPUT-masscan.xml -csv $WORKSPACE/portscan/$OUTPUT-masscan.csv")
    csv_path = utils.replace_argument(
        self.options, '$WORKSPACE/portscan/$OUTPUT-masscan.csv')
    execute.send_cmd(self.options, parse_cmd, csv_path, '', self.module_name)
    time.sleep(2)
    # csv beautify: skip when the parser produced nothing
    if not utils.not_empty_file(csv_path):
        return
    utils.print_line()
    look_cmd = "cat $WORKSPACE/portscan/$OUTPUT-masscan.csv | csvlook --no-inference | tee $WORKSPACE/portscan/$OUTPUT-masscan-summary.txt"
    summary_path = utils.replace_argument(
        self.options, '$WORKSPACE/portscan/$OUTPUT-masscan-summary.txt')
    execute.send_cmd(self.options,
                     utils.replace_argument(self.options, look_cmd),
                     summary_path, '', self.module_name)
    time.sleep(2)
    # re-screenshot the result with open port
    self.screenshots(utils.just_read(csv_path))
def wayback_parsing(self):
    """Collect archived URLs for every target through waybackurls."""
    utils.print_good('Starting waybackurl')
    final_domains = utils.replace_argument(
        self.options, '$WORKSPACE/subdomain/final-$OUTPUT.txt')
    if self.is_direct:
        # a direct file gets cat'ed into the pipe; a bare string is echo'ed
        if utils.not_empty_file(self.is_direct):
            prefix = 'cat {0}'.format(self.is_direct)
        else:
            prefix = 'echo {0}'.format(self.is_direct)
        cmd = prefix + ' | $GO_PATH/waybackurls | tee $WORKSPACE/assets/wayback-$OUTPUT.txt'
    else:
        # normal mode needs the final subdomain list from earlier modules
        if not utils.not_empty_file(final_domains):
            return None
        cmd = 'cat $WORKSPACE/subdomain/final-$OUTPUT.txt | $GO_PATH/waybackurls | tee $WORKSPACE/assets/wayback-$OUTPUT.txt'
    cmd = utils.replace_argument(self.options, cmd)
    output_path = utils.replace_argument(
        self.options, '$WORKSPACE/assets/wayback-$OUTPUT.txt')
    std_path = utils.replace_argument(
        self.options, '$WORKSPACE/assets/std-wayback-$OUTPUT.std')
    execute.send_cmd(self.options, cmd, output_path, std_path,
                     self.module_name)
    utils.print_line()
def prepare_input(self):
    """Collect the target domains and the path of the file that holds them.

    Returns:
        tuple[list, str]: (domains, http_domains_path) where the file at
        http_domains_path contains the domains one per line.
    """
    if self.is_direct:
        # if direct input was file just read it
        if utils.not_empty_file(self.is_direct):
            domains = utils.just_read(self.is_direct).splitlines()
            http_domains_path = self.is_direct
        # get input string
        else:
            domains = [self.is_direct.strip()]
            # BUG FIX: this must be the destination *path* for just_write();
            # the original wrapped it in utils.reading_json(), which returns
            # parsed JSON content (or None), not a filename.
            http_domains_path = utils.replace_argument(
                self.options, '$WORKSPACE/directory/domain-lists.txt')
            utils.just_write(http_domains_path, "\n".join(domains))
    else:
        http_domains_path = utils.replace_argument(
            self.options, '$WORKSPACE/assets/http-$OUTPUT.txt')
        # if assets module done return it
        if utils.not_empty_file(http_domains_path):
            domains = utils.just_read(http_domains_path).splitlines()
            return domains, http_domains_path
        # otherwise fall back to the subdomains recorded in the main json
        main_json = utils.reading_json(
            utils.replace_argument(self.options, '$WORKSPACE/$COMPANY.json'))
        domains = [x.get('Domain') for x in main_json['Subdomains']]
        # BUG FIX: same reading_json() misuse as above -- keep the path
        http_domains_path = utils.replace_argument(
            self.options, '$WORKSPACE/directory/domain-lists.txt')
        utils.just_write(http_domains_path, "\n".join(domains))
    return domains, http_domains_path
def screenshots(self):
    """Screenshot every path found by the directory brute force using
    aquatone. Skipped entirely when SPEED is 'quick'.
    """
    if self.options['SPEED'] == 'quick':
        utils.print_info('Skip screenshot on Dirbrute in quick speed')
        return None
    utils.print_good('Starting Screenshot from found result')
    final_result = utils.replace_argument(
        self.options, '$WORKSPACE/directory/$OUTPUT-summary.txt')
    if utils.not_empty_file(final_result):
        # screenshot found path at the end
        cmd = "cat {0} | $GO_PATH/aquatone -threads 20 -out $WORKSPACE/directory/$OUTPUT-screenshots".format(
            final_result)
        cmd = utils.replace_argument(self.options, cmd)
        std_path = utils.replace_argument(
            self.options,
            '$WORKSPACE/directory/$OUTPUT-screenshots/std-aquatone_report.std')
        output_path = utils.replace_argument(
            self.options,
            '$WORKSPACE/directory/$OUTPUT-screenshots/aquatone_report.html')
        # BUG FIX: output_path and std_path were passed to send_cmd in the
        # wrong order (every other call site uses cmd, output_path, std_path),
        # so the html report was tracked as the std log and vice versa.
        execute.send_cmd(self.options, cmd, output_path, std_path,
                         self.module_name)
        if utils.not_empty_file(output_path):
            utils.check_output(output_path)
def unique_result(self):
    """Normalize raw gobuster/massdns output, merge all per-tool subdomain
    lists into one deduplicated file, optionally extend it with permutated
    subdomains, and ship the final list to Slack.
    """
    utils.print_good('Unique result')
    # gobuster clean up: raw lines look like "Found: sub.domain" -> keep field 1
    go_raw = utils.replace_argument(
        self.options, '$WORKSPACE/subdomain/raw-$OUTPUT-gobuster.txt')
    if utils.not_empty_file(go_raw):
        go_clean = [x.split(' ')[1]
                    for x in utils.just_read(go_raw).splitlines()]
        go_output = utils.replace_argument(
            self.options, '$WORKSPACE/subdomain/$OUTPUT-gobuster.txt')
        utils.just_write(go_output, "\n".join(go_clean))
    # massdns clean up
    massdns_raw = utils.replace_argument(
        self.options, '$WORKSPACE/subdomain/raw-massdns.txt')
    if utils.not_empty_file(massdns_raw):
        massdns_output = utils.replace_argument(
            self.options, '$WORKSPACE/subdomain/$OUTPUT-massdns.txt')
        # BUG FIX: the original guard was `if not os.path.exists(massdns_raw)`,
        # which is always False here (not_empty_file already proved the file
        # exists), so the cleanup below could never run.
        if os.path.exists(massdns_raw):
            with open(massdns_raw, 'r+') as d:
                ds = d.read().splitlines()
                for line in ds:
                    # "sub.example.com. A 1.2.3.4" -> drop the trailing dot
                    newline = line.split(' ')[0][:-1]
                    with open(massdns_output, 'a+') as m:
                        m.write(newline + "\n")
        utils.check_output(utils.replace_argument(
            self.options, '$WORKSPACE/subdomain/$OUTPUT-massdns.txt'))
    # joining the output of every tool
    all_output = glob.glob(utils.replace_argument(
        self.options, '$WORKSPACE/subdomain/$OUTPUT-*.txt'))
    domains = []
    for file in all_output:
        domains += utils.just_read(file).splitlines()
    output_path = utils.replace_argument(
        self.options, '$WORKSPACE/subdomain/full-$OUTPUT.txt')
    utils.just_write(output_path, "\n".join(set([x.strip() for x in domains])))
    # finding more subdomain via permutation
    permutation_domains = self.permutation()
    if permutation_domains:
        domains.extend(permutation_domains)
        output_path = utils.replace_argument(
            self.options, '$WORKSPACE/subdomain/full-$OUTPUT.txt')
        utils.just_write(output_path, "\n".join(
            set([x.strip() for x in domains])))
    else:
        output_path = utils.replace_argument(
            self.options, '$WORKSPACE/subdomain/final-$OUTPUT.txt')
        utils.just_write(output_path, "\n".join(
            set([x.strip() for x in domains])))
    time.sleep(1)
    slack.slack_file('report', self.options, mess={
        'title': "{0} | {1} | Output".format(self.options['TARGET'],
                                             self.module_name),
        'filename': '{0}'.format(output_path),
    })
def create_ip_result(self): utils.print_good('Create IP for list of domain result') # check if direct input is file or just single string if self.is_direct: if utils.not_empty_file(self.is_direct): cmd = '$PLUGINS_PATH/massdns/bin/massdns -r $PLUGINS_PATH/massdns/lists/resolvers.txt -t A -o S -w $WORKSPACE/subdomain/massdns-IP-$OUTPUT.txt $INPUT_LIST' # just return if direct input is just a string else: return else: final_ip = utils.replace_argument( self.options, '$WORKSPACE/subdomain/final-IP-$OUTPUT.txt') if utils.not_empty_file(final_ip): return cmd = '$PLUGINS_PATH/massdns/bin/massdns -r $PLUGINS_PATH/massdns/lists/resolvers.txt -t A -o S -w $WORKSPACE/subdomain/massdns-IP-$OUTPUT.txt $WORKSPACE/subdomain/final-$OUTPUT.txt' cmd = utils.replace_argument(self.options, cmd) output_path = utils.replace_argument( self.options, '$WORKSPACE/subdomain/massdns-IP-$OUTPUT.txt') execute.send_cmd(self.options, cmd, '', '', self.module_name) utils.just_waiting(self.options, self.module_name, seconds=5) # matching IP with subdomain main_json = utils.reading_json( utils.replace_argument(self.options, '$WORKSPACE/$COMPANY.json')) # get ips from amass stuff ips = [] if self.is_direct: if self.options.get("INPUT_LIST"): ips.extend(utils.extract_ip(self.options.get('INPUT_LIST'))) if utils.not_empty_file(output_path): data = utils.just_read(output_path).splitlines() for line in data: if " A " in line: subdomain = line.split('. A ')[0] ip = line.split('. A ')[1] ips.append(str(ip)) for i in range(len(main_json['Subdomains'])): if subdomain == main_json['Subdomains'][i]['Domain']: main_json['Subdomains'][i]['IP'] = ip final_ip = utils.replace_argument( self.options, '$WORKSPACE/subdomain/final-IP-$OUTPUT.txt') utils.just_write(final_ip, "\n".join(ips)) utils.just_write(utils.replace_argument(self.options, '$WORKSPACE/$COMPANY.json'), main_json, is_json=True)
def local_get_report(options):
    """Group every existing report file by module, based on rest/commands.json.

    Returns:
        dict: {'reports': [{'module': ..., 'reports': [...]}, ...]} with
        modules that produced no report filtered out.
    """
    command_path = str(BASE_DIR.joinpath('rest/commands.json'))
    commands = utils.reading_json(command_path)
    # create skeleton dict: one bucket per module
    final_reports = [{"module": key, "reports": []} for key in commands.keys()]
    # get workspace name
    ws_name = options.get('TARGET')

    def _register(module, path, rtype):
        # append one report entry to its module bucket
        entry = {"path": path, "type": rtype}
        for bucket in final_reports:
            if bucket.get('module') == module:
                bucket["reports"].append(entry)

    for k in commands.keys():
        if "report" not in commands[k].keys():
            continue
        report = utils.replace_argument(options, commands[k].get("report"))
        # @TODO refactor this later
        if type(report) == str:
            if utils.not_empty_file(report):
                _register(k, report.replace(options.get('WORKSPACE'), ws_name),
                          "html")
        elif type(report) == list:
            for item in report:
                report_path = utils.replace_argument(options, item.get("path"))
                if utils.not_empty_file(report_path):
                    _register(k,
                              report_path.replace(options.get('WORKSPACE'),
                                                  ws_name),
                              item.get("type"))
    # just clean up: drop modules without any report
    clean_reports = [b for b in final_reports if b.get('reports')]
    return {'reports': clean_reports}
def linkfinder(self):
    """Run LinkFinder against every alive HTTP host to extract endpoints
    from their JavaScript. Only runs in 'slow' speed for non-direct mode.
    """
    utils.print_good('Starting linkfinder')
    if self.is_direct:
        if utils.not_empty_file(self.is_direct):
            # BUG FIX: keep the *path* here; the original stored the file's
            # content, so not_empty_file()/just_read() below (which expect a
            # filename) could never process direct file input.
            http_domains = self.is_direct
        # direct input is just a single string: run once and return
        else:
            domain = self.is_direct
            strip_domain = utils.get_domain(domain)
            # bare hostname: default to http scheme
            if strip_domain == domain:
                domain = 'http://' + domain
            cmd = 'python3 $PLUGINS_PATH/LinkFinder/linkfinder.py -i {0} -d -o cli | tee $WORKSPACE/assets/linkfinder/{1}-linkfinder.txt'.format(
                domain, strip_domain)
            cmd = utils.replace_argument(self.options, cmd)
            output_path = utils.replace_argument(
                self.options,
                '$WORKSPACE/assets/linkfinder/{0}-linkfinder.txt'.format(
                    strip_domain))
            std_path = utils.replace_argument(
                self.options,
                '$WORKSPACE/assets/linkfinder/{0}-linkfinder.std'.format(
                    strip_domain))
            execute.send_cmd(self.options, cmd, output_path, std_path,
                             self.module_name)
            return None
    else:
        if self.options['SPEED'] != 'slow':
            utils.print_good("Skipping linkfinder in quick mode")
            return None
        http_domains = utils.replace_argument(
            self.options, '$WORKSPACE/assets/http-$OUTPUT.txt')
    utils.make_directory(self.options['WORKSPACE'] + '/assets/linkfinder')
    if utils.not_empty_file(http_domains):
        domains = utils.just_read(http_domains)
        for domain in domains.splitlines():
            strip_domain = utils.get_domain(domain)
            cmd = 'python3 $PLUGINS_PATH/LinkFinder/linkfinder.py -i {0} -d -o cli | tee $WORKSPACE/assets/linkfinder/{1}-linkfinder.txt'.format(
                domain, strip_domain)
            cmd = utils.replace_argument(self.options, cmd)
            output_path = utils.replace_argument(
                self.options,
                '$WORKSPACE/assets/linkfinder/{0}-linkfinder.txt'.format(
                    strip_domain))
            std_path = utils.replace_argument(
                self.options,
                '$WORKSPACE/assets/linkfinder/{0}-linkfinder.std'.format(
                    strip_domain))
            execute.send_cmd(self.options, cmd, output_path, std_path,
                             self.module_name)
def run_single(command):
    """Execute one shell command record unless its output already exists.

    When the record is not 'forced' and either output_path or
    cleaned_output is already non-empty, the run is skipped. Otherwise the
    command is executed, stdout is persisted to std_path (if set) and the
    output file is verified. Always returns True.
    """
    may_skip = not command.get('forced', False)
    if may_skip and (utils.not_empty_file(command.get('output_path'))
                     or utils.not_empty_file(command.get('cleaned_output'))):
        return True
    std_out = run(command.get('cmd'))
    # store std and output
    if command.get('std_path') != '':
        utils.just_write(command.get('std_path'), std_out)
    if command.get('output_path') != '':
        utils.check_output(command.get('output_path'))
    return True
def resolve_ip(self):
    """Resolve the final subdomain list to IPs with massdns.

    Writes the IPs to subdomain/final-IP-$OUTPUT.txt and attaches each
    resolved IP to its subdomain entry in $COMPANY.json.
    """
    utils.print_good('Create IP for list of domain result')
    final_ip = utils.replace_argument(
        self.options, '$WORKSPACE/subdomain/final-IP-$OUTPUT.txt')
    all_subdomain_path = utils.replace_argument(
        self.options, '$WORKSPACE/subdomain/final-$OUTPUT.txt')
    # already resolved, or no subdomains were found -- just return
    if utils.not_empty_file(
            final_ip) or not utils.not_empty_file(all_subdomain_path):
        return
    cmd = '$PLUGINS_PATH/massdns/bin/massdns -r $PLUGINS_PATH/massdns/lists/resolvers.txt -t A -o S -w $WORKSPACE/subdomain/massdns-IP-$OUTPUT.txt $WORKSPACE/subdomain/final-$OUTPUT.txt'
    cmd = utils.replace_argument(self.options, cmd)
    output_path = utils.replace_argument(
        self.options, '$WORKSPACE/subdomain/massdns-IP-$OUTPUT.txt')
    execute.send_cmd(self.options, cmd, '', '', self.module_name)
    # massdns runs asynchronously; poll up to 5 times, 5s apart
    utils.just_waiting(self.options, self.module_name, seconds=5, times=5)
    # load main json
    main_json = utils.reading_json(
        utils.replace_argument(self.options, '$WORKSPACE/$COMPANY.json'))
    data = utils.just_read(output_path)
    if data:
        ips = []
        for line in data.splitlines():
            # massdns -o S lines: "sub.example.com. A 1.2.3.4"
            if " A " in line:
                subdomain = line.split('. A ')[0]
                ip = line.split('. A ')[1]
                ips.append(ip)
                for i in range(len(main_json['Subdomains'])):
                    if subdomain == main_json['Subdomains'][i]['Domain']:
                        main_json['Subdomains'][i]['IP'] = ip
        final_ip = utils.replace_argument(
            self.options, '$WORKSPACE/subdomain/final-IP-$OUTPUT.txt')
        with open(final_ip, 'w+') as fip:
            fip.write("\n".join(str(ip) for ip in ips))
    # update the main json file
    utils.just_write(utils.replace_argument(
        self.options, '$WORKSPACE/$COMPANY.json'), main_json, is_json=True)
def conclude(self):
    """Merge masscan's XML results into $COMPANY.json and save the command
    log, after generating the HTML report.
    """
    output_path = utils.replace_argument(
        self.options, '$WORKSPACE/portscan/$OUTPUT-masscan.xml')
    if not utils.not_empty_file(output_path):
        return
    self.create_html_report()
    # parsing masscan xml into {ip: ["port/proto", ...]}
    tree = ET.parse(output_path)
    root = tree.getroot()
    masscan_json = {}
    for host in root.iter('host'):
        # NOTE(review): relies on fixed child order of masscan's <host>
        # element (child 0 = address, child 1 = ports) -- confirm against
        # a sample output file
        ip = host[0].get('addr')
        ports = [(str(x.get('portid')) + "/" + str(x.get('protocol')))
                 for x in host[1]]
        masscan_json[ip] = ports
    main_json = utils.reading_json(utils.replace_argument(
        self.options, '$WORKSPACE/$COMPANY.json'))
    # update the main json: attach port lists to matching subdomains
    for i in range(len(main_json['Subdomains'])):
        ip = main_json['Subdomains'][i].get('IP')
        if ip != "N/A" and ip in masscan_json.keys():
            main_json['Subdomains'][i]['Ports'] = masscan_json.get(ip)
    # just save commands
    logfile = utils.replace_argument(self.options, '$WORKSPACE/log.json')
    utils.save_all_cmd(self.options, logfile)
    utils.just_write(utils.replace_argument(
        self.options, '$WORKSPACE/$COMPANY.json'), main_json, is_json=True)
def screenshots(self, input_file):
    """Run both screenshot backends (aquatone, gowithness) on the hosts
    listed in input_file. Returns False when the file is missing/empty.
    """
    if not utils.not_empty_file(input_file):
        return False
    hosts = utils.just_read(input_file).splitlines()
    # aquatone takes the file, gowithness takes the parsed lines
    self.aquatone(input_file)
    self.gowithness(hosts)
def parse_commands(command_path):
    """Load commands.json and create one Commands DB record per command;
    'report' sections are delegated to parse_report().

    Returns False when the file is missing/empty or parses to nothing,
    True otherwise.
    """
    if not utils.not_empty_file(command_path):
        return False
    content = utils.just_read(command_path, get_json=True)
    if not content:
        return False
    for module, speeds in content.items():
        for speed, values in speeds.items():
            speed_key = speed.lower()
            if speed_key == 'report':
                parse_report(values, module)
                continue
            for value in values:
                # entries without a command are ignored
                if not value.get('cmd'):
                    continue
                banner = str(value.get('banner'))
                Commands.objects.create(
                    cmd=value.get('cmd'),
                    output_path=value.get('output_path'),
                    std_path=value.get('std_path'),
                    banner=banner,
                    module=module,
                    cmd_type=value.get('cmd_type') if value.get('cmd_type') else 'single',
                    speed=speed_key,
                    alias=module + "__" + speed_key + "__" + banner.lower(),
                    chunk=value.get('chunk') if value.get('chunk') else 0,
                )
    return True
def prepare_input(self):
    """Build the list of target IPs for this module.

    Returns:
        list: deduplicated IPs/targets ('N/A' entries removed); trimmed to
        5 entries when DEBUG is enabled.
    """
    if self.is_direct:
        # if direct input was file just read it
        if utils.not_empty_file(self.is_direct):
            ip_list = utils.just_read(self.is_direct).splitlines()
        # get input string
        else:
            # BUG FIX: wrap the single target in a list; the original kept
            # a bare string, so the set/list dedup below iterated over its
            # *characters* instead of treating it as one target.
            ip_list = [utils.get_domain(self.is_direct).strip()]
    else:
        main_json = utils.reading_json(
            utils.replace_argument(self.options, '$WORKSPACE/$COMPANY.json'))
        main_json['Modules'][self.module_name] = []
        if self.options['SPEED'] == 'slow':
            # slow mode also scans the whole discovered IP space
            ip_list = [
                x.get("IP") for x in main_json['Subdomains']
                if x.get("IP") is not None
            ] + main_json['IP Space']
        elif self.options['SPEED'] == 'quick':
            ip_list = [
                x.get("IP") for x in main_json['Subdomains']
                if x.get("IP") is not None
            ]
    # drop unresolved placeholders and duplicates
    ip_list = list(set([ip for ip in ip_list if ip != 'N/A']))
    if self.options['DEBUG'] == 'True':
        ip_list = list(ip_list)[:5]
    # utils.print_debug(ip_list)
    return ip_list
def get_reports(self, options, module=None, full=False, grouped=True):
    """Return report records whose files exist, optionally filtered by
    module and grouped into {'module': ..., 'reports': [...]} buckets.
    """
    queryset = ReportsSkeleton.objects.all()
    if module is not None:
        queryset = queryset.filter(module=module)
        modules = [module]
    else:
        modules = list(ReportsSkeleton.objects.values_list(
            'module', flat=True).distinct())
    group_report = [{'module': m, 'reports': []} for m in modules]
    reports = []
    for record in queryset:
        report = record.as_json()
        report_path = utils.replace_argument(options,
                                             report.get('report_path'))
        # skip records whose report file never got created
        if not utils.not_empty_file(report_path):
            continue
        relative = report_path.replace(options.get('WORKSPACES'), '')
        # full mode keeps the leading slash, otherwise strip it
        report['report_path'] = relative if full else relative.strip('/')
        reports.append(report)
    if not grouped:
        return reports
    for bucket in group_report:
        bucket['reports'] = [r for r in reports
                             if r.get('module') == bucket['module']]
    return group_report
def create_skeleton_json(self):
    """Write the initial $WORKSPACE/$COMPANY.json skeleton (only once) and
    announce it on Slack.
    """
    output_path = utils.replace_argument(self.options,
                                         '$WORKSPACE/$COMPANY.json')
    # already created by a previous run -- keep the existing data
    if utils.not_empty_file(output_path):
        utils.print_info("Modules is already done")
        return
    skeleton = {
        "Company": utils.replace_argument(self.options, '$COMPANY'),
        "Main_domain": utils.replace_argument(self.options, '$TARGET'),
        "Info": {},
        "Subdomains": [],
        "Modules": {},
        "IP Space": [],
    }
    utils.just_write(output_path, skeleton, is_json=True)
    utils.check_output(output_path)
    slack.slack_noti('status', self.options, mess={
        'title': "{0} | {1}".format(self.options['TARGET'],
                                    self.module_name),
        'content': 'Create skeleton json',
    })
def prepare_input(self):
    """Build the collection of target IPs for this module.

    Returns a list in direct mode, but a set in workspace mode --
    NOTE(review): the inconsistent return type looks accidental; confirm
    callers only iterate the result.
    """
    if self.is_direct:
        if utils.not_empty_file(self.is_direct):
            # NOTE(review): when direct input is a file this reads the
            # workspace's final-IP list, not self.is_direct itself --
            # confirm that is intended
            ip_file = utils.replace_argument(
                self.options, '$WORKSPACE/subdomain/final-IP-$OUTPUT.txt')
            # print(ip_file)
            ip_list = utils.just_read(ip_file).splitlines()
            # drop unresolved placeholders and duplicates
            ip_list = list(set([ip for ip in ip_list if ip != 'N/A']))
        else:
            # single string input: let the helper turn it into targets
            ip_list = utils.resolve_input(self.is_direct)
    else:
        main_json = utils.reading_json(
            utils.replace_argument(self.options, '$WORKSPACE/$COMPANY.json'))
        main_json['Modules'][self.module_name] = []
        if self.options['SPEED'] == 'slow':
            # slow mode also scans the whole discovered IP space
            ip_list = [
                x.get("IP") for x in main_json['Subdomains']
                if x.get("IP") is not None
            ] + main_json['IP Space']
        elif self.options['SPEED'] == 'quick':
            ip_list = [
                x.get("IP") for x in main_json['Subdomains']
                if x.get("IP") is not None
            ]
        # dedupe and drop unresolved placeholders
        ip_list = set([ip for ip in ip_list if ip != 'N/A'])
    return ip_list
def conclude(self):
    """Fold masscan's XML results into $WORKSPACE/$COMPANY.json."""
    xml_path = utils.replace_argument(
        self.options, '$WORKSPACE/portscan/$OUTPUT-masscan.xml')
    if not utils.not_empty_file(xml_path):
        return
    self.create_html_report()
    # parse masscan xml into {ip: ["port/proto", ...]}
    root = ET.parse(xml_path).getroot()
    open_ports = {}
    for host in root.iter('host'):
        addr = host[0].get('addr')
        open_ports[addr] = [
            str(entry.get('portid')) + "/" + str(entry.get('protocol'))
            for entry in host[1]
        ]
    company_json = utils.replace_argument(self.options,
                                          '$WORKSPACE/$COMPANY.json')
    main_json = utils.reading_json(company_json)
    # attach port lists to the matching subdomain entries
    for entry in main_json['Subdomains']:
        ip = entry.get('IP')
        if ip != "N/A" and ip in open_ports.keys():
            entry['Ports'] = open_ports.get(ip)
    utils.just_write(company_json, main_json, is_json=True)
def technology_detection(self):
    """Detect web technologies for every final subdomain with webanalyze
    and merge the findings into $WORKSPACE/$COMPANY.json.
    """
    all_subdomain_path = utils.replace_argument(
        self.options, '$WORKSPACE/subdomain/final-$OUTPUT.txt')
    if not utils.not_empty_file(all_subdomain_path):
        return
    # add https:// prefix for all domain
    domains = utils.just_read(all_subdomain_path).splitlines()
    scheme_path = utils.replace_argument(
        self.options, '$WORKSPACE/recon/all-scheme-$OUTPUT.txt')
    utils.just_write(
        scheme_path,
        "\n".join(domains + [("https://" + x.strip()) for x in domains]))
    # really execute command
    cmd = '$GO_PATH/webanalyze -apps $PLUGINS_PATH/apps.json -hosts $WORKSPACE/recon/all-scheme-$OUTPUT.txt -output json -worker 20 | tee $WORKSPACE/recon/$OUTPUT-technology.json'
    cmd = utils.replace_argument(self.options, cmd)
    output_path = utils.replace_argument(
        self.options, '$WORKSPACE/recon/$OUTPUT-technology.json')
    # FIX: register the real output path with send_cmd (was ''), matching
    # the other version of this module so the executor can track it
    execute.send_cmd(self.options, cmd, output_path, '', self.module_name)
    utils.just_waiting(self.options, self.module_name, seconds=10)
    # FIX: the scan may have produced nothing; guard instead of crashing
    # on open() with FileNotFoundError
    if not utils.not_empty_file(output_path):
        return
    with open(output_path, encoding='utf-8') as o:
        data = o.read().splitlines()
    # parsing output to get technology: {subdomain: [app_name, ...]}
    techs = {}
    for line in data:
        # FIX: tolerate blank or truncated JSONL lines instead of aborting
        # the whole parse (consistent with the robust sibling module)
        try:
            jsonl = json.loads(line)
        except ValueError:
            continue
        if jsonl.get('matches'):
            subdomain = jsonl.get('hostname').replace('https://', '')
            apps = [x.get('app_name') for x in jsonl.get('matches')]
            if techs.get(subdomain):
                techs[subdomain] += apps
            else:
                techs[subdomain] = apps
    # print(techs)
    # update the main json and rewrite that
    main_json = utils.reading_json(
        utils.replace_argument(self.options, '$WORKSPACE/$COMPANY.json'))
    for i in range(len(main_json['Subdomains'])):
        sub = main_json['Subdomains'][i].get('Domain')
        if techs.get(sub):
            main_json['Subdomains'][i]["Technology"] = techs.get(sub)
    utils.just_write(utils.replace_argument(
        self.options, '$WORKSPACE/$COMPANY.json'), main_json, is_json=True)
def check_direct(self):
    """Decide whether to run in direct-input mode.

    Returns False when a final subdomain list already exists (normal
    workspace flow); otherwise records the required direct input on
    self.is_direct and returns True.
    """
    final_subdomains = utils.replace_argument(
        self.options, '$WORKSPACE/subdomain/final-$OUTPUT.txt')
    if utils.not_empty_file(final_subdomains):
        return False
    self.is_direct = utils.is_direct_mode(self.options, require_input=True)
    return True
def technology_detection(self):
    """Detect web technologies for every final subdomain with webanalyze
    and merge the findings into $WORKSPACE/$COMPANY.json.
    """
    all_subdomain_path = utils.replace_argument(
        self.options, '$WORKSPACE/subdomain/final-$OUTPUT.txt')
    if not utils.not_empty_file(all_subdomain_path):
        return
    # add https:// prefix for all domain
    domains = utils.just_read(all_subdomain_path).splitlines()
    scheme_path = utils.replace_argument(
        self.options, '$WORKSPACE/recon/all-scheme-$OUTPUT.txt')
    utils.just_write(scheme_path, "\n".join(
        domains + [("https://" + x.strip()) for x in domains]))
    # really execute command
    cmd = '$GO_PATH/webanalyze -apps $PLUGINS_PATH/apps.json -hosts $WORKSPACE/recon/all-scheme-$OUTPUT.txt -output json -worker 20 | tee $WORKSPACE/recon/$OUTPUT-technology.json'
    cmd = utils.replace_argument(self.options, cmd)
    output_path = utils.replace_argument(
        self.options, '$WORKSPACE/recon/$OUTPUT-technology.json')
    execute.send_cmd(self.options, cmd, output_path, '', self.module_name)
    # webanalyze runs asynchronously; poll up to 20 times, 10s apart
    utils.just_waiting(self.options, self.module_name, seconds=10, times=20)
    with open(output_path, encoding='utf-8') as o:
        data = o.read().splitlines()
    # parsing output to get technology: {subdomain: [app_name, ...]}
    techs = {}
    for line in data:
        # tolerate blank/partial JSONL lines from webanalyze
        try:
            jsonl = json.loads(line)
            if jsonl.get('matches'):
                subdomain = jsonl.get('hostname').replace('https://', '')
                if techs.get(subdomain):
                    techs[subdomain] += [x.get('app_name')
                                         for x in jsonl.get('matches')]
                else:
                    techs[subdomain] = [x.get('app_name')
                                        for x in jsonl.get('matches')]
        except:
            pass
    # print(techs)
    # update the main json and rewrite that
    main_json = utils.reading_json(utils.replace_argument(
        self.options, '$WORKSPACE/$COMPANY.json'))
    for i in range(len(main_json['Subdomains'])):
        sub = main_json['Subdomains'][i].get('Domain')
        if techs.get(sub):
            main_json['Subdomains'][i]["Technology"] = techs.get(sub)
    utils.just_write(utils.replace_argument(
        self.options, '$WORKSPACE/$COMPANY.json'), main_json, is_json=True)
def initial(self):
    """Resolve IPs, run masscan, then hand the workspace off to VulnScan."""
    self.create_ip_result()
    self.masscan()
    masscan_xml = utils.replace_argument(
        self.options, '$WORKSPACE/portscan/$OUTPUT-masscan.xml')
    # masscan output is often empty when bandwidth can't keep up with a
    # large input list -- only build the report when there is data
    if utils.not_empty_file(masscan_xml):
        self.create_html()
    else:
        utils.print_bad('Masscan output empty')
    vulnscan.VulnScan(self.options)
def prepare_input(self):
    """Return the list of target domains: from direct input (file or bare
    string) or from the Subdomains recorded in the main json.
    """
    if not self.is_direct:
        # workspace mode: take domains from the main json
        main_json = utils.reading_json(
            utils.replace_argument(self.options, '$WORKSPACE/$COMPANY.json'))
        return [x.get('Domain') for x in main_json['Subdomains']]
    # if direct input was file just read it
    if utils.not_empty_file(self.is_direct):
        return utils.just_read(self.is_direct).splitlines()
    # otherwise it is a single target string
    return [self.is_direct.strip()]
def create_ip_result(self):
    """Resolve targets to IPs with massdns, write the flat IP list to
    subdomain/final-IP-$OUTPUT.txt and attach each IP to its subdomain
    entry in $WORKSPACE/$COMPANY.json.
    """
    utils.print_good('Create IP for list of domain result')
    if self.is_direct:
        cmd = '$PLUGINS_PATH/massdns/bin/massdns -r $PLUGINS_PATH/massdns/lists/resolvers.txt -t A -o S -w $WORKSPACE/subdomain/massdns-IP-$OUTPUT.txt $INPUT'
    else:
        final_ip = utils.replace_argument(
            self.options, '$WORKSPACE/subdomain/final-IP-$OUTPUT.txt')
        # already resolved on a previous run -- nothing to do
        if utils.not_empty_file(final_ip):
            return
        cmd = '$PLUGINS_PATH/massdns/bin/massdns -r $PLUGINS_PATH/massdns/lists/resolvers.txt -t A -o S -w $WORKSPACE/subdomain/massdns-IP-$OUTPUT.txt $WORKSPACE/subdomain/final-$OUTPUT.txt'
    cmd = utils.replace_argument(self.options, cmd)
    output_path = utils.replace_argument(
        self.options, '$WORKSPACE/subdomain/massdns-IP-$OUTPUT.txt')
    execute.send_cmd(self.options, cmd, '', '', self.module_name)
    utils.just_waiting(self.options, self.module_name, seconds=5)
    # matching IP with subdomain
    main_json = utils.reading_json(
        utils.replace_argument(self.options, '$WORKSPACE/$COMPANY.json'))
    # BUG FIX: massdns may not have produced output yet (slow resolve or
    # failed command); guard like the sibling implementation instead of
    # crashing on open() with FileNotFoundError
    if not utils.not_empty_file(output_path):
        return
    data = utils.just_read(output_path).splitlines()
    ips = []
    for line in data:
        # massdns -o S lines: "sub.example.com. A 1.2.3.4"
        if " A " in line:
            subdomain = line.split('. A ')[0]
            ip = line.split('. A ')[1]
            ips.append(ip)
            for i in range(len(main_json['Subdomains'])):
                if subdomain == main_json['Subdomains'][i]['Domain']:
                    main_json['Subdomains'][i]['IP'] = ip
    final_ip = utils.replace_argument(
        self.options, '$WORKSPACE/subdomain/final-IP-$OUTPUT.txt')
    with open(final_ip, 'w+') as fip:
        fip.write("\n".join(str(ip) for ip in ips))
    utils.just_write(utils.replace_argument(
        self.options, '$WORKSPACE/$COMPANY.json'), main_json, is_json=True)
def create_ip_result(self):
    """Resolve targets to IPs with massdns, write the flat IP list to
    subdomain/final-IP-$OUTPUT.txt and attach each IP to its subdomain
    entry in $WORKSPACE/$COMPANY.json.
    """
    utils.print_good('Create IP for list of domain result')
    if self.is_direct:
        cmd = '$PLUGINS_PATH/massdns/bin/massdns -r $PLUGINS_PATH/massdns/lists/resolvers.txt -t A -o S -w $WORKSPACE/subdomain/massdns-IP-$OUTPUT.txt $INPUT'
    else:
        final_ip = utils.replace_argument(
            self.options, '$WORKSPACE/subdomain/final-IP-$OUTPUT.txt')
        # already resolved on a previous run -- nothing to do
        if utils.not_empty_file(final_ip):
            return
        cmd = '$PLUGINS_PATH/massdns/bin/massdns -r $PLUGINS_PATH/massdns/lists/resolvers.txt -t A -o S -w $WORKSPACE/subdomain/massdns-IP-$OUTPUT.txt $WORKSPACE/subdomain/final-$OUTPUT.txt'
    cmd = utils.replace_argument(self.options, cmd)
    output_path = utils.replace_argument(
        self.options, '$WORKSPACE/subdomain/massdns-IP-$OUTPUT.txt')
    execute.send_cmd(self.options, cmd, '', '', self.module_name)
    utils.just_waiting(self.options, self.module_name, seconds=5)
    # matching IP with subdomain
    main_json = utils.reading_json(utils.replace_argument(
        self.options, '$WORKSPACE/$COMPANY.json'))
    # BUG FIX: massdns may not have produced output yet (slow resolve or
    # failed command); guard like the sibling implementation instead of
    # crashing on open() with FileNotFoundError
    if not utils.not_empty_file(output_path):
        return
    data = utils.just_read(output_path).splitlines()
    ips = []
    for line in data:
        # massdns -o S lines: "sub.example.com. A 1.2.3.4"
        if " A " in line:
            subdomain = line.split('. A ')[0]
            ip = line.split('. A ')[1]
            ips.append(ip)
            for i in range(len(main_json['Subdomains'])):
                if subdomain == main_json['Subdomains'][i]['Domain']:
                    main_json['Subdomains'][i]['IP'] = ip
    final_ip = utils.replace_argument(
        self.options, '$WORKSPACE/subdomain/final-IP-$OUTPUT.txt')
    with open(final_ip, 'w+') as fip:
        fip.write("\n".join(str(ip) for ip in ips))
    utils.just_write(utils.replace_argument(
        self.options, '$WORKSPACE/$COMPANY.json'), main_json, is_json=True)
def post(self, request, *args, **kwargs):
    """Validate an execution request, record it (Activities + Logs) and run
    the command synchronously, returning a JSON status message.
    """
    serializer = ExecuteSerializer(data=request.data)
    serializer.is_valid(raise_exception=True)
    data = serializer.data
    forced = data.get('forced')
    cmd = data.get('cmd')
    output_path = data.get('output_path')
    cmd_type = data.get('cmd_type')
    # don't care about the status
    nolog = data.get('nolog')
    # unless forced, skip commands whose output already exists
    if not forced:
        if utils.not_empty_file(output_path):
            return common.message(500, "Commands is already done")
    # set idle status: nolog requests are recorded as already finished
    if nolog:
        data['status'] = 'Done'
    else:
        data['status'] = 'Running'
    item = parse_data(data)
    # record in both the activity feed and the permanent log
    instance = Activities.objects.create(**item)
    Logs.objects.create(**item)
    command_record = instance.as_json()
    # really run the command
    if cmd_type == 'single':
        utils.print_info("Execute: {0} ".format(cmd))
        execute.run_single(command_record)
    elif cmd_type == 'list':
        # 'list' commands are split into chunks and run with a delay
        utils.print_info("Execute chunk: {0} ".format(cmd))
        commands = execute.get_chunk_commands(command_record)
        execute.run_chunk(commands, command_record.get(
            'chunk'), command_record.get('delay'))
    # update status after done
    if instance.status != 'Done':
        instance.status = 'Done'
        instance.save()
    return common.message(200, "Commands is done")
def file_check(self):
    """Verify that every required wordlist/plugin file exists and is
    non-empty; set self.healthcheck = False when any is missing.
    """
    required = [
        "$PLUGINS_PATH/wordlists/all.txt",
        "$PLUGINS_PATH/wordlists/shorts.txt",
        "$PLUGINS_PATH/wordlists/raft-large-directories.txt",
        "$PLUGINS_PATH/apps.json",
        "$PLUGINS_PATH/nmap-stuff/vulners.nse",
        "$PLUGINS_PATH/nmap-stuff/nmap_xml_parser.py",
        "$PLUGINS_PATH/nmap-stuff/masscan_xml_parser.py",
        "$PLUGINS_PATH/providers-data.csv",
    ]
    for template in required:
        filepath = utils.replace_argument(self.options, template)
        if utils.not_empty_file(filepath):
            utils.print_good("Good -- " + filepath)
        else:
            utils.print_bad("Bad -- " + filepath)
            self.healthcheck = False
def get_response(self):
    """Fetch raw HTTP responses for every alive host with meg."""
    utils.print_good('Starting meg')
    if self.is_direct:
        # meg only accepts a host list file, not a bare string
        if not utils.not_empty_file(self.is_direct):
            utils.print_bad("meg required input as a file.")
            return None
        cmd = '$GO_PATH/meg / {0} $WORKSPACE/assets/responses/ -v -c 100'.format(
            self.is_direct)
    else:
        cmd = '$GO_PATH/meg / $WORKSPACE/assets/http-$OUTPUT.txt $WORKSPACE/assets/responses/ -v -c 100'
    utils.make_directory(self.options['WORKSPACE'] + '/assets/responses')
    cmd = utils.replace_argument(self.options, cmd)
    # meg writes an index file describing every stored response
    index_path = utils.replace_argument(
        self.options, '$WORKSPACE/assets/responses/index')
    execute.send_cmd(self.options, cmd, index_path, index_path,
                     self.module_name)
def parsing_to_csv(self): nmap_detail_path = utils.replace_argument( self.options, '$WORKSPACE/vulnscan/details') # create all csv based on xml file for file in utils.list_files(nmap_detail_path, ext='xml'): # print(file) cmd = "python3 $PLUGINS_PATH/nmap-stuff/nmap_xml_parser.py -f {0} -csv $WORKSPACE/vulnscan/details/$OUTPUT-nmap.csv".format( file) cmd = utils.replace_argument(self.options, cmd) csv_output = utils.replace_argument( self.options, '$WORKSPACE/vulnscan/details/$OUTPUT-nmap.csv') # just return if this was done without checking anything if utils.not_empty_file(csv_output): return execute.send_cmd(self.options, cmd, csv_output, '', self.module_name) time.sleep(5) # looping through all csv file all_csv = "IP,Host,OS,Proto,Port,Service,Product,Service FP,NSE Script ID,NSE Script Output,Notes\n" for file in utils.list_files(nmap_detail_path, ext='csv'): all_csv += "\n".join(utils.just_read(file).splitlines()[1:]) csv_summary_path = utils.replace_argument( self.options, '$WORKSPACE/vulnscan/summary-$OUTPUT.csv') utils.just_write(csv_summary_path, all_csv) utils.print_line() # beautiful csv look cmd = "csvcut -c 1-7 $WORKSPACE/vulnscan/summary-$OUTPUT.csv | csvlook --no-inference | tee $WORKSPACE/vulnscan/std-$OUTPUT-summary.std" cmd = utils.replace_argument(self.options, cmd) output_path = utils.replace_argument( self.options, '$WORKSPACE/vulnscan/std-$OUTPUT-summary.std') execute.send_cmd(self.options, cmd, output_path, '', self.module_name) self.screenshots(all_csv)
def get_http(self):
    """Probe which discovered domains answer over HTTP(S) via httprobe."""
    utils.print_good('Starting httprobe')
    if self.is_direct:
        # httprobe needs a host list file, not a bare string
        if not utils.not_empty_file(self.is_direct):
            utils.print_bad("httprobe required input as a file.")
            return None
        cmd = 'cat {0} | $GO_PATH/httprobe -c 100 -t 20000 -v | tee $WORKSPACE/assets/http-$OUTPUT.txt'.format(
            self.is_direct)
    else:
        cmd = 'cat $WORKSPACE/subdomain/final-$OUTPUT.txt | $GO_PATH/httprobe -c 100 -t 20000 -v | tee $WORKSPACE/assets/http-$OUTPUT.txt'
    cmd = utils.replace_argument(self.options, cmd)
    http_path = utils.replace_argument(
        self.options, '$WORKSPACE/assets/http-$OUTPUT.txt')
    execute.send_cmd(self.options, cmd, http_path, http_path,
                     self.module_name)
    utils.print_line()
def get_queryset(self):
    """Filter Logs by query params; unless 'raw' is set, keep only entries
    whose output file actually exists and is non-empty.
    """
    queryset = Logs.objects.all()
    params = self.request.query_params
    module = params.get('module', None)
    workspace = params.get('workspace', None)
    cmd = params.get('cmd', None)
    raw = params.get('raw', None)
    if workspace is not None:
        queryset = queryset.filter(workspace=workspace)
    if module is not None:
        queryset = queryset.filter(module=module)
    if cmd is not None:
        queryset = queryset.filter(cmd__contains=cmd)
    # raw mode skips the on-disk existence check
    if raw:
        return queryset
    return [item for item in queryset
            if utils.not_empty_file(item.output_path)]
def create_skeleton_json(self):
    """Write the initial $WORKSPACE/$COMPANY.json skeleton (only once) and
    announce it on Slack.
    """
    output_path = utils.replace_argument(
        self.options, '$WORKSPACE/$COMPANY.json')
    # already created by a previous run -- keep the existing data
    if utils.not_empty_file(output_path):
        utils.print_info("Modules is already done")
        return
    skeleton = {
        "Company": utils.replace_argument(self.options, '$COMPANY'),
        "Main_domain": utils.replace_argument(self.options, '$TARGET'),
        "Info": {},
        "Subdomains": [],
        "Modules": {},
        "IP Space": [],
    }
    utils.just_write(output_path, skeleton, is_json=True)
    utils.check_output(output_path)
    slack.slack_noti('status', self.options, mess={
        'title': "{0} | {1}".format(self.options['TARGET'],
                                    self.module_name),
        'content': 'Create skeleton json',
    })
def get_ipsace(self):
    """Extract IPs and CIDR ranges from amass's JSONL output into
    ipspace/$OUTPUT-ipspace.txt.
    """
    amass_output = utils.replace_argument(
        self.options, '$WORKSPACE/subdomain/amass-$OUTPUT/amass.json')
    if not utils.not_empty_file(amass_output):
        return
    ips_file = utils.replace_argument(
        self.options, '$WORKSPACE/ipspace/$OUTPUT-ipspace.txt')
    data = []
    jsonl = utils.just_read(amass_output).splitlines()
    for line in jsonl:
        # FIX: tolerate blank/truncated JSONL lines instead of crashing
        try:
            json_data = json.loads(line)
        except ValueError:
            continue
        # FIX: 'addresses' can be missing -> .get() returned None and the
        # original loop raised TypeError
        for item in json_data.get('addresses') or []:
            ip = item.get('ip')
            cidr = item.get('cidr')
            asn = item.get('asn')
            utils.print_info("Found ASN for {0} on CIDR {1}".format(
                asn, cidr))
            data.extend([ip, cidr])
    utils.just_append(ips_file, data)
    utils.clean_up(ips_file)