def run_cmd_noout(cmd_data):
    """
    Execute a command, redirecting combined stdout/stderr to a file.

    cmd_data: (cmd, output) where cmd is an argv list whose LAST element
    is a timeout in seconds (falsy = wait forever) and output is the path
    the command's output is written to.

    Returns cmd_data unchanged so pool/map callers can correlate results.
    """
    cmd = cmd_data[0]
    output = cmd_data[1]
    c = cmd[:-1]       # the actual argv
    timeout = cmd[-1]  # trailing element is the timeout sentinel
    display("Executing command: %s" % " ".join(c))
    current_time = time.time()
    # 'with' guarantees the output file is closed even if Popen raises;
    # the original open()/close() pair leaked the handle on error.
    with open(output, 'w') as f:
        if timeout:
            process = Popen(c, stdout=f, stderr=STDOUT)
            # Poll every 5 seconds until the deadline passes or the
            # process exits on its own.
            while time.time() < current_time + timeout and process.poll() is None:
                time.sleep(5)
            if process.poll() is None:
                display_error(
                    "Timeout of %s reached. Aborting thread for command: %s"
                    % (timeout, " ".join(c))
                )
                process.terminate()
        else:
            Popen(c, stdout=f, stderr=STDOUT).wait()
    return cmd_data
def process_output(self, targets):
    """
    Parse each target's output file of discovered domains, creating
    Domain records and linking any resolved IPAddress records to them.
    Records the tool run on the target's BaseDomain either way.
    """
    for target in targets:
        try:
            with io.open(target["output"], encoding="utf-8") as fd:
                for line in fd:
                    domain = line.strip()
                    # Guard blank lines: the original indexed domain[0]
                    # unconditionally and raised IndexError on empty input.
                    if not domain:
                        continue
                    if domain[0] == '.':
                        # Normalize leading-dot (wildcard-style) entries.
                        domain = domain[1:]
                    ips = get_ips(domain)
                    ip_obj = None
                    dom, created = Domain.objects.get_or_create(
                        name=domain)
                    if ips:
                        for ip in ips:
                            ip_obj, created = IPAddress.objects.get_or_create(
                                ip_address=ip)
                            if ip_obj:
                                dom.ip_addresses.add(ip_obj)
                        dom.save()
        except FileNotFoundError:
            display_error("File doesn't exist for {}".format(
                target["output"]))
        bd, created = BaseDomain.objects.get_or_create(
            name=target["target"])
        bd.add_tool_run(tool=self.name, args=self.args.tool_args)
def run_cmd(cmd):
    """
    Execute a command with an optional timeout (stdout/stderr inherited).

    cmd is an argv list whose LAST element is the timeout in seconds
    (falsy = wait forever). Returns cmd unchanged so pool/map callers can
    correlate results.
    """
    # (Removed a commented-out block that re-quoted argv elements
    # containing spaces; it was dead code.)
    c = cmd[:-1]       # the actual argv
    timeout = cmd[-1]  # trailing element is the timeout sentinel
    display("Executing command: %s" % " ".join(c))
    current_time = time.time()
    if timeout:
        process = Popen(c)
        # Poll every 5 seconds until the deadline passes or the process
        # exits on its own.
        while time.time() < current_time + timeout and process.poll() is None:
            time.sleep(5)
        if process.poll() is None:
            display_error(
                "Timeout of %s reached. Aborting thread for command: %s"
                % (timeout, " ".join(c))
            )
            process.terminate()
    else:
        Popen(c).wait()
    return cmd
def get_targets(self, args):
    """
    Build chunked target files of URLs to scan.

    URLs come from an import file and/or the database (active scope).
    Returns a list of {"target": <temp input file>, "output": <result
    path>} dicts, or exits when no targets could be gathered.
    """
    timestamp = str(int(time()))
    targets = []
    if args.import_file:
        # 'with' closes the handle; the original bare open() leaked it.
        with open(args.import_file) as fd:
            targets += [t for t in fd.read().split("\n") if t]
    if args.import_database:
        if args.rescan:
            targets += get_urls(scope_type="active")
        else:
            targets += get_urls(scope_type="active", tool=self.name,
                                args=self.args.tool_args)
    if targets:
        # A leading "/" is treated as relative to the armory base path
        # (just with the slash stripped), not as an absolute path.
        if args.output_path[0] == "/":
            self.path = os.path.join(
                self.base_config["ARMORY_BASE_PATH"],
                args.output_path[1:],
                timestamp,
            )
        else:
            self.path = os.path.join(
                self.base_config["ARMORY_BASE_PATH"],
                args.output_path,
                timestamp,
            )
        if not os.path.exists(self.path):
            os.makedirs(self.path)
        res = []
        i = 0
        # group_size == 0 means "everything in a single chunk".
        if args.group_size == 0:
            args.group_size = len(targets)
        for url_chunk in self.chunks(targets, args.group_size):
            i += 1
            _, file_name = tempfile.mkstemp()
            with open(file_name, "w") as fd:
                fd.write("\n".join(url_chunk))
            res.append({
                "target": file_name,
                "output": self.path + "-results-{}.txt".format(i),
            })
        return res
    else:
        display_error("No hosts provided to scan.")
        sys.exit(1)
def process_output(self, cmds):
    """
    Parse "Found: <domain>" lines from each command's output file,
    creating Domain records, and record the tool run on the BaseDomain.
    """
    for c in cmds:
        output_path = c["output"]
        if os.path.isfile(output_path):
            # 'with' closes the handle; the original bare open() leaked it.
            with open(output_path) as fd:
                data = fd.read().split("\n")
            for d in data:
                if "Found: " in d:
                    # Line format: "Found: <domain>" -> take token 1.
                    new_domain = d.split(" ")[1].lower()
                    subdomain, created = Domain.objects.get_or_create(
                        name=new_domain)
        else:
            display_error("{} not found.".format(output_path))
        bd, created = BaseDomain.objects.get_or_create(name=c['target'])
        bd.add_tool_run(self.name, self.args.tool_args)
def process_output(self, cmds):
    """
    Parse Sublist3r output files and create a Domain record for every
    discovered subdomain.
    """
    for cmd in cmds:
        output_path = cmd["output"]
        if os.path.isfile(output_path):
            # 'with' closes the handle; the original bare open() leaked it.
            with open(output_path) as fd:
                data = fd.read().split("\n")
            for d in data:
                # Sublist3r is spitting out <BR>s now...
                for ds in d.split('<BR>'):
                    if ds:
                        # Strip any ":port" suffix and normalize case.
                        new_domain = ds.split(":")[0].lower()
                        if new_domain:
                            # print("Checking {}".format(new_domain))
                            subdomain, created = Domain.objects.get_or_create(
                                name=new_domain)
        else:
            display_error("{} not found.".format(output_path))
            # Removed a stray bare `next` expression that followed this
            # branch: it merely evaluated the builtin and discarded it
            # (a no-op, almost certainly a leftover meant as `continue`).
def process_output(self, cmds):
    """
    Process the output generated by the earlier commands.

    Each output file is JSON mapping subdomain -> ip; a Domain record is
    created for every non-empty subdomain. The tool run is recorded on
    the BaseDomain regardless of parse success.
    """
    for cmd in cmds:
        d, created = BaseDomain.objects.get_or_create(name=cmd['target'])
        d.add_tool_run(self.name, self.args.tool_args)
        try:
            # 'with' closes the handle; the original bare open() leaked it.
            with open(cmd["output"]) as fd:
                data2 = json.loads(fd.read())
            for sub, ip in data2.items():
                new_domain = sub.lower()
                if new_domain:
                    subdomain, created = Domain.objects.get_or_create(
                        name=new_domain)
        except Exception:
            # Broad on purpose: a missing file or malformed JSON both
            # just skip this target. (Removed the unused `as e` binding
            # and a dead `created = False` assignment.)
            display_error("Couldn't find file: {}".format(cmd["output"]))
def process_output(self, cmds):
    """
    Parse DnsRecon JSON output, attach the raw records to the queried
    domain, and create Domain entries for every hostname referenced by
    A/PTR/MX/SRV/NS/SOA records. Records the tool run on the target's
    CIDR (for reverse sweeps) or BaseDomain.
    """
    for c in cmds:
        target = c["target"]
        output_path = c["output"]
        try:
            with open(output_path) as fd:
                res = json.loads(fd.read())
        except IOError:
            display_error("DnsRecon failed for {}".format(target))
            continue
        # Only a "-d <domain>" run's records belong on a Domain object.
        if " -d " in res[0]["arguments"]:
            dbrec, created = Domain.objects.get_or_create(name=target)
            dbrec.dns = res
            dbrec.save()
        for record in res:
            domain = None
            ip = None
            if record.get("type") == "A" or record.get("type") == "PTR":
                domain = record.get("name").lower().replace("www.", "")
                ip = record.get("address")
            elif record.get("type") == "MX":
                domain = record.get("exchange").lower().replace("www.", "")
            # BUG FIX: this previously read record.get("type" == "NS"),
            # i.e. record.get(False) -> None, so NS records never matched.
            elif record.get("type") == "SRV" or record.get("type") == "NS":
                domain = record.get("target").lower().replace("www.", "")
            elif record.get("type") == "SOA":
                domain = record.get("mname").lower().replace("www.", "")
            if domain:
                domain_obj, created = Domain.objects.get_or_create(
                    name=domain)
        if '/' in target:
            bd, created = CIDR.objects.get_or_create(
                name=target, defaults={'active_scope': True})
        else:
            bd, created = BaseDomain.objects.get_or_create(name=target)
        bd.add_tool_run(tool=self.name, args=self.args.tool_args)
def get_cidr_info(ip_address):
    """
    Return a (cidr, description) tuple for an IP address.

    Private addresses short-circuit to their matching private subnet.
    Otherwise whois is consulted (referral lookup first, plain lookup as
    fallback); the smallest network containing the IP wins. When whois
    yields no networks at all, a guessed /24 is returned.
    """
    for p in private_subnets:
        if ip_address in p:
            return str(p), 'Non-Public Subnet'
    try:
        res = IPWhois(ip_address).lookup_whois(get_referral=True)
    except Exception:
        # Referral lookups fail for some registries; retry without.
        try:
            res = IPWhois(ip_address).lookup_whois()
        except Exception as e:
            display_error("Error trying to resolve whois: {}".format(e))
            res = {}
    if not res.get('nets', []):
        display_warning(
            "The networks didn't populate from whois. Defaulting to a /24.")
        return '{}.0/24'.format('.'.join(
            ip_address.split('.')[:3])), "Whois failed to resolve."
    cidr_data = []
    for net in res['nets']:
        # A whois net entry may list several comma-separated CIDRs.
        for cd in net['cidr'].split(', '):
            cidr_data.append([
                len(IPNetwork(cd)), cd,
                net['description'] if net['description'] else ""
            ])
    try:
        # Sort by network size so the most specific (smallest) CIDR is
        # first.
        cidr_data.sort()
    except Exception as e:
        # Unorderable rows shouldn't occur; report and fall through to
        # whatever order we have. (Removed a leftover pdb.set_trace()
        # that would hang non-interactive runs here.)
        display_error("Error occured: {}".format(e))
    return cidr_data[0][1], cidr_data[0][2]
def reclassify_domain(self, bd):
    """
    Interactively re-scope a base domain using its stored whois data.

    Shows the whois blob and asks the operator whether the domain is
    active scope, passive scope, or out of scope; anything other than
    'a' or 'p' removes it from both. Without whois metadata the domain
    is left untouched and an error is displayed.
    """
    if not bd.meta.get("whois", False):
        display_error(
            "Unfortunately, there is no whois information for {}. Please populate it using the Whois module"
            .format(bd.name))
        return
    display_new("Whois data found for {}".format(bd.name))
    print(bd.meta["whois"])
    answer = input(
        "Should this domain be scoped (A)ctive, (P)assive, or (N)ot? [a/p/N] "
    ).lower()
    # 'a' => active+passive, 'p' => passive only, default => neither.
    bd.active_scope = answer == "a"
    bd.passive_scope = answer in ("a", "p")
    bd.save()
def process_urls(data):
    """
    Fetch one URL and harvest its interesting response headers and
    cookies.

    data: (url, timeout). Returns (headers, cookies), each a dict keyed
    by the URL; both are empty when the request fails for any reason.
    """
    # silence insecure url warnings
    requests.packages.urllib3.disable_warnings(InsecureRequestWarning)
    url, timeout = data[0], data[1]
    # Headers with no fingerprinting value are skipped.
    skip = {
        "Date",
        "Connection",
        "Content-Type",
        "Content-Length",
        "Keep-Alive",
        "Content-Encoding",
        "Vary",
    }
    harvested_headers = {}
    harvested_cookies = {}
    display("Processing %s" % url)
    try:
        res = requests.get(
            url,
            timeout=int(timeout),
            verify=False,
            headers={
                "User-Agent": "Mozilla/5.0 (Macintosh; Intel Mac OS X 14.5; rv:10.0) Gecko/20100101 Firefox/89.0"
            })
        res.raise_for_status()
        kept = [
            "{}: {}".format(name, value)
            for name, value in res.headers.items() if name not in skip
        ]
        # Only create the entry when something survived the filter,
        # matching the original lazy-initialization behavior.
        if kept:
            harvested_headers[url] = kept
        harvested_cookies[url] = dict(res.cookies)
    except requests.exceptions.HTTPError as http_error:
        display_error("Http Error: {}".format(http_error))
    except requests.exceptions.ConnectionError as connect_error:
        display_error("Error Connecting: {}".format(connect_error))
    except KeyboardInterrupt:
        display_warning("Got Ctrl+C, exiting")
        sys.exit(1)
    except Exception as e:
        display_error("{} no good, skipping: {}".format(url, e))
    return (harvested_headers, harvested_cookies)
def run(self, args):
    """
    Drive LinkedInt searches for a domain.

    Optionally derives the top-N most common job-title words from users
    already stored for the domain (--top), then either iterates those
    keywords one at a time with a persisted "already searched" blacklist
    (--auto-keyword) or runs one OR-joined and one negated AND-joined
    smart-shuffle query.
    """
    # pdb.set_trace()
    if not args.binary:
        self.binary = which.run("LinkedInt.py")
    else:
        self.binary = args.binary
    if not self.binary:
        # NOTE(review): execution continues with self.binary == None here;
        # presumably process_domain handles that — confirm.
        display_error(
            "LinkedInt binary not found. Please explicitly provide path with --binary"
        )
    if args.domain:
        domain, created = BaseDomain.objects.get_or_create(name=args.domain)
        if args.top:
            # Take the part of each stored job title before " at ", split
            # into words, and count frequencies.
            titles = [
                user.job_title.split(" at ")[0]
                for user in domain.user_set.all() if user.job_title
            ]
            words = []
            for t in titles:
                words += [w.lower() for w in get_words(t)]
            word_count = Counter(words).most_common()
            display("Using the top %s words:" % args.top)
            res = []
            for w in word_count[: int(args.top)]:
                display("\t{}\t{}".format(w[0], w[1]))
                res.append(w[0])
            # pdb.set_trace()
            # The top words become the smart-shuffle keyword list.
            args.smart_shuffle = ",".join(res)
        if args.auto_keyword:
            if not args.top:
                display_error("You must specify the top number of keywords using --top")
            else:
                # Persistent blacklist of keywords already searched, one
                # per line; appended to as each keyword completes.
                if os.path.isfile('/tmp/armory_linkedinsearchqueries'):
                    blacklist = open('/tmp/armory_linkedinsearchqueries').read().split('\n')
                else:
                    blacklist = []
                bfile = open('/tmp/armory_linkedinsearchqueries', 'a')
                for w in args.smart_shuffle.split(','):
                    if w not in blacklist:
                        args.keywords = w
                        self.process_domain(domain, args)
                        # Only record the keyword after a completed run.
                        bfile.write('{}\n'.format(w))
                    else:
                        display("Skipped {} due to it already being searched.".format(w))
                bfile.close()
        elif args.smart_shuffle:
            # First pass: any of the keywords (quoted, OR-joined).
            args.keywords = " OR ".join(
                ['"{}"'.format(i) for i in args.smart_shuffle.split(",")]
            )
            self.process_domain(domain, args)
            # Second pass: everything EXCLUDING those keywords.
            args.keywords = " AND ".join(
                ['-"{}"'.format(i) for i in args.smart_shuffle.split(",")]
            )
            self.process_domain(domain, args)
        else:
            self.process_domain(domain, args)
def run(self, args):
    """
    Run the external enumeration tool for a domain and import its
    results.

    Executes "python3 <binary> <domain> -o <file> -A <api>" from the
    binary's own directory (unless --no-binary), then parses the
    sectioned output file; discovered domain names are stored with
    active/passive scope flags per args.scope.
    """
    if not args.domain:
        display_error("You need to supply a domain to search for.")
        return
    if not args.binary:
        self.binary = which.run(self.binary_name)
    else:
        self.binary = args.binary
    if not self.binary:
        display_error(
            "{} binary not found. Please explicitly provide path with --binary".format(self.binary_name)
        )
        if not args.no_binary:
            # BUG FIX: the original fell through and crashed below on
            # self.binary.split() with self.binary == None.
            return
    # A leading "/" is treated as relative to the armory base path.
    if args.output_path[0] == "/":
        output_path = os.path.join(
            self.base_config["ARMORY_BASE_PATH"], 'output',
            args.output_path[1:]
        )
    else:
        output_path = os.path.join(
            self.base_config["ARMORY_BASE_PATH"], 'output',
            args.output_path
        )
    if not os.path.exists(output_path):
        os.makedirs(output_path)
    output_path = os.path.join(output_path, "{}.txt".format(args.domain))
    command_args = " {} -o {} -A {} ".format(args.domain, output_path,
                                             args.api)
    if args.tool_args:
        command_args += ' '.join(args.tool_args)
    if not args.no_binary:
        # The tool expects to run from its own directory; chdir there and
        # restore afterwards.
        current_dir = os.getcwd()
        new_dir = "/".join(self.binary.split("/")[:-1])
        os.chdir(new_dir)
        cmd = shlex.split("python3 " + self.binary + command_args)
        print("Executing: %s" % " ".join(cmd))
        subprocess.Popen(cmd).wait()
        os.chdir(current_dir)
    # 'with' closes the handle; the original bare open() leaked it.
    with open(output_path) as fd:
        results = fd.read().split('\n')
    cur_type = None
    for r in results:
        if r:
            # Section headers switch what subsequent lines mean.
            if '### Company Names' in r:
                cur_type = "company"
            elif '### Domain Names' in r:
                cur_type = "domain"
            elif '### Email Addresses' in r:
                cur_type = "email"
            else:
                if cur_type == "domain":
                    if args.scope == "active":
                        d, created = Domain.objects.get_or_create(
                            name=r,
                            defaults={"active_scope": True,
                                      "passive_scope": True})
                    elif args.scope == "passive":
                        d, created = Domain.objects.get_or_create(
                            name=r,
                            defaults={"active_scope": False,
                                      "passive_scope": True})
                    else:
                        d, created = Domain.objects.get_or_create(
                            name=r,
                            defaults={"active_scope": False,
                                      "passive_scope": False})
def get_targets(self, args):
    """
    This module is used to build out a target list and output file list,
    depending on the arguments. Should return a list in the format
    [(target, output), (target, output), etc, etc]

    For each base domain, writes aquatone input files (hosts.txt,
    urls.txt, open_ports.txt, hosts.json) covering every subdomain IP
    with an http(ish) service.
    """
    targets = []
    if args.import_database:
        if args.rescan:
            all_domains = BaseDomain.get_set(scope_type="passive")
        else:
            all_domains = BaseDomain.get_set(tool=self.name,
                                             args=args.tool_args,
                                             scope_type="passive")
        for d in all_domains:
            # We need to find all of the http/https ports and create the
            # json file.
            output_path = os.path.join(
                self.base_config["ARMORY_BASE_PATH"],
                "output",
                "aquatone",
                d.name,
            )
            if not os.path.exists(output_path):
                os.makedirs(output_path)
            hosts_j = {}
            hosts = []
            open_ports = []
            urls = []
            targets.append(d.name)
            for s in d.domain_set.all():
                name = s.name
                for ip in s.ip_addresses.all():
                    hosts_j[name] = ip.ip_address
                    port_list = []
                    for p in ip.port_set.all():
                        # Any service whose name contains "http"
                        # (http, https, http-proxy, ...).
                        if "http" in p.service_name:
                            hosts.append("{}.{}".format(
                                name, ip.ip_address))
                            port_list.append(p.port_number)
                            urls.append("{}://{}:{}/".format(
                                p.service_name, name, p.port_number))
                            urls.append("{}://{}:{}/".format(
                                p.service_name, ip.ip_address,
                                p.port_number))
                    if port_list:
                        open_ports.append("{},{}".format(
                            ip.ip_address,
                            ",".join([str(o) for o in port_list])))
            # Context managers so the handles close promptly; the
            # original bare open().write() calls leaked all four.
            with open(os.path.join(output_path, "hosts.txt"), "w") as fd:
                fd.write("\n".join(list(set(hosts))))
            with open(os.path.join(output_path, "urls.txt"), "w") as fd:
                fd.write("\n".join(list(set(urls))))
            with open(os.path.join(output_path,
                                   "open_ports.txt"), "w") as fd:
                fd.write("\n".join(list(set(open_ports))))
            with open(os.path.join(output_path, "hosts.json"), "w") as fd:
                fd.write(json.dumps(hosts_j))
    else:
        display_error("You need to supply domain(s).")
    res = []
    for t in targets:
        res.append({"target": t})
    return res
def run(self, args):
    """
    Gather Shodan data for in-scope targets.

    Builds three work lists — raw search queries ("net:..." in fast
    mode), CIDRs to expand into individual IPs, and single IPs — from
    the database and/or --target, then feeds each item to get_shodan()
    and records tool runs on the matching CIDR/IPAddress objects.
    """
    ranges = []
    cidrs = []
    ips = []
    search = []
    if not args.api_key:
        display_error("You must supply an API key to use shodan!")
        return
    if args.search:
        search = [args.search]
    if args.import_db:
        if args.rescan:
            if args.fast:
                # Fast mode: one "net:" search per CIDR instead of
                # querying every IP individually.
                search += [
                    "net:{}".format(c.name)
                    for c in CIDR.get_set(scope_type="active")
                ]
            else:
                cidrs += [
                    c.name for c in CIDR.get_set(scope_type="active")
                ]
                if not args.cidr_only:
                    ips += [
                        "{}".format(i.ip_address)
                        for i in IPAddress.get_set(scope_type="active")
                    ]
        else:
            if args.fast:
                search += [
                    "net:{}".format(c.name)
                    for c in CIDR.get_set(scope_type="active")
                ]
            else:
                cidrs += [
                    c.name for c in CIDR.get_set(scope_type="active")
                ]
                if not args.cidr_only:
                    # Non-rescan path filters on this tool's prior runs.
                    ips += [
                        "{}".format(i.ip_address)
                        for i in IPAddress.get_set(scope_type="active",
                                                   tool=self.name)
                    ]
    if args.target:
        if '/' not in args.target:
            ips += [args.target]
        elif args.fast:
            # NOTE(review): this puts a "net:..." string into cidrs, which
            # is later passed to IPNetwork() below — looks like it was
            # meant for the search list instead; confirm.
            cidrs += ["net:{}".format(args.target)]
        else:
            cidrs += [args.target]
    # Expand everything into one list purely to estimate total queries
    # (roughly one query per second, hence the seconds-based math).
    for c in cidrs:
        ranges += [str(i) for i in IPNetwork(c)]
    ranges += ips
    ranges += search
    display(
        "Doing a total of {} queries. Estimated time: {} days, {} hours, {} minutes and {} seconds."
        .format(len(ranges), int(len(ranges) / 24.0 / 60.0 / 60.0),
                int(len(ranges) / 60.0 / 60.0) % 60,
                int(len(ranges) / 60.0) % 60,
                len(ranges) % 60))
    # Process each CIDR: ranges is rebound to just that CIDR's IPs.
    for c in cidrs:
        ranges = [str(i) for i in IPNetwork(c)]
        display(
            "Processing {} IPs. Estimated time: {} days, {} hours, {} minutes and {} seconds."
            .format(c, int(len(ranges) / 24.0 / 60.0 / 60.0),
                    int(len(ranges) / 60.0 / 60.0) % 60,
                    int(len(ranges) / 60.0) % 60,
                    len(ranges) % 60))
        for r in ranges:
            self.get_shodan(r, args)
        cd = CIDR.objects.filter(name=c)
        if cd:
            cd[0].add_tool_run(tool=self.name)
    # NOTE(review): the estimate below mixes len(ips) with len(ranges),
    # where ranges still holds the LAST CIDR's IPs (or the combined list
    # when cidrs is empty) — the time figures look wrong; confirm.
    display(
        "Processing {} IPs. Estimated time: {} days, {} hours, {} minutes and {} seconds."
        .format(len(ips), int(len(ranges) / 24.0 / 60.0 / 60.0),
                int(len(ranges) / 60.0 / 60.0) % 60,
                int(len(ranges) / 60.0) % 60,
                len(ranges) % 60))
    for i in ips:
        self.get_shodan(i, args)
        ip = IPAddress.objects.all().filter(ip_address=i)
        if ip:
            ip[0].add_tool_run(tool=self.name)
    for s in search:
        self.get_shodan(s, args)
        # "net:<cidr>" searches map back to a CIDR record for the
        # tool-run bookkeeping.
        if s[:4] == "net:":
            cd = CIDR.objects.filter(name=s[4:])
            if cd:
                cd[0].add_tool_run(tool=self.name)
def get_shodan(self, r, args):
    """
    Query Shodan for one item and import the results.

    r containing ":" is treated as a search query (paged search API);
    otherwise r is a single host lookup. Both paths create/update
    IPAddress and Port records and harvest domains from SSL cert
    subjectAltName/CN fields and Shodan hostnames.
    """
    api_host_url = "https://api.shodan.io/shodan/host/{}?key={}"
    api_search_url = (
        "https://api.shodan.io/shodan/host/search?key={}&query={}&page={}")
    # Rate-limit: Shodan allows roughly one request per second.
    time.sleep(1)
    if ":" in r:
        display("Doing Shodan search: {}".format(r))
        try:
            results = json.loads(
                requests.get(api_search_url.format(args.api_key, r,
                                                   1)).text)
            if results.get(
                    "error") and "request timed out" in results["error"]:
                display_warning(
                    "Timeout occurred on Shodan's side.. trying again in 5 seconds."
                )
                results = json.loads(
                    requests.get(api_search_url.format(args.api_key, r,
                                                       1)).text)
        except Exception as e:
            display_error("Something went wrong: {}".format(e))
            # NOTE(review): bare `next` is a no-op (it just references the
            # builtin); on failure `results` stays undefined and the line
            # below raises NameError. Probably meant `return`.
            next
        total = len(results["matches"])
        matches = []
        i = 1
        # pdb.set_trace()
        # Page through results until an empty page comes back.
        while total > 0:
            display("Adding {} results from page {}".format(total, i))
            matches += results["matches"]
            i += 1
            try:
                time.sleep(1)
                results = json.loads(
                    requests.get(api_search_url.format(args.api_key, r,
                                                       i)).text)
                if (results.get("error")
                        and "request timed out" in results["error"]  # noqa: W503
                   ):
                    display_warning(
                        "Timeout occurred on Shodan's side.. trying again in 5 seconds."
                    )
                    # NOTE(review): the retry re-fetches page 1, not page
                    # i — looks like a copy/paste slip; confirm.
                    results = json.loads(
                        requests.get(
                            api_search_url.format(args.api_key, r,
                                                  1)).text)
                total = len(results["matches"])
            except Exception as e:
                display_error("Something went wrong: {}".format(e))
                total = 0
                # NOTE(review): leftover debugger breakpoint; hangs
                # non-interactive runs.
                pdb.set_trace()
        domains = []
        for res in matches:
            ip_str = res["ip_str"]
            port_str = res["port"]
            transport = res["transport"]
            display("Processing IP: {} Port: {}/{}".format(
                ip_str, port_str, transport))
            IP, created = IPAddress.objects.get_or_create(
                ip_address=ip_str)
            # NOTE(review): stores the whole (last-page) result set on
            # every IP, not just this IP's record — confirm intent.
            IP.meta["shodan_data"] = results
            port, created = Port.objects.get_or_create(
                ip_address=IP, port_number=port_str, proto=transport)
            if created:
                # Infer the service name from the presence of ssl/http
                # sections in the result.
                svc = ""
                if res.get("ssl", False):
                    svc = "https"
                elif res.get("http", False):
                    svc = "http"
                else:
                    svc = ""
                port.service_name = svc
                port.status = "open"
                port.meta["shodan_data"] = res
                port.save()
            if res.get("ssl", {}).get('cert', {}).get('extensions'):
                for d in res['ssl']['cert']['extensions']:
                    if d['name'] == 'subjectAltName':
                        # NOTE(review): passes d['name'] (the literal
                        # string "subjectAltName"); the host-lookup branch
                        # below uses d['data'] — this looks like a bug.
                        domains += get_domains_from_data(d['name'])
            # Non-wildcard CN values are harvested as domains too.
            if res.get("ssl", {}).get('cert', {}).get(
                    'subject', {}
            ).get('CN') and '*' not in res['ssl']['cert']['subject']['CN']:
                domains.append(res['ssl']['cert']['subject']['CN'])
            if res.get('hostnames'):
                domains += res['hostnames']
        for d in list(set(domains)):
            # Require at least one dot to filter out junk names.
            if d.count('.') > 0:
                display("Adding discovered domain {}".format(
                    only_valid(d)))
                domain, created = Domain.objects.get_or_create(
                    name=only_valid(d))
            else:
                display_error("Ignoring bad domain {}".format(d))
    else:
        # Single-host lookup path.
        display("Searching for {}".format(r))
        try:
            results = json.loads(
                requests.get(api_host_url.format(r, args.api_key)).text)
        except Exception as e:
            display_error("Something went wrong: {}".format(e))
            # NOTE(review): bare `next` no-op again — on failure the
            # results.get() below raises NameError; probably meant
            # `return`.
            next
        # pdb.set_trace()
        if results.get("data", False):
            display("{} results found for: {}".format(
                len(results["data"]), r))
            domains = []
            for res in results["data"]:
                ip_str = res["ip_str"]
                port_str = res["port"]
                transport = res["transport"]
                display("Processing IP: {} Port: {}/{}".format(
                    ip_str, port_str, transport))
                IP, created = IPAddress.objects.get_or_create(
                    ip_address=ip_str)
                IP.meta["shodan_data"] = results
                port, created = Port.objects.get_or_create(
                    ip_address=IP, port_number=port_str, proto=transport)
                if created:
                    svc = ""
                    if res.get("ssl", False):
                        svc = "https"
                    elif res.get("http", False):
                        svc = "http"
                    else:
                        svc = ""
                    port.service_name = svc
                    port.status = "open"
                    port.meta["shodan_data"] = res
                    port.save()
                if res.get("ssl", {}).get('cert', {}).get('extensions'):
                    for d in res['ssl']['cert']['extensions']:
                        if d['name'] == 'subjectAltName':
                            domains += get_domains_from_data(d['data'])
                            display(
                                "Domains discovered in subjectAltName: {}".
                                format(", ".join(
                                    get_domains_from_data(d['data']))))
                if res.get("ssl", {}).get('cert', {}).get(
                        'subject', {}).get('CN') and '*' not in res['ssl'][
                            'cert']['subject']['CN']:
                    domains.append(res['ssl']['cert']['subject']['CN'])
                if res.get('hostnames'):
                    domains += res['hostnames']
            for d in list(set(domains)):
                if d.count('.') > 0 and not is_ip(d):
                    display("Adding discovered domain {}".format(d))
                    domain, created = Domain.objects.get_or_create(name=d)
                else:
                    display_error("Ignoring bad domain {}".format(d))
def run(self, args):
    """
    Run asnlookup for a keyword and import new IPv4 CIDRs into scope.

    Executes "python3 <binary> -o <keyword> --output <dir>" from the
    binary's directory, reads the generated <keyword>_ipv4.txt, and
    creates active/passive-scoped CIDR records for any range not already
    covered by an existing CIDR.
    """
    if not args.keyword:
        display_error("You need to supply a keyword to search for.")
        return
    if not args.binary:
        self.binary = which.run(self.binary_name)
    else:
        self.binary = args.binary
    if not self.binary:
        display_error(
            "Asnlookup binary not found. Please explicitly provide path with --binary"
        )
        # BUG FIX: the original fell through and crashed below on
        # self.binary.split() with self.binary == None.
        return
    # A leading "/" is treated as relative to the armory base path.
    if args.output_path[0] == "/":
        output_path = os.path.join(self.base_config["ARMORY_BASE_PATH"],
                                   'output', args.output_path[1:])
    else:
        output_path = os.path.join(self.base_config["ARMORY_BASE_PATH"],
                                   'output', args.output_path)
    if not os.path.exists(output_path):
        os.makedirs(output_path)
    command_args = " -o {} --output {} ".format(args.keyword, output_path)
    # The tool expects to run from its own directory; chdir there and
    # restore afterwards.
    current_dir = os.getcwd()
    new_dir = "/".join(self.binary.split("/")[:-1])
    os.chdir(new_dir)
    cmd = shlex.split("python3 " + self.binary + command_args)
    print("Executing: %s" % " ".join(cmd))
    subprocess.Popen(cmd).wait()
    os.chdir(current_dir)
    # 'with' closes the handle; the original bare open() leaked it.
    with open(os.path.join(output_path,
                           "{}_ipv4.txt".format(args.keyword))) as fd:
        ip_ranges = fd.read().split('\n')
    for r in ip_ranges:
        if r:
            display("Processing {}".format(r))
            current_cidrs = [c.name for c in CIDR.objects.all()]
            new_cidr = True
            # Skip ranges already contained in a known CIDR.
            for nc in current_cidrs:
                if IPNetwork(r) in IPNetwork(nc):
                    new_cidr = False
            if new_cidr:
                SC, created = CIDR.objects.get_or_create(
                    name=r,
                    defaults={
                        'active_scope': True,
                        'passive_scope': True
                    })
                if created:
                    display_new(
                        "New CIDR added to ScopeCIDRS: {}".format(r))
def process_output(self, cmds):
    """
    Interactively review pymeta CSV metadata and create User records.

    Reads Author/Creator/Producer columns from pymeta_<target>.csv,
    dedups the values, and asks the operator to confirm each candidate
    name ("First Last" or "Last, First") or email before storing it.
    """
    for cmd in cmds:
        output_path = cmd["output"]
        domain_obj, created = BaseDomain.objects.get_or_create(
            name=cmd["target"])
        try:
            csvreader = csv.reader(
                open(
                    os.path.join(cmd["output"],
                                 "pymeta_{}.csv".format(cmd["target"]))))
            # Python 2/3-compatible "read the header row".
            if sys.version[0] == '2':
                headers = csvreader.next()
            else:
                headers = csvreader.__next__()
            searchable_headers = ["Author", "Creator", "Producer"]
            indexes = [
                headers.index(s) for s in searchable_headers
                if s in headers
            ]
            data = []
            for row in csvreader:
                for i in indexes:
                    data.append(row[i])
            data = list(set(data))  # Dedup
            for d in data:
                # Two space-separated tokens looks like a person's name.
                if d.strip() and len(d.split(' ')) == 2:
                    # NOTE(review): raw_input is Python 2 only; on
                    # Python 3 this raises NameError, swallowed by the
                    # broad except below — confirm intended runtime.
                    res = raw_input("Is %s a valid name? [y/N] " % d)
                    if res and res[0].lower() == "y":
                        if " " in d:
                            if ", " in d:
                                # "Last, First" ordering.
                                first_name = d.split(", ")[1]
                                last_name = d.split(", ")[0]
                            else:
                                first_name = d.split(" ")[0]
                                last_name = " ".join(d.split(" ")[1:])
                            # BUG FIX: defaults was the SET {'email', ''};
                            # get_or_create requires a dict of defaults.
                            user, created = User.objects.get_or_create(
                                first_name=first_name,
                                last_name=last_name,
                                defaults={'email': ''})
                            if created:
                                print("New user created")
                            user.domain = domain_obj
                elif '@' in d:
                    res = raw_input(
                        "Is %s a valid email address? [y/N] " % d)
                    if res and res[0].lower() == 'y':
                        user, created = User.objects.get_or_create(
                            email=d.strip())
                        if created:
                            print("New user created")
                        user.domain = domain_obj
        except IOError:
            # Missing CSV is a normal "nothing found" case.
            pass
        except Exception as e:
            display_error("Error processing pymeta_{}.csv: {}".format(
                cmd["target"], e))
        domain_obj.add_tool_run(tool=self.name, args=self.args.tool_args)
def run(self, args):
    """
    Nessus integration entry point.

    Three modes: import existing .nessus files (--import_file), launch a
    new scan over in-scope IPs/CIDRs/domains (--launch, optionally split
    into --max_hosts chunks where only the first autostarts), or download
    a finished job's export and import it (--download).
    """
    if args.import_file:
        for nFile in args.import_file:
            self.process_data(nFile, args)
    elif args.launch:
        # NOTE(review): this guard only fires when ALL of the options are
        # missing ("and not" chain); the error text implies it should fire
        # when ANY required one is missing — confirm.
        if (not args.username
                # noqa: W503
                and not args.password
                # noqa: W503
                and not args.host
                # noqa: W503
                and not args.uuid
                # noqa: W503
                and not args.policy_id
                # noqa: W503
                and not args.folder_id
                # noqa: W503
           ):
            display_error(
                "You must supply a username, password, and host to launch a Nessus job"
            )
        else:
            n = NessusRequest(
                args.username,
                args.password,
                args.host,
                uuid=args.uuid,
                policy_id=args.policy_id,
                folder_id=args.folder_id,
            )
            # Gather all active-scope targets from the database.
            ips = [
                ip.ip_address
                for ip in IPAddress.get_set(scope_type="active",
                                            tool=self.name)
            ]
            cidrs = [
                cidr.name
                for cidr in CIDR.get_set(tool=self.name,
                                         scope_type="active")
            ]
            domains = [
                domain.name
                for domain in Domain.get_set(scope_type="active",
                                             tool=self.name)
            ]
            if args.max_hosts > 0:
                # Expand CIDRs to individual IPs so targets can be
                # chunked evenly.
                all_ips = []
                for c in cidrs:
                    all_ips += [str(i) for i in IPNetwork(c)]
                all_ips += ips
                targets = list(set(all_ips)) + domains
                chunks = [
                    targets[i:i + args.max_hosts]
                    for i in range(0, len(targets), args.max_hosts)
                ]
                display(f"Creating {len(chunks)}")
                i = 0
                for c in chunks:
                    i += 1
                    # Only the first chunk autostarts; the rest are
                    # created paused for manual launch.
                    if i == 1:
                        res = n.launch_job(", ".join(c),
                                           args.job_name + f" ({i})")
                        display(
                            "New Nessus job launched with ID {}".format(
                                res))
                    else:
                        res = n.launch_job(", ".join(c),
                                           args.job_name + f" ({i})",
                                           autostart=False)
                        display(
                            f"New Nessus job created with ID {res}. You'll need to launch it manually"
                        )
                display(
                    "Remember this number! You'll need it to download the job once it is done."
                )
            else:
                # Single job over merged ranges plus domains.
                targets = ", ".join(merge_ranges(ips + cidrs) + domains)
                res = n.launch_job(targets, args.job_name)
                display("New Nessus job launched with ID {}".format(res))
                display(
                    "Remember this number! You'll need it to download the job once it is done."
                )
    elif args.download:
        # NOTE(review): same "and not" chain caveat as the launch guard
        # above — confirm it should not be "or not".
        if (not args.username
                # noqa: W503
                and not args.password
                # noqa: W503
                and not args.host
                # noqa: W503
                and not args.job_id
                # noqa: W503
           ):
            display_error(
                "You must supply host, username, password and job_id to download a report to import"
            )
        else:
            n = NessusRequest(
                args.username,
                args.password,
                args.host,
            )
            # A leading "/" is treated as relative to the armory base
            # path.
            if args.output_path[0] == "/":
                output_path = os.path.join(
                    self.base_config["ARMORY_BASE_PATH"],
                    args.output_path[1:])
            else:
                output_path = os.path.join(
                    self.base_config["ARMORY_BASE_PATH"],
                    args.output_path)
            if not os.path.exists(output_path):
                os.makedirs(output_path)
            output_path = os.path.join(
                output_path,
                "Nessus-export-{}.nessus".format(int(time.time())))
            n.export_file(args.job_id, output_path)
            self.process_data(output_path, args)