def create_report(name, full_url, domain_name, nmap, robots_txt, whois):
    # One directory per scanned site, under the shared root.
    project_dir = ROOT_DIRS + '/' + name
    create_directory(project_dir)
    write_file(project_dir + '/full-url.txt', full_url)
    write_file(project_dir + '/domain-name.txt', domain_name)
    write_file(project_dir + '/nmap.txt', nmap)
    write_file(project_dir + '/robots-txt.txt', robots_txt)
    write_file(project_dir + '/whois.txt', whois)
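# create_directory and write_file come from a 'general' helper module that is
# not shown in this file. Below is a minimal sketch of what it presumably
# provides, inferred from the calls above (an assumption, not the project's
# actual code): create_directory must tolerate a directory that already
# exists, and write_file overwrites the target file with the given text.
import os

def create_directory(directory):
    # Create the directory only if it does not already exist.
    if not os.path.exists(directory):
        os.makedirs(directory)

def write_file(path, data):
    # Overwrite 'path' with 'data'; str() guards against non-string results.
    with open(path, 'w') as f:
        f.write(str(data))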
def save_results(nmap_scan_results, robots_txt_file, whois_info, url):
    # Directory for the website info.
    website_dir = ROOT_DIR + '/' + domainName.get_domain(url) + '/'
    general.create_directory(website_dir)

    # Generate files from the website data.
    general.write_file(website_dir + "nMap_scan.txt", nmap_scan_results)
    general.write_file(website_dir + "robots_txt_file.txt", robots_txt_file)
    general.write_file(website_dir + "whois_info.txt", whois_info)

    print("\n" + "Scan complete!!")
    print("\n" + "Results in: " + ROOT_DIR + "/" + domainName.get_domain(url))
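# domainName.get_domain() is not defined in this file. Judging from its use
# as a directory name above, it strips the scheme and path from a URL. A
# minimal sketch under that assumption (the real module may differ, e.g. it
# could use a library such as tldextract):
from urllib.parse import urlparse

def get_domain(url):
    # 'https://example.com/some/path' -> 'example.com'
    netloc = urlparse(url).netloc
    return netloc or url  # bare domains have no netloc; return them as-is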
from domain import get_domain_name
from general import create_directory, write_file
from ip_address import get_ip_address
from nmap import get_nmap
from robots_txt import get_robots_txt
from whois import get_whois
from website import websites

# Create a root directory to store all the results,
# so they don't get mixed up with the source code.
ROOT_DIRS = 'websites'
create_directory(ROOT_DIRS)


def gather_info(name, url):
    print("Scanning " + url + '\n')

    print("Getting Domain Name...")
    domain_name = get_domain_name(url)
    print("Done\n")

    print("Getting the IP Address...")
    ip_address = get_ip_address(domain_name)
    print("Done\n")

    print("Running Nmap Scan...")
    nmap = get_nmap('-F', ip_address)
    print("Done\n")

    print("Fetching robots.txt...")
    robots_txt = get_robots_txt(url)
    print("Done\n")

    print("Running Whois Lookup...")
    whois = get_whois(domain_name)
    print("Done\n")

    create_report(name, url, domain_name, nmap, robots_txt, whois)
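# None of the helpers imported above are defined in this file. The sketches
# below are assumptions inferred from how gather_info() calls them, not the
# project's actual implementations. get_nmap and get_whois shell out to the
# system 'nmap' and 'whois' binaries, which must be installed and on PATH.
import socket
import subprocess
import urllib.request

def get_ip_address(domain_name):
    # Resolve a bare domain name ('example.com') to its IPv4 address.
    return socket.gethostbyname(domain_name)

def get_nmap(options, ip):
    # '-F', as passed above, asks nmap for a fast scan of the common ports.
    result = subprocess.run(['nmap', options, ip],
                            capture_output=True, text=True)
    return result.stdout

def get_robots_txt(url):
    # Fetch robots.txt from the site root; gather_info passes the full URL.
    if not url.endswith('/'):
        url += '/'
    with urllib.request.urlopen(url + 'robots.txt') as response:
        return response.read().decode('utf-8', errors='replace')

def get_whois(domain_name):
    # Query the system whois client for registration data.
    result = subprocess.run(['whois', domain_name],
                            capture_output=True, text=True)
    return result.stdout

# Assumed driver: 'websites' (imported above but never used in the fragment)
# presumably maps a project name to its URL, e.g. {'example': 'https://example.com/'}.
if __name__ == '__main__':
    for site_name, site_url in websites.items():
        gather_info(site_name, site_url)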
import general
import domainName
import ipAddress
import nMap
import robots_txt
import whois
import sys

ROOT_DIR = 'Websites'
general.create_directory(ROOT_DIR)

args = sys.argv


def run_site_scan(url, port_range):
    # Resolve the domain to an IP address, run the scans, then save everything.
    ip = ipAddress.get_ip_address(url)
    nmap_scan_results = nMap.run_nmap(ip, port_range)
    robots_txt_file = robots_txt.get_robots_txt(url)
    whois_info = whois.get_whois(url)
    save_results(nmap_scan_results, robots_txt_file, whois_info, url)
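# Assumed entry point: the script grabs sys.argv above but the argument
# handling itself is not shown. A plausible invocation (the argument order
# here is a guess):
#   python scan.py example.com 1-1024
if __name__ == '__main__':
    if len(args) >= 3:
        run_site_scan(args[1], args[2])
    else:
        print("Usage: python scan.py <url> <port_range>")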