Example 1
class Belati(object):
    def __init__(self):
        self.about = AboutProject()
        self.url_req = URLRequest()

        # Passing arguments
        parser = argparse.ArgumentParser(
            description='=[ {} {} by {}] ({})'.format(
                self.about.__name__, self.about.__version__,
                self.about.__author__, self.about.__giturl__))
        parser.add_argument(
            '-d',
            action='store',
            dest='domain',
            help='Perform OSINT from a domain, e.g. petruknisme.com '
                 '(without the http/https protocol)')
        parser.add_argument('-u',
                            action='store',
                            dest='username',
                            help='Perform OSINT from a username, e.g. petruknisme')
        parser.add_argument('-e',
                            action='store',
                            dest='email',
                            help='Perform OSINT from email address')
        parser.add_argument(
            '-c',
            action='store',
            dest='orgcomp',
            help='Perform OSINT from an organization or company name '
                 '(enclose it in double quotes)')
        parser.add_argument('-o',
                            action='store',
                            dest='output_files',
                            help='Save logs to an output file')
        parser.add_argument('--db-file',
                            action='store',
                            dest='db_file_location',
                            help='Specify the database file location (SQLite3)')
        parser.add_argument(
            '--single-proxy',
            action='store',
            dest='single_proxy',
            help='Proxy support with single IP (ex: http://127.0.0.1:8080)')
        parser.add_argument('--proxy-file',
                            action='store',
                            dest='proxy_file_location',
                            help='Proxy support from Proxy List File')
        parser.add_argument('--auto-proxy',
                            action='store_true',
                            dest='auto_proxy',
                            default=True,
                            help='Auto proxy support (coming soon)')
        parser.add_argument('--version',
                            action='version',
                            version='=[ {} {} by {}] ({})'.format(
                                self.about.__name__, self.about.__version__,
                                self.about.__author__, self.about.__giturl__))
        results = parser.parse_args()

        domain = results.domain
        username = results.username
        email = results.email
        orgcomp = results.orgcomp
        single_proxy = results.single_proxy
        proxy_file_location = results.proxy_file_location
        proxy = ""
        self.multiple_proxy_list = []

        self.show_banner()

        conf = Config()
        self.db = Database()

        # Setup project
        self.project_id = self.db.create_new_project(domain, orgcomp,
                                                     datetime.datetime.now())
        log.console_log("{}[+] Creating New Belati Project... {}".format(G, W))
        log.console_log(
            "---------------------------------------------------------")
        log.console_log("Project ID: {}".format(str(self.project_id)))
        log.console_log("Project Domain: {}".format(domain))
        log.console_log("Project Organization/Company: {}".format(orgcomp))
        log.console_log(
            "---------------------------------------------------------")

        if domain is not None:
            if single_proxy is not None:
                log.console_log("{}[*] Checking Proxy Status... {}".format(
                    G, W))
                if self.check_single_proxy_status(single_proxy, "http://" +
                                                  str(domain)) == 'ok':
                    proxy = single_proxy
                else:
                    log.console_log(
                        '{}[-] Please use another proxy or disable proxy! {}'.
                        format(R, W))
                    sys.exit()

            if proxy_file_location is not None:
                log.console_log(
                    "{}[*] Checking Proxy Status from file {}{}".format(
                        G, proxy_file_location, W))
                self.check_multiple_proxy_status(proxy_file_location,
                                                 "http://" + str(domain))
                proxy = self.multiple_proxy_list

            extract_domain = tldextract.extract(domain)

            self.check_domain(self.url_req.ssl_checker(domain), proxy)
            self.banner_grab(self.url_req.ssl_checker(domain), proxy)

            if extract_domain.subdomain == "":
                self.robots_scraper(self.url_req.ssl_checker(domain), proxy)
                self.enumerate_subdomains(domain, proxy)
                self.scan_DNS_zone(domain)
                self.harvest_email_search(domain, proxy)
                self.harvest_email_pgp(domain, proxy)
            else:
                domain = extract_domain.domain + '.' + extract_domain.suffix
                self.robots_scraper(self.url_req.ssl_checker(domain), proxy)
                self.enumerate_subdomains(domain, proxy)
                self.scan_DNS_zone(domain)
                self.harvest_email_search(domain, proxy)
                self.harvest_email_pgp(domain, proxy)

            self.harvest_document(domain, proxy)

        if username is not None:
            self.username_checker(username)

        if orgcomp is not None:
            self.gather_company(orgcomp, proxy)

        if email is not None:
            log.console_log("This feature will be coming soon. Be patient :)")

        log.console_log(
            "{}All done, sir! All logs are saved in the log directory and downloaded files are saved in belatiFiles {}"
            .format(Y, W))

    def show_banner(self):
        banner = """
        {}
         /$$$$$$$  /$$$$$$$$ /$$        /$$$$$$  /$$$$$$$$ /$$$$$$
        | $$__  $$| $$_____/| $$       /$$__  $$|__  $$__/|_  $$_/
        | $$  \ $$| $$      | $$      | $$  \ $$   | $$     | $$
        | $$$$$$$ | $$$$$   | $$      | $$$$$$$$   | $$     | $$
        | $$__  $$| $$__/   | $$      | $$__  $$   | $$     | $$
        | $$  \ $$| $$      | $$      | $$  | $$   | $$     | $$
        | $$$$$$$/| $$$$$$$$| $$$$$$$$| $$  | $$   | $$    /$$$$$$
        |_______/ |________/|________/|__/  |__/   |__/   |______/

                                The Traditional Swiss Army Knife for OSINT

        =[ {} {} by {}]=

        + -- --=[ {} ]=-- -- +
        + -- --=[ {} ]=-- -- +
        {}
        """

        warning_message = """
        {}
        This tool is for educational purposes only.
        The author is not responsible for any damage you cause.
        Use it at your own risk!

        For better privacy, please use proxychains or another proxy service!
        {}
        """

        log.console_log(
            banner.format(G, self.about.__name__, self.about.__version__,
                          self.about.__author__, self.about.__info__,
                          self.about.__authorurl__, W))
        log.console_log(warning_message.format(R, W))

    def check_domain(self, domain_name, proxy_address):
        check = CheckDomain()

        log.console_log(
            "{}[*] Checking Domain Availability... {}".format(G, W), 0)
        check.domain_checker(domain_name, proxy_address)
        log.console_log("{}[*] Checking URL Alive... {}".format(G, W), 0)
        check.alive_check(domain_name, proxy_address)
        log.console_log("{}[*] Perfoming Whois... {}".format(G, W))
        whois_result = check.whois_domain(domain_name)
        email = re.findall(r'[a-zA-Z0-9._+-]+@[a-zA-Z0-9._+-]+\s*',
                           str(whois_result))
        self.db.insert_domain_result(self.project_id,
                                     self.strip_scheme(domain_name),
                                     str(whois_result), str(email))

    def banner_grab(self, domain_name, proxy_address):
        banner = BannerGrab()
        log.console_log("{}[*] Perfoming HTTP Banner Grabbing... {}".format(
            G, W))
        banner_info = banner.show_banner(domain_name, proxy_address)
        self.db.insert_banner(domain_name, self.project_id, str(banner_info))

    def enumerate_subdomains(self, domain_name, proxy):
        log.console_log("{}[*] Perfoming Subdomains Enumeration... {}".format(
            G, W))
        subdomain_list = sublist3r.main(domain_name,
                                        100,
                                        "",
                                        ports=None,
                                        silent=False,
                                        verbose=False,
                                        enable_bruteforce=False,
                                        engines=None)
        subdomain_ip_list = []

        for subdomain in subdomain_list:
            self.banner_grab(self.url_req.ssl_checker(subdomain), proxy)
            self.robots_scraper(self.url_req.ssl_checker(subdomain), proxy)
            self.wappalyzing_webpage(subdomain)
            self.public_git_finder(subdomain, proxy)
            self.public_svn_finder(subdomain, proxy)
            try:
                subdomain_ip_list.append(socket.gethostbyname(subdomain))
                self.db.update_subdomain_ip(
                    self.project_id, subdomain,
                    str(socket.gethostbyname(subdomain)))
            except socket.gaierror:
                pass

        subdomain_ip_listFix = list(set(subdomain_ip_list))

        # check common service port TODO
        #for ipaddress in subdomain_ip_listFix:
        #self.common_service_check(ipaddress)

        for ipaddress in subdomain_ip_listFix:
            self.service_scanning(ipaddress)

    def wappalyzing_webpage(self, domain):
        log.console_log("{}[*] Wapplyzing on domain {}{}".format(G, domain, W))
        wappalyzing = Wappalyzer()
        targeturl = self.url_req.ssl_checker(domain)
        try:
            data = wappalyzing.run_wappalyze(targeturl)
            self.db.insert_wappalyzing(self.project_id, domain, data)
        # HTTPError is a subclass of URLError, so it must be caught first
        except urllib2.HTTPError as exc:
            log.console_log('HTTP Error: {0}'.format(str(exc)))
        except urllib2.URLError as exc:
            log.console_log('URL Error: {0}'.format(str(exc)))
        except Exception as exc:
            log.console_log('Unknown error: {0}'.format(str(exc)))

    def service_scanning(self, ipaddress):
        scan_nm = ScanNmap()
        log.console_log("{}[*] Perfoming Nmap Full Scan on IP {}{}".format(
            G, ipaddress, W))
        log.console_log("{}[*] nmap -sS -A -Pn {}{}".format(G, ipaddress, W))
        scan_nm.run_scanning(ipaddress)

    def scan_DNS_zone(self, domain_name):
        log.console_log("{}[*] Perfoming DNS Zone Scanning... {}".format(G, W))
        log.console_log(
            "{}[*] Please wait, maximum timeout for checking is 1 minutes {}".
            format(G, W))
        signal.signal(signal.SIGALRM, self.timeLimitHandler)
        signal.alarm(60)
        try:
            scan_list = str(list(Scanner(domain_name).scan()))
            ns_record_list = []
            mx_record_list = []
            log.console_log("{}{}{}".format(G, scan_list.replace(",", "\n"),
                                            W))
            log.console_log("{}DNS Server:{}".format(G, W))
            for ns in dns.resolver.query(domain_name, 'NS'):
                log.console_log(G + ns.to_text() + W)
                ns_record_list.append(ns.to_text())

            log.console_log("{}MX Record:{}".format(G, W))
            for ns in dns.resolver.query(domain_name, 'MX'):
                log.console_log("{}{}{}".format(G, ns.to_text(), W))
                mx_record_list.append(ns.to_text())

            self.db.update_dns_zone(self.project_id, domain_name,
                                    str(ns_record_list), str(mx_record_list))

        except Exception as exc:
            print("{}[*] No response from server... SKIP!{}".format(R, W))
Example 2
class Belati(Cmd):
    def __init__(self):
        self.about = AboutProject()
        self.url_req = URLRequest()

        Cmd.doc_header = "Core Commands"
        Cmd.prompt = "{}belati{} > ".format(UNDERLINE, ENDC)
        Cmd.path_complete

        Cmd.__init__(self)

        self.list_parameter = ['domain', 'username', 'email', 'orgcomp', 'proxy', 'proxy_file']
        self.parameters = {}
        self.multiple_proxy_list = []
        self.current_time = datetime.datetime.now().strftime('%Y-%m-%d %H:%M:%S')

        self.show_banner()
        self.conf = Config()
        self.db = Database()


    def show_banner(self):
        banner = """
        {}

         /$$$$$$$  /$$$$$$$$ /$$        /$$$$$$  /$$$$$$$$     .
        | $$__  $$| $$_____/| $$       /$$__  $$|__  $$__/    J:L
        | $$  \ $$| $$      | $$      | $$  \ $$   | $$       |:|
        | $$$$$$$ | $$$$$   | $$      | $$$$$$$$   | $$       |:|
        | $$__  $$| $$__/   | $$      | $$__  $$   | $$       |:|
        | $$  \ $$| $$      | $$      | $$  | $$   | $$       |:|
        | $$$$$$$/| $$$$$$$$| $$$$$$$$| $$  | $$   | $$   /]  |:|  [\ 
        |_______/ |________/|________/|__/  |__/   |__/   \:-'\"""'-:/
                                                            ""III""
                                                              III
                                                              III
                                                              III
                                                             (___)

                                The Traditional Swiss Army Knife for OSINT

        =[ {} {} by {}]=

        + -- --=[ {} ]=-- -- +
        + -- --=[ {} ]=-- -- +
        {}
        """

        warning_message = """
        {}
        This tool is for educational purposes only.
        The author is not responsible for any damage you cause.
        Use it at your own risk!

        For better privacy, please use proxychains or another proxy service!
        {}
        """

        log.console_log(banner.format(G, self.about.__name__, self.about.__version__, self.about.__author__, self.about.__info__, self.about.__authorurl__, W))
        log.console_log(warning_message.format(R, W))

    def do_help(self, line):
        'print help message'

        print("\nCore commands")
        print("==============\n")
        print(tabulate([["Name", "Description"],
                        ["?", "Help menu"],
                        ["!", "Run OS Command"],
                        ["history", "Show command history"],
                        ["set", "Set parameter option value"],
                        ["show", "Display list of available parameter options"],
                        ["start", "Start Automatic Scanning Belati"],
                        ["startws", "Start Web Server Only Mode"],
                        ["version", "Show application version number"],
                        ["quit", "Exit the application"]],
                       headers="firstrow"))


    def do_set(self, arg, opts=None):
        '''Set variable for Belati parameters.\nUsage: set [option] [value]\n\nAvailable options:\ndomain, username, email, orgcomp, proxy, proxy_file'''

        if not arg:
            log.console_log('{} Set variable for Belati parameters.\nUsage: set [option] [value]\n\nAvailable options:\ndomain, username, email, orgcomp, proxy, proxy_file {}'.format(W, W))
        else:
            param = shlex.split(arg)
            key = param[0]
            value = param[1]
            if key in self.list_parameter:
                self.parameters[key] = value
                log.console_log('{} => {}'.format(key, value))
            else:
                log.console_log("Available parameters: domain, username, email, orgcomp, proxy, proxy_file")

    def do_show(self, arg, opts=None):
        'Show available parameter options'

        domain_val = self.parameters.get('domain')
        orgcomp = self.parameters.get('orgcomp')
        email = self.parameters.get('email')
        username = self.parameters.get('username')
        proxy = self.parameters.get('proxy')
        proxy_file = self.parameters.get('proxy_file')
        arg = shlex.split(arg)

        if not arg:
            print("Please use the command 'show options' to see the list of option parameters")

        elif arg[0] == "options":
            print(tabulate([["Name", "Value", "Required", "Description"],
                            ["domain", domain_val, "Yes", "Domain name for OSINT"],
                            ["orgcomp", orgcomp, "Yes", "Organization/Company name for OSINT"],
                            ["email", email, "Optional", "Email address for OSINT"],
                            ["username", username, "Optional", "Username for OSINT"],
                            ["proxy", proxy, "Optional", "Proxy server (e.g. http://127.0.0.1:8080)"],
                            ["proxy_file", proxy_file, "Optional", "Proxy file list location"]],
                           headers="firstrow"))

    def do_startws(self, line):
        'Start Belati in Web Server Only Mode'

        log.console_log("{}[*] Entering Web Server Only Mode...{}".format(Y, W))
        self.start_web_server()
        sys.exit()

    def do_version(self, line):
        'Check current Belati version'

        log.console_log('{} {} by {}\n'.format(self.about.__name__, self.about.__version__, self.about.__author__))
        log.console_log('Project URL: {}'.format(self.about.__giturl__))

    def do_start(self, line):
        'Start automatic scanning'
        domain = self.parameters.get('domain')
        orgcomp = self.parameters.get('orgcomp')
        email = self.parameters.get('email')
        username = self.parameters.get('username')
        proxy = self.parameters.get('proxy', '')
        proxy_file = self.parameters.get('proxy_file', '')

        if domain is None and orgcomp is None:
            log.console_log("{}[-] Please specify domain/organization {}".format(R, W))
            sys.exit()

        log.console_log("{}[*] Starting at: {} {}".format(Y, self.current_time, W))

        self.updater = Updater()
        self.updater.check_update(self.about.__version__)

        # Setup project
        self.project_id = self.db.create_new_project(domain, orgcomp, self.current_time)
        log.console_log("{}[+] Creating New Belati Project... {}".format(G, W))
        log.console_log("---------------------------------------------------------")
        log.console_log("Project ID: {}".format(str(self.project_id)))
        log.console_log("Project Domain: {}".format(domain))
        log.console_log("Project Organization/Company: {}".format(orgcomp))
        log.console_log("---------------------------------------------------------")

        if domain is not None:
            if proxy != '':
                log.console_log("{}[*] Checking Proxy Status... {}".format(G, W))
                if self.check_single_proxy_status(proxy, "http://" + str(domain)) == 'ok':
                    pass
                else:
                    log.console_log('{}[-] Please use another proxy or disable proxy! {}'.format(R, W))
                    sys.exit()

            if proxy_file != '':
                log.console_log("{}[*] Checking Proxy Status from file {}{}".format(G, proxy_file, W))
                self.check_multiple_proxy_status(proxy_file, "http://" + str(domain))
                proxy = self.multiple_proxy_list

            extract_domain = tldextract.extract(domain)

            self.check_domain(self.url_req.ssl_checker(domain), proxy)
            self.banner_grab(self.url_req.ssl_checker(domain), proxy)

            if extract_domain.subdomain == "":
                self.robots_scraper(self.url_req.ssl_checker(domain), proxy)
                self.enumerate_subdomains(domain, proxy)
                self.scan_DNS_zone(domain)
                self.harvest_email_search(domain, proxy)
                self.harvest_email_pgp(domain, proxy)
            else:
                domain = extract_domain.domain + '.' + extract_domain.suffix
                self.robots_scraper(self.url_req.ssl_checker(domain), proxy)
                self.enumerate_subdomains(domain, proxy)
                self.scan_DNS_zone(domain)
                self.harvest_email_search(domain, proxy)
                self.harvest_email_pgp(domain, proxy)

            self.harvest_document(domain, proxy)

        if username is not None:
            self.username_checker(username)

        if orgcomp is not None:
            self.gather_company(orgcomp, proxy)

        if email is not None:
            log.console_log("This feature will be coming soon. Be patient :)")

        log.console_log("{}All done sir! All logs saved in {}logs{} directory and dowloaded file saved in {}belatiFiles{} {}".format(Y, B, Y, B, Y, W))

        self.start_web_server()

    def check_domain(self, domain_name, proxy_address):
        check = CheckDomain()

        log.console_log("{}[*] Checking Domain Availability... {}".format(G, W), 0)
        log.console_log(check.domain_checker(domain_name, proxy_address))
        
        log.console_log("{}[*] Checking URL Alive... {}".format(G, W), 0)
        log.console_log(check.alive_check(domain_name, proxy_address))

        log.console_log("{}[*] Perfoming Whois... {}".format(G, W))
        whois_result = check.whois_domain(domain_name)
        log.console_log(whois_result)
        email = re.findall(r'[a-zA-Z0-9._+-]+@[a-zA-Z0-9._+-]+\s*', str(whois_result))

        # JSON Beautifier
        json_bf = JsonBeautifier()
        json_whois = json_bf.beautifier(str(whois_result))
        self.db.insert_domain_result(self.project_id, util.strip_scheme(domain_name), str(json_whois), util.clean_list_string(email))

    def banner_grab(self, domain_name, proxy_address):
        banner = BannerGrab()
        log.console_log("{}[*] Perfoming HTTP Banner Grabbing... {}".format(G, W))
        banner_info = banner.show_banner(domain_name, proxy_address)
        log.console_log(banner_info)
        self.db.insert_banner(domain_name, self.project_id, str(banner_info))

    def enumerate_subdomains(self, domain_name, proxy):
        log.console_log("{}[*] Perfoming Subdomains Enumeration...{}".format(G, W))
        sub_enum = SubdomainEnum()
        log.console_log("{}[+] Grabbing data from dnsdumpster...{}\n".format(B, W))
        dnsdumpster = sub_enum.scan_dnsdumpster(domain_name)
        subdomain_list = []
        data_table = [["Domain", "IP", "Provider", "Country"]]
        for entry in dnsdumpster['dns_records']['host']:
            data_table.extend([[entry['domain'], entry['ip'], entry['provider'], entry['country']]])
            subdomain_list.append(entry['domain'])

        log.console_log( tabulate(data_table, headers='firstrow') )

        log.console_log("{}[+] Grabbing data from crt.sh...{}\n".format(B, W))
        crt_list = sub_enum.scan_crtsh(domain_name, proxy)
        
        if crt_list is not None:
            log.console_log("\n".join(crt_list))
            subdomain_list = list(set(subdomain_list + crt_list))
        
        log.console_log("{}[+] Grabbing data from findsubdomains.com...{}\n".format(B, W))
        findsubdomains_list = sub_enum.scan_findsubdomainsCom(domain_name, proxy)
        
        if findsubdomains_list is not None:
            log.console_log("\n".join(findsubdomains_list))
            subdomain_list = list(set(subdomain_list + findsubdomains_list))

        subdomain_ip_list = []

        for subdomain in subdomain_list:
            self.banner_grab(self.url_req.ssl_checker(subdomain), proxy)
            self.robots_scraper(self.url_req.ssl_checker(subdomain), proxy)
            self.wappalyzing_webpage(subdomain)
            self.public_git_finder(subdomain, proxy)
            self.public_svn_finder(subdomain, proxy)
            try:
                subdomain_ip_list.append(socket.gethostbyname(subdomain))
                self.db.update_subdomain_ip(self.project_id, subdomain, str(socket.gethostbyname(subdomain)))
            except socket.gaierror:
                pass

        subdomain_ip_listFix = list(set(subdomain_ip_list))

        # check common service port TODO
        #for ipaddress in subdomain_ip_listFix:
            #self.common_service_check(ipaddress)

        for ipaddress in subdomain_ip_listFix:
            self.service_scanning(ipaddress)

    def wappalyzing_webpage(self, domain):
        log.console_log("{}[*] Wapplyzing on domain {}{}".format(G, domain, W))
        wappalyzing = Wappalyzer()
        targeturl = self.url_req.ssl_checker(domain)
        try:
            data = wappalyzing.run_wappalyze(targeturl)
            self.db.insert_wappalyzing(self.project_id, domain, data)
        # HTTPError is a subclass of URLError, so it must be caught first
        except urllib2.HTTPError as exc:
            log.console_log('HTTP Error: {0}'.format(str(exc)))
        except urllib2.URLError as exc:
            log.console_log('URL Error: {0}'.format(str(exc)))
        except Exception as exc:
            log.console_log('Unknown error: {0}'.format(str(exc)))

    def service_scanning(self, ipaddress):
        scan_nm = ScanNmap()
        log.console_log("{}[*] Perfoming Nmap Full Scan on IP {}{}".format(G, ipaddress, W))
        log.console_log("{}[*] nmap -sS -A -Pn {}{}".format(G, ipaddress, W))
        scan_nm.run_scanning(ipaddress)

    def scan_DNS_zone(self, domain_name):
        log.console_log("{}[*] Perfoming DNS Zone Scanning... {}".format(G, W))
        log.console_log("{}[*] Please wait, maximum timeout for checking is 1 minutes {}".format(G, W))
        signal.signal(signal.SIGALRM, self.timeLimitHandler)
        signal.alarm(60)
        try:
            scan_list = str(list(Scanner(domain_name).scan()))
            ns_record_list = []
            mx_record_list = []
            log.console_log("{}{}{}".format(G, scan_list.replace(",","\n"), W))
            log.console_log("{}DNS Server:{}".format(G, W))
            for ns in dns.resolver.query(domain_name, 'NS'):
                log.console_log(G + ns.to_text() + W)
                ns_record_list.append(ns.to_text())

            log.console_log("{}MX Record:{}".format(G, W))
            for ns in dns.resolver.query(domain_name, 'MX'):
                log.console_log("{}{}{}".format(G, ns.to_text(), W))
                mx_record_list.append(ns.to_text())

            self.db.update_dns_zone(self.project_id, domain_name, util.clean_list_string(ns_record_list), util.clean_list_string(mx_record_list))

        except Exception as exc:
            print("{}[*] No response from server... SKIP!{}".format(R, W))