Example #1
0
def shodan(self, scope_file, output):
    """
The Shodan module:\n
Query Shodan's API for information on each target IP address and domain.\n
A Shodan API key is required.
    """
    asciis.print_art()
    print(
        green(
            "[+] Shodan Module Selected: O.D.I.N. will check Shodan for the provided domains \
and IPs."))

    if __name__ == "__main__":
        report = reporter.Reporter(output)
        scope, ip_list, domains_list = report.prepare_scope(scope_file)

        # Queue up the single Shodan reporting job
        shodan_job = multiprocess.Process(name="Shodan Report",
                                          target=report.create_shodan_table,
                                          args=(ip_list, domains_list))
        job_queue = [shodan_job]

        # Launch everything, then block until each process has finished
        for job in job_queue:
            print(green("[+] Starting new process: {}".format(job.name)))
            job.start()
        for job in job_queue:
            job.join()

        report.close_out_reporting()
        print(green("[+] Job's done! Your results are in {}.".format(output)))
Example #2
0
def people(self, client, domain):
    """
The People module:\n
Uses TheHarvester and EmailHunter to locate email addresses and social media profiles,
then cross-references them with HaveIBeenPwned, Twitter's API, and search engines to find
security breaches, pastes, and social media accounts.\n
Several API keys are required for all of the look-ups: EmailHunter and Twitter.
    """
    asciis.print_art()
    print(
        green(
            "[+] People Module Selected: O.D.I.N. will run only modules for email addresses \
and social media."))

    # Set up the report directory and the SQLite output path for this client
    setup_reports(client)
    output_report = "reports/{}/OSINT_DB.db".format(client)

    if __name__ == "__main__":
        report = reporter.Reporter(output_report)

        # Two jobs: one for company info and one for the people/email hunt
        job_queue = [
            multiprocess.Process(name="Company Info Report",
                                 target=report.create_company_info_table,
                                 args=(domain, )),
            multiprocess.Process(name="Employee Report",
                                 target=report.create_people_table,
                                 args=(domain, client)),
        ]

        # Start every job in parallel, then wait for all of them
        for job in job_queue:
            print(green("[+] Starting new process: {}".format(job.name)))
            job.start()
        for job in job_queue:
            job.join()

        report.close_out_reporting()
        print(
            green("[+] Job's done! Your results are in {}.".format(
                output_report)))
Example #3
0
def osint(self, organization, domain, files, ext, delete, scope_file, aws,
          aws_fixes, html, screenshots, graph, nuke, whoxy_limit):
    """
The OSINT toolkit:\n
This is ODIN's primary module. ODIN will take the target organization, domain, and other data
provided and hunt for information. On the human side, ODIN looks for employee names,
email addresses, and social media profiles. Names and emails are cross-referenced with
HaveIBeenPwned, Twitter's API, and search engines to collect additional information.

ODIN also uses various tools and APIs to collect information on the provided IP addresses
and domain names, including things like DNS and IP address history.

View the README for the full details and lists of API keys!

Note: If providing a scope file, acceptable IP addresses/ranges include:

    * Single Address:      8.8.8.8

    * Basic CIDR:          8.8.8.0/24

    * Nmap-friendly Range: 8.8.8.8-10

    * Underscores? OK:     8.8.8.8_8.8.8.10
    """
    click.clear()
    asciis.print_art()
    print(green("[+] OSINT Module Selected: ODIN will run all recon modules."))

    # The verbose flag is no longer exposed on the command line, but
    # create_foca_table() below still expects it as an argument, so the
    # variable is kept. The old `if verbose:` warning print was unreachable
    # (verbose is always None here) and has been removed.
    verbose = None

    # Perform prep work for reporting
    setup_reports(organization)
    report_path = "reports/{}/".format(organization)
    output_report = report_path + "OSINT_DB.db"

    if __name__ == "__main__":
        # Create manager server to handle variables shared between jobs
        manager = Manager()
        ip_list = manager.list()
        domain_list = manager.list()
        # Create reporter object and generate final list, the scope from scope file
        report = reporter.Reporter(report_path, output_report)
        report.create_tables()
        scope, ip_list, domain_list = report.prepare_scope(
            ip_list, domain_list, scope_file, domain)

        # Create some jobs and put Python to work!
        # Job queue 1 is for the initial phase
        jobs = []
        # Job queue 2 is used for jobs using data from job queue 1
        more_jobs = []
        # Job queue 3 is used for jobs that take a while and use the progress bar, i.e. AWS enum
        even_more_jobs = []
        company_info = Process(name="Company Info Collector",
                               target=report.create_company_info_table,
                               args=(domain, ))
        jobs.append(company_info)
        employee_report = Process(name="Employee Hunter",
                                  target=report.create_people_table,
                                  args=(domain_list, organization))
        jobs.append(employee_report)
        domain_report = Process(name="Domain and IP Address Recon",
                                target=report.create_domain_report_table,
                                args=(organization, scope, ip_list,
                                      domain_list, whoxy_limit))
        jobs.append(domain_report)

        shodan_report = Process(name="Shodan Queries",
                                target=report.create_shodan_table,
                                args=(ip_list, domain_list))
        more_jobs.append(shodan_report)
        urlcrazy_report = Process(name="Domain Squatting Recon",
                                  target=report.create_urlcrazy_table,
                                  args=(organization, domain))
        more_jobs.append(urlcrazy_report)

        cloud_report = Process(name="Cloud Recon",
                               target=report.create_cloud_table,
                               args=(organization, domain, aws, aws_fixes))
        even_more_jobs.append(cloud_report)

        if screenshots:
            take_screenshots = Process(name="Screenshot Snapper",
                                       target=report.capture_web_snapshots,
                                       args=(report_path, ))
            more_jobs.append(take_screenshots)

        if files:
            files_report = Process(name="File Hunter",
                                   target=report.create_foca_table,
                                   args=(domain, ext, delete, report_path,
                                         verbose))
            more_jobs.append(files_report)

        # Phase 1: the initial discovery jobs
        print(
            green(
                "[+] Beginning initial discovery phase! This could take some time..."
            ))
        for job in jobs:
            print(green("[+] Starting new process: {}".format(job.name)))
            job.start()
        for job in jobs:
            job.join()

        # Phase 2: jobs that consume the data produced by phase 1
        print(
            green(
                "[+] Initial discovery is complete! Proceeding with additional queries..."
            ))
        for job in more_jobs:
            print(green("[+] Starting new process: {}".format(job.name)))
            job.start()
        for job in more_jobs:
            job.join()

        # Phase 3: long-running cloud/web-service checks
        print(green("[+] Final phase: checking the cloud and web services..."))
        for job in even_more_jobs:
            print(green("[+] Starting new process: {}".format(job.name)))
            job.start()
        for job in even_more_jobs:
            job.join()

        report.close_out_reporting()
        print(
            green(
                "[+] Job's done! Your results are in {} and can be viewed and queried with \
any SQLite browser.".format(output_report)))

        if graph:
            graph_reporter = grapher.Grapher(output_report)
            print(
                green(
                    "[+] Loading ODIN database file {} for conversion to Neo4j"
                ).format(output_report))

            if nuke:
                confirm = input(
                    red("\n[!] You set the --nuke option. This wipes out all nodes \
for a fresh start. Proceed? (Y\\N) "))
                if confirm.lower() == "y":
                    graph_reporter.clear_neo4j_database()
                    print(green("[+] Database successfully wiped!\n"))
                    graph_reporter.convert()
                else:
                    print(
                        red("[!] Then you can convert your database to a graph database later. \
Run lib/grapher.py with the appropriate options."))
            else:
                graph_reporter.convert()

        if html:
            print(
                green("\n[+] Creating the HTML report using {}.".format(
                    output_report)))
            html_reporter = htmlreporter.HTMLReporter(
                organization, report_path + "/html_report/", output_report)
            html_reporter.generate_full_report()
Example #4
0
def osint(self, organization, domain, files, ext, delete, scope_file, aws,
          aws_fixes, verbose, html, screenshots):
    """
The OSINT toolkit:\n
This is ODIN's primary module. ODIN will take the target organization, domain, and other data
provided and hunt for information. On the human side, ODIN looks for employee names,
email addresses, and social media profiles. Names and emails are cross-referenced with
HaveIBeenPwned, Twitter's API, and search engines to collect additional information.\n
ODIN also uses various tools and APIs to collect information on the provided IP addresses
and domain names, including things like DNS and IP address history.\n
View the README for the full details and lists of API keys!
    """
    asciis.print_art()
    print(green("[+] OSINT Module Selected: ODIN will run all recon modules."))

    # Warn the user: RDAP contact enumeration can be very noisy when the
    # scope includes a large cloud provider
    if verbose:
        print(
            yellow(
                "[*] Verbose output Enabled -- Enumeration of RDAP contact information \
is enabled, so you may get a lot of it if scope includes a large cloud provider."
            ))

    # Perform prep work for reporting
    setup_reports(organization)
    report_path = "reports/{}/".format(organization)
    output_report = report_path + "OSINT_DB.db"

    if __name__ == "__main__":
        # Create manager server to handle variables shared between jobs
        manager = Manager()
        ip_list = manager.list()
        domain_list = manager.list()
        # Create reporter object and generate final list, the scope from scope file
        report = reporter.Reporter(output_report)
        report.create_tables()
        scope, ip_list, domain_list = report.prepare_scope(
            ip_list, domain_list, scope_file, domain)

        # Create some jobs and put Python to work!
        # Job queue 1 is for the initial phase
        jobs = []
        # Job queue 2 is used for jobs using data from job queue 1
        more_jobs = []
        # Job queue 3 is used for jobs that take a while and use the progress bar, i.e. AWS enum
        even_more_jobs = []
        company_info = Process(name="Company Info Collector",
                               target=report.create_company_info_table,
                               args=(domain, ))
        jobs.append(company_info)
        employee_report = Process(name="Employee Hunter",
                                  target=report.create_people_table,
                                  args=(domain, organization))
        jobs.append(employee_report)
        domain_report = Process(name="Domain and IP Address Recon",
                                target=report.create_domain_report_table,
                                args=(scope, ip_list, domain_list, verbose))
        jobs.append(domain_report)

        shodan_report = Process(name="Shodan Queries",
                                target=report.create_shodan_table,
                                args=(ip_list, domain_list))
        more_jobs.append(shodan_report)
        urlcrazy_report = Process(name="Domain Squatting Recon",
                                  target=report.create_urlcrazy_table,
                                  args=(organization, domain))
        more_jobs.append(urlcrazy_report)

        cloud_report = Process(name="Cloud Recon",
                               target=report.create_cloud_table,
                               args=(organization, domain, aws, aws_fixes))
        even_more_jobs.append(cloud_report)

        if screenshots:
            take_screenshots = Process(name="Screenshot Snapper",
                                       target=report.capture_web_snapshots,
                                       args=(report_path, ))
            more_jobs.append(take_screenshots)

        if files:
            # NOTE(review): this job is queued in phase 1 (`jobs`), unlike the
            # screenshot job above which runs in phase 2 -- confirm that is
            # intended and not a copy/paste slip.
            files_report = Process(name="File Hunter",
                                   target=report.create_foca_table,
                                   args=(domain, ext, delete, report_path,
                                         verbose))
            jobs.append(files_report)

        # Phase 1: initial discovery jobs run in parallel
        print(
            green(
                "[+] Beginning initial discovery phase! This could take some time..."
            ))
        for job in jobs:
            print(green("[+] Starting new process: {}".format(job.name)))
            job.start()
        for job in jobs:
            job.join()

        # Phase 2: jobs that depend on data collected in phase 1
        print(
            green(
                "[+] Initial discovery is complete! Proceeding with additional queries..."
            ))
        for job in more_jobs:
            print(green("[+] Starting new process: {}".format(job.name)))
            job.start()
        for job in more_jobs:
            job.join()

        # Phase 3: long-running cloud/web-service checks
        print(green("[+] Final phase: checking the cloud and web services..."))
        for job in even_more_jobs:
            print(green("[+] Starting new process: {}".format(job.name)))
            job.start()
        for job in even_more_jobs:
            job.join()

        # All jobs finished; close out the database and report the result path
        report.close_out_reporting()
        print(
            green("[+] Job's done! Your results are in {}.".format(
                output_report)))

        if html:
            html_reporter = htmlreporter.HTMLReporter(
                organization, report_path + "/html_report/", output_report)
            html_reporter.generate_full_report()
Example #5
0
def domains(self, client, domain, files, ext, delete, scope_file, aws,
            aws_fixes, verbose):
    """
The Domain module uses various tools and APIs to collect information on the provided IP addresses
and/or domains.\n
Several API keys are required for all of the look-ups: Censys, FullContact, Shodan, URLVoid, and
Cymon.
    """
    asciis.print_art()
    print(
        green(
            "[+] Domain Module Selected: O.D.I.N. will run only domain and IP-related \
modules."))
    # Tell the user whether RDAP contact enumeration will run this time
    if verbose:
        print(
            yellow(
                "[*] Verbose output Enabled -- Enumeration of RDAP contact information \
is enabled, so you may get a lot of it if scope includes a large cloud provider."
            ))
    else:
        print(
            yellow(
                "[*] Verbose output Disabled -- Enumeration of contact information \
will be skipped."))

    # Perform prep work for reporting
    setup_reports(client)
    output_report = "reports/{}/OSINT_DB.db".format(client)

    if __name__ == "__main__":
        report = reporter.Reporter(output_report)
        scope, ip_list, domains_list = report.prepare_scope(scope_file, domain)

        # Create empty job queue
        jobs = []
        # NOTE(review): "boo" looks like a placeholder workbook handle that is
        # passed to several report builders below (urlcrazy, shodan, foca) but
        # not to others -- confirm the target functions really expect this.
        workbook = "boo"
        company_info = multiprocess.Process(
            name="Company Info Report",
            target=report.create_company_info_table,
            args=(domain, ))
        jobs.append(company_info)
        domain_report = multiprocess.Process(
            name="Domains Report",
            target=report.create_domain_report_table,
            args=(scope, ip_list, domains_list, verbose))
        jobs.append(domain_report)
        urlcrazy_report = multiprocess.Process(
            name="Domain Squatting Report",
            target=report.create_urlcrazy_table,
            args=(workbook, client, domain))
        jobs.append(urlcrazy_report)
        shodan_report = multiprocess.Process(name="Shodan Report",
                                             target=report.create_shodan_table,
                                             args=(workbook, ip_list,
                                                   domains_list))
        jobs.append(shodan_report)
        cloud_report = multiprocess.Process(name="Cloud Report",
                                            target=report.create_cloud_table,
                                            args=(client, domain, aws,
                                                  aws_fixes))
        jobs.append(cloud_report)
        # The file metadata job is optional and only queued when requested
        if files:
            files_report = multiprocess.Process(
                name="File Metadata Report",
                target=report.create_foca_table,
                args=(workbook, domain, ext, delete, verbose))
            jobs.append(files_report)

        # Start every job in parallel, then wait for all of them to finish
        for job in jobs:
            print(green("[+] Starting new process: {}".format(job.name)))
            job.start()
        for job in jobs:
            job.join()

        report.close_out_reporting()
        print(
            green("[+] Job's done! Your results are in {}.".format(
                output_report)))
Example #6
0
def osint(self, client, domain, files, ext, delete, scope_file, aws, aws_fixes,
          verbose):
    """
The full O.D.I.N. toolkit:\n
Runs all OSINT modules together. O.D.I.N. uses TheHarvester and Hunter.io to find
email addresses and social media profiles, then cross-references the results with
HaveIBeenPwned, Twitter's API, and search engines for additional information.\n
Domain and IP information is collected with various tools and APIs for the provided
IP addresses and/or domains.\n
Several API keys are required for all of the look-ups: Twitter, Censys, Shodan, EmailHunter,
and Cymon.
    """
    asciis.print_art()
    print(
        green(
            "[+] OSINT Module Selected: O.D.I.N. will run all recon modules."))

    # Let the user know whether RDAP contact enumeration will happen
    if verbose:
        notice = "[*] Verbose output Enabled -- Enumeration of RDAP contact information \
is enabled, so you may get a lot of it if scope includes a large cloud provider."
    else:
        notice = "[*] Verbose output Disabled -- Enumeration of contact information \
will be skipped."
    print(yellow(notice))

    # Set up the report directory and SQLite database path for this client
    setup_reports(client)
    output_report = "reports/{}/OSINT_DB.db".format(client)

    if __name__ == "__main__":
        report = reporter.Reporter(output_report)
        scope, ip_list, domains_list = report.prepare_scope(scope_file, domain)

        # Build the full list of recon jobs before launching any of them
        task_list = [
            multiprocess.Process(name="Company Info Report",
                                 target=report.create_company_info_table,
                                 args=(domain, )),
            multiprocess.Process(name="Employee Report",
                                 target=report.create_people_table,
                                 args=(domain, client)),
            multiprocess.Process(name="Domains Report",
                                 target=report.create_domain_report_table,
                                 args=(scope, ip_list, domains_list, verbose)),
            multiprocess.Process(name="Domain Squatting Report",
                                 target=report.create_urlcrazy_table,
                                 args=(client, domain)),
            multiprocess.Process(name="Shodan Report",
                                 target=report.create_shodan_table,
                                 args=(ip_list, domains_list)),
            multiprocess.Process(name="Cloud Report",
                                 target=report.create_cloud_table,
                                 args=(client, domain, aws, aws_fixes)),
        ]
        # File metadata collection is optional
        if files:
            task_list.append(
                multiprocess.Process(name="File Metadata Report",
                                     target=report.create_foca_table,
                                     args=(domain, ext, delete, verbose)))

        # Kick every job off in parallel, then wait for all of them
        for task in task_list:
            print(green("[+] Starting new process: {}".format(task.name)))
            task.start()
        for task in task_list:
            task.join()

        report.close_out_reporting()
        print(
            green("[+] Job's done! Your results are in {}.".format(
                output_report)))
Example #7
0
def osint(self, organization, domain, files, ext, scope_file, aws, aws_fixes,
          html, screenshots, graph, nuke, whoxy_limit, typo, unsafe):
    """
The OSINT toolkit:

This is ODIN's primary module. ODIN will take the target organization, domain, and other data
provided and hunt for information. On the human side, ODIN looks for employee names,
email addresses, and social media profiles. Names and emails are cross-referenced with
HaveIBeenPwned, Twitter's API, and search engines to collect additional information.

ODIN also uses various tools and APIs to collect information on the provided IP addresses
and domain names, including things like DNS and IP address history.

View the wiki for the full details, reporting information, and lists of API keys.

Note: If providing any IP addresses in a scope file, acceptable IP addresses/ranges include:

    * Single Address:      8.8.8.8

    * Basic CIDR:          8.8.8.0/24

    * Nmap-friendly Range: 8.8.8.8-10

    * Underscores? OK:     8.8.8.8_8.8.8.10
    """
    click.clear()
    click.secho(asciis.print_art(), fg="magenta")
    click.secho("\tRelease v{}, {}".format(VERSION, CODENAME), fg="magenta")
    click.secho("[+] OSINT Module Selected: ODIN will run all recon modules.",
                fg="green")
    # Perform prep work for reporting
    setup_reports(organization)
    report_path = "reports/{}/".format(organization)
    output_report = report_path + "OSINT_DB.db"
    if __name__ == "__main__":
        # Create manager server to handle variables shared between jobs
        manager = Manager()
        ip_list = manager.list()
        domain_list = manager.list()
        rev_domain_list = manager.list()
        # Create reporter object and generate lists of everything, just IP addresses, and just domains
        browser = helpers.setup_headless_chrome(unsafe)
        report = reporter.Reporter(organization, report_path, output_report,
                                   browser)
        report.create_tables()
        scope, ip_list, domain_list = report.prepare_scope(
            ip_list, domain_list, scope_file, domain)
        # Create some jobs and put Python to work!
        # Job queue 1 is for the initial phase
        jobs = []
        # Job queue 2 is used for jobs using data from job queue 1
        more_jobs = []
        # Job queue 3 is used for jobs that take a while and use the progress bar, i.e. AWS enum
        even_more_jobs = []
        # Phase 1 jobs
        company_info = Process(name="Company Info Collector",
                               target=report.create_company_info_table,
                               args=(domain, ))
        jobs.append(company_info)
        employee_report = Process(name="Employee Hunter",
                                  target=report.create_people_table,
                                  args=(domain_list, rev_domain_list,
                                        organization))
        jobs.append(employee_report)
        domain_report = Process(name="Domain and IP Hunter",
                                target=report.create_domain_report_table,
                                args=(organization, scope, ip_list,
                                      domain_list, rev_domain_list,
                                      whoxy_limit))
        jobs.append(domain_report)
        # Phase 2 jobs
        shodan_report = Process(name="Shodan Hunter",
                                target=report.create_shodan_table,
                                args=(ip_list, domain_list))
        more_jobs.append(shodan_report)
        if typo:
            lookalike_report = Process(name="Lookalike Domain Reviewer",
                                       target=report.create_lookalike_table,
                                       args=(organization, domain))
            more_jobs.append(lookalike_report)
        if screenshots:
            take_screenshots = Process(name="Screenshot Snapper",
                                       target=report.capture_web_snapshots,
                                       args=(report_path, browser))
            more_jobs.append(take_screenshots)
        if files:
            files_report = Process(name="File Hunter",
                                   target=report.create_metadata_table,
                                   args=(domain, ext, report_path))
            more_jobs.append(files_report)
        # Phase 3 jobs
        cloud_report = Process(name="Cloud Hunter",
                               target=report.create_cloud_table,
                               args=(organization, domain, aws, aws_fixes))
        even_more_jobs.append(cloud_report)
        # Process the lists of jobs in phases, starting with phase 1
        click.secho(
            "[+] Beginning initial discovery phase! This could take some time...",
            fg="green")
        for job in jobs:
            click.secho("[+] Starting new process: {}".format(job.name),
                        fg="green")
            job.start()
        for job in jobs:
            job.join()
        # Wait for phase 1 and then begin phase 2 jobs
        click.secho(
            "[+] Initial discovery is complete! Proceeding with additional queries...",
            fg="green")
        for job in more_jobs:
            click.secho("[+] Starting new process: {}".format(job.name),
                        fg="green")
            job.start()
        for job in more_jobs:
            job.join()
        # Wait for phase 2 and then begin phase 3 jobs
        click.secho("[+] Final phase: checking the cloud and web services...",
                    fg="green")
        for job in even_more_jobs:
            click.secho("[+] Starting new process: {}".format(job.name),
                        fg="green")
            job.start()
        for job in even_more_jobs:
            job.join()
        # All jobs are done, so close out the SQLIte3 database connection
        report.close_out_reporting()
        click.secho(
            "[+] Job's done! Your results are in {} and can be viewed and queried with \
any SQLite browser.".format(output_report),
            fg="green")
        # Perform additional tasks depending on the user's command line options
        if graph:
            graph_reporter = grapher.Grapher(output_report)
            click.secho(
                "[+] Loading ODIN database file {} for conversion to Neo4j".
                format(output_report),
                fg="green")
            # FIX: convert() previously lived inside `if nuke:`, so --graph
            # without --nuke never converted the database, and declining the
            # wipe still converted despite the message saying otherwise.
            # Convert unless the user declined the --nuke confirmation.
            convert_database = True
            if nuke:
                if click.confirm(click.style(
                        "[!] You set the --nuke option. This wipes out all nodes for a \
fresh start. Proceed?",
                        fg="red"),
                                 default=True):
                    try:
                        graph_reporter.clear_neo4j_database()
                        click.secho("[+] Database successfully wiped!\n",
                                    fg="green")
                    except Exception as error:
                        click.secho(
                            "[!] Failed to clear the database! Check the Neo4j console and \
your configuration and try running grapher.py again.",
                            fg="red")
                        click.secho("L.. Details: {}".format(error), fg="red")
                else:
                    # The user declined the wipe, so skip the conversion too
                    convert_database = False
                    click.secho(
                        "[!] You can convert your database to a graph database later. \
Run lib/grapher.py with the appropriate options.",
                        fg="red")
            if convert_database:
                try:
                    graph_reporter.convert()
                except Exception as error:
                    click.secho(
                        "[!] Failed to convert the database! Check the Neo4j console and \
your configuration and try running grapher.py again.",
                        fg="red")
                    click.secho("L.. Details: {}".format(error), fg="red")
        if html:
            click.secho("\n[+] Creating the HTML report using {}.".format(
                output_report),
                        fg="green")
            try:
                html_reporter = htmlreporter.HTMLReporter(
                    organization, report_path + "/html_report/", output_report)
                html_reporter.generate_full_report()
            except Exception as error:
                click.secho("[!] Failed to create the HTML report!", fg="red")
                click.secho("L.. Details: {}".format(error), fg="red")