Example #1
    def delegate(self, args, **kwargs):
        """
        Executes the command based on the given CLI args.

        Parameters
        ----------
        args: Namespace
            The parsed argparse CLI arguments
        kwargs: dict
            The keyword arguments forwarded to the operation handler
        """

        Log.raw_info(self.BANNER)

        if not args.dry_run:

            Log.info("Setting up proxy config")
            Log.info("Proxy IP:   {}".format(args.proxy_ip))
            Log.info("Proxy PORT: {}".format(args.proxy_port))

            # Route subsequent HTTP traffic through the Tor proxy
            TorProxy.setup(args.proxy_ip, args.proxy_port)
            tor = TorCheck()

            Log.info("Checking Tor Status")

            success = tor.check_tor_status()
            if success:
                tor_status = "{}{}".format(Fore.GREEN, "\033[1mREADY\033[0m")
            else:
                tor_status = "{}{}".format(Fore.RED, "\033[1mNOT READY\033[0m")

            Log.info("Tor Status: {}".format(tor_status))
            Log.info("Checking IP Address")
            Log.info("IP: \033[1m{}{}\033[0m".format(Fore.GREEN,
                                                     tor.check_ip()))

            if not success:
                # Abort rather than continue without Tor routing
                raise ValueError("Unable to route traffic through Tor")

        else:

            Log.warn("Dry run enabled, skipping proxy setup")

        # Run the selected operation handler with the remaining arguments
        result = self.handler(kwargs)
        result_std = "{}\n".format(result)

        if args.output:
            out = args.output
            out.write(result)
            out.close()

        # Echo to stdout on dry runs, or when no output file was given
        # and stdout is being piped into another process
        if args.dry_run or (not args.output and IOUtils.is_piped_output()):
            sys.stdout.write(result_std)
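For context, here is a minimal sketch of how delegate might be driven from an argparse front end. The class name OnionCli and the handler wiring are assumptions for illustration; only the argument names come from the code above:

# Hypothetical wiring, assuming the method above lives on a class
# (called OnionCli here) whose handler attribute was bound to one of
# the operation methods before delegate() runs.
import argparse

parser = argparse.ArgumentParser(prog="oniongen.py")
parser.add_argument("--dry-run", dest="dry_run", action="store_true")
parser.add_argument("--proxy-ip", dest="proxy_ip", default="127.0.0.1")
parser.add_argument("--proxy-port", dest="proxy_port", default=9050, type=int)
parser.add_argument("--output", type=argparse.FileType("w"), default=None)

args = parser.parse_args()
cli = OnionCli()  # hypothetical class owning delegate() and handler
cli.delegate(args, count=10, prefix="", suffix="",
             protocol="http", tld="onion", workers=8)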
Example #2
    def __discover(self, kwargs):
        """
        Handles the operation --discover

        Parameters
        ----------
        kwargs: dict
            The dictionary parameter containing the attributes:
            * count    - The number of urls to generate
            * prefix   - The url prefix
            * suffix   - The url suffix
            * protocol - The protocol
            * tld      - The top level domain
            * workers  - The number of url validation workers

        Returns
        -------
        result: str
            The newline-separated list of urls found
        """

        result = []

        size = kwargs["count"]
        prefix = kwargs["prefix"]
        suffix = kwargs["suffix"]
        protocol = kwargs["protocol"]
        tld = kwargs["tld"]
        workers = kwargs["workers"]

        Log.warn("Discover mode enabled, this may take a loooong time")
        Log.warn("oniongen.py will run until it finds {} url(s)".format(size))
        Log.info("Protocol: {}".format(protocol.upper()))
        Log.info("Prefix:   {}".format(prefix if prefix else None))
        Log.info("Suffix:   {}".format(suffix if suffix else None))
        Log.info("TLD:      {}".format(tld))
        Log.info("Workers:  {}".format(workers))

        while len(result) < size:
            # Generate a fresh batch of candidate urls
            urls = [self.onion.generate(prefix, suffix, protocol, tld)
                    for _ in range(size)]

            Log.info("Generated {} {} url(s)".format(len(urls), tld))
            Log.info("Running HTTP status check on all urls")

            # Validate the batch concurrently; __url_check appends
            # reachable urls to result
            with ThreadPoolExecutor(max_workers=workers) as pool:
                for url in urls:
                    pool.submit(self.__url_check, url=url, urls=result)

            if len(result) < size:
                Log.info("Waiting 5 seconds before next try")
                time.sleep(5)

        Log.info("Found {} of {} url(s)".format(len(result), size))

        return "\n".join(result)
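Both __discover and __generate hand each url to __url_check, which is not shown in these examples. Below is a minimal sketch of such a worker, assuming the requests library and that traffic is already routed through Tor; the timeout value and the 200-status criterion are assumptions:

import requests

def __url_check(self, url, urls):
    """Append url to urls if it answers over HTTP (sketch only)."""
    try:
        # Assumes TorProxy.setup() already routed traffic through Tor
        response = requests.get(url, timeout=30)
        if response.status_code == 200:
            urls.append(url)  # list.append is atomic under the GIL
    except requests.RequestException:
        # Unreachable or misbehaving url: drop it silently
        pass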
Example #3
        sys.stderr.close()

    except KeyError:
        parser.print_help(sys.stderr)
        sys.exit(1)

    except ValueError as error:
        Log.error(str(error))
        sys.exit(1)


if __name__ == "__main__":

    try:

        url = None

        # Accept a url from stdin when input is piped into the script
        if IOUtils.is_piped_input():
            url = IOUtils.read_piped_input()

        cli_args = parser.parse_args()
        # A url passed on the command line takes precedence over piped input
        cli_args.url = cli_args.url or url

        main(cli_args)

    except KeyboardInterrupt:
        Log.raw_info("")
        Log.warn("User requested to stop")
        Log.warn("Killing all processes")
        os._exit(0)
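Example 3 relies on IOUtils.is_piped_input and IOUtils.read_piped_input (and Example 1 on IOUtils.is_piped_output), none of which are shown. A plausible minimal sketch built on isatty(); the project's real helpers may differ:

import sys

class IOUtils:

    @staticmethod
    def is_piped_input():
        # True when stdin comes from a pipe or file, not a terminal
        return not sys.stdin.isatty()

    @staticmethod
    def is_piped_output():
        # True when stdout is being piped into another process
        return not sys.stdout.isatty()

    @staticmethod
    def read_piped_input():
        # Read a single url from stdin, stripping the trailing newline
        return sys.stdin.readline().strip()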
Example #4
    def __generate(self, kwargs):
        """
        Handles the operations -g, --generate and --dry-run

        Parameters
        ----------
        kwargs: dict
            The dictionary parameter containing the attributes:
            * count    - The number of urls to generate
            * prefix   - The url prefix
            * suffix   - The url suffix
            * protocol - The protocol
            * tld      - The top level domain
            * workers  - The number of url validation workers
            * dry_run  - Whether dry run mode is enabled

        Returns
        -------
        result: str
            The newline-separated list of urls
        """

        result = []

        size = kwargs["count"]
        prefix = kwargs["prefix"]
        suffix = kwargs["suffix"]
        protocol = kwargs["protocol"]
        tld = kwargs["tld"]
        workers = kwargs["workers"]
        dry_run = kwargs["dry_run"]

        Log.info("Generating {} {} url(s)".format(size, tld))
        Log.info("Protocol: {}".format(protocol.upper()))
        Log.info("Prefix:   {}".format(prefix if prefix else None))
        Log.info("Suffix:   {}".format(suffix if suffix else None))
        Log.info("TLD:      {}".format(tld))
        Log.info("Workers:  {}".format(workers))

        # Generate the requested number of candidate urls
        urls = [self.onion.generate(prefix, suffix, protocol, tld)
                for _ in range(size)]

        Log.info("Generated {} {} url(s)".format(len(urls), tld))

        if dry_run:
            Log.warn("Dry run enabled, skipping url validation")
            return "\n".join(urls)

        Log.info("Running HTTP status check on all urls")
        Log.info("Be patient, this may take a looooong time...")

        # Validate concurrently; __url_check appends reachable urls to result.
        # The pool is created only now, so a dry run never spawns idle threads.
        with ThreadPoolExecutor(max_workers=workers) as pool:
            for url in urls:
                pool.submit(self.__url_check, url=url, urls=result)

        Log.info("Found {} of {} urls".format(len(result), len(urls)))

        return "\n".join(result)
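Finally, the generator behind self.onion.generate sits outside these examples. Here is a minimal sketch that builds a random base32 host honoring the prefix and suffix. The 16-character length mimics v2 onion addresses and is an assumption; real onion addresses are derived from keys, so this only illustrates the url shape being brute-forced:

import random
import string

BASE32 = string.ascii_lowercase + "234567"  # onion address alphabet

def generate(prefix, suffix, protocol, tld, length=16):
    """Build a random onion-style url (sketch; 16 chars mimics v2 hosts)."""
    prefix, suffix = prefix or "", suffix or ""
    body_len = max(0, length - len(prefix) - len(suffix))
    body = "".join(random.choice(BASE32) for _ in range(body_len))
    return "{}://{}{}{}.{}".format(protocol, prefix, body, suffix, tld)

Calling generate("test", "", "http", "onion") would yield a url such as http://test<random base32 chars>.onion; only candidates that also pass the HTTP check survive into result.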