Example #1
0
def worker():
    """Demo: items come off a PriorityQueue lowest priority value first."""
    jobs = PriorityQueue()
    for level, label in ((5, 'mid job'), (10, 'low job'), (1, 'high job')):
        jobs.put(Job(level, label))

    # Drain in priority order: 'high job' (1), 'mid job' (5), 'low job' (10).
    while not jobs.empty():
        print(jobs.get())
Example #2
0
class HttpTest(object):
    """Scan a queue of candidate IPs over HTTP(S) for ``host``, keeping the
    ones whose response body contains ``keyword``.

    Hits are appended (and flushed immediately) to ``result/<host>.log``.
    """

    def __init__(self, host, keyword, ips, timeout):
        self.threads = 100                 # number of gevent workers spawned by run()
        self.queue = PriorityQueue()
        self.host = host                   # target domain probed on every IP
        self.keyword = keyword             # substring marking a successful response
        self.result = []
        for ip in ips:
            self.queue.put(ip)
        self.num = self.queue.qsize()      # total work items, for progress output
        self.i = 0                         # items processed so far
        self.success = 0                   # items that matched the keyword
        self.timeout = timeout             # per-request timeout passed to httpServer()
        self.filename = os.path.join(rootPath, "result", host + ".log")
        # NOTE(review): the handle is never closed explicitly; every hit is
        # flushed right after writing, so an abrupt exit loses nothing.
        self.outfile = open(self.filename, 'w')

    def _scan(self, j):
        """Worker loop: drain the queue, probing one IP per iteration.

        ``j`` is the worker index; unused, kept for the spawn() interface.
        """
        while not self.queue.empty():
            try:
                item = self.queue.get(timeout=3.0)
                # Port follows the global HTTPS switch.
                if config.HTTPS_Support:
                    host, domain, port = item, self.host, 443
                else:
                    host, domain, port = item, self.host, 80
                html = httpServer((host, domain, port), self.timeout)
                if html is not None and self.keyword in html:
                    self.outfile.write(item + '\n')
                    self.outfile.flush()
                    self.success += 1
            except Exception:
                # Best-effort scan: any queue/network error just skips this item.
                # Narrowed from a bare ``except`` so SystemExit and
                # KeyboardInterrupt still propagate.
                pass
            finally:
                self.i += 1
                msg = '[*] %s found, %s scanned , %s groups left' % (self.success, self.i, self.num - self.i)
                print_msg(msg)
            time.sleep(1.0)

    def run(self):
        """Spawn the worker greenlets, wait for the queue to drain, then report."""
        threads = [gevent.spawn(self._scan, i) for i in range(self.threads)]
        gevent.joinall(threads)

        msg = '[+] All Done. Success:%d Saved in:%s' % (self.success, self.filename)
        print_msg(msg, line_feed=True)
Example #3
0
class Track:
    def __init__(self, options, target):
        self.start_time = time.time()
        self.dns_servers = []
        self.options = options
        self.target = target
        self.resolvers = [dns.resolver.Resolver(configure=False) for _ in range(self.options.threads)]
        self.all_dns = dns.resolver.Resolver(configure=False)
        self.all_dns.nameservers = self.dns_servers
        detect_target.detect_target(self.target, True)
        self.dns_servers = dns_server.load_dns_server()
        self.dns_server_count = len(self.dns_servers)
        self.queue = PriorityQueue()
        self.priority = 0
        self.scan_count = self.confirm_count = 0
        self.confirm_subdomain = set()
        self.cdn = ''
        self.load_status = {}
        self.load_status = load_subdomain_name.load_sub_name(self.options, self.target, self.priority, self.queue)
        self.priority = self.load_status['priority']
        self.queue = self.load_status['queue']
        self.print_header_info()
        self.outfile = self.outfile_descriptor()


    def outfile_descriptor(self):
        if self.options.outfile:
            outfile_name = self.options.outfile + '.txt'
        else:
            outfile_name = self.target + '.txt'
        f = open(outfile_name, 'w')
        return f

    def print_header_info(self):
        print 'Domain Name\t\t\t\tIp Adress\t\tStatus\tCDN\tServer'
        print '------------\t\t\t\t----------\t\t------\t------\t---------'

    def subdomain_track(self, serial):
        dns_serial_number = serial % self.dns_server_count
        self.resolvers[serial].nameservers = [self.dns_servers[dns_serial_number]]
        while not self.queue.empty():
            try:
                subdomain_name = self.queue.get(timeout=1)[1]
                msg = '%.1fs | Found %d \r' % (time.time()-self.start_time, self.confirm_count)
                sys.stdout.write(msg)
                sys.stdout.flush()
                self.scan_count += 1
            except traceback:
                traceback.print_exc()
                sys.exit(1)
            try:
                if subdomain_name in self.confirm_subdomain:
                    continue
                if not self.options.crt:
                    subdomian_url = subdomain_name + '.' + self.target
                else:
                    subdomian_url = subdomain_name

                try:
                    dns_responses = self.resolvers[serial].query(subdomian_url)
                except dns.resolver.NoAnswer, e:
                    dns_responses = self.all_dns.query(subdomian_url)

                if len(dns_responses) > 1:
                    self.cdn = 'Yes'
                else:
                    self.cdn = 'Unknown'

                server_responses = detect_target.detect_target(subdomian_url)

                if dns_responses and server_responses:
                    self.confirm_subdomain.add(subdomain_name)
                    self.confirm_count += 1
                    ip = dns_responses[0].address
                    if server_responses.get('Server'):
                        server_info = server_responses['Server']
                    else:
                        server_info = 'Unknown'
                    status_code = server_responses['status_code']
                    print '%s\t\t%s\t\t%s\t%s\t%s' % (subdomian_url.ljust(30), ip, status_code, self.cdn, server_info)

                    msg = '%s\t\t%s\t\t%s\t%s\t%s' % (subdomian_url.ljust(30), ip, status_code, self.cdn, server_info)
                    self.outfile.write(msg)
                    self.outfile.flush()

            except (dns.resolver.NoAnswer, dns.resolver.NoNameservers, dns.exception.Timeout, dns.resolver.NXDOMAIN) as e:
                pass
            except Exception as e:
                traceback.print_exc()
                sys.exit(1)
Example #4
0
class AWSLogs(object):

    ACTIVE = 1
    EXHAUSTED = 2
    WATCH_SLEEP = 2

    def __init__(self, **kwargs):
        self.connection_cls = kwargs.get('connection_cls', AWSConnection)
        self.aws_region = kwargs.get('aws_region')
        self.aws_access_key_id = kwargs.get('aws_access_key_id')
        self.aws_secret_access_key = kwargs.get('aws_secret_access_key')
        self.log_group_name = kwargs.get('log_group_name')
        self.log_stream_name = kwargs.get('log_stream_name')
        self.watch = kwargs.get('watch')
        self.color_enabled = kwargs.get('color_enabled')
        self.output_stream_enabled = kwargs.get('output_stream_enabled')
        self.output_group_enabled = kwargs.get('output_group_enabled')
        self.start = self.parse_datetime(kwargs.get('start'))
        self.end = self.parse_datetime(kwargs.get('end'))
        self.pool_size = max(kwargs.get('pool_size', 0), 10)
        self.max_group_length = 0
        self.max_stream_length = 0
        self.publishers = []
        self.events_queue = Queue()
        self.raw_events_queue = PriorityQueue()
        self.publishers_queue = PriorityQueue()
        self.publishers = []
        self.stream_status = {}
        self.stream_max_timestamp = {}
        self.connection = self.connection_cls(
            self.aws_region,
            aws_access_key_id=self.aws_access_key_id,
            aws_secret_access_key=self.aws_secret_access_key
        )

    def _get_streams_from_patterns(self, log_group_pattern, log_stream_pattern):
        """Returns pairs of group, stream matching ``log_group_pattern`` and
        ``log_stream_pattern``."""
        for group in self._get_groups_from_pattern(log_group_pattern):
            for stream in self._get_streams_from_pattern(group,
                                                         log_stream_pattern):
                yield group, stream

    def _get_groups_from_pattern(self, pattern):
        """Returns groups matching ``pattern``."""
        pattern = '.*' if pattern == 'ALL' else pattern
        reg = re.compile('^{0}'.format(pattern))
        for group in self.get_groups():
            if re.match(reg, group):
                yield group

    def _get_streams_from_pattern(self, group, pattern):
        """Returns streams in ``group`` matching ``pattern``."""
        pattern = '.*' if pattern == 'ALL' else pattern
        reg = re.compile('^{0}'.format(pattern))
        for stream in self.get_streams(group):
            if re.match(reg, stream):
                yield stream

    def _publisher_queue_consumer(self):
        """Consume ``publishers_queue`` api calls, run them and publish log
        events to ``raw_events_queue``. If ``nextForwardToken`` is present
        register a new api call into ``publishers_queue`` using as weight
        the timestamp of the latest event."""
        while True:
            try:
                _, (log_group_name, log_stream_name, next_token) = self.publishers_queue.get(block=False)
            except Empty:
                if self.watch:
                    gevent.sleep(self.WATCH_SLEEP)
                else:
                    break

            response = self.connection.get_log_events(
                next_token=next_token,
                log_group_name=log_group_name,
                log_stream_name=log_stream_name,
                start_time=self.start,
                end_time=self.end,
                start_from_head=True
            )

            if not len(response['events']):
                self.stream_status[(log_group_name, log_stream_name)] = self.EXHAUSTED
                continue

            self.stream_status[(log_group_name, log_stream_name)] = self.ACTIVE

            for event in response['events']:
                event['group'] = log_group_name
                event['stream'] = log_stream_name
                self.raw_events_queue.put((event['timestamp'], event))
                self.stream_max_timestamp[(log_group_name, log_stream_name)] = event['timestamp']

            if 'nextForwardToken' in response:
                self.publishers_queue.put(
                    (response['events'][-1]['timestamp'],
                     (log_group_name, log_stream_name, response['nextForwardToken']))
                )

    def _get_min_timestamp(self):
        """Return the minimum timestamp available across all active streams."""
        pending = [self.stream_max_timestamp[k] for k, v in self.stream_status.iteritems() if v != self.EXHAUSTED]
        return min(pending) if pending else None

    def _get_all_streams_exhausted(self):
        """Return if all streams are exhausted."""
        return all((s == self.EXHAUSTED for s in self.stream_status.itervalues()))

    def _raw_events_queue_consumer(self):
        """Consume events from ``raw_events_queue`` if all active streams
        have already publish events up to the ``_get_min_timestamp`` and
        register them in order into ``events_queue``."""
        while True:
            if self._get_all_streams_exhausted() and self.raw_events_queue.empty():
                if self.watch:
                    gevent.sleep(self.WATCH_SLEEP)
                    continue
                self.events_queue.put(NO_MORE_EVENTS)
                break

            try:
                timestamp, line = self.raw_events_queue.peek(timeout=1)
            except Empty:
                continue

            min_timestamp = self._get_min_timestamp()
            if min_timestamp and min_timestamp < timestamp:
                gevent.sleep(0.3)
                continue

            timestamp, line = self.raw_events_queue.get()

            output = [line['message']]
            if self.output_stream_enabled:
                output.insert(
                    0,
                    self.color(
                        line['stream'].ljust(self.max_stream_length, ' '),
                        'cyan'
                    )
                )
            if self.output_group_enabled:
                output.insert(
                    0,
                    self.color(
                        line['group'].ljust(self.max_group_length, ' '),
                        'green'
                    )
                )
            self.events_queue.put("{0}\n".format(' '.join(output)))

    def _events_consumer(self):
        """Print events from ``events_queue`` as soon as they are available."""
        while True:
            event = self.events_queue.get(True)
            if event == NO_MORE_EVENTS:
                break
            sys.stdout.write(event)
            sys.stdout.flush()

    def list_logs(self):
        self.register_publishers()

        pool = Pool(size=self.pool_size)
        pool.spawn(self._raw_events_queue_consumer)
        pool.spawn(self._events_consumer)

        if self.watch:
            pool.spawn(self.register_publishers_periodically)

        for i in xrange(self.pool_size):
            pool.spawn(self._publisher_queue_consumer)
        pool.join()

    def register_publishers(self):
        """Register publishers into ``publishers_queue``."""
        for group, stream in self._get_streams_from_patterns(self.log_group_name, self.log_stream_name):
            if (group, stream) in self.publishers:
                continue
            self.publishers.append((group, stream))
            self.max_group_length = max(self.max_group_length, len(group))
            self.max_stream_length = max(self.max_stream_length, len(stream))
            self.publishers_queue.put((0, (group, stream, None)))
            self.stream_status[(group, stream)] = self.ACTIVE
            self.stream_max_timestamp[(group, stream)] = -1

    def register_publishers_periodically(self):
        while True:
            self.register_publishers()
            gevent.sleep(2)

    def list_groups(self):
        """Lists available CloudWatch logs groups"""
        for group in self.get_groups():
            print group

    def list_streams(self, *args, **kwargs):
        """Lists available CloudWatch logs streams in ``log_group_name``."""
        for stream in self.get_streams(*args, **kwargs):
            print stream

    def get_groups(self):
        """Returns available CloudWatch logs groups"""
        next_token = None
        while True:
            response = self.connection.describe_log_groups(next_token=next_token)

            for group in response.get('logGroups', []):
                yield group['logGroupName']

            if 'nextToken' in response:
                next_token = response['nextToken']
            else:
                break

    def get_streams(self, log_group_name=None):
        """Returns available CloudWatch logs streams in ``log_group_name``."""
        log_group_name = log_group_name or self.log_group_name
        next_token = None

        while True:
            response = self.connection.describe_log_streams(
                log_group_name=log_group_name,
                next_token=next_token
            )

            for stream in response.get('logStreams', []):
                yield stream['logStreamName']

            if 'nextToken' in response:
                next_token = response['nextToken']
            else:
                break

    def color(self, text, color):
        """Returns coloured version of ``text`` if ``color_enabled``."""
        if self.color_enabled:
            return colored(text, color)
        return text

    def parse_datetime(self, datetime_text):
        """Parse ``datetime_text`` into a ``datetime``."""
        if not datetime_text:
            return None

        ago_match = re.match(r'(\d+)\s?(m|minute|minutes|h|hour|hours|d|day|days|w|weeks|weeks)(?: ago)?', datetime_text)
        if ago_match:
            amount, unit = ago_match.groups()
            amount = int(amount)
            unit = {'m': 60, 'h': 3600, 'd': 86400, 'w': 604800}[unit[0]]
            date = datetime.now() + timedelta(seconds=unit * amount * -1)
        else:
            try:
                date = parse(datetime_text)
            except ValueError:
                raise exceptions.UnknownDateError(datetime_text)

        return int(date.strftime("%s")) * 1000
class SubNameBrute:
    """Brute-force subdomain scanner.

    Receives the command-line target/options, validates a pool of DNS servers,
    loads wordlists, then resolves ``<sub>.<target>`` across gevent workers;
    hits are written to ``results/<target>/<target>.txt``.
    """

    def __init__(self, target, options):
        self.start_time = time.time()
        self.target = target.strip()
        self.options = options
        self.scan_count = self.found_count = 0
        self.console_width = os.get_terminal_size()[0] - 2  # for \r status lines

        # create dns resolver pool ~ workers (one resolver per worker)
        self.resolvers = [
            dns.resolver.Resolver(configure=False)
            for _ in range(options.threads)
        ]
        for resolver in self.resolvers:
            resolver.lifetime = resolver.timeout = 10.0

        self.print_count = 0
        self.STOP_ME = False

        # load dns servers and keep only the ones that answer correctly
        self._load_dns_servers()

        # load sub names
        self.subs = []      # subs in file
        self.goodsubs = []  # checks ok for further exploitation
        self._load_subname('dict/subnames.txt', self.subs)

        # load sub.sub names
        self.subsubs = []
        self._load_subname('dict/next_sub.txt', self.subsubs)

        # results will save to results/<target>/<target>.txt
        global path
        path = os.path.join("results", target)
        if not os.path.exists(path):
            os.makedirs(path)

        # NOTE(review): handle is never closed explicitly; rows are written
        # per hit and the process exits shortly after run().
        self.outfile = open('%s/%s.txt' % (path, target), 'w')

        self.ip_dict = set()    # all resolved addresses (deduplicated)
        self.found_sub = set()  # confirmed full subdomain names

        # task queue shared by all workers
        self.queue = PriorityQueue()
        for sub in self.subs:
            self.queue.put(sub)

    def _load_dns_servers(self):
        """Load DNS server IPs from dict/dns_servers.txt and keep only the
        ones that pass the correctness probe; exits if none survive."""
        print('[*] Validate DNS servers ...')
        self.dns_servers = []

        # pool sized at cores * 2 -- tune freely
        processors = cpu_count() * 2
        pool = Pool(processors)

        # read dns ips and check one by one
        for server in open('dict/dns_servers.txt').readlines():
            server = server.strip()
            if server:
                pool.apply_async(self._test_server, (server, ))

        pool.join()  # waiting for checks to finish
        self.dns_count = len(self.dns_servers)

        sys.stdout.write('\n')
        dns_info = '[+] Found {} available DNS Servers in total'.format(
            self.dns_count)
        print(dns_info)

        if self.dns_count == 0:
            print('[ERROR] No DNS Servers available.')
            sys.exit(-1)

    def _test_server(self, server):
        """Probe one DNS server; keep it only if it resolves a known name to
        the expected address (filters dead and lying resolvers)."""
        resolver = dns.resolver.Resolver()
        resolver.lifetime = resolver.timeout = 10.0

        try:
            resolver.nameservers = [server]

            # public-dns-a.baidu.com must resolve to 180.76.76.76 on an
            # honest, working resolver.
            answers = resolver.query('public-dns-a.baidu.com')
            if answers[0].address != '180.76.76.76':
                raise Exception('incorrect DNS response')
            self.dns_servers.append(server)
        except Exception:
            # Narrowed from a bare ``except``.
            self._print_msg('[-] Check DNS Server %s <Fail>   Found %s' %
                            (server.ljust(16), len(self.dns_servers)))
            # BUG FIX: return here -- the original fell through and printed
            # the < OK > line for failed servers as well.
            return

        self._print_msg('[+] Check DNS Server %s < OK >   Found %s' %
                        (server.ljust(16), len(self.dns_servers)))

    def _load_subname(self, file, subname_list):
        """Load sub names from ``file`` into ``subname_list``, expanding the
        {alphnum}/{alpha}/{num} placeholders into every concrete character."""
        self._print_msg('[*] Load sub names ...')

        with open(file) as f:
            for line in f:
                sub = line.strip()
                if sub and sub not in subname_list:
                    tmp_set = {sub}
                    # Expand one placeholder per pass; items re-enter the set
                    # until no placeholder remains, then land in the list.
                    while len(tmp_set) > 0:
                        item = tmp_set.pop()
                        if item.find('{alphnum}') >= 0:
                            for _letter in 'abcdefghijklmnopqrstuvwxyz0123456789':
                                tmp_set.add(
                                    item.replace('{alphnum}', _letter, 1))
                        elif item.find('{alpha}') >= 0:
                            for _letter in 'abcdefghijklmnopqrstuvwxyz':
                                tmp_set.add(item.replace(
                                    '{alpha}', _letter, 1))
                        elif item.find('{num}') >= 0:
                            for _letter in '0123456789':
                                tmp_set.add(item.replace('{num}', _letter, 1))
                        elif item not in subname_list:
                            subname_list.append(item)

    def _print_msg(self, _msg=None, _found_msg=False):
        """Console status output helper (single-line \\r updates); with no
        argument, prints a progress summary every 100th call."""
        if _msg is None:
            self.print_count += 1
            if self.print_count < 100:
                return
            self.print_count = 0
            msg = '%s Found| %s Groups| %s scanned in %.1f seconds' % (
                self.found_count, self.queue.qsize(), self.scan_count,
                time.time() - self.start_time)
            sys.stdout.write('\r' + ' ' * (self.console_width - len(msg)) +
                             msg)
        elif _msg.startswith('[+] Check DNS Server'):
            # DNS-check lines overwrite each other on one console line.
            sys.stdout.write('\r' + _msg + ' ' *
                             (self.console_width - len(_msg)))
        else:
            sys.stdout.write('\r' + _msg + ' ' *
                             (self.console_width - len(_msg)) + '\n')
            if _found_msg:
                msg = '%s Found| %s Groups| %s scanned in %.1f seconds' % (
                    self.found_count, self.queue.qsize(), self.scan_count,
                    time.time() - self.start_time)
                sys.stdout.write('\r' + ' ' * (self.console_width - len(msg)) +
                                 msg)
        sys.stdout.flush()

    def _print_domain(self, msg):
        """Overwrite the current console line with a found-domain message."""
        console_width = os.get_terminal_size()[0]
        msg = '\r' + msg + ' ' * (console_width - len(msg))
        sys.stdout.write(msg)

    def _print_progress(self):
        """Render the scan progress, centred on one console line."""
        msg = '\033[0;31;47m%s\033[0m found | %s remaining | %s scanned in %.2f seconds' % \
              (self.found_count, self.queue.qsize(), self.scan_count, time.time()- self.start_time)

        console_width = os.get_terminal_size()[0]
        out = '\r' + ' ' * int((console_width - len(msg)) / 2) + msg
        sys.stdout.write(out)

    def _scan(self, j):
        """Worker loop ``j``: drain the queue, resolving each candidate with
        the DNS server assigned round-robin to this worker."""
        self.resolvers[j].nameservers = [self.dns_servers[j % self.dns_count]]
        while not self.queue.empty():
            sub = self.queue.get(timeout=1.0)
            self.scan_count += 1

            try:
                cur_sub_domain = sub + '.' + self.target
                answers = self.resolvers[j].query(cur_sub_domain)
            except Exception:
                # Narrowed from bare ``except``: NXDOMAIN/timeout/etc. simply
                # mean the candidate does not resolve.
                continue

            if answers:
                ips = ', '.join(sorted([answer.address for answer in answers]))

                # exclude: wildcard/sinkhole style single answers
                if ips in ['1.1.1.1', '127.0.0.1', '0.0.0.0', '0.0.0.1']:
                    continue
                # exclude: RFC1918 intranet addresses
                if SubNameBrute.is_intranet(answers[0].address):
                    continue

                self.found_sub.add(cur_sub_domain)
                for answer in answers:
                    self.ip_dict.add(answer.address)

                if sub not in self.goodsubs:
                    self.goodsubs.append(sub)

                self.found_count += 1
                ip_info = '{} \t {}'.format(cur_sub_domain, ips)
                self.outfile.write(cur_sub_domain + '\t' + ips + '\n')
                self._print_domain(ip_info)
                sys.stdout.flush()
                self._print_progress()
                sys.stdout.flush()

    @staticmethod
    def is_intranet(ip):
        """Return True when ``ip`` is malformed or inside an RFC1918 private
        range (10/8, 172.16/12, 192.168/16)."""
        ret = ip.split('.')
        if len(ret) != 4:
            return True
        if ret[0] == '10':
            return True
        # BUG FIX: 172.16.0.0/12 spans 172.16.x.x - 172.31.x.x; the original
        # upper bound of 32 wrongly classified 172.32.x.x as private.
        if ret[0] == '172' and 16 <= int(ret[1]) <= 31:
            return True
        if ret[0] == '192' and ret[1] == '168':
            return True
        return False

    def run(self):
        """Spawn one greenlet per configured thread and wait for completion."""
        threads = [
            gevent.spawn(self._scan, i) for i in range(self.options.threads)
        ]

        print('[*] Initializing %d threads' % self.options.threads)

        try:
            gevent.joinall(threads)
        except KeyboardInterrupt as e:
            msg = '[WARNING] User aborted.'
            sys.stdout.write('\r' + msg + ' ' *
                             (self.console_width - len(msg)) + '\n\r')
            sys.stdout.flush()
class StaticSmtpRelay(Relay):
    """Relays messages to one fixed ``host:port``, re-using open SMTP client
    connections whenever possible so several messages can ride a single
    channel.

    :param host: Host string to connect to.
    :param port: Port to connect to.
    :param pool_size: At most this many simultaneous connections will be open to
                      the destination. If this limit is reached and no
                      connections are idle, new attempts will block.
    :param tls: Optional dictionary of TLS settings passed directly as
                keyword arguments to :class:`gevent.ssl.SSLSocket`.
    :param tls_required: If given and True, it should be considered a delivery
                         failure if TLS cannot be negotiated by the client.
    :param connect_timeout: Timeout in seconds to wait for a client connection
                            to be successful before issuing a transient failure.
    :param command_timeout: Timeout in seconds to wait for a reply to each SMTP
                            command before issuing a transient failure.
    :param data_timeout: Timeout in seconds to wait for a reply to message data
                         before issuing a transient failure.
    :param idle_timeout: Timeout in seconds after a message is delivered before
                         a QUIT command is sent and the connection terminated.
                         If another message should be delivered before this
                         timeout expires, the connection will be re-used. By
                         default, QUIT is sent immediately and connections are
                         never re-used.
    """

    def __init__(self, host, port=25, pool_size=None, client_class=None,
                       **client_kwargs):
        super(StaticSmtpRelay, self).__init__()
        # Default to the bundled SMTP relay client unless one was supplied.
        if not client_class:
            from slimta.relay.smtp.client import SmtpRelayClient
            client_class = SmtpRelayClient
        self.client_class = client_class
        self.host = host
        self.port = port
        self.queue = PriorityQueue()      # pending (priority, result, envelope)
        self.pool = set()                 # live client connections
        self.pool_size = pool_size        # None == unbounded
        self.client_kwargs = client_kwargs

    def _remove_client(self, client):
        # Called when a client greenlet finishes. If deliveries are still
        # queued and this was the last client, spin up a replacement so the
        # queue is not stranded.
        self.pool.remove(client)
        work_pending = not self.queue.empty()
        if work_pending and not self.pool:
            self._add_client()

    def _add_client(self):
        # Start a new client connection bound to our delivery queue.
        new_client = self.client_class((self.host, self.port), self.queue,
                                       **self.client_kwargs)
        new_client.start()
        new_client.link(self._remove_client)
        self.pool.add(new_client)

    def _check_idle(self):
        # An idle client will pick up the next delivery; only grow the pool
        # when everyone is busy and we are still under the size limit.
        if any(client.idle for client in self.pool):
            return
        if not self.pool_size or len(self.pool) < self.pool_size:
            self._add_client()

    def attempt(self, envelope, attempts):
        """Queue ``envelope`` for delivery and block until its result is set."""
        self._check_idle()
        result = AsyncResult()
        self.queue.put((1, result, envelope))
        return result.get()
class EdgeDataBridge(object):
    """Edge Bridge: mirrors resource items from a central API into CouchDB.

    A feeder greenlet streams ``(priority, item)`` pairs into
    ``input_queue``; a filler batches them into bulk CouchDB existence
    checks and forwards only new/updated ids to ``resource_items_queue``;
    a pool of ``ResourceItemWorker`` greenlets persists them, sending
    failures through ``retry_resource_items_queue``.

    NOTE(review): this module is Python 2 era code (``xrange``,
    ``e.message``); kept as-is for consistency with the rest of the file.
    """

    def __init__(self, config):
        """Read and validate settings, then build queues, worker pools,
        the CouchDB connection and the API feeder.

        :param config: dict with a mandatory 'main' section.
        :raises DataBridgeConfigError: on missing or invalid configuration.
        """
        super(EdgeDataBridge, self).__init__()
        self.config = config
        self.workers_config = {}
        self.bridge_id = uuid.uuid4().hex
        self.api_host = self.config_get('resources_api_server')
        self.api_version = self.config_get('resources_api_version')
        self.retrievers_params = self.config_get('retrievers_params')

        # Check up_wait_sleep: values below 30 seconds hammer the API.
        up_wait_sleep = self.retrievers_params.get('up_wait_sleep')
        if up_wait_sleep is not None and up_wait_sleep < 30:
            raise DataBridgeConfigError('Invalid \'up_wait_sleep\' in '
                                        '\'retrievers_params\'. Value must be '
                                        'greater than 30.')

        # Workers settings: any key absent from config falls back to the
        # module-level WORKER_CONFIG defaults.
        for key in WORKER_CONFIG:
            self.workers_config[key] = (self.config_get(key)
                                        or WORKER_CONFIG[key])

        # Init config: expose every DEFAULTS key as an instance attribute.
        for key in DEFAULTS:
            setattr(self, key, self.config_get(key) or DEFAULTS[key])

        # Greenlet pools
        self.workers_pool = gevent.pool.Pool(self.workers_max)
        self.retry_workers_pool = gevent.pool.Pool(self.retry_workers_max)
        self.filter_workers_pool = gevent.pool.Pool(self.filter_workers_count)

        # Queues: a configured size of -1 means unbounded.
        if self.input_queue_size == -1:
            self.input_queue = PriorityQueue()
        else:
            self.input_queue = PriorityQueue(self.input_queue_size)

        if self.resource_items_queue_size == -1:
            self.resource_items_queue = PriorityQueue()
        else:
            self.resource_items_queue = PriorityQueue(
                self.resource_items_queue_size)

        self.api_clients_queue = Queue()

        if self.retry_resource_items_queue_size == -1:
            self.retry_resource_items_queue = PriorityQueue()
        else:
            self.retry_resource_items_queue = PriorityQueue(
                self.retry_resource_items_queue_size)

        if self.api_host != '' and self.api_host is not None:
            api_host = urlparse(self.api_host)
            if api_host.scheme == '' and api_host.netloc == '':
                raise DataBridgeConfigError(
                    'Invalid \'tenders_api_server\' url.')
        else:
            raise DataBridgeConfigError('In config dictionary empty or missing'
                                        ' \'tenders_api_server\'')
        self.db = prepare_couchdb(self.couch_url, self.db_name, logger)
        db_url = self.couch_url + '/' + self.db_name
        prepare_couchdb_views(db_url, self.workers_config['resource'], logger)
        self.server = Server(self.couch_url,
                             session=Session(retry_delays=range(10)))
        self.view_path = '_design/{}/_view/by_dateModified'.format(
            self.workers_config['resource'])
        extra_params = {
            'mode': self.retrieve_mode,
            'limit': self.resource_items_limit
        }
        self.feeder = ResourceFeeder(host=self.api_host,
                                     version=self.api_version,
                                     key='',
                                     resource=self.workers_config['resource'],
                                     extra_params=extra_params,
                                     retrievers_params=self.retrievers_params,
                                     adaptive=True,
                                     with_priority=True)
        # Per-client performance bookkeeping, keyed by client id.
        self.api_clients_info = {}

    def config_get(self, name):
        """Return ``config['main'][name]`` (None when absent).

        :raises DataBridgeConfigError: if the 'main' section is missing.
        """
        try:
            return self.config.get('main').get(name)
        except AttributeError:
            raise DataBridgeConfigError('In config dictionary missed section'
                                        ' \'main\'')

    def create_api_client(self):
        """Create one API client and enqueue it, retrying forever with
        exponential backoff on failure."""
        client_user_agent = self.user_agent + '/' + self.bridge_id
        timeout = 0.1
        while 1:
            try:
                api_client = APIClient(
                    host_url=self.api_host,
                    user_agent=client_user_agent,
                    api_version=self.api_version,
                    key='',
                    resource=self.workers_config['resource'])
                client_id = uuid.uuid4().hex
                logger.info('Started api_client {}'.format(
                    api_client.session.headers['User-Agent']),
                            extra={'MESSAGE_ID': 'create_api_clients'})
                api_client_dict = {
                    'id': client_id,
                    'client': api_client,
                    'request_interval': 0,
                    'not_actual_count': 0
                }
                self.api_clients_info[api_client_dict['id']] = {
                    'drop_cookies': False,
                    'request_durations': {},
                    'request_interval': 0,
                    'avg_duration': 0
                }
                self.api_clients_queue.put(api_client_dict)
                break
            except RequestFailed as e:
                logger.error(
                    'Failed start api_client with status code {}'.format(
                        e.status_code),
                    extra={'MESSAGE_ID': 'exceptions'})
                timeout = timeout * 2
                logger.info(
                    'create_api_client will be sleep {} sec.'.format(timeout))
                sleep(timeout)
            except Exception as e:
                logger.error('Failed start api client with error: {}'.format(
                    e.message),
                             extra={'MESSAGE_ID': 'exceptions'})
                timeout = timeout * 2
                logger.info(
                    'create_api_client will be sleep {} sec.'.format(timeout))
                sleep(timeout)

    def fill_api_clients_queue(self):
        """Top the API client queue up to the minimum worker count."""
        while self.api_clients_queue.qsize() < self.workers_min:
            self.create_api_client()

    def fill_input_queue(self):
        """Forever pull (priority, item) pairs from the feeder into
        ``input_queue`` (blocks when the queue is bounded and full)."""
        for resource_item in self.feeder.get_resource_items():
            self.input_queue.put(resource_item)
            logger.debug('Add to temp queue from sync: {} {} {}'.format(
                self.workers_config['resource'][:-1], resource_item[1]['id'],
                resource_item[1]['dateModified']),
                         extra={
                             'MESSAGE_ID': 'received_from_sync',
                             'TEMP_QUEUE_SIZE': self.input_queue.qsize()
                         })

    def send_bulk(self, input_dict, priority_cache):
        """Bulk-check *input_dict* (id -> dateModified) against the CouchDB
        view and enqueue ids that are new or newer than the stored copy.

        The view query is retried up to 3 times with exponential backoff;
        the last failure is re-raised.
        """
        sleep_before_retry = 2
        for i in xrange(0, 3):
            try:
                logger.debug('Send check bulk: {}'.format(len(input_dict)),
                             extra={'CHECK_BULK_LEN': len(input_dict)})
                start = time()
                rows = self.db.view(self.view_path, keys=input_dict.values())
                end = time() - start
                logger.debug('Duration bulk check: {} sec.'.format(end),
                             extra={'CHECK_BULK_DURATION': end * 1000})
                resp_dict = {k.id: k.key for k in rows}
                break
            # A plain Exception catch already covers IncompleteRead; the
            # old `(IncompleteRead, Exception)` tuple was redundant.
            except Exception as e:
                logger.error('Error while send bulk {}'.format(e.message),
                             extra={'MESSAGE_ID': 'exceptions'})
                if i == 2:
                    raise e
                sleep(sleep_before_retry)
                sleep_before_retry *= 2
        for item_id, date_modified in input_dict.items():
            if item_id in resp_dict and date_modified == resp_dict[item_id]:
                logger.debug('Skipped {} {}: In db exist newest.'.format(
                    self.workers_config['resource'][:-1], item_id),
                             extra={'MESSAGE_ID': 'skipped'})
            # Avoid duplicates: the item may already be queued at normal
            # (1) or retry (1000) priority.
            elif ((1, item_id) not in self.resource_items_queue.queue
                  and (1000, item_id) not in self.resource_items_queue.queue):
                self.resource_items_queue.put(
                    (priority_cache[item_id], item_id))
                logger.debug(
                    'Put to main queue {}: {}'.format(
                        self.workers_config['resource'][:-1], item_id),
                    extra={'MESSAGE_ID': 'add_to_resource_items_queue'})
            else:
                logger.debug(
                    'Skipped {} {}: In queue exist with same id'.format(
                        self.workers_config['resource'][:-1], item_id),
                    extra={'MESSAGE_ID': 'skipped'})

    def fill_resource_items_queue(self):
        """Forever drain ``input_queue`` into time/size-bounded bulks and
        flush each bulk through :meth:`send_bulk`."""
        start_time = datetime.now()
        input_dict = {}
        priority_cache = {}
        while True:
            # Get resource_item from temp queue; when it is empty, wait at
            # most until the current bulk interval expires.
            if not self.input_queue.empty():
                priority, resource_item = self.input_queue.get()
            else:
                timeout = self.bulk_query_interval -\
                    (datetime.now() - start_time).total_seconds()
                # Defensive clamp; elapsed time is non-negative so this
                # branch should never fire in practice.
                if timeout > self.bulk_query_interval:
                    timeout = self.bulk_query_interval
                try:
                    # NOTE(review): timeout may be negative here if the
                    # interval already elapsed; gevent then raises Empty
                    # almost immediately, which is the desired effect.
                    priority, resource_item = self.input_queue.get(
                        timeout=timeout)
                except Empty:
                    resource_item = None

            # Add resource_item to bulk (latest dateModified wins per id).
            if resource_item is not None:
                logger.debug('Add to input_dict {}'.format(
                    resource_item['id']))
                input_dict[resource_item['id']] = resource_item['dateModified']
                priority_cache[resource_item['id']] = priority

            if (len(input_dict) >= self.bulk_query_limit
                    or (datetime.now() - start_time).total_seconds() >=
                    self.bulk_query_interval):
                if len(input_dict) > 0:
                    self.send_bulk(input_dict, priority_cache)
                    input_dict = {}
                    priority_cache = {}
                start_time = datetime.now()

    def resource_items_filter(self, r_id, r_date_modified):
        """Return True when the item should be processed: it is absent
        from CouchDB, stored with an older dateModified, or the lookup
        itself failed (process rather than risk losing an update)."""
        try:
            local_document = self.db.get(r_id)
            if local_document:
                return local_document['dateModified'] < r_date_modified
            return True
        except Exception as e:
            logger.error(
                'Filter error: Error while getting {} {} from couchdb: '
                '{}'.format(self.workers_config['resource'][:-1], r_id,
                            e.message),
                extra={'MESSAGE_ID': 'exceptions'})
            return True

    def _get_average_requests_duration(self):
        """Return ``(overall_avg, per_client_avgs)`` of request durations
        inside the performance window; clients whose samples span the
        whole window are flagged ``grown``."""
        req_durations = []
        delta = timedelta(seconds=self.perfomance_window)
        current_date = datetime.now() - delta
        for cid, info in self.api_clients_info.items():
            if len(info['request_durations']) > 0:
                if min(info['request_durations'].keys()) <= current_date:
                    info['grown'] = True
                avg = round(
                    sum(info['request_durations'].values()) * 1.0 /
                    len(info['request_durations']), 3)
                req_durations.append(avg)
                info['avg_duration'] = avg

        if len(req_durations) > 0:
            return round(sum(req_durations) / len(req_durations),
                         3), req_durations
        else:
            return 0, req_durations

    # TODO: Add logic for restart sync if last response grater than some values
    # and no active tasks specific for resource

    def queues_controller(self):
        """Forever grow/shrink the main worker pool based on how full the
        resource items queue is, and log queue fill percentages."""
        while True:
            if (self.workers_pool.free_count() > 0
                    and (self.resource_items_queue.qsize() >
                         ((float(self.resource_items_queue_size) / 100) *
                          self.workers_inc_threshold))):
                self.create_api_client()
                w = ResourceItemWorker.spawn(self.api_clients_queue,
                                             self.resource_items_queue,
                                             self.db, self.workers_config,
                                             self.retry_resource_items_queue,
                                             self.api_clients_info)
                self.workers_pool.add(w)
                logger.info('Queue controller: Create main queue worker.')
            elif (self.resource_items_queue.qsize() <
                  ((float(self.resource_items_queue_size) / 100) *
                   self.workers_dec_threshold)):
                if len(self.workers_pool) > self.workers_min:
                    wi = self.workers_pool.greenlets.pop()
                    wi.shutdown()
                    api_client_dict = self.api_clients_queue.get()
                    del self.api_clients_info[api_client_dict['id']]
                    logger.info('Queue controller: Kill main queue worker.')
            filled_resource_items_queue = round(
                self.resource_items_queue.qsize() /
                (float(self.resource_items_queue_size) / 100), 2)
            logger.info('Resource items queue filled on {} %'.format(
                filled_resource_items_queue))
            # Fixed: this previously computed qsize / size / 100 (a
            # fraction of a percent) instead of qsize / (size / 100),
            # the percentage form used for the main queue above.
            filled_retry_resource_items_queue = round(
                self.retry_resource_items_queue.qsize() /
                (float(self.retry_resource_items_queue_size) / 100), 2)
            logger.info('Retry resource items queue filled on {} %'.format(
                filled_retry_resource_items_queue))
            sleep(self.queues_controller_timeout)

    def gevent_watcher(self):
        """One supervision pass: run the performance watcher, report view
        indexing progress, respawn dead filler greenlets, keep worker
        pools at their minimums and log queue/client statistics."""
        self.perfomance_watcher()
        for t in self.server.tasks():
            if (t['type'] == 'indexer' and t['database'] == self.db_name
                    and t.get('design_document', None) == '_design/{}'.format(
                        self.workers_config['resource'])):
                logger.info(
                    'Watcher: Waiting for end of view indexing. Current'
                    ' progress: {} %'.format(t['progress']))

        # Check fill threads: a ready() greenlet has died and is restarted.
        input_threads = 1
        if self.input_queue_filler.ready():
            input_threads = 0
            logger.error('Temp queue filler error: {}'.format(
                self.input_queue_filler.exception.message),
                         extra={'MESSAGE_ID': 'exception'})
            self.input_queue_filler = spawn(self.fill_input_queue)
        logger.info('Input threads {}'.format(input_threads),
                    extra={'INPUT_THREADS': input_threads})
        fill_threads = 1
        if self.filler.ready():
            fill_threads = 0
            logger.error('Fill thread error: {}'.format(
                self.filler.exception.message),
                         extra={'MESSAGE_ID': 'exception'})
            self.filler = spawn(self.fill_resource_items_queue)
        logger.info('Filter threads {}'.format(fill_threads),
                    extra={'FILTER_THREADS': fill_threads})

        main_threads = self.workers_max - self.workers_pool.free_count()
        logger.info('Main threads {}'.format(main_threads),
                    extra={'MAIN_THREADS': main_threads})

        if len(self.workers_pool) < self.workers_min:
            for i in xrange(0, (self.workers_min - len(self.workers_pool))):
                w = ResourceItemWorker.spawn(self.api_clients_queue,
                                             self.resource_items_queue,
                                             self.db, self.workers_config,
                                             self.retry_resource_items_queue,
                                             self.api_clients_info)
                self.workers_pool.add(w)
                logger.info('Watcher: Create main queue worker.')
                self.create_api_client()
        retry_threads = self.retry_workers_max -\
            self.retry_workers_pool.free_count()
        logger.info('Retry threads {}'.format(retry_threads),
                    extra={'RETRY_THREADS': retry_threads})
        if len(self.retry_workers_pool) < self.retry_workers_min:
            for i in xrange(
                    0, self.retry_workers_min - len(self.retry_workers_pool)):
                self.create_api_client()
                w = ResourceItemWorker.spawn(self.api_clients_queue,
                                             self.retry_resource_items_queue,
                                             self.db, self.workers_config,
                                             self.retry_resource_items_queue,
                                             self.api_clients_info)
                self.retry_workers_pool.add(w)
                logger.info('Watcher: Create retry queue worker.')

        # Log queues size and API clients count
        main_queue_size = self.resource_items_queue.qsize()
        logger.info('Resource items queue size {}'.format(main_queue_size),
                    extra={'MAIN_QUEUE_SIZE': main_queue_size})
        retry_queue_size = self.retry_resource_items_queue.qsize()
        logger.info(
            'Resource items retry queue size {}'.format(retry_queue_size),
            extra={'RETRY_QUEUE_SIZE': retry_queue_size})
        api_clients_count = len(self.api_clients_info)
        logger.info('API Clients count: {}'.format(api_clients_count),
                    extra={'API_CLIENTS': api_clients_count})

    def _calculate_st_dev(self, values):
        """Return the population standard deviation of *values*, rounded
        to 3 decimal places (0 for an empty sequence)."""
        if not values:
            return 0
        avg = sum(values) * 1.0 / len(values)
        # A list comprehension (not map) so len() works on Python 3 too.
        variance = [(x - avg) ** 2 for x in values]
        avg_variance = sum(variance) * 1.0 / len(variance)
        st_dev = math.sqrt(avg_variance)
        return round(st_dev, 3)

    def _mark_bad_clients(self, dev):
        """Mark clients for cookie reset: either consistently slower than
        *dev*, or throttled (non-zero request_interval) despite being
        fast."""
        for cid, info in self.api_clients_info.items():
            if info.get('grown', False) and info['avg_duration'] > dev:
                info['drop_cookies'] = True
                logger.debug('Perfomance watcher: Mark client {} as bad, avg.'
                             ' request_duration is {} sec.'.format(
                                 cid, info['avg_duration']),
                             extra={'MESSAGE_ID': 'marked_as_bad'})
            elif info['avg_duration'] < dev and info['request_interval'] > 0:
                info['drop_cookies'] = True
                logger.debug('Perfomance watcher: Mark client {} as bad,'
                             ' request_interval is {} sec.'.format(
                                 cid, info['request_interval']),
                             extra={'MESSAGE_ID': 'marked_as_bad'})

    def perfomance_watcher(self):
        """Prune stale per-client duration samples, compute aggregate
        timing statistics, log them, and mark misbehaving clients."""
        avg_duration, values = self._get_average_requests_duration()
        for _, info in self.api_clients_info.items():
            # Drop samples older than window + one watch interval.
            delta = timedelta(seconds=self.perfomance_window +
                              self.watch_interval)
            current_date = datetime.now() - delta
            delete_list = []
            for key in info['request_durations']:
                if key < current_date:
                    delete_list.append(key)
            for k in delete_list:
                del info['request_durations'][k]
            delete_list = []

        st_dev = self._calculate_st_dev(values)
        if len(values) > 0:
            min_avg = min(values) * 1000
            max_avg = max(values) * 1000
        else:
            max_avg = 0
            min_avg = 0
        # Threshold for "bad" clients: one standard deviation above mean.
        dev = round(st_dev + avg_duration, 3)

        logger.info('Perfomance watcher:\nREQUESTS_STDEV - {} sec.\n'
                    'REQUESTS_DEV - {} ms.\nREQUESTS_MIN_AVG - {} ms.\n'
                    'REQUESTS_MAX_AVG - {} ms.\nREQUESTS_AVG - {} sec.'.format(
                        round(st_dev, 3), dev, min_avg, max_avg, avg_duration),
                    extra={
                        'REQUESTS_DEV': dev * 1000,
                        'REQUESTS_MIN_AVG': min_avg,
                        'REQUESTS_MAX_AVG': max_avg,
                        'REQUESTS_AVG': avg_duration * 1000
                    })
        self._mark_bad_clients(dev)

    def run(self):
        """Start the filler greenlets and the queue controller, then loop
        the watcher forever."""
        logger.info('Start Edge Bridge',
                    extra={'MESSAGE_ID': 'edge_bridge_start_bridge'})
        logger.info('Start data sync...',
                    extra={'MESSAGE_ID': 'edge_bridge__data_sync'})
        self.input_queue_filler = spawn(self.fill_input_queue)
        self.filler = spawn(self.fill_resource_items_queue)
        spawn(self.queues_controller)
        while True:
            self.gevent_watcher()
            sleep(self.watch_interval)
Exemple #8
0
class SubNameBrute:
    def __init__(self, target, options):
        self.start_time = time.time()
        self.target = target.strip()
        self.options = options
        self.ignore_intranet = options.i
        self.scan_count = self.found_count = 0
        self.console_width = getTerminalSize()[0] - 2
        self.resolvers = [
            dns.resolver.Resolver(configure=False)
            for _ in range(options.threads)
        ]
        for _ in self.resolvers:
            _.lifetime = _.timeout = 10.0
        self.print_count = 0
        self.STOP_ME = False
        self._load_dns_servers()
        self._load_next_sub()
        self.queue = PriorityQueue()
        self.priority = 0
        self._load_sub_names()
        if options.output:
            outfile = options.output
        else:
            _name = os.path.basename(self.options.file).replace('subnames', '')
            if _name != '.txt':
                _name = '_' + _name
            outfile = target + _name if not options.full_scan else target + '_full' + _name
        self.outfile = open(outfile, 'w')
        self.ip_dict = {}
        self.found_subs = set()
        self.ex_resolver = dns.resolver.Resolver(configure=False)
        self.ex_resolver.nameservers = self.dns_servers
        self.result_lines = []
        self.result_domains = []
        self.result_ips = []

    def _load_dns_servers(self):
        print '[+] Validate DNS servers ...'
        self.dns_servers = []
        pool = Pool(30)

        # change file location
        for server in open('lijiejie/' + 'dict/dns_servers.txt').xreadlines():
            server = server.strip()
            if server:
                pool.apply_async(self._test_server, (server, ))
        pool.join()

        self.dns_count = len(self.dns_servers)
        sys.stdout.write('\n')
        print '[+] Found %s available DNS Servers in total' % self.dns_count
        if self.dns_count == 0:
            print '[ERROR] No DNS Servers available.'
            sys.exit(-1)

    def _test_server(self, server):
        resolver = dns.resolver.Resolver()
        resolver.lifetime = resolver.timeout = 10.0
        try:
            resolver.nameservers = [server]
            answers = resolver.query(
                'public-dns-a.baidu.com')  # test lookup a existed domain
            if answers[0].address != '180.76.76.76':
                raise Exception('incorrect DNS response')
            try:
                resolver.query(
                    'test.bad.dns.lijiejie.com')  # Non-existed domain test
                with open('bad_dns_servers.txt', 'a') as f:
                    f.write(server + '\n')
                self._print_msg('[+] Bad DNS Server found %s' % server)
            except:
                self.dns_servers.append(server)
            self._print_msg('[+] Check DNS Server %s < OK >   Found %s' %
                            (server.ljust(16), len(self.dns_servers)))
        except:
            self._print_msg('[+] Check DNS Server %s <Fail>   Found %s' %
                            (server.ljust(16), len(self.dns_servers)))

    def _load_sub_names(self):
        self._print_msg('[+] Load sub names ...')
        if self.options.full_scan and self.options.file == 'subnames.txt':
            _file = 'lijiejie/' + 'dict/subnames_full.txt'
        else:
            if os.path.exists(self.options.file):
                _file = self.options.file
            elif os.path.exists('lijiejie/' + 'dict/%s' % self.options.file):
                _file = 'lijiejie/' + 'dict/%s' % self.options.file
            else:
                self._print_msg('[ERROR] Names file not exists: %s' %
                                self.options.file)
                exit(-1)

        if self.options.debug:
            _file = 'lijiejie/' + 'dict/debug.txt'
            if not os.path.exists(_file):
                self._print_msg('[ERROR] Names file not exists: %s' %
                                self.options.file)
                exit(-1)

        normal_lines = []
        wildcard_lines = []
        wildcard_list = []
        regex_list = []
        lines = set()
        with open(_file) as f:
            for line in f.xreadlines():
                sub = line.strip()
                if not sub or sub in lines:
                    continue
                lines.add(sub)

                if sub.find('{alphnum}') >= 0 or sub.find(
                        '{alpha}') >= 0 or sub.find('{num}') >= 0:
                    wildcard_lines.append(sub)
                    sub = sub.replace('{alphnum}', '[a-z0-9]')
                    sub = sub.replace('{alpha}', '[a-z]')
                    sub = sub.replace('{num}', '[0-9]')
                    if sub not in wildcard_list:
                        wildcard_list.append(sub)
                        regex_list.append('^' + sub + '$')
                else:
                    normal_lines.append(sub)
        pattern = '|'.join(regex_list)
        if pattern:
            _regex = re.compile(pattern)
            if _regex:
                for line in normal_lines[:]:
                    if _regex.search(line):
                        normal_lines.remove(line)

        for item in normal_lines:
            self.priority += 1
            self.queue.put((self.priority, item))

        for item in wildcard_lines:
            self.queue.put((88888888, item))

    def _load_next_sub(self):
        self._print_msg('[+] Load next level subs ...')
        self.next_subs = []
        _set = set()
        _file = 'lijiejie/' + 'dict/next_sub.txt' if not self.options.full_scan else 'dict/next_sub_full.txt'
        with open(_file) as f:
            for line in f:
                sub = line.strip()
                if sub and sub not in self.next_subs:
                    tmp_set = {sub}
                    while len(tmp_set) > 0:
                        item = tmp_set.pop()
                        if item.find('{alphnum}') >= 0:
                            for _letter in 'abcdefghijklmnopqrstuvwxyz0123456789':
                                tmp_set.add(
                                    item.replace('{alphnum}', _letter, 1))
                        elif item.find('{alpha}') >= 0:
                            for _letter in 'abcdefghijklmnopqrstuvwxyz':
                                tmp_set.add(item.replace(
                                    '{alpha}', _letter, 1))
                        elif item.find('{num}') >= 0:
                            for _letter in '0123456789':
                                tmp_set.add(item.replace('{num}', _letter, 1))
                        elif item not in _set:
                            _set.add(item)
                            self.next_subs.append(item)

    def _print_msg(self, _msg=None, _found_msg=False):
        if _msg is None:
            self.print_count += 1
            if self.print_count < 100:
                return
            self.print_count = 0
            msg = '%s Found| %s Groups| %s scanned in %.1f seconds' % (
                self.found_count, self.queue.qsize(), self.scan_count,
                time.time() - self.start_time)
            sys.stdout.write('\r' + ' ' * (self.console_width - len(msg)) +
                             msg)
        elif _msg.startswith('[+] Check DNS Server'):
            sys.stdout.write('\r' + _msg + ' ' *
                             (self.console_width - len(_msg)))
        else:
            sys.stdout.write('\r' + _msg + ' ' *
                             (self.console_width - len(_msg)) + '\n')
            if _found_msg:
                msg = '%s Found| %s Groups| %s scanned in %.1f seconds' % (
                    self.found_count, self.queue.qsize(), self.scan_count,
                    time.time() - self.start_time)
                sys.stdout.write('\r' + ' ' * (self.console_width - len(msg)) +
                                 msg)
        sys.stdout.flush()

    @staticmethod
    def is_intranet(ip):
        ret = ip.split('.')
        if len(ret) != 4:
            return True
        if ret[0] == '10':
            return True
        if ret[0] == '172' and 16 <= int(ret[1]) <= 32:
            return True
        if ret[0] == '192' and ret[1] == '168':
            return True
        return False

    def put_item(self, item):
        num = item.count('{alphnum}') + item.count('{alpha}') + item.count(
            '{num}')
        if num == 0:
            self.priority += 1
            self.queue.put((self.priority, item))
        else:
            self.queue.put((self.priority + num * 10000000, item))

    def _scan(self, j):
        self.resolvers[j].nameservers = [self.dns_servers[j % self.dns_count]]
        while not self.queue.empty():
            try:
                item = self.queue.get(timeout=1.0)[1]
                self.scan_count += 1
            except:
                break
            self._print_msg()
            try:
                if item.find('{alphnum}') >= 0:
                    for _letter in 'abcdefghijklmnopqrstuvwxyz0123456789':
                        self.put_item(item.replace('{alphnum}', _letter, 1))
                    continue
                elif item.find('{alpha}') >= 0:
                    for _letter in 'abcdefghijklmnopqrstuvwxyz':
                        self.put_item(item.replace('{alpha}', _letter, 1))
                    continue
                elif item.find('{num}') >= 0:
                    for _letter in '0123456789':
                        self.put_item(item.replace('{num}', _letter, 1))
                    continue
                elif item.find('{next_sub}') >= 0:
                    for _ in self.next_subs:
                        self.queue.put((0, item.replace('{next_sub}', _, 1)))
                    continue
                else:
                    sub = item

                if sub in self.found_subs:
                    continue

                cur_sub_domain = sub + '.' + self.target
                _sub = sub.split('.')[-1]
                try:
                    answers = self.resolvers[j].query(cur_sub_domain)
                except dns.resolver.NoAnswer, e:
                    answers = self.ex_resolver.query(cur_sub_domain)

                if answers:
                    self.found_subs.add(sub)
                    ips = ', '.join(
                        sorted([answer.address for answer in answers]))
                    if ips in ['1.1.1.1', '127.0.0.1', '0.0.0.0']:
                        continue

                    if self.ignore_intranet and SubNameBrute.is_intranet(
                            answers[0].address):
                        continue

                    try:
                        self.scan_count += 1
                        answers = self.resolvers[j].query(
                            cur_sub_domain, 'cname')
                        cname = answers[0].target.to_unicode().rstrip('.')
                        if cname.endswith(
                                self.target) and cname not in self.found_subs:
                            self.found_subs.add(cname)
                            cname_sub = cname[:len(cname) - len(self.target) -
                                              1]  # new sub
                            self.queue.put((0, cname_sub))

                    except:
                        pass

                    if (_sub, ips) not in self.ip_dict:
                        self.ip_dict[(_sub, ips)] = 1
                    else:
                        self.ip_dict[(_sub, ips)] += 1

                    if ips not in self.ip_dict:
                        self.ip_dict[ips] = 1
                    else:
                        self.ip_dict[ips] += 1

                    if self.ip_dict[(_sub, ips)] > 3 or self.ip_dict[ips] > 6:
                        continue

                    self.found_count += 1
                    msg = cur_sub_domain.ljust(30) + ips
                    self._print_msg(msg, _found_msg=True)
                    self._print_msg()

                    # TODO: close write file
                    self.outfile.write(
                        cur_sub_domain.ljust(30) + '\t' + ips + '\n')
                    self.outfile.flush()

                    self.result_lines.append(
                        cur_sub_domain.ljust(30) + '\t' + ips)
                    self.result_domains.append(cur_sub_domain)
                    self.result_ips.extend(ips.split(", "))

                    try:
                        self.resolvers[j].query('lijiejietest.' +
                                                cur_sub_domain)
                    except (dns.resolver.NXDOMAIN, dns.resolver.NoAnswer) as e:
                        self.queue.put((999999999, '{next_sub}.' + sub))
                    except:
                        pass

            except (dns.resolver.NXDOMAIN, dns.name.EmptyLabel) as e:
                pass
            except (dns.resolver.NoNameservers, dns.resolver.NoAnswer,
                    dns.exception.Timeout) as e:
                pass
Exemple #9
0
class SubNameBrute:
    """Brute-force sub-domains of ``target`` with a pool of gevent workers.

    One instance serves a single worker process: it loads its slice of the
    wordlist, resolves candidates against the supplied DNS servers and
    writes hits to per-process text/html files that the parent merges
    afterwards.  Counters (``scan_count``/``found_count``) are shared
    cross-process values updated in batches to limit lock contention.
    """

    def __init__(self, target, args, process_num, dns_servers, next_subs,
                 scan_count, found_count, queue_size_list, tmp_dir):
        self.target = target.strip()
        self.args = args
        self.process_num = process_num
        self.dns_servers = dns_servers
        self.dns_count = len(dns_servers)
        self.next_subs = next_subs
        # Shared (cross-process) counters plus local buffers; the local
        # buffers are flushed into the shared values every ~3 seconds.
        self.scan_count = scan_count
        self.scan_count_local = 0
        self.found_count = found_count
        self.found_count_local = 0
        self.queue_size_list = queue_size_list

        # One resolver per worker thread so lookups do not serialize.
        self.resolvers = [
            dns.resolver.Resolver(configure=False) for _ in range(args.threads)
        ]
        for _r in self.resolvers:
            _r.lifetime = _r.timeout = 6.0

        self.queue = PriorityQueue()
        self.item_index = 0
        self.priority = 0
        self._load_sub_names()
        self.ip_dict = {}
        self.found_subs = set()
        # Fallback resolver used when a per-thread resolver returns NoAnswer.
        self.ex_resolver = dns.resolver.Resolver(configure=False)
        self.ex_resolver.nameservers = dns_servers
        self.local_time = time.time()
        self.outfile = open(
            '%s/%s_part_%s.txt' % (tmp_dir, target, process_num), 'w')
        self.outfile_html = open('tmp/%s_html_%s.txt' % (target, process_num),
                                 'w')

    def _load_sub_names(self):
        """Load the wordlist and enqueue this process's share of it.

        Blank/duplicate lines are dropped; the ``[process_num::process]``
        stride interleaves entries so every worker process gets a fair
        slice in original file order.
        """
        if self.args.full_scan and self.args.file == 'subnames.txt':
            _file = 'Dict/subnames_full.txt'
        else:
            if os.path.exists(self.args.file):
                _file = self.args.file
            elif os.path.exists('Dict/%s' % self.args.file):
                _file = 'Dict/%s' % self.args.file
            else:
                print_msg('[ERROR] Names file not found: %s' % self.args.file)
                exit(-1)

        normal_lines = []
        lines = set()
        with open(_file) as f:
            for line in f:  # plain iteration replaces Py2-only xreadlines()
                sub = line.strip()
                if not sub or sub in lines:
                    continue
                lines.add(sub)
                normal_lines.append(sub)

        for item in normal_lines[self.process_num::self.args.process]:
            # Increasing priority numbers preserve the original file order.
            self.priority += 1
            self.queue.put((self.priority, item))

    def _scan(self, j):
        """Worker loop for thread ``j``: drain the queue and resolve names."""
        self.resolvers[j].nameservers = [self.dns_servers[j % self.dns_count]]
        while not self.queue.empty():
            try:
                item = self.queue.get()[1]
                self.scan_count_local += 1
                if time.time() - self.local_time > 3.0:
                    # Periodically flush the local counter to the shared one.
                    self.scan_count.value += self.scan_count_local
                    self.scan_count_local = 0
                    self.queue_size_list[self.process_num] = self.queue.qsize()
                    # Bug fix: restart the 3-second window; previously the
                    # timer was never reset here, so once 3 s elapsed every
                    # subsequent iteration performed the (locking) sync.
                    self.local_time = time.time()
            except Exception:
                break
            try:
                if item.find('{next_sub}') >= 0:
                    # Expand the placeholder into every next-level candidate.
                    for _ in self.next_subs:
                        self.queue.put((0, item.replace('{next_sub}', _, 1)))
                    continue
                else:
                    sub = item

                if sub in self.found_subs:
                    continue

                cur_sub_domain = sub + '.' + self.target
                _sub = sub.split('.')[-1]
                try:
                    answers = self.resolvers[j].query(cur_sub_domain)
                except dns.resolver.NoAnswer:
                    # Py2-only "except X, e" replaced with portable syntax;
                    # retry once through the fallback resolver.
                    answers = self.ex_resolver.query(cur_sub_domain)

                if answers:
                    self.found_subs.add(sub)
                    ips = ', '.join(
                        sorted([answer.address for answer in answers]))
                    # Common sinkhole / bogus answers are not real hits.
                    if ips in ['1.1.1.1', '127.0.0.1', '0.0.0.0']:
                        continue

                    if self.args.intranet and is_intranet(answers[0].address):
                        continue

                    try:
                        self.scan_count_local += 1
                        answers = self.resolvers[j].query(
                            cur_sub_domain, 'cname')
                        cname = answers[0].target.to_unicode().rstrip('.')
                        if cname.endswith(
                                self.target) and cname not in self.found_subs:
                            # A CNAME inside the target zone is itself a new
                            # sub-domain; enqueue it with top priority.
                            self.found_subs.add(cname)
                            cname_sub = cname[:len(cname) - len(self.target) -
                                              1]
                            self.queue.put((0, cname_sub))

                    except Exception:
                        pass

                    # Throttle names whose last label keeps mapping to the
                    # same IP set (likely wildcard / CDN noise).
                    if (_sub, ips) not in self.ip_dict:
                        self.ip_dict[(_sub, ips)] = 1
                    else:
                        self.ip_dict[(_sub, ips)] += 1
                        if self.ip_dict[(_sub, ips)] > 30:
                            continue

                    self.found_count_local += 1
                    if time.time() - self.local_time > 3.0:
                        self.found_count.value += self.found_count_local
                        self.found_count_local = 0
                        self.queue_size_list[
                            self.process_num] = self.queue.qsize()
                        self.local_time = time.time()

                    self.outfile_html.write(
                        PrintHtml.Sub_html_print(cur_sub_domain, ips))
                    self.outfile_html.flush()
                    self.outfile.write(
                        cur_sub_domain.ljust(30) + '\t' + ips + '\n')
                    self.outfile.flush()
                    try:
                        # Probe a random label under the hit: if it does NOT
                        # resolve, the zone is not a wildcard, so the next
                        # level is worth brute-forcing (lowest priority).
                        self.resolvers[j].query('myzxcghelloha.' +
                                                cur_sub_domain)
                    except (dns.resolver.NXDOMAIN, dns.resolver.NoAnswer):
                        self.queue.put((999999999, '{next_sub}.' + sub))
                    except Exception:
                        pass

            except (dns.resolver.NXDOMAIN, dns.name.EmptyLabel):
                pass
            except (dns.resolver.NoNameservers, dns.resolver.NoAnswer,
                    dns.exception.Timeout):
                pass
Exemple #10
0
class Scanner:
    """Queue-driven multi-check scanner for a list of host[:port] targets.

    Targets are read interactively from a file, pushed onto a queue and
    drained by gevent greenlets; ``case`` selects which check each target
    receives: "1" unauthenticated services, "2" Struts S2-045, "3" HTTP
    headers, "4" weak files, "5" /24 port sweep.
    """

    def __init__(self):
        self.start_time = time.time()
        self.queue = PriorityQueue()
        self.history = []  # /24 prefixes already swept by portscan_c
        self.total_count = 0
        self.scan_count = 0
        self._load_target()
        self.outfile = open("log.log", 'w')
        self.console_width = getTerminalSize()[0] - 2

    def _print_msg(self, _msg=None, _found_msg=False):
        """Print a progress line (``_msg is None``) or log ``_msg``.

        Logged messages are mirrored to ``self.outfile``; found messages
        are additionally followed by a refreshed progress line.
        """
        if _msg is None:
            msg = '%s TotalCount| %s Scanned in %.2f seconds' % (
                self.total_count, self.total_count - self.queue.qsize(),
                time.time() - self.start_time)
            sys.stdout.write('\r' + ' ' * (self.console_width - len(msg)) +
                             msg)
        else:
            sys.stdout.write('\r' + _msg + ' ' *
                             (self.console_width - len(_msg)) + '\n')
            self.outfile.write(_msg + '\n')
            self.outfile.flush()
            if _found_msg:
                msg = '%s TotalCount| %s Scanned in %.2f seconds' % (
                    self.total_count, self.total_count - self.queue.qsize(),
                    time.time() - self.start_time)
                sys.stdout.write('\r' + ' ' * (self.console_width - len(msg)) +
                                 msg)
        sys.stdout.flush()

    def _load_target(self):
        """Ask the user for a target file and enqueue every stripped line."""
        print('[+] Read targets ...')
        # NOTE(review): raw_input is Python 2 only; the rest of this class
        # (str sockets, gevent) also assumes Python 2 — confirm runtime.
        target_file = raw_input("Target File :")
        with open(target_file) as f:
            for line in f:  # plain iteration replaces Py2-only xreadlines()
                target = line.strip()
                self.queue.put(target)

        print("TotalCount is %d" % self.queue.qsize())
        self.total_count = self.queue.qsize()
        print("Now scanning ...")

    def _scan(self, case):
        """Greenlet body: pop targets and dispatch to the selected check."""
        while not self.queue.empty():
            target = self.queue.get()
            if case == "1":
                self.vulnCheck(target)
            if case == "2":
                self.s2_045(target)
            if case == "3":
                self.headers(target)
            if case == "4":
                self.weakfile(target)
            if case == "5":
                self.portscan_c(target)

    def vulnCheck(self, target):
        """Check well-known unauthenticated services, selected by port.

        Covers Docker API (:2375), MongoDB (:27017), Redis (:6379),
        Memcached (:11211) and Elasticsearch (:9200).
        """
        if ":2375" in target:
            try:
                res = requests.head("http://" + str(target) +
                                    "/containers/json",
                                    timeout=2)
                if res.headers['Content-Type'] == 'application/json':
                    self._print_msg(target + "==>  docker api Vuln", True)
                else:
                    self._print_msg()
            except Exception:
                self._print_msg()
            self._print_msg()

        if ":27017" in target:
            ip, port = target.split(":")
            try:
                socket.setdefaulttimeout(3)
                s = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
                s.connect((ip, int(port)))
                # Raw "ismaster" wire command: answered without auth when
                # the instance is exposed.
                data = binascii.a2b_hex(
                    "3a000000a741000000000000d40700000000000061646d696e2e24636d640000000000ffffffff130000001069736d6173746572000100000000"
                )
                s.send(data)
                result = s.recv(1024)
                if "ismaster" in result:
                    # getLog(startupWarnings) succeeds only without auth.
                    getlog_data = binascii.a2b_hex(
                        "480000000200000000000000d40700000000000061646d696e2e24636d6400000000000100000021000000026765744c6f670010000000737461727475705761726e696e67730000"
                    )
                    s.send(getlog_data)
                    result = s.recv(1024)
                    if "totalLinesWritten" in result:
                        self._print_msg(target + "==>  mongodb Vuln", True)
            except Exception:
                pass

        if ":6379" in target:
            ip, port = target.split(":")
            try:
                socket.setdefaulttimeout(3)
                s = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
                s.connect((ip, int(port)))
                s.send("INFO\r\n")
                result = s.recv(1024)
                if "redis_version" in result:
                    # INFO answered without auth: open instance.
                    self._print_msg(target + "==>  redis Vuln", True)
                elif "Authentication" in result:
                    # Try a handful of trivial passwords.
                    for pass_ in ['123456', 'redis', 'pass', 'password']:
                        s = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
                        s.connect((ip, int(port)))
                        s.send("AUTH %s\r\n" % (pass_))
                        result = s.recv(1024)
                        if '+OK' in result:
                            self._print_msg(
                                target + "==>  redis pass Vuln :" + pass_,
                                True)
            except Exception:
                pass
        if ":11211" in target:
            ip, port = target.split(":")
            try:
                socket.setdefaulttimeout(3)
                s = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
                s.connect((ip, int(port)))
                s.send("stats\r\n")
                result = s.recv(1024)
                if "STAT version" in result:
                    self._print_msg(target + "==>  memcache Vuln", True)
            except Exception:
                pass

        if ":9200" in target:
            try:
                res = requests.head("http://" + str(target) +
                                    "/_rvier/_search",
                                    timeout=2)
                if res.status_code == 200:
                    self._print_msg(target + "==>  Elasticsearch Vuln", True)
                else:
                    self._print_msg()
            except Exception:
                self._print_msg()
            self._print_msg()

    def headers(self, target):
        """Log the raw HTTP response headers of the target."""
        try:
            res = requests.head("http://" + str(target), timeout=1)
            self._print_msg(target + "==>" + str(res.headers), True)
        except Exception:
            self._print_msg()
        self._print_msg()

    def s2_045(self, target):
        """Detect Struts2 S2-045 (CVE-2017-5638) via the OGNL Content-Type."""
        try:
            data = {"image": " "}
            headers = {
                "User-Agent":
                "Mozilla/5.0 (Macintosh; Intel Mac OS X 10_12_3) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/56.0.2924.87 Safari/537.36",
                "Content-Type":
                "%{#context['com.opensymphony.xwork2.dispatcher.HttpServletResponse'].addHeader('vul','s2-045')}.multtargetart/form-data"
            }
            req = requests.post("http://" + str(target),
                                data=data,
                                headers=headers)
            # A vulnerable server evaluates the OGNL and echoes this header.
            if req.headers["vul"] == "s2-045":
                self._print_msg(target + "==>" + "S2-045 Vuln", True)
        except Exception:
            self._print_msg()
        self._print_msg()

    def weakfile(self, target):
        """Probe a few common information-leak paths over HTTP."""
        # Bug fix: 'robots.txt' was missing its leading slash, producing
        # URLs like http://hostrobots.txt that could never match.
        weaklist = ["/robots.txt", "/i.php", "/phpinfo.php"]
        for weakfile in weaklist:
            try:
                # Bug fix: HEAD responses carry no body, so the content
                # checks below could never fire; a GET is required.
                res = requests.get("http://" + str(target) + weakfile,
                                   timeout=1)
                if res.status_code == 200:
                    if ("User-agent" in res.content) or ("phpinfo"
                                                         in res.content):
                        self._print_msg("http://" + target + weakfile, True)
            except Exception:
                self._print_msg()
        self._print_msg()

    def portscan_c(self, target):
        """Sweep the target's /24 network for a fixed set of service ports."""
        # The module-level socket import is already used by vulnCheck;
        # the former function-local re-import was redundant.
        ip = socket.gethostbyname(target)
        ports = [1433, 2375, 3306, 6379, 9200, 11211, 27017]
        ip = ip.split(".")
        ipc = ip[0] + "." + ip[1] + "." + ip[2] + "."
        if ipc in self.history:
            return  # this /24 was already swept
        else:
            self.history.append(ipc)

        for port in ports:
            for i in range(255):
                cs = None
                try:
                    cs = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
                    cs.settimeout(2.5)
                    address = (ipc + str(i), int(port))
                    status = cs.connect_ex(address)
                    if status == 0:
                        self._print_msg(ipc + str(i) + ":" + str(port), True)
                except Exception:
                    pass

                finally:
                    # Bug fix: guard against cs being unset when socket()
                    # itself raised (previously a NameError here).
                    if cs is not None:
                        cs.close()
                self._print_msg()

    def run(self, case):
        """Spawn 1000 scanning greenlets and wait for them to finish."""
        threads = [gevent.spawn(self._scan, case) for _ in range(1000)]
        # Bug fix: without joinall(), run() returned immediately and the
        # greenlets never got a chance to complete their work.
        gevent.joinall(threads)
Exemple #11
0
class AWSLogs(object):
    """Stream and merge CloudWatch Logs events across groups/streams.

    A pool of publisher greenlets fetches pages of log events into
    ``raw_events_queue`` (a priority queue keyed by timestamp); a single
    consumer re-orders them globally and feeds ``events_queue``, which a
    printer greenlet drains to stdout.
    """

    # Stream states tracked in ``stream_status``.
    ACTIVE = 1
    EXHAUSTED = 2
    # Seconds to sleep when idle in --watch mode.
    WATCH_SLEEP = 2

    def __init__(self, **kwargs):
        self.connection_cls = kwargs.get('connection_cls', AWSConnection)
        self.aws_region = kwargs.get('aws_region')
        self.aws_access_key_id = kwargs.get('aws_access_key_id')
        self.aws_secret_access_key = kwargs.get('aws_secret_access_key')
        self.log_group_name = kwargs.get('log_group_name')
        self.log_stream_name = kwargs.get('log_stream_name')
        self.watch = kwargs.get('watch')
        self.color_enabled = kwargs.get('color_enabled')
        self.output_stream_enabled = kwargs.get('output_stream_enabled')
        self.output_group_enabled = kwargs.get('output_group_enabled')
        self.start = self.parse_datetime(kwargs.get('start'))
        self.end = self.parse_datetime(kwargs.get('end'))
        # Floor of 10 workers regardless of the requested pool size
        # (presumably to keep enough publishers alive — confirm intent).
        self.pool_size = max(kwargs.get('pool_size', 0), 10)
        self.max_group_length = 0
        self.max_stream_length = 0
        # Bug fix: ``self.publishers`` was initialized twice; once is enough.
        self.publishers = []
        self.events_queue = Queue()
        self.raw_events_queue = PriorityQueue()
        self.publishers_queue = PriorityQueue()
        self.stream_status = {}
        self.stream_max_timestamp = {}

        self.connection = self.connection_cls(
            self.aws_region,
            aws_access_key_id=self.aws_access_key_id,
            aws_secret_access_key=self.aws_secret_access_key)

    def _get_streams_from_patterns(self, log_group_pattern,
                                   log_stream_pattern):
        """Returns pairs of group, stream matching ``log_group_pattern`` and
        ``log_stream_pattern``."""
        for group in self._get_groups_from_pattern(log_group_pattern):
            for stream in self._get_streams_from_pattern(
                    group, log_stream_pattern):
                yield group, stream

    def _get_groups_from_pattern(self, pattern):
        """Returns groups matching ``pattern`` ('ALL' matches everything)."""
        pattern = '.*' if pattern == 'ALL' else pattern
        reg = re.compile('^{0}'.format(pattern))
        for group in self.get_groups():
            if re.match(reg, group):
                yield group

    def _get_streams_from_pattern(self, group, pattern):
        """Returns streams in ``group`` matching ``pattern``."""
        pattern = '.*' if pattern == 'ALL' else pattern
        reg = re.compile('^{0}'.format(pattern))
        for stream in self.get_streams(group):
            if re.match(reg, stream):
                yield stream

    def _publisher_queue_consumer(self):
        """Consume ``publishers_queue`` api calls, run them and publish log
        events to ``raw_events_queue``. If ``nextForwardToken`` is present
        register a new api call into ``publishers_queue`` using as weight
        the timestamp of the latest event."""
        while True:
            try:
                _, (log_group_name, log_stream_name,
                    next_token) = self.publishers_queue.get(block=False)
            except Empty:
                # Nothing pending: poll again in watch mode, quit otherwise.
                if self.watch:
                    gevent.sleep(self.WATCH_SLEEP)
                    continue
                else:
                    break

            response = self.connection.get_log_events(
                next_token=next_token,
                log_group_name=log_group_name,
                log_stream_name=log_stream_name,
                start_time=self.start,
                end_time=self.end,
                start_from_head=True)

            if not len(response['events']):
                # An empty page means this stream has no more events.
                self.stream_status[(log_group_name,
                                    log_stream_name)] = self.EXHAUSTED
                continue

            self.stream_status[(log_group_name, log_stream_name)] = self.ACTIVE

            for event in response['events']:
                event['group'] = log_group_name
                event['stream'] = log_stream_name
                self.raw_events_queue.put((event['timestamp'], event))
                self.stream_max_timestamp[(
                    log_group_name, log_stream_name)] = event['timestamp']

            if 'nextForwardToken' in response:
                # Re-enqueue the stream weighted by its newest timestamp so
                # streams that lag behind are fetched first.
                self.publishers_queue.put((response['events'][-1]['timestamp'],
                                           (log_group_name, log_stream_name,
                                            response['nextForwardToken'])))

    def _get_min_timestamp(self):
        """Return the minimum timestamp available across all active streams."""
        pending = [
            self.stream_max_timestamp[k]
            for k, v in self.stream_status.items() if v != self.EXHAUSTED
        ]
        return min(pending) if pending else None

    def _get_all_streams_exhausted(self):
        """Return if all streams are exhausted."""
        return all(
            (s == self.EXHAUSTED for s in self.stream_status.values()))

    def _raw_events_queue_consumer(self):
        """Consume events from ``raw_events_queue`` if all active streams
        have already publish events up to the ``_get_min_timestamp`` and
        register them in order into ``events_queue``."""
        while True:
            if self._get_all_streams_exhausted(
            ) and self.raw_events_queue.empty():
                if self.watch:
                    gevent.sleep(self.WATCH_SLEEP)
                    continue
                self.events_queue.put(NO_MORE_EVENTS)
                break

            try:
                timestamp, line = self.raw_events_queue.peek(timeout=1)
            except Empty:
                continue

            # Hold back this event while some active stream might still
            # publish an older one, to keep global ordering.
            min_timestamp = self._get_min_timestamp()
            if min_timestamp and min_timestamp < timestamp:
                gevent.sleep(0.3)
                continue

            timestamp, line = self.raw_events_queue.get()

            output = [line['message']]
            if self.output_stream_enabled:
                output.insert(
                    0,
                    self.color(
                        line['stream'].ljust(self.max_stream_length, ' '),
                        'cyan'))
            if self.output_group_enabled:
                output.insert(
                    0,
                    self.color(line['group'].ljust(self.max_group_length, ' '),
                               'green'))
            self.events_queue.put("{0}\n".format(' '.join(output)))

    def _events_consumer(self):
        """Print events from ``events_queue`` as soon as they are available."""
        while True:
            event = self.events_queue.get(True)
            if event == NO_MORE_EVENTS:
                break
            sys.stdout.write(event)
            sys.stdout.flush()

    def list_logs(self):
        """Spawn the consumer/publisher greenlets and stream events."""
        self.register_publishers()

        pool = Pool(size=self.pool_size)
        pool.spawn(self._raw_events_queue_consumer)
        pool.spawn(self._events_consumer)

        if self.watch:
            pool.spawn(self.register_publishers_periodically)

        for i in range(self.pool_size):
            pool.spawn(self._publisher_queue_consumer)
        pool.join()

    def register_publishers(self):
        """Register publishers into ``publishers_queue``."""
        for group, stream in self._get_streams_from_patterns(
                self.log_group_name, self.log_stream_name):
            if (group, stream) in self.publishers:
                continue
            self.publishers.append((group, stream))
            self.max_group_length = max(self.max_group_length, len(group))
            self.max_stream_length = max(self.max_stream_length, len(stream))
            self.publishers_queue.put((0, (group, stream, None)))
            self.stream_status[(group, stream)] = self.ACTIVE
            self.stream_max_timestamp[(group, stream)] = -1

    def register_publishers_periodically(self):
        """Watch mode: pick up newly created streams every 2 seconds."""
        while True:
            self.register_publishers()
            gevent.sleep(2)

    def list_groups(self):
        """Lists available CloudWatch logs groups"""
        for group in self.get_groups():
            print(group)

    def list_streams(self, *args, **kwargs):
        """Lists available CloudWatch logs streams in ``log_group_name``."""
        for stream in self.get_streams(*args, **kwargs):
            print(stream)

    def get_groups(self):
        """Returns available CloudWatch logs groups"""
        next_token = None
        while True:
            response = self.connection.describe_log_groups(
                next_token=next_token)

            for group in response.get('logGroups', []):
                yield group['logGroupName']

            if 'nextToken' in response:
                next_token = response['nextToken']
            else:
                break

    def get_streams(self, log_group_name=None):
        """Returns available CloudWatch logs streams in ``log_group_name``."""
        log_group_name = log_group_name or self.log_group_name
        next_token = None
        window_start = self.start or 0
        # sys.maxsize exists on both Python 2.6+ and 3 (maxint is Py2-only).
        window_end = self.end or sys.maxsize

        while True:
            response = self.connection.describe_log_streams(
                log_group_name=log_group_name, next_token=next_token)

            for stream in response.get('logStreams', []):
                # Keep only streams whose event window overlaps [start, end].
                if max(stream['firstEventTimestamp'], window_start) <= \
                   min(stream['lastEventTimestamp'], window_end):
                    yield stream['logStreamName']

            if 'nextToken' in response:
                next_token = response['nextToken']
            else:
                break

    def color(self, text, color):
        """Returns coloured version of ``text`` if ``color_enabled``."""
        if self.color_enabled:
            return colored(text, color)
        return text

    def parse_datetime(self, datetime_text):
        """Parse ``datetime_text`` into epoch milliseconds (or None).

        Accepts relative forms like "5 m", "2 hours ago" as well as any
        absolute date the ``parse`` helper understands.
        """
        if not datetime_text:
            return None

        # Typo fix: the alternation listed "weeks" twice; only unit[0] is
        # used below, so 'm|h|d|w' prefixes decide the unit either way.
        ago_match = re.match(
            r'(\d+)\s?(m|minute|minutes|h|hour|hours|d|day|days|w|week|weeks)(?: ago)?',
            datetime_text)
        if ago_match:
            amount, unit = ago_match.groups()
            amount = int(amount)
            unit = {'m': 60, 'h': 3600, 'd': 86400, 'w': 604800}[unit[0]]
            date = datetime.now() + timedelta(seconds=unit * amount * -1)
        else:
            try:
                date = parse(datetime_text)
            except ValueError:
                raise exceptions.UnknownDateError(datetime_text)

        # NOTE(review): strftime("%s") is a non-portable glibc extension —
        # confirm target platforms before relying on it.
        return int(date.strftime("%s")) * 1000
Exemple #12
0
class SubNameBrute:
    def __init__(self, target, options):
        # 设置优先级
        self.queue = PriorityQueue()
        self.priority = 0

        # 根据参数进行基本设置
        self.target = target.strip()
        self.options = options
        self.ignore_intranet = options.get('ignore_intranet')

        # 是否用大字典
        if self.options.get('subnames_full'):
            outfile_name+='_sfull'
        if self.options.get('next_sub_full'):
            outfile_name += '_nfull'

        # 根据主域名确定结果文件名称
        outfile_name = options.get('file') if options.get('file') else(target)
        self.fname = 'results/'+outfile_name+'.txt'
        self.outfile = open('results/'+outfile_name+'.txt', 'wb')
        self.outfile_ips = open('results/'+outfile_name+'_ip.txt', 'w')

        # 设置dns解析器 (根据预设的线程数量初始化dns resolver)
        # QUESTION: configure = False还是不太明白 为什么要不以/etc/resolv.conf的常规常规配置??
        self.resolvers = [dns.resolver.Resolver(configure=False) for _ in range(options.get('threads'))]
        for _ in self.resolvers:
            '''
            dns.resolver.Resolver: http://www.dnspython.org/docs/1.14.0/dns.resolver.Resolver-class.html
            dns.resolver.Resolver.lifetime: The total number of seconds to spend trying to get an answer to the question.
            dns.resolver.Resolver.timeout: The number of seconds to wait for a response from a server, before timing out.
            '''
            # QUESTION:lifetime 与 timeout 什么区别?
            _.lifetime = _.timeout = 10.0

        # 加载dns服务器列表
        self._load_dns_servers()
        # self.ex_resolver是备用的在出现except时使用的dns_resolver
        self.ex_resolver = dns.resolver.Resolver(configure=False)
        self.ex_resolver.nameservers = self.dns_servers
        self.logfile = open('results/'+target+'_log.txt','a')

        #set subdomain dct set
        self._load_next_sub()
        self._load_sub_names()

        #set init paras
        self.start_time = time.time()
        self.scan_count = 0
        self.found_count = 0 # 已验证过存在子域名的前缀
        self.STOP_ME = False
        self.ip_dict = {}
        self.found_subs = set()

    def _load_dns_servers(self):
        """
        功能:导入可用的名称服务器 (init初始化时执行)
        :return:
        """
        print '[+] Validate DNS servers ...'
        self.dns_servers = []
        pool = Pool(30)
        for server in open('dns_servers.txt').xreadlines():#xreadlines返回一个生成器
            server = server.strip()
            if server:
                # apply_async 并行
                pool.apply_async(self._test_server, (server,))#apply_async(func[, args[, kwds[, callback]]]) 它是非阻塞
        pool.join()#主进程阻塞,等待子进程的退出
        self.dns_count = len(self.dns_servers)
        sys.stdout.write('\n')
        print '[+] Found %s available DNS Servers in total' % self.dns_count
        if self.dns_count == 0:
            print '[ERROR] No DNS Servers available.'
            sys.exit(-1)

    def _test_server(self, server):
        '''
        功能:检测dns服务器是否可用(_load_dns_servers()在加载dns列表时会探测)
            检测思路:1.已存在域名可成功解析出ip;
                    2.不存在的域名解析则会出错.
        :param server:nameserver
        :return: 无
        '''
        resolver = dns.resolver.Resolver()
        resolver.lifetime = resolver.timeout = 10.0
        try:
            resolver.nameservers = [server]
            existed_domain = 'public-dns-a.baidu.com'
            corrs_ip = '180.76.76.76'
            answers = resolver.query(existed_domain)
            if answers[0].address != corrs_ip:
                raise Exception('incorrect DNS response')
            try:
                non_existed_domain = 'test.bad.dns.lijiejie.com'
                resolver.query(non_existed_domain)
                print '[+] Bad DNS Server found %s' % server
            except:
                self.dns_servers.append(server)
            print '[+] Check DNS Server %s < OK >   Found %s' % (server.ljust(16), len(self.dns_servers))
        except:
            print '[+] Check DNS Server %s <Fail>   Found %s' % (server.ljust(16), len(self.dns_servers))


    def _get_filename(self,option,is_full):
        '''
        功能:构造要打开字典文件的目录
        param: option: 字典的类型 subnames / next_sub
        param: is_full: 决定使用大字典还是小字典
        return: _file: 当前要加载字典的路径
        '''
        has_newdct = self.options.get('new_dct')
        if has_newdct:
            try:
                # 有新字典文件名,则加载新的字典
                next_sub,subnames = has_newdct.split(',')
            except Exception:
                print '[ERROR] Names file not exists: %s' % has_newdct
                exit(-1)
            else:
                # 若新字典名next_sub,subnames加载异常,则打开原来的字典
                self.new_filenames = {
                    'next_sub':'dict/'+next_sub,
                    'subnames':'dict/'+subnames
                }
                filename = self.new_filenames.get(option)
                if os.path.exists(filename):
                    _file = filename
                else:
                    print '[ERROR] Names file not exists: %s' % filename
                    exit(-1)
        elif is_full:
            _file = 'dict/'+option+'_full.txt'
        else:
            _file = 'dict/'+option+'.txt'

        return _file

    def _load_sub_names(self):
        """Load subdomain-prefix candidates into the priority queue.

        Literal names get increasing priorities (preserving dictionary
        order); names containing {alphnum}/{alpha}/{num} wildcards are queued
        with a large constant priority so they are processed last.  Literal
        names already covered by a wildcard pattern are dropped.
        """

        print '[+] Load sub names ...'
        is_full = self.options.get('subnames_full')
        # _file is the full path of the dictionary file
        _file = self._get_filename('subnames',is_full)
        normal_lines = []
        wildcard_list = []
        regex_list = []
        lines = set()
        with open(_file) as f:
            wildcard_lines = []
            for line in f.xreadlines():
                sub = line.strip()
                print 'sub:' + sub
                if not sub or sub in lines:
                    continue
                lines.add(sub)
                # wildcard templates
                # NOTE(review): the stock dictionaries may contain no wildcards at all
                if sub.find('{alphnum}') >= 0 or sub.find('{alpha}') >= 0 or sub.find('{num}') >= 0:
                    # remember the raw template so it can be queued below
                    wildcard_lines.append(sub)
                    sub = sub.replace('{alphnum}', '[a-z0-9]')
                    sub = sub.replace('{alpha}', '[a-z]')
                    sub = sub.replace('{num}', '[0-9]')
                    print 'sun2: ' + sub
                    if sub not in wildcard_list:
                        # keep the regex form to filter literal names the
                        # template already covers
                        wildcard_list.append(sub)
                        regex_list.append('^' + sub + '$')
                else:
                    # plain names without wildcards
                    normal_lines.append(sub)
        pattern = '|'.join(regex_list)
        if pattern:
            _regex = re.compile(pattern)
            if _regex:
                # drop literal names already matched by a wildcard pattern
                for line in normal_lines[:]:
                    if _regex.search(line):
                        normal_lines.remove(line)

        for item in normal_lines:
            # increasing priority keeps FIFO (dictionary) order in the PriorityQueue
            self.priority = self.priority+1
            self.queue.put((self.priority, item))

        for item in wildcard_lines:
            # wildcard templates expand to many probes, so a large number
            # (= low priority) pushes them behind all literal names
            self.queue.put((88888888, item))

    def _load_next_sub(self):
        """
        枚举一、二位子域并添加已存子域
        :return:
        """
        self.next_subs = []
        _set = set()
        is_full = self.options.get('next_sub_full')
        #  _file是nett_sub完整的路径
        _file = self._get_filename('next_sub',is_full)
        with open(_file) as f:
            for line in f:
                sub = line.strip()
                if sub and sub not in self.next_subs:
                    #  利用{alphnum}等通配符组合新的子串
                    # QUESTION:但原文件中的其他子串没有用?
                    tmp_set = {sub} # 相当于tep_set = set(sub)
                    while len(tmp_set) > 0:
                        item = tmp_set.pop()
                        # print 'item: ' + item
                        if item.find('{alphnum}') >= 0:
                            for _letter in 'abcdefghijklmnopqrstuvwxyz0123456789':
                            # for _letter in 'ab89':
                                # 如果是{alphnum}{alphnum},则将'abcdefghijklmnopqrstuvwxyz0123456789' 两两组合的结果加入了tmp_set
                                tt = item.replace('{alphnum}', _letter, 1)
                                tmp_set.add(tt)
                        elif item.find('{alpha}') >= 0:
                            for _letter in 'abcdefghijklmnopqrstuvwxyz':
                                tmp_set.add(item.replace('{alpha}', _letter, 1))
                        elif item.find('{num}') >= 0:
                            for _letter in '0123456789':
                                tmp_set.add(item.replace('{num}', _letter, 1))
                        elif item not in _set:
                            # 当所有的{alphnum}等通配符都被replace完后,将被加入到_set / self.next_subs
                            # 原文件中不包括通配符的子串直接加入了_set,也加入了self.next_subs
                            _set.add(item)
                            self.next_subs.append(item)


    @staticmethod
    # 判断是否是内网ip
    def is_intranet(ip):
        ret = ip.split('.')
        if len(ret) != 4:
            return True
        if ret[0] == '10':
            return True
        if ret[0] == '172' and 16 <= int(ret[1]) <= 32:
            return True
        if ret[0] == '192' and ret[1] == '168':
            return True
        return False

    def put_item(self, item):
        """Queue a candidate prefix for probing.

        Literal names get monotonically increasing priorities (FIFO order);
        wildcard templates are pushed far back so concrete names resolve
        first (a larger number means a lower priority).
        """
        wildcard_count = (item.count('{alphnum}')
                          + item.count('{alpha}')
                          + item.count('{num}'))
        if wildcard_count:
            # each placeholder multiplies the eventual probe count, so defer
            # templates proportionally to how many they contain
            self.queue.put((self.priority + wildcard_count * 10000000, item))
        else:
            self.priority += 1
            self.queue.put((self.priority, item))

    def _universal_parsing(self,sub,ips):
        # 统计数量,与泛解析有关
        _sub = sub.split('.')[-1]
        # (_sub,ips)前缀与该前缀构成的子域名所得ip QUESTION:这和泛解析什么关系???
        '''
        a.b.baidu.com 与 a.baidu.com ,它们的_sub都是'a.',当他们解析到相同的A记录时,
        则有可能其他_sub同为'a.'(最左侧一级为a.)的子域名也会解析到同样的ip,存在泛解析
        '''
        if (_sub, ips) not in self.ip_dict:
            self.ip_dict[(_sub, ips)] = 1
        else:
            self.ip_dict[(_sub, ips)] += 1

        # 计数:一组ips被多少个sub解析到  (如果一组ips被多组sub解析到,则可能是泛解析)
        if ips not in self.ip_dict:
            self.ip_dict[ips] = 1
        else:
            self.ip_dict[ips] += 1

        return True if self.ip_dict[(_sub, ips)] > 3 or self.ip_dict[ips] > 6 else(False)

    def _validate_subdomain(self,j,sub):
        '''
        Check whether the candidate subdomain `sub + '.' + self.target`
        actually resolves and is worth keeping.

        Resolver *j* is tried first; on NoAnswer the shared backup resolver
        is used.  Returns False for names that do not resolve, for
        blackhole/loopback answers, (optionally) for intranet addresses and
        for suspected wildcard-DNS hits; True otherwise.  Side effects: adds
        `sub` to self.found_subs and stores the sorted A-record string in
        self.cur_ips.
        '''

        # build the fully qualified candidate domain
        subdmname = sub + '.' + self.target

        try:
            answers = self.resolvers[j].query(subdmname)
        except dns.resolver.NoAnswer:
            try:
                # primary resolver gave no answer: retry on the backup resolver
                answers = self.ex_resolver.query(subdmname)
            except dns.resolver.NoAnswer:
                # both resolvers failed, treat the name as non-existent
                return False
        if answers:
            # remember the prefix so it is not probed again
            # NOTE(review): prefixes that fail the later filters are still
            # recorded here, while non-resolving ones are never recorded
            self.found_subs.add(sub)
            # canonical, sorted A-record string (stable key for wildcard detection)
            ips = ', '.join(sorted([answer.address for answer in answers]))
            print ips
            self.cur_ips = ips
            # common blackhole / loopback answers are useless results
            if ips in ['1.1.1.1', '127.0.0.1', '0.0.0.0']:
                return False
            # optionally filter intranet addresses
            # self.ignore_intranet toggles the filter;
            # SubNameBrute.is_intranet does the actual range test
            # NOTE(review): only answers[0] is tested, later records are ignored
            if self.ignore_intranet and SubNameBrute.is_intranet(answers[0].address):
                return False
            # suspected wildcard DNS (too many names mapping to the same ips)
            if self._universal_parsing(sub, ips):
                return False
        else:
            return False

        return True

    def _scan_cname(self,j,subdmname):
        '''
        功能:检测子域名的cname是否是新的子域名,是否可以得到新的前缀
        '''
        try:
            self.scan_count += 1
            # subdmname是已经验证有效的子域名,现获取其cname
            answers = self.resolvers[j].query(subdmname, 'cname')
            cname = answers[0].target.to_unicode().rstrip('.')
            # cname.endswith(self.target)判断cname是不是子域名
            if cname.endswith(self.target) and cname not in self.found_subs:
                # 将是子域名的cname加入到self.found_subs

                self.found_subs.add(cname)
                # 假设cname是'www.a.shifen.com',target是'shifen.com',,则cname_sub是'www.a'
                # 当cname是子域时,将i其前缀再次加入队列,当此前缀在不同级上时,可能构成新的子域
                cname_sub = cname[:len(cname) - len(self.target) - 1]  # new sub
                self.queue.put((0, cname_sub))
        except:
            pass

    def _scan(self, j):
        """Worker *j*: pull prefixes off the queue, expand wildcard
        templates, validate candidates via DNS and persist every hit until
        the queue drains."""
        # pin this worker to one of the validated DNS servers (round robin)
        self.resolvers[j].nameservers = [self.dns_servers[j % self.dns_count]]

        while not self.queue.empty():
            try:
                # fetch one prefix, dropping the priority component
                item = self.queue.get(timeout=1.0)[1]
                self.scan_count += 1
            except:
                break
            try:
                # _load_sub_names queues raw wildcard templates, so expand
                # them here one placeholder at a time and re-queue the results
                if item.find('{alphnum}') >= 0:
                    for _letter in 'abcdefghijklmnopqrstuvwxyz0123456789':
                        self.put_item(item.replace('{alphnum}', _letter, 1))
                    continue
                elif item.find('{alpha}') >= 0:
                    for _letter in 'abcdefghijklmnopqrstuvwxyz':
                        self.put_item(item.replace('{alpha}', _letter, 1))
                    continue
                elif item.find('{num}') >= 0:
                    for _letter in '0123456789':
                        self.put_item(item.replace('{num}', _letter, 1))
                    continue
                elif item.find('{next_sub}') >= 0:
                    # substitute every known next-level prefix, at top priority
                    for _ in self.next_subs:
                        self.queue.put((0, item.replace('{next_sub}', _, 1)))
                    continue
                else:
                    sub = item

                # skip prefixes that were already validated
                if sub in self.found_subs:
                    continue

                if self._validate_subdomain(j,sub):
                    cur_sub_domain = sub+'.'+self.target
                    self._scan_cname(j,cur_sub_domain) # the CNAME may reveal another sub of the target
                    self.found_count += 1
                    # flush after every hit so results survive an abrupt exit
                    self.outfile.write(cur_sub_domain+'\n')
                    '''
                    关于flush与write:http://blog.csdn.net/fenfeiqinjian/article/details/49444973
                        一般的文件流操作都包含缓冲机制,write方法并不直接将数据写入文件,而是先写入内存中特定的缓冲区。
                        flush方法是用来刷新缓冲区的,即将缓冲区中的数据立刻写入文件,同时清空缓冲区
                        正常情况下缓冲区满时,操作系统会自动将缓冲数据写入到文件中。
                        至于close方法,原理是内部先调用flush方法来刷新缓冲区,再执行关闭操作,这样即使缓冲区数据未满也能保证数据的完整性。
                        如果进程意外退出或正常退出时而未执行文件的close方法,缓冲区中的内容将会丢失
                    '''
                    self.outfile.flush()
                    self.outfile_ips.write(self.cur_ips+'\n')
                    self.outfile_ips.flush()
                    print cur_sub_domain
                    # prepend '{next_sub}.' so deeper-level names under this
                    # hit get probed too (lowest priority)
                    self.queue.put((999999999, '{next_sub}.' + sub))

            except (dns.resolver.NXDOMAIN, dns.name.EmptyLabel):
                pass
            except (dns.resolver.NoNameservers, dns.resolver.NoAnswer, dns.exception.Timeout):
                pass
            except Exception:
                pass
            print "scan_count=%s,found_count=%s"%(self.scan_count,self.found_count)

    def run(self):
        """Spawn the worker greenlets, wait for them to finish, then write
        timing and per-level domain statistics to the log file."""
        # i identifies the greenlet and selects its DNS resolver inside _scan
        threads = [gevent.spawn(self._scan, i) for i in range(self.options.get('threads'))]

        try:
            gevent.joinall(threads)
        except KeyboardInterrupt:
            print '[WARNING] User aborted.'

        self.end_time = time.time()
        # elapsed time expressed in seconds / minutes / hours
        s = (self.end_time-self.start_time)
        m = ((self.end_time - self.start_time)/60)
        h = ((self.end_time - self.start_time) / 3600)

        self.logfile.write(self.fname+'\n')
        result = "scan_count=%s,found_count=%s"%(self.scan_count,self.found_count)
        self.logfile.write(result+'\n')
        time_consuming = "time-consuming:%d seconds"%s
        print time_consuming
        self.logfile.write(time_consuming+'\n')
        time_consuming = "time-consuming:%d minutes" % m
        print time_consuming
        self.logfile.write(time_consuming+'\n')
        time_consuming = "time-consuming:%d hours" % h
        print time_consuming
        self.logfile.write(time_consuming+'\n')
        # count 2nd/3rd/4th/5th-level domains in the result file
        # (self.fname is the output file name)
        ocount, bcount, tcount, fcount = self.get_distribution(self.fname)

        subdomain_count = '二级域名数量: %d' % ocount
        print subdomain_count
        self.logfile.write(subdomain_count+'\n')
        subdomain_count = '三级域名数量: %d' % bcount
        print subdomain_count
        self.logfile.write(subdomain_count+'\n')
        subdomain_count = '四级域名数量: %d' % tcount
        print subdomain_count
        self.logfile.write(subdomain_count+'\n')
        subdomain_count = '五级域名数量: %d' % fcount
        print subdomain_count
        self.logfile.write(subdomain_count+'\n')

        self.outfile.flush()
        self.outfile.close()
        self.outfile_ips.flush()
        self.outfile_ips.close()

    def get_distribution(self,filename):
        '''
        Count the lines of the result file *filename* by domain depth.

        Returns a tuple (ocount, bcount, tcount, fcount) with the number of
        lines containing exactly 2, 3, 4 and any other number of dots
        respectively (i.e. 2nd/3rd/4th/other-level domains).

        The file is read as text: the original opened it in binary mode,
        which makes ``bytes.count('.')`` with a str argument raise TypeError
        under Python 3, and binary mode was unnecessary anyway.
        '''
        ocount = bcount = tcount = fcount = 0
        # stream the file instead of materialising readlines()
        with open(filename) as f:
            for line in f:
                dots = line.strip().count('.')
                if dots == 2:
                    ocount += 1
                elif dots == 3:
                    bcount += 1
                elif dots == 4:
                    tcount += 1
                else:
                    fcount += 1

        return ocount, bcount, tcount, fcount
class NetworkManager(object):

    """网络控制类"""

    logger = logging.getLogger('Crawler.NetworkManager')

    def __init__(self, crawler):
        self._crawler = crawler
        self.proxy_pool = Queue()
        self._proxy_lock = RLock()
        max_connections = crawler.max_connections
        self._request_queue = PriorityQueue()
        self._request_semaphore = BoundedSemaphore(max_connections)

    def join(self):
        """Block until every request queued on this manager has been sent."""
        while True:
            if self._request_queue.empty():
                break
            # poll periodically instead of joining the workers directly
            gevent.sleep(5)

    def request(self, method, url, **kwargs):
        """阻塞请求一个url。

        :param method:
        :param url:
        :param kwargs: 同add_request
        :return: :rtype: :raise err:
        """

        # 构造默认HTTP头
        default_header = {
            'Accept': self._crawler.accept_mine,
            'Accept-Language': self._crawler.accept_language,
            'User-Agent': self._crawler.user_agent,
            'Connection': 'keep-alive',
            'Accept-Encoding': 'gzip, deflate'
        }

        # 如果没有设置headers就使用全局设置
        kwargs['headers'] = kwargs.pop('headers', {})
        default_header.update(kwargs['headers'])
        kwargs['headers'] = default_header

        # 如果没有设置timeout就使用全局设置
        kwargs['timeout'] = kwargs.pop('timeout',
                                       self._crawler.timeout)

        session = requests.Session()
        session.max_redirects = self._crawler.max_redirects

        kwargs['cookies'] = kwargs.pop('cookies', {})

        # 设置代理
        kwargs['proxies'] = kwargs.pop('proxies', self._crawler.proxies)

        try_times = 0

        while try_times <= self._crawler.max_retries:
            try_times += 1
            try:
                self.logger.debug('[%s]>> %s' % (method.upper(), url))
                response = session.request(method, url, **kwargs)

                if self._crawler.retry_with_no_content and not response.content:
                    self.logger.warning('Page have no content.')
                    raise NoContent

                if self._crawler.retry_with_broken_content and '</html>' not in response.content:
                    self.logger.warning('Page content has been breaken.')
                    raise BreakenContent

                if response.status_code in self._crawler.do_not_retry_with_server_error_code:
                    self.logger.warning(
                        'Something wrong with server,but we DO NOT retry with it.')
                    raise ServerErrorWithoutRetry(
                        'Error Code:%s' % response.status_code)

                # 遇到非200错误
                if response.status_code != 200 and response.status_code not in self._crawler.ignore_server_error_code:
                    self._crawler.on_server_error(response)

                    # self.logger.warning('Something wrong with server.')
                    # raise ServerError, 'Error Code:%s' % response.status_code

            except (ConnectionError, Timeout, socket.timeout, socket.error, TryAgain,), err:
                # 好恶心的做法,代理发生错误居然没有特定的Exception
                if kwargs['proxies'] and any(
                        urlsplit(proxy).hostname in str(err.message) for proxy in kwargs['proxies'].values()):
                    # 代理有问题就切换呗
                    self.logger.debug(
                        'Proxy %s seems go down.', kwargs['proxies'])
                    self.switch_proxy(kwargs['proxies'].values()[0])

                    # self._crawler.on_proxies_error(kwargs['proxies'][0])

                # 如果发生重试异常和空白页异常的,就进行重试,否则把异常往上爆
                if isinstance(err, ConnectionError) and not isinstance(err.message, MaxRetryError):
                    raise err

                sleep_time = self._crawler.sleep_seconds * try_times

                self.logger.debug(err)

                self.logger.info('Try again with %s after %s '
                                 'seconds' % (url, sleep_time))

                gevent.sleep(sleep_time)
            except BaseException, err:
                # TODO:不知道是不是这里有捕获不了的gevent超时,稳定后删除。
                self.logger.error(type(err))
                self.logger.error(err)
            else:
class SubNameBrute:
    """Brute-force subdomains of a target domain using gevent workers.

    Candidate prefixes come from a dictionary file (optionally containing
    {alphnum}/{alpha}/{num}/{next_sub} templates), are resolved against a
    pool of validated public DNS servers, filtered for wildcard-DNS noise
    and written to an output file.  (Python 2 code: print statements,
    xreadlines, `except X, e` syntax.)
    """
    def __init__(self, target, options):
        """Validate DNS servers, load dictionaries and open the output file.

        `options` is the parsed command-line namespace (threads, i, file,
        full_scan, output, ...).
        """
        self.start_time = time.time()
        self.target = target.strip()
        self.options = options
        # -i flag: skip names that resolve to intranet addresses
        self.ignore_intranet = options.i
        self.scan_count = self.found_count = 0
        self.console_width = getTerminalSize()[0] - 2
        # one resolver per worker greenlet
        self.resolvers = [dns.resolver.Resolver(configure=False) for _ in range(options.threads)]
        for _ in self.resolvers:
            _.lifetime = _.timeout = 10.0
        self.print_count = 0
        self.STOP_ME = False
        self._load_dns_servers()
        self._load_next_sub()
        self.queue = PriorityQueue()
        self.priority = 0
        self._load_sub_names()
        if options.output:
            outfile = options.output
        else:
            # derive the output name from the dictionary file name
            _name = os.path.basename(self.options.file).replace('subnames', '')
            if _name != '.txt':
                _name = '_' + _name
            outfile = target + _name if not options.full_scan else target + '_full' + _name
        self.outfile = open(outfile, 'w')
        # (last-label, ips) / ips hit counters used for wildcard-DNS detection
        self.ip_dict = {}
        self.found_subs = set()
        # fallback resolver used when a per-worker resolver returns NoAnswer
        self.ex_resolver = dns.resolver.Resolver(configure=False)
        self.ex_resolver.nameservers = self.dns_servers

    def _load_dns_servers(self):
        """Concurrently validate the servers listed in dict/dns_servers.txt
        and keep the trustworthy ones in self.dns_servers; abort when none
        survive."""
        print '[+] Validate DNS servers ...'
        self.dns_servers = []
        pool = Pool(30)
        for server in open('dict/dns_servers.txt').xreadlines():
            server = server.strip()
            if server:
                pool.apply_async(self._test_server, (server,))
        pool.join()

        self.dns_count = len(self.dns_servers)
        sys.stdout.write('\n')
        print '[+] Found %s available DNS Servers in total' % self.dns_count
        if self.dns_count == 0:
            print '[ERROR] No DNS Servers available.'
            sys.exit(-1)

    def _test_server(self, server):
        """Keep *server* only if it resolves a known name to the expected
        address AND fails on a non-existent name (i.e. does not hijack
        missing records)."""
        resolver = dns.resolver.Resolver()
        resolver.lifetime = resolver.timeout = 10.0
        try:
            resolver.nameservers = [server]
            answers = resolver.query('public-dns-a.baidu.com')    # test lookup a existed domain
            if answers[0].address != '180.76.76.76':
                raise Exception('incorrect DNS response')
            try:
                resolver.query('test.bad.dns.lijiejie.com')    # Non-existed domain test
                # the bogus name "resolved": this server hijacks NXDOMAIN
                with open('bad_dns_servers.txt', 'a') as f:
                    f.write(server + '\n')
                self._print_msg('[+] Bad DNS Server found %s' % server)
            except:
                # lookup failed as expected -> server is trustworthy
                self.dns_servers.append(server)
            self._print_msg('[+] Check DNS Server %s < OK >   Found %s' % (server.ljust(16), len(self.dns_servers)))
        except:
            self._print_msg('[+] Check DNS Server %s <Fail>   Found %s' % (server.ljust(16), len(self.dns_servers)))

    def _load_sub_names(self):
        """Fill the priority queue with candidate prefixes: literal names in
        dictionary order first, wildcard templates last, and literal names
        already covered by a template dropped."""
        self._print_msg('[+] Load sub names ...')
        if self.options.full_scan and self.options.file == 'subnames.txt':
            _file = 'dict/subnames_full.txt'
        else:
            if os.path.exists(self.options.file):
                _file = self.options.file
            elif os.path.exists('dict/%s' % self.options.file):
                _file = 'dict/%s' % self.options.file
            else:
                self._print_msg('[ERROR] Names file not exists: %s' % self.options.file)
                exit(-1)

        normal_lines = []
        wildcard_lines = []
        wildcard_list = []
        regex_list = []
        lines = set()
        with open(_file) as f:
            for line in f.xreadlines():
                sub = line.strip()
                if not sub or sub in lines:
                    continue
                lines.add(sub)

                if sub.find('{alphnum}') >= 0 or sub.find('{alpha}') >= 0 or sub.find('{num}') >= 0:
                    # keep the raw template and build a regex matching every
                    # name the template can expand to
                    wildcard_lines.append(sub)
                    sub = sub.replace('{alphnum}', '[a-z0-9]')
                    sub = sub.replace('{alpha}', '[a-z]')
                    sub = sub.replace('{num}', '[0-9]')
                    if sub not in wildcard_list:
                        wildcard_list.append(sub)
                        regex_list.append('^' + sub + '$')
                else:
                    normal_lines.append(sub)
        pattern = '|'.join(regex_list)
        if pattern:
            _regex = re.compile(pattern)
            if _regex:
                # drop literal names that a wildcard template already covers
                for line in normal_lines[:]:
                    if _regex.search(line):
                        normal_lines.remove(line)

        for item in normal_lines:
            # increasing priorities preserve dictionary (FIFO) order
            self.priority += 1
            self.queue.put((self.priority, item))

        for item in wildcard_lines:
            # templates expand to many probes: queue them behind all literals
            self.queue.put((88888888, item))

    def _load_next_sub(self):
        """Build self.next_subs, the next-level prefix candidates, expanding
        the {alphnum}/{alpha}/{num} placeholders to every combination."""
        self._print_msg('[+] Load next level subs ...')
        self.next_subs = []
        _set = set()
        _file = 'dict/next_sub.txt' if not self.options.full_scan else 'dict/next_sub_full.txt'
        with open(_file) as f:
            for line in f:
                sub = line.strip()
                if sub and sub not in self.next_subs:
                    # expand one placeholder per pass until none remain
                    tmp_set = {sub}
                    while len(tmp_set) > 0:
                        item = tmp_set.pop()
                        if item.find('{alphnum}') >= 0:
                            for _letter in 'abcdefghijklmnopqrstuvwxyz0123456789':
                                tmp_set.add(item.replace('{alphnum}', _letter, 1))
                        elif item.find('{alpha}') >= 0:
                            for _letter in 'abcdefghijklmnopqrstuvwxyz':
                                tmp_set.add(item.replace('{alpha}', _letter, 1))
                        elif item.find('{num}') >= 0:
                            for _letter in '0123456789':
                                tmp_set.add(item.replace('{num}', _letter, 1))
                        elif item not in _set:
                            # fully expanded and not seen before: keep it
                            _set.add(item)
                            self.next_subs.append(item)

    def _print_msg(self, _msg=None, _found_msg=False):
        """Console status output.

        With no argument, print a throttled (1-in-100) progress line; DNS
        check messages overwrite the current line; anything else is printed
        on its own line, optionally followed by the progress summary.
        """
        if _msg is None:
            self.print_count += 1
            if self.print_count < 100:
                return
            self.print_count = 0
            msg = '%s Found| %s Groups| %s scanned in %.1f seconds' % (
                self.found_count, self.queue.qsize(), self.scan_count, time.time() - self.start_time)
            sys.stdout.write('\r' + ' ' * (self.console_width - len(msg)) + msg)
        elif _msg.startswith('[+] Check DNS Server'):
            sys.stdout.write('\r' + _msg + ' ' * (self.console_width - len(_msg)))
        else:
            sys.stdout.write('\r' + _msg + ' ' * (self.console_width - len(_msg)) + '\n')
            if _found_msg:
                msg = '%s Found| %s Groups| %s scanned in %.1f seconds' % (
                    self.found_count, self.queue.qsize(), self.scan_count, time.time() - self.start_time)
                sys.stdout.write('\r' + ' ' * (self.console_width - len(msg)) + msg)
        sys.stdout.flush()

    @staticmethod
    def is_intranet(ip):
        """Return True when *ip* looks like a private IPv4 address.

        NOTE(review): RFC 1918 defines 172.16.0.0/12 as 172.16.* .. 172.31.*;
        the `<= 32` bound below looks off by one — confirm before relying on
        the filter.
        """
        ret = ip.split('.')
        if len(ret) != 4:
            return True
        if ret[0] == '10':
            return True
        if ret[0] == '172' and 16 <= int(ret[1]) <= 32:
            return True
        if ret[0] == '192' and ret[1] == '168':
            return True
        return False

    def put_item(self, item):
        """Queue a prefix: literal names keep FIFO order via increasing
        priorities, wildcard templates get a large (= low) priority."""
        num = item.count('{alphnum}') + item.count('{alpha}') + item.count('{num}')
        if num == 0:
            self.priority += 1
            self.queue.put((self.priority, item))
        else:
            self.queue.put((self.priority + num * 10000000, item))

    def _scan(self, j):
        """Worker *j*: expand templates, resolve candidates, follow CNAMEs,
        filter wildcard-DNS noise and persist hits until the queue drains."""
        # pin this worker to one validated DNS server (round robin)
        self.resolvers[j].nameservers = [self.dns_servers[j % self.dns_count]]
        while not self.queue.empty():
            try:
                # fetch one prefix, dropping the priority component
                item = self.queue.get(timeout=1.0)[1]
                self.scan_count += 1
            except:
                break
            self._print_msg()
            try:
                # expand wildcard templates one placeholder at a time
                if item.find('{alphnum}') >= 0:
                    for _letter in 'abcdefghijklmnopqrstuvwxyz0123456789':
                        self.put_item(item.replace('{alphnum}', _letter, 1))
                    continue
                elif item.find('{alpha}') >= 0:
                    for _letter in 'abcdefghijklmnopqrstuvwxyz':
                        self.put_item(item.replace('{alpha}', _letter, 1))
                    continue
                elif item.find('{num}') >= 0:
                    for _letter in '0123456789':
                        self.put_item(item.replace('{num}', _letter, 1))
                    continue
                elif item.find('{next_sub}') >= 0:
                    # substitute every known next-level prefix, top priority
                    for _ in self.next_subs:
                        self.queue.put((0, item.replace('{next_sub}', _, 1)))
                    continue
                else:
                    sub = item

                if sub in self.found_subs:
                    continue

                cur_sub_domain = sub + '.' + self.target
                _sub = sub.split('.')[-1]
                try:
                    answers = self.resolvers[j].query(cur_sub_domain)
                except dns.resolver.NoAnswer, e:
                    # fall back to the shared backup resolver
                    answers = self.ex_resolver.query(cur_sub_domain)

                if answers:
                    self.found_subs.add(sub)
                    # canonical sorted A-record string, used as a stable key
                    ips = ', '.join(sorted([answer.address for answer in answers]))
                    # common blackhole / loopback answers are useless
                    if ips in ['1.1.1.1', '127.0.0.1', '0.0.0.0']:
                        continue

                    # NOTE(review): only answers[0] is tested for intranet
                    if self.ignore_intranet and SubNameBrute.is_intranet(answers[0].address):
                        continue

                    try:
                        self.scan_count += 1
                        # a CNAME pointing at another sub of the target
                        # yields a brand-new prefix to probe
                        answers = self.resolvers[j].query(cur_sub_domain, 'cname')
                        cname = answers[0].target.to_unicode().rstrip('.')
                        if cname.endswith(self.target) and cname not in self.found_subs:
                            self.found_subs.add(cname)
                            cname_sub = cname[:len(cname) - len(self.target) - 1]    # new sub
                            self.queue.put((0, cname_sub))

                    except:
                        pass

                    # wildcard-DNS heuristics: count repeats per
                    # (last-label, ips) pair and per bare ips set
                    if (_sub, ips) not in self.ip_dict:
                        self.ip_dict[(_sub, ips)] = 1
                    else:
                        self.ip_dict[(_sub, ips)] += 1

                    if ips not in self.ip_dict:
                        self.ip_dict[ips] = 1
                    else:
                        self.ip_dict[ips] += 1

                    if self.ip_dict[(_sub, ips)] > 3 or self.ip_dict[ips] > 6:
                        continue

                    self.found_count += 1
                    msg = cur_sub_domain.ljust(30) + ips
                    self._print_msg(msg, _found_msg=True)
                    self._print_msg()
                    self.outfile.write(cur_sub_domain.ljust(30) + '\t' + ips + '\n')
                    self.outfile.flush()
                    try:
                        # probe a bogus child name: only when it does NOT
                        # resolve (no wildcard under this hit) is it worth
                        # enumerating the next level
                        self.resolvers[j].query('lijiejietest.' + cur_sub_domain)
                    except (dns.resolver.NXDOMAIN, dns.resolver.NoAnswer) as e:
                        self.queue.put((999999999, '{next_sub}.' + sub))
                    except:
                        pass

            except (dns.resolver.NXDOMAIN, dns.name.EmptyLabel) as e:
                pass
            except (dns.resolver.NoNameservers, dns.resolver.NoAnswer, dns.exception.Timeout) as e:
                pass
Exemple #15
0
class SubNameBrute:
    def __init__(self, target, options):
        """Prepare resolvers, dictionaries and the work queue for *target*.

        `options` is the parsed command-line namespace (threads, i, file,
        full_scan, ...).  Discovered names are collected in self.subdomains.
        """
        # save result to list
        self.subdomains = list()
        self.start_time = time.time()
        self.target = target.strip()
        self.options = options
        # -i flag: skip names that resolve to intranet addresses
        self.ignore_intranet = options.i
        self.scan_count = self.found_count = 0
        self.console_width = getTerminalSize()[0] - 2
        # one resolver per worker greenlet
        self.resolvers = [dns.resolver.Resolver(configure=False) for _ in range(options.threads)]
        for _ in self.resolvers:
            _.lifetime = _.timeout = 10.0
        self.STOP_ME = False
        self._load_dns_servers()
        self._load_next_sub()
        self.queue = PriorityQueue()
        self.priority = 0
        self._load_sub_names()
        # (last-label, ips) / ips hit counters for wildcard-DNS detection
        self.ip_dict = {}
        self.found_subs = set()
        # fallback resolver used when a per-worker resolver returns NoAnswer
        self.ex_resolver = dns.resolver.Resolver(configure=False)
        self.ex_resolver.nameservers = self.dns_servers

    def _load_dns_servers(self):
        """Validate the servers in dict/dns_servers.txt concurrently, keep
        the trustworthy ones in self.dns_servers and abort if none survive."""
        self.dns_servers = []
        pool = Pool(30)
        for server in open('dict/dns_servers.txt').xreadlines():
            server = server.strip()
            if server:
                pool.apply_async(self._test_server, (server,))
        pool.join()

        self.dns_count = len(self.dns_servers)
        sys.stdout.write('\n')
        if self.dns_count == 0:
            # no usable DNS server -> scanning is impossible
            sys.exit(-1)

    def _test_server(self, server):
        """Probe *server* and append it to self.dns_servers only when it
        resolves a known name correctly and does not hijack a non-existent
        one; hijacking servers are logged to bad_dns_servers.txt."""
        probe = dns.resolver.Resolver()
        probe.lifetime = probe.timeout = 10.0
        try:
            probe.nameservers = [server]
            # sanity lookup: this name must resolve to Baidu's public DNS IP
            answers = probe.query('public-dns-a.baidu.com')    # test lookup a existed domain
            if answers[0].address != '180.76.76.76':
                raise Exception('incorrect DNS response')
            try:
                # a hijacking resolver would "answer" this bogus name
                probe.query('test.bad.dns.lijiejie.com')    # Non-existed domain test
                with open('bad_dns_servers.txt', 'a') as f:
                    f.write(server + '\n')
            except:
                # lookup failed as expected -> server is trustworthy
                self.dns_servers.append(server)
        except:
            pass
    def _load_sub_names(self):
        """Fill the priority queue with candidate prefixes from the chosen
        dictionary: literal names in dictionary order first, wildcard
        templates last, and literal names already covered by a template
        dropped."""
        if self.options.full_scan and self.options.file == 'subnames.txt':
            _file = 'dict/subnames_full.txt'
        else:
            if os.path.exists(self.options.file):
                _file = self.options.file
            elif os.path.exists('dict/%s' % self.options.file):
                _file = 'dict/%s' % self.options.file
            else:
                # dictionary file not found anywhere
                exit(-1)

        normal_lines = []
        wildcard_lines = []
        wildcard_list = []
        regex_list = []
        lines = set()
        with open(_file) as f:
            for line in f.xreadlines():
                sub = line.strip()
                if not sub or sub in lines:
                    continue
                lines.add(sub)

                if sub.find('{alphnum}') >= 0 or sub.find('{alpha}') >= 0 or sub.find('{num}') >= 0:
                    # keep the raw template and build a regex matching every
                    # name the template can expand to
                    wildcard_lines.append(sub)
                    sub = sub.replace('{alphnum}', '[a-z0-9]')
                    sub = sub.replace('{alpha}', '[a-z]')
                    sub = sub.replace('{num}', '[0-9]')
                    if sub not in wildcard_list:
                        wildcard_list.append(sub)
                        regex_list.append('^' + sub + '$')
                else:
                    normal_lines.append(sub)
        pattern = '|'.join(regex_list)
        if pattern:
            _regex = re.compile(pattern)
            if _regex:
                # drop literal names that a wildcard template already covers
                for line in normal_lines[:]:
                    if _regex.search(line):
                        normal_lines.remove(line)

        for item in normal_lines:
            # increasing priorities preserve dictionary (FIFO) order
            self.priority += 1
            self.queue.put((self.priority, item))

        for item in wildcard_lines:
            # templates expand to many probes: queue them behind all literals
            self.queue.put((88888888, item))

    def _load_next_sub(self):
        self.next_subs = []
        _set = set()
        _file = 'dict/next_sub.txt' if not self.options.full_scan else 'dict/next_sub_full.txt'
        with open(_file) as f:
            for line in f:
                sub = line.strip()
                if sub and sub not in self.next_subs:
                    tmp_set = {sub}
                    while len(tmp_set) > 0:
                        item = tmp_set.pop()
                        if item.find('{alphnum}') >= 0:
                            for _letter in 'abcdefghijklmnopqrstuvwxyz0123456789':
                                tmp_set.add(item.replace('{alphnum}', _letter, 1))
                        elif item.find('{alpha}') >= 0:
                            for _letter in 'abcdefghijklmnopqrstuvwxyz':
                                tmp_set.add(item.replace('{alpha}', _letter, 1))
                        elif item.find('{num}') >= 0:
                            for _letter in '0123456789':
                                tmp_set.add(item.replace('{num}', _letter, 1))
                        elif item not in _set:
                            _set.add(item)
                            self.next_subs.append(item)

   

    @staticmethod
    def is_intranet(ip):
        """Return True if *ip* is an RFC 1918 private IPv4 address.

        Malformed input (not four dotted parts, non-numeric octets) is
        treated as intranet (True) so callers skip it, matching the
        original's behavior for short addresses.
        """
        parts = ip.split('.')
        if len(parts) != 4:
            return True
        try:
            first, second = int(parts[0]), int(parts[1])
        except ValueError:
            # non-numeric octet: treat as not-public rather than crash
            return True
        if first == 10:
            return True
        # BUGFIX: 172.16.0.0/12 spans 172.16.x.x - 172.31.x.x; the original
        # used `<= 32`, wrongly classifying 172.32.x.x as intranet.
        if first == 172 and 16 <= second <= 31:
            return True
        if first == 192 and second == 168:
            return True
        return False

    def put_item(self, item):
        """Queue *item*, ranking wildcard templates far behind plain names.

        Plain names get the next sequential priority; templates are pushed
        back by 10,000,000 per remaining placeholder so they expand last.
        """
        wildcards = (item.count('{alphnum}')
                     + item.count('{alpha}')
                     + item.count('{num}'))
        if wildcards:
            self.queue.put((self.priority + wildcards * 10000000, item))
        else:
            self.priority += 1
            self.queue.put((self.priority, item))

    def _scan(self, j):
        """Worker loop: pop candidate names off the priority queue, expand
        wildcard templates, resolve the rest and record found subdomains.

        @param j: worker index; picks this worker's resolver and spreads
                  workers across the configured DNS servers.
        """
        # Pin this worker to one nameserver so load spreads evenly.
        self.resolvers[j].nameservers = [self.dns_servers[j % self.dns_count]]
        while not self.queue.empty():
            try:
                item = self.queue.get(timeout=1.0)[1]
                self.scan_count += 1
            except:
                # Queue drained (get timed out): this worker is done.
                break
            try:
                # Wildcard templates are expanded one placeholder at a time
                # and re-queued instead of being resolved directly.
                if item.find('{alphnum}') >= 0:
                    for _letter in 'abcdefghijklmnopqrstuvwxyz0123456789':
                        self.put_item(item.replace('{alphnum}', _letter, 1))
                    continue
                elif item.find('{alpha}') >= 0:
                    for _letter in 'abcdefghijklmnopqrstuvwxyz':
                        self.put_item(item.replace('{alpha}', _letter, 1))
                    continue
                elif item.find('{num}') >= 0:
                    for _letter in '0123456789':
                        self.put_item(item.replace('{num}', _letter, 1))
                    continue
                elif item.find('{next_sub}') >= 0:
                    # Expand with the preloaded next-level dictionary;
                    # priority 0 puts these at the front of the queue.
                    for _ in self.next_subs:
                        self.queue.put((0, item.replace('{next_sub}', _, 1)))
                    continue
                else:
                    sub = item

                if sub in self.found_subs:
                    continue

                cur_sub_domain = sub + '.' + self.target
                _sub = sub.split('.')[-1]
                try:
                    answers = self.resolvers[j].query(cur_sub_domain)
                except dns.resolver.NoAnswer, e:
                    # Fall back to the shared resolver on an empty answer.
                    answers = self.ex_resolver.query(cur_sub_domain)

                if answers:
                    self.found_subs.add(sub)
                    ips = ', '.join(sorted([answer.address for answer in answers]))
                    # Throw away obvious junk/blackhole answers.
                    if ips in ['1.1.1.1', '127.0.0.1', '0.0.0.0']:
                        continue

                    if self.ignore_intranet and SubNameBrute.is_intranet(answers[0].address):
                        continue

                    try:
                        self.scan_count += 1
                        # If the CNAME target is itself under the scan target
                        # and unseen, queue it as a new candidate.
                        answers = self.resolvers[j].query(cur_sub_domain, 'cname')
                        cname = answers[0].target.to_unicode().rstrip('.')
                        if cname.endswith(self.target) and cname not in self.found_subs:
                            self.found_subs.add(cname)
                            cname_sub = cname[:len(cname) - len(self.target) - 1]    # new sub
                            self.queue.put((0, cname_sub))

                    except:
                        pass

                    # Count repeats of (last-label, ip-set) and of the ip-set
                    # alone so wildcard-style repeated answers get suppressed.
                    if (_sub, ips) not in self.ip_dict:
                        self.ip_dict[(_sub, ips)] = 1
                    else:
                        self.ip_dict[(_sub, ips)] += 1

                    if ips not in self.ip_dict:
                        self.ip_dict[ips] = 1
                    else:
                        self.ip_dict[ips] += 1

                    if self.ip_dict[(_sub, ips)] > 3 or self.ip_dict[ips] > 6:
                        continue

                    self.found_count += 1
                    msg = cur_sub_domain.ljust(30) + ips

                    # save result to list
                    self.subdomains.append([cur_sub_domain,ips])


                    try:
                        # Probe a surely-nonexistent label: NXDOMAIN/NoAnswer
                        # means no wildcard DNS here, so brute forcing the
                        # next level under this sub is worthwhile.
                        self.resolvers[j].query('lijiejietest.' + cur_sub_domain)
                    except (dns.resolver.NXDOMAIN, dns.resolver.NoAnswer) as e:
                        self.queue.put((999999999, '{next_sub}.' + sub))
                    except:
                        pass

            except (dns.resolver.NXDOMAIN, dns.name.EmptyLabel) as e:
                pass
            except (dns.resolver.NoNameservers, dns.resolver.NoAnswer, dns.exception.Timeout) as e:
                pass
Exemple #16
0
class Scanner:
    """Console scanner: reads ip:port targets from a file into a queue and
    probes them for a few well-known weaknesses (open docker API on 2375,
    unauthenticated mongodb on 27017, unauthenticated/weak-password redis
    on 6379).  Findings are echoed and appended to log.log.

    NOTE: Python 2 code (print statements, raw_input, xreadlines,
    `except X, e` syntax).
    """
    def __init__(self):
        # wall-clock start, used by the progress line
        self.start_time = time.time()
        self.queue = PriorityQueue()
        self.history = []
        self.total_count = 0
        self.scan_count = 0
        self._load_target()
        self.outfile = open("log.log", 'w')
        # terminal width, used to right-align the progress message
        self.console_width = getTerminalSize()[0] - 2

    def _print_msg(self, _msg=None, _found_msg=False):
        # No message: redraw the right-aligned progress counter.
        # With a message: print it on its own line and persist it to the log
        # file; if it was a finding, redraw the progress counter afterwards.
        if _msg is None:
            msg = '%s TotalCount| %s Scanned in %.2f seconds' % (
                    self.total_count,self.total_count - self.queue.qsize(), time.time() - self.start_time)
            sys.stdout.write('\r' + ' ' * (self.console_width - len(msg)) + msg)
        else:
            sys.stdout.write('\r' + _msg + ' ' * (self.console_width - len(_msg)) + '\n')
            self.outfile.write(_msg + '\n')
            self.outfile.flush()
            if _found_msg:
                msg = '%s TotalCount| %s Scanned in %.2f seconds' % (
                        self.total_count,self.total_count - self.queue.qsize(), time.time() - self.start_time)
                sys.stdout.write('\r' + ' ' * (self.console_width - len(msg)) + msg)
        sys.stdout.flush()

    def _load_target(self):
        # Prompt for a target list file (one ip:port per line) and queue all
        # of its entries.
        print '[+] Read targets ...'
        target_file = raw_input("Target File :")
        with open(target_file) as f:
            for line in f.xreadlines():
                target = line.strip()
                self.queue.put(target)

        print "TotalCount is %d" % self.queue.qsize()
        self.total_count = self.queue.qsize()
        print "Now scanning ..."

    def _scan(self,case):
        # Worker loop: drain the queue, dispatching each target to the check
        # selected by *case* ("1".."5").
        while not self.queue.empty():
            target = self.queue.get()
            if case == "1":
                self.vulnCheck(target)
            if case == "2":
                self.s2_045(target)
            if case == "3":
                self.headers(target)
            if case == "4":
                self.weakfile(target)
            if case == "5":
                self.portscan_c(target)



#####################################################################
#                                                                   #
#    Vuln poc by:xi4okv QQ:48011203                                 #
#                                                                   #
#####################################################################

    def vulnCheck(self,target):
        # Docker remote API: an unauthenticated /containers/json endpoint
        # answering with JSON means the daemon is exposed.
        if ":2375" in target:
            try:
                res = requests.head("http://" + str(target) + "/containers/json",timeout=2)
                if res.headers['Content-Type'] == 'application/json':
                    self._print_msg(target + "==>  docker api Vuln",True)
                else:
                    self._print_msg()
            except:
                self._print_msg()
            # NOTE(review): this extra progress redraw runs on every path,
            # doubling up with the calls above — looks unintentional; confirm.
            self._print_msg()

        # MongoDB: send a pre-built isMaster wire-protocol packet; if a
        # getLog(startupWarnings) command then succeeds, no auth is required.
        if ":27017" in target:
            ip,port = target.split(":")
            try:
                socket.setdefaulttimeout(3)
                s = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
                s.connect((ip, int(port)))
                # hex-encoded isMaster query packet
                data = binascii.a2b_hex("3a000000a741000000000000d40700000000000061646d696e2e24636d640000000000ffffffff130000001069736d6173746572000100000000")
                s.send(data)
                result = s.recv(1024)
                if "ismaster" in result:
                    # hex-encoded getLog(startupWarnings) command packet
                    getlog_data = binascii.a2b_hex("480000000200000000000000d40700000000000061646d696e2e24636d6400000000000100000021000000026765744c6f670010000000737461727475705761726e696e67730000")
                    s.send(getlog_data)
                    result = s.recv(1024)
                    if "totalLinesWritten" in result:
                        self._print_msg(target + "==>  mongodb Vuln",True)
            except Exception, e:
                pass

        # Redis: INFO without auth means wide open; if auth is required, try
        # a handful of trivial passwords.
        if ":6379" in target:
            ip,port = target.split(":")
            try:
                socket.setdefaulttimeout(3)
                s = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
                s.connect((ip, int(port)))
                s.send("INFO\r\n")
                result = s.recv(1024)
                if "redis_version" in result:
                    self._print_msg(target + "==>  redis Vuln",True)
                elif "Authentication" in result:
                    for pass_ in ['123456','redis','pass','password']:
                        s = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
                        s.connect((ip, int(port)))
                        s.send("AUTH %s\r\n" % (pass_))
                        result = s.recv(1024)
                        if '+OK' in result:
                           self._print_msg(target + "==>  redis pass Vuln :" + pass_,True)
            except Exception, e:
                pass
Exemple #17
0
class JobSpider:
    """Concurrent crawler that scores pages with calc_text_weight /
    calc_name_url_weight and stops once any page's accumulated weight
    reaches 100.  Crawling stays within the seed URL's registered domain,
    except for off-domain links whose link weight is already >= 80.
    """

    def __init__(self, start_requests):
        """
        @param start_requests: seed Request object; its URL anchors the
                               crawl domain and gets the initial weight 0.
        """
        self.start_request = start_requests
        self.domain = tldextract.extract(self.start_request.url).domain

        # Max-priority behavior: higher link weight -> more negative key.
        self.request_queue = PriorityQueue()
        self.result = {
            start_requests.url: 0,
        }
        self.gl_list = []
        self.stop_flag = False

    def start(self, number):
        """Verify the seed URL is reachable, queue it, and spawn *number*
        downloader greenlets.

        @raises Exception: when the seed URL does not answer with HTTP 200.
        """
        resp = requests.get(self.start_request.url)
        if resp.status_code != 200:
            raise Exception('HTTPError<%d>' % resp.status_code)

        self.request_queue.put((0, self.start_request))
        for i in range(number):
            gl = gevent.spawn(self.downloader)
            self.gl_list.append(gl)
            gl.start()

    def stop(self):
        """Ask all downloader greenlets to exit after their current page."""
        self.stop_flag = True

    def join(self):
        """Block until every downloader greenlet has finished."""
        return gevent.joinall(self.gl_list)

    def downloader(self):
        """Worker loop: fetch the highest-weight queued request, score the
        page text, and queue in-domain (or high-scoring off-domain) links
        for further crawling until max_depth or the stop flag."""
        # Local import so the file-level import block stays untouched.
        from urllib.parse import urljoin

        a_re = re.compile(
            r'''<a.+?href=(['"])([^>\s]+)\1.*?>([\S\s]+?)<\/a>''',
            re.IGNORECASE)

        while not self.request_queue.empty():
            if self.stop_flag: break
            prio, request = self.request_queue.get()
            headers = {'User-Agent': choice(random_ua)}
            try:
                resp = requests.get(request.url, headers=headers)
            except Exception as e:
                continue

            # Decode with the detected charset; fall back to requests' guess.
            encoding = chardet.detect(resp.content)['encoding']
            html_text = resp.content.decode(
                encoding) if encoding is not None else resp.text
            self.result[request.url] += calc_text_weight(html_text)
            if self.result[request.url] >= 100:
                self.stop()
                break

            if request.depth == max_depth:
                continue

            matches = a_re.findall(html_text)
            for each_a in matches:
                href = each_a[1]
                name = each_a[2]
                if href.startswith('javascript'): continue
                # BUGFIX: root-relative links must be resolved against the
                # page URL with urljoin; the original concatenated strings
                # (request.url + href), yielding URLs like
                # http://host/page/path instead of http://host/path.
                if href.startswith('/'): href = urljoin(request.url, href)
                if href.startswith('http'):
                    new_request = Request(href, request.depth + 1)
                    self.result[href] = calc_name_url_weight(name, href)
                    if tldextract.extract(href).domain == self.domain:
                        self.request_queue.put(
                            (-self.result[href], new_request))
                    elif self.result[href] >= 80:
                        self.request_queue.put(
                            (-self.result[href], new_request))
class SyncClient:
    """Continuously pulls resource items from a paginated feed.

    Two workers traverse the feed in opposite directions (their starting
    params come from the `prev`/`next` links of the first response) and push
    items onto a bounded PriorityQueue, which get_resource_items() drains.
    A watcher greenlet restarts the whole sync when workers go silent.
    """
    idle = idle
    backward_class = BackwardWorker
    forward_class = ForwardWorker

    def __init__(self,
                 host_url,
                 resource,
                 auth=None,
                 params=None,
                 headers=None,
                 retrievers_params=DEFAULT_RETRIEVERS_PARAMS,
                 adaptive=False,
                 with_priority=False):
        """
        @param host_url: base URL of the API host
        @param resource: resource name (pluralized when reading responses)
        @param params: extra query parameters sent with every request
        @param retrievers_params: tuning dict; 'queue_size' bounds the queue
        """
        LOGGER.info(f'Init SyncClient for resource {resource}')
        self.host = host_url
        self.auth = auth
        self.resource = resource
        self.adaptive = adaptive
        self.headers = headers

        # BUGFIX: the default was `params={}` — a mutable default argument
        # shared across all instances; normalize None to a fresh dict.
        self.params = {} if params is None else params
        self.retrievers_params = retrievers_params
        self.queue = PriorityQueue(maxsize=retrievers_params['queue_size'])

    def init_clients(self):
        """Create one API client per traversal direction."""
        self.backward_client = ResourceClient(self.host, self.resource,
                                              self.params, self.auth,
                                              self.headers)
        self.forward_client = ResourceClient(self.host, self.resource,
                                             self.params, self.auth,
                                             self.headers)

    def handle_response_data(self, data):
        """Push every item of a response page onto the sync queue."""
        for resource_item in data:
            self.queue.put(PrioritizedItem(1, resource_item))

    def worker_watcher(self):
        """Restart the sync when no heartbeat arrived for too long."""
        while True:
            if time() - self.heartbeat > DEFAULT_FORWARD_HEARTBEAT:
                self.restart_sync()
                LOGGER.warning(
                    'Restart sync, reason: Last response from workers greater than 15 min ago.'
                )
            sleep(300)

    def start_sync(self):
        """Fetch the first page, then spawn the forward/backward workers and
        the heartbeat watcher greenlet."""
        LOGGER.info('Start sync...')

        data = self.backward_client.get_resource_items(self.params)

        self.handle_response_data(data[f'{self.resource}s'])

        # Derive each worker's starting query params from the first page's
        # prev/next pagination links.
        forward_params = deepcopy(self.params)
        forward_params.update({
            k: v[0]
            for k, v in parse.parse_qs(parse.urlparse(
                data.links.prev).query).items()
        })
        backward_params = deepcopy(self.params)
        backward_params.update({
            k: v[0]
            for k, v in parse.parse_qs(parse.urlparse(
                data.links.next).query).items()
        })

        self.forward_worker = self.forward_class(sync_client=self,
                                                 client=self.forward_client,
                                                 params=forward_params)
        self.backward_worker = self.backward_class(sync_client=self,
                                                   client=self.backward_client,
                                                   params=backward_params)
        self.workers = [self.forward_worker, self.backward_worker]

        for worker in self.workers:
            worker.start()
        self.heartbeat = time()
        self.watcher = spawn(self.worker_watcher)

    def restart_sync(self):
        """
        Restart retrieving from OCDS API.

        Kills the workers and the watcher, then rebuilds clients and starts
        a fresh sync.
        """

        LOGGER.info('Restart workers')
        for worker in self.workers:
            worker.kill()
        self.watcher.kill()
        self.init_clients()
        self.start_sync()

    def get_resource_items(self):
        """Generator: yield resource items forever, restarting the sync
        whenever either worker reports failure via check()."""
        self.init_clients()
        self.start_sync()
        while True:
            if self.forward_worker.check() or self.backward_worker.check():
                self.restart_sync()
            while not self.queue.empty():
                LOGGER.debug(f'Sync queue size: {self.queue.qsize()}',
                             extra={'SYNC_QUEUE_SIZE': self.queue.qsize()})
                LOGGER.debug('Yield resource item',
                             extra={'MESSAGE_ID': 'sync_yield'})
                item = self.queue.get()
                yield item.data
            LOGGER.debug(f'Sync queue size: {self.queue.qsize()}',
                         extra={'SYNC_QUEUE_SIZE': self.queue.qsize()})
            try:
                # Wait briefly for a new item without consuming it.
                self.queue.peek(block=True, timeout=0.1)
            except Empty:
                pass
Exemple #19
0
class TaskList:
    """
    Priority queue of tasks for a service.

    Tasks popped from the queue are tracked as the "current" task while
    they run; finished tasks are persisted to a storage backend so they can
    be listed and looked up by guid later.
    """
    def __init__(self, service):
        self.service = service
        self._queue = PriorityQueue()
        # done keeps the tasks that have been extracted from the queue
        # so we can inspect them later

        # keep the done tasks on disk, not in memory.
        # now we use sqlite, but we could plug any key-value store or
        # database behind; check TaskStorageBase to see the interface your
        # storage needs to have to be used to store tasks
        # self._done = TaskStorageFile(self)
        self._done = TaskStorageSqlite(self)
        # pointer to current task, guarded by _current_mu
        self._current = None
        self._current_mu = Semaphore()

    @property
    def current(self):
        """The task currently being executed (None when idle)."""
        with self._current_mu:
            return self._current

    @current.setter
    def current(self, value):
        with self._current_mu:
            self._current = value

    def __del__(self):
        # make sure the storage backend is closed when the list goes away
        if self._done:
            self._done.close()

    def get(self):
        """
        pop out a task from the task list
        this call is blocking when the task list is empty
        """
        # the queue holds (priority, task) tuples; drop the priority here
        _, task = self._queue.get()
        self.current = task
        nr_task_waiting.labels(service_guid=self.service.guid).dec()
        return task

    def put(self, task, priority=PRIORITY_NORMAL):
        """
        append task to the task list

        @param task: Task instance to enqueue
        @param priority: queue priority (lower value is served first)
        @raises ValueError: when task is not a Task instance
        """
        if not isinstance(task, Task):
            raise ValueError(
                "task should be an instance of the Task class not %s" %
                type(task))
        task._priority = priority
        nr_task_waiting.labels(service_guid=self.service.guid).inc()
        self._queue.put((priority, task))

    def done(self, task):
        """
        notify that a task is done
        """
        # system tasks are neither archived nor clear the current slot
        if task._priority != PRIORITY_SYSTEM:
            self.current = None
            self._done.add(task)

    def empty(self):
        """
        return True if the task list is empty, False otherwise
        """
        return self._queue.empty()

    def clear(self):
        """
        empty the task list of all its queued tasks
        """

        try:
            while not self.empty():
                self._queue.get_nowait()
        except gevent.queue.Empty:
            return

    def list_tasks(self, all=False):
        """
        @param all: if True, also return the task that have been executed
                    if False only return the task waiting in the task list
        returns all the task that are currently in the task list
        """
        # peek at the underlying heap; entries are (priority, task) tuples
        tasks = [x[1] for x in self._queue.queue]
        if all:
            tasks.extend(self._done.list())

        if self.current and self.current.state == TASK_STATE_RUNNING:
            # also return the current running
            # task as part of the task list
            tasks.insert(0, self.current)

        return tasks

    def get_task_by_guid(self, guid):
        """
        return a task from the list by it's guid

        @raises TaskNotFoundError: when no task with that guid exists
        """

        # FIXME: this is really inefficient
        def find_task(guid, l):
            # linear scan over an iterable of tasks
            for task in l:
                if task.guid == guid:
                    return task
            raise TaskNotFoundError()

        # check if it's not the current running task
        if self.current and self.current.guid == guid:
            return self.current

        # search in waiting tasks
        try:
            task = find_task(guid, [x[1] for x in self._queue.queue])
            return task
        except TaskNotFoundError:
            pass

        # search in done task
        # this will raise TaskNotFoundError if can't find the task
        return self._done.get(guid)

    def save(self, path):
        """
        serialize the task list to disk
        @param path: file path where to serialize the task list
        """
        def serialize_task(task):
            # minimal, yaml-friendly view of a task
            return {
                "guid": task.guid,
                "action_name": task.action_name,
                "args": task._args,
                "state": task.state,
                "eco": json.loads(task.eco.json) if task.eco else None,
                "created": task.created,
            }

        output = []
        for task in self.list_tasks(all=False):
            output.append(serialize_task(task))
        j.data.serializer.yaml.dump(path, output)

    def load(self, path):
        """
        load a task list that have been serialized with save method
        @param path: file path where the task list is serialized
        """
        if not os.path.exists(path):
            return

        data = j.data.serializer.yaml.load(path)
        for task in data:
            # only tasks that never completed are re-queued
            if task['state'] in [TASK_STATE_NEW, TASK_STATE_RUNNING]:
                self.put(_instantiate_task(task, self.service))
            else:
                # not a supported state, just skip it
                continue
Exemple #20
0
class SubNameBrute:
    """Subdomain brute forcer (one instance per worker process).

    Candidate names come from a dictionary file; wildcard templates
    ({alphnum}/{alpha}/{num}) are expanded on the fly.  Resolved names whose
    CNAME contains a known CDN fragment are skipped.  Progress counters are
    shared with the parent process and flushed periodically.

    NOTE: Python 2 code (`except X, e` syntax, xreadlines).
    """
    def __init__(self, target, options, process_num, dns_servers, cdns,next_subs,
                 scan_count, found_count, queue_size_list, tmp_dir):
        self.target = target.strip()
        self.options = options
        self.process_num = process_num
        self.dns_servers = dns_servers
        self.cdns = cdns

        self.dns_count = len(dns_servers)
        self.next_subs = next_subs
        # shared counters plus local deltas, flushed every few seconds to
        # limit contention on the shared values
        self.scan_count = scan_count
        self.scan_count_local = 0
        self.found_count = found_count
        self.found_count_local = 0
        self.queue_size_list = queue_size_list

        # one resolver per worker thread
        self.resolvers = [dns.resolver.Resolver(configure=False) for _ in range(options.threads)]
        for _r in self.resolvers:
            _r.lifetime = _r.timeout = 6.0
        self.queue = PriorityQueue()
        self.item_index = 0
        self.priority = 0
        self._load_sub_names()
        self.ip_dict = {}
        self.found_subs = set()
        # fallback resolver used when a per-thread resolver yields no answer
        self.ex_resolver = dns.resolver.Resolver(configure=False)
        self.ex_resolver.nameservers = dns_servers
        self.local_time = time.time()
        self.outfile = open('%s/%s_part_%s.txt' % (tmp_dir, target, process_num), 'w')

    def _load_sub_names(self):
        """Load this process' slice of the names dictionary into the queue.

        Plain names keep file order (increasing priority); wildcard
        templates are queued far behind them; plain names already matched
        by a wildcard template's regex are dropped as redundant.
        """
        if self.options.full_scan and self.options.file == 'subnames.txt':
            _file = 'dict/subnames_full.txt'
        else:
            if os.path.exists(self.options.file):
                _file = self.options.file
            elif os.path.exists('dict/%s' % self.options.file):
                _file = 'dict/%s' % self.options.file
            else:
                print_msg('[ERROR] Names file not found: %s' % self.options.file)
                exit(-1)

        normal_lines = []
        wildcard_lines = []
        wildcard_list = []
        regex_list = []
        lines = set()
        with open(_file) as f:
            for line in f.xreadlines():
                sub = line.strip()
                if not sub or sub in lines:
                    continue
                lines.add(sub)

                if sub.find('{alphnum}') >= 0 or sub.find('{alpha}') >= 0 or sub.find('{num}') >= 0:
                    wildcard_lines.append(sub)
                    # turn the template into a regex used to filter out
                    # normal lines it already covers
                    sub = sub.replace('{alphnum}', '[a-z0-9]')
                    sub = sub.replace('{alpha}', '[a-z]')
                    sub = sub.replace('{num}', '[0-9]')
                    if sub not in wildcard_list:
                        wildcard_list.append(sub)
                        regex_list.append('^' + sub + '$')
                else:
                    normal_lines.append(sub)
        if regex_list:
            pattern = '|'.join(regex_list)
            _regex = re.compile(pattern)
            for line in normal_lines[:]:
                if _regex.search(line):
                    normal_lines.remove(line)

        # split work between processes by striding over the lists
        for item in normal_lines[self.process_num::self.options.process]:
            self.priority += 1
            self.queue.put((self.priority, item))

        for item in wildcard_lines[self.process_num::self.options.process]:
            self.queue.put((88888888, item))

    def put_item(self, item):
        """Queue *item*; wildcard templates are pushed far back — the more
        placeholders remaining, the later they expand."""
        num = item.count('{alphnum}') + item.count('{alpha}') + item.count('{num}')
        if num == 0:
            self.priority += 1
            self.queue.put((self.priority, item))
        else:
            self.queue.put((self.priority + num * 10000000, item))

    def check_cdn(self, cname):
        """Return True if *cname* contains any of the known CDN fragments."""
        for cdn in self.cdns:
          if cdn in cname:
            return True
        return False

    def _scan(self, j):
        """Worker loop: expand templates, resolve candidates, and write
        found subdomains to the per-process output file.

        @param j: worker index; picks this worker's resolver/nameserver.
        """
        self.resolvers[j].nameservers = [self.dns_servers[j % self.dns_count]]
        while not self.queue.empty():
            try:
                item = self.queue.get(timeout=3.0)[1]
                self.scan_count_local += 1
                if time.time() - self.local_time > 3.0:
                    # flush the local counter into the shared one
                    self.scan_count.value += self.scan_count_local
                    self.scan_count_local = 0
                    self.queue_size_list[self.process_num] = self.queue.qsize()
            except Exception as e:
                break
            try:
                # wildcard templates get expanded one placeholder at a time
                # and re-queued instead of being resolved directly
                if item.find('{alphnum}') >= 0:
                    for _letter in 'abcdefghijklmnopqrstuvwxyz0123456789':
                        self.put_item(item.replace('{alphnum}', _letter, 1))
                    continue
                elif item.find('{alpha}') >= 0:
                    for _letter in 'abcdefghijklmnopqrstuvwxyz':
                        self.put_item(item.replace('{alpha}', _letter, 1))
                    continue
                elif item.find('{num}') >= 0:
                    for _letter in '0123456789':
                        self.put_item(item.replace('{num}', _letter, 1))
                    continue
                elif item.find('{next_sub}') >= 0:
                    for _ in self.next_subs:
                        self.queue.put((0, item.replace('{next_sub}', _, 1)))
                    continue
                else:
                    sub = item

                if sub in self.found_subs:
                    continue

                cur_sub_domain = sub + '.' + self.target
                _sub = sub.split('.')[-1]
                try:
                    answers = self.resolvers[j].query(cur_sub_domain)
                except dns.resolver.NoAnswer, e:
                    # fall back to the shared resolver on an empty answer
                    answers = self.ex_resolver.query(cur_sub_domain)

                if answers:
                    ans = self.resolvers[j].query(cur_sub_domain,'cname')
                    cname = ans[0].target.to_unicode().rstrip('.')

                    # skip names whose CNAME points at a known CDN
                    if self.check_cdn(cname):
                      continue
                    self.found_subs.add(sub)
                    ips = ', '.join(sorted([answer.address for answer in answers]))
                    # throw away obvious junk/blackhole answers
                    if ips in ['1.1.1.1', '127.0.0.1', '0.0.0.0']:
                        continue

                    if self.options.i and is_intranet(answers[0].address):
                        continue

                    try:
                        self.scan_count_local += 1
                        # if the CNAME target is itself under the scan target
                        # and unseen, queue it as a new candidate
                        answers = self.resolvers[j].query(cur_sub_domain, 'cname')
                        cname = answers[0].target.to_unicode().rstrip('.')
                        if cname.endswith(self.target) and cname not in self.found_subs:
                            self.found_subs.add(cname)
                            cname_sub = cname[:len(cname) - len(self.target) - 1]    # new sub
                            self.queue.put((0, cname_sub))

                    except:
                        pass

                    if (_sub, ips) not in self.ip_dict:
                        self.ip_dict[(_sub, ips)] = 1
                    else:
                        self.ip_dict[(_sub, ips)] += 1
                        # too many identical answers: likely wildcard DNS
                        if self.ip_dict[(_sub, ips)] > 30:
                            continue

                    self.found_count_local += 1
                    if time.time() - self.local_time > 3.0:
                        # flush the found counter and refresh queue stats
                        self.found_count.value += self.found_count_local
                        self.found_count_local = 0
                        self.queue_size_list[self.process_num] = self.queue.qsize()
                        self.local_time = time.time()

                    msg = cur_sub_domain.ljust(30) + ips
                    # print_msg(msg, line_feed=True)

                    self.outfile.write(cur_sub_domain.ljust(30) + '\t' + ips + '\n')
                    self.outfile.flush()
                    try:
                        # NXDOMAIN/NoAnswer on a random label means no
                        # wildcard DNS here, so brute force the next level
                        self.resolvers[j].query('lijiejietest.' + cur_sub_domain)
                    except (dns.resolver.NXDOMAIN, dns.resolver.NoAnswer) as e:
                        self.queue.put((999999999, '{next_sub}.' + sub))
                    except:
                        pass

            except (dns.resolver.NXDOMAIN, dns.name.EmptyLabel) as e:
                pass
            except (dns.resolver.NoNameservers, dns.resolver.NoAnswer, dns.exception.Timeout) as e:
                pass
class SubNameBrute:
    def __init__(self, target, options, process_num, dns_servers, next_subs,
                 scan_count, found_count, queue_size_list, tmp_dir):
        """Set up one brute-force worker process: per-thread resolvers,
        the priority work queue, shared/local progress counters and the
        per-process output file."""
        self.target = target.strip()
        self.options = options
        self.process_num = process_num
        self.dns_servers = dns_servers
        self.dns_count = len(dns_servers)
        self.next_subs = next_subs

        # shared cross-process counters plus local accumulators; the
        # locals are flushed into the shared values periodically by _scan
        self.scan_count = scan_count
        self.scan_count_local = 0
        self.found_count = found_count
        self.found_count_local = 0
        self.queue_size_list = queue_size_list

        # one resolver per scanning thread, each with a 6-second budget
        self.resolvers = []
        for _ in range(options.threads):
            _resolver = dns.resolver.Resolver(configure=False)
            _resolver.lifetime = _resolver.timeout = 6.0
            self.resolvers.append(_resolver)

        self.queue = PriorityQueue()
        self.item_index = 0
        self.priority = 0
        self._load_sub_names()

        self.ip_dict = {}
        self.found_subs = set()

        # fallback resolver used when a per-thread resolver returns NoAnswer
        self.ex_resolver = dns.resolver.Resolver(configure=False)
        self.ex_resolver.nameservers = dns_servers

        self.local_time = time.time()
        self.outfile = open('%s/%s_part_%s.txt' % (tmp_dir, target, process_num), 'w')

    def _load_sub_names(self):
        """Load candidate sub-domain names into the work queue.

        Plain names get increasing FIFO priorities; names containing
        wildcard placeholders ({alphnum}/{alpha}/{num}) are queued at a
        large fixed priority so they are expanded last.  Plain names that
        would also be produced by a wildcard pattern are dropped to avoid
        scanning them twice.  Names are sliced by process number so each
        worker process takes a disjoint share of the dictionary.
        """
        # pick the dictionary file: full dict, explicit path, or dict/ fallback
        if self.options.full_scan and self.options.file == 'subnames.txt':
            _file = 'dict/subnames_full.txt'
        else:
            if os.path.exists(self.options.file):
                _file = self.options.file
            elif os.path.exists('dict/%s' % self.options.file):
                _file = 'dict/%s' % self.options.file
            else:
                print_msg('[ERROR] Names file not found: %s' % self.options.file)
                exit(-1)

        normal_lines = []
        wildcard_lines = []
        wildcard_list = []
        regex_list = []
        lines = set()
        with open(_file) as f:
            # plain iteration replaces the deprecated Py2-only f.xreadlines()
            for line in f:
                sub = line.strip()
                if not sub or sub in lines:
                    continue  # skip blanks and duplicates
                lines.add(sub)

                if sub.find('{alphnum}') >= 0 or sub.find('{alpha}') >= 0 or sub.find('{num}') >= 0:
                    wildcard_lines.append(sub)
                    # translate placeholders into character classes so we can
                    # later detect plain names already covered by a wildcard
                    sub = sub.replace('{alphnum}', '[a-z0-9]')
                    sub = sub.replace('{alpha}', '[a-z]')
                    sub = sub.replace('{num}', '[0-9]')
                    if sub not in wildcard_list:
                        wildcard_list.append(sub)
                        regex_list.append('^' + sub + '$')
                else:
                    normal_lines.append(sub)

        if regex_list:
            pattern = '|'.join(regex_list)
            _regex = re.compile(pattern)
            # drop plain names matched by any wildcard pattern in one O(n) pass
            # (previously a copy + list.remove loop, which was O(n^2))
            normal_lines = [line for line in normal_lines if not _regex.search(line)]

        # each of the N processes takes every N-th name (disjoint slices)
        for item in normal_lines[self.process_num::self.options.process]:
            self.priority += 1
            self.queue.put((self.priority, item))

        for item in wildcard_lines[self.process_num::self.options.process]:
            self.queue.put((88888888, item))

    def put_item(self, item):
        """Queue *item*: plain names keep FIFO order; items still holding
        wildcard placeholders are pushed far back, one large step per
        remaining placeholder."""
        placeholders = (item.count('{alphnum}')
                        + item.count('{alpha}')
                        + item.count('{num}'))
        if placeholders:
            # more unexpanded placeholders -> lower priority (later expansion)
            self.queue.put((self.priority + placeholders * 10000000, item))
        else:
            self.priority += 1
            self.queue.put((self.priority, item))

    def _scan(self, j):
        """Worker loop for thread *j*: pop candidates off the priority queue,
        expand wildcard templates back onto the queue, resolve concrete names
        and append hits to the per-process output file.

        Exits when the queue is empty or a get() times out after 3 seconds.
        """
        # pin this thread to a single DNS server, round-robin over the pool
        self.resolvers[j].nameservers = [self.dns_servers[j % self.dns_count]]
        while not self.queue.empty():
            try:
                item = self.queue.get(timeout=3.0)[1]
                self.scan_count_local += 1
                # flush the local scan counter into the shared value roughly
                # every 3 seconds to avoid per-item contention.
                # NOTE(review): self.local_time is NOT refreshed here (only in
                # the found-count branch below), so once 3s have elapsed this
                # flush runs on every item until a hit resets the clock —
                # confirm whether that is intended.
                if time.time() - self.local_time > 3.0:
                    self.scan_count.value += self.scan_count_local
                    self.scan_count_local = 0
                    self.queue_size_list[self.process_num] = self.queue.qsize()
            except Exception as e:
                # queue drained (get timed out) — terminate this worker
                break
            try:
                # --- wildcard expansion: substitute ONE placeholder occurrence
                # per pass and re-queue the partially expanded candidates ---
                if item.find('{alphnum}') >= 0:
                    for _letter in 'abcdefghijklmnopqrstuvwxyz0123456789':
                        self.put_item(item.replace('{alphnum}', _letter, 1))
                    continue
                elif item.find('{alpha}') >= 0:
                    for _letter in 'abcdefghijklmnopqrstuvwxyz':
                        self.put_item(item.replace('{alpha}', _letter, 1))
                    continue
                elif item.find('{num}') >= 0:
                    for _letter in '0123456789':
                        self.put_item(item.replace('{num}', _letter, 1))
                    continue
                elif item.find('{next_sub}') >= 0:
                    # expand next-level prefixes at top priority (0)
                    for _ in self.next_subs:
                        self.queue.put((0, item.replace('{next_sub}', _, 1)))
                    continue
                else:
                    sub = item

                if sub in self.found_subs:
                    continue

                cur_sub_domain = sub + '.' + self.target
                # last label only, used as the wildcard-detection dict key
                _sub = sub.split('.')[-1]
                try:
                    answers = self.resolvers[j].query(cur_sub_domain)
                except dns.resolver.NoAnswer, e:
                    # retry once through the shared fallback resolver
                    answers = self.ex_resolver.query(cur_sub_domain)

                if answers:
                    self.found_subs.add(sub)
                    ips = ', '.join(sorted([answer.address for answer in answers]))
                    # discard obvious junk / blackhole answers
                    if ips in ['1.1.1.1', '127.0.0.1', '0.0.0.0']:
                        continue

                    # optionally skip intranet addresses (-i flag)
                    if self.options.i and is_intranet(answers[0].address):
                        continue

                    try:
                        self.scan_count_local += 1
                        # if the name is a CNAME into the target domain, queue
                        # the aliased sub-domain itself at top priority
                        answers = self.resolvers[j].query(cur_sub_domain, 'cname')
                        cname = answers[0].target.to_unicode().rstrip('.')
                        if cname.endswith(self.target) and cname not in self.found_subs:
                            self.found_subs.add(cname)
                            cname_sub = cname[:len(cname) - len(self.target) - 1]    # new sub
                            self.queue.put((0, cname_sub))

                    except:
                        pass

                    # wildcard-DNS suppression: if the same (label, ip-set)
                    # pair shows up more than 30 times, stop recording it
                    if (_sub, ips) not in self.ip_dict:
                        self.ip_dict[(_sub, ips)] = 1
                    else:
                        self.ip_dict[(_sub, ips)] += 1
                        if self.ip_dict[(_sub, ips)] > 30:
                            continue

                    self.found_count_local += 1
                    # periodic flush of the found counter (also resets the clock)
                    if time.time() - self.local_time > 3.0:
                        self.found_count.value += self.found_count_local
                        self.found_count_local = 0
                        self.queue_size_list[self.process_num] = self.queue.qsize()
                        self.local_time = time.time()

                    msg = cur_sub_domain.ljust(30) + ips
                    # print_msg(msg, line_feed=True)

                    self.outfile.write(cur_sub_domain.ljust(30) + '\t' + ips + '\n')
                    self.outfile.flush()
                    # probe a random label under the hit: if it does NOT
                    # wildcard-resolve, the hit has real children worth
                    # brute-forcing — queue a {next_sub} template for it
                    try:
                        self.resolvers[j].query('lijiejietest.' + cur_sub_domain)
                    except (dns.resolver.NXDOMAIN, dns.resolver.NoAnswer) as e:
                        self.queue.put((999999999, '{next_sub}.' + sub))
                    except:
                        pass

            except (dns.resolver.NXDOMAIN, dns.name.EmptyLabel) as e:
                # name does not exist / malformed label — just move on
                pass
            except (dns.resolver.NoNameservers, dns.resolver.NoAnswer, dns.exception.Timeout) as e:
                # transient resolver failure — drop this candidate
                pass