from collections import defaultdict
from queue import PriorityQueue


def dijkstra(grid, start, end):
    """Uniform-cost shortest path; `reachable` yields a cell's walkable neighbours."""
    distance = defaultdict(lambda: float("inf"))
    distance[start] = 0
    frontier = PriorityQueue()
    frontier.put_nowait((0, start))
    visited = set()
    retrace = {}
    while not frontier.empty():
        (d, p) = frontier.get_nowait()
        if p in visited:
            # stale queue entry: a shorter path to p was already processed
            continue
        visited.add(p)
        if p == end:
            break
        for n in reachable(grid, p):
            if distance[n] > d + 1:
                distance[n] = d + 1
                retrace[n] = p
                frontier.put_nowait((distance[n], n))
    # Backtrack from end to start, recording each hop in the forward direction
    current = end
    forward = {}
    while current != start:
        forward[retrace[current]] = current
        current = retrace[current]
    return ({p: distance[p] for p in visited}, forward)
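
# A minimal sketch of how the snippet above might be exercised. The `reachable`
# helper is not defined in the original, so this 4-neighbour version over a set
# of walkable cells is an assumption, purely for illustration.
def reachable(grid, p):
    # `grid` is assumed to be a set of (x, y) cells that can be entered
    (x, y) = p
    candidates = [(x + 1, y), (x - 1, y), (x, y + 1), (x, y - 1)]
    return [c for c in candidates if c in grid]


if __name__ == "__main__":
    cells = {(x, y) for x in range(3) for y in range(3)} - {(1, 1)}
    distances, forward = dijkstra(cells, (0, 0), (2, 2))
    print(distances[(2, 2)])  # 4: the detour around the blocked centre cell
    print(forward)            # step-by-step path keyed by predecessor
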

from queue import PriorityQueue


class KthLargest:
    """Keep a min-queue of the k largest values seen; its smallest entry
    is the kth largest overall."""

    def __init__(self, k, nums):
        self.k = k
        self.pq = PriorityQueue()
        for i in nums:
            self.add(i)

    def add(self, n):
        self.pq.put(n)
        if self.pq.qsize() > self.k:
            self.pq.get_nowait()  # drop the smallest; it can never be the kth largest
        # PriorityQueue has no peek, so pop the front entry and push it back
        smallest = self.pq.get_nowait()
        self.pq.put_nowait(smallest)
        return smallest
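
# Hedged usage sketch, mirroring the classic "kth largest in a stream" exercise:
kth = KthLargest(3, [4, 5, 8, 2])
print(kth.add(3))   # 4
print(kth.add(5))   # 5
print(kth.add(10))  # 5
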

from collections import defaultdict
from queue import PriorityQueue


def reverse_dijkstra(grid, end):
    """Distances from every reachable cell back to `end` (Dijkstra run from the goal).

    `reachable` is assumed to be provided, e.g. as sketched above."""
    distance = defaultdict(lambda: float("inf"))
    distance[end] = 0
    frontier = PriorityQueue()
    frontier.put_nowait((0, end))
    visited = set()
    while not frontier.empty():
        (d, p) = frontier.get_nowait()
        if p in visited:
            # stale queue entry: already expanded with a shorter distance
            continue
        visited.add(p)
        for n in reachable(grid, p):
            if distance[n] > d + 1:
                distance[n] = d + 1
                frontier.put_nowait((distance[n], n))
    return {p: distance[p] for p in visited}
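
# Both Dijkstra snippets above rely on PriorityQueue ordering (priority, item)
# tuples lexicographically: on equal priorities the items themselves are
# compared, so items must be orderable. A common way to lift that requirement
# is an entry counter as a tie-breaker; a minimal self-contained sketch:
from itertools import count
from queue import PriorityQueue

tie_breaker = count()
q = PriorityQueue()
q.put_nowait((1, next(tie_breaker), {"payload": "not comparable"}))
q.put_nowait((1, next(tie_breaker), {"payload": "dicts have no <"}))
priority, _, item = q.get_nowait()  # FIFO among equal priorities
print(priority, item)
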

import collections.abc
from asyncio import PriorityQueue
from typing import Any, Callable, Generic, Sequence, Tuple, Type, Union

# CommandAPI, PerformanceAPI, PeerConnectionLost, SortableTask, TChainPeer,
# ValidationError and _items_per_second come from the surrounding project.


class WaitingPeers(Generic[TChainPeer]):
    """
    Peers waiting to perform some action. When getting a peer from this queue,
    prefer the peer with the best throughput for the given command.
    """
    _waiting_peers: 'PriorityQueue[SortableTask[TChainPeer]]'
    _response_command_type: Tuple[Type[CommandAPI[Any]], ...]

    def __init__(
            self,
            response_command_type: Union[Type[CommandAPI[Any]], Sequence[Type[CommandAPI[Any]]]],
            sort_key: Callable[[PerformanceAPI], float] = _items_per_second) -> None:
        """
        :param sort_key: how should we sort the peers to get the fastest? low score means top-ranked
        """
        self._waiting_peers = PriorityQueue()

        if isinstance(response_command_type, type):
            self._response_command_type = (response_command_type,)
        elif isinstance(response_command_type, collections.abc.Sequence):
            self._response_command_type = tuple(response_command_type)
        else:
            raise TypeError(f"Unsupported value: {response_command_type}")

        self._peer_wrapper = SortableTask.orderable_by_func(self._get_peer_rank)
        self._sort_key = sort_key

    def _get_peer_rank(self, peer: TChainPeer) -> float:
        scores = [
            self._sort_key(exchange.tracker)
            for exchange in peer.chain_api.exchanges
            if issubclass(exchange.get_response_cmd_type(), self._response_command_type)
        ]

        if len(scores) == 0:
            raise ValidationError(
                f"Could not find any exchanges on {peer} "
                f"with response {self._response_command_type!r}"
            )

        # Typically there will only be one score, but we might want to match multiple commands.
        # To handle that case, we take the average of the scores:
        return sum(scores) / len(scores)

    def put_nowait(self, peer: TChainPeer) -> None:
        try:
            wrapped_peer = self._peer_wrapper(peer)
        except PeerConnectionLost:
            return
        self._waiting_peers.put_nowait(wrapped_peer)

    async def get_fastest(self) -> TChainPeer:
        wrapped_peer = await self._waiting_peers.get()
        peer = wrapped_peer.original

        # make sure the peer has not gone offline while waiting in the queue
        while not peer.is_alive:
            # if so, look for the next best peer
            wrapped_peer = await self._waiting_peers.get()
            peer = wrapped_peer.original

        return peer

    def pop_nowait(self) -> TChainPeer:
        """
        :raise QueueEmpty: if no peer is available
        """
        wrapped_peer = self._waiting_peers.get_nowait()
        peer = wrapped_peer.original

        # make sure the peer has not gone offline while waiting in the queue
        while not peer.is_alive:
            # if so, look for the next best peer
            wrapped_peer = self._waiting_peers.get_nowait()
            peer = wrapped_peer.original

        return peer

    def __len__(self) -> int:
        return self._waiting_peers.qsize()
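
# The get_fastest loop above illustrates a general pattern: queued entries can
# go stale while waiting, so consumers re-check them on dequeue. A
# self-contained sketch of the same idea with plain objects (the names here
# are illustrative, not part of the project above):
import asyncio
from dataclasses import dataclass, field


@dataclass(order=True)
class RankedWorker:
    rank: float
    name: str = field(compare=False)
    alive: bool = field(default=True, compare=False)


async def demo():
    q: asyncio.PriorityQueue = asyncio.PriorityQueue()
    q.put_nowait(RankedWorker(0.1, "fast-but-dead", alive=False))
    q.put_nowait(RankedWorker(0.5, "slower-but-alive"))
    worker = await q.get()
    while not worker.alive:  # discard entries that died while queued
        worker = await q.get()
    print(worker.name)  # slower-but-alive

asyncio.run(demo())
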

import asyncio
import random
import re
import time
from asyncio import PriorityQueue

import aiodns

# is_intranet() is a helper from the surrounding project.


class SubNameBrute(object):
    def __init__(self, *params):
        self.domain, self.options, self.process_num, self.dns_servers, self.next_subs, \
            self.scan_count, self.found_count, self.queue_size_array, tmp_dir = params
        self.dns_count = len(self.dns_servers)
        self.scan_count_local = 0
        self.found_count_local = 0
        self.resolvers = [
            aiodns.DNSResolver(tries=1) for _ in range(self.options.threads)
        ]
        self.queue = PriorityQueue()
        self.ip_dict = {}
        self.found_subs = set()
        self.timeout_subs = {}
        self.count_time = time.time()
        self.outfile = open(
            '%s/%s_part_%s.txt' % (tmp_dir, self.domain, self.process_num), 'w')
        self.normal_names_set = set()
        self.lock = asyncio.Lock()
        self.loop = None
        self.threads_status = ['1'] * self.options.threads

    async def load_sub_names(self):
        normal_lines = []
        wildcard_lines = []
        wildcard_set = set()
        regex_list = []
        lines = set()
        with open(self.options.file) as inFile:
            for line in inFile.readlines():
                sub = line.strip()
                if not sub or sub in lines:
                    continue
                lines.add(sub)
                brace_count = sub.count('{')
                if brace_count > 0:
                    wildcard_lines.append((brace_count, sub))
                    sub = sub.replace('{alphnum}', '[a-z0-9]')
                    sub = sub.replace('{alpha}', '[a-z]')
                    sub = sub.replace('{num}', '[0-9]')
                    if sub not in wildcard_set:
                        wildcard_set.add(sub)
                        regex_list.append('^' + sub + '$')
                else:
                    normal_lines.append(sub)
                    self.normal_names_set.add(sub)
        if regex_list:
            pattern = '|'.join(regex_list)
            _regex = re.compile(pattern)
            # drop names already covered by a wildcard pattern
            # (rebuilding the list avoids mutating it while iterating)
            normal_lines = [line for line in normal_lines if not _regex.search(line)]
        for _ in normal_lines[self.process_num::self.options.process]:
            await self.queue.put((0, _))  # priority set to 0
        for _ in wildcard_lines[self.process_num::self.options.process]:
            await self.queue.put(_)

    async def scan(self, j):
        self.resolvers[j].nameservers = [self.dns_servers[j % self.dns_count]]
        if self.dns_count > 1:
            while True:
                # pick a second, distinct DNS server as a fallback
                s = random.choice(self.dns_servers)
                if s != self.dns_servers[j % self.dns_count]:
                    self.resolvers[j].nameservers.append(s)
                    break
        while True:
            try:
                if time.time() - self.count_time > 1.0:
                    async with self.lock:
                        self.scan_count.value += self.scan_count_local
                        self.scan_count_local = 0
                        self.queue_size_array[self.process_num] = self.queue.qsize()
                        if self.found_count_local:
                            self.found_count.value += self.found_count_local
                            self.found_count_local = 0
                        self.count_time = time.time()
                try:
                    brace_count, sub = self.queue.get_nowait()
                    self.threads_status[j] = '1'
                except asyncio.queues.QueueEmpty as e:
                    self.threads_status[j] = '0'
                    await asyncio.sleep(0.5)
                    if '1' not in self.threads_status:
                        break
                    else:
                        continue
                if brace_count > 0:
                    brace_count -= 1
                    if sub.find('{next_sub}') >= 0:
                        for _ in self.next_subs:
                            await self.queue.put((0, sub.replace('{next_sub}', _)))
                    if sub.find('{alphnum}') >= 0:
                        for _ in 'abcdefghijklmnopqrstuvwxyz0123456789':
                            await self.queue.put((brace_count, sub.replace('{alphnum}', _, 1)))
                    elif sub.find('{alpha}') >= 0:
                        for _ in 'abcdefghijklmnopqrstuvwxyz':
                            await self.queue.put((brace_count, sub.replace('{alpha}', _, 1)))
                    elif sub.find('{num}') >= 0:
                        for _ in '0123456789':
                            await self.queue.put((brace_count, sub.replace('{num}', _, 1)))
                    continue
            except Exception as e:
                import traceback
                print(traceback.format_exc())
                break
            try:
                if sub in self.found_subs:
                    continue
                self.scan_count_local += 1
                cur_domain = sub + '.' + self.domain
                # print('Query %s' % cur_domain)
                answers = await self.resolvers[j].query(cur_domain, 'A')
                if answers:
                    self.found_subs.add(sub)
                    ips = ', '.join(sorted([answer.host for answer in answers]))
                    if ips in ['1.1.1.1', '127.0.0.1', '0.0.0.0', '0.0.0.1']:
                        continue
                    if self.options.i and is_intranet(answers[0].host):
                        continue
                    try:
                        self.scan_count_local += 1
                        answers = await self.resolvers[j].query(cur_domain, 'CNAME')
                        cname = answers[0].target.to_unicode().rstrip('.')
                        if cname.endswith(self.domain) and cname not in self.found_subs:
                            cname_sub = cname[:len(cname) - len(self.domain) - 1]  # new sub
                            if cname_sub not in self.normal_names_set:
                                self.found_subs.add(cname)
                                await self.queue.put((0, cname_sub))
                    except Exception as e:
                        pass
                    first_level_sub = sub.split('.')[-1]
                    if (first_level_sub, ips) not in self.ip_dict:
                        self.ip_dict[(first_level_sub, ips)] = 1
                    else:
                        self.ip_dict[(first_level_sub, ips)] += 1
                        if self.ip_dict[(first_level_sub, ips)] > 30:
                            continue
                    self.found_count_local += 1
                    self.outfile.write(cur_domain.ljust(30) + '\t' + ips + '\n')
                    self.outfile.flush()
                    try:
                        self.scan_count_local += 1
                        await self.resolvers[j].query(
                            'lijiejie-test-not-existed.' + cur_domain, 'A')
                    except aiodns.error.DNSError as e:
                        if e.args[0] in [4]:  # 4: domain name not found
                            if self.queue.qsize() < 50000:
                                for _ in self.next_subs:
                                    await self.queue.put((0, _ + '.' + sub))
                            else:
                                await self.queue.put((1, '{next_sub}.' + sub))
                    except Exception as e:
                        pass
            except aiodns.error.DNSError as e:
                if e.args[0] in [1, 4]:
                    pass
                elif e.args[0] in [11, 12]:
                    # 11: could not contact DNS servers, 12: timeout
                    # print('timed out sub %s' % sub)
                    self.timeout_subs[sub] = self.timeout_subs.get(sub, 0) + 1
                    if self.timeout_subs[sub] <= 1:
                        await self.queue.put((0, sub))  # Retry
                else:
                    print(e)
            except asyncio.TimeoutError as e:
                pass
            except Exception as e:
                import traceback
                traceback.print_exc()
                with open('errors.log', 'a') as errFile:
                    errFile.write('[%s] %s\n' % (type(e), str(e)))

    async def async_run(self):
        await self.load_sub_names()
        tasks = [self.scan(i) for i in range(self.options.threads)]
        await asyncio.gather(*tasks)

    def run(self):
        self.loop = asyncio.get_event_loop()
        asyncio.set_event_loop(self.loop)
        self.loop.run_until_complete(self.async_run())
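
# The (brace_count, name) tuples above double as queue priorities: concrete
# names enter at priority 0 and are resolved before wildcard patterns, which
# re-enter with one fewer brace after each expansion. A self-contained sketch
# of that expansion step (alphabet shortened for brevity):
import asyncio


async def expand_demo():
    queue: asyncio.PriorityQueue = asyncio.PriorityQueue()
    await queue.put((2, 'a{num}-{num}'))
    while not queue.empty():
        brace_count, sub = queue.get_nowait()
        if brace_count > 0:
            for ch in '01':  # '0123456789' in the real scanner
                await queue.put((brace_count - 1, sub.replace('{num}', ch, 1)))
            continue
        print(sub)  # fully expanded candidates: a0-0, a0-1, a1-0, a1-1

asyncio.run(expand_demo())
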

from __future__ import annotations

import logging
from asyncio import (
    CancelledError,
    PriorityQueue,
    Task,
    TimeoutError,
    create_task,
    gather,
    wait_for,
)
from collections.abc import Awaitable, Callable, KeysView
from functools import cached_property, wraps
from inspect import isawaitable
from typing import Any, NoReturn, Optional, Union

from dependency_injector.wiring import Provide, inject

# BrokerHandlerSetup, BrokerHandlerEntry, BrokerMessage, BrokerMessageStatus,
# BrokerPublisher, BrokerRequest, BrokerResponse, Cursor, EnrouteBuilder,
# Identifier, MinosActionNotFoundException, MinosConfig, NotProvidedException,
# Response, ResponseException, consume_queue, the REQUEST_*_CONTEXT_VAR
# variables and the _*_QUERY constants come from the surrounding minos
# framework and its dependencies.

logger = logging.getLogger(__name__)


class BrokerHandler(BrokerHandlerSetup):
    """Broker Handler class."""

    __slots__ = "_handlers", "_records", "_retry", "_queue", "_consumers", "_consumer_concurrency"

    def __init__(
        self,
        records: int,
        handlers: dict[str, Optional[Callable]],
        retry: int,
        publisher: BrokerPublisher,
        consumer_concurrency: int = 15,
        **kwargs: Any,
    ):
        super().__init__(**kwargs)
        self._handlers = handlers
        self._records = records
        self._retry = retry

        self._queue = PriorityQueue(maxsize=self._records)
        self._consumers: list[Task] = list()
        self._consumer_concurrency = consumer_concurrency

        self._publisher = publisher

    @classmethod
    def _from_config(cls, config: MinosConfig, **kwargs) -> BrokerHandler:
        kwargs["handlers"] = cls._get_handlers(config, **kwargs)
        kwargs["publisher"] = cls._get_publisher(**kwargs)
        # noinspection PyProtectedMember
        return cls(**config.broker.queue._asdict(), **kwargs)

    @staticmethod
    def _get_handlers(
        config: MinosConfig, handlers: dict[str, Optional[Callable]] = None, **kwargs
    ) -> dict[str, Callable[[BrokerRequest], Awaitable[Optional[BrokerResponse]]]]:
        if handlers is None:
            builder = EnrouteBuilder(*config.services, middleware=config.middleware)
            decorators = builder.get_broker_command_query_event(config=config, **kwargs)
            handlers = {decorator.topic: fn for decorator, fn in decorators.items()}
        return handlers

    # noinspection PyUnusedLocal
    @staticmethod
    @inject
    def _get_publisher(
        publisher: Optional[BrokerPublisher] = None,
        broker_publisher: BrokerPublisher = Provide["broker_publisher"],
        **kwargs,
    ) -> BrokerPublisher:
        if publisher is None:
            publisher = broker_publisher
        if publisher is None or isinstance(publisher, Provide):
            raise NotProvidedException(f"A {BrokerPublisher!r} object must be provided.")
        return publisher

    async def _setup(self) -> None:
        await super()._setup()
        await self._create_consumers()

    async def _destroy(self) -> None:
        await self._destroy_consumers()
        await super()._destroy()

    async def _create_consumers(self):
        while len(self._consumers) < self._consumer_concurrency:
            self._consumers.append(create_task(self._consume()))

    async def _destroy_consumers(self):
        for consumer in self._consumers:
            consumer.cancel()
        await gather(*self._consumers, return_exceptions=True)
        self._consumers = list()

        while not self._queue.empty():
            entry = self._queue.get_nowait()
            await self.submit_query(self._queries["update_not_processed"], (entry.id,))

    async def _consume(self) -> None:
        while True:
            await self._consume_one()

    async def _consume_one(self) -> None:
        entry = await self._queue.get()
        try:
            await self._dispatch_one(entry)
        finally:
            self._queue.task_done()

    @property
    def publisher(self) -> BrokerPublisher:
        """Get the publisher instance.

        :return: A ``BrokerPublisher`` instance.
        """
        return self._publisher

    @property
    def consumers(self) -> list[Task]:
        """Get the consumers.

        :return: A list of ``Task`` instances.
        """
        return self._consumers

    @property
    def handlers(self) -> dict[str, Optional[Callable]]:
        """Handlers getter.

        :return: A dictionary in which the keys are topics and the values are the handler.
        """
        return self._handlers

    @property
    def topics(self) -> KeysView[str]:
        """Get an iterable containing the topic names.

        :return: An ``Iterable`` of ``str``.
        """
        return self.handlers.keys()

    async def dispatch_forever(self, max_wait: Optional[float] = 60.0) -> NoReturn:
        """Dispatch the items in the consuming queue forever.

        :param max_wait: Maximum seconds to wait for notifications. If ``None`` the wait is performed until infinity.
        :return: This method does not return anything.
        """
        async with self.cursor() as cursor:
            await self._listen_entries(cursor)
            try:
                while True:
                    await self._wait_for_entries(cursor, max_wait)
                    await self.dispatch(cursor, background_mode=True)
            finally:
                await self._unlisten_entries(cursor)

    async def _listen_entries(self, cursor: Cursor):
        for topic in self.topics:
            # noinspection PyTypeChecker
            await cursor.execute(_LISTEN_QUERY.format(Identifier(topic)))

    async def _unlisten_entries(self, cursor: Cursor) -> None:
        for topic in self.topics:
            # noinspection PyTypeChecker
            await cursor.execute(_UNLISTEN_QUERY.format(Identifier(topic)))

    async def _wait_for_entries(self, cursor: Cursor, max_wait: Optional[float]) -> None:
        if await self._get_count(cursor):
            return

        while True:
            try:
                return await wait_for(consume_queue(cursor.connection.notifies, self._records), max_wait)
            except TimeoutError:
                if await self._get_count(cursor):
                    return

    async def _get_count(self, cursor) -> int:
        if not len(self.topics):
            return 0
        await cursor.execute(_COUNT_NOT_PROCESSED_QUERY, (self._retry, tuple(self.topics)))
        count = (await cursor.fetchone())[0]
        return count

    async def dispatch(self, cursor: Optional[Cursor] = None, background_mode: bool = False) -> None:
        """Dispatch a batch of ``HandlerEntry`` instances from the database's queue.

        :param cursor: The cursor to interact with the database. If ``None`` is provided a new one is acquired.
        :param background_mode: If ``True`` the entries are processed in the background by the consumer tasks.
            Otherwise, the call waits until every enqueued entry has been processed.
        :return: This method does not return anything.
        """
        is_external_cursor = cursor is not None
        if not is_external_cursor:
            cursor = await self.cursor().__aenter__()

        async with cursor.begin():
            await cursor.execute(
                self._queries["select_not_processed"], (self._retry, tuple(self.topics), self._records)
            )
            result = await cursor.fetchall()

            if len(result):
                entries = self._build_entries(result)

                await cursor.execute(self._queries["mark_processing"], (tuple(e.id for e in entries),))

                for entry in entries:
                    await self._queue.put(entry)

        if not is_external_cursor:
            await cursor.__aexit__(None, None, None)

        if not background_mode:
            await self._queue.join()

    def _build_entries(self, rows: list[tuple]) -> list[BrokerHandlerEntry]:
        kwargs = {"callback_lookup": self.get_action}
        return [BrokerHandlerEntry(*row, **kwargs) for row in rows]

    async def _dispatch_one(self, entry: BrokerHandlerEntry) -> None:
        logger.debug(f"Dispatching '{entry!r}'...")
        try:
            await self.dispatch_one(entry)
        except (CancelledError, Exception) as exc:
            logger.warning(f"Raised an exception while dispatching {entry!r}: {exc!r}")
            entry.exception = exc
            if isinstance(exc, CancelledError):
                raise exc
        finally:
            query_id = "delete_processed" if entry.success else "update_not_processed"
            await self.submit_query(self._queries[query_id], (entry.id,))

    async def dispatch_one(self, entry: BrokerHandlerEntry) -> None:
        """Dispatch one row.

        :param entry: Entry to be dispatched.
        :return: This method does not return anything.
        """
        logger.info(f"Dispatching '{entry!s}'...")

        fn = self.get_callback(entry.callback)
        message = entry.data
        data, status, headers = await fn(message)

        if message.reply_topic is not None:
            await self.publisher.send(
                data,
                topic=message.reply_topic,
                identifier=message.identifier,
                status=status,
                user=message.user,
                headers=headers,
            )

    @staticmethod
    def get_callback(
        fn: Callable[[BrokerRequest], Union[Optional[BrokerRequest], Awaitable[Optional[BrokerRequest]]]]
    ) -> Callable[[BrokerMessage], Awaitable[tuple[Any, BrokerMessageStatus, dict[str, str]]]]:
        """Get the handler function to be used by the Broker Handler.

        :param fn: The action function.
        :return: A wrapper function around the given one that is compatible with the Broker Handler API.
        """

        @wraps(fn)
        async def _wrapper(raw: BrokerMessage) -> tuple[Any, BrokerMessageStatus, dict[str, str]]:
            request = BrokerRequest(raw)
            user_token = REQUEST_USER_CONTEXT_VAR.set(request.user)
            headers_token = REQUEST_HEADERS_CONTEXT_VAR.set(raw.headers)

            try:
                response = fn(request)
                if isawaitable(response):
                    response = await response
                if isinstance(response, Response):
                    response = await response.content()
                return response, BrokerMessageStatus.SUCCESS, REQUEST_HEADERS_CONTEXT_VAR.get()
            except ResponseException as exc:
                logger.warning(f"Raised an application exception: {exc!s}")
                return repr(exc), BrokerMessageStatus.ERROR, REQUEST_HEADERS_CONTEXT_VAR.get()
            except Exception as exc:
                logger.exception(f"Raised a system exception: {exc!r}")
                return repr(exc), BrokerMessageStatus.SYSTEM_ERROR, REQUEST_HEADERS_CONTEXT_VAR.get()
            finally:
                REQUEST_USER_CONTEXT_VAR.reset(user_token)
                REQUEST_HEADERS_CONTEXT_VAR.reset(headers_token)

        return _wrapper

    def get_action(self, topic: str) -> Optional[Callable]:
        """Get handling function to be called.

        Gets the instance of the class and method to call.

        Args:
            topic: Kafka topic. Example: "TicketAdded"

        Raises:
            MinosNetworkException: if the topic has no controller/action configured; please review the
                configuration file.
        """
        if topic not in self._handlers:
            raise MinosActionNotFoundException(
                f"topic {topic} has no controller/action configured, "
                f"please review the configuration file"
            )

        handler = self._handlers[topic]

        logger.debug(f"Loaded {handler!r} action!")
        return handler

    @cached_property
    def _queries(self) -> dict[str, str]:
        # noinspection PyTypeChecker
        return {
            "count_not_processed": _COUNT_NOT_PROCESSED_QUERY,
            "select_not_processed": _SELECT_NOT_PROCESSED_QUERY,
            "mark_processing": _MARK_PROCESSING_QUERY,
            "delete_processed": _DELETE_PROCESSED_QUERY,
            "update_not_processed": _UPDATE_NOT_PROCESSED_QUERY,
        }

import asyncio
import random
import time
from asyncio import Lock, PriorityQueue, Queue
from collections import deque

import aiohttp

# PROVIDERS, IP_HOSTS, JUDGES and get_all_ip() come from the surrounding project.


class AsyncProxyBroker:
    def __init__(self,
                 check_url,
                 allowed_anonymity_levels=None,
                 qps_per_proxy=1,
                 max_consecutive_failures=5,
                 providers=PROVIDERS,
                 timeout=5):
        self._proxies = Queue()
        self._pending_providers = Queue()
        self._providers = providers

        self._verified_proxies = {}
        self._throttled_proxies = PriorityQueue()
        self._errors = {}

        self._check_url = check_url
        self._qps_per_proxy = qps_per_proxy
        self._max_consecutive_failures = max_consecutive_failures
        self._timeout = timeout

        self._ip = None
        self._ip_lock = Lock()

        if not allowed_anonymity_levels:
            self._allowed_anonymity_levels = ['Anonymous', 'Elite']
        else:
            self._allowed_anonymity_levels = allowed_anonymity_levels

    async def _get_real_ip(self):
        while not self._ip:
            async with self._ip_lock:
                if self._ip:
                    return self._ip

                try:
                    async with aiohttp.request(
                            url=random.choice(IP_HOSTS),
                            method='GET',
                            timeout=aiohttp.ClientTimeout(total=self._timeout)) as response:
                        contents = await response.text()
                        ips = get_all_ip(contents)

                        if len(ips) == 1:
                            self._ip = ips.pop()
                            return self._ip
                except (UnicodeDecodeError, asyncio.TimeoutError,
                        aiohttp.ClientOSError, aiohttp.ClientResponseError,
                        aiohttp.ServerDisconnectedError):
                    pass

        return self._ip

    async def _get_anonymity_level(self, proxy_address):
        judge = random.choice(JUDGES)
        ip = await self._get_real_ip()

        try:
            async with aiohttp.request(
                    url=judge,
                    method='GET',
                    proxy=proxy_address,
                    timeout=aiohttp.ClientTimeout(total=self._timeout)) as response:
                contents = (await response.text()).lower()
                contained_ips = get_all_ip(contents)

                if ip in contained_ips:
                    return 'Transparent'
                elif 'via' in contents or 'proxy' in contents:
                    return 'Anonymous'
                else:
                    return 'Elite'
        except (UnicodeDecodeError, asyncio.TimeoutError,
                aiohttp.ClientOSError, aiohttp.ClientResponseError,
                aiohttp.ServerDisconnectedError):
            return 'None'

    def _populate_providers(self):
        for provider in self._providers:
            self._pending_providers.put_nowait(provider)

    async def _can_connect_to_test_url(self, proxy_address):
        try:
            async with aiohttp.request(
                    url=self._check_url,
                    method='GET',
                    proxy=proxy_address,
                    timeout=aiohttp.ClientTimeout(total=self._timeout)) as response:
                await response.text()
                return True
        except (UnicodeDecodeError, asyncio.TimeoutError,
                aiohttp.ClientOSError, aiohttp.ClientResponseError,
                aiohttp.ServerDisconnectedError):
            return False

    async def _populate_proxies(self):
        if self._pending_providers.empty():
            self._populate_providers()

        provider = self._pending_providers.get_nowait()
        proxies = await provider.get_proxies()

        for proxy in proxies:
            self._proxies.put_nowait(proxy)

        self._pending_providers.task_done()

    async def _try_verify_one_proxy(self):
        if self._proxies.empty():
            await self._populate_proxies()
            return

        (host, port, types) = self._proxies.get_nowait()
        proxy_address = 'http://%s:%s' % (host, port)

        if await self._get_anonymity_level(proxy_address) in self._allowed_anonymity_levels and \
                await self._can_connect_to_test_url(proxy_address):
            self._verified_proxies[proxy_address] = deque()
            self._errors[proxy_address] = 0

        self._proxies.task_done()

    @staticmethod
    def _flush_history(history):
        executions_removed = 0
        earliest_time = time.monotonic()

        while len(history) > 0:
            earliest_time = history.popleft()
            if time.monotonic() - earliest_time < 1:
                history.appendleft(earliest_time)
                break
            executions_removed += 1

        return executions_removed, earliest_time

    def _flush_throttled_proxies(self):
        while not self._throttled_proxies.empty():
            (_, proxy_url, history) = self._throttled_proxies.get_nowait()
            executions_removed, earliest_time = self._flush_history(history)

            if executions_removed == 0:
                # still throttled: park it again, keyed by its oldest request
                self._throttled_proxies.put_nowait((earliest_time, proxy_url, history))
                self._throttled_proxies.task_done()
                return

            self._verified_proxies[proxy_url] = history
            self._throttled_proxies.task_done()

    def mark_successful(self, proxy_url):
        if proxy_url not in self._errors:
            return
        self._errors[proxy_url] = max(0, self._errors[proxy_url] - 1)

    def mark_failure(self, proxy_url):
        if proxy_url not in self._errors:
            return
        self._errors[proxy_url] += 1

    async def random_proxy(self):
        while True:
            self._flush_throttled_proxies()

            if not self._verified_proxies:
                await self._try_verify_one_proxy()

            while self._verified_proxies:
                proxy_url = random.choice(list(self._verified_proxies.keys()))

                if self._errors[proxy_url] >= self._max_consecutive_failures:
                    del self._errors[proxy_url]
                    del self._verified_proxies[proxy_url]
                    continue

                history = self._verified_proxies[proxy_url]
                _, earliest_time = self._flush_history(history)

                if len(history) < self._qps_per_proxy:
                    history.append(time.monotonic())
                    return proxy_url

                del self._verified_proxies[proxy_url]
                self._throttled_proxies.put_nowait((earliest_time, proxy_url, history))
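
# The throttle above parks a proxy in a PriorityQueue keyed by its oldest
# request timestamp, so the proxy that will free up first is re-checked first.
# A self-contained sketch of that sliding-window idea (names illustrative):
import time
from collections import deque
from queue import PriorityQueue

WINDOW = 1.0  # seconds
QPS = 2       # allowed requests per window per resource

throttled = PriorityQueue()
history = deque([time.monotonic()] * QPS)  # resource already at its limit
throttled.put_nowait((history[0], 'proxy-a', history))

time.sleep(WINDOW)  # wait out the window so the entries expire

oldest, name, hist = throttled.get_nowait()
while hist and time.monotonic() - hist[0] >= WINDOW:
    hist.popleft()  # drop executions that fell out of the window
print(name, 'usable again:', len(hist) < QPS)  # proxy-a usable again: True
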

import asyncio
import random
import re
import time
from asyncio import PriorityQueue

import dns.asyncresolver
import dns.exception
import dns.resolver
from async_timeout import timeout

# is_intranet() is a helper from the surrounding project.


class SubNameBrute(object):
    def __init__(self, *params):
        self.domain, self.options, self.process_num, self.dns_servers, self.next_subs, \
            self.scan_count, self.found_count, self.queue_size_array, tmp_dir = params
        self.dns_count = len(self.dns_servers)
        self.scan_count_local = 0
        self.found_count_local = 0
        self.resolvers = [
            dns.asyncresolver.Resolver(configure=False)
            for _ in range(self.options.threads)
        ]
        for r in self.resolvers:
            r.lifetime = 6.0
            r.timeout = 10.0
        self.queue = PriorityQueue()
        self.ip_dict = {}
        self.found_subs = set()
        self.cert_subs = set()
        self.timeout_subs = {}
        self.no_server_subs = {}
        self.count_time = time.time()
        self.outfile = open(
            '%s/%s_part_%s.txt' % (tmp_dir, self.domain, self.process_num), 'w')
        self.normal_names_set = set()
        self.lock = asyncio.Lock()
        self.threads_status = ['1'] * self.options.threads

    async def load_sub_names(self):
        normal_lines = []
        wildcard_lines = []
        wildcard_set = set()
        regex_list = []
        lines = set()
        with open(self.options.file) as inFile:
            for line in inFile.readlines():
                sub = line.strip()
                if not sub or sub in lines:
                    continue
                lines.add(sub)
                brace_count = sub.count('{')
                if brace_count > 0:
                    wildcard_lines.append((brace_count, sub))
                    sub = sub.replace('{alphnum}', '[a-z0-9]')
                    sub = sub.replace('{alpha}', '[a-z]')
                    sub = sub.replace('{num}', '[0-9]')
                    if sub not in wildcard_set:
                        wildcard_set.add(sub)
                        regex_list.append('^' + sub + '$')
                else:
                    normal_lines.append(sub)
                    self.normal_names_set.add(sub)
        if regex_list:
            pattern = '|'.join(regex_list)
            _regex = re.compile(pattern)
            # drop names already covered by a wildcard pattern
            # (rebuilding the list avoids mutating it while iterating)
            normal_lines = [line for line in normal_lines if not _regex.search(line)]
        for _ in normal_lines[self.process_num::self.options.process]:
            await self.queue.put((0, _))  # priority set to 0
        for _ in wildcard_lines[self.process_num::self.options.process]:
            await self.queue.put(_)

    async def update_counter(self):
        while True:
            if '1' not in self.threads_status:
                return
            self.scan_count.value += self.scan_count_local
            self.scan_count_local = 0
            self.queue_size_array[self.process_num] = self.queue.qsize()
            if self.found_count_local:
                self.found_count.value += self.found_count_local
                self.found_count_local = 0
            self.count_time = time.time()
            await asyncio.sleep(0.5)

    async def check_https_alt_names(self, domain):
        try:
            reader, _ = await asyncio.open_connection(
                host=domain,
                port=443,
                ssl=True,
                server_hostname=domain,
            )
            for item in reader._transport.get_extra_info('peercert')['subjectAltName']:
                if item[0].upper() == 'DNS':
                    name = item[1].lower()
                    if name.endswith(self.domain):
                        sub = name[:len(name) - len(self.domain) - 1]  # new sub
                        sub = sub.replace('*', '')
                        sub = sub.strip('.')
                        if sub and sub not in self.found_subs and \
                                sub not in self.normal_names_set and sub not in self.cert_subs:
                            self.cert_subs.add(sub)
                            await self.queue.put((0, sub))
        except Exception as e:
            pass

    async def do_query(self, j, cur_domain):
        async with timeout(10.2):
            return await self.resolvers[j].resolve(cur_domain, 'A')
            # asyncio.wait_for did not work properly: it hung in some cases,
            # so async_timeout is used instead.
            # return await asyncio.wait_for(
            #     self.resolvers[j].resolve(cur_domain, 'A', lifetime=8), timeout=9)

    async def scan(self, j):
        self.resolvers[j].nameservers = [self.dns_servers[j % self.dns_count]]
        if self.dns_count > 1:
            while True:
                # pick a second, distinct DNS server as a fallback
                s = random.choice(self.dns_servers)
                if s != self.dns_servers[j % self.dns_count]:
                    self.resolvers[j].nameservers.append(s)
                    break

        empty_counter = 0
        while True:
            try:
                brace_count, sub = self.queue.get_nowait()
                self.threads_status[j] = '1'
                empty_counter = 0
            except asyncio.queues.QueueEmpty as e:
                empty_counter += 1
                if empty_counter > 10:
                    self.threads_status[j] = '0'
                if '1' not in self.threads_status:
                    break
                else:
                    await asyncio.sleep(0.1)
                    continue
            if brace_count > 0:
                brace_count -= 1
                if sub.find('{next_sub}') >= 0:
                    for _ in self.next_subs:
                        await self.queue.put((0, sub.replace('{next_sub}', _)))
                if sub.find('{alphnum}') >= 0:
                    for _ in 'abcdefghijklmnopqrstuvwxyz0123456789':
                        await self.queue.put((brace_count, sub.replace('{alphnum}', _, 1)))
                elif sub.find('{alpha}') >= 0:
                    for _ in 'abcdefghijklmnopqrstuvwxyz':
                        await self.queue.put((brace_count, sub.replace('{alpha}', _, 1)))
                elif sub.find('{num}') >= 0:
                    for _ in '0123456789':
                        await self.queue.put((brace_count, sub.replace('{num}', _, 1)))
                continue
            try:
                if sub in self.found_subs:
                    continue
                self.scan_count_local += 1
                cur_domain = sub + '.' + self.domain
                answers = await self.do_query(j, cur_domain)
                if answers:
                    self.found_subs.add(sub)
                    ips = ', '.join(sorted([answer.address for answer in answers]))
                    invalid_ip_found = False
                    for answer in answers:
                        if answer.address in ['1.1.1.1', '127.0.0.1', '0.0.0.0', '0.0.0.1']:
                            invalid_ip_found = True
                    if invalid_ip_found:
                        continue
                    if self.options.i and is_intranet(answers[0].address):
                        continue

                    try:
                        cname = str(answers.canonical_name)[:-1]
                        if cname != cur_domain and cname.endswith(self.domain):
                            cname_sub = cname[:len(cname) - len(self.domain) - 1]  # new sub
                            if cname_sub not in self.found_subs and cname_sub not in self.normal_names_set:
                                await self.queue.put((0, cname_sub))
                    except Exception as e:
                        pass

                    first_level_sub = sub.split('.')[-1]
                    max_found = 20

                    if self.options.w:
                        first_level_sub = ''
                        max_found = 3

                    if (first_level_sub, ips) not in self.ip_dict:
                        self.ip_dict[(first_level_sub, ips)] = 1
                    else:
                        self.ip_dict[(first_level_sub, ips)] += 1
                        if self.ip_dict[(first_level_sub, ips)] > max_found:
                            continue

                    self.found_count_local += 1
                    self.outfile.write(cur_domain.ljust(30) + '\t' + ips + '\n')
                    self.outfile.flush()

                    if not self.options.no_cert_check:
                        async with timeout(10.0):
                            await self.check_https_alt_names(cur_domain)

                    try:
                        self.scan_count_local += 1
                        await self.do_query(j, 'lijiejie-test-not-existed.' + cur_domain)
                    except dns.resolver.NXDOMAIN as e:
                        if self.queue.qsize() < 20000:
                            for _ in self.next_subs:
                                await self.queue.put((0, _ + '.' + sub))
                        else:
                            await self.queue.put((1, '{next_sub}.' + sub))
                    except Exception as e:
                        continue

            except (dns.resolver.NXDOMAIN, dns.resolver.NoAnswer) as e:
                pass
            except dns.resolver.NoNameservers as e:
                self.no_server_subs[sub] = self.no_server_subs.get(sub, 0) + 1
                if self.no_server_subs[sub] <= 3:
                    await self.queue.put((0, sub))  # Retry again
            except (dns.exception.Timeout, dns.resolver.LifetimeTimeout) as e:
                self.timeout_subs[sub] = self.timeout_subs.get(sub, 0) + 1
                if self.timeout_subs[sub] <= 3:
                    await self.queue.put((0, sub))  # Retry again
            except Exception as e:
                if str(type(e)).find('asyncio.exceptions.TimeoutError') < 0:
                    with open('errors.log', 'a') as errFile:
                        errFile.write('[%s] %s\n' % (type(e), str(e)))

    async def async_run(self):
        await self.load_sub_names()
        tasks = [self.scan(i) for i in range(self.options.threads)]
        tasks.insert(0, self.update_counter())
        await asyncio.gather(*tasks)

    def run(self):
        loop = asyncio.get_event_loop()
        asyncio.set_event_loop(loop)
        loop.run_until_complete(self.async_run())