def parse(self):
    """Run the full packet-parse sequence: header, question record,
    qname extraction, then resource records.

    Any exception raised by a stage is logged through tools.p instead of
    propagating, so one malformed packet cannot crash the caller.
    """
    try:
        self.header()
        self.question_record_handler()
        self.get_qname()
        self.resource_record_handler()
    except Exception as parse_error:  # best effort: log and move on
        tools.p(parse_error)
def _server_connection_handler(self):
    # Walk the configured DNS servers (in dict order) and establish a TLS
    # pipeline to the first one currently marked reachable.
    for secure_server, status in self.DNSRelay.dns_servers.items():
        if (status['tls_up']):
            # _tls_connect returns the OSError on failure, None on success.
            error = self._tls_connect(secure_server)
            if (not error):
                break

    # for/else: runs only when the loop finished WITHOUT a break, i.e. no
    # server was marked up or every connect attempt failed.
    else:
        tools.p('NO SECURE SERVER AVAILABLE!')
def _process_query(self, client_query):
    """Answer a client DNS query from the local cache when possible,
    otherwise forward it to the configured secure servers.

    Side effects: bumps the request counter for valid top-level domains
    and logs cache hits through tools.p.
    """
    if self.DNSCache.valid_top_domain(client_query.request):
        self.DNSCache.increment_counter(client_query.request)

    cached_packet = self.DNSCache.search(client_query)
    if (cached_packet):
        # BUGFIX: relay the cached response packet and report its TTL.
        # Previously the client's own query object was echoed back while
        # the retrieved cached_packet went completely unused.
        self.send_to_client(cached_packet, client_query.address)

        tools.p(
            f'CACHED RESPONSE | NAME: {client_query.request} TTL: {cached_packet.calculated_ttl}'
        )
    else:
        self.external_query(client_query)
def _recv_handler(self):
    # Pull responses off the persistent TLS pipeline until the remote end
    # closes it (recv returns b'') or the socket times out / errors out.
    try:
        while True:
            data_from_server = self.secure_socket.recv(1024)
            # an empty read means the remote server closed the connection.
            if (not data_from_server):
                tools.p('PIPELINE CLOSED BY REMOTE SERVER!')
                break

            self._parse_server_response(data_from_server)
    except (timeout, OSError):
        # a timeout or socket error simply ends this receive loop; a new
        # pipeline gets established by the connection handler.
        pass
    finally:
        # always release the socket so a fresh connection can be made.
        self.secure_socket.close()
def _tls_connect(self, secure_server):
    """Open a TLS-wrapped TCP connection to *secure_server* on the
    DNS-over-TLS port and publish it as self.secure_socket.

    Returns None on success, or the OSError instance on failure so the
    caller can treat the result as a truthy error flag.
    """
    tools.p(
        f'PIPELINE CLOSED. REESTABLISHING CONNECTION TO SERVER: {secure_server}.'
    )
    sock = None
    secure_socket = None
    try:
        sock = socket(AF_INET, SOCK_STREAM)
        sock.settimeout(10)  # bounds connect/handshake and later recv calls

        # wrap before connecting; SNI is supplied via server_hostname and
        # connect() performs the TLS handshake.
        secure_socket = self.tls_context.wrap_socket(
            sock, server_hostname=secure_server)
        secure_socket.connect((secure_server, DNS_TLS_PORT))
    except OSError as e:
        # BUGFIX: the socket was previously leaked on every failed
        # connect/handshake, and a dead socket was left assigned to
        # self.secure_socket. Close whichever object owns the fd.
        if secure_socket is not None:
            secure_socket.close()
        elif sock is not None:
            sock.close()
        return e

    # only publish the socket once it is fully connected.
    self.secure_socket = secure_socket
def _tls_reachability(self):
    """Background loop that probes every configured server over TLS and
    keeps its 'tls_up' flag current. Runs forever, one pass every 10s."""
    print('[+] Starting TLS reachability tests.')
    while True:
        for secure_server, server_info in self.DNSRelay.dns_servers.items():
            probe_error = self._tls_reachability_worker(secure_server)
            # the worker returns a truthy error on failure, falsy on success
            server_info['tls_up'] = not probe_error
            if (probe_error):
                tools.p(f'TLS reachability failed for: {secure_server}')
            else:
                tools.p(
                    f'TLS reachability successful for: {secure_server}')

        time.sleep(10)
def _parse_server_response(self, data_from_server):
    # Match a secure-server response back to its waiting client query,
    # relay it, then feed it into the local cache.
    server_response = PacketManipulation(data_from_server)
    dns_id = server_response.get_dns_id()
    # pop() looks up and clears the pending request in one step; a miss
    # (None) means the response is unsolicited or already handled.
    client_query = self.DNSRelay.request_mapper.pop(dns_id, None)
    if (client_query):
        server_response.parse()
        tools.p(
            f'Secure Request Received from Server. DNS ID: {server_response.dns_id} | {server_response.request}'
        )
        ## Parsing packet and rewriting TTL to minimum 5 minutes/max 1 hour and changing DNS ID back to original.
        server_response.rewrite(dns_id=client_query.dns_id)
        # locally generated queries (top-domain refresh) carry no client
        # address, so there is no one to relay to — but they still get cached.
        if (client_query.address):
            self.DNSRelay.send_to_client(server_response,
                                         client_query.address)

        # adding packets to cache if not already in and incrementing the counter for the requested domain.
        self.DNSRelay.DNSCache.add(server_response, client_query.address)
def add(self, server_response, client_address):
    """Insert a parsed server response into the local DNS cache.

    A record is (re)cached only when the response actually carries
    cacheable data AND any of the following hold:
      - the domain is not cached yet,
      - the existing entry has expired,
      - the request was generated locally for the top-domains list
        (identified by a missing client address).
    """
    now = time.time()
    expire = int(now) + server_response.cache_ttl
    already_cached = self.dns_cache.get(server_response.request, None)
    # BUGFIX: the original condition parsed as
    #     (A or B) or (C and has_data)
    # because `and` binds tighter than `or`, so missing/expired entries
    # were cached even when data_to_cache was empty, storing empty record
    # sets. The has_data requirement now applies to every path.
    if (server_response.data_to_cache
            and (not already_cached
                 or already_cached['expire'] <= now
                 or not client_address)):
        self.dns_cache.update({
            server_response.request: {
                'records': server_response.data_to_cache,
                'expire': expire,
                # top-domain refreshes (no client address) are pinned
                # entries rather than "normal" expiring cache entries.
                'normal_cache': bool(client_address)
            }
        })

        tools.p(
            f'CACHE ADD | NAME: {server_response.request} TTL: {server_response.cache_ttl}'
        )
def auto_clear_cache(self):
    """Background loop that evicts expired entries from the standard
    cache every 3 minutes, leaving pinned top-domain entries alone."""
    print('[+] Starting automated standard cache clearing.')
    while True:
        now = time.time()
        # Snapshot the items so entries can be popped while iterating.
        # NOTE: a shallow list() replaces the previous deepcopy() — only
        # the 'expire' field is read here, so duplicating every cached
        # record set each pass was pure overhead.
        for domain, info in list(self.dns_cache.items()):
            if (info['expire'] < now and domain not in self.top_domains):
                self.dns_cache.pop(domain, None)

        # here for testing purposes || consider reporting the cache size to the front end
        # tools.p('CLEARED EXPIRED CACHE.')
        cache_size = sys.getsizeof(self.dns_cache)  # shallow size of the dict only
        num_records = len(self.dns_cache)
        tools.p(
            f'CACHE SIZE: {cache_size} | NUMBER OF RECORDS: {num_records} | CACHE: {self.dns_cache}'
        )

        time.sleep(3 * 60)
def auto_top_domains(self):
    """Background loop that re-queries the most requested domains every
    3 minutes so their records stay cached, then persists the list."""
    print('[+] Starting automated top domains caching.')
    while True:
        ranked = self.domain_counter.most_common(TOP_DOMAIN_COUNT)
        self.top_domains = {
            name: rank for rank, (name, _count) in enumerate(ranked, 1)
        }

        for domain in self.top_domains:
            # creating empty class object, then assigning required fields. this will allow compatibility with standard server
            # operations for locally generated requests/queries
            refresh_query = RequestHandler(None, None)
            refresh_query.set_required_fields(domain)
            self.DNSRelay.external_query(refresh_query)

        tools.p(f'RE CACHED TOP DOMAINS. TOTAL: {len(self.top_domains)}')

        # logging top domains in cache for reference. if top domains are useless, will work on a way to ensure only important domains
        # are cached. worst case can make them configurable.
        tools.write_cache({'top_domains': self.top_domains},
                          'top_domains_cache.json')

        time.sleep(3 * 60)
def send_to_client(self, server_response, client_address):
    ## Relaying packet from server back to host
    # server_response.send_data holds the raw wire-format bytes;
    # client_address is the (ip, port) tuple captured with the original query.
    self.sock.sendto(server_response.send_data, client_address)

    tools.p(
        f'Request: {server_response.request} RELAYED TO {client_address[0]}: {client_address[1]}'
    )
def parse(self):
    """Parse the packet in two stages: header first, then the DNS query
    section.

    Exceptions from either stage are logged via tools.p rather than
    raised, mirroring the relay's best-effort parsing policy.
    """
    try:
        self._parse_header()
        self._parse_dns_query()
    except Exception as parse_error:
        tools.p(parse_error)