def get_nslookup(domain):
    # Resolve via Google public DNS; return the answer list, or the exception on failure
    dns_query = Nslookup(dns_servers=['8.8.8.8'])
    try:
        ips_record = dns_query.dns_lookup(domain)
        return domain, ips_record.answer
    except Exception as e:
        return domain, e
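A minimal usage sketch for the helper above, with a hypothetical domain list: because the function folds exceptions into its return value, it can be fanned out over a thread pool without per-call error handling.

from concurrent.futures import ThreadPoolExecutor

domains = ["example.com", "example.org"]  # hypothetical inputs
with ThreadPoolExecutor(max_workers=4) as executor:
    for domain, result in executor.map(get_nslookup, domains):
        print(domain, result)  # result is an answer list, or the raised exception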
def getmyip():
    # Resolve this host's public IP via OpenDNS
    domain = "myip.opendns.com"
    dns_query = Nslookup(dns_servers=["208.67.220.220"])
    ips_record = dns_query.dns_lookup(domain)
    #print(ips_record.response_full, ips_record.answer)
    if len(ips_record.answer) == 0:
        print("Query My IP from opendns failed")
        return None  # avoid an IndexError on an empty answer
    return ips_record.answer[0]
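A hedged usage sketch: with the None guard above, callers can test for failure before using the address (the print format is illustrative).

public_ip = getmyip()
if public_ip is not None:
    print("Public IP:", public_ip)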
from concurrent import futures
from concurrent.futures import ThreadPoolExecutor

from nslookup import Nslookup


class Scanner:
    def __init__(self, config):
        self.target = config.domain
        self.output_file = config.output
        self.verbose_mode = config.verbose
        self.threads = config.threads
        if config.wordlist:
            self.candidates = config.wordlist.split(',')
        else:
            # read candidates from the configured file (was `args.file`, an undefined global)
            with open(config.file, 'r') as candidates_file:
                self.candidates = list(filter(bool, candidates_file.read().split('\n')))
        self.dns_query = Nslookup(dns_servers=["1.1.1.1"])  # set optional Cloudflare public DNS server

    def run(self):
        with ThreadPoolExecutor(max_workers=self.threads) as executor:
            resolver_futures = {executor.submit(self.resolve, subdomain): subdomain
                                for subdomain in self.candidates}
            for future in futures.as_completed(resolver_futures):
                try:
                    data = future.result()
                    if self.verbose_mode:
                        print(f'{data["domain"]} - {",".join(data["answer"]) if len(data["answer"]) > 0 else "Not found"}')
                    elif len(data['answer']) > 0:
                        print(f'{data["domain"]} - {", ".join(data["answer"])}')
                except Exception as exc:
                    print(exc)

    def resolve(self, subdomain):
        domain = subdomain + '.' + self.target
        return {'domain': domain, 'answer': self.dns_query.dns_lookup(domain).answer}
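A hypothetical invocation of Scanner, using a SimpleNamespace in place of the parsed CLI config; every field value here is a placeholder.

from types import SimpleNamespace

config = SimpleNamespace(domain="example.com", output=None, verbose=False,
                         threads=8, wordlist="www,mail,api", file=None)
Scanner(config).run()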
def resolveIPFromDNS(hostname, DNSList):
    domain = hostname
    compiledList = []
    # Query each DNS server in the list in turn (e.g. an optional Cloudflare public DNS server)
    for DNSIP in DNSList:
        dns_query = Nslookup(dns_servers=[DNSIP])
        ips_record = dns_query.dns_lookup(domain)
        compiledList.append((DNSIP, ips_record.answer))  # avoid shadowing the built-in `tuple`
    return compiledList
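An illustrative call, assuming the (hypothetical) hostname and resolvers below are reachable.

results = resolveIPFromDNS("example.com", ["1.1.1.1", "8.8.8.8"])
for server, answer in results:
    print(server, "->", answer or "no answer")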
def results_url(request):
    if 'href' in request.GET:
        href = request.GET['href']
        domain = urlparse(href).netloc
        data = whois.whois(domain)
        dns_query = Nslookup(dns_servers=["1.1.1.1"])
        ips_record = dns_query.dns_lookup(domain)
        ip_record = ips_record.answer[0]
        access_token = 'efd54bc59b7a42'  # For IP info query
        handler = ipinfo.getHandler(access_token)
        ip_address = ip_record
        details = handler.getDetails(ip_address)
        ip_country = details.country_name
        ip_org = details.org
        context = {
            'data': data,
            'ip_record': ip_record,
            'ip_country': ip_country,
            'ip_org': ip_org,
        }
        return render(request, "results_url.html", context)
def test(domain, dns_servers):
    domain_number, best_servers = len(domain), []
    for dns in dns_servers:
        success_count, avg_ms = 0, 0
        print('---------------------------------')
        print('-------------', dns, '--------------')
        for d in domain:
            dns_query = None
            with Timeout(2, False):  # gevent-style timeout: give up on this resolver after 2s
                dns_query = Nslookup(dns_servers=[dns])
            if dns_query is not None:
                ips_record = dns_query.dns_lookup(d)
                server = ips_record.answer[0]
                r = ping(server, timeout=1, size=40, count=10)
                print(d, '-->', server,
                      'min: ', r.rtt_min_ms,
                      'max: ', r.rtt_max_ms,
                      'avg: ', r.rtt_avg_ms)
                avg_ms += r.rtt_avg_ms  # accumulate the average RTT, not the max, for the figure below
                if r.rtt_avg_ms < 300:
                    success_count += 1
        if success_count:  # guard the division when every lookup timed out
            print('DNS: {}'.format(dns), '\n',
                  'Resolving Rate: {:.2f}%'.format(success_count / domain_number * 100), '\n',
                  'Average Delay: {:.2f}'.format(avg_ms / success_count))
        if success_count == domain_number:
            best_servers.append([dns, avg_ms / success_count])
            print('Best Selected?', 'Yes!')
        else:
            print('Best Selected?', 'No')
    print('----------------Top Rank----------------')
    best_servers.sort(key=lambda x: x[1])
    for item in best_servers:
        print(item[0], '{:.2f}ms'.format(item[1]))
    print('----------------End Rank----------------')
def add(request):
    data = {}
    if request.method == 'POST':
        form = M.ScriptForm(request.POST)
        if form.is_valid():
            ok = True
            journal = ''
            log, err_log = '', ''  # ensure these exist even if no IP is resolved
            url = form.cleaned_data.get("url")
            m = url_regexp.fullmatch(url)
            valid = validators.url('http://' + url)
            if m is None or not valid:
                ok = False
                journal += f'invalid url: {url}\n'
                data["message"] = 'Invalid URL'
                data["url"] = url
                data["ips"] = []
                data["journal"] = journal
                data["ok"] = False
                logging.info('Invalid URL %s', url)
            else:
                dns_query = Nslookup(dns_servers=["8.8.8.8", "1.1.1.1"])
                ips_record = dns_query.dns_lookup(url)
                ips = ips_record.answer
                for ip in ips:
                    query = ssh_query.format(ip, url)
                    process = subprocess.Popen(query, stdout=subprocess.PIPE,
                                               stderr=subprocess.PIPE, shell=True)
                    output = process.stdout.readlines()
                    err = process.stderr.readlines()
                    # bytes.strip() returns a new object; the result was discarded before
                    log = b"".join(bytes(x) for x in output).strip().decode('ascii')
                    err_log = b"".join(bytes(x) for x in err).strip().decode('ascii')
                    print(log, err_log)
                    if len(log) != 0 or len(err_log) != 0:
                        ok = False
                        journal += 'ERROR for IP {}\n'.format(ip)
                        journal += log + '\n'
                        journal += err_log + '\n'
                        break
                if ok:
                    data["message"] = 'Success!'
                    data["url"] = url
                    data["ips"] = ips
                    data["ok"] = True
                    ips_str = ' '.join(str(elem) for elem in ips)
                    logging.info('added hostname %s with IPs %s', url, ips_str)
                else:
                    data["message"] = 'ERROR!'
                    data["url"] = url
                    data["ips"] = ips
                    data["journal"] = journal
                    data["ok"] = False
                    ips_str = ' '.join(str(elem) for elem in ips)
                    logging.info('tried to add hostname %s with IPs %s, err: %s',
                                 url, ips_str, log + '\n' + err_log)
    else:
        form = M.ScriptForm()  # unbound form for GET so the render below has a form
    return render(request, 'sos/index.html', {'form': form, 'data': data})
from nslookup import Nslookup
import time

# enter a path of Domain List
infile = open('/home/crimson/Downloads/domains.txt', 'r')
filestr = infile.read()
words = filestr.split()

print("Started...")
dns_query = Nslookup(dns_servers=["8.8.8.8"])
print("DNS Server is Set to 8.8.8.8")
print("Querying Domains Please Wait...")

for domain in words:
    ips_record = dns_query.dns_lookup(domain)
    print(ips_record.response_full, ips_record.answer)
    time.sleep(1)
    dns_record = dns_query.soa_lookup(domain)
    print(dns_record.response_full, dns_record.answer)
    time.sleep(1)  # 1 second gap before querying the next domain
# HTTP object
urllib3.disable_warnings(urllib3.exceptions.InsecureRequestWarning)
http = urllib3.PoolManager()

# Telegram bot
bot = telebot.TeleBot(telegram_token)

# API URLs
url_getip = "https://api.ipify.org?format=json"
request_ddns = "{0}/nic/update?hostname={1}&myip={2}"

# Set DNS servers (optional Cloudflare and Google public DNS)
dns_query = Nslookup(dns_servers=["1.1.1.1", "8.8.8.8"])

last_ip = ""
dns_update = 1

send_message("Start *Simple-DDNS*!")

while True:
    # Check public IP
    try:
        ip_now = requestAPI(url_getip)["ip"]
    except Exception as e:
        print("Error check IP: {0}".format(e))
        time.sleep(5)
        continue
# get db connection cursor
cursor = conn.cursor()

# get list of ping monitors from the db
try:
    sql = "SELECT monitor_id,monitor_type,monitor_source FROM monitors WHERE monitor_type=%s AND monitor_state=%s"
    val = ('dns', 1)
    cursor.execute(sql, val)
except mysql.connector.Error as err:
    print(err)
    sys.exit(1)

results = cursor.fetchall()
dns_query = Nslookup()

for (monitor_id, monitor_type, monitor_source) in results:
    ips_record = dns_query.dns_lookup(monitor_source)
    #print(ips_record.response_full, ips_record.answer)
    if not ips_record.answer:
        # host unknown (e.g. domain name lookup error)
        # store result in the db as -1
        try:
            sql = "INSERT INTO monitor_results (monitor_id, monitor_type, monitor_source, monitor_result) VALUES (%s, %s, %s, %s)"
            val = (monitor_id, monitor_type, monitor_source, -1)
            cursor.execute(sql, val)
        except mysql.connector.Error as err:
            print(err)
# Signup for a free API key here: https://auth0.com/signals/api/signup
AUTH0_API_KEY = "PLACE YOUR AUTH0 API KEY HERE"

# If we don't have at least 1 argument, len = 1 is just the script name
if len(sys.argv) < 2:
    print("Usage: nslookup-geo.py DOMAIN.TO.LOOKUP [DNS.SERVER.TO.USE]")
    sys.exit()

# The domain to lookup
lookup_domain = sys.argv[1]

# If the length of argv is more than 2, we have the script name,
# a domain to lookup and a DNS server to use.
if len(sys.argv) > 2:
    # dns_servers expects a list; passing the bare string was why this never returned
    dns_query = Nslookup(dns_servers=[sys.argv[2]])
else:
    dns_query = Nslookup()

dns_resp = dns_query.dns_lookup(lookup_domain)

# had to do a length check, because nothing is returned if there is no result
if len(dns_resp.answer) > 0:
    lookup_ip = dns_resp.answer[0]
else:
    print(f"Unable to get IP Address for {lookup_domain}")
    sys.exit()

def ipgeo_lookup():
    """
def resolve_domain(domain, dns_servers=None):
    # avoid a mutable default argument
    lookuper = Nslookup(dns_servers=dns_servers or [])
    return lookuper.dns_lookup(domain).answer
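A usage sketch, assuming Cloudflare's 1.1.1.1 resolver is reachable; the domain is a placeholder.

ips = resolve_domain("example.com", dns_servers=["1.1.1.1"])
print(ips)  # a list of A-record strings, empty if nothing resolved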
def __init__(self, url):
    parsed_url = urlparse(url)
    # The structure of the parsed url is: scheme://netloc/path;parameters?query#fragment

    # The same special characters are counted in every component of the url,
    # so the repeated Counter blocks are consolidated into one helper.
    special_chars = {
        'dot': '.', 'hyphen': '-', 'underline': '_', 'slash': '/',
        'quest': '?', 'equ': '=', 'at': '@', 'amp': '&', 'excl': '!',
        'spaces': ' ', 'tilde': '~', 'comma': ',', 'plus': '+',
        'ast': '*', 'pound': '#', 'doll': '$', 'perc': '%',
    }

    def count_special_chars(prefix, text):
        # Sets self.<prefix>_num_<name> for every special character in text
        count = Counter(text)
        for name, char in special_chars.items():
            setattr(self, f'{prefix}_num_{name}', count[char])

    # Attributes based on the whole url
    self.url = parsed_url.geturl()
    count_special_chars('url', self.url)
    self.url_length = len(self.url)
    url_email_addresses = self.get_email(self.url)
    self.url_emails = bool(url_email_addresses)
    # self.url_num_emails = len(url_email_addresses)

    # Attributes based on the domain part of the url
    self.domain = parsed_url.netloc
    count_special_chars('dom', self.domain)
    count = Counter(self.domain)
    self.dom_num_vowels = (count['a'] + count['e'] + count['i']
                           + count['o'] + count['u'])
    self.dom_length = len(self.domain)
    self.dom_is_ip = self.is_ipaddress(parsed_url.netloc)
    self.dom_contains_server_or_client = ("server" in self.domain.lower()
                                          or "client" in self.domain.lower())
    self.dom_spf = None  # We are currently not using this feature

    # Attributes based on the directory part of the url
    self.directory = self.get_directory(parsed_url.path)
    count_special_chars('dir', self.directory)
    self.dir_length = len(self.directory)

    # Attributes based on the file_name part of the url
    self.file_name = self.get_file_name(parsed_url.path)  # we could also test for the file extension
    count_special_chars('file', self.file_name)
    self.file_length = len(self.file_name)

    # Attributes based on the query part of the url. There is a discrepancy
    # between the data (from the paper) and the way urlparse parses the url.
    self.param_string = parsed_url.query
    self.params = parse_qs(self.param_string)
    count_special_chars('params', self.param_string)
    self.params_length = len(self.param_string)
    self.params_tld_present = self.get_tld(self.param_string)  # find out if there is a tld in the param_string variable
    self.params_nums = len(self.params)

    # Attributes based on TLD (top level domain)
    try:
        self.tld = tld.get_tld(self.url)
        self.tld_length = len(self.tld)
    except Exception:
        labels = parsed_url.netloc.split(".")
        self.tld = labels[-1] if labels else None  # fall back to the last label of the netloc
        self.tld_length = len(self.tld) if self.tld else -1

    # Attributes based on WHOIS.
    # The whois command prints its errors to stdout, so we capture stdout
    # in case whois returns nothing.
    f = io.StringIO()
    with redirect_stdout(f):
        self.whois_record = whois(parsed_url.netloc)
    out = f.getvalue()
    error_pattern = re.compile(r'\bError.*')
    if any(error_pattern.match(line) for line in out.split('\n')):
        print("There was an error in whois")

    # Get IP address
    dns_query = Nslookup(dns_servers=["1.1.1.1"])
    start = time.time()
    dns_error = False
    try:
        ip_record = dns_query.dns_lookup(parsed_url.netloc)
    except Exception:
        # The dns lookup returned an error
        dns_error = True
        ip_record = None
    roundtrip = time.time() - start
    self.time_response = None if dns_error else roundtrip
    if self.dom_is_ip:
        self.ip = parsed_url.netloc
    else:
        # If the domain is not in IP format find the IP address
        self.ip = ip_record.answer[0] if ip_record and ip_record.answer else None

    # Get info from IP address and the WHOIS server
    self.num_resolved_ips = len(ip_record.answer) if ip_record else None
    obj = ipwhois.IPWhois(self.ip) if self.ip else None
    # rdap sometimes returns better results than whois, but most papers use
    # whois data; the downside (also with rdap) is that we don't get an expiration date
    self.dom_record = obj.lookup_whois() if obj else None
    self.asn_ip = self.dom_record['asn'] if obj and 'asn' in self.dom_record else None

    # By "domain activation time" we mean how long the domain has been
    # active: the difference between now and the registration date, in days.
    if self.dom_record:
        datetime_pattern = re.compile(r'(\d{1,2}/\d{1,2}/\d{4})|(\d{4}-\d{1,2}-\d{1,2})')
        self.dom_activation_date = self.get_activation_date(self.dom_record)  # this would have to change if we move from whois to rdap
        self.dom_activation_date = datetime.datetime.strptime(
            datetime_pattern.search(self.dom_activation_date).group(), '%Y-%m-%d')
        self.dom_activation_date = (datetime.datetime.now() - self.dom_activation_date).days
    else:
        self.dom_activation_date = None

    self.dom_expiration_date = (self.whois_record.expiration_date
                                if type(self.whois_record.expiration_date) != list
                                else self.whois_record.expiration_date[0])
    if self.dom_expiration_date:
        self.dom_expiration_date = (self.dom_expiration_date - datetime.datetime.now()).days

    self.ttl = None  # TODO: get ttl_hostname
    self.num_name_servers = None  # TODO: get nameservers
    # self.dom_country = self.dom_record.country
    self.url_shortened = False  # TODO: determine if the url has been shortened
    self.tls_ssl_cert = False  # it gets updated in the fetch_page function
    page = self.fetch_page(url)
    self.content = page.content
    self.headers = page.headers
    self.scheme = parsed_url.scheme
    self.params = parsed_url.params  # note: overwrites the query dict parsed above
    self.fragments = parsed_url.fragment
    self.port = parsed_url.port
    # TODO: add a flag for whether to collect info from google, since we
    # don't need it all the time (rule-based mode or during crawling)
    self.url_google_indx = False  # we are currently not using this feature
    self.dom_google_indx = False  # we are currently not using this feature
    self.num_mx_servers = None
class Headers:
    def __init__(self):
        # Set the object nslookup
        self.nslookup = Nslookup()
        # Store headers as dict
        self.headers = {"cookies": []}
        # Store entire header string
        self.header_str = ""
        # Store top level domain
        self.tld = None
        # Store subdomain
        self.ext = None
        # Store url with extension
        self.url = None
        # Store the alexa position
        self.position = None
        # Store the http return code
        self.http_code = None
        # Store the nslookup IP address
        self.ip = None
        # Store full nslookup response
        self.ip_full = None
        # Store MX records for domain
        self.mx = []

    # Accept the header stream from pycurl
    def display_header(self, header_line):
        header_line = header_line.decode('iso-8859-1')
        # Append the line to the full header string
        self.header_str = self.header_str + header_line
        # Ignore all lines without a colon
        if ':' not in header_line:
            return
        # Break the header line into header name and value
        h_name, h_value = header_line.split(':', 1)
        # Remove whitespace that may be present
        h_name = h_name.strip()
        h_value = h_value.strip()
        h_name = h_name.lower()  # Convert header names to lowercase
        # If line is a cookie then append to cookies
        if h_name == 'set-cookie':
            self.headers['cookies'].append(h_value)
        # Store all other header names and values
        else:
            self.headers[h_name] = h_value

    # Get the http code from the header string
    def get_http_return_code(self):
        first_line = self.header_str.split("\n")[0]
        if re.search(r' [\d]{3,}', first_line):
            self.http_code = re.search(r' [\d]{3,}', first_line).group(0)

    # Get the IP from nslookup
    def get_ip(self):
        # Set the uri with subdomain (outside the try so the except can log it)
        if "www." in self.url:
            uri = "www." + self.tld
        else:
            uri = self.tld
        try:
            print("-- Looking up IP for: " + uri)
            ip_rec = self.nslookup.dns_lookup(uri)
            if len(ip_rec.answer):
                self.ip = ip_rec.answer[0]
            #if len(ip_rec.response_full):
            #    self.ip_full = ip_rec.response_full[0]
        except Exception:
            traceback.print_exc()
            logger.error("-- Error getting nslookup for: " + uri)
            logger.error(traceback.format_exc())

    # Get MX records as array
    def get_mx_records(self):
        # Set the uri with subdomain
        if "www." in self.url:
            uri = "www." + self.tld
        else:
            uri = self.tld
        print("-- Looking up MX for: " + uri)
        try:
            mx = dns.resolver.query(uri, 'MX')
            for item in mx:
                #print(item.exchange)
                self.mx.append(str(item.exchange))
        except Exception:
            #traceback.print_exc()
            logger.error("-- Error getting MX for: " + uri)
            logger.error(traceback.format_exc())
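A hedged sketch of wiring Headers to pycurl, which streams each raw response header line to the HEADERFUNCTION callback; the URL and tld values are placeholders set by hand for the demo.

import pycurl
from io import BytesIO

h = Headers()
h.url = "www.example.com"  # hypothetical target
h.tld = "example.com"
buf = BytesIO()
c = pycurl.Curl()
c.setopt(c.URL, "http://" + h.url)
c.setopt(c.HEADERFUNCTION, h.display_header)  # pycurl passes raw bytes per header line
c.setopt(c.WRITEDATA, buf)
c.perform()
c.close()
h.get_http_return_code()
h.get_ip()
print(h.http_code, h.ip, h.headers)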
# © 2021 Reischl Franz
# Regularly checks internet connectivity

from time import sleep
from nslookup import Nslookup
from datetime import datetime, timedelta
import os

if __name__ == '__main__':
    DOMAIN_TO_CHECK = 'orf.at'
    DNS_LIWEST = '192.168.0.1'  # Router gateway, has ISP DNS entries
    DNS_GOOGLE = '8.8.8.8'
    LOG_FILE = 'internet.log'

    dns_query_liwest = Nslookup(dns_servers=[DNS_LIWEST])
    dns_query_google = Nslookup(dns_servers=[DNS_GOOGLE])

    if not os.path.exists(LOG_FILE):
        # Create log file
        file = open(LOG_FILE, 'w')
        file.write('status\tfrom\tduration\n')
        file.close()
    print('status\tfrom\tduration')

    last_change = datetime.now()
    last_status = 'none'

    while True:
        ips_record = dns_query_liwest.dns_lookup(DOMAIN_TO_CHECK)
        internet_connection = len(ips_record.answer) > 0
def getDNSARecords(domain):
    # CONFIGURE DNS RESOLVER
    # Modify the following line. Defaults to Cloudflare and Google.
    # Multiple are included for redundancy, but only one is required.
    dns_query = Nslookup(dns_servers=["1.1.1.1", "1.0.0.1", "8.8.8.8", "8.8.4.4"])
    a_records = dns_query.dns_lookup(domain)
    return a_records.answer
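Hypothetical usage; the domain is a placeholder.

records = getDNSARecords("example.com")
if records:
    print("A records:", ", ".join(records))
else:
    print("No A records found")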
#!/usr/bin/env python3
from requests import get, put
from nslookup import Nslookup

"""
Cloudflare DDNS Updater
Using a UDP query just like a dig command
PEP8 compliant
"Simple is better than complex."
"""

# a little bit faster than an HTTP request
# (assumes the lookup succeeds; current_ip stays unbound otherwise)
[current_ip := _ for _ in Nslookup(dns_servers=['208.67.222.222'])
 .dns_lookup('myip.opendns.com').answer]

api_url = ('https://api.cloudflare.com/client/v4/zones/'
           '__REPLACE__/dns_records/'
           '__REPLACE__/')
token = 'Bearer __PUT YOUR TOKEN HERE__'
auth_header = {'Authorization': token,
               'Content-Type': 'application/json'}
data = {'type': 'A',
        'name': '__REPLACE__',
        'content': current_ip,
        'ttl': 1,
        'proxied': True}

# there's no need to import the json module
get_ip_cloudflare = get(api_url, headers=auth_header).json()
ip_on_cloudflare = get_ip_cloudflare['result']['content']
import logging
from nslookup import Nslookup  # was missing, but Nslookup is used below
from config import dnslist, name_file

logging.basicConfig(format='%(message)s', level=logging.INFO)

f1 = []
with open(name_file, encoding='utf-8') as file:
    for domain in file:
        domain = domain.strip('\n').strip().encode('idna').decode('utf-8')
        f1.append(domain)
        f1.append('www.' + domain)

for dns in dnslist:
    dns_query = Nslookup(dns_servers=[dns])
    not_black = []
    broken = []
    for domain in f1:
        ips_record = dns_query.dns_lookup(domain)
        domain = domain.encode('idna').decode('idna')
        if len(ips_record.answer) > 0:
            if '127.0.0.1' not in ips_record.answer:
                not_black.append(domain)
        else:
            if domain not in broken:
                broken.append(domain)
    if len(broken) > 0 or len(not_black) > 0:
import decouple
import discord
import discord.ext
import os

from b import DSC_TOKEN
from discord.ext import commands
from nslookup import Nslookup

client = commands.Bot(command_prefix=["!", "?", "%"])
domain = "pipi-pupu.sytes.net"
dns_query = Nslookup()

# TODO REMOVE TOKEN BEFORE GITHUB
TOKEN = DSC_TOKEN

@client.event
async def on_ready():
    print('Hello!')

@client.command()
async def gametime(ctx):
    await ctx.send('Game time: 6:30pm PST')

# Valheim Server
@client.command()
async def vserver(ctx):
    ips_record = dns_query.dns_lookup(domain)