def orgGroups(self, sender, mID):
    """Return True when the message-ID domain and the sender's mail
    domain resolve to the same registered whois organization.

    Results are memoized in self.domainCompanyPairing. Any resolution
    or whois failure yields False (best-effort check).
    """
    def _org_for(domain):
        # Cache hit avoids a DNS + whois round trip.
        if domain in self.domainCompanyPairing:
            return self.domainCompanyPairing[domain]
        ip = socket.gethostbyname(domain)
        org = IPWhois(ip).lookup(get_referral=True)['nets'][0]['name']
        self.domainCompanyPairing[domain] = org
        return org

    try:
        # Both names get a "www." prefix so they resolve like web hosts.
        res1 = _org_for("www." + mID)
        res2 = _org_for("www." + sender[sender.index("@") + 1:])
        return res1 == res2
    except Exception:
        # Narrowed from a bare except; still best-effort False on any
        # DNS/whois/parsing failure (including a sender with no '@').
        return False
def orgGroups(self, sender, mID):
    """Check whether mID's domain and the sender's mail domain belong
    to the same whois organization; False on any failure."""
    try:
        dom_mid = "www." + mID
        dom_sender = "www." + sender[sender.index("@") + 1:]
        orgs = []
        for dom in (dom_mid, dom_sender):
            if dom in self.domainCompanyPairing:
                org = self.domainCompanyPairing[dom]
            else:
                addr = socket.gethostbyname(dom)
                org = IPWhois(addr).lookup(get_referral=True)['nets'][0]['name']
                self.domainCompanyPairing[dom] = org
            orgs.append(org)
        return orgs[0] == orgs[1]
    except:
        return False
def getCIDR(cls, ip):
    """Resolve ip to its registered CIDR block, with caching.

    Offline mode matches ip against the already-seen binary prefixes and
    falls back to a /32. Online mode does a whois lookup, caching both
    successes and failures ("Invalid") in Lookup.seen_pairings.
    """
    Lookup.cidr_tot += 1
    if Lookup.offline:
        for cidr in Lookup.seen_pairings_keys:
            ip_bin = getBinaryRep(ip, cidr)
            if ip_bin in Lookup.seen_pairings[cidr]:
                Lookup.cidr_hit += 1
                return ip_bin
        return getBinaryRep(ip, 32)
    try:
        if ip in Lookup.seen_pairings:
            Lookup.cidr_hit += 1
            return Lookup.seen_pairings[ip]
        results = IPWhois(ip).lookup()
        # Fall back to a /32 when whois gives no usable net/cidr.
        if "nets" not in results or "cidr" not in results["nets"][0]:
            cidr = ip + "/32"
        else:
            cidr = results["nets"][0]["cidr"]
        Lookup.seen_pairings[ip] = cidr
        if cidr:
            Lookup.cidr_hit += 1
            return cidr
        # NOTE(review): a falsy cidr falls through and returns None,
        # matching the original control flow.
    except Exception:  # narrowed from a bare except
        Lookup.seen_pairings[ip] = "Invalid"
        return "Invalid"
def set(self, pagenum, kw):
    # Fetch one page of eBay sold-listing search results for keyword `kw`
    # through the first proxy in self.proxies. If the proxy fails or is
    # not US-based (per whois country code), drop it and recurse to try
    # the next proxy.
    try:
        url = 'http://www.ebay.com/sch/i.html?_from=R40&_sacat=0&LH_Complete=1&LH_Sold=1&LH_ItemCondition=3&_nkw=' + kw + '&_pgn=' + str(pagenum) + '&_ipg=200&rt=nc&_dmd=1'
        if len(self.proxies) > 0:
            countries = get_countries()
            # Whois the proxy's IP (host part before ':') for its country.
            obj = IPWhois(self.proxies[0].split(':')[0])
            results = obj.lookup(False)
            if countries[results['nets'][0]['country']] == "United States":
                if self.getfile(url, self.proxies[0], ".ht") == "error":
                    print("Switching Proxy")
                    self.proxies.pop(0)
                    self.set(pagenum, kw)  # retry with next proxy
                else:
                    print(self.proxies[0])
            else:
                print(countries[results['nets'][0]['country']])
                print("Non-US IP " + self.proxies[0].split(':')[0] + ": Switching Proxy")
                self.proxies.pop(0)
                self.set(pagenum, kw)
        else:
            print("No Proxies in Queue")
    except Exception as e:
        # Best-effort: report and swallow any failure.
        print(str(e))
def domains_whois(domain):
    """Whois the IP behind `domain` and persist the first net record via
    the whoisinfo_db model; failures are printed and swallowed."""
    try:
        ip_addr = domaintoip(domain)
        data = IPWhois(ip_addr)
        out = data.lookup()
        net = out["nets"][0]
        city_data = net['city']
        country_data = net['country']
        description_data = net['description']
        emails_data = net['emails']
        name_data = net['name']
        range_data = net['range']
        # BUG FIX: 'state' was previously copied from the 'range' field.
        state_data = net['state']
        out_email = ("".join(map(str, emails_data)))
        save_data = whoisinfo_db(ip=(ip_addr), sh_domain=(domain),
                                 city=(city_data), country=(country_data),
                                 description=(description_data),
                                 emails=(out_email), name=(name_data),
                                 range=(range_data), state=(state_data))
        save_data.save()
    except Exception as error:
        print(error)
def mytraceroute(destination, timeout):
    # Scapy-based traceroute: probe with UDP (port 33434) at TTL 1..30,
    # print whois info for every public hop, and stop when an ICMP type 3
    # reply indicates the destination answered.
    for i in range(1, 31):
        pkt = IP(dst=destination, ttl=i) / UDP(dport=33434)
        reply = sr1(pkt, verbose=0, timeout=timeout)
        if reply is not None:
            ip_addr = reply.src
            # Private hops cannot be whois'd; just print their address.
            if IPy.IP(ip_addr).iptype() != "PRIVATE":
                obj = IPWhois(ip_addr)
                results = obj.lookup()
                #pp(results)
                if results.get('nets', None):
                    print(ip_addr, ' : ', results.get('asn', '_'), '/',
                          results['nets'][0].get('name', '_'), '/',
                          results.get('asn_country_code', '_'), '/',
                          results['nets'][0].get('description', '_'))
                else:
                    print(ip_addr, ' : ', results.get('asn', '_'), '/',
                          results.get('asn_country_code', '_'))
            else:
                print(ip_addr)
            # ICMP type 3 (unreachable) is treated as "destination reached".
            if reply.type == 3:
                print("Destination reached")
                break
        else:
            print("***")
def __get_query_info__(query_node, user, **kwargs):
    """Return a WhoisConsult for query_node, reusing any consult created
    within the last year; otherwise perform a fresh lookup.

    Pass ip=True for an IPWhois lookup or domain=True for a pythonwhois
    lookup; anything else raises ValueError.
    """
    # NOTE(review): an unused local JSONEncoder subclass (ComplexEncoder)
    # was removed here; nothing in this function referenced it.
    whois_consult = WhoisConsult.objects.filter(
        query_node=query_node,
        created_at__gt=timezone.now() - timezone.timedelta(days=365)).first()
    if whois_consult is None:
        if 'ip' in kwargs:
            results = IPWhois(query_node).lookup()
            whois_consult = WhoisConsult.objects.create(
                query_node=query_node, info_report=results,
                content_object=user)
        elif 'domain' in kwargs:
            w = pythonwhois.get_whois(query_node)
            whois_consult = WhoisConsult.objects.create(
                query_node=query_node, info_report=w, content_object=user)
        else:
            raise ValueError(
                "you must determine is you want to do a domain or ip consultation by __get_query_info" + "__('query', SomeUser, domain=True or ip=True")
    whois_consult.check_info_report(query_node, save=True)
    return whois_consult
def ip_lookup(self, ip):
    """Return [org, lat, lon, isp] for ip, memoized in self.iptable.

    org/isp come from whois ('nets'[-1] / 'nets'[0] descriptions; None
    on lookup failure); lat/lon come from geolite2 (None on no match).
    """
    # first, check if we have seen this ip before
    if ip in self.iptable:
        return self.iptable[ip]
    try:
        obj = IPWhois(ip, timeout=10)  # times out after 10 seconds
        results = obj.lookup(get_referral=True)
        org = results['nets'][-1]['description']
        isp = results['nets'][0]['description']
    except (IPDefinedError, ASNLookupError, ASNRegistryError,
            WhoisLookupError, HostLookupError, BlacklistError,
            AttributeError) as e:
        logger.error('%s from IPWhois on IP %s, setting org & isp to None',
                     e, ip)
        org = isp = None
    except ValueError:
        logger.error(
            'Set org & isp to None, ValueError from IPWhois for IP %s', ip)
        org = isp = None
    # geolite2 returns NoneType if no match
    lat = lon = None
    try:
        match = geolite2.lookup(ip)
        if match and match.location:
            # Falsy coordinates (missing) collapse to None, as before.
            lat = match.location[0] or None
            lon = match.location[1] or None
            # logger.warn is deprecated; use logger.warning.
            if lat is None:
                logger.warning(
                    'Set lat = None, geolite2 unable to find lat for IP %s',
                    ip)
            if lon is None:
                logger.warning(
                    'Set lon = None, geolite2 unable to find lon for IP %s',
                    ip)
        else:
            logger.warning(
                'Set lat & lon = None, geolite2 unable to find lat/lon for IP %s',
                ip)
    except ValueError:
        logger.error(
            'Set lat & lon = None, ValueError from geolite2 for IP %s', ip)
        lat = lon = None
    self.iptable[ip] = [org, lat, lon, isp]
    return self.iptable[ip]
def test_lookup(self):
    """Smoke-test IPWhois.lookup() with one v4/v6 address per registry."""
    ips = [
        '74.125.225.229',  # ARIN
        '2001:4860:4860::8888',
        '62.239.237.1',  # RIPE
        '2a00:2381:ffff::1',
        '210.107.73.73',  # APNIC
        '2001:240:10c:1::ca20:9d1d',
        '200.57.141.161',  # LACNIC
        '2801:10:c000::',
        '196.11.240.215',  # AFRINIC
        '2001:43f8:7b0::',
    ]
    for address in ips:
        whois = IPWhois(address)
        try:
            self.assertIsInstance(whois.lookup(), dict)
        except (ASNLookupError, ASNRegistryError, WhoisLookupError):
            pass  # registry hiccups are tolerated
        except AssertionError as err:
            raise err
        except Exception as err:
            self.fail('Unexpected exception raised: %r' % err)
def check_addr_type(addr, addr_dict):
    """Record the whois net name for addr in addr_dict, skipping the
    local address and anything matched by pattern_list."""
    if addr == local:
        return
    if any(pattern.match(addr) for pattern in pattern_list):
        return
    lookup = IPWhois(addr).lookup()
    domain = lookup['nets'][0]['name']
    if domain is not None:
        addr_dict[addr] = domain
def udemy6(top):
    # Print whois info for the three most-requested IPs in the
    # module-level dict_ips counter. `top`, `p7`, `number_successful`
    # and the log-file contents read below are currently unused.
    # file
    #udemy_logfile = r"C:\code\udemy\log.txt"
    #udemy_logfile = r"C:\code\udemy\log.txt"
    udemy_logfile = r"C:\code\udemy\sre_test_log.txt"
    logfile = open(udemy_logfile, 'r')
    log = logfile.readlines()  # NOTE(review): read but never used
    logfile.close()
    global p7
    number_successful = 0
    # need the IP from the collection
    global p2
    cnt = Counter(dict_ips)
    cnt.most_common()  # NOTE(review): result discarded; likely vestigial
    for ip, times in cnt.most_common(3):
        print('The Ip: %s: Number of Requests: %s' % (ip, times))
        print(ip)
        # obj = IPWhois(get_ip(ip))
        obj = IPWhois(ip)
        results = obj.lookup()
        print(results)
        # results = obj.get_host()
        # print(res["nets"][0]['country']])
        print("Country")
        print(results['nets'][0]['description'])
        #print("Abuse")
        #print(results["nets"][0]['abuse_emails'])
        p2 += 1
    print("################################################")
    print("################################################")
    return ()
def getCIDR(cls, ip):
    """Map ip to a CIDR string, caching results on the Lookup class.

    Offline: match against previously-seen binary prefixes, /32 fallback.
    Online: whois lookup, caching successes and failures ("Invalid").
    """
    if Lookup.offline:
        Lookup.cidr_tot += 1
        for prefix in Lookup.seen_pairings_keys:
            candidate = getBinaryRep(ip, prefix)
            if candidate in Lookup.seen_pairings[prefix]:
                Lookup.cidr_hit += 1
                return candidate
        return getBinaryRep(ip, 32)
    Lookup.cidr_tot += 1
    try:
        if ip in Lookup.seen_pairings:
            Lookup.cidr_hit += 1
            return Lookup.seen_pairings[ip]
        whois_data = IPWhois(ip).lookup()
        if "nets" not in whois_data.keys() or "cidr" not in whois_data["nets"][0].keys():
            block = ip + "/32"
        else:
            block = whois_data["nets"][0]["cidr"]
        Lookup.seen_pairings[ip] = block
        if block:
            Lookup.cidr_hit += 1
            return block
    except:
        Lookup.seen_pairings[ip] = "Invalid"
        return "Invalid"
def geoinfo(self):
    """Return the first whois 'nets' entry for self.ip, or None when
    the lookup fails for any reason."""
    try:
        data = IPWhois(self.ip)
        return data.lookup(False)['nets'][0]
    except Exception:  # narrowed from a bare except; still best-effort
        # raise  # uncomment to debug lookup failures
        return None
def whois(ip):
    """Whois lookup for ip; returns the result dict, or None when the
    registry lookup fails."""
    try:
        response = IPWhois(ip).lookup()
    except ipwhois.exceptions.WhoisLookupError:
        return None
    return response
def getIPandWhoIsData(self, url):
    """Resolve url to an IP, whois it, and return {url: whois-dict}
    (with the resolved IP added under 'resolved_IP'); empty dict on
    any failure."""
    try:
        resolved = socket.gethostbyname(url)
        info = IPWhois(resolved).lookup()
        info['resolved_IP'] = resolved
        return {url: info}
    except Exception:
        return dict()
def test_lookup(self):
    """IPWhois.lookup() should yield a dict for a known ARIN address."""
    whois = IPWhois('74.125.225.229')
    try:
        self.assertIsInstance(whois.lookup(), dict)
    except (ASNLookupError, WhoisLookupError):
        pass  # registry hiccups are tolerated
    except AssertionError as err:
        raise err
    except Exception as err:
        self.fail('Unexpected exception raised: %r' % err)
def queryIP(self, ip):
    """Run a whois query for ip, storing results on self.result (any
    exception is appended to self.result['exceptions']); returns self
    for chaining."""
    try:
        self.result['type'] = 'ip'
        self.result['keyword'] = ip
        self.ip = ip
        whois_client = IPWhois(self.ip)
        self.result['whois'] = whois_client.lookup()
    except Exception as e:
        self.result['exceptions'].append(e)
    return self
def get_country_by_ip(self, ip_addr):
    """Map ip_addr to a res.country record via its whois country code;
    returns None for reserved/private addresses."""
    try:
        whois_info = IPWhois(ip_addr)
    except IPDefinedError:
        return None
    lookup = whois_info.lookup(False)
    code = lookup['nets'][0]['country']
    return request.env['res.country'].search([('code', '=', code)])
def run(self):
    # Whois the destination IP and flatten all non-empty fields into a
    # single "key: value,..." string; empty string on any failure.
    try:
        ip_whois = IPWhois(self._dst_ip)
        raw_res = ip_whois.lookup()
        res = []
        # Python 2 dict iteration; skip fields with no value.
        for k,v in raw_res.iteritems():
            if not v is None:
                res.append("%s: %s" % (k,v))
        return ",".join(res)
    except Exception, e:  # Python 2 except syntax; best-effort result
        return ""
def run(self):
    # Whois self._dst_ip and join every non-None result field into one
    # comma-separated "key: value" string; returns "" on any failure.
    try:
        ip_whois = IPWhois(self._dst_ip)
        raw_res = ip_whois.lookup()
        res = []
        for k, v in raw_res.iteritems():  # Python 2 dict API
            if not v is None:
                res.append("%s: %s" % (k, v))
        return ",".join(res)
    except Exception, e:  # Python 2 except syntax
        return ""
def ipWhois(self, ip):
    """Return the whois lookup dict for ip, or None when ip fails
    self.checkIfIP validation or the lookup raises."""
    out = None
    try:
        if self.checkIfIP(ip):
            out = IPWhois(ip).lookup()
    except Exception:
        # Narrowed from a bare except (which also had a dead `pass`);
        # keep the best-effort None result.
        out = None
    return out
def getIPandWhoIsData(self, url):
    """Resolve url and return {url: whois-data} with 'resolved_IP'
    added; {} if resolution or the whois lookup fails."""
    result = dict()
    try:
        address = socket.gethostbyname(url)
        details = IPWhois(address).lookup()
        details['resolved_IP'] = address
        result = {url: details}
    except Exception:
        pass
    return result
def resolveDomainIpwhois(dom):
    # Annotate every resolved address in dom['ipaddr'] with its whois
    # record (stored under 'whois'), sleeping 1s between lookups to be
    # polite to the registries. Python 2 print statements.
    print "... ipwhois"
    i = 0
    while(i < len(dom['ipaddr'])):
        time.sleep(1)
        ip_whois = IPWhois( dom['ipaddr'][i]['ipaddr'] )
        dom['ipaddr'][i]['whois'] = ip_whois.lookup()
        pprint(dom['ipaddr'][i]['whois'])
        i += 1
def analyze(ip, results):
    # Enrich an IP observable with network-whois data: link it to the
    # owning Company (and that company's contact emails), and attach a
    # trimmed whois record to the IP's context. Returns the new links.
    links = set()
    r = IPWhois(ip.value)
    result = r.lookup()
    results.update(raw=pformat(result))
    # Let's focus on the most specific information
    # Which should be in the smallest subnet
    n = 0
    smallest_subnet = None
    for network in result['nets']:
        # cidr may be a comma-separated list; take the first prefix length.
        cidr_bits = int(network['cidr'].split('/')[1].split(',')[0])
        if cidr_bits > n:
            n = cidr_bits
            smallest_subnet = network
    if smallest_subnet:
        # Create the company
        company = Company.get_or_create(
            name=smallest_subnet['description'].split("\n")[0])
        links.update(ip.active_link_to(company, 'hosting', 'Network Whois'))
        # Link it to every email address referenced
        if smallest_subnet['emails']:
            for email_address in smallest_subnet['emails'].split("\n"):
                email = Email.get_or_create(value=email_address)
                links.update(company.link_to(email, None, 'Network Whois'))
        # Copy the subnet info into the main dict
        for key in smallest_subnet:
            if smallest_subnet[key]:
                result["net_{}".format(key)] = smallest_subnet[key]
    # Add the network whois to the context if not already present
    # (for/else: the else runs only when no existing context matched).
    for context in ip.context:
        if context['source'] == 'network_whois':
            break
    else:
        # Remove the nets info (the main one was copied)
        result.pop("nets", None)
        result.pop("raw", None)
        result.pop("raw_referral", None)
        result.pop("referral", None)
        result.pop("query", None)
        result['source'] = 'network_whois'
        ip.add_context(result)
    return list(links)
def gather(self, all_ips):
    # Fill in ip_whois for every IP object that does not have it yet.
    # all_ips maps path -> [ip_obj, ...]; only the first element is used.
    for path, incoming_ip_obj in all_ips.iteritems():  # Python 2 dict API
        if incoming_ip_obj[0].ip_whois == "":
            try:
                print "Gathering whois information about " + incoming_ip_obj[0].ip_address
                ip_whois = IPWhois(incoming_ip_obj[0].ip_address)
                incoming_ip_obj[0].ip_whois = ip_whois.lookup()
            except IPDefinedError:
                # Reserved/private address: nothing to look up.
                print helpers.color("[*] Error: Private IP address, skipping IP!", warning=True)
    return
def check_site_hosting(url):
    """Return who hosts url: 'crowdmap' for netlocs ending in
    crowdmap.com, otherwise the whois net description of the resolved
    address."""
    netlocation = urlparse.urlparse(url).netloc
    #urlcentre = url.split("://")[1].strip("/");
    if netlocation.endswith("crowdmap.com"):
        owner = "crowdmap"
    else:
        ipaddress = socket.gethostbyname(netlocation)
        #response = os.system("ping -c 1 " + ipaddress); #0 = site's up and okay
        whois_result = IPWhois(ipaddress).lookup()
        owner = whois_result['nets'][0]['description']
    return owner
def check_site_hosting(url):
    """Identify the organisation hosting url (early-return style)."""
    netlocation = urlparse.urlparse(url).netloc
    #urlcentre = url.split("://")[1].strip("/");
    if netlocation[-12:] == "crowdmap.com":
        return "crowdmap"
    ipaddress = socket.gethostbyname(netlocation)
    #response = os.system("ping -c 1 " + ipaddress); #0 = site's up and okay
    res = IPWhois(ipaddress).lookup()
    return res['nets'][0]['description']
def classify(self, phish):
    # Flag `phish` as anomalous when its Received-header CIDR sequence is
    # new for this (sender, receiver) pair AND differs from every known
    # sequence by more than EDIT_DISTANCE_THRESHOLD.
    RHList = []
    sender = extract_email(phish, "From")
    receiver = extract_email(phish, "To")
    if (sender, receiver) not in self.srp:
        return False
    srp = self.srp[(sender, receiver)]
    if phish.get_all("Received"):
        for recHeader in phish.get_all("Received"):
            recHeader = ReceivedHeader(recHeader)
            if not "from" in recHeader.breakdown.keys():
                RHList.append("None")
                continue
            elif self.public_domain(recHeader.breakdown["from"]):
                ip = self.public_domain(recHeader.breakdown["from"])
            elif self.public_IP(recHeader.breakdown["from"]):
                ip = self.public_IP(recHeader.breakdown["from"])
            else:
                # RHList.append("InvalidFrom")
                RHList.append("Invalid")
                continue
            try:
                # import pdb; pdb.set_trace()
                # Cache hit avoids a network whois per header.
                if ip in self.seen_pairings.keys():
                    RHList.append(self.seen_pairings[ip])
                else:
                    obj = IPWhois(ip)
                    results = obj.lookup()
                    # Fall back to /32 when whois gives no usable cidr.
                    if "nets" not in results.keys() or "cidr" not in results["nets"][0].keys():
                        cidr = ip + "/32"
                    else:
                        cidr = results["nets"][0]["cidr"]
                    RHList.append(cidr)
                    self.seen_pairings[ip] = cidr
            except:
                # RHList.append("InvalidIPWhoIs")
                RHList.append("Invalid")
                self.seen_pairings[ip] = "Invalid"
    if RHList not in srp.received_header_sequences:
        if srp.received_header_sequences:
            # Minimum edit distance to any known sequence.
            bestEditDist = None
            for lst in srp.received_header_sequences:
                ed = editdistance.eval(RHList, lst)
                if bestEditDist == None or bestEditDist > ed:
                    bestEditDist = ed
            if bestEditDist > self.EDIT_DISTANCE_THRESHOLD:
                return True
    return False
def classify(self, phish):
    # Anomaly detector: build the CIDR sequence of this message's
    # Received headers and compare it (by edit distance) against the
    # sequences already recorded for the (sender, receiver) pair.
    RHList = []
    sender = extract_email(phish, "From")
    receiver = extract_email(phish, "To")
    if (sender, receiver) not in self.srp:
        return False
    srp = self.srp[(sender, receiver)]
    if phish.get_all("Received"):
        for recHeader in phish.get_all("Received"):
            recHeader = ReceivedHeader(recHeader)
            if not "from" in recHeader.breakdown.keys():
                RHList.append("None")
                continue
            elif self.public_domain(recHeader.breakdown["from"]):
                ip = self.public_domain(recHeader.breakdown["from"])
            elif self.public_IP(recHeader.breakdown["from"]):
                ip = self.public_IP(recHeader.breakdown["from"])
            else:
                # RHList.append("InvalidFrom")
                RHList.append("Invalid")
                continue
            try:
                # import pdb; pdb.set_trace()
                if ip in self.seen_pairings.keys():
                    RHList.append(self.seen_pairings[ip])
                else:
                    obj = IPWhois(ip)
                    results = obj.lookup()
                    if "nets" not in results.keys() or "cidr" not in results["nets"][0].keys():
                        cidr = ip + "/32"
                    else:
                        cidr = results["nets"][0]["cidr"]
                    RHList.append(cidr)
                    self.seen_pairings[ip] = cidr
            except:
                # RHList.append("InvalidIPWhoIs")
                RHList.append("Invalid")
                self.seen_pairings[ip] = "Invalid"
    if RHList not in srp.received_header_sequences:
        if srp.received_header_sequences:
            bestEditDist = None
            for lst in srp.received_header_sequences:
                ed = editdistance.eval(RHList, lst)
                if bestEditDist == None or bestEditDist > ed:
                    bestEditDist = ed
            # Too far from everything previously seen -> suspicious.
            if bestEditDist > self.EDIT_DISTANCE_THRESHOLD:
                return True
    return False
def get_cidre_from_lookup(ip, field):
    """Whois-lookup ip and return the requested field ('range',
    'description' or 'cidr') from the last nets entry; None for the
    null address or an unknown field.
    """
    # BUG FIX: the original guard was `ip not in "0.0.0.0"` — a
    # substring test that also skipped lookups for e.g. "0.0".
    if ip == "0.0.0.0":
        return None
    res = IPWhois(ip).lookup()
    net = res['nets'][-1]
    if field == 'range':
        return net['range']
    elif field == 'description':
        return net['description']
    elif field == 'cidr':
        return net['cidr']
    else:
        return None
def analyze(ip, results):
    # Network-whois enrichment for an IP observable: pick the most
    # specific net (largest prefix length), link the IP to its Company
    # and the company to its contact emails, then store the record in
    # the IP's context. Returns the links that were created.
    links = set()
    r = IPWhois(ip.value)
    result = r.lookup()
    results.update(raw=pformat(result))
    # Let's focus on the most specific information
    # Which should be in the smallest subnet
    n = 0
    smallest_subnet = None
    for network in result['nets']:
        # 'cidr' can hold several comma-separated blocks; use the first.
        cidr_bits = int(network['cidr'].split('/')[1].split(',')[0])
        if cidr_bits > n:
            n = cidr_bits
            smallest_subnet = network
    if smallest_subnet:
        # Create the company
        company = Company.get_or_create(name=smallest_subnet['description'].split("\n")[0])
        links.update(ip.active_link_to(company, 'hosting', 'Network Whois'))
        # Link it to every email address referenced
        if smallest_subnet['emails']:
            for email_address in smallest_subnet['emails'].split("\n"):
                email = Email.get_or_create(value=email_address)
                links.update(company.link_to(email, None, 'Network Whois'))
        # Copy the subnet info into the main dict
        for key in smallest_subnet:
            if smallest_subnet[key]:
                result["net_{}".format(key)] = smallest_subnet[key]
    # Add the network whois to the context if not already present
    # (note the for/else: else runs when the loop never breaks).
    for context in ip.context:
        if context['source'] == 'network_whois':
            break
    else:
        # Remove the nets info (the main one was copied)
        result.pop("nets", None)
        result.pop("raw", None)
        result.pop("raw_referral", None)
        result.pop("referral", None)
        result.pop("query", None)
        result['source'] = 'network_whois'
        ip.add_context(result)
    return list(links)
def traceroute(dest_name, port, max_hops):
    """UDP traceroute to dest_name, printing each hop plus whois
    country/company info for public hops.

    Requires privileges to open a raw ICMP receive socket.
    """
    dest_addr = socket.gethostbyname(dest_name)
    print("traceroute {0}({1})".format(dest_name, dest_addr))
    socket.setdefaulttimeout(10)
    icmp = socket.getprotobyname("icmp")
    udp = socket.getprotobyname("udp")
    ttl = 1
    while True:
        recv_socket = socket.socket(socket.AF_INET, socket.SOCK_RAW, icmp)
        send_socket = socket.socket(socket.AF_INET, socket.SOCK_DGRAM, udp)
        send_socket.setsockopt(socket.SOL_IP, socket.IP_TTL, ttl)
        recv_socket.bind(("", port))
        send_socket.sendto(bytes(512), (dest_addr, port))
        # BUG FIX: curr_addr/curr_name were unbound on a first-iteration
        # timeout (NameError below) and stale on later timeouts.
        curr_name = curr_addr = None
        try:
            curr_name, curr_addr = recv_socket.recvfrom(512)
            curr_addr = curr_addr[0]
            try:
                curr_name = socket.gethostbyaddr(curr_addr)[0]
            except socket.error:
                curr_name = curr_addr  # no reverse DNS; show the address
        except socket.error:
            pass  # timed out; hop stays unknown
        send_socket.close()
        recv_socket.close()
        if curr_addr is not None:
            print("{0} {1}({2})".format(ttl, curr_name, curr_addr))
            try:
                obj = IPWhois(curr_addr)
            except IPDefinedError:
                print("local host")
            else:
                who = obj.lookup()
                nets = who['nets']
                country = nets[0]['country']
                company = nets[0]['description']
                print("country:{0} company:{1}\n".format(country, company))
        else:
            print("{} *".format(ttl))
        ttl += 1
        if curr_name == dest_name or curr_addr == dest_addr or ttl > max_hops:
            break
def gather(self, all_ips):
    # Populate ip_whois for each IP object that has an address but no
    # whois data yet; tolerates private IPs and offline lookups.
    for path, incoming_ip_obj in all_ips.iteritems():  # Python 2 dict API
        if incoming_ip_obj[0].ip_whois == "" and incoming_ip_obj[0].ip_address != "":
            try:
                print "Gathering whois information about " + incoming_ip_obj[0].ip_address
                ip_whois = IPWhois(incoming_ip_obj[0].ip_address)
                incoming_ip_obj[0].ip_whois = ip_whois.lookup()
            except IPDefinedError:
                # Reserved/private address: nothing to look up.
                print helpers.color("[*] Error: Private IP address, skipping IP!", warning=True)
            except HTTPLookupError:
                print helpers.color("Could not connect online to lookup whois for " + incoming_ip_obj[0].domain_name, warning=True)
    return
def getIPandWhoIsData(self, url):
    """Resolve url, whois it, and log the lookup latency to
    self.ipWhoIsLatencyFile (guarded by self.ipWhoIsLatencyMutex).
    Returns {url: whois-dict} with 'resolved_IP' added, or {} on
    failure."""
    try:
        address = socket.gethostbyname(url)
        whois_client = IPWhois(address)
        started = time.time()
        data = whois_client.lookup()
        elapsed = time.time() - started
        self.ipWhoIsLatencyMutex.acquire()
        self.ipWhoIsLatencyFile.write(url + ',' + str(elapsed) + '\n')
        self.ipWhoIsLatencyMutex.release()
        data['resolved_IP'] = address
        return {url: data}
    except Exception:
        return dict()
def get_ip_whois_info(ipaddr): ''' Return Value whois_result_json: ip whois infomation with json format _ : ip hostname, look like gethostbyname(ip) ''' try: obj = IPWhois(ipaddr) except IPDefinedError: if __DEBUG__: print "fine we get prive ip ;-)" return -1 result = obj.lookup() dns_zone = obj.dns_zone return whois_result_json, osb.get_host()
def ip_lookup(self, ip):
    # given ip, look up org, isp, lat and lon
    # Results are memoized per-IP in self.iptable as [org, lat, lon, isp].
    # Whois failures leave org/isp None; geolite2 misses leave lat/lon None.
    # first, check if we have seen this ip before
    if ip in self.iptable:
        return self.iptable[ip]
    try:
        obj = IPWhois(ip, timeout = 10)  # times out after 10 seconds
        results = obj.lookup(get_referral=True)
        # nets[-1] is taken as the org, nets[0] as the ISP
        # (reviewer note: heuristic — confirm against ipwhois net ordering).
        org = results['nets'][-1]['description']
        isp = results['nets'][0]['description']
    except (IPDefinedError, ASNLookupError, ASNRegistryError, WhoisLookupError, HostLookupError, BlacklistError, AttributeError) as e:
        # log bad ip and error
        logger.error('%s from IPWhois on IP %s, setting org & isp to None', e, ip)
        org = isp = None
    except ValueError:
        logger.error('Set org & isp to None, ValueError from IPWhois for IP %s', ip)
        org = isp = None
    # geolite2 returns NoneType if no match
    try:
        match = geolite2.lookup(ip)
        if match:
            if match.location:
                # Falsy coordinates (e.g. missing) are treated as absent.
                if match.location[0]:
                    lat = match.location[0]
                else:
                    lat = None
                    logger.warn('Set lat = None, geolite2 unable to find lat for IP %s', ip)
                if match.location[1]:
                    lon = match.location[1]
                else:
                    lon = None
                    logger.warn('Set lon = None, geolite2 unable to find lon for IP %s', ip)
            else:
                lat = lon = None
                logger.warn('Set lat & lon = None, geolite2 unable to find lat/lon for IP %s', ip)
        else:
            # log unable to find lat/lon for this ip
            logger.warn('Set lat & lon = None, geolite2 unable to find lat/lon for IP %s', ip)
            lat = lon = None
    except ValueError:
        # log bad ip and error
        logger.error('Set lat & lon = None, ValueError from geolite2 for IP %s', ip)
        lat = lon = None
    self.iptable[ip] = [org, lat, lon, isp]
    return self.iptable[ip]
def get_ip_info(ip):
    '''
    Gets the whois info for the ip address
    ip: an ip address parsed out of the log file.
    Returns: dict of information relating to the ip address
    NOTE: abandoned this because it was way too slow and the results are
    very unstructured.
    '''
    obj = IPWhois(ip, timeout=1)
    ip_log_info = obj.lookup()  # get_referral=True for RWhois calls instead
    le = ip_log_info['nets'][-1]  # last_entry ...more likely to the the organization?
    # BUG FIX: the format string had four placeholders for five values,
    # silently dropping the country.
    org_address = "{} {} {} {} {}".format(le['address'], le['city'],
                                          le['state'], le['postal_code'],
                                          le['country'])
    return {'organization': le['description'].replace('\n', ' '),
            'address': org_address}
def gather(self, all_ips):
    # Gather whois data for each IP object lacking it; private IPs are
    # reported and skipped. all_ips maps path -> [ip_obj, ...].
    for path, incoming_ip_obj in all_ips.iteritems():  # Python 2 dict API
        if incoming_ip_obj[0].ip_whois == "":
            try:
                print "Gathering whois information about " + incoming_ip_obj[0].ip_address
                ip_whois = IPWhois(incoming_ip_obj[0].ip_address)
                incoming_ip_obj[0].ip_whois = ip_whois.lookup()
            except IPDefinedError:
                print helpers.color("[*] Error: Private IP address, skipping IP!", warning=True)
    return
def get_ipwhois(ip):
    """Return the whois lookup dict for ip, or a string describing the
    error when construction of the IPWhois client fails."""
    try:
        ipwhois_query = IPWhois(ip)
        logger.debug(ipwhois_query)
    except (ASNLookupError, IPDefinedError, ASNRegistryError,
            HostLookupError, BlacklistError) as e:
        # Collapsed five identical except-blocks into one tuple; each
        # previously did exactly `return str(e)`.
        return str(e)
    return ipwhois_query.lookup()
def whois_ip(ip, subcluster = None):
    # Whois the IP node's address and link it to an Organization node via
    # a "has_ip" Relation, copying the chosen net's whois fields onto the
    # relation as Properties; optionally tags relation and org with
    # subcluster. Returns False when the whois lookup fails.
    #i = ip.key_property.value
    i = ip.value
    results = None
    try:
        obj = IPWhois(i)
        results = obj.lookup()
    except:
        return False
    if results:
        # Sort nets by their cidr string, descending, and use the first.
        nets = sorted(results["nets"], key=lambda n:n["cidr"], reverse=True)
        properties = nets[0]
        name = properties["description"].split("\n")[0]
        org = get_node_on_db("Organization", "description", name)
        if org:
            type, created = RelType.objects.get_or_create(name="has_ip")
            if type and org and ip:
                rel, created = Relation.objects.get_or_create(
                    type = type,
                    src = org,
                    dst = ip,
                )
                # Track first/last sighting timestamps on the relation.
                if created:
                    rel.firstseen = datetime.now()
                    rel.lastseen = datetime.now()
                else:
                    rel.lastseen = datetime.now()
                for k,v in properties.iteritems():  # Python 2 dict API
                    if k and v:
                        pk, created = PropertyKey.objects.get_or_create(
                            name = k
                        )
                        p, created = Property.objects.get_or_create(
                            key = pk,
                            value = v,
                        )
                        if not p in rel.properties.all():
                            rel.properties.add(p)
                            rel.save()
                if rel and subcluster:
                    if not subcluster in rel.subcluster.all():
                        rel.subcluster.add(subcluster)
                        rel.save()
                    if not subcluster in org.subcluster.all():
                        org.subcluster.add(subcluster)
                        org.save()
def getIPandWhoIsData(self, url):
    """Resolve url, time the whois lookup, and append 'url,latency' to
    self.ipWhoIsLatencyFile under self.ipWhoIsLatencyMutex. Returns
    {url: whois-dict} (with 'resolved_IP'), or {} on any failure."""
    try:
        resolved = socket.gethostbyname(url)
        client = IPWhois(resolved)
        t0 = time.time()
        whois_data = client.lookup()
        t1 = time.time()
        self.ipWhoIsLatencyMutex.acquire()
        self.ipWhoIsLatencyFile.write(url + ',' + str(t1 - t0) + '\n')
        self.ipWhoIsLatencyMutex.release()
        whois_data['resolved_IP'] = resolved
        return {url: whois_data}
    except Exception:
        return dict()
def find_abuse_emails(attacks):
    """Fill in abuse contact addresses for each AttackAttempt.

    For every attack without abuse emails, whois its IP and collect the
    registries' abuse/misc/tech email fields; when the parser finds
    nothing, fall back to a regex sweep of the raw whois text.

    :param attacks: dict of AttackAttempt objects keyed by attacker.
    :return: the same dict with abuse_emails populated.
    """
    for key, attack in attacks.items():
        if len(attack.abuse_emails) != 0:
            continue  # already resolved earlier
        whois_client = IPWhois(attack.ip)
        module_logger.info("Looking up whois data on %s" % attack.ip)
        results = whois_client.lookup(inc_raw=True)
        abuse_emails = []
        other_emails = []
        for net in results["nets"]:
            if net["abuse_emails"] is not None:
                abuse_emails.append(net["abuse_emails"])
            for field in ("misc_emails", "tech_emails"):
                if net[field] is not None:
                    other_emails.append(net[field])
        # If no other e-mails are found try searching the raw data.
        if not abuse_emails and not other_emails:
            module_logger.debug("ipwhois parser did not find any emails, trying regex on raw.")
            matches = re.findall(r"[A-Za-z0-9\.\+_-]+@[A-Za-z0-9\._-]+\.[a-zA-Z]{2,4}",
                                 results["raw"])
            if len(matches) != 0:
                abuse_emails = matches
        if not abuse_emails and other_emails:
            abuse_emails = other_emails
        attacks[key].abuse_emails = abuse_emails
    return attacks
def getWhoIs(ipadress):
    # Whois-lookup ipadress and return "COUNTRY;DESCRIPTION" (newlines
    # stripped), memoized in the module-level dictSaveWhoIs cache.
    if dictSaveWhoIs.has_key(ipadress):  # Python 2 dict API
        print "getWhoIs cache"
        return dictSaveWhoIs[ipadress]
    else:
        obj = IPWhois(ipadress)
        results = obj.lookup()
        nets = results['nets']
        if len(nets) > 0:
            nets = nets[0]
            retStr = results['asn_country_code'] + ";" + nets['description']
            retStr = retStr.replace('\n', '')
        else:
            # No net records: keep the separator so callers can split.
            retStr = ";"
        print "getWhoIs lookup"
        dictSaveWhoIs[ipadress] = retStr
        return retStr
def whois_tool(rline):
    """Extract an address from rline via re_addr, whois it, and format
    an "addr country" line. Currently only prints a blank line; the
    formatted value is kept in the local last_addr (reviewer note:
    presumably meant to be printed or returned — confirm with callers).
    """
    m = re_addr.search(rline)
    if m is not None:
        obj = IPWhois(m.group(1))
        lookup_result = obj.lookup()
        cn = lookup_result['asn_country_code']
        # BUG FIX: county_dic[cn] raised KeyError for unknown country
        # codes; .get() lets those fall back to the bare address.
        country = county_dic.get(cn)
        if country is not None:
            last_addr = "%s %s" % (m.group(1), country)
        else:
            last_addr = m.group(1)
        # print "%s - %s" % (cn,last_addr)
        print
def _whois(ip, org_names):
    """Return the cleaned whois network name for ip (or a flow object,
    which is first resolved via _get_flow_ip), caching per-IP results
    in _whois_cache. Tries RDAP first, then legacy whois; any failure
    yields 'OTHER'."""
    from ipwhois import IPWhois
    if type(ip) is not str:
        ip = _get_flow_ip(ip)
    if ip not in _whois_cache:
        whois = IPWhois(ip)
        try:
            name = whois.lookup_rdap()['network']['name']
            if not name:
                # RDAP gave an empty name; fall back to legacy whois.
                name = whois.lookup()['nets'][0]['name']
        except Exception:  # narrowed from a bare except
            print("WHOIS ERROR")
            name = 'OTHER'
        _whois_cache[ip] = _clean_netname(org_names, name, ip)
    return _whois_cache[ip]
def whois_ip(ip, subcluster=None):
    # Link an IP node to the Organization owning it (per whois) through
    # a "has_ip" Relation carrying the net's whois fields as Properties.
    # Optionally tags the relation and organization with subcluster.
    # Returns False when the whois lookup fails.
    #i = ip.key_property.value
    i = ip.value
    results = None
    try:
        obj = IPWhois(i)
        results = obj.lookup()
    except:
        return False
    if results:
        # Nets sorted by cidr string (descending); first one is used.
        nets = sorted(results["nets"], key=lambda n: n["cidr"], reverse=True)
        properties = nets[0]
        name = properties["description"].split("\n")[0]
        org = get_node_on_db("Organization", "description", name)
        if org:
            type, created = RelType.objects.get_or_create(name="has_ip")
            if type and org and ip:
                rel, created = Relation.objects.get_or_create(
                    type=type,
                    src=org,
                    dst=ip,
                )
                # Maintain first/last sighting timestamps.
                if created:
                    rel.firstseen = datetime.now()
                    rel.lastseen = datetime.now()
                else:
                    rel.lastseen = datetime.now()
                for k, v in properties.iteritems():  # Python 2 dict API
                    if k and v:
                        pk, created = PropertyKey.objects.get_or_create(name=k)
                        p, created = Property.objects.get_or_create(
                            key=pk,
                            value=v,
                        )
                        if not p in rel.properties.all():
                            rel.properties.add(p)
                            rel.save()
                if rel and subcluster:
                    if not subcluster in rel.subcluster.all():
                        rel.subcluster.add(subcluster)
                        rel.save()
                    if not subcluster in org.subcluster.all():
                        org.subcluster.add(subcluster)
                        org.save()
def get_asn2(data):
    # find as number with ipwhois modules
    # Resolve data (domain name or IP literal) and return
    # (as_number, subnet) from a whois lookup; 0 / '' when unavailable.
    # NOTE(review): if data is neither a valid domain nor a valid IP,
    # `ip` is unbound and IPWhois(ip) raises NameError — confirm callers
    # always pass one or the other.
    if chk_domain(data):
        ip, c_name = retIP(data)
    if chk_ip(data):
        ip = data
    obj = IPWhois(ip)
    results = obj.lookup()
    as_number = 0
    subnet = ''
    try:
        if results.has_key('asn'):  # Python 2 dict API
            as_number = int(results['asn'])
    except:
        # Non-numeric ASN string: keep the 0 default.
        pass
    if results.has_key('asn_cidr'):
        subnet = results['asn_cidr']
    return as_number, subnet
def get_data(ip):
    """Whois ip and append a row [ip, owner, country, org, server, desc]
    to the module-level `table`; failures are recorded in the
    module-level `exceptions` list."""
    try:
        obj = IPWhois(ip)
        # BUG FIX (cleanup): dropped a pointless json.dumps/json.loads
        # round trip — lookup() already returns a plain dict.
        results = obj.lookup()['nets'][0]
        owner = results['name'].strip()
        country = results['country'].strip()
        org = get_org(ip)
        server = get_server(ip)
        desc = ' '.join(results['description'].split('\n'))
        text = '%s, %s, %s, %s, %s,%s' % (ip, owner, country, org, server, desc)
        table.append(text.split(','))
    except requests.exceptions.Timeout:
        exceptions.append('Timeout - %s' % ip)
    except requests.exceptions.ConnectionError:
        exceptions.append('Connection error - %s' % ip)
    except Exception as e:
        exceptions.append(str(e))
def get_data(ip):
    """Collect whois-derived fields for ip into the module-level `table`,
    logging failures into the module-level `exceptions` list."""
    try:
        whois_obj = IPWhois(ip)
        net0 = json.loads(json.dumps(whois_obj.lookup()))['nets'][0]
        owner_name = net0['name'].strip()
        country_code = net0['country'].strip()
        organisation = get_org(ip)
        server_name = get_server(ip)
        description = ' '.join(net0['description'].split('\n'))
        row = '%s, %s, %s, %s, %s,%s' % (ip, owner_name, country_code,
                                         organisation, server_name,
                                         description)
        table.append(row.split(','))
    except requests.exceptions.Timeout:
        exceptions.append('Timeout - %s' % ip)
    except requests.exceptions.ConnectionError:
        exceptions.append('Connection error - %s' % ip)
    except Exception as e:
        exceptions.append(str(e))
def domain_whois(domain):
    """Whois the IP behind `domain` and persist the first net record via
    the whoisinfo_db model; failures are printed and swallowed."""
    try:
        ip_addr = domaintoip(domain)
        data = IPWhois(ip_addr)
        out = data.lookup()
        net = out["nets"][0]
        city_data = net['city']
        country_data = net['country']
        description_data = net['description']
        emails_data = net['emails']
        name_data = net['name']
        range_data = net['range']
        # BUG FIX: 'state' was previously copied from the 'range' field.
        state_data = net['state']
        out_email = ("".join(map(str, emails_data)))
        save_data = whoisinfo_db(ip=(ip_addr), sh_domain=(domain),
                                 city=(city_data), country=(country_data),
                                 description=(description_data),
                                 emails=(out_email), name=(name_data),
                                 range=(range_data), state=(state_data))
        save_data.save()
    except Exception as error:
        print(error)
def gather(self, all_ips):
    # Whois every IP object that has an address but no whois data yet;
    # private addresses and offline lookups are reported and skipped.
    for path, incoming_ip_obj in all_ips.iteritems():  # Python 2 dict API
        if incoming_ip_obj[0].ip_whois == "" and incoming_ip_obj[0].ip_address != "":
            try:
                print "Gathering whois information about " + incoming_ip_obj[0].ip_address
                ip_whois = IPWhois(incoming_ip_obj[0].ip_address)
                incoming_ip_obj[0].ip_whois = ip_whois.lookup()
            except IPDefinedError:
                print helpers.color("[*] Error: Private IP address, skipping IP!", warning=True)
            except HTTPLookupError:
                print helpers.color("Could not connect online to lookup whois for " + incoming_ip_obj[0].domain_name, warning=True)
    return
def run(self, domain, start_time=""):
    """Whois-enrich a domain.

    :param domain: domain name to look up
    :param start_time: ISO 8601 combined date/time string
        (e.g. 2014-11-01T10:34Z) or datetime object
    :return: a networkx graph representing the whois information,
        built by self.enrich_record
    """
    ip = socket.gethostbyname(domain)  # This has a habit of failing
    record = [None] * 10
    results = IPWhois(ip).lookup()
    nets = results.pop("nets")
    for idx, net in enumerate(nets):
        record[0] = idx
        # Prefer the 'updated' date; fall back to 'created'.
        if "updated" in net:
            record[1] = net['updated'][:10]
        elif "created" in net:
            record[1] = net['created'][:10]
        record[2] = domain
        # Copy the simple whois fields into their record slots.
        for pos, field in ((3, 'name'), (4, 'organization'),
                           (5, 'address'), (6, 'city'), (7, 'state'),
                           (8, 'country')):
            if field in net:
                record[pos] = net[field]
        if 'misc_emails' in net and net['misc_emails'] is not None:
            record[9] = net['misc_emails'].split("\n")[0]
    return self.enrich_record(record, start_time)
def _whois_ip(ip):
    """Whois lookup (raw included) plus reverse DNS for ip.

    Returns the lookup dict with a 'reverse' key added; when the lookup
    itself fails, the dict carries an 'error' key instead (and 'reverse'
    then holds the reverse-lookup error string)."""
    result = {}
    obj = None
    try:
        obj = IPWhois(ip)
        result = obj.lookup(inc_raw=True)
        logger.debug(result["nets"])
    except Exception as e:
        logger.debug(e)
        result["error"] = str(e)
    if not result:
        return result
    result["reverse"] = None
    try:
        rev = obj.net.get_host()
        logger.debug(rev)
        if rev:
            result["reverse"] = rev
    except Exception as e:
        logger.debug(e)
        result["reverse"] = str(e)
    return result
def get_owner_ip(log):
    # Print whois ownership info for the three most frequent IPs in the
    # module-level dict_ips counter; bumps the global counter p2 per IP.
    # (`log` is currently unused.)
    # need the IP from the collection
    global p2
    cnt = Counter(dict_ips)
    cnt.most_common()  # NOTE(review): result discarded; likely vestigial
    for ip, times in cnt.most_common(3):
        print('The Ip: %s: Number of Requests: %s' % (ip, times))
        print(ip)
        # obj = IPWhois(get_ip(ip))
        obj = IPWhois(ip)
        results = obj.lookup()
        # print(results)
        # results = obj.get_host()
        # print(res["nets"][0]['country']])
        print("Country")
        print(results['nets'][0]['description'])
        #print("Abuse")
        #print(results["nets"][0]['abuse_emails'])
        p2 += 1
    print("################################################")
    print("################################################")
    return ()
from ipwhois import IPWhois from pprint import pprint client = MongoClient('mongodb://localhost:27017/') db = client['netwatch'] coll = db['grosmatou_11_03'] seen_host = {} for host in coll.find({"DNS": {"$exists": False}}).distinct('ips'): if not host in seen_host: print("search " + host) obj = IPWhois(host) results = obj.lookup() print("ok") coll.update({"ips": host}, {"$set": {"DNS": results}}, upsert=True, multi=True) pprint(results) seen_host[host] = True for host in coll.find({"DNS": {"$exists": False}}).distinct('ipd'): if not host in seen_host: print("search " + host) try: obj = IPWhois(host) results = obj.lookup() print("ok") coll.update({"ipd": host},