def main(): parser = optparse.OptionParser('%prog ' + \ '-r <ip_file> || -i <ip>') parser.add_option('-r', dest='ipfile', type='string',\ help='specify target file with ips') parser.add_option('-i', dest='ip', type='string',\ help='specify target with ip') (options, args) = parser.parse_args() ip = options.ip ipfile = options.ipfile if ip == None and ipfile == None: print parser.print_help() exit(1) if ip and ipfile : print parser.usage exit(1) if ipfile != None: ip_list = get_ips(ipfile) look(ip_list) else: c = Client() try: try: r = c.lookup(ip) except Exception as e:print e pt = r[ip].prefix + " ------> " + r[ip].ip + "\n" + \ r[ip].cc + "\t" + r[ip].owner print pt + "\n" + "-"*60 except Exception as e: print e
def main(): parser = optparse.OptionParser('%prog ' + \ '-r <file_with IPs> || -i <IP>') parser.add_option('-r', dest='ips', type='string', \ help='specify target file with IPs') parser.add_option('-i', dest='ip', type='string', \ help='specify a target IP address') (options, args) = parser.parse_args() ip = options.ip # Assigns a -i <IP> to variable 'ip' global ips; ips = options.ips # Assigns a -r <fileName> to variable 'ips' if (ips == None) and (ip == None): # If proper arguments aren't given print the script usage print parser.usage sys.exit(0) if ips != None: # Execute if ips has a value checkFile(ips) # Execute the function to check if the file can be read iplist = [] # create the ipslist list object for line in open(ips, 'r'): # Parse File to create a list iplist.append(line.strip('\n')) # Appends that line in the file to list and removes the new line char look(iplist) # pass the iplist list object to the look() function else: # Executes lookup() function for a single IP stored in variable 'ip' try: c = Client() r = c.lookup(ip) net = r.prefix; owner = r.owner; cc = r.cc line = '%-20s # - %15s (%s) - %s' % (net, ip, cc, owner) print line except:pass
def getTraceRoute(jsonFile):
    """Convert a RIPE Atlas-style traceroute JSON dump into per-traceroute
    AS paths.

    Writes one space-separated AS path per traceroute to tmpASPath.txt and
    every "asn$owner" string observed to asnDict.txt.

    jsonFile: path to the traceroute result file (a JSON list).
    """
    import json
    import sys
    import pygeoip
    from AtlasUtils import *
    from cymruwhois import Client
    c=Client()
    inFile = jsonFile
    outFile = "tmpASPath.txt"
    my_file = open(outFile, "w")
    asnDict = open("asnDict.txt","w")
    with open(inFile) as data_file:
        data = json.load(data_file)
    ### use GeoIPASNum.dat if system is not connected to Internet. This is a offline database of ip-to-AS mapping
    gi = pygeoip.GeoIP('GeoIPASNum.dat')
    # Choose the v4 or v6 offline ASN database based on the first probe's
    # source address.  NOTE(review): 'gi' is never used below — all
    # lookups go through the cymruwhois client instead.
    if is_valid_ipv4(data[0]["from"]):
        gi = pygeoip.GeoIP('GeoIPASNum.dat')
    else :
        gi = pygeoip.GeoIP('GeoIPASNumv6.dat')
    # One traceroute (measurement result) per 'index'.
    for index in range(len(data)):
        tmpASPath = ""
        for cnt in range(len(data[index]["result"])):
            # Each hop carries either an "error" or a list of RTT results.
            if data[index]["result"][cnt].has_key("error"):
                print data[index]["result"][cnt]["error"]
            else:
                for resultrttindex in range(len(data[index]["result"][cnt]["result"])):
                    #print data[index]["result"][cnt]["result"][resultrttindex]
                    from_val = "*"   # placeholder for a non-responding hop
                    asn_val = False  # stays False when the hop has no "from"
                    if data[index]["result"][cnt]["result"][resultrttindex].has_key("from"):
                        from_val = data[index]["result"][cnt]["result"][resultrttindex]["from"]
                        tmp = "None"  # NOTE(review): unused
                        # whois lookup; result encoded as "asn$owner"
                        r=c.lookup(from_val)
                        asn_val = r.asn+"$"+r.owner
                    print "%s :--> %s" % (from_val, asn_val)
                    # Record the asn$owner pair (skipping 'NA' placeholders).
                    if isinstance(asn_val, str) and not 'NA' in asn_val:
                        asnDict.write(asn_val+"\n")
                    # Append the ASN to the path only when it differs from
                    # the previous hop's ASN (collapses consecutive runs;
                    # tmpASPath ends with a space, so s[len(s)-2] is the
                    # last ASN appended, or '' when the path is empty).
                    if isinstance(asn_val, str) and not 'NA' in asn_val:
                        s = tmpASPath.split(" ")
                        if not s[len(s)-2] == r.asn:
                            tmpASPath = tmpASPath + r.asn + " "
        # One finished AS path per traceroute.
        my_file.write(tmpASPath + "\n")
    my_file.close()
    asnDict.close()
def use_ip_to_get_isp(ip):
    """Resolve the ISP (owner) name for *ip* via a Team Cymru whois lookup.

    May be redundant with the earlier ip lookup, but the owner output
    here tends to have much better consistency.

    Returns: dict with a single 'isp_name' key.
    """
    # TODO: hoist the Client out of here so it is not rebuilt per call.
    record = Client().lookup(ip)
    return {'isp_name': record.owner}
def get_isp(ip):
    """Look up the ISP name for *ip* using Team Cymru whois.

    Much faster than the ipwhois lookup, but the result carries no
    "organization" field.

    ip: an IP address parsed out of the log file.
    Returns: dict containing the ISP name for the ip.
    """
    # TODO: reuse one Client instead of constructing it on every call.
    whois_client = Client()
    record = whois_client.lookup(ip)
    return {'isp_name': record.owner}
def whoisrecord(ip):
    """Return the whois record for *ip*, served from the module-level
    ``whois`` cache and refreshed via Team Cymru when the cached entry
    is older than ten hours (36000 s).
    """
    now = time.time()
    cached = whois.get(ip)
    if cached is not None:
        ASN, fetched_at = cached
    else:
        fetched_at = 0  # forces a fresh lookup below
    if (now - fetched_at) > 36000:
        ASN = Client().lookup(ip)
        whois[ip] = (ASN, now)
    return ASN
def whoisrecord(ip):
    """Return the whois record for *ip*, cached in the module-level
    ``whois`` dict and refreshed when older than ten hours (36000 s).

    On any failure the exception OBJECT is returned instead of raised;
    callers must check for that (pre-existing contract, preserved for
    compatibility).
    """
    try:
        currenttime = time.time()
        # The original also pre-assigned 'ts = currenttime' here; both
        # branches below reassign it, so that was dead code.
        if ip in whois:
            ASN, ts = whois[ip]
        else:
            ts = 0  # forces a fresh lookup below
        if (currenttime - ts) > 36000:
            c = Client()
            ASN = c.lookup(ip)
            whois[ip] = (ASN, currenttime)
        return ASN
    except Exception as e:
        # NOTE(review): returning the exception rather than raising makes
        # errors easy to miss downstream; kept to avoid breaking callers.
        return e
class Lookup(object):
    """IP -> ASN resolver.

    Uses a local pyasn database when one is supplied, and Team Cymru
    whois otherwise.
    """

    # Class-level default; replaced per-instance when a pyasn file is given.
    asndb = None

    def __init__(self, pyasn_file=None):
        """pyasn_file: optional path to a pyasn database file."""
        if pyasn_file:
            self.asndb = pyasn(pyasn_file)
        self.whois = Client()

    def lookup(self, ip):
        """Return the ASN for *ip* as a string.

        NOTE(review): when a pyasn db is loaded but holds no entry for
        the ip, this returns None instead of falling back to whois —
        behaviour preserved as-is from the original.
        """
        if not self.asndb:
            return self.whois.lookup(ip).asn
        asn = self.asndb.lookup(ip)[0]
        return str(asn) if asn else None
ip = raw_input() from cymruwhois import Client c=Client() r=c.lookup(ip) print r
## Doesn't work from cymruwhois import Client c=Client() print 'enter filename' filename = raw_input() txt = open(filename) print f = open("output/list2asn.txt",'w') for ip in txt: s = str((c.lookup(ip))) + '\n' f.write(s)
class jetplane: def __init__(self, host, port, db=False, logging="_"): self._log = utility.logger("jetplane", logging) self._host = host self._port = port self._proxy_ip = None self._whois_handle = Client() self._criteria = None self._max_tours = 0 self._tours = 1 self._takeoff_time = None self._errors = 0 self._success = False self._trip_details = [] self._db = db self._kill_bit = False if self._db: self._mongodb_handle = mongodb("127.0.0.1", "27017", "jetplane", "hanger", "INFO") self._mdb = self._mongodb_handle.get_con() def take_off(self, criteria, tours): self._takeoff_time = time.time() self._max_tours = tours + 1 self._criteria = criteria.lower() self._log.info("Trip details: max tours - %d, local - %s" % (self._max_tours, self._criteria)) r = self._world_tour() return r def _world_tour(self): self._log.info("Baggage check number %d" % self._tours) time.sleep(3) # naptime self._log.info("Tickets purchased to %s" % self._criteria) self._flush_addr() # kill off any address we had before self._log.info("Safety and pre-flight checks") time.sleep(7) # accept 3 errors before ditching for i in range(0, 2): prox_run = self._proxy_check() # see what TOR gave us if prox_run: break else: if i == 1: return False for i in range(0, 2): who_run = self._whoami() # pull out the decision if who_run: break else: if i == 1: return False self._decide() # did we land in the right area return True def _flush_addr(self): self._log.info("Boarding the plane") sout = Popen(["/etc/init.d/tor", "restart"], stdout=PIPE, stderr=STDOUT).communicate()[0] def _proxy_check(self): self._headers = { "User-Agent": "Mozilla/5.0 (Windows NT 6.1; WOW64; rv:12.0) Gecko/20100101 Firefox/12.0", "Connection": "Keep-Alive", } self._proxies = {"http": self._host + ":" + self._port} self._urls = [ "http://whatismyipaddress.com/", "http://www.whatsmyip.us/", "http://www.ipchicken.com/", "http://www.whatsmyip.info/", "http://www.whatsmyip.in/", "http://www.whatsmyip.cc/", "http://ipswift.com", ] self._url = 
self._urls[random.randint(0, len(self._urls) - 1)] self._log.info("Fetching address data from %s" % self._url) response = requests.get(self._url, proxies=self._proxies) if response.status_code == 200: try: self._proxy_ip = re.search( r"((2[0-5]|1[0-9]|[0-9])?[0-9]\.){3}((2[0-5]|1[0-9]|[0-9])?[0-9])", response.content ).group() self._log.info("Query returned %s" % self._proxy_ip) return True except: self._log.error("Failed to find IP address using %s" % self._url) self._errors += 1 return False else: self._proxy_ip = "127.0.0.1" self._log.error("Query failed, flushing again") self._errors += 1 return False def _whoami(self): try: self._whois_data = self._whois_handle.lookup(self._proxy_ip) self._addr_owner = self._whois_data.owner self._addr_asn = self._whois_data.asn self._addr_prefix = self._whois_data.prefix self._addr_cc = self._whois_data.cc return True except Exception, e: self._log.error("Failed to gather WHOIS for %s" % self._proxy_ip) self._log.info(str(e)) self._errors += 1 return False
do a dns reverse lookup in random order """ ips = get_ips(start_ip, stop_ip) while len(ips) > 0: i = randint(0, len(ips) - 1) lookup_ip = str(ips[i]) try: print lookup_ip + ": " + str(socket.gethostbyaddr(lookup_ip)[0]) except socket.herror, e: print lookup_ip + ": " + str(e) except socket.error, e: print lookup_ip + ": " + str(e) if whois: info = whois_client.lookup(lookup_ip) print info.owner if delay > 0: sleep(delay) del ips[i] def do_web_lookup(host, path): """ do the actual web lookup, maybe mixin salt and search the path on host with google, print the result """ url = "" got_google_result = False
r = requests.get(item, timeout=10, headers=headers) for each in list(re.findall(r'[0-9]+(?:\.[0-9]+){3}', r.text)): if each not in ips_to_check: ips_to_check.append(each) print("[+]debug, adding ..." + str(each)) except BaseException: pass ips_to_check.sort() # print (ips_to_check) for each in ips_to_check: try: tmp_lst = [] tmp_lst.append(each) r = c.lookup(each) print("[+]debug, adding ..." + str(r.owner)) tmp_lst.append(str(r.owner)) owner_lst.append(str(r.owner)) ip_data.append(tmp_lst) except BaseException: pass ip_data.sort(key=lambda x: x[1]) # print(tabulate(ip_data)) f = open("owner_data.txt", "w") f.write(str(ip_data)) f.close() owner_count = pd.Series(owner_lst).value_counts().to_string() # print(owner_count)
def process_pcaps(pcap_file):
    """Aggregate a pcap into visualisation grids.

    Reads every TCP/UDP-over-IP packet, accumulates payload byte counts
    per (src, dst) pair, then classifies each observed host by ASN
    (Team Cymru whois) or RFC 1918 space.

    Returns a 6-tuple:
      asn_grid     -- 256x256 grid of [bytes_in, bytes_out] indexed by ASN
      private_grid -- 289x289 grid of [bytes_in, bytes_out] for private space
      sport_grid   -- 256x256 source-port activity grid (value 2 = seen)
      dport_grid   -- 256x256 destination-port activity grid (value 1 = seen)
      packet_count -- number of TCP/UDP packets processed
      duration     -- human-readable capture duration (humanize)

    NOTE(review): with fewer than two matching packets end_time stays
    None and the final naturaldelta() call would fail — confirm callers
    only feed captures with traffic.
    """
    aggr_dict = {}  # src ip -> {dst ip -> payload bytes}
    ROWS = 256
    COLUMNS = 256
    # 256x256 zeroed grid for source ports.
    sport_grid = []
    for row in range(ROWS):
        sport_grid.append([])
        for column in range(COLUMNS):
            sport_grid[row].append(0)
    # 256x256 zeroed grid for destination ports.
    dport_grid = []
    for row in range(ROWS):
        dport_grid.append([])
        for column in range(COLUMNS):
            dport_grid[row].append(0)
    print("Reading pcap file " + pcap_file + "...")
    sys.stdout.flush()
    proto_dict = {17:'UDP', 6:'TCP'}  # IP protocol number -> scapy layer name
    ip_dports = {}  # src ip -> list of destination ports it contacted
    packet_count = 0
    start_time = None
    end_time = None
    with PcapReader(pcap_file) as packets:
        for packet in packets:
            try:
                # Only count IP packets carrying TCP or UDP.
                if (IP in packet) and (packet.proto in proto_dict.keys()):
                    if packet_count == 0:
                        start_time = packet.time
                    else:
                        end_time = packet.time
                    packet_count += 1
                    proto_name = proto_dict[packet.proto]
                    l3 = packet['IP']
                    l4 = packet[proto_name]
                    # Skip unspecified/broadcast addresses entirely.
                    if (l3.src != '0.0.0.0' and l3.src != '255.255.255.255' and l3.dst != '0.0.0.0' and l3.dst != '255.255.255.255'):
                        if l3.src not in aggr_dict:
                            aggr_dict[l3.src] = {}
                        if l3.dst not in aggr_dict[l3.src]:
                            aggr_dict[l3.src][l3.dst] = 0
                        aggr_dict[l3.src][l3.dst] += len(packet.payload)
                        # get ports
                        if l3.src not in ip_dports:
                            ip_dports[l3.src] = []
                        ip_dports[l3.src].append(l4.dport)
            except:
                # packet failed to parse, skipping
                pass
    print("done")
    # 289x289 grid covering RFC 1918 space ([bytes_in, bytes_out] cells).
    ROWS = 289
    COLUMNS = 289
    private_grid = []
    for row in range(ROWS):
        private_grid.append([])
        for column in range(COLUMNS):
            private_grid[row].append([0, 0])
    private_map = populate_1918_space()  # /24 prefix -> grid coordinates
    ROWS = 256
    COLUMNS = 256
    asn_dict = {}  # asn -> {'owner', 'bytes_in', 'bytes_out'}
    c = Client()
    for host in aggr_dict:
        if len(aggr_dict[host]) > 1:
            # Host talked to multiple peers: treat it as a local sender
            # and attribute bytes OUT to each peer's ASN.
            # get sent bytes
            print("host: {0}".format(host))
            with open('www/static/img/maps/manifest.txt', 'a+') as f:
                f.write(pcap_file.split("/")[-1] + ": " + host + "\n")
            for port in ip_dports[host]:
                dport_grid[int(port/ROWS)][port%ROWS] = 1
            for peer in aggr_dict[host]:
                try:
                    r = c.lookup(peer)
                    if not r.asn:
                        if not r.cc:
                            # RFC 1918, etc.
                            #print "peer:", peer, "bytes out :", aggr_dict[host][peer]
                            priv_arr = private_map[".".join(peer.split(".")[:-1])]
                            private_grid[priv_arr[0]][priv_arr[1]][1] += aggr_dict[host][peer]
                        else:
                            print("found public IP without an ASN: {0} bytes out: {1}".format(peer, aggr_dict[host][peer]))
                    else:
                        # public ip space
                        if r.asn in asn_dict:
                            asn_dict[r.asn]['bytes_out'] += aggr_dict[host][peer]
                        else:
                            asn_dict[r.asn] = {'owner': r.owner, 'bytes_out': aggr_dict[host][peer], 'bytes_in': 0}
                except Exception as e:
                    print("{0} FAILED TO LOOKUP ASN".format(peer))
                    print(str(sys.exc_info()[0]) + str(e))
        else:
            # Host has a single peer: treat it as a remote sender and
            # attribute bytes IN to its ASN.
            if host in ip_dports:
                for port in ip_dports[host]:
                    sport_grid[int(port/ROWS)][port%ROWS] = 2
            # get received bytes
            dst = None
            # there is only one to loop through
            for d in aggr_dict[host]:
                dst = d
            try:
                r = c.lookup(host)
                if not r.asn:
                    if not r.cc:
                        # RFC 1918, etc.
                        #print "peer:", host, "bytes in:", aggr_dict[host][dst]
                        priv_arr = private_map[".".join(host.split(".")[:-1])]
                        private_grid[priv_arr[0]][priv_arr[1]][0] += aggr_dict[host][dst]
                    else:
                        print("found public IP without an ASN: {0} bytes out: {1}".format(host, aggr_dict[host][dst]))
                else:
                    # public ip space
                    if r.asn in asn_dict:
                        asn_dict[r.asn]['bytes_in'] += aggr_dict[host][dst]
                    else:
                        asn_dict[r.asn] = {'owner': r.owner, 'bytes_in': aggr_dict[host][dst], 'bytes_out': 0}
            except Exception as e:
                print("{0} FAILED TO LOOKUP ASN".format(host))
                print(str(sys.exc_info()[0]) + str(e))
    # 256x256 grid addressed by 16-bit ASN ([bytes_in, bytes_out] cells).
    asn_grid = []
    for row in range(ROWS):
        asn_grid.append([])
        for column in range(COLUMNS):
            asn_grid[row].append([0, 0])
    for asn in asn_dict:
        try:
            asn_num = int(asn)
            if asn_num < 65536:
                asn_grid[int(asn_num/ROWS)][asn_num%ROWS] = [asn_dict[asn]['bytes_in'], asn_dict[asn]['bytes_out']]
            else:
                # 32-bit ASNs do not fit the 256x256 grid; just report them.
                print("ALERT!!!! high external asn: {0} asn owner: {1} total bytes sent: {2} total bytes received: {3}".format(asn, asn_dict[asn]['owner'], asn_dict[asn]['bytes_out'], asn_dict[asn]['bytes_in']))
        except Exception as e:
            print(str(e))
    return asn_grid, private_grid, sport_grid, dport_grid, packet_count, humanize.naturaldelta(datetime.utcfromtimestamp(end_time) - datetime.utcfromtimestamp(start_time))