def asn_db(date: str = None):
    """Retrieve IPASN DB instances for a given date in format YYYY-MM-DD.

    If date is None, uses yesterday's date.

    Returns:
        tuple: (IPv4 pyasn instance, IPv6 pyasn instance) for the date.

    Raises:
        OSError: if either database file cannot be loaded.
    """
    global __asn_db
    if date is None:
        date = (datetime.today() - timedelta(days=1)).strftime("%Y-%m-%d")
        logging.warning("No date specified for ASN DB, using %s", date)
    # Check BOTH caches: if a previous call loaded the IPv4 DB but raised on
    # the IPv6 one, `date` would be in __asn_db only, and the original check
    # would then hit a KeyError on __asn6_db[date] at the return below.
    if date not in __asn_db or date not in __asn6_db:
        # IPASN not loaded -- load it now
        path = os.path.join(IPASN_DIR, date, ASN_DB_FNAME)
        path6 = os.path.join(IPASN6_DIR, date, ASN_DB_FNAME)
        logging.info("Loading IPASN IPv4 database %s", path)
        logging.info("Loading IPASN IPv6 database %s", path6)
        try:
            __asn_db[date] = pyasn.pyasn(path)
        except OSError:
            logging.error("Could not load IPASN IPv4 database %s", path)
            raise  # bare raise preserves the original traceback
        try:
            __asn6_db[date] = pyasn.pyasn(path6)
        except OSError:
            logging.error("Could not load IPASN IPv6 database %s", path6)
            raise
    return __asn_db[date], __asn6_db[date]
def __init__(self, hostname):
    """Build an IP pool from every prefix announced by *hostname*'s AS.

    Resolves the hostname, looks up its origin ASN, and seeds the pool with
    all prefixes of that AS; ranges listed in the blacklist file are recorded
    in self._ignored_ranges.
    """
    # pyasn is installed on the fly if it is missing.
    try:
        from pyasn import pyasn
    except ImportError:
        system('pip install pyasn')
        from pyasn import pyasn
    try:
        asndb = pyasn(ASNDB_FILE_NAME)
    except IOError:
        # ASN database file missing -- fetch/build it, then retry once.
        self._install_asndb()
        asndb = pyasn(ASNDB_FILE_NAME)
    try:
        main_ip = gethostbyname(hostname)
    except gaierror:
        # FIX: the original adjacent literals 'Couldn' 't ...' concatenated
        # to "Couldnt ..." -- the apostrophe was missing.
        raise RuntimeError('Couldn\'t get ip for host %s.' % hostname)
    try:
        self._ignored_ranges = []
        # Blacklist file is a comma-separated list of IP ranges.
        with open(BLACKLIST_FILE_NAME, 'r') as f:
            for i in f.read().split(','):
                ignored_ip_range = i.strip()
                if ignored_ip_range:
                    self._ignored_ranges.append(
                        self._ip_range_to_range(ignored_ip_range))
    except IOError:
        # Missing blacklist is non-fatal: just means nothing is ignored.
        print('File "%s" is missing, ips will not be ignored.'
              % BLACKLIST_FILE_NAME)
    ip_ranges = sorted(asndb.get_as_prefixes(asndb.lookup(main_ip)[0]))
    print('Found ranges %s' % ip_ranges)
    super(IPPoolASN, self).__init__(*ip_ranges)
def check(parameters):
    """Validate that the configured pyasn database exists and is loadable.

    Returns a list of [level, message] entries on failure, or None when the
    database opens cleanly.
    """
    db_path = parameters.get('database', '')
    if not os.path.exists(db_path):
        return [["error", "File given as parameter 'database' does not exist."]]
    try:
        pyasn.pyasn(parameters['database'])
    except Exception as exc:
        return [["error", "Error reading database: %r." % exc]]
def load_all_data(file_path_vrps, file_path_rib):
    """Load the VRPS and RIB pyasn databases and return them keyed by name."""
    databases = {}
    databases["vrps"] = pyasn.pyasn(file_path_vrps)
    logger.debug("finish load vrps from {}".format(file_path_vrps))
    databases["rib"] = pyasn.pyasn(file_path_rib)
    logger.debug("finish load rib from {}".format(file_path_rib))
    return databases
def get_path(hostname):
    """Trace the route to *hostname* and print a table of hop IPs and ASNs."""
    # NOTE(review): command name 'tracer' looks like a typo for 'tracert'
    # (cp1251 decode below implies Windows) -- confirm before changing it.
    tracer = subprocess.Popen(['tracer', '-w', '30', hostname],
                              stdout=subprocess.PIPE)
    re_ip = re.compile(r"\d{1,3}\.\d{1,3}\.\d{1,3}\.\d{1,3}")
    # Load the ASN database once, not once per traceroute hop.
    asn_database = pyasn.pyasn('IpAsn.dat')
    n = 0
    result = PrettyTable()
    # FIX: PrettyTable's attribute is 'field_names', not 'field'.
    result.field_names = ['№ по порядку', 'IP', 'AS']
    i = 0
    # FIX: stdout yields bytes, so the iter() sentinel must be b"" --
    # a str sentinel never matches and the loop would not terminate at EOF.
    for line in iter(tracer.stdout.readline, b""):
        if i < 4:
            # Skip the traceroute banner lines.
            i += 1
            continue
        line = line.decode('windows-1251')
        if "* * *" in line:
            print(result)
            break
        ip = re.search(re_ip, line)[0]
        lookup = asn_database.lookup(ip)
        asn = '' if lookup[0] is None else lookup[0]
        # FIX: add_row() expects one sequence of column values.
        result.add_row([n, ip, asn])
        n += 1
def __init__(self, main, reserved=None):
    """Load the IP-to-ASN database and optional reserved-blocks tree.

    Args:
        main (string): path to the conversion output of pyasn
        reserved (string): path to file recording reserved IP blocks;
            lines are "<prefix> <description>", '#' lines are skipped.
    """
    try:
        self._main = pyasn.pyasn(main)
    except (IOError, RuntimeError) as e:
        # NOTE(review): execution continues after this failure, so
        # self._main may be left unset and later use will raise
        # AttributeError -- confirm this is intended best-effort behavior.
        logging.critical(
            "Encountered error when initializing IP to ASN DB: %s" % e)
    if reserved is not None:
        self._reserved = SubnetTree.SubnetTree()
        self.reserved_des = set()
        try:
            with open(reserved, 'r') as fp:
                for line in fp:
                    # NOTE(review): the guard allows >= 2 fields but the
                    # unpack below requires exactly 2; a 3-field line would
                    # raise ValueError -- verify input format.
                    if not line.startswith('#') and len(line.split()) >= 2:
                        pref, desc = [i.strip() for i in line.split()]
                        self._reserved.insert(pref, desc)
                        self.reserved_des.add(desc)
        except IOError as e:
            logging.critical(
                "Encountered error when initializing IP to ASN DB: %s" % e)
    else:
        # No reserved file given: mark both as absent.
        self._reserved = None
        self.reserved_des = None
def asn_lookup(ipv4):
    """Resolve ASN, announcing prefix, and AS name for an IPv4 address."""
    database = pyasn.pyasn('rib.20191127.2000.dat',
                           as_names_file='asn_names.json')
    as_number, as_prefix = database.lookup(ipv4)
    return {
        'prefix': as_prefix,
        'name': database.get_as_name(as_number),
        'asn': as_number,
    }
def __init__(self, pyasn_file, ixp_interfaces_file, ixp_prefixes_file):
    """Load the ASN database, IXP interface map, and IXP prefix tree."""
    self.asndb = pyasn.pyasn(pyasn_file)
    self.ixp_interface_prefixes = {}
    interfaces, interface_prefixes = self.read_ixp_interfaces(
        ixp_interfaces_file)
    self.ixp_interfaces = interfaces
    self.ixppref_tree = self.construct_ixprefix_tree(
        ixp_prefixes_file, interface_prefixes)
def get_path(self, hostname):
    """Run tracert to *hostname* and print a table of hop IPs and ASNs."""
    tracert = subprocess.Popen(['tracert', '-w', '30', hostname],
                               stdout=subprocess.PIPE)
    re_ip = re.compile(r"\d{1,3}\.\d{1,3}\.\d{1,3}\.\d{1,3}")
    # Load the ASN database once instead of once per traceroute hop.
    asndb = pyasn.pyasn('ipasn_20140513.dat')
    n = 0
    result = PrettyTable()
    result.field_names = ['№ по порядку', 'IP', 'AS']
    i = 0
    # FIX: stdout yields bytes; the iter() sentinel must be b"" or the loop
    # never terminates at EOF.
    for line in iter(tracert.stdout.readline, b""):
        if i < 4:
            # Skip the tracert banner lines.
            i += 1
            continue
        line = line.decode('windows-1251')
        if "* * *" in line:
            print(result)
            break
        ip = re.search(re_ip, line)[0]
        # Renamed from 'tuple' to avoid shadowing the builtin.
        lookup = asndb.lookup(ip)
        asn = '' if lookup[0] is None else lookup[0]
        # FIX: add_row() expects one sequence of column values.
        result.add_row([n, ip, asn])
        n += 1
def fill_domains(asns, ipasn_db_path, top_domains_path, n_domains):
    """Adds domains from alexa top N to ASNs 'domains'.

    Args:
        asns: dict of ASN (str) -> ASN data dict; mutated in place.
        ipasn_db_path: path to the pyasn database file.
        top_domains_path: CSV file of (rank, domain) rows.
        n_domains: number of domains to process before stopping.
    """
    print(f"Parsing {ipasn_db_path} and {top_domains_path} for ASN domains.")
    asndb = pyasn.pyasn(ipasn_db_path)
    with open(top_domains_path, "r") as f_obj:
        reader = csv.reader(f_obj)
        counter = 0
        for row in reader:
            if counter == n_domains:
                break
            domain = row[1]
            # FIX: test the raw lookup result BEFORE stringifying --
            # str(None) is the truthy string "None", so the original
            # recorded failed lookups under a bogus "None" ASN key.
            raw_asn = get_asn_by_domain(asndb, domain)
            if raw_asn:
                asn = str(raw_asn)
                asn_data = get_or_create_asn(asns, asn)
                # domains are unique
                asn_data['domains'].append(domain)
                asn_data['weight'] += 1
                asns[asn] = asn_data
            counter += 1
            if counter % 1000 == 0:
                print(f"Parsed {counter} domains")
def maj_location_asn(db):
    """Refresh geoip (ISO code, country, city) and ASN columns for every
    row of the ips table, then copy matching labels from the asns table."""
    print("/----------------------------------/ \n\
/ Updating geoip and ASN infos / \n\
/----------------------------------/")
    reader = geoip2.database.Reader(script_path + '/booLet_extres/GeoLite2-City.mmdb')
    asn_db = pyasn.pyasn(script_path + '/booLet_extres/ipasn_20150224.dat')
    les_lignes = []
    with db:
        cur = db.cursor()
        sql = "SELECT IP FROM ips;"
        results = cur.execute(sql)
        for mon_ip in results:
            # Default every geo field to '-' (unknown).
            country_name = city_name = iso_code = '-'
            try:
                response = reader.city(mon_ip[0])
            except:
                # NOTE(review): bare except also hides programming errors.
                country_name = city_name = iso_code = '-'
            else:
                if response.country.names:
                    # Prefer the French localisation when present.
                    if "fr" in response.country.names:
                        country_name = response.country.names['fr']
                    # NOTE(review): this overwrites the French name whenever
                    # names is non-empty -- confirm intended precedence.
                    if response.country.names != '':
                        country_name = response.country.name
                if response.city.name:
                    if response.city.name != '':
                        city_name = response.city.name
                    else:
                        city_name = '-'
                if response.country.iso_code in ['', 'None']:
                    iso_code = '-'
                else:
                    iso_code = "{0!s}".format(response.country.iso_code)
            try:
                # NOTE(review): 'range' shadows the builtin of the same name.
                asn, range = asn_db.lookup(mon_ip[0])
            except:
                asn = range = '-'
            else:
                if ("{0!s}".format(asn)).lower() == 'none':
                    asn = range = '-'
                else:
                    asn = "AS{0!s}".format(asn)
            les_lignes.append(
                (iso_code, country_name, city_name, asn, range, mon_ip[0]))
    with db:
        cur = db.cursor()
        sql = "UPDATE ips SET isocode=?, country_name=?, city_name=?, asn=?, range=? WHERE IP=?"
        db.executemany(sql, les_lignes)
        sql = "SELECT DISTINCT(i.asn),a.asnlabel FROM ips i, asns a WHERE i.asn = a.asn;"
        results = cur.execute(sql)
        values = []
        for result in results:
            values.append((result[1], result[0]))
        sql = "UPDATE ips SET asnlabel=? WHERE asn=?"
        db.executemany(sql, values)
def load_ip_to_asn_file():
    """Initialize and return the pyasn IP-to-ASN database.

    The sample database can be downloaded or built -- see the pyasn docs.
    """
    import pyasn
    db_path = '/home/ec2-user/RouteControlMap/top_asns_and_ips/amsix.db'
    return pyasn.pyasn(db_path)
def asn_lookup(ipv4):
    """Resolve the AS name for an IPv4 address."""
    database = pyasn.pyasn('rib.20191127.2000.dat',
                           as_names_file=AS_NAMES_FILE_PATH)
    as_number, _prefix = database.lookup(ipv4)
    return {'name': database.get_as_name(as_number)}
def test_pyasn_from_string(self):
    """ Test pyasn initialization from in memory string """
    with open(FAKE_IPASN_DB_PATH, "rt") as f:
        ipasn_str = f.read()
    self.assertEqual(len(ipasn_str.splitlines()), 12)  # fake data has 12 lines
    n = pyasn(None, ipasn_string=ipasn_str)
    # Now test the correctness. FIX: every lookup goes through `n` (the
    # string-loaded instance); the original switched to self.asndb_fake
    # after the first loop, so the in-memory code path was barely exercised.
    for i in range(4):
        asn, prefix = n.lookup("1.0.0.%d" % i)
        self.assertEqual(1, asn)
        self.assertEqual("1.0.0.0/30", prefix)
    for i in range(4, 256):
        asn, prefix = n.lookup("1.0.0.%d" % i)
        self.assertEqual(2, asn)
        self.assertEqual("1.0.0.0/24", prefix)
    for i in range(256):
        asn, prefix = n.lookup("2.0.0.%d" % i)
        self.assertEqual(3, asn)
        self.assertEqual("2.0.0.0/24", prefix)
    for i in range(128, 256):
        asn, prefix = n.lookup("3.%d.0.0" % i)
        self.assertEqual(4, asn)
        self.assertEqual("3.0.0.0/8", prefix)
    for i in range(0, 128):
        asn, prefix = n.lookup("3.%d.0.0" % i)
        self.assertEqual(5, asn)
        self.assertEqual("3.0.0.0/9", prefix)
    # Unrouted address resolves to (None, None).
    asn, prefix = n.lookup("5.0.0.0")
    self.assertEqual(None, asn)
    self.assertEqual(None, prefix)
def init(self):
    """Open the pyasn database configured in self.parameters.database."""
    db_path = self.parameters.database
    try:
        self.database = pyasn.pyasn(db_path)
    except IOError:
        # Data file missing/unreadable: log guidance and stop the bot.
        self.logger.error(
            "pyasn data file does not exist or could not be accessed in '%s'"
            % db_path)
        self.logger.error(
            "Read 'bots/experts/asnlookup/README' and follow the procedure")
        self.stop()
def main():
    """Map IPs read from stdin to their origin ASNs and pickle the AS set.

    argv[1]: basename for the output pickle ('.pickle' is appended).
    argv[2]: path to the pyasn IPASN database file.
    """
    # process command-line args
    as_set_fname = sys.argv[1]
    as_set_fname += '.pickle'
    asndb_file_name = sys.argv[2]
    asndb = pyasn.pyasn(asndb_file_name)
    as_set = set()
    for ip_line in sys.stdin:
        ip = ip_line.rstrip()
        # convert to AS and add ASes to set
        (asn, prefix) = asndb.lookup(ip)
        if asn is not None:
            as_set.add(asn)
        else:
            # Unresolvable IPs are reported but not collected.
            print ip
            print 'ASN not found'
    # pickle the AS set
    with open(as_set_fname, 'wb') as f:
        pickle.dump(as_set, f, pickle.HIGHEST_PROTOCOL)
    print as_set
    print 'number of unique ASes = ' + str(len(as_set))
def load(self):
    """Load the ASN DB from disk.

    It'll try to load it from user's opensnitch directory if these file exist:
        - ~/.config/opensnitch/ipasn_db.dat.gz
        - ~/.config/opensnitch/asnames.json

    Otherwise it'll try to load it from python3-pyasn package.
    """
    try:
        if self.asndb is not None:  # idiom fix: 'is not None', not '!= None'
            return  # already loaded
        import pyasn
        IPASN_DB_PATH = os.path.expanduser(
            '~/.config/opensnitch/ipasn_db.dat.gz')
        # .gz not supported for asnames
        AS_NAMES_FILE_PATH = os.path.expanduser(
            '~/.config/opensnitch/asnames.json')
        # if the user hasn't downloaded an updated ipasn db, use the one
        # shipped with the python3-pyasn package
        if not os.path.isfile(IPASN_DB_PATH):
            IPASN_DB_PATH = '/usr/lib/python3/dist-packages/data/ipasn_20140513_v12.dat.gz'
        if not os.path.isfile(AS_NAMES_FILE_PATH):
            AS_NAMES_FILE_PATH = '/usr/lib/python3/dist-packages/data/asnames.json'
        print("using IPASN DB:", IPASN_DB_PATH)
        self.asndb = pyasn.pyasn(IPASN_DB_PATH,
                                 as_names_file=AS_NAMES_FILE_PATH)
    except Exception as e:
        # Any failure just disables ASN display; the app keeps running.
        self.ASN_AVAILABLE = False
        print("exception loading ipasn db:", e)
        print("Install python3-pyasn to display IP's network name.")
def __init__(self):
    """Open the bundled ASN and GeoIP databases shipped with this package."""
    package_name = __name__
    asn_file = pkg_resources.resource_filename(
        package_name, '/{0}'.format(ASN_DB))
    self.asndb = pyasn.pyasn(asn_file)
    geo_file = pkg_resources.resource_filename(
        package_name, '/{0}'.format(GEO_DB))
    self.geodb = geoip2.database.Reader(geo_file)
def dummy_asndb(tmpdir):
    """ Generates a dummy ASNDB for IP resolution testing. """
    from pyasn import pyasn
    from cfltools.utilities import asn_update
    datfile_path = str(asn_update(tmpdir))
    yield pyasn(datfile_path)
def get_asndb(self):
    """Get an ASN database, creating the data file first if none exists."""
    if config.read("asn_datfile") is None:
        logger.warning("No ASN data file detected! Creating one...")
        config.write("asn_datfile", asn_update())
        # Remember when we last refreshed the data file.
        config.write("asn_lastupdate", date.today().strftime("%Y-%m-%d"))
    return pyasn(config.read("asn_datfile"))
def __init__(self, asn_dat=None, asn_map=None):
    """Load the IP-to-ASN database and the ASN-to-name mapping.

    Args:
        asn_dat: optional path to a pyasn .dat file; defaults to the
            bundled data/whoisip/ipasn_20160916.1200.dat.
        asn_map: optional path to a JSON ASN-name map; when omitted the
            bundled cPickle name map is used instead.
    """
    if asn_dat is not None:
        self.asndb = pyasn.pyasn(asn_dat)
    else:
        self.asndb = pyasn.pyasn(
            os.path.join(LOCAL_DIR, '..', 'data', 'whoisip',
                         'ipasn_20160916.1200.dat'))
    if asn_map is not None:
        # Caller-supplied map is JSON, not pickle.
        with open(asn_map, 'rb') as f:
            self.names = json.loads(f.read())
    else:
        pkl_path = os.path.join(LOCAL_DIR, '..', 'data', 'whoisip',
                                'asn_names_20160930.pkl')
        with open(pkl_path, 'rb') as f:
            # cPickle implies this module targets Python 2.
            self.names = cPickle.load(f)
def maj_location_asn(db):
    """Refresh geoip (ISO code, country, city) and ASN columns for every
    row of the ips table, then copy matching labels from the asns table."""
    title("Updating geoip and ASN infos")
    reader = geoip2.database.Reader(script_path + '/booLet_extres/GeoLite2-City.mmdb')
    asn_db = pyasn.pyasn(script_path + '/booLet_extres/ipasn_20150224.dat')
    les_lignes = []
    with db:
        cur = db.cursor()
        sql = "SELECT IP FROM ips;"
        results = cur.execute(sql)
        for mon_ip in results:
            # Default every geo field to '-' (unknown).
            country_name = city_name = iso_code = '-'
            try:
                response = reader.city(mon_ip[0])
            except:
                # NOTE(review): bare except also hides programming errors.
                country_name = city_name = iso_code = '-'
            else:
                if response.country.names:
                    # Prefer the French localisation when present.
                    if "fr" in response.country.names:
                        country_name = response.country.names['fr']
                    # NOTE(review): this overwrites the French name whenever
                    # names is non-empty -- confirm intended precedence.
                    if response.country.names != '':
                        country_name = response.country.name
                if response.city.name:
                    if response.city.name != '':
                        city_name = response.city.name
                    else:
                        city_name = '-'
                if response.country.iso_code in ['', 'None']:
                    iso_code = '-'
                else:
                    iso_code = "{0!s}".format(response.country.iso_code)
            try:
                # NOTE(review): 'range' shadows the builtin of the same name.
                asn, range = asn_db.lookup(mon_ip[0])
            except:
                asn = range = '-'
            else:
                if ("{0!s}".format(asn)).lower() == 'none':
                    asn = range = '-'
                else:
                    asn = "AS{0!s}".format(asn)
            les_lignes.append((iso_code, country_name, city_name, asn, range, mon_ip[0]))
    with db:
        cur = db.cursor()
        sql = """UPDATE ips SET isocode=?, country_name=?, city_name=?, asn=?, range=? WHERE IP=?"""
        db.executemany(sql, les_lignes)
        sql = """SELECT DISTINCT(i.asn),a.asnlabel FROM ips i, asns a WHERE i.asn = a.asn;"""
        results = cur.execute(sql)
        values = []
        for result in results:
            values.append((result[1], result[0]))
        sql = "UPDATE ips SET asnlabel=? WHERE asn=?"
        db.executemany(sql, values)
def emit_tuples_SLD_ASN(lines, anomalies):
    """Yield (ASN, second-level-domain) tuples for recursive NOERROR DNS
    responses, skipping "<server_ip> <query>" pairs listed in *anomalies*."""
    # Create a pyasn to get ASNs
    asndb = pyasn.pyasn('ASN_VIEW')
    # Iterate over the lines
    for line in lines:
        if line.flags_resp != "-":
            DRES = line.resp_code
            # RD (0x08) / RA (0x04) flag bits as "0"/"1" strings.
            DFRD = "1" if int(line.flags_resp) & 0x08 != 0 else "0"
            DFRA = "1" if int(line.flags_resp) & 0x04 != 0 else "0"
            DANS = '|-><-|'.join(line.answers)
            DANTTLS = ','.join([str(t) for t in line.answer_ttls])
            DST = line.s_ip
            DQ = line.query
            # Keep only NOERROR responses and recursive queries
            if DRES == "NOERROR" and DFRD == "1" and DFRA == "1":
                # Get Number of CNAMEs and Server IP addresses
                records = str(DANS).split('|-><-|')
                sip = set()
                clen = 0
                nip = 0
                for record in records:
                    if is_valid_ipv4(record):
                        sip.add(record)
                        nip += 1
                    else:
                        clen += 1
                # Continue only if at least one IP address has been returned
                if nip > 0:
                    # Get the list of ASNs from the server IPs
                    asns = []
                    for ip in sip:
                        try:
                            this_asn = str(asndb.lookup(ip)[0])
                            if this_asn == "None":
                                # Unresolved: fall back to a /16-style
                                # pseudo-ASN built from the first two octets.
                                this_asn = ".".join(
                                    ip.split(".")[0:2]) + ".0.0"
                            if ip.startswith("127.0."):
                                this_asn = ip
                        except Exception as e:
                            # Lookup failure: keep the raw IP as the key.
                            this_asn = ip
                        asns.append(this_asn)
                    # Emit a tuple for each couple Query ASN
                    for asn in asns:
                        # Only if it is not anomalous
                        lookup = str(DST) + " " + str(DQ).lower()
                        if lookup not in anomalies:
                            SLD = getGood2LD(str(DQ).lower())
                            tup = (asn, SLD)
                            yield tup
def init(self):
    """Open the pyasn database configured for this bot."""
    db_path = self.parameters.database
    try:
        self.database = pyasn.pyasn(db_path)
    except IOError:
        # Data file missing/unreadable: log guidance and stop the bot.
        self.logger.error("pyasn data file does not exist or could not be "
                          "accessed in '%s'" % db_path)
        self.logger.error("Read 'bots/experts/asn_lookup/README' and "
                          "follow the procedure")
        self.stop()
def test_consistency(self):
    """ Tests if pyasn is consistently loaded and that it returns a consistent answer """
    reference = pyasn.pyasn(IPASN_DB_PATH)
    ref_asn, ref_prefix = reference.lookup("8.8.8.8")
    for _ in range(100):
        got_asn, got_prefix = self.asndb.lookup("8.8.8.8")
        self.assertEqual(ref_asn, got_asn)
        self.assertEqual(ref_prefix, got_prefix)
def reload(self):
    """Reload the ASN database and AS-name map, recording file inodes so
    later checks can detect on-disk replacement."""
    started = time.time()
    logger.debug("reloading databases...")
    self.asndb = pyasn.pyasn(self.db_filename)
    self.asnames = load_asnames(self.namedb_filename)
    self.db_ino = os.stat(self.db_filename).st_ino
    self.namedb_ino = os.stat(self.namedb_filename).st_ino
    logger.debug("reloading databases complete seconds=%0.1f",
                 time.time() - started)
def test_consistency(self):
    """ Tests if pyasn is consistently loaded and that it returns a consistent answer """
    reference = pyasn.pyasn(IPASN_DB_PATH)
    ref_asn, ref_prefix = reference.lookup('8.8.8.8')
    for _ in range(100):
        got_asn, got_prefix = self.asndb.lookup('8.8.8.8')
        self.assertEqual(ref_asn, got_asn)
        self.assertEqual(ref_prefix, got_prefix)
def getASN(ip_addr):
    """Return the origin ASN for *ip_addr*, recording unresolved IPs.

    Returns None when the lookup finds no ASN; in that case the IP is
    recorded in UnresolvedIPmodel.
    """
    asndb = pyasn.pyasn('/data/db.rviews/ipasn_20170207.dat')
    asn_size = asndb.lookup(ip_addr)
    asn = asn_size[0]
    # Check if ASN Count is in the database, if yes get it otherwise calculate
    # the size of AS and get the number of points required, add this to database.
    if not asn:
        # FIX: the original referenced the undefined name 'ip_address'
        # (the parameter is 'ip_addr'), raising NameError whenever the
        # lookup failed.
        UnresolvedIPmodel.objects.get_or_create(ip_address=ip_addr)
    return asn
def __init__(self, asn_dat=None, asn_map=None):
    """Load the IP-to-ASN database and ASN name map, falling back to the
    files bundled under data/whoisip when no paths are given."""
    data_dir = os.path.join(LOCAL_DIR, '..', 'data', 'whoisip')
    if asn_dat is not None:
        self.asndb = pyasn.pyasn(asn_dat)
    else:
        self.asndb = pyasn.pyasn(os.path.join(data_dir, 'ipasn.dat'))
    if asn_map is not None:
        # Caller-supplied map is JSON.
        with open(asn_map, 'rb') as f:
            self.names = json.loads(f.read())
    else:
        with open(os.path.join(data_dir, 'asn_names.pkl'), 'rb') as f:
            if (sys.version_info > (3, 0)):
                # latin1 lets Python 3 unpickle a Python 2 str pickle.
                self.names = pickle.load(f, encoding='latin1')
            else:
                self.names = pickle.load(f)
def test_asnames(self):
    """ Test functionality of AS Name Lookup. """
    named_db = pyasn(IPASN_DB_PATH, as_names_file=AS_NAMES_FILE_PATH)
    asn, _ = named_db.lookup('8.8.8.8')
    resolved = named_db.get_as_name(asn)
    self.assertTrue(resolved.lower().find("google") >= 0,
                    "ASN Name Incorrect! Should be Google")
    # Unknown ASNs have no name.
    missing = named_db.get_as_name(-1)
    self.assertTrue(missing is None, "ASN Name Incorrect! Should be None")
def emit_tuples(lines):
    """Yield (server_ip, (1, clients, queries, servers, asns)) tuples for
    recursive NOERROR DNS responses."""
    # Create a pyasn to get ASNs
    asndb = pyasn.pyasn('ASN_VIEW')
    # Iterate over the lines
    for line in lines:
        if line.flags_resp != "-":
            DRES = line.resp_code
            # RD (0x08) / RA (0x04) flag bits as "0"/"1" strings.
            DFRD = "1" if int(line.flags_resp) & 0x08 != 0 else "0"
            DFRA = "1" if int(line.flags_resp) & 0x04 != 0 else "0"
            DANS = '|-><-|'.join(line.answers)
            DANTTLS = ','.join([str(t) for t in line.answer_ttls])
            DST = line.s_ip
            DQ = line.query
            SRC = line.c_ip
            # Get Only Recursive Queries
            if DRES == "NOERROR" and DFRD == "1" and DFRA == "1":
                # Create Key
                key = DST
                # Parse simple fields
                clients = set((SRC, ))
                queries = set((DQ, ))
                # Parse Returned Server IPs
                servers = set()
                records = str(DANS).split('|-><-|')
                for record in records:
                    if is_valid_ipv4(record):
                        servers.add(record)
                # Get ASNs
                asns = set()
                for ip in servers:
                    try:
                        this_asn = str(asndb.lookup(ip)[0])
                        if this_asn == "None":
                            # Unresolved: fall back to a /16-style
                            # pseudo-ASN from the first two octets.
                            this_asn = ".".join(ip.split(".")[0:2]) + ".0.0"
                        if ip.startswith("127.0."):
                            this_asn = ip
                    except Exception as e:
                        # Lookup failure: keep the raw IP as the key.
                        this_asn = ip
                    asns.add(this_asn)
                value = (1, clients, queries, servers, asns)
                # Produce an output tuple
                tup = (key, value)
                yield tup
def getasnsize(asn):
    """Return the total number of IPv4 addresses covered by the effective
    prefixes of an AS."""
    db = pyasn.pyasn('/data/db.rviews/ipasn_20170207.dat')
    all_prefixes = db.get_as_prefixes_effective(asn)
    size = 0
    if all_prefixes:
        for prefix in all_prefixes:
            # A /n prefix spans 2**(32-n) addresses.
            mask_bits = int(prefix.split("/")[1])
            size += 2 ** (32 - mask_bits)
    return size
def init(self):
    """Open the pyasn database, failing clearly if pyasn or the data file
    is unavailable."""
    if pyasn is None:
        raise ValueError('Could not import pyasn. Please install it.')
    db_path = self.parameters.database
    try:
        self.database = pyasn.pyasn(db_path)
    except IOError:
        self.logger.error("pyasn data file does not exist or could not be "
                          "accessed in %r.", db_path)
        self.logger.error("Read 'bots/experts/asn_lookup/README' and "
                          "follow the procedure.")
        self.stop()
def filter_tstat_by_asn_worker_fn(tstat_row, target_asn, local_asn_ipasn_file):
    """Return True when the tstat row's server IP originates from target_asn.

    The ipasn file is loaded lazily, once per worker process (it is shipped
    to workers with spark-submit --files).
    """
    global worker_asndb
    if worker_asndb is None:  # idiom fix: 'is None', never '== None'
        worker_asndb = pyasn.pyasn(local_asn_ipasn_file)
    server_ip = tstat_row['s_ip:15']
    # lookup returns (asn, prefix), e.g. '8.8.8.8' -> (15169, '8.8.8.0/24')
    asn = worker_asndb.lookup(server_ip)
    return str(asn[0]) == str(target_asn)
def get_networks(asn, asndb_file):
    """ Returns the network blocks of an Autonomous System number.

    Args:
        asn: An Autonomous System number.
        asndb_file: The ASN database file to perform lookups against.

    Returns:
        List: Network blocks in CIDR format.
    """
    logging.debug("Attempting to find networks for ASN: {}".format(asn))
    try:
        return pyasn.pyasn(asndb_file).get_as_prefixes(asn)
    except Exception as e:
        # Best effort: log the failure and fall through (returns None).
        logging.error(e, exc_info=True)
def test_ipv6(self):
    """ Tests if IPv6 addresseses are lookedup correctly """
    db = pyasn.pyasn(IPASN6_DB_PATH)
    expectations = [
        # First three IPs sugested by sebix (bug #14). Confirmed AS on WHOIS
        ("2001:41d0:2:7a6::1", 16276),  # OVH IPv6, AS16276
        ("2002:2d22:b585::2d22:b585", 6939),  # WHOIS states: IPv4 endpoint(45.34.181.133) of a 6to4 address. AS6939 = Hurricane Electric
        ("2a02:2770:11:0:21a:4aff:fef0:e779", 196752),  # TILAA, AS196752
        ("2607:f8b0:4006:80f::200e", 15169),  # GOOGLE AAAA
        ("d::d", None),  # Random unused IPv6
    ]
    for address, expected_asn in expectations:
        found_asn, _prefix = db.lookup(address)
        self.assertEqual(found_asn, expected_asn)
def get_asn(ip_addr, asndb_file):
    """ Returns the Autonomous System number of a network IP address.

    Args:
        ip_addr: A network IP address.
        asndb_file: The ASN database file to perform lookups against.

    Returns:
        String: The Autonomous System number.
    """
    logging.debug("Attempting to find ASN for: {}".format(ip_addr))
    try:
        asn, _prefix = pyasn.pyasn(asndb_file).lookup(ip_addr)
        logging.info("Found ASN: ASN{} - {}".format(asn, ip_addr))
        return asn
    except Exception as e:
        # Best effort: log the failure and fall through (returns None).
        logging.error(e, exc_info=True)
def test_all_ipasn_dbs(self):
    """ Checks compatibility of PyASN 1.2 results with current pyasn for all 2014 ipasn dbs . """
    version = sys.version_info[0]
    try:
        # Legacy PyASN 1.2 only exists for Python 2; skip otherwise.
        import PyASN
        assert version == 2
    except:
        print("SKIPPING - Python 2 or PyASN 1.2 not present ...",
              file=sys.stderr, end=' ')
        return
    dbs = glob(IPASN_DB_PATH + "ipasn_2014*.dat")
    print("", file=sys.stderr)
    for db in sorted(dbs):
        random.seed(db)  # for reproducibility
        print("comparing %s" % db, file=sys.stderr)
        newdb = pyasn.pyasn(db)
        olddb = PyASN.new(db)
        # Compare both implementations on a million random unicast IPs.
        for i in range(1000000):
            i1 = random.randint(1, 223)
            i2 = random.randint(0, 255)
            i3 = random.randint(0, 255)
            i4 = random.randint(0, 255)
            sip = "%d.%d.%d.%d" % (i1, i2, i3, i4)
            newas, prefix = newdb.lookup(sip)
            if newas:
                self.assertTrue(newas > 0,
                                msg="Negative AS for IP %s = %s" % (sip, newas))
            oldas = olddb.Lookup(sip)
            if oldas < 0:
                # this is an overflow bug in the old version,
                # e.g. 193.181.4.145 on 2014/10/07 returns -33785576
                continue
            self.assertEqual(oldas, newas, msg="Failed for IP %s" % sip)
def main(): parser = argparse.ArgumentParser(description='Low Impact Identification Tool') argroup = parser.add_mutually_exclusive_group(required=True) argroup.add_argument("-i", "--ip", help="An Ip address") argroup.add_argument("-f", "--ifile", help="A file of IPs") parser.add_argument("-p", "--port", help="A port") parser.add_argument("-v", "--verbose", help="Not your usual verbosity. This is for debugging why specific outputs aren't working! USE WITH CAUTION") argroup.add_argument("-s", "--subnet", help="A subnet!") argroup.add_argument("-a", "--asn", help="ASN number. WARNING: This will take a while") parser.add_argument("-r", "--recurse", help="Test Recursion", action="store_true") parser.add_argument("-I", "--info", help="Get more info about operations", action="store_true") parser.add_argument("-S", "--ssl", help="For doing SSL checks only", action="store_true") parser.add_argument("-R", "--recon", help="Gather information about a given device", action="store_true") args = parser.parse_args() libpath = os.path.dirname(os.path.realpath(__file__)) + '/lib' asndb = pyasn.pyasn(libpath + '/ipasn.dat') if args.verbose is None: verbose = None else: verbose = args.verbose if args.port is None: dport = 443 else: dport = int(args.port) if args.ssl: ssl_only = 1 else: ssl_only = 0 if not args.info: info = None else: info = 1 if args.ip and not args.recurse and not args.recon: dest_ip = args.ip if dport is 80 or 81: getheaders(args.ip, dport, verbose, info) print "Skipping SSL test for", dport else: testips(args.ip, dport, verbose, ssl_only, info) elif args.ifile and not args.recurse: ipfile = args.ifile dest_ip = args.ip try: with open(ipfile) as f: for line in f: if dport in [80, 8080, 81, 88, 8000, 8888, 7547]: # print "Skipping SSL test for", dport getheaders(str(line).rstrip('\r\n)'), dport, verbose, info) else: testips(line, dport, verbose, ssl_only, info) except KeyboardInterrupt: # print "Quitting" sys.exit(0) except Exception as e: sys.exc_info()[0] print 
"error in first try", e pass elif args.subnet: try: for ip in netaddr.IPNetwork(str(args.subnet)): try: if dport == 80: getheaders(str(ip).rstrip('\r\n)'), dport, verbose, info) elif args.recurse: if dport == 53: recurse_DNS_check(str(ip).rstrip('\r\n'), verbose) elif dport == 1900: recurse_ssdp_check(str(ip).rstrip('\r\n'), verbose) elif dport == 123: ntp_monlist_check(str(ip).rstrip('\r\n'), verbose) else: recurse_ssdp_check(str(ip).rstrip('\r\n'), verbose) recurse_DNS_check(str(ip).rstrip('\r\n'), verbose) ntp_monlist_check(str(ip).rstrip('\r\n'), verbose) else: testips(str(ip), dport, verbose, ssl_only, info) except KeyboardInterrupt: print "Quitting from Subnet" sys.exit(0) pass except Exception as e: if args.verbose is not None: print "Error occured in Subnet", e sys.exit(0) except KeyboardInterrupt: sys.exit() except Exception as e: sys.exit() elif args.asn: for subnet in asndb.get_as_prefixes(int(args.asn)): try: for ip in netaddr.IPNetwork(str(subnet)): if dport == 80: getheaders(str(ip).rstrip('\r\n)'), dport, verbose, info) elif args.recurse: if dport == 53: recurse_DNS_check(str(ip).rstrip('\r\n'), verbose) elif dport == 1900: recurse_ssdp_check(str(ip).rstrip('\r\n'), verbose) elif dport == 123: ntp_monlist_check(str(ip).rstrip('\r\n'), verbose) else: recurse_ssdp_check(str(ip).rstrip('\r\n'), verbose) recurse_DNS_check(str(ip).rstrip('\r\n'), verbose) ntp_monlist_check(str(ip).rstrip('\r\n'), verbose) else: testips(str(ip), dport, verbose, ssl_only, info) except KeyboardInterrupt: print "Quitting" sys.exit(1) except Exception as e: if args.verbose is not None: print "Error occured in Subnet", e sys.exit(0) elif args.ifile and args.recurse: ipfile = args.ifile try: with open(ipfile) as f: for line in f: if dport == 53: recurse_DNS_check(str(line).rstrip('\r\n'), verbose) elif dport == 1900: recurse_ssdp_check(str(line).rstrip('\r\n'), verbose) elif dport == 123: ntp_monlist_check(str(line).rstrip('\r\n'), verbose) else: 
recurse_ssdp_check(str(line).rstrip('\r\n'), verbose) recurse_DNS_check(str(line).rstrip('\r\n'), verbose) ntp_monlist_check(str(line).rstrip('\r\n'), verbose) except KeyboardInterrupt: print "Quitting from first try in ifile" sys.exit(0) except Exception as e: sys.exit() print "error in recurse try", e raise elif args.ip and args.recurse: if dport == 53: recurse_DNS_check(str(args.ip), verbose) elif dport == 1900: recurse_ssdp_check(str(args.ip), verbose) elif dport == 123: ntp_monlist_check(str(args.ip).rstrip('\r\n'), verbose) else: print "Trying 53,1900 and 123!" recurse_DNS_check(str(args.ip), verbose) recurse_ssdp_check(str(args.ip), verbose) ntp_monlist_check(str(args.ip).rstrip('\r\n'), verbose) if args.ip and args.recon: print "Doing recon on ", args.ip dest_ip = args.ip try: testips(dest_ip, dport, verbose, ssl_only, info) recurse_DNS_check(str(args.ip), verbose) recurse_ssdp_check(str(args.ip), verbose) ntp_monlist_check(str(args.ip).rstrip('\r\n'), verbose) except KeyboardInterrupt: print "Quitting" sys.exit(0) except Exception as e: print "Encountered an error", e
# --- Command-line parsing: exactly one input source (-f / -d / -i) ---
m_group = parser.add_mutually_exclusive_group()
m_group.add_argument('-f', type=str, dest="fname", default=None,
                     help="Pcap file to parse")
m_group.add_argument('-d', type=str, dest="dir_path", default=None,
                     help="Pcap directory to parse recursivly")
m_group.add_argument('-i', type=str, dest="interface", default=None,
                     help="interface for live capture")
parser.add_argument('-t', action="store_true", dest="timestamp",
                    help="Include a timestamp in all generated messages (useful for correlation)")
parser.add_argument('-v', action="store_true", dest="Verbose",
                    help="More verbose.")
options = parser.parse_args()
# The group is not 'required=True', so enforce one source manually.
if options.fname is None and options.dir_path is None and options.interface is None:
    print '\n\033[1m\033[31m -f or -d or -i mandatory option missing.\033[0m\n'
    parser.print_help()
    exit(-1)
try:
    asndb = pyasn.pyasn('ipasn.dat')
    gi = pygeoip.GeoIP('GeoIP.dat')
except:
    # NOTE(review): execution deliberately continues without the ASN/GeoIP
    # databases (exit is commented out below) -- lookups will fail later.
    print "You need ipasn.dat (pygeoip) and GeoIP.dat (maxmind db) to start this program"
    print "file has to be in libpcap format - editcap -F libpcap test.pcapng test.pcap may help"
    #exit(1)
ShowWelcome()
# Copy parsed options into module-level names used by the rest of the script.
Verbose = options.Verbose
fname = options.fname
dir_path = options.dir_path
interface = options.interface
timestamp = options.timestamp
start_time = time.time()
Filename = str(os.path.join(os.path.dirname(__file__), "findSMTP-Session.log"))
def main():
    """Entry point: trace DNS and web paths for each FQDN and compare the
    ASNs traversed by both traceroute types."""
    asndb = None
    args = parse_arguments()
    log.basicConfig(level=log.getLevelName(args.verbosity.upper()),
                    format="%(asctime)s [%(levelname)s]: %(message)s")
    log.debug("Set verbosity level to: %s" % args.verbosity)
    # Traceroutes need raw sockets, hence root.
    if os.geteuid() != 0:
        log.critical("We need root privileges to run traceroutes.")
        return 1
    try:
        asndb = pyasn.pyasn(args.asn_db)
    except Exception as err:
        log.critical("Couldn't load ASN DB file '%s': %s" % (args.asn_db, err))
        sys.exit(1)
    # Either a file of FQDNs or a single FQDN from the command line.
    if args.fqdn_file:
        fqdns = load_fqdns(args.fqdn_file)
    else:
        fqdns = [args.fqdn]
    for i, fqdn in enumerate(fqdns):
        log.info("Now handling FQDN %d of %d: %s" % (i + 1, len(fqdns), fqdn))
        output_bytes = trace_fqdn(fqdn, args.dns_server)
        output = output_bytes.decode("utf-8")
        log.debug("dig output: %s" % output)
        servers = extract_servers(output, args.dns_server)
        log.info("DNS servers in dig trace: %s"
                 % ", ".join([h.addr for h in servers]))
        try:
            dns_trs, _ = traceroute_dns_servers(servers, fqdn)
        except Exception as err:
            # Skip this FQDN entirely if the DNS traceroute fails.
            log.warning("Couldn't run traceroute: %s" % err)
            continue
        if args.graph_output is not None:
            file_name = "dns-servers_%s" % args.graph_output
            dns_trs.graph(target="> %s" % file_name)
            log.info("Wrote DNS servers traceroute graph to: %s" % file_name)
        try:
            web_tr, _ = traceroute_web_server(fqdn)
        except Exception as err:
            log.warning("Couldn't run traceroute: %s" % err)
            continue
        if args.graph_output is not None:
            file_name = "web-server_%s" % args.graph_output
            web_tr.graph(target="> %s" % file_name)
            log.info("Wrote web server traceroute graph to: %s" % file_name)
        log.info("Now comparing ASNs from both traceroute types.")
        dns_asns = asns_in_traceroute(dns_trs, asndb)
        web_asns = asns_in_traceroute(web_tr, asndb)
        # Accumulate into shared stats consumed by determine_stats().
        stats.dns_asns += dns_asns
        stats.web_asns += web_asns
        asn_comparison(dns_asns, web_asns)
    determine_stats()
    return 0
if args.cron: # Assumes you are running the command from client. print('{} * * * * http_proxy=\'http://127.0.0.1:4444\' python {}/client.py --token $TOKEN'.format(random.randrange(0,55),os.getcwd())) raise SystemExit(1) if not args.token: print('Use a token. See --help for usage.') raise SystemExit(1) rpc = i2py.control.pyjsonrpc.HttpClient( url = ''.join(['http://',args.server,':',str(args.port)]), gzipped = True ) asndb = pyasn.pyasn('ipasn_20150224.dat') # Local router stuff try: a = i2py.control.I2PController() except: print('I2PControl not installed, or router is down.') raise SystemExit(1) ri_vals = a.get_router_info() this_router = { 'activepeers' : ri_vals['i2p.router.netdb.activepeers'], 'fastpeers' : ri_vals['i2p.router.netdb.fastpeers'], 'tunnelsparticipating' : ri_vals['i2p.router.net.tunnels.participating'], 'decryptFail' : a.get_rate(stat='crypto.garlic.decryptFail', period=3600),
import requests from requests.packages import urllib3 from bs4 import BeautifulSoup import rethinkdb as r from celery import Celery from celery.utils.log import get_task_logger import pyasn import geoip2.database, geoip2.errors import crawler.conf as conf logger = get_task_logger(__name__) app = Celery('crawler') app.config_from_object(conf.CeleryConf) asn_db = pyasn.pyasn(conf.ASN_FILE) geoip2_db = geoip2.database.Reader(conf.GEOIP2_FILE) DomainInfo = namedtuple( 'DomainInfo', ['name', 'elapsed', 'headers', 'linked_domains', 'asn', 'country'] ) class UncrawlableDomain(Exception): pass def get_page(domain): urls = ['http://' + domain, 'https://' + domain] for url in urls:
def __init__(self, pyasn_file=None):
    """Optionally load a pyasn database, then create the whois client."""
    if pyasn_file:
        self.asndb = pyasn(pyasn_file)
    self.whois = Client()
try: csvstring = subprocess.check_output(cmd) except subprocess.CalledProcessError: log.warning("Non-zero exit code for %s" % f) continue num_reqs = len(csvstring.decode("unicode_escape").split("\n")) if num_reqs < MIN_REQUESTS: log.warning("Only %d requests in output. Skipping.", num_reqs) continue process_output(csvstring) if __name__ == "__main__": if len(sys.argv) != 4: print("Usage: %s PCAP_DIR CONSENSUS_DIR ASNDB" % sys.argv[0], file=sys.stderr) sys.exit(1) asndb = pyasn.pyasn(sys.argv[3]) process_files(sys.argv[1], sys.argv[2]) create_output() if top_ass != ASNS: log.critical("Top ASs not the same as hard-coded ASs. " "Update with: %s", top_ass) sys.exit(0)
def _pyasn_db(self):
    '''Return pyasn object after instantiating.'''
    resources_dir = path.dirname(asn_report.__file__)
    db_file = path.abspath(path.join(resources_dir, 'resources/ip_to_asn.db'))
    return pyasn.pyasn(db_file)