def filter_play_updates(collectors, start_time_ts, stop_time_ts):
    """Build a BGPStream that replays RIBs + updates for *collectors*.

    The interval is extended backwards from ``start_time_ts`` to the most
    recent RIB dump boundary (minus a 10-minute safety margin) so that a
    full RIB is always included before the updates.

    Parameters
    ----------
    collectors : iterable of str
        Collector names; any name containing 'rrc' is treated as RIPE RIS.
    start_time_ts, stop_time_ts : int
        Unix timestamps bounding the updates of interest.

    Returns
    -------
    BGPStream
        A configured (but not started) stream.
    """
    stream = BGPStream()
    is_ripe = False
    for c in collectors:
        if 'rrc' in c:
            is_ripe = True
        stream.add_filter('collector', c)
    stream.set_data_interface_option('broker', 'url',
                                     'https://bgpstream-dev.caida.org/broker')
    stream.add_filter('prefix', '0.0.0.0/0')  # Only focus on IPV4
    stream.add_filter('record-type', 'ribs')
    stream.add_filter('record-type', 'updates')

    # Find the time when the last RIB was available (every 2 hours with
    # RouteViews, every 8 hours with RIPE RIS):
    # 28800 if RIPE RIS, 7200 if RouteViews.
    # BUG FIX: the original set 28880 (typo) instead of 28800, which is
    # not a RIB dump period and misaligned the interval start.
    offset = 7200
    if is_ripe:
        offset = 28800
    # Snap down to the previous dump boundary in O(1) instead of the
    # original decrement-by-one loop (up to `offset` iterations).
    rib_timestamp = start_time_ts - (start_time_ts % offset)
    rib_timestamp -= 600  # 10-minute margin before the dump boundary
    stream.add_interval_filter(rib_timestamp, stop_time_ts)
    # Effectively "only one RIB": huge period so a single dump is used.
    stream.add_rib_period_filter(100000000000000000)
    return stream
def run(self):
    """Replay BGP records (live from a collector, or from local dump
    files) as ExaBGP-style announce/withdraw statements on stdout.

    Announcements are rewritten so the next-hop comes from
    ``self.asn_to_nexthop``; elements from peers without a configured
    next-hop are skipped entirely.  Records are paced to reproduce the
    original inter-record timing plus ``self.delay`` seconds.
    """
    stream = BGPStream()
    rec = BGPRecord()

    if self.upd_file is None:
        # Live mode: pull records for the configured collector/interval.
        stream.add_filter('collector', self.collector)
        stream.add_filter('record-type', self.record_type)
        stream.add_interval_filter(self.from_date, self.to_date)
        stream.set_live_mode()
    else:
        # Offline mode: read from local single-file dump(s).
        stream.set_data_interface('singlefile')
        if self.upd_file:
            stream.set_data_interface_option('singlefile', 'upd-file',
                                             self.upd_file)
        if self.rib_file:
            stream.set_data_interface_option('singlefile', 'rib-file',
                                             self.rib_file)

    if self.prefix_filter is not None:
        for prefix in self.prefix_filter:
            stream.add_filter('prefix', prefix)
    if self.peer_as_filter:
        for asn in self.peer_as_filter:
            stream.add_filter('peer-asn', str(asn))
    if self.communities_filter:
        for community in self.communities_filter:
            stream.add_filter('community', community)

    stream.start()

    # Prime the pacing clock with the first record's timestamp.
    stream.get_next_record(rec)
    prev = rec.time
    while stream.get_next_record(rec):
        now = rec.time
        if rec.status == 'valid':
            elem = rec.get_next_elem()
            while elem:
                statement = None
                peer_asn = elem.peer_asn
                # Only replay elements from peers we have a next-hop for.
                if peer_asn in self.asn_to_nexthop:
                    if elem.type == 'A' or elem.type == 'R':
                        prefix = elem.fields['prefix']
                        as_path = elem.fields['as-path']
                        # BUG FIX: the original re-tested asn_to_nexthop
                        # membership (always true under the outer guard)
                        # and read elem.fields['next-hop'] only to
                        # overwrite it; the configured next-hop always
                        # wins here.  Also dropped the unused
                        # peer_address local.
                        nexthop = self.asn_to_nexthop[peer_asn]
                        statement = 'announce route %s next-hop %s as-path' \
                                    ' [ %s ]' % (prefix, nexthop, as_path)
                    elif elem.type == 'W':
                        prefix = elem.fields['prefix']
                        statement = 'withdraw route %s' % prefix
                if statement:
                    sys.stdout.write("%s\n" % statement)
                    sys.stdout.flush()
                elem = rec.get_next_elem()
        # Sleep to reproduce the original spacing between records.
        time.sleep(self.delay + now - prev)
        prev = now
# Set up the BGPStream source: either a fixed set of RIPE RIS collectors
# (no CLI argument) or a single local RIB file (one CLI argument).
stream = BGPStream()
rec = BGPRecord()
mode = ""
if len(sys.argv) == 1:
    # Default: RIBs from five RIPE RIS collectors for a fixed interval,
    # restricted to one /8 prefix.
    collectors = ['rrc00', 'rrc01', 'rrc03', 'rrc04', 'rrc05']
    for collector in collectors:
        stream.add_filter('collector', collector)
    stream.add_filter('record-type', 'ribs')
    stream.add_interval_filter(1475310000, 1475350000)
    stream.add_filter('prefix', '8.0.0.0/8')
    mode = 'ripe'
elif len(sys.argv) == 2:
    # Read a single local RIB dump given as the first CLI argument.
    filename = sys.argv[1]
    stream.set_data_interface('singlefile')
    stream.set_data_interface_option('singlefile', 'rib-file', filename)
    # 'singlefile_ds' is the collector name BGPStream assigns to
    # single-file input.
    collectors = ['singlefile_ds']
    mode = 'file'
stream.start()
# open files for neo4j-import
# NOTE(review): these handles are written (and presumably closed) later in
# the file, past this chunk; they stay open for the whole export.
collector_file = open("csv/collector.csv", 'w')
AS_file = open("csv/AS.csv", 'w')
prefix_file = open("csv/prefix.csv", 'w')
route_file = open("csv/route.csv", 'w')
connections_file = open("csv/connections.csv", 'w')
connect_rels_file = open("csv/connect_rels.csv", 'w')
route_rels_file = open("csv/route_rels.csv", 'w')
# formats for files
#!/usr/bin/env python
"""Scan a local BGP dump file and print every record that is NOT valid."""
from _pybgpstream import BGPStream, BGPRecord, BGPElem


def get_src_from_path(path_string):
    """Return the origin AS: the last element of a space-separated AS path."""
    return path_string.split(" ")[-1]


# Create a new bgpstream instance and a reusable bgprecord instance
stream = BGPStream()
rec = BGPRecord()

stream.set_data_interface('singlefile')
# BUG FIX: set_data_interface_option takes (interface, option, value);
# the original passed only the file path, which is not a valid call.
# Assumes the dump is a RIB ('rib-file'); use 'upd-file' if it holds
# updates — TODO confirm.
stream.set_data_interface_option('singlefile', 'rib-file', '../bgp_dump.txt')

# Consider RIPE RRC 11 only (the original comment said "RRC 10", but the
# filter below has always used 'rrc11').
# NOTE(review): with the 'singlefile' interface this collector filter is
# likely a no-op — confirm it is still wanted.
stream.add_filter('collector', 'rrc11')

# Consider this time interval (a single instant):
# Sat Aug 1 08:20:11 UTC 2015
stream.add_interval_filter(1438417216, 1438417216)

# Start the stream
stream.start()

# Get next record; report only records that are not valid.
while stream.get_next_record(rec):
    if rec.status != "valid":
        print(rec.project, rec.collector, rec.type, rec.time, rec.status)
import time

# Stream RIBs + updates for route-views2 over a fixed ~3.6 h window,
# sourced from a MySQL-backed BGPStream data interface.
stream = BGPStream()
rec = BGPRecord()

start_interval = 1403042654
end_interval = 1403055754

##stream.add_filter('project', 'ris')
stream.add_filter('collector', 'route-views2')
#stream.add_filter('collector', 'rrc04')
stream.add_filter('record-type', 'ribs')
stream.add_filter('record-type', 'updates')
stream.add_interval_filter(start_interval, end_interval)
# 172800 s = 48 h: at most one RIB per collector in the window.
stream.add_rib_period_filter(172800)

# Read records from the 'mysql' data interface on host loki-ge.
stream.set_data_interface('mysql')
stream.set_data_interface_option('mysql', 'db-host', 'loki-ge')
stream.set_data_interface_option('mysql', 'db-port', '3306')
stream.set_data_interface_option('mysql', 'db-user', 'bgpstream')

# NOTE(review): Python-2-only print statement; the rest of this file
# presumably targets Python 2 as well.
print "start bgpstream"
stream.start()

# Lookup tables filled by the processing loop later in the file:
# signature {collector}{ip}{asn} -> id
info_id = {}
# id peer ASN (for right analysis)
id_ASN = {}
# id signature (for charthouse)
id_sig = {}
# last id assigned
last_id = -1
    # (Continuation of an `if len(sys.argv) == 1:` branch that starts
    # before this chunk.)
    # Default: RIBs + updates from rrc01 around the Feb 2008 YouTube
    # hijack window, restricted to the affected prefix.
    collectors = ['rrc01']
    for collector in collectors:
        stream.add_filter('collector', collector)
    stream.add_filter('record-type', 'ribs')
    stream.add_filter('record-type', 'updates')
    stream.add_interval_filter(1203850000, 1203920000)
    # 70000 s > interval length: at most one RIB in the window.
    stream.add_rib_period_filter(70000)
    stream.add_filter('prefix', '208.65.153.0/22')
    mode = 'ripe'
elif len(sys.argv) == 2:
    # One CLI argument: a directory of dump files, processed in sorted
    # order; the first file is fed to BGPStream as the RIB, the rest are
    # kept in `filelist` for later use.
    dirname = sys.argv[1]
    filelist = os.listdir(dirname)
    filelist.sort()
    filelist = [dirname + os.sep + f for f in filelist]
    stream.set_data_interface('singlefile')
    stream.set_data_interface_option('singlefile', 'rib-file', filelist[0])
    # stream.add_filter('prefix', '8.8.0.0/16')
    filelist = filelist[1:]
    # 'singlefile_ds' is the collector name BGPStream assigns to
    # single-file input.
    collectors = ['singlefile_ds']
    mode = 'file'
stream.start()
# open files for neo4j-import
# NOTE(review): these handles are used later in the file, past this chunk.
collector_file = open("csv/collector.csv", 'w')
AS_file = open("csv/AS.csv", 'w')
prefix_file = open("csv/prefix.csv", 'w')
route_file = open("csv/route.csv", 'w')
time_file = open("csv/time.csv", 'w')
connect_rels_file = open("csv/connect_rels.csv", 'w')