def run(self):
    """Replay BGP records as ExaBGP-style statements on stdout.

    Builds a BGPStream either in live mode (collector/record-type/interval
    filters) or in offline single-file mode (update and/or RIB dumps),
    applies optional prefix / peer-ASN / community filters, then emits an
    ``announce``/``withdraw`` line for every element whose peer ASN has a
    configured next-hop, pacing output to mirror the original inter-record
    timing plus ``self.delay``.
    """
    stream = BGPStream()
    rec = BGPRecord()

    if self.upd_file is None:
        # Live mode: stream from the configured collector inside the window.
        stream.add_filter('collector', self.collector)
        stream.add_filter('record-type', self.record_type)
        stream.add_interval_filter(self.from_date, self.to_date)
        stream.set_live_mode()
    else:
        # Offline mode: replay local dump file(s).
        stream.set_data_interface('singlefile')
        if self.upd_file:
            stream.set_data_interface_option('singlefile', 'upd-file',
                                             self.upd_file)
        if self.rib_file:
            stream.set_data_interface_option('singlefile', 'rib-file',
                                             self.rib_file)

    if self.prefix_filter is not None:
        for prefix in self.prefix_filter:
            stream.add_filter('prefix', prefix)
    if self.peer_as_filter:
        for asn in self.peer_as_filter:
            stream.add_filter('peer-asn', str(asn))
    if self.communities_filter:
        for community in self.communities_filter:
            stream.add_filter('community', community)

    stream.start()

    # Prime the pacing loop with the first record's timestamp.
    # NOTE(review): if the stream yields no records at all, rec.time here
    # is whatever BGPRecord initializes it to — confirm against bindings.
    stream.get_next_record(rec)
    prev = rec.time
    while stream.get_next_record(rec):
        now = rec.time
        if rec.status == 'valid':
            elem = rec.get_next_elem()
            while elem:
                statement = None
                peer_asn = elem.peer_asn
                # Only peers with a configured next-hop are replayed.
                if peer_asn in self.asn_to_nexthop:
                    if elem.type == 'A' or elem.type == 'R':
                        # Announcement (or RIB entry): rewrite the next-hop
                        # to the one configured for this peer.  (The
                        # original re-tested membership and fetched the
                        # element's own next-hop only to overwrite it.)
                        prefix = elem.fields['prefix']
                        as_path = elem.fields['as-path']
                        nexthop = self.asn_to_nexthop[peer_asn]
                        statement = 'announce route %s next-hop %s as-path' \
                                    ' [ %s ]' % (prefix, nexthop, as_path)
                    elif elem.type == 'W':
                        prefix = elem.fields['prefix']
                        statement = 'withdraw route %s' % prefix
                if statement:
                    sys.stdout.write("%s\n" % statement)
                    sys.stdout.flush()
                elem = rec.get_next_elem()
        # Pace the replay; clamp at zero so an out-of-order timestamp
        # (now < prev) cannot make time.sleep() raise ValueError.
        time.sleep(max(0, self.delay + now - prev))
        prev = now
import radix
import sys
import calendar
import time

# Build a BGPStream pulling both RIB dumps and updates for the
# route-views2 collector over a fixed time window, via the broker
# data interface.
stream = BGPStream()
rec = BGPRecord()

start_interval = 1401623715

##stream.add_filter('project', 'ris')
stream.add_filter('collector', 'route-views2')
#stream.add_filter('collector', 'rrc04')
stream.add_filter('record-type', 'ribs')
stream.add_filter('record-type', 'updates')
stream.add_interval_filter(start_interval, 1406894115)
# Only keep RIB dumps every 172800 s (2 days).
stream.add_rib_period_filter(172800)
stream.set_data_interface('broker')
#stream.set_data_interface_option('broker', 'db-host', 'loki-ge')
#stream.set_data_interface_option('broker', 'db-port', '3306')
#stream.set_data_interface_option('broker', 'db-user', 'bgpstream')

# Parenthesized single-argument form works identically on Python 2 and 3
# (the original Python 2-only print statement breaks under Python 3).
print("start bgpstream")
stream.start()

# signature {collector}{ip}{asn} -> id
info_id = {}
# id -> peer ASN (for right analysis)
id_ASN = {}
# id -> signature (for charthouse)
id_sig = {}
# last id assigned
last_id = -1
# Initialize BGPStream with relevant filters
stream = BGPStream()
rec = BGPRecord()
mode = ""

if len(sys.argv) == 1:
    # No CLI argument: fetch RIB dumps for 8.0.0.0/8 from a fixed set of
    # RIPE RIS collectors over a hard-coded time window.
    collectors = ['rrc00', 'rrc01', 'rrc03', 'rrc04', 'rrc05']
    for collector in collectors:
        stream.add_filter('collector', collector)
    stream.add_filter('record-type', 'ribs')
    stream.add_interval_filter(1475310000, 1475350000)
    stream.add_filter('prefix', '8.0.0.0/8')
    mode = 'ripe'
elif len(sys.argv) == 2:
    # One CLI argument: read a single local RIB dump instead.
    filename = sys.argv[1]
    stream.set_data_interface('singlefile')
    stream.set_data_interface_option('singlefile', 'rib-file', filename)
    collectors = ['singlefile_ds']
    mode = 'file'

stream.start()

# Open the CSV output files consumed by neo4j-import.
# NOTE(review): these handles are presumably closed further down the
# file, outside this chunk — confirm.
collector_file = open("csv/collector.csv", 'w')
AS_file = open("csv/AS.csv", 'w')
prefix_file = open("csv/prefix.csv", 'w')
route_file = open("csv/route.csv", 'w')
connections_file = open("csv/connections.csv", 'w')
connect_rels_file = open("csv/connect_rels.csv", 'w')
route_rels_file = open("csv/route_rels.csv", 'w')
import calendar
import time

# Build a BGPStream pulling both RIB dumps and updates for the
# route-views2 collector over a fixed time window, backed by the
# mysql data interface.
stream = BGPStream()
rec = BGPRecord()

start_interval = 1403042654
end_interval = 1403055754

##stream.add_filter('project', 'ris')
stream.add_filter('collector', 'route-views2')
#stream.add_filter('collector', 'rrc04')
stream.add_filter('record-type', 'ribs')
stream.add_filter('record-type', 'updates')
stream.add_interval_filter(start_interval, end_interval)
# Only keep RIB dumps every 172800 s (2 days).
stream.add_rib_period_filter(172800)
stream.set_data_interface('mysql')
stream.set_data_interface_option('mysql', 'db-host', 'loki-ge')
stream.set_data_interface_option('mysql', 'db-port', '3306')
stream.set_data_interface_option('mysql', 'db-user', 'bgpstream')

# Parenthesized single-argument form works identically on Python 2 and 3
# (the original Python 2-only print statement breaks under Python 3).
print("start bgpstream")
stream.start()

# signature {collector}{ip}{asn} -> id
info_id = {}
# id -> peer ASN (for right analysis)
id_ASN = {}
# id -> signature (for charthouse)
id_sig = {}
# last id assigned
last_id = -1