def parseRawTracerouteResults(results, fnMakePath, *prune):
    '''parseRawTracerouteResults: Parses RIPE Atlas traceroute results into
    path sets, grouping on probe IDs.

    @param results    List of raw traceroute results.
    @param fnMakePath Function that returns a path's set representation; it
                      accepts a traceroute.ip_path object as input, and
                      optionally a list *prune that contains locations to
                      exclude. Refer to ripeh.makePath and caidah.makePath
                      for examples.
    @param prune      List of strings to exclude from our AS paths.
    @ret 1 Dict mapping probe IDs to lists of pruned AS paths.
    @ret 2 Dict mapping probe IDs to failure counters.
    '''
    logging.info('Parsing raw traceroute results using filter: {}'.format(prune))
    p2r, p2f = {}, {}
    for result in results:
        tr = TracerouteResult(result)
        if tr.is_success:
            path = fnMakePath(tr.ip_path, *prune)
            p2r.setdefault(tr.probe_id, []).append(path)
        else:
            p2f.setdefault(tr.probe_id, 0)
            p2f[tr.probe_id] += 1
    return p2r, p2f

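A minimal usage sketch for parseRawTracerouteResults, assuming results is the parsed JSON list from an Atlas results download; the makePath helper below is hypothetical, standing in for ripeh.makePath / caidah.makePath:

import json

def makePath(ip_path, *prune):
    # hypothetical stand-in: keep the first responding address of each hop,
    # skipping any address listed in prune
    path = []
    for hop in ip_path:
        for addr in hop:
            if addr is not None and addr not in prune:
                path.append(addr)
                break
    return frozenset(path)

with open('results.json') as fd:  # filename assumed
    raw = json.load(fd)
probe_to_paths, probe_to_fails = parseRawTracerouteResults(raw, makePath, '192.0.2.1')
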
def return_path(self, result):
    '''Returns the i-th IP path from a RIPE measurement.

    Input:
        a) result: Dictionary containing the i-th traceroute.
    Output:
        a) src_ip: A string with the source IP address.
        b) dst_ip: A string with the destination IP address.
        c) ip_path: A list containing the IPs of the traceroute path.
        d) delays: A list containing the delays of the traceroute path.
    '''
    trace_res = TracerouteResult(result)
    all_replies = trace_res.hops
    src_ip = result['from']
    ip_path = []
    delays = []
    dst_ip = '*'
    if 'error' not in result['result'][0]:
        dst_ip = trace_res.destination_address
    for node in all_replies:
        packet = node.packets
        if node.index != 255:
            [ip, delay] = self.choose_ip(packet)
            ip_path.append(ip)
            delays.append(delay)
    return (src_ip, dst_ip, ip_path, delays)

def compare_results(probe_table):
    for probe_id in probe_table:
        if debug:
            print('Parsing probe_id: %s' % probe_id)
        r4 = TracerouteResult(probe_table[probe_id][4],
                              on_error=TracerouteResult.ACTION_IGNORE)
        r6 = TracerouteResult(probe_table[probe_id][6],
                              on_error=TracerouteResult.ACTION_IGNORE)
        if r4.is_success and r6.is_success:
            probe = None
            if r6.total_hops - r4.total_hops > hop_threshold:
                probe = get_probe_details(probe_id)
                more_hops = r6.total_hops - r4.total_hops
                print('AS%s - %s hop count:%s vs %s hop count:%s - IPv6 has %s more hops'
                      % (probe['asn_v6'], probe['prefix_v6'], r6.total_hops,
                         probe['prefix_v4'], r4.total_hops, more_hops))
            elif r4.total_hops - r6.total_hops > hop_threshold:
                probe = get_probe_details(probe_id)
                more_hops = r4.total_hops - r6.total_hops
                print('AS%s - %s hop count:%s vs %s hop count:%s - IPv4 has %s more hops'
                      % (probe['asn_v4'], probe['prefix_v4'], r4.total_hops,
                         probe['prefix_v6'], r6.total_hops, more_hops))
            if r6.last_median_rtt - r4.last_median_rtt > rtt_threshold:
                if probe is None:
                    probe = get_probe_details(probe_id)
                percent = int(((r6.last_median_rtt - r4.last_median_rtt) / r4.last_median_rtt) * 100)
                print('AS%s - %s RTT:%s vs %s RTT:%s - IPv6 is %s%% worse'
                      % (probe['asn_v6'], probe['prefix_v6'], r6.last_median_rtt,
                         probe['prefix_v4'], r4.last_median_rtt, percent))
            elif r4.last_median_rtt - r6.last_median_rtt > rtt_threshold:
                if probe is None:
                    probe = get_probe_details(probe_id)
                percent = int(((r4.last_median_rtt - r6.last_median_rtt) / r6.last_median_rtt) * 100)
                print('AS%s - %s RTT:%s vs %s RTT:%s - IPv4 is %s%% worse'
                      % (probe['asn_v4'], probe['prefix_v4'], r4.last_median_rtt,
                         probe['prefix_v6'], r6.last_median_rtt, percent))
        else:
            if debug:
                if r6.destination_ip_responded is False or r6.last_hop_responded is False:
                    print('Probe:%s could not trace to %s' % (r6.probe_id, r6.destination_address))
                elif r4.destination_ip_responded is False or r4.last_hop_responded is False:
                    print('Probe:%s could not trace to %s' % (r4.probe_id, r4.destination_address))

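compare_results assumes probe_table maps each probe ID to its raw result per address family (keys 4 and 6), with debug, hop_threshold, and rtt_threshold as module-level globals. A sketch of how such a table might be assembled; the threshold values and the results_v4/results_v6 lists are assumptions:

debug = True
hop_threshold = 3    # hops; assumed tuning value
rtt_threshold = 20   # milliseconds; assumed tuning value

probe_table = {}
for family, results in ((4, results_v4), (6, results_v6)):
    for raw in results:
        # raw Atlas results carry the probe ID in the 'prb_id' field
        probe_table.setdefault(raw['prb_id'], {})[family] = raw

compare_results(probe_table)
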
def processMeasurement(mID, dic, asndb):
    # mID = '3703242' has two results in one measurement
    source = "https://atlas.ripe.net/api/v1/measurement-latest/" + mID
    response = requests.get(source).json()
    ip_set = set()
    for probe_id, result in response.items():
        result = result[0]  # there's only one result for each probe
        parsed_result = TracerouteResult(result)
        source_ip = parsed_result.origin
        dest_ip = parsed_result.destination_address

        # convert source_ip to ASN
        (source_asn, source_prefix) = asndb.lookup(source_ip)
        if source_asn is None:
            continue

        # get inner dict; create it if it doesn't exist
        if source_asn not in dic:
            dic[source_asn] = {}
        inner_dic = dic[source_asn]

        # get IP set; create it if it doesn't exist
        if dest_ip not in inner_dic:
            inner_dic[dest_ip] = set()
        as_path_set = inner_dic[dest_ip]

        # make a set of IPs before converting to ASNs
        ip_path_set = set()
        ip_path = parsed_result.ip_path
        for triple in ip_path:
            for ip in triple:
                if ip is not None:
                    ip_path_set.add(ip)

        # explicitly ADD source_ip and dest_ip to ip_path_set
        ip_path_set.add(source_ip)
        ip_path_set.add(dest_ip)
        # to exclude the endpoints instead, discard them before converting:
        # ip_path_set.discard(dest_ip)
        # ip_path_set.discard(source_ip)

        # convert to ASes and add them to the set
        for ip in ip_path_set:
            (asn, prefix) = asndb.lookup(ip)
            if asn is not None:
                as_path_set.add(asn)

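The asndb object above matches pyasn's lookup interface, where lookup(ip) returns an (asn, prefix) tuple; a sketch of wiring it up, with the RIB snapshot filename assumed:

import pyasn

asndb = pyasn.pyasn('ipasn.dat')  # filename assumed; built with pyasn's utilities

dic = {}
processMeasurement('3703242', dic, asndb)  # measurement ID taken from the comment above
for src_asn, per_dest in dic.items():
    for dest_ip, as_set in per_dest.items():
        print(src_asn, dest_ip, sorted(as_set))
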
def print_nicely(self, limit):
    with open(self.filename) as results:
        i = 0
        for result in results.readlines():
            if limit is not None and i >= limit:
                return
            parsed_result = TracerouteResult.get(result)
            print("PROBE ID: " + str(parsed_result.probe_id))
            print("firmware: " + str(parsed_result.firmware))
            print("origin: " + parsed_result.origin)
            print("measurement type: " + self.measurement_type)
            self._print_traceroute_nicely(parsed_result)
            print("\n")
            i += 1

def index(family):
    exp_id = bottle.request.query.id
    res_blob = exp_config[exp_id][family]

    # read the data
    G = nx.DiGraph()
    hop_cnt = defaultdict(int)
    hop_idx = defaultdict(int)
    origin_asn = None
    for res_set in res_blob:
        sagan_res = TracerouteResult(res_set)
        asn = get_addr_as(sagan_res.origin)
        prev_asn = asn
        origin_asn = asn
        hop_cnt[asn] += 1
        for hop in sagan_res.hops:
            # collect the distinct reply addresses seen at this hop
            hop_addr = set()
            for packet in hop.packets:
                if packet.origin is None:
                    continue
                hop_addr.add(packet.origin)
            for a in hop_addr:
                asn = get_addr_as(a)
                if asn is not None:
                    hop_cnt[asn] += 1
                    hop_idx[asn] = hop.index
                    addr2as[a] = asn
                else:
                    # unmapped address: attribute the hop to the previous ASN
                    hop_cnt[prev_asn] += 1
                    hop_idx[prev_asn] = hop.index
                    addr2as[a] = prev_asn
            if asn is not None and prev_asn != asn:
                G.add_edge(prev_asn, asn, rtt=hop.median_rtt)
                prev_asn = asn

    for n in G.nodes_iter():  # networkx 1.x API
        G.node[n]['hops'] = hop_cnt[n]
        G.node[n]['index'] = hop_idx[n]

    n, e = transform_graph(G, origin_asn, asn)

    # return an object
    return {'nodes': n, 'edges': e}

def make_probe_to_interval_results(msm_id, start_datetime):
    """
    Retrieve a single interval of traceroute results.

    Only returns results where the traceroute successfully made it to the
    destination.
    """
    probe_to_interval_results = dict()
    msm_meta = query_msm_meta(msm_id)
    msm_interval = msm_meta.interval
    msm_target_ip_addr = msm_meta.target_ip
    stop_datetime = start_datetime + datetime.timedelta(seconds=msm_interval)
    measurements = query_msm(msm_id, int(start_datetime.timestamp()),
                             int(stop_datetime.timestamp()))
    for msm in measurements:
        tracert_msm = TracerouteResult(msm)
        if not tracert_is_clean(tracert_msm, msm_target_ip_addr):
            continue
        probe_result = {
            "origin_addr": tracert_msm.origin,
            "stop_timestamp": tracert_msm.end_time.timestamp(),
            "ip_path": tracert_msm.ip_path,
        }
        if tracert_msm.probe_id not in probe_to_interval_results:
            probe_to_interval_results[tracert_msm.probe_id] = probe_result
        else:
            # keep the earliest result for each probe within the interval
            current_result = probe_to_interval_results[tracert_msm.probe_id]
            if probe_result["stop_timestamp"] < current_result["stop_timestamp"]:
                probe_to_interval_results[tracert_msm.probe_id] = probe_result
    return probe_to_interval_results

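tracert_is_clean, query_msm_meta, and query_msm are not shown above. A minimal sketch of the filter, assuming "clean" means sagan parsed the result without error and the destination itself answered on the final hop:

def tracert_is_clean(tracert_msm, msm_target_ip_addr):
    # assumed success criteria: no parse error and the destination responded
    if tracert_msm.is_error or not tracert_msm.destination_ip_responded:
        return False
    if not tracert_msm.hops:
        return False
    # require a reply from the measurement target on the last hop
    last_hop_addrs = {p.origin for p in tracert_msm.hops[-1].packets}
    return msm_target_ip_addr in last_hop_addrs
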
__author__ = 'secastro'

import json
import random
from ripe.atlas.sagan import TracerouteResult
from collections import Counter

with open('data/final_test_1/results.json', 'rb') as res_fd:
    res_blob = random.sample(json.load(res_fd), 50)

for res in res_blob:
    print "=" * 30
    sagan_res = TracerouteResult(res)
    for h in reversed(sagan_res.hops):
        rtt_sum = Counter()
        rtt_cnt = Counter()
        for p in h.packets:
            if p.origin is None:
                continue
            if p.rtt is not None:
                rtt_sum[p.origin] += p.rtt
                rtt_cnt[p.origin] += 1
            print p.origin, p.rtt
        for a in rtt_cnt:
            print "** ", a, rtt_sum[a] / rtt_cnt[a]
    print "\n"

traceroute_dict[measurement]["target_lon"] = longitude           # set target longitude
traceroute_dict[measurement]["target_address"] = target_address  # set target geo address
traceroute_dict[measurement]["target_isanchor"] = is_anchor      # True if probe is an anchor
probes = {}  # initialise all the probes dictionaries within this measurement
i = 0
for result in results:  # get all the results of the pings from landmarks to target
    print("Reading measurement data,", measurement, "from probes", i, "one moment")
    print("this is result", i, "of measurement", measurement)
    result = TracerouteResult(result)
    print(result)
    if not result.is_error:  # if no error in handling/parsing this result
        p = result.probe_id
        # get all the properties of the individual probe used in this measurement
        probe = Probe(id=p)
        print('Probe', p)
        a = probe.geometry['coordinates']  # list of coordinates
        probe_x = a[0]  # probe's X coordinate
        probe_y = a[1]  # probe's Y coordinate
        probe_id = result.probe_id  # probe ID for this individual measurement
        if probe_id == '1000492':
            print("HERE IT IS *************************", probe_id, probe_x, probe_y)

def trace(start_time):
    # set end time
    end_time = str(int(start_time) + 86400)

    # initialize data structures
    uniqueTrace = set()
    uniqueIP = set()
    uniqueEdge = set()
    allTrace = []
    allIP = []
    nodeList = []

    # initialize edge count
    edgecount = 0

    # open node list
    with open("RipeNodeList") as f:
        for line in f:
            for word in line.split():
                nodeList.append(word)

    # iterate through each of the 195 nodes
    for node in nodeList:
        # open working files
        with open("/home/jay/Desktop/Trace_01/ripe-temp/traceList.txt", "w") as f:
            try:
                # download the page from the internet and store it in the
                # filesystem (changed to run on the desktop; for the server,
                # use the commented urlretrieve call instead of the request
                # block below)
                # urllib.urlretrieve("https://atlas.ripe.net/api/v2/measurements/" + node + "/results?start=" + str(start_time) + "&stop=" + str(end_time) + "&format=json", "/home/jay/Desktop/Trace_01/ripe-temp/ripe.json")
                req = urllib.request.Request(
                    "https://atlas.ripe.net/api/v2/measurements/" + node +
                    "/results?start=" + str(start_time) + "&stop=" + str(end_time) +
                    "&format=json")
                with urllib.request.urlopen(req) as response:
                    the_page = response.read()
                with open("/home/jay/Desktop/Trace_01/ripe-temp/ripe.json", "wb") as outfile:
                    outfile.write(the_page)

                try:
                    # import file to db
                    os.system("mongoimport --db RipeNode --collection mapping --type json "
                              "--file /home/jay/Desktop/Trace_01/ripe-temp/ripe.json --jsonArray")
                except:
                    print("Error--problem loading data to MongoDB")

                # create client and collection handle
                client = MongoClient()
                db = client.RipeNode
                coll = db.mapping
                index_i = 0
                index_j = 1

                # iterate through all documents in a collection
                cursor = coll.find()  # [index_i:index_j]

                # use mongo to parse data
                try:
                    for post in coll.find():
                        # record = convert(post)  # server version; on the desktop use the line below
                        record = post
                        my_result = TracerouteResult.get(record)
                        newLine = []
                        flag = 0
                        ip = my_result.ip_path
                        if my_result.is_success:
                            # set value of source and destination
                            source = my_result.source_address
                            destination = my_result.destination_address

                            # push src and dst to ip lists
                            allIP.append(source)
                            allIP.append(destination)
                            uniqueIP.add(source)
                            uniqueIP.add(destination)

                            # add source:dest to newLine
                            newLine.append(source + ':' + destination + '\t')
                            # newLine.append(source + ',' + '0' + '\t')

                            # set initial value of hop
                            hop = 1

                            # iterate to get all addresses
                            for sublist in ip:
                                for y in sublist:
                                    # take only the first of the three replies
                                    while flag == 0:
                                        if y is None:
                                            # if 'None', add '0' to newLine
                                            newLine.append('0' + ',' + str(hop) + '\t')
                                            hop += 1
                                        else:
                                            # if not 'None', add the address to newLine
                                            newLine.append(y + ',' + str(hop) + '\t')
                                            allIP.append(y)
                                            uniqueIP.add(y)
                                            hop += 1
                                        flag = 1
                                flag = 0

                            # remove the last tab in the string
                            newLine[-1] = newLine[-1].replace('\t', '')

                            # convert the list to a string and add it to the file
                            f.write(''.join(newLine))
                            f.write('\n')
                except:
                    pass
            except:
                pass

    # unique traces
    with open("/home/jay/Desktop/Trace_01/ripe-temp/traceList.txt", "r") as f:
        for line in f:
            uniqueTrace.add(line)
            allTrace.append(line)

    # get edges
    count = 1
    for item in uniqueTrace:
        # set list so it will reset
        trace = []
        # split trace and push to list
        # (changed to split on tabs for Abdullah, to make parsing easier)
        for field in item.split('\t'):
            if ':' in field:
                continue
            # changed from split('-') for Abdullah, to make parsing easier
            field = field.split(',')
            trace.append(field[0])

        # iterate through the trace list for hop pairs
        i = 0
        while i < len(trace) - 1:
            first = trace[i]
            second = trace[i + 1]
            # give each '0' (non-responding hop) a unique incrementing id
            if first == '0':
                first = count
                count += 1
            if second == '0':
                second = count
                count += 1
            # changed to a tab separator for Abdullah, to make parsing easier
            uniqueEdge.add(str(first) + '\t' + str(second))
            i += 1

    # drop collection
    db.mapping.drop()

    # all traces
    with open("/home/jay/Desktop/Trace_01/RipeData/all_trace_" + start_time + ".txt", "w") as f:
        for item in allTrace:
            f.write(item)

    # all ips
    with open("/home/jay/Desktop/Trace_01/RipeData/all_ip_" + start_time + ".txt", "w") as f:
        for item in allIP:
            f.write(item + '\n')

    # unique traces
    with open("/home/jay/Desktop/Trace_01/RipeData/unique_trace_" + start_time + ".txt", "w") as f:
        for item in uniqueTrace:
            f.write(item)

    # unique ip addresses
    with open("/home/jay/Desktop/Trace_01/RipeData/unique_ip_" + start_time + ".txt", "w") as f:
        for item in uniqueIP:
            f.write(item + '\n')

    # unique edges (without '0's counted)
    with open("/home/jay/Desktop/Trace_01/RipeData/unique_edge_" + start_time + ".txt", "w") as f:
        for item in uniqueEdge:
            f.write(item + '\n')
            pair = item.split('\t')
            if ('.' in pair[0]) and ('.' in pair[1]):
                edgecount += 1

    # write totals to file
    with open("/home/jay/Desktop/Trace_01/RipeData/stats_" + start_time + ".txt", "w") as f:
        f.write("Total IPs: " + str(len(allIP)) + '\n')
        f.write("Total Traces: " + str(len(allTrace)) + '\n')
        f.write("Total Unique IPs: " + str(len(uniqueIP)) + '\n')
        f.write("Total Unique Traces: " + str(len(uniqueTrace)) + '\n')
        f.write("Total Unique Edges: " + str(edgecount) + '\n')

def trace():
    # global variables
    global start_time
    traceCount3 = 0
    ipCount3 = 0
    srcCount3 = 0
    dstCount3 = 0
    privateIP = 0
    edgeCount = 0
    target = 0
    uniqueTrace = set()
    uniqueIP = set()
    uniqueSrc = set()
    uniqueDst = set()
    uniqueEdge = set()
    nodeList = []

    with open("RipeNodeList") as f:
        for line in f:
            for word in line.split():
                nodeList.append(word)

    for line in open("start_time.txt"):
        if line.strip():
            start_time = int(line)
    end_time = start_time + 86400

    for node in nodeList:
        traceCount1 = 0
        srcCount1 = 0
        dstCount1 = 0
        ipCount1 = 0
        traceCount2 = 0
        srcCount2 = 0
        dstCount2 = 0
        ipCount2 = 0
        flag = 0
        privateIP = 0
        edgeCount1 = 0

        if not os.path.exists("/home/jay/RipeData/" + str(start_time) + "/all_nodes"):
            os.makedirs("/home/jay/RipeData/" + str(start_time) + "/all_nodes")
        if not os.path.exists("/home/jay/RipeData/" + str(start_time) + "/per_node/" + node):
            os.makedirs("/home/jay/RipeData/" + str(start_time) + "/per_node/" + node)

        # open working files
        of1 = open("traceList.txt", "w")
        of2 = open("ipList.txt", "w")
        of3 = open("src.txt", "w")
        of4 = open("dst.txt", "w")
        of5 = open("/home/jay/RipeData/" + str(start_time) + "/per_node/" + node + "/stats.txt", "w")
        of6 = open("/home/jay/RipeData/" + str(start_time) + "/per_node/" + node + "/trace.txt", "w")
        of7 = open("/home/jay/RipeData/" + str(start_time) + "/per_node/" + node + "/ip.txt", "w")
        of8 = open("/home/jay/RipeData/" + str(start_time) + "/per_node/" + node + "/src.txt", "w")
        of9 = open("/home/jay/RipeData/" + str(start_time) + "/per_node/" + node + "/dst.txt", "w")
        of10 = open("/home/jay/RipeData/" + str(start_time) + "/per_node/" + node + "/traceCount.txt", "w")
        of11 = open("/home/jay/RipeData/" + str(start_time) + "/per_node/" + node + "/edgeList.txt", "w")
        of15 = open("/home/jay/RipeData/" + str(start_time) + "/per_node/" + node + "/ipCount.txt", "w")
        ofx = open("/home/jay/RipeData/" + str(start_time) + "/all_nodes/stats.txt", "w")

        try:
            # download page from internet and store in filesystem
            urllib.urlretrieve("https://atlas.ripe.net/api/v2/measurements/" + node +
                               "/results?start=" + str(start_time) + "&stop=" + str(end_time) +
                               "&format=json", "/home/jay/RipeGraph/ripe.json")
            try:
                # import file to db
                os.system("mongoimport --db RipeNode --collection mapping --type json --file ripe.json --jsonArray")
            except:
                print "Error--problem loading data to MongoDB"

            # create client and collection handle
            client = MongoClient()
            db = client.RipeNode
            coll = db.mapping
            index_i = 0
            index_j = 1

            # iterate through all documents in a collection
            cursor = coll.find()  # [index_i:index_j]

            # use mongo to parse data
            try:
                for post in coll.find():
                    record = convert(post)
                    my_result = TracerouteResult.get(record)
                    newLine = []
                    srcList = []
                    dstList = []
                    flag = 0
                    ip = my_result.ip_path
                    if my_result.is_success:
                        source = my_result.source_address
                        destination = my_result.destination_address
                        srcList.append(my_result.source_address)
                        dstList.append(my_result.destination_address)
                        for sublist in ip:
                            for y in sublist:
                                # take only the first of the three replies
                                while flag == 0:
                                    if y is None:
                                        newLine.append('0')
                                    else:
                                        newLine.append(y)
                                    flag = 1
                            flag = 0

                    # write parsed data to file:
                    # trace strings and ip addresses
                    of1.write(source + ':' + destination + ' ')
                    hop = 1
                    for address in newLine:
                        of1.write(address + '-' + str(hop))
                        of2.write(address)
                        of1.write(' ')
                        of2.write('\n')
                        ipCount1 += 1
                        hop += 1
                    if len(newLine) != 0:
                        of1.write('\n')
                        traceCount1 += 1

                    # source ip addresses
                    for address in srcList:
                        of3.write(address)
                        of3.write('\n')
                        srcCount1 += 1

                    # destination ip addresses
                    for address in dstList:
                        of4.write(address)
                        of4.write('\n')
                        dstCount1 += 1
            except:
                print "Could not process document"
        except:
            of5.write("ERROR -- Could not download node " + node)

        # close files
        of1.close()
        of2.close()
        of3.close()
        of4.close()

        # write per node data to file
        # unique traces
        with open("traceList.txt", "r") as f:
            trace = []
            for line in f:
                if line not in trace:
                    trace.append(line)
                    uniqueTrace.add(line)
        for item in trace:
            of6.write(item)
            traceCount2 += 1
        of6.close()

        # trace counts
        allTraceList = []
        uniqueTraceList = []
        with open("traceList.txt", "r") as f:
            for line in f:
                # make list of traces
                allTraceList.append(line)
                if line not in uniqueTraceList:
                    # make list of unique traces
                    uniqueTraceList.append(line)
        # for each unique trace, count the number of occurrences in the total list
        for item in uniqueTraceList:
            num = allTraceList.count(item)
            # write count and trace to new file
            of10.write(str(num) + " " + item)
        of10.close()

        # ip counts
        with open("ipList.txt", "r") as f:
            ips = []
            uips = set()
            for item in f:
                ips.append(item)
                uips.add(item)
        # compare unique to all and count all
        for item in uips:
            num = ips.count(item)
            of15.write(str(num) + " " + item)
        of15.close()

        # edges
        edgeList = set()
        count = 1
        for item in uniqueTraceList:
            # set list so it will reset
            trace = []
            # split trace and push to list
            for field in item.split():
                if ':' in field:
                    continue
                field = field.split('-')
                trace.append(field[0])
            # iterate through the trace list for hop pairs
            i = 0
            while i < len(trace) - 1:
                first = trace[i]
                second = trace[i + 1]
                # give each '0' (non-responding hop) a unique incrementing id
                if first == '0':
                    first = count
                    count += 1
                if second == '0':
                    second = count
                    count += 1
                # add to edgeList set (unique values only)
                edgeList.add(str(first) + ' ' + str(second))
                uniqueEdge.add(str(first) + ' ' + str(second))
                i += 1
        # write edgeList to file
        for item in edgeList:
            of11.write(item + '\n')
            edgeCount1 += 1
        of11.close()

        # unique IPs
        with open("ipList.txt", "r") as f:
            ip = []
            for line in f:
                if line not in ip:
                    ip.append(line)
                    uniqueIP.add(line)
        # for item in ip:
        for item in uniqueIP:
            of7.write(item)
            if ('10.0' in item) or ('192.168' in item) or ('172.16' in item):
                privateIP += 1
            ipCount2 += 1
        of7.close()

        # unique source IPs
        with open("src.txt", "r") as f:
            src = []
            for line in f:
                if line not in src:
                    src.append(line)
                    uniqueSrc.add(line)
        for item in src:
            of8.write(item)
            srcCount2 += 1
        of8.close()

        # unique destination IPs
        with open("dst.txt", "r") as f:
            dst = []
            for line in f:
                if line not in dst:
                    dst.append(line)
                    uniqueDst.add(line)
        for item in dst:
            of9.write(item)
            dstCount2 += 1
        of9.close()

        # drop collection
        db.mapping.drop()

        # write individual node totals to file
        of5.write("Node: " + node + '\n')
        of5.write("Total Traces: " + str(traceCount1) + '\n')
        of5.write("Total IPs: " + str(ipCount1) + '\n')
        of5.write("Total Source IPs: " + str(srcCount1) + '\n')
        of5.write("Total Destination IPs: " + str(dstCount1) + '\n')
        of5.write("Unique Traces: " + str(traceCount2) + '\n')
        of5.write("Unique IPs: " + str(ipCount2) + '\n')
        of5.write("Private IPs: " + str(privateIP) + '\n')
        of5.write("Unique Source IPs: " + str(srcCount2) + '\n')
        of5.write("Unique Destination IPs: " + str(dstCount2) + '\n')
        of5.write("Unique Edges: " + str(edgeCount1) + '\n')
        of5.write("*************************************" + '\n')
        of5.close()

    # write totals to file
    # unique traces
    of11 = open("/home/jay/RipeData/" + str(start_time) + "/all_nodes/traces.txt", "w")
    for item in uniqueTrace:
        of11.write(item)
        traceCount3 += 1
    of11.close()

    # unique ip addresses
    of12 = open("/home/jay/RipeData/" + str(start_time) + "/all_nodes/ips.txt", "w")
    for item in uniqueIP:
        of12.write(item)
        ipCount3 += 1
    of12.close()

    # unique source ip addresses
    of13 = open("/home/jay/RipeData/" + str(start_time) + "/all_nodes/src.txt", "w")
    for item in uniqueSrc:
        of13.write(item)
        srcCount3 += 1
    of13.close()

    # unique destination ip addresses
    of14 = open("/home/jay/RipeData/" + str(start_time) + "/all_nodes/dst.txt", "w")
    for item in uniqueDst:
        of14.write(item)
        dstCount3 += 1
    of14.close()

    # unique edges (without '0's counted)
    of15 = open("/home/jay/RipeData/" + str(start_time) + "/all_nodes/edges.txt", "w")
    for item in uniqueEdge:
        of15.write(item + '\n')
        pair = item.split(' ')
        if ('.' in pair[0]) and ('.' in pair[1]):
            edgeCount += 1
    of15.close()

    # write totals to file
    ofx.write("Total Unique Traces: " + str(traceCount3) + '\n')
    ofx.write("Total Unique IPs: " + str(ipCount3) + '\n')
    ofx.write("Total Unique Source IPs: " + str(srcCount3) + '\n')
    ofx.write("Total Unique Destination IPs: " + str(dstCount3) + '\n')
    ofx.write("Total Unique Edges: " + str(edgeCount) + '\n')
    ofx.close()

    # update start_time.txt
    f = open("start_time.txt", "w")
    f.write(str(end_time))
    f.close()

def atlas_api_result_call(msm_idvar, start_date, stop_date, count):
    kwargs = {
        "msm_id": msm_idvar,
        "start": start_date,
        "stop": stop_date,
    }
    is_success, results = AtlasResultsRequest(**kwargs).create()
    pathlist = []
    hopslist = []
    timelist = []
    IPlistcomp = []
    if is_success:
        for result in results:
            parsed_result = TracerouteResult.get(result)
            pathlist.append(parsed_result.ip_path)
            hopslist.append(parsed_result.total_hops)
            timelist.append(parsed_result.end_time)
        for i in range(0, len(pathlist)):
            for k in range(0, hopslist[i]):
                try:
                    IPlistcomp.append(pathlist[i][k][0])
                except IndexError:
                    IPlistcomp.append("0.0.0.0")
        reader = geoip2.database.Reader("/home/csg/Downloads/GeoLite2-Country.mmdb")
        h = open('countries.txt', 'w')
        for i in range(0, len(IPlistcomp)):
            try:
                response = reader.country(IPlistcomp[i])
            except:
                h.write("none" + "\n")
            else:
                if response.country.name is None:
                    h.write("none" + "\n")
                else:
                    new = (response.country.name).encode('ascii', 'ignore')
                    h.write(new + "\n")
        h.close()
        with open("countries.txt") as f:
            content = f.readlines()
        content = map(lambda s: s.strip(), content)
        counter = collections.Counter(content)
        total = 0  # renamed from 'sum' to avoid shadowing the builtin
        for key, value in counter.iteritems():
            total = total + value
            counter[key] = [counter[key]]
        for key, value in counter.iteritems():
            var1 = (int((counter[key])[0]) / total) * 100
            counter[key].append(var1)
        key = "sum"
        counter.setdefault(key, [])
        counter[key].append(total)
        # key = "loss data"
        # counter.setdefault(key, [])
        # var2 = (int((counter["none"])[0]) / total) * 100
        # counter[key].append(var2)
        counter1 = collections.OrderedDict(sorted(counter.items()))
        json.dump(counter1, open("countries_stat.txt", 'w'))

def fetchResult(response, start_time, stop_time, fileName):
    pathOfTheFile = "/ldc/mdata/atlas/atlas." + str(fileName)
    dataLine = open(pathOfTheFile, "a")
    if len(response) != 0:
        for y in range(len(response)):
            res_ponse = response[y]
            mr = TracerouteResult(res_ponse)
            source_address = mr.source_address
            destination_address = mr.destination_address
            res = mr.ip_path
            if destination_address is None or source_address is None:
                continue
            ip_path = destination_address + ' ' + source_address
            for z in range(len(res)):
                if len(res[z]) < 1:
                    continue
                elif len(res[z]) == 1:
                    # single reply: use it, or 'q' if the hop timed out
                    if res[z][0] is None:
                        ip_path = ip_path + ' ' + 'q'
                    else:
                        ip_path = ip_path + ' ' + res[z][0]
                elif len(res[z]) == 2:
                    if res[z][0] == res[z][1] == None:
                        ip_path = ip_path + ' ' + 'q'
                    elif res[z][0] == res[z][1]:
                        ip_path = ip_path + ' ' + res[z][1]
                    elif res[z][0] is None:
                        ip_path = ip_path + ' ' + res[z][1]
                    else:
                        ip_path = ip_path + ' ' + res[z][0]
                elif res[z][0] == res[z][1] == res[z][2] and res[z][0] is not None:
                    # three identical replies
                    ip_path = ip_path + ' ' + res[z][0]
                elif res[z][0] == res[z][1] == res[z][2] == None:
                    # three timeouts
                    ip_path = ip_path + ' ' + 'q'
                else:
                    # mixed replies: keep the distinct non-None addresses
                    list_of_badhop_ip = []
                    for badhop in range(len(res[z])):
                        if res[z][badhop] is None:
                            continue
                        list_of_badhop_ip.append(res[z][badhop])
                    if len(list_of_badhop_ip) == 1:
                        ip_path = ip_path + ' ' + list_of_badhop_ip[0]
                    elif len(list_of_badhop_ip) == 2:
                        if list_of_badhop_ip[0] == list_of_badhop_ip[1]:
                            ip_path = ip_path + ' ' + list_of_badhop_ip[0]
                        else:
                            ip_path = ip_path + ' ' + list_of_badhop_ip[0] + ',' + list_of_badhop_ip[1]
                    else:
                        if (list_of_badhop_ip[0] != list_of_badhop_ip[1]
                                and list_of_badhop_ip[1] != list_of_badhop_ip[2]
                                and list_of_badhop_ip[0] != list_of_badhop_ip[2]):
                            ip_path = (ip_path + ' ' + list_of_badhop_ip[0] + ',' +
                                       list_of_badhop_ip[1] + ',' + list_of_badhop_ip[2])
                        elif list_of_badhop_ip[0] == list_of_badhop_ip[1]:
                            ip_path = ip_path + ' ' + list_of_badhop_ip[0] + ',' + list_of_badhop_ip[2]
                        elif list_of_badhop_ip[0] == list_of_badhop_ip[2]:
                            ip_path = ip_path + ' ' + list_of_badhop_ip[0] + ',' + list_of_badhop_ip[1]
                        elif list_of_badhop_ip[1] == list_of_badhop_ip[2]:
                            ip_path = ip_path + ' ' + list_of_badhop_ip[0] + ',' + list_of_badhop_ip[1]
            dataLine.writelines(ip_path + "\n")
    dataLine.close()

def do(self):
    all_target = Target.objects.all()
    for tar in all_target:
        kwargs = {
            "msm_id": tar.msm_id,
            "start": DT.date.today() - DT.timedelta(days=7),
            "stop": DT.date.today(),
        }
        is_success, results = AtlasResultsRequest(**kwargs).create()
        pathlist = []
        hopslist = []
        timelist = []
        IPlistcomp = []
        if is_success:
            for result in results:
                parsed_result = TracerouteResult.get(result)
                pathlist.append(parsed_result.ip_path)
                hopslist.append(parsed_result.total_hops)
                timelist.append(parsed_result.end_time)
            for i in range(0, len(pathlist)):
                for k in range(0, hopslist[i]):
                    try:
                        IPlistcomp.append(pathlist[i][k][0])
                    except IndexError:
                        IPlistcomp.append("0.0.0.0")
            reader = geoip2.database.Reader("/home/csg/Downloads/GeoLite2-Country.mmdb")
            h = open('countriescron.txt', 'w')
            for i in range(0, len(IPlistcomp)):
                try:
                    response = reader.country(IPlistcomp[i])
                except:
                    h.write("OO" + "\n")
                else:
                    if response.country.name is None:
                        h.write("OO" + "\n")
                    else:
                        new = (response.country.iso_code).encode('ascii', 'ignore')
                        h.write(new + "\n")
            h.close()
            with open("countriescron.txt") as f:
                countrylist = f.readlines()
            countrylist = map(lambda s: s.strip(), countrylist)
            desc = tar.description
            rel = int(filter(str.isdigit, str(desc)))
            desc = desc.replace(" ", "")
            desc = str(desc)
            rel = "Relation" + str(rel)
            m = 0
            for i in range(0, len(hopslist)):
                # model classes are resolved dynamically from the target description
                tar1 = eval(desc)(timestamp=timelist[i])
                tar1.save()
                for k in range(0, hopslist[i]):
                    b = Countries.objects.get(country=countrylist[m])
                    eval(rel).objects.create(
                        traceroutemeasurement_id=int(tar1.id),
                        countries_id=int(b.id))
                    m = m + 1

def trace():
    # global variables
    global start_time
    traceCount3 = 0
    ipCount3 = 0
    srcCount3 = 0
    dstCount3 = 0
    privateIP = 0
    edgeCount = 0
    target = 0
    uniqueTrace = set()
    uniqueIP = set()
    uniqueSrc = set()
    uniqueDst = set()
    uniqueEdge = set()
    nodeList = []

    with open("RipeNodeList") as f:
        for line in f:
            for word in line.split():
                nodeList.append(word)

    for line in open("start_time.txt"):
        if line.strip():
            start_time = int(line)
    end_time = start_time + 86400

    for node in nodeList:
        traceCount1 = 0
        srcCount1 = 0
        dstCount1 = 0
        ipCount1 = 0
        traceCount2 = 0
        srcCount2 = 0
        dstCount2 = 0
        ipCount2 = 0
        flag = 0
        privateIP = 0
        edgeCount1 = 0

        base = "/home/jay/Desktop/Data_Collector/RipeData/" + str(start_time)
        if not os.path.exists(base + "/all_nodes"):
            os.makedirs(base + "/all_nodes")
        if not os.path.exists(base + "/per_node/" + node):
            os.makedirs(base + "/per_node/" + node)

        # open working files
        of1 = open("traceList.txt", "w")
        of2 = open("ipList.txt", "w")
        of3 = open("src.txt", "w")
        of4 = open("dst.txt", "w")
        of5 = open(base + "/per_node/" + node + "/stats.txt", "w")
        of6 = open(base + "/per_node/" + node + "/trace.txt", "w")
        of7 = open(base + "/per_node/" + node + "/ip.txt", "w")
        of8 = open(base + "/per_node/" + node + "/src.txt", "w")
        of9 = open(base + "/per_node/" + node + "/dst.txt", "w")
        of10 = open(base + "/per_node/" + node + "/traceCount.txt", "w")
        of11 = open(base + "/per_node/" + node + "/edgeList.txt", "w")
        of15 = open(base + "/per_node/" + node + "/ipCount.txt", "w")
        ofx = open(base + "/all_nodes/stats.txt", "w")

        try:
            # download page from internet and store in filesystem
            urllib.urlretrieve("https://atlas.ripe.net/api/v2/measurements/" + node +
                               "/results?start=" + str(start_time) + "&stop=" + str(end_time) +
                               "&format=json", "/home/jay/Desktop/Data_Collector/ripe.json")
            try:
                # import file to db
                os.system("mongoimport --db RipeNode --collection mapping --type json --file ripe.json --jsonArray")
            except:
                print "Error--problem loading data to MongoDB"

            # create client and collection handle
            client = MongoClient()
            db = client.RipeNode
            coll = db.mapping
            index_i = 0
            index_j = 1

            # iterate through all documents in a collection
            cursor = coll.find()  # [index_i:index_j]

            # use mongo to parse data
            try:
                for post in coll.find():
                    record = convert(post)
                    my_result = TracerouteResult.get(record)
                    newLine = []
                    srcList = []
                    dstList = []
                    flag = 0
                    ip = my_result.ip_path
                    if my_result.is_success:
                        source = my_result.source_address
                        destination = my_result.destination_address
                        srcList.append(my_result.source_address)
                        dstList.append(my_result.destination_address)
                        for sublist in ip:
                            for y in sublist:
                                # take only the first of the three replies
                                while flag == 0:
                                    if y is None:
                                        newLine.append('0')
                                    else:
                                        newLine.append(y)
                                    flag = 1
                            flag = 0

                    # write parsed data to file:
                    # trace strings and ip addresses
                    of1.write(source + ':' + destination + ' ')
                    hop = 1
                    for address in newLine:
                        of1.write(address + '-' + str(hop))
                        of2.write(address)
                        of1.write(' ')
                        of2.write('\n')
                        ipCount1 += 1
                        hop += 1
                    if len(newLine) != 0:
                        of1.write('\n')
                        traceCount1 += 1

                    # source ip addresses
                    for address in srcList:
                        of3.write(address)
                        of3.write('\n')
                        srcCount1 += 1

                    # destination ip addresses
                    for address in dstList:
                        of4.write(address)
                        of4.write('\n')
                        dstCount1 += 1
            except:
                print "Could not process document"
        except:
            of5.write("ERROR -- Could not download node " + node)

        # close files
        of1.close()
        of2.close()
        of3.close()
        of4.close()

        # write per node data to file
        # unique traces
        with open("traceList.txt", "r") as f:
            trace = []
            for line in f:
                if line not in trace:
                    trace.append(line)
                    uniqueTrace.add(line)
        for item in trace:
            of6.write(item)
            traceCount2 += 1
        of6.close()

        # trace counts
        allTraceList = []
        uniqueTraceList = []
        with open("traceList.txt", "r") as f:
            for line in f:
                # make list of traces
                allTraceList.append(line)
                if line not in uniqueTraceList:
                    # make list of unique traces
                    uniqueTraceList.append(line)
        # for each unique trace, count the number of occurrences in the total list
        for item in uniqueTraceList:
            num = allTraceList.count(item)
            # write count and trace to new file
            of10.write(str(num) + " " + item)
        of10.close()

        # ip counts
        with open("ipList.txt", "r") as f:
            ips = []
            uips = set()
            for item in f:
                ips.append(item)
                uips.add(item)
        # compare unique to all and count all
        for item in uips:
            num = ips.count(item)
            of15.write(str(num) + " " + item)
        of15.close()

        # edges
        edgeList = set()
        count = 1
        for item in uniqueTraceList:
            # set list so it will reset
            trace = []
            # split trace and push to list
            for field in item.split():
                if ':' in field:
                    continue
                field = field.split('-')
                trace.append(field[0])
            # iterate through the trace list for hop pairs
            i = 0
            while i < len(trace) - 1:
                first = trace[i]
                second = trace[i + 1]
                # give each '0' (non-responding hop) a unique incrementing id
                if first == '0':
                    first = count
                    count += 1
                if second == '0':
                    second = count
                    count += 1
                # add to edgeList set (unique values only)
                edgeList.add(str(first) + ' ' + str(second))
                uniqueEdge.add(str(first) + ' ' + str(second))
                i += 1
        # write edgeList to file
        for item in edgeList:
            of11.write(item + '\n')
            edgeCount1 += 1
        of11.close()

        # unique IPs
        with open("ipList.txt", "r") as f:
            ip = []
            for line in f:
                if line not in ip:
                    ip.append(line)
                    uniqueIP.add(line)
        # for item in ip:
        for item in uniqueIP:
            of7.write(item)
            if ('10.0' in item) or ('192.168' in item) or ('172.16' in item):
                privateIP += 1
            ipCount2 += 1
        of7.close()

        # unique source IPs
        with open("src.txt", "r") as f:
            src = []
            for line in f:
                if line not in src:
                    src.append(line)
                    uniqueSrc.add(line)
        for item in src:
            of8.write(item)
            srcCount2 += 1
        of8.close()

        # unique destination IPs
        with open("dst.txt", "r") as f:
            dst = []
            for line in f:
                if line not in dst:
                    dst.append(line)
                    uniqueDst.add(line)
        for item in dst:
            of9.write(item)
            dstCount2 += 1
        of9.close()

        # drop collection
        db.mapping.drop()

        # write individual node totals to file
        of5.write("Node: " + node + '\n')
        of5.write("Total Traces: " + str(traceCount1) + '\n')
        of5.write("Total IPs: " + str(ipCount1) + '\n')
        of5.write("Total Source IPs: " + str(srcCount1) + '\n')
        of5.write("Total Destination IPs: " + str(dstCount1) + '\n')
        of5.write("Unique Traces: " + str(traceCount2) + '\n')
        of5.write("Unique IPs: " + str(ipCount2) + '\n')
        of5.write("Private IPs: " + str(privateIP) + '\n')
        of5.write("Unique Source IPs: " + str(srcCount2) + '\n')
        of5.write("Unique Destination IPs: " + str(dstCount2) + '\n')
        of5.write("Unique Edges: " + str(edgeCount1) + '\n')
        of5.write("*************************************" + '\n')
        of5.close()

    # write totals to file
    # unique traces
    of11 = open(base + "/all_nodes/traces.txt", "w")
    for item in uniqueTrace:
        of11.write(item)
        traceCount3 += 1
    of11.close()

    # unique ip addresses
    of12 = open(base + "/all_nodes/ips.txt", "w")
    for item in uniqueIP:
        of12.write(item)
        ipCount3 += 1
    of12.close()

    # unique source ip addresses
    of13 = open(base + "/all_nodes/src.txt", "w")
    for item in uniqueSrc:
        of13.write(item)
        srcCount3 += 1
    of13.close()

    # unique destination ip addresses
    of14 = open(base + "/all_nodes/dst.txt", "w")
    for item in uniqueDst:
        of14.write(item)
        dstCount3 += 1
    of14.close()

    # unique edges (without '0's counted)
    of15 = open(base + "/all_nodes/edges.txt", "w")
    for item in uniqueEdge:
        of15.write(item + '\n')
        pair = item.split(' ')
        if ('.' in pair[0]) and ('.' in pair[1]):
            edgeCount += 1
    of15.close()

    # write totals to file
    ofx.write("Total Unique Traces: " + str(traceCount3) + '\n')
    ofx.write("Total Unique IPs: " + str(ipCount3) + '\n')
    ofx.write("Total Unique Source IPs: " + str(srcCount3) + '\n')
    ofx.write("Total Unique Destination IPs: " + str(dstCount3) + '\n')
    ofx.write("Total Unique Edges: " + str(edgeCount) + '\n')
    ofx.close()

    # update start_time.txt
    f = open("start_time.txt", "w")
    f.write(str(end_time))
    f.close()