def on_result_response(*args):
    """Handle one RIPE Atlas result callback and store its DNS answers.

    The raw result is always re-parsed through ripe.atlas.sagan
    (``DnsResult.get`` with ``parse_buf=True``) so that records sagan does
    not decode automatically are expanded from the abuf before being handed
    to ``process_answers``.

    Returns True early when sagan flags the result as an error; returns
    None otherwise (including when no 'result' payload is present).
    """
    # Nothing to do for callbacks that carry no 'result' payload.
    if 'result' not in args[0]:
        return
    # NOTE: the original code had identical handling in both the
    # "'answers' in result" and the fallback branch; collapsed to one path.
    # sagan logs noisily while parsing, so warnings are disabled for the
    # call and the configured level is re-applied afterwards.
    logging.disable(logging.WARNING)
    res = DnsResult.get(args[0], parse_buf=True)
    logging.basicConfig(level=defaultloglevel)
    if res.is_error:
        return True
    # Forward every decoded answer record (its raw_data dict) for storage.
    if res.responses[0].abuf.answers:
        for answer in res.responses[0].abuf.answers:
            process_answers(data=answer['raw_data'], sagan=True)
def simplify_file_results(file_results: List[Dict[str, Any]], fp):
    """Take a single file's results and simplify each of them.

    Annoyingly, an error result does not say whether A or AAAA records were
    requested, so the question section of the first non-error result is used
    to determine the record type and queried domain for the whole file.

    If every result is an error (or the list is empty) nothing is written —
    previously this raised NameError on the unbound ``record_type``.
    """
    record_type = None
    domain = None
    for probe_result in file_results:
        dns_result = DnsResult(probe_result)
        if dns_result.is_error:
            continue
        question = dns_result.responses[0].abuf.questions[0]
        record_type = question.type
        domain = question.name[:-1]  # strip the trailing dot of the FQDN
        break  # all results in one file share the same question
    if record_type is None:
        # No usable result found; nothing can be simplified.
        return
    for probe_result in file_results:
        simplify_single_result(DnsResult(probe_result), record_type, domain, fp)
def create_list(kwargs):
    """Fetch RIPE Atlas results and extract SOA-serial time series.

    kwargs: dict passed straight to ``AtlasResultsRequest`` (msm_id, start,
    stop, ...).

    Returns a 4-tuple ``(l_soa, l_time, l_dt, l_hover)`` of parallel lists:
    SOA serial as a POSIX timestamp, the result timestamp, the timestamp
    formatted as "%m/%d/%Y , %H:%M:%S", and the raw serial for hover text.
    Returns None when the API request fails (matching the original implicit
    behaviour).
    """
    is_success, results = AtlasResultsRequest(**kwargs).create()
    if not is_success:
        return None
    l_soa = []
    l_time = []
    l_dt = []
    l_hover = []
    # BUG FIX: the original loop ran "while count < len(results) - 1",
    # silently dropping the final result; iterate over all of them.
    for raw in results:
        parsed = DnsResult(raw, on_error=DnsResult.ACTION_IGNORE)
        if parsed.is_error:
            continue
        timestamp = raw['timestamp']
        dt = datetime.datetime.fromtimestamp(timestamp)
        dt = dt.strftime("%m/%d/%Y , %H:%M:%S")
        og_soa_serial = raw['result']['answers'][0]['SERIAL']
        # Serials follow the YYYYMMDDHH convention; convert to epoch seconds.
        soa_serial = datetime.datetime.strptime(str(og_soa_serial), "%Y%m%d%H")
        l_soa.append(datetime.datetime.timestamp(soa_serial))
        l_time.append(timestamp)
        l_dt.append(dt)
        l_hover.append(og_soa_serial)
    return l_soa, l_time, l_dt, l_hover
def validate_and_populate_measurement(self):
    """Fetch this measurement's results and populate response statistics.

    Downloads the result set for ``self.id`` from the RIPE Atlas v2 API,
    keeps only responses whose return code is NXDOMAIN, and accumulates
    their response times (``self.response_time``) and question names
    (``self.dns_questions``). Sets ``self.number_of_requests`` to the count
    of valid responses.
    """
    api_address = 'https://atlas.ripe.net:443/api/v2/measurements/%s/results/' % self.id
    r = requests.get(api_address, headers=headers)
    # BUG FIX: ast.literal_eval cannot parse JSON literals such as
    # true/false/null; decode the body as JSON instead.
    res = r.json()
    count_of_valid_responses = 0
    for request in res:
        current_dns_result = DnsResult(request)
        response_probe_id = current_dns_result.probe_id
        try:
            ret_code = current_dns_result.responses[0].abuf.header.return_code
            response_time = current_dns_result.responses[0].response_time
            dns_question_name = current_dns_result.responses[0].abuf.questions[0].name
            print("The request was sent from prob_id %d" % response_probe_id)
            print("The response_time is " + str(response_time))
            if ret_code != "NXDOMAIN":
                print(ret_code)
                print("At least one of the %s responses is not NXDOMAIN but %s" % (self.id, ret_code))
            else:
                self.response_time.append(response_time)
                self.dns_questions.add(str(dns_question_name))
                count_of_valid_responses += 1
        except Exception:
            # Was a bare "except:"; keep the best-effort skip but no longer
            # swallow KeyboardInterrupt/SystemExit.
            print("The response of probe id %s was not valid" % response_probe_id)
    self.number_of_requests = count_of_valid_responses
    print("The number of requests is %s at %s" % (str(count_of_valid_responses), str(datetime.now())))
def print_nicely(self, limit):
    """Pretty-print parsed DNS results from ``self.filename``.

    limit: maximum number of results to print; None prints them all.
    """
    with open(self.filename) as results:
        for count, line in enumerate(results.readlines()):
            if limit is not None and count >= limit:
                return
            parsed_result = DnsResult.get(line)
            print("PROBE ID: " + str(parsed_result.probe_id))
            print("firmware: " + str(parsed_result.firmware))
            print("origin: " + parsed_result.origin)
            print("measurement type: " + self.measurement_type)
            self._print_dns_nicely(parsed_result)
            print("\n")
def print_nicely(self, limit):
    """Print each result in ``self.filename`` in a human-readable form.

    Stops after *limit* results when a limit is given; otherwise prints
    every line in the file.
    """
    with open(self.filename) as results:
        printed = 0
        for line in results.readlines():
            # Honour the limit before touching the next result.
            if limit is not None and printed >= limit:
                return
            parsed = DnsResult.get(line)
            print("PROBE ID: " + str(parsed.probe_id))
            print("firmware: " + str(parsed.firmware))
            print("origin: " + parsed.origin)
            print("measurement type: " + self.measurement_type)
            self._print_dns_nicely(parsed)
            print("\n")
            printed += 1
def _get_list_of_qtypes(self):
    """Return the qtype of every question found in the file's responses."""
    qtypes = []
    with open(self.filename) as results:
        for line in results:
            parsed = DnsResult.get(line)
            for response in parsed.responses:
                for question in response.abuf.questions:
                    qtypes.append(question.type)
    return qtypes
def _get_list_of_qtypes(self):
    """Collect every question's qtype across all results in the file."""
    with open(self.filename) as results:
        return [
            question.type
            for line in results
            for response in DnsResult.get(line).responses
            for question in response.abuf.questions
        ]
def json_parser(f):
    """Parse a JSON list of RIPE Atlas DNS measurements into CSV rows.

    Each successful row is ``src,dst,proto,rtt,prb_id,rcode,fw`` (seven
    fields). When a measurement is present but cannot be decoded, an
    all-empty row with the SAME seven fields is appended — previously the
    placeholder had nine fields, breaking column alignment. Entries that
    are not DNS measurements are reported and skipped.

    Returns [] when *f* is not valid JSON (original best-effort contract).
    """
    answers = []
    try:
        measurement = json.loads(f)
    except (ValueError, TypeError):
        # Not parseable JSON at all; keep the silent-empty contract.
        return answers
    for entry in measurement:
        try:
            my_results = DnsResult(entry)
        except Exception:
            print('Error: Measurement is not a DNS measurement')
            continue
        try:
            response = my_results.responses[0]
            # The abuf is re-decoded with dnspython to obtain the rcode.
            dnsmsg = dns.message.from_wire(base64.b64decode(str(response.abuf)))
            fields = [
                str(response.source_address),
                str(response.destination_address),
                str(response.protocol),
                str(response.response_time),
                str(entry['prb_id']),
                str(dnsmsg.rcode()),
                str(entry['fw']),
            ]
            answers.append(','.join(fields))
        except Exception:
            # Empty/undecodable measurement: emit an all-empty 7-field row.
            answers.append(',' * 6)
    return answers
def get_state_trust_chain(msm_results, msm_attributes, start_date, stop_date,
                          details):
    """Processes the measurement results from RIPE Atlas for the trust chain.

    Builds a per-vantage-point (probe + resolver address) time series of DNS
    return codes for the valid/bogus IPv4/IPv6 measurements, bins them by
    the DNSKEY TTL, and prints either the full CSV (details=True) or a
    percentage summary.
    """
    vantage_point_state = []
    for msm_id, attributes in msm_attributes.items():
        for measurement in msm_results[msm_id]:
            dns_result = DnsResult(measurement)
            # BUG FIX: was "if ~dns_result.is_error" — bitwise NOT on a bool
            # (~False == -1, ~True == -2, both truthy), so error results were
            # never skipped. Use logical negation.
            if not dns_result.is_error:
                for response in dns_result.responses:
                    if response.abuf is not None:
                        probe_id = dns_result.probe_id
                        destination_address = response.destination_address
                        # A vantage point is probe + resolver address.
                        vantage_point_id = str(
                            probe_id) + '_' + destination_address
                        created = dns_result.created
                        return_code = response.abuf.header.return_code
                        # attributes[1] is 'valid'/'bogus', attributes[2] the
                        # IP version ('4'/'6') of the measurement.
                        ipv = 'ipv4' if attributes[2] == '4' else 'ipv6'
                        kind = 'valid' if attributes[1] == 'valid' else 'bogus'
                        vantage_point_state.append([
                            vantage_point_id, ipv, kind, return_code, created
                        ])
    df_vantage_point_states = pd.DataFrame(
        vantage_point_state,
        columns=['vantage_point_id', 'ipv', 'msm', 'return_code', 'created'])
    df_vantage_point_states = df_vantage_point_states.sort_values(
        ['vantage_point_id', 'ipv', 'created'])
    # get_vp_state condenses each vantage point's rows into a state series.
    time_series_states = df_vantage_point_states.groupby(
        ['vantage_point_id',
         'ipv']).apply(lambda time_series: get_vp_state(time_series))
    time_series_states = time_series_states.reset_index()
    time_series_states_v4 = time_series_states[time_series_states.ipv == 'ipv4']
    time_series_states_v6 = time_series_states[time_series_states.ipv == 'ipv6']
    ipv4_time_series = [[], []]
    ipv6_time_series = [[], []]
    # ts[0] is the (created-list, state-list) pair returned by get_vp_state.
    for vp, ts in time_series_states_v4.iterrows():
        ipv4_time_series[0] += ts[0][0]
        ipv4_time_series[1] += ts[0][1]
    for vp, ts in time_series_states_v6.iterrows():
        ipv6_time_series[0] += ts[0][0]
        ipv6_time_series[1] += ts[0][1]
    # Bin state observations into DNSKEY-TTL-sized buckets and count states.
    ipv4_time_series = pd.DataFrame(ipv4_time_series).transpose()
    ipv4_time_series.columns = ['created', 'state']
    ipv4_time_series = ipv4_time_series.sort_values(['created'])
    ipv4_time_series['counter'] = 1
    ipv4_time_series = ipv4_time_series.groupby([
        pd.Grouper(key='created', freq=str(config['TTLS']['ttl_dnskey']) + 'S'),
        'state'
    ]).count().unstack().fillna(0)['counter']
    ipv6_time_series = pd.DataFrame(ipv6_time_series).transpose()
    ipv6_time_series.columns = ['created', 'state']
    ipv6_time_series = ipv6_time_series.sort_values(['created'])
    ipv6_time_series['counter'] = 1
    ipv6_time_series = ipv6_time_series.groupby([
        pd.Grouper(key='created', freq=str(config['TTLS']['ttl_dnskey']) + 'S'),
        'state'
    ]).count().unstack().fillna(0)['counter']
    if details:
        print('Trust Chain State IPv4 ({} - {})'.format(
            start_date.strftime('%Y-%m-%d %H:%M'),
            stop_date.strftime('%Y-%m-%d %H:%M')))
        print(ipv4_time_series.to_csv())
        print('Trust Chain State IPv6 ({} - {})'.format(
            start_date.strftime('%Y-%m-%d %H:%M'),
            stop_date.strftime('%Y-%m-%d %H:%M')))
        print(ipv6_time_series.to_csv())
    else:
        if len(ipv4_time_series) > 0:
            print('Trust Chain State IPv4 ({} - {}) in %'.format(
                start_date.strftime('%Y-%m-%d %H:%M'),
                stop_date.strftime('%Y-%m-%d %H:%M')))
            print('Total VPs')
            print(ipv4_time_series.sum())
            print('VPs (in %)')
            print(
                (ipv4_time_series.sum() / ipv4_time_series.sum().sum() * 100))
        if len(ipv6_time_series) > 0:
            print('Trust Chain State IPv6 ({} - {})'.format(
                start_date.strftime('%Y-%m-%d %H:%M'),
                stop_date.strftime('%Y-%m-%d %H:%M')))
            print('Total VPs')
            print(ipv6_time_series.sum())
            print('VPs (in %)')
            print(
                (ipv6_time_series.sum() / ipv6_time_series.sum().sum() * 100))
def get_state_trust_chain_old(msm_results, msm_attributes, start_date,
                              stop_date, details):
    """Processes the measurement results from RIPE Atlas for the trust chain.

    Older variant: keeps only the latest return code per vantage point for
    each of the four (ip version x valid/bogus) measurements, classifies
    each vantage point via define_state(), and prints summary percentages.
    """
    vantage_point_state = {}
    for msm_id, attributes in msm_attributes.items():
        for measurement in msm_results[msm_id]:
            dns_result = DnsResult(measurement)
            # BUG FIX: was "if ~dns_result.is_error" — bitwise NOT on a bool
            # is always truthy, so error results were never skipped.
            if not dns_result.is_error:
                for response in dns_result.responses:
                    if response.abuf is not None:
                        probe_id = dns_result.probe_id
                        destination_address = response.destination_address
                        vantage_point_id = str(
                            probe_id) + '_' + destination_address
                        if vantage_point_id not in vantage_point_state:
                            vantage_point_state[vantage_point_id] = {
                                'ipv4_valid': None,
                                'ipv4_bogus': None,
                                'ipv6_valid': None,
                                'ipv6_bogus': None
                            }
                        return_code = response.abuf.header.return_code
                        # attributes[1] is 'valid'/'bogus', attributes[2] the
                        # IP version; later responses overwrite earlier ones.
                        if attributes[1] == 'valid':
                            if attributes[2] == '4':
                                vantage_point_state[vantage_point_id][
                                    'ipv4_valid'] = return_code
                            else:
                                vantage_point_state[vantage_point_id][
                                    'ipv6_valid'] = return_code
                        else:
                            if attributes[2] == '4':
                                vantage_point_state[vantage_point_id][
                                    'ipv4_bogus'] = return_code
                            else:
                                vantage_point_state[vantage_point_id][
                                    'ipv6_bogus'] = return_code
    ipv4_summary = defaultdict(int)
    ipv6_summary = defaultdict(int)
    # Raw return-code histograms per measurement kind (kept for parity with
    # the original; not printed below).
    total_summary = {
        'ipv4_valid': defaultdict(int),
        'ipv4_bogus': defaultdict(int),
        'ipv6_valid': defaultdict(int),
        'ipv6_bogus': defaultdict(int)
    }
    ipv4_measurements = 0
    ipv6_measurements = 0
    for vantage_point in vantage_point_state.keys():
        state = define_state(vantage_point_state[vantage_point]['ipv4_valid'],
                             vantage_point_state[vantage_point]['ipv4_bogus'])
        ipv4_summary[state] += 1
        ipv4_measurements += 1
        state = define_state(vantage_point_state[vantage_point]['ipv6_valid'],
                             vantage_point_state[vantage_point]['ipv6_bogus'])
        ipv6_summary[state] += 1
        ipv6_measurements += 1
        total_summary['ipv4_valid'][vantage_point_state[vantage_point]
                                    ['ipv4_valid']] += 1
        total_summary['ipv4_bogus'][vantage_point_state[vantage_point]
                                    ['ipv4_bogus']] += 1
        total_summary['ipv6_valid'][vantage_point_state[vantage_point]
                                    ['ipv6_valid']] += 1
        total_summary['ipv6_bogus'][vantage_point_state[vantage_point]
                                    ['ipv6_bogus']] += 1
    if len(ipv4_summary) > 0:
        print('Trust Chain State IPv4 ({} - {})'.format(
            start_date.strftime('%Y-%m-%d %H:%M'),
            stop_date.strftime('%Y-%m-%d %H:%M')))
        print(
            'Insecure:\t{} ({}%)\tSecure:\t{} ({}%)\tBogus:\t{} ({}%)\tUnknown:\t{} ({}%)'
            .format(
                ipv4_summary['insecure'],
                round(ipv4_summary['insecure'] / ipv4_measurements * 100, 2),
                ipv4_summary['secure'],
                round(ipv4_summary['secure'] / ipv4_measurements * 100, 2),
                ipv4_summary['bogus'],
                round(ipv4_summary['bogus'] / ipv4_measurements * 100, 2),
                ipv4_summary['unknown'],
                round(ipv4_summary['unknown'] / ipv4_measurements * 100, 2)))
    if len(ipv6_summary) > 0:
        print('Trust Chain State IPv6 ({} - {})'.format(
            start_date.strftime('%Y-%m-%d %H:%M'),
            stop_date.strftime('%Y-%m-%d %H:%M')))
        print(
            'Insecure:\t{} ({}%)\tSecure:\t{} ({}%)\tBogus:\t{} ({}%)\tUnknown:\t{} ({}%)'
            .format(
                ipv6_summary['insecure'],
                round(ipv6_summary['insecure'] / ipv6_measurements * 100, 2),
                ipv6_summary['secure'],
                round(ipv6_summary['secure'] / ipv6_measurements * 100, 2),
                ipv6_summary['bogus'],
                round(ipv6_summary['bogus'] / ipv6_measurements * 100, 2),
                ipv6_summary['unknown'],
                round(ipv6_summary['unknown'] / ipv6_measurements * 100, 2)))
def get_state_publication_and_propagation(msm_results, msm_attributes,
                                          start_date, stop_date, details):
    """Processes the measurement results from RIPE Atlas for the publication
    and propagation delay. Prints the results as a string.

    For each measurement, counts which DNSKEY/RRSIG/DS key tags were
    observed for the monitored zone and prints per-key-tag share tables.
    details=True additionally hands the collected time series to
    get_details().
    """
    time_series = []
    for msm_id, attributes in msm_attributes.items():
        responses_counter_zsk = 0
        responses_counter_ksk = 0
        if attributes[0] == 'pubdelay':
            print('Monitoring {} of {} at {} ({} - {})'.format(
                attributes[0], attributes[1].upper(), attributes[2],
                start_date.strftime('%Y-%m-%d %H:%M'),
                stop_date.strftime('%Y-%m-%d %H:%M')))
        else:
            print('Monitoring {} of {} (IPv{} ({} - {}))'.format(
                attributes[0], attributes[1].upper(), attributes[2],
                start_date.strftime('%Y-%m-%d %H:%M'),
                stop_date.strftime('%Y-%m-%d %H:%M')))
        keys_zsk = defaultdict(int)
        keys_ksk = defaultdict(int)
        if msm_id in msm_results:
            for measurement in msm_results[msm_id]:
                dns_result = DnsResult(measurement)
                # BUG FIX: was "if ~dns_result.is_error" — bitwise NOT on a
                # bool is always truthy, so error results were never skipped.
                if not dns_result.is_error:
                    for response in dns_result.responses:
                        if response.abuf is not None:
                            for answer in response.abuf.answers:
                                if answer.raw_data[
                                        'Type'] == 'DNSKEY' and answer.name == config[
                                            'ROLLOVER']['zone']:
                                    algorithm = answer.raw_data['Algorithm']
                                    protocol = answer.raw_data['Protocol']
                                    flags = answer.raw_data['Flags']
                                    key_tag = calc_keyid(
                                        flags, protocol, algorithm,
                                        answer.raw_data['Key'])
                                    created = dns_result.created
                                    # Flags 256 = ZSK, 257 = KSK (SEP bit).
                                    if flags == 256:
                                        keys_zsk[key_tag] += 1
                                        responses_counter_zsk += 1
                                    elif flags == 257:
                                        keys_ksk[key_tag] += 1
                                        responses_counter_ksk += 1
                                    time_series.append(
                                        [created, attributes[2], key_tag])
                                elif (answer.raw_data['Type'] == 'RRSIG'
                                      and answer.raw_data['TypeCovered'] ==
                                      'DNSKEY' and answer.name ==
                                      config['ROLLOVER']['zone']):
                                    responses_counter_zsk += 1
                                    created = dns_result.created
                                    key_tag = answer.raw_data['KeyTag']
                                    keys_zsk[key_tag] += 1
                                    time_series.append(
                                        [created, attributes[2], key_tag])
                                elif (answer.raw_data['Type'] ==
                                      'DS') and answer.name == config[
                                          'ROLLOVER']['zone']:
                                    responses_counter_zsk += 1
                                    created = dns_result.created
                                    key_tag = answer.raw_data['Tag']
                                    keys_zsk[key_tag] += 1
                                    time_series.append(
                                        [created, attributes[2], key_tag])
        print('Key Tag\t# Observed (Share %)')
        for key in keys_zsk.keys():
            print('{}\t\t{} ({}%)'.format(
                key, keys_zsk[key],
                round(keys_zsk[key] / responses_counter_zsk * 100, 2)))
        for key in keys_ksk.keys():
            print('{}\t\t{} ({}%)'.format(
                key, keys_ksk[key],
                round(keys_ksk[key] / responses_counter_ksk * 100, 2)))
    if details:
        get_details(time_series, attributes)
    return
# by rog # https://ripe-atlas-sagan.readthedocs.io/en/latest/use.html#how-to-use-this-library # https://ripe-atlas-sagan.readthedocs.io/en/latest/types.html#dns # https://atlas.ripe.net/api/v2/measurements/23265672/results/?start=1574595600&stop=1574596200&format=json """ Gets the measurement specified by the measurement id (msm_id) start and end time and returns a list of dns nodes responding and their response time for the query """ from ripe.atlas.sagan import DnsResult from ripe.atlas.cousteau import AtlasResultsRequest import dns.message import base64 from collections import defaultdict d = defaultdict(list) kwargs = {"msm_id": 1413716, "start": 1567133400, "stop": 1567134000} # kwargs = {"msm_id": 23265672, "start": 1574595600, "stop": 1574596200} is_success, results = AtlasResultsRequest(**kwargs).create() if is_success: count = 0 for result in results: probe = result['prb_id'] my_result = DnsResult(result) print(my_result.responses[0].abuf.answers[0].data_string + " -> " + str(my_result.responses[0].response_time))
# Load the local ip2asn helper from a sibling checkout.
sys.path.append('../ip2asn/')
import ip2asn

# IP-to-ASN lookup backed by a pickled RIB snapshot.
i2a = ip2asn.ip2asn('../ip2asn/db/rib.20200601.pickle.bz2')
# presumably the RIPE Atlas built-in DNS measurement ids — TODO confirm
builtin_msmid = [30001, 30002]
fname = sys.argv[1]
# Extract the date embedded in the input filename (between '-' and the
# final extension), e.g. 'results-20200601.json.bz2' -> '20200601.json'.
date = fname.partition('-')[2].rpartition('.')[0]
output_fname = f'data/parsed_results_{date}.json'
# NOTE(review): 'output' and 'output_fname' are not written in this chunk;
# the loop body appears to continue beyond the visible source.
output = []
with bz2.open(fname, 'rb') as fp:
    for line in fp:
        line_json = json.loads(line)
        result = DnsResult(line_json)
        for response in result.responses:
            # Skip if something's wrong
            if (response.is_error or response.is_malformed or not response.destination_address or not response.abuf):
                continue
            # Retrieve ASNs
            dstip = response.destination_address
            # Prefer the probe-reported source ('from'); fall back to the
            # response's own source address when it is empty.
            if line_json['from']:
                srcip = line_json['from']
            else:
                srcip = response.source_address
            dstasn = i2a.ip2asn(dstip)
            srcasn = i2a.ip2asn(srcip)