def on_unregister_given_not_asserted_entity(match, state, entity="entity"):
    """Warn when a remote entity is unregistered without a prior assert.

    Args:
        match: regex capture groups; [0:3] are the remote participant GUID
            fields, [3] is the remote entity object ID.
        state: shared parser state dictionary.
        entity: human-readable entity kind (e.g. "DataReader"/"DataWriter").
            NOTE(review): the original body referenced an undefined name
            `entity` (NameError when executed); it is now a defaulted
            parameter so existing two-argument callers keep working.
    """
    remote_part = parse_guid(state, match[0], match[1], match[2])
    remote_oid = get_oid(match[3])
    # Typo fix: "previsouly" -> "previously" in the user-facing message.
    log_warning(
        "%s %s is unregistering remote %s not previously asserted"
        % (remote_part, remote_oid, entity),
        state, 2)
def on_receive_data(match, state):
    """Handle a log entry for data received by a reader."""
    is_be = match[0] == "Be"
    comm = "best-effort" if is_be else "reliable"
    reader_oid = get_oid(match[1])
    packet = match[2]
    seqnum = parse_sn(match[3], 16 if is_be else 10)
    writer_fields = match[5].split('.')
    writer_addr = parse_guid(
        state, writer_fields[0], writer_fields[1], writer_fields[2])
    writer_oid = get_oid(writer_fields[3])

    # Sequence-number gap detection: emit one warning per missing packet
    # so the warning summary reflects the real loss count.
    full_id = "%s.%s to %s" % (writer_addr, writer_oid, reader_oid)
    last_sn = state.setdefault('last_sn', {})
    prev_seqnum = last_sn.get(full_id)
    if prev_seqnum is not None:
        for _ in range(seqnum - prev_seqnum - 1):
            log_warning("Missing packet for %s" % full_id, state)
    last_sn[full_id] = seqnum

    # Show the receive message only after any possible warning.
    verbosity = 1 if is_builtin_entity(writer_fields[3]) else 0
    log_recv(
        writer_addr, reader_oid,
        "Received %s (%d) from writer %s (%s)"
        % (packet, seqnum, writer_oid, comm),
        state, verbosity)
def on_send_gap(match, state):
    """Handle a log entry for a GAP message sent by a writer."""
    writer_oid = get_oid(match[0])
    remote_part = parse_guid(state, match[1], match[2], match[3])
    reader_oid = get_oid(match[4])
    sn_start = parse_sn(match[5])
    sn_end = parse_sn(match[6])
    verbosity = 1 if is_builtin_entity(match[0]) else 0
    log_send(
        remote_part, writer_oid,
        "Sent GAP to reader %s for [%d, %d)" % (reader_oid, sn_start, sn_end),
        state, verbosity)
    add_statistics_packet(writer_oid, 'send', 'GAP', state)

    # Check for large sequence number issues.
    if sn_end - sn_start >= (1 << 31):
        log_warning("[LP-1] Large Sequence Number difference in GAP.", state)

    # Check for reliable packet lost
    if 'packets_lost' not in state:
        return
    recovered = []
    for entry in state['packets_lost']:
        fields = entry.split("-")
        oid = fields[0]
        seqnum = int(fields[1])
        # GAP range is half-open: [sn_start, sn_end).
        if oid == writer_oid and sn_start <= seqnum < sn_end:
            log_warning("DATA (%d) may have been lost" % seqnum, state)
            recovered.append(entry)
    for entry in recovered:
        state['packets_lost'].remove(entry)
def main():
    """Main application entry."""
    args = read_arguments()
    state = initialize_state(args)
    expressions = create_regex_list(state)

    # Read the log file and parse it against the compiled expressions.
    state['format_device'].write_header(state)
    try:
        parse_log(expressions, state)
    except KeyboardInterrupt:
        log_warning("Catched SIGINT", state)

        # Parse logs again in case this process was piping the output from
        # another and there are some remaining logs. Also we will be able to
        # show the end summary. If the signal is sent again, it will quit.
        try:
            parse_log(expressions, state)
        except KeyboardInterrupt:
            # Catch SIGINT again in case the user wants to abort the
            # log parsing but still show the final summary.
            log_warning("Catched SIGINT", state)

    # Print result of config, errors and warnings.
    state['format_device'].write_configurations(state)
    state['format_device'].write_warnings(state)
    state['format_device'].write_errors(state)
def on_lose_discovery_samples(match, state):
    """Warn about lost discovery samples for an entity."""
    kind = match[0]
    oid = get_oid(match[1])
    total = match[2]
    delta = match[3]
    log_warning(
        "%s discovery samples lost for %s %s (%s in total)"
        % (delta, kind, oid, total),
        state)
def check_time_distance(new_clocks, old_clocks, state):
    """Warn when consecutive log timestamps are suspiciously far apart."""
    MAX_TIME_SEC = 60
    # System clock (index 1): tolerance expressed as a timedelta.
    drift = compare_times(
        old_clocks[1], new_clocks[1], timedelta(seconds=MAX_TIME_SEC))
    if drift:
        log_warning(
            "System clock went %s by %s." % (drift[0], drift[1]), state)
    # Monotonic clock (index 0) may be absent; its tolerance is a number.
    if new_clocks[0]:
        drift = compare_times(old_clocks[0], new_clocks[0], MAX_TIME_SEC)
        if drift:
            log_warning(
                "Monotonic clock went %s by %.3f." % (drift[0], drift[1]),
                state)
def set_local_address(guid, state):
    """Record the local address parsed from a GUID and log it."""
    fields = guid.split()
    local_address = (fields[0], fields[1])
    # Seeing a second, different local address most likely means the file
    # mixes the output of two different applications.
    if 'local_address' not in state:
        state['local_address'] = set()
    elif local_address not in state['local_address']:
        log_warning(
            "You may have written output from two different apps.", state)
    state['local_address'].add(local_address)
    if state['obfuscate']:
        # Truncate obfuscated fields to keep the original field widths.
        fields[0] = obfuscate(fields[0], state)[:15]
        fields[1] = obfuscate(fields[1], state)[:5]
    log_cfg("Local address: %s %s" % (fields[0], fields[1]), state)
def check_periodic(state, name, msg=""):
    """Warn when a named event stops happening at a steady period.

    Returns True while there is not yet enough information to compare
    (no clocks, first sighting, or only one period measured).
    """
    # Without timestamped logs there are no clocks: always accept.
    if 'clocks' not in state:
        return True
    events = state.setdefault('periodic_event', {})
    # Prefer the monotonic clock when present; otherwise the system clock.
    has_monotonic = state['clocks'][0] is not None
    clock = state['clocks'][0] if has_monotonic else state['clocks'][1]
    # First sighting: remember the timestamp, nothing to compare yet.
    if name not in events:
        events[name] = [-1, clock]
        return True
    entry = events[name]
    previous_period = entry[0]
    period = clock - entry[1]
    entry[0], entry[1] = period, clock
    # Only one period measured so far: still nothing to compare.
    if previous_period == -1:
        return True
    # Monotonic periods are numbers; system-clock periods are timedeltas.
    tolerance = 0.1 if has_monotonic else timedelta(milliseconds=100)
    deviation = compare_times(previous_period, period, tolerance)
    if deviation:
        log_warning(
            "%s not periodic (%s by %s) %s"
            % (name, deviation[0], deviation[1], msg),
            state)
def on_reader_exceed_max_entries(match, state):
    """Warn that a DataReader hit its resource limits."""
    message = "[LP-11] DataReader exceeded resource limits"
    log_warning(message, state)
def on_writer_batching_exceed_max_entries(match, state):
    """Warn that a batching DataWriter hit its resource limits."""
    message = "[LP-10] DataWriter with batching exceeded resource limits"
    log_warning(message, state)
def on_drop_unregister_no_ack_instance(match, state):
    """Warn that an unregistered instance cannot be dropped (missing ACKs)."""
    message = "[LP-9] Cannot drop unregistered instance, missing ACKs"
    log_warning(message, state, 1)
def on_error_unreachable_network(match, state):
    """Warn that the network was unreachable for the previous send."""
    message = "Unreachable network for previous send"
    log_warning(message, state, 1)
def on_unregister_unkeyed_instance(match, state):
    """Warn about an attempt to unregister an instance without a key."""
    message = "[LP-6] Try to unregister instance with no key field."
    log_warning(message, state)
def on_register_unkeyed_instance(match, state):
    """Warn about an attempt to register an instance without a key."""
    message = "[LP-4] Try to register instance with no key field."
    log_warning(message, state)
def on_rejected_data(match, state):
    """Report a sample rejected by a DataReader."""
    sn = parse_sn(match[0])
    log_process("", "", "Reader rejected DATA (%d)" % sn, state)
    log_warning("A DataReader rejected sample %d" % sn, state)
def on_sample_received_from_deleted_writer(match, state):
    """Warn that a sample arrived from a remote DataWriter already deleted."""
    message = "Sample received from an already gone remote DataWriter."
    log_warning(message, state, 1)
def on_error_no_transport_available(match, state):
    """Warn that no transport can reach the given locator."""
    locator = get_locator(match[0], state)
    log_warning(
        "[LP-12] No transport available to reach locator %s" % locator,
        state, 1)