def capture_packets(params, tag='src', src_tag=None):
    if tag == 'src':
        port_info = src_info
    elif tag == 'dst':
        port_info = dst_info
    else:
        error('tag has to be one of [src, dst]!')
        return

    # XXX TODO
    # If src_tag and dst_tag are the same, then we need to monitor on just
    # br-int. Else, we will need to monitor on qr- ports (router ports)
    port_info['pids'] = []
    for port in port_info['ports'].keys():
        intf = port_info['ports'][port]
        filename = '%s.tcpdump.%s.txt' % (tag, intf)
        if os.path.isfile(filename):
            os.remove(filename)
        cmd = 'sudo tcpdump -v icmp -i %s -c %d -l > %s 2>&1' % (
            intf, params['count'], filename)
        pid = subprocess.Popen(cmd, shell=True).pid
        port_info['pids'].append(pid)
        status_update('%s: tcpdump launched with pid %d for interface %s' %
                      (tag, pid, intf))
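# Illustrative sketch (not part of the original source): capture_packets() reads
# the 'ports' dict (port id -> interface name) of src_info/dst_info, appends the
# pids of the tcpdump processes it launches to 'pids', and path() compares the
# 'tag' values. All values below are hypothetical placeholders.
example_port_info = {
    'tag': '1',                                  # network tag compared in path()
    'ports': {'port-id-1': 'tap-example-intf'},  # hypothetical port id -> interface name
    'pids': [],                                  # filled in by capture_packets()
}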
def cleanup_processes(pid_list):
    """Kill any ping/tcpdump processes that are still running."""
    pprint.pprint(pid_list)
    for pid in pid_list:
        try:
            os.kill(pid, signal.SIGKILL)
            status_update('Successfully killed pid: %d' % pid)
        except OSError:
            status_update('Process with pid: %d no longer exists' % pid)
            continue
def path(params):
    """Trace the path between two IPs and dump the results as JSON."""
    global src_info
    global dst_info
    global net_info

    src_info = None
    dst_info = None
    net_info = None

    settings['debug'] = True

    BASE_DIR = os.path.dirname(os.path.dirname(__file__))
    CUR_DIR = os.getcwd()
    if not re.search('/openstack_dashboard/don/', CUR_DIR):
        os.chdir(BASE_DIR + '/ovs')
    NEW_DIR = os.getcwd()
    debug(BASE_DIR + ':' + CUR_DIR + ':' + NEW_DIR)

    src_ip = params['src_ip']
    dst_ip = params['dst_ip']
    json_file = params['json_file']
    router = params['router']

    debug('Json_file: ' + json_file)
    info = load_json(json_file)

    qrouter = router_to_namespace(info, router)
    params['qrouter'] = qrouter

    src_info = get_port_info(info, src_ip)
    dst_info = get_port_info(info, dst_ip)

    if src_info is None:
        return "Source ip not found on the network"
    if dst_info is None:
        return "Destination ip not found on the network"
    if qrouter is None:
        return "No such router information found on the network"

    # src and dst are in the same network
    if src_info['tag'] == dst_info['tag']:
        path_same_network(params)
    else:
        status_update('The source and destination are in different networks')
        next_hop_list = get_next_hop(src_info, dst_info, qrouter, params)
        if len(next_hop_list) == 0:
            error('Could not find next hop list from %s to %s' %
                  (src_ip, dst_ip))
        path_same_network(params, next_hop_list)
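# Illustrative sketch (not part of the original source): these are the keys read
# by path(), path_same_network() and capture_packets(); all values are
# hypothetical placeholders.
example_params = {
    'src_ip': '10.0.0.3',        # hypothetical source instance IP
    'dst_ip': '10.0.0.4',        # hypothetical destination instance IP
    'json_file': 'don.json',     # topology file consumed by load_json()
    'router': 'router1',         # router name resolved to a qrouter namespace
    'username': 'cirros',        # ssh credentials used by launch_ping()
    'passwd': 'secret',
    'count': 5,                  # number of ICMP packets / tcpdump -c value
    'timeout': 2,
    'path_file': 'path.json',    # where the results are dumped
    'plot': False,               # whether to run plot.py on the results
}
# 'qrouter' is filled in by path() itself via router_to_namespace().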
def qrouter_usable(qrouter, src_ip, dst_ip, username, passwd):
    """Check whether src_ip is reachable through the given qrouter namespace."""
    status_update('Testing whether %s is reachable via qrouter %s (dst %s)' %
                  (src_ip, qrouter, dst_ip))
    outfile = 'path.testping.txt'
    ping_process = launch_ping(src_ip, dst_ip, username, passwd, 2, 2,
                               qrouter, outfile)
    time.sleep(5)
    ping_pass = process_ping(outfile, src_ip, check_ssh_connectivity_only=True)
    if ping_pass:
        status_update('IP %s is reachable via qrouter: %s' % (src_ip, qrouter))
        return True
    else:
        error('IP %s is not reachable via qrouter: %s' % (src_ip, qrouter))
        return False
def capture_network_packets(params, hop_list):
    global net_info
    net_info = {
        'pids': [],
        'hops': hop_list,
    }
    for hop in net_info['hops']:
        dev = hop['dev']
        nms = hop['nms']
        filename = 'net.tcpdump.%s.txt' % (dev)
        if os.path.isfile(filename):
            os.remove(filename)
        cmd = 'sudo ip netns exec %s ' % nms
        cmd += 'tcpdump -v icmp -i %s -c %d -l > %s 2>&1' % (
            dev, params['count'], filename)
        pid = subprocess.Popen(cmd, shell=True).pid
        net_info['pids'].append(pid)
        status_update('net: tcpdump launched with pid %d for interface %s' %
                      (pid, dev))
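# Illustrative sketch (not part of the original source): each hop handed to
# capture_network_packets() needs a 'dev' (interface) and 'nms' (namespace);
# the values below are hypothetical placeholders.
example_hop_list = [
    {'dev': 'qr-example-port', 'nms': 'qrouter-example-uuid'},
]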
def process_ping(filename, ip=None, check_ssh_connectivity_only=False):
    """Parse the JSON ping result file and report whether the ping (or ssh) passed."""
    if not os.path.isfile(filename):
        return False
    status_update('Trying to read ' + filename)
    with open(filename) as f:
        lines = f.readlines()
    pprint.pprint(lines)

    info = load_json(filename)
    if not check_ssh_connectivity_only:
        return info.get('pass', False)

    cmd_list = info['command_list']
    for cmd in cmd_list:
        m = re.search(r'ssh (\S+) with provided username and passwd',
                      cmd['cmd'])
        if m:
            if ip == m.group(1):
                return cmd['pass']
    return False
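# Illustrative sketch (not part of the original source): process_ping() expects
# the JSON file to carry a top-level 'pass' flag and a 'command_list' whose
# entries have 'cmd' and 'pass'. The entries below are hypothetical, apart from
# the 'ssh ... with provided username and passwd' phrase matched by the regex
# above.
example_ping_result = {
    'pass': True,
    'command_list': [
        {'cmd': 'ssh 10.0.0.3 with provided username and passwd', 'pass': True},
        {'cmd': 'ping 10.0.0.4', 'pass': True},
    ],
}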
def main():
    """Run all registered commands, parse their output, and dump the collected info."""
    check_args()

    iteration = 0
    # Parser of any specific command might add more commands to be executed.
    # Hence continue in a loop.
    while True:
        if all_commands_executed(commands) or iteration >= 10:
            break
        iteration += 1
        status_update('Iteration: ' + str(iteration))
        sorted_keys = sorted(commands.items(), key=lambda (k, v): v['order'])
        for (cmd, dontcare) in sorted_keys:
            # Only collect stuff for which we have written a parser
            if commands[cmd]['parser']:
                if commands[cmd].get('done', False):
                    continue
                if 'help' in commands[cmd]:
                    status_update(commands[cmd]['help'])
                shell = commands[cmd].get('shell', False)
                env = None
                if commands[cmd].get('env', False):
                    env = myenv
                sudo = commands[cmd].get('sudo', False)
                if deployment_type == 'multinode':
                    # handling for network node
                    if cmd.startswith('netns_'):
                        commands[cmd]['output'] = exec_on_remote(
                            commands[cmd]['cmd'])
                    if cmd == 'cat_instance':
                        commands[cmd]['output'] = get_vm_info_from_compute(
                            commands[cmd]['cmd'])
                        print commands[cmd]['output']
                    else:
                        commands[cmd]['output'] = execute_cmd(
                            commands[cmd]['cmd'], sudo=sudo, shell=shell,
                            env=env).split('\n')
                else:
                    commands[cmd]['output'] = execute_cmd(
                        commands[cmd]['cmd'], sudo=sudo, shell=shell,
                        env=env).split('\n')
                commands[cmd]['parser'](commands[cmd]['output'])
                commands[cmd]['done'] = True

    debug('============= COMMANDS =============')
    # debug(pprint.pformat(commands))

    status_update('Writing collected info into ' + settings['info_file'])
    dump_json(info, settings['info_file'])
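# Illustrative sketch (not part of the original source): main() drives a global
# 'commands' dict whose entries look roughly like the one below. Only keys read
# above are shown ('output' and 'done' are set by the loop itself); the command
# and its values are hypothetical placeholders.
example_command_entry = {
    'cmd': 'ip netns',             # hypothetical command to run
    'parser': None,                # set to a parser function to have it collected
    'order': 1,                    # execution order used for sorting
    'help': 'List network namespaces',
    'shell': False,
    'env': False,
    'sudo': True,
}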
def path_same_network(params, nms_hops=None):
    src_ip = params['src_ip']
    dst_ip = params['dst_ip']
    json_file = params['json_file']
    username = params['username']
    passwd = params['passwd']
    count = params['count']
    timeout = params['timeout']
    qrouter = params['qrouter']
    router = params['router']

    if qrouter_usable(qrouter, src_ip, dst_ip, username, passwd):
        outfile = 'path.ping.txt'
        ping_process = launch_ping(src_ip, dst_ip, username, passwd, count,
                                   timeout, qrouter, outfile)
        debug('Ping started with pid: %d' % ping_process.pid)

        capture_packets(params, 'src')
        capture_packets(params, 'dst', src_tag=src_info['tag'])
        if src_info['tag'] != dst_info['tag']:
            capture_network_packets(params, nms_hops)

        status_update('Waiting %s sec for tcpdump and ping processes to complete' %
                      (params['count'] + 4))
        time.sleep(params['count'] + 4)

        status_update('If processes have not stopped, kill them')
        cleanup_processes([ping_process.pid] + src_info['pids'] +
                          dst_info['pids'])
        if net_info:
            cleanup_processes(net_info['pids'])

        process_captures('src')
        process_captures('dst')
        if src_info['tag'] != dst_info['tag']:
            process_network_captures()

        ping_pass = process_ping(outfile)

        debug(pprint.pformat(src_info))
        debug(pprint.pformat(dst_info))
        debug(pprint.pformat(net_info))

        info = {
            'src': src_ip,
            'dst': dst_ip,
            'src_info': src_info,
            'dst_info': dst_info,
            'net_info': net_info,
            'ping_pass': ping_pass,
            'error': '',
        }
        status_update('Dumping results into %s in JSON format' %
                      params['path_file'])
        dump_json(info, params['path_file'])

        if params['plot']:
            cmd = ('python plot.py --info_file %s --highlight_file %s '
                   '--combined_file static/ping' % (json_file,
                                                    params['path_file']))
            status_update('Running ' + cmd)
            output = execute_cmd(cmd, shell=True).split('\n')
            debug(pprint.pformat(output))
        status_update('Done')
    else:
        err_msg = 'Cannot reach %s via router %s' % (src_ip, router)
        info = {
            'src': src_ip,
            'dst': dst_ip,
            'src_info': src_info,
            'dst_info': dst_info,
            'ping_pass': False,
            'error': err_msg,
        }
        error(err_msg)
        status_update('Dumping results into %s in JSON format' %
                      params['path_file'])
        dump_json(info, params['path_file'])
        status_update('Done')