def setup(self):
    """Set up the congestion control scheme on the local side and, when a
    remote host is configured, run the remote side's setup.py for the same
    scheme over SSH."""
    self.setup_congestion_control()

    if not self.remote:
        return

    # run remote setup.py for this scheme
    remote_info = parse_remote(self.remote)
    remote_cmd = remote_info['ssh_cmd'] + [
        'python', remote_info['setup'], self.cc]
    check_call(remote_cmd)
def install(self):
    """Ask the scheme's wrapper script for its package dependencies and
    install them via apt-get when any are reported."""
    deps = check_output(['python', self.src_file, 'deps']).strip()

    if not deps:
        return

    sys.stderr.write('Installing dependencies...\n')
    apt_cmd = 'sudo apt-get -yq --force-yes install ' + deps
    check_call(apt_cmd, shell=True)
def get_experiment_folder(experiment_arg):
    """Resolve an experiment argument to a local folder name.

    A plain folder name is returned unchanged. A '.tar.xz' argument is
    extracted first (after downloading it with wget when it is an https
    URL), and the name of the extracted folder is returned.
    """
    folder = experiment_arg

    if not folder.endswith('.tar.xz'):
        return folder

    if folder.startswith('https://'):
        check_call(['wget', '-c', folder])
        # strip 'https://' and the URL path, leaving the tarball name
        folder = folder[len('https://'):].split('/')[-1]

    check_call(['tar', 'xJf', folder])
    return folder[:-len('.tar.xz')]
def pre_setup(self):
    """Run local pre-setup, then — when a remote host is configured —
    run the remote side's pre_setup.py over SSH, forwarding the remote
    interface name if one was given."""
    sys.stderr.write('Performing local pre-setup...\n')
    self.local_pre_setup()

    if not self.remote:
        return

    # run remote pre_setup.py
    sys.stderr.write('\nPerforming remote pre-setup...\n')
    remote_info = parse_remote(self.remote)
    remote_cmd = remote_info['ssh_cmd'] + ['python', remote_info['pre_setup']]
    if self.remote_if:
        remote_cmd += ['--local-interface', self.remote_if]
    check_call(remote_cmd)
def generate_report(self):
    """Write the LaTeX report for the analyzed schemes and compile it with
    pdflatex to pantheon_report.pdf in the data directory.

    Fix: the .tex file handle previously leaked if include_summary() or
    include_runs() raised; it is now closed in a finally block.
    """
    self.friendly_names = get_friendly_names(self.analyze_schemes)

    latex_path = '/tmp/pantheon-tmp/pantheon-report-%s.tex' % uuid.uuid4()
    self.latex = open(latex_path, 'w')
    try:
        # both helpers write to self.latex
        self.include_summary()
        self.include_runs()
    finally:
        self.latex.close()

    cmd = [
        'pdflatex', '-output-directory', self.data_dir,
        '-jobname', 'pantheon_report', latex_path]
    check_call(cmd)
def main():
    """Install analysis dependencies (texlive, matplotlib, numpy, pip via
    apt-get; tabulate via pip), then the pantheon tunnel.

    Package installation is best-effort: failures produce a warning
    instead of aborting.
    """
    # prepare /tmp/pantheon-tmp to store .tex file
    make_sure_path_exists('/tmp/pantheon-tmp')

    # install texlive, matplotlib, etc.
    cmd = ('sudo apt-get -yq --force-yes install '
           'texlive python-matplotlib python-numpy python-pip')
    try:
        check_call(cmd, shell=True)
        # install tabulate for compare_two_experiments.py
        check_call('sudo pip install tabulate', shell=True)
    except Exception:
        # was a bare 'except:', which also swallowed
        # KeyboardInterrupt/SystemExit; keep the best-effort warning
        sys.stderr.write(
            'Warning: some dependencies may not be installed properly\n')

    install_pantheon_tunnel()
def main():
    """Install analysis dependencies (texlive via apt-get; matplotlib,
    numpy, tabulate as apt python packages), then the pantheon tunnel.

    Package installation is best-effort: failures produce a warning
    instead of aborting.
    """
    # prepare /tmp/pantheon-tmp to store .tex file
    make_sure_path_exists('/tmp/pantheon-tmp')

    # install texlive
    cmd1 = 'sudo apt-get -yq --force-yes install texlive'
    # install python packages
    cmd2 = ('sudo apt-get -yq --force-yes install '
            'python-matplotlib python-numpy python-tabulate')
    try:
        check_call(cmd1, shell=True)
        check_call(cmd2, shell=True)
    except Exception:
        # was a bare 'except:', which also swallowed
        # KeyboardInterrupt/SystemExit; keep the best-effort warning
        sys.stderr.write(
            'Warning: some dependencies may not be installed properly\n')

    install_pantheon_tunnel()
def initialize(self):
    """Run the scheme wrapper's 'init' step.

    Fix: corrected typo in the progress message
    ('intialization' -> 'initialization').
    """
    sys.stderr.write('Performing initialization commands...\n')
    cmd = ['python', self.src_file, 'init']
    check_call(cmd)
def build(self):
    """Run the scheme wrapper's 'build' step."""
    sys.stderr.write('Building...\n')
    check_call(['python', self.src_file, 'build'])
def main():
    """Top-level test driver: parse arguments, build the pre_setup/setup/
    test command lines, then run setup and/or the test matrix (every
    scheme x every run) depending on --run-only."""
    # arguments and source files location setup
    args = parse_arguments(path.basename(__file__))

    test_dir = path.abspath(path.dirname(__file__))
    src_dir = path.abspath(path.join(test_dir, '../src'))
    root_dir = path.abspath(path.join(test_dir, os.pardir))
    pre_setup_src = path.join(test_dir, 'pre_setup.py')
    setup_src = path.join(test_dir, 'setup.py')
    test_src = path.join(test_dir, 'test.py')
    metadata_fname = path.join(test_dir, 'pantheon_metadata.json')

    # test congestion control schemes
    pre_setup_cmd = ['python', pre_setup_src]
    setup_cmd = ['python', setup_src]
    test_cmd = ['python', test_src]

    # forward the remote spec to all three helper scripts
    if args.remote:
        pre_setup_cmd += ['-r', args.remote]
        setup_cmd += ['-r', args.remote]
        test_cmd += ['-r', args.remote]

    test_cmd += ['-t', str(args.runtime), '-f', str(args.flows)]
    if args.flows > 1:
        test_cmd += ['--interval', str(args.interval)]

    # NOTE(review): tunnel-server / local-addr / ntp-addr / sender-side
    # are taken to apply only to remote tests — confirm nesting against
    # the upstream source.
    if args.remote:
        test_cmd += ['--tunnel-server', args.server_side]
        if args.local_addr:
            test_cmd += ['--local-addr', args.local_addr]
        if args.ntp_addr:
            test_cmd += ['--ntp-addr', args.ntp_addr]
        test_cmd += ['--sender-side', args.sender_side]

    if args.local_if:
        pre_setup_cmd += ['--local-interface', args.local_if]
        test_cmd += ['--local-interface', args.local_if]

    if args.remote_if:
        pre_setup_cmd += ['--remote-interface', args.remote_if]
        test_cmd += ['--remote-interface', args.remote_if]

    # mahimahi emulation flags only make sense for local (mm-link) tests
    if args.downlink_trace and not args.remote:
        test_cmd += ['--downlink-trace', args.downlink_trace]
    if args.uplink_trace and not args.remote:
        test_cmd += ['--uplink-trace', args.uplink_trace]
    if args.prepend_mm_cmds and not args.remote:
        test_cmd += ['--prepend-mm-cmds', args.prepend_mm_cmds]
    if args.append_mm_cmds and not args.remote:
        test_cmd += ['--append-mm-cmds', args.append_mm_cmds]
    if args.extra_mm_link_args and not args.remote:
        test_cmd += ['--extra-mm-link-args', args.extra_mm_link_args]

    # --run-only selects which phases execute (default: both)
    run_setup = True
    run_test = True
    if args.run_only == 'setup':
        run_test = False
    elif args.run_only == 'test':
        run_setup = False

    cc_schemes = args.schemes.split()
    if not check_cc_schemes_valid(src_dir, cc_schemes):
        exit(1)

    if args.random_order:
        random.shuffle(cc_schemes)

    # setup and run each congestion control
    if run_setup:
        # check for git version mismatch
        get_git_info(args, root_dir, check_modified_files=False)
        check_call(pre_setup_cmd)
        for cc in cc_schemes:
            cmd = setup_cmd + [cc]
            check_call(cmd)

    error_in_test = False
    if run_test:
        git_info = get_git_info(args, root_dir, check_modified_files=True)
        create_metadata_file(args, git_info, metadata_fname)

        sys.stderr.write('\n')
        # run every scheme once per experiment run
        for run_id in xrange(1, 1 + args.run_times):
            i = 0
            for cc in cc_schemes:
                i += 1
                msg = ('Running scheme %d of %d (%s) for experiment run %d of '
                       '%d.\n' % (i, len(cc_schemes), cc, run_id,
                                  args.run_times))
                sys.stderr.write(msg)
                cmd = test_cmd + ['--run-id', str(run_id), cc]
                try:
                    check_call(cmd)
                except subprocess.CalledProcessError as e:
                    # 'r' + msg[1:] lowercases "Running ..." mid-sentence
                    sys.stderr.write('run.py returned %d while r%s' %
                                     (e.returncode, msg[1:]))
                    sys.stderr.write('It\'s output was %s\n' % str(e.output))
                    # keep going: remaining schemes/runs still execute
                    error_in_test = True

    if error_in_test:
        sys.stderr.write('Error in tests!\n')
        exit(1)
    else:
        sys.stderr.write('All tests done!\n')
def local_pre_setup(self):
    """Prepare the local machine for tests: update git submodules, create
    the temp directory, enable IP forwarding, optionally disable reverse
    path filtering on the chosen interface, and install mahimahi, ntp and
    the pantheon tunnel."""
    # update submodules
    submodule_cmd = (
        'cd %s && git submodule update --init --recursive' % self.root_dir)
    check_call(submodule_cmd, shell=True)

    # prepare /tmp/pantheon-tmp
    make_sure_path_exists('/tmp/pantheon-tmp')

    # enable IP forwarding
    check_call('sudo sysctl -w net.ipv4.ip_forward=1', shell=True)

    # disable reverse path filter on both 'all' and the local interface
    if self.local_if:
        rpf = ' /proc/sys/net/ipv4/conf/%s/rp_filter'
        rpf_cmd = 'echo 0 | sudo tee' + rpf % 'all' + rpf % self.local_if
        check_call(rpf_cmd, shell=True)

    # add the mahimahi PPA, refresh listings, then install packages
    check_call('sudo add-apt-repository -y ppa:keithw/mahimahi', shell=True)
    check_call('sudo apt-get update', shell=True)
    check_call('sudo apt-get -yq --force-yes install mahimahi ntp ntpdate',
               shell=True)

    install_pantheon_tunnel()
def main():
    """Analysis driver: locate (or download and unpack) the experiment
    data directory, then run analysis_pre_setup.py (unless skipped),
    plot_summary.py and generate_report.py inside it.

    Fix: the tarball name was previously obtained by spawning the
    external 'basename' program; path.basename does the same string
    operation in-process.
    """
    args = parse_arguments(path.basename(__file__))
    analyze_dir = path.abspath(path.dirname(__file__))

    if args.s3_link:
        # download .tar.xz from S3 and decompress
        os.chdir(args.s3_dir_prefix)

        # equivalent to `basename <url>` for S3 object URLs
        tar_name = path.basename(args.s3_link).strip()
        tar_dir = tar_name[:-7]  # strip '.tar.xz'

        # remove any stale copy before re-downloading
        check_call(['rm', '-rf', tar_name, tar_dir])
        check_call(['wget', args.s3_link])
        check_call(['tar', 'xJf', tar_name])
        os.chdir(tar_dir)
    else:
        os.chdir(args.data_dir)

    # prepare scripts path
    analyze_pre_setup = path.join(analyze_dir, 'analysis_pre_setup.py')
    plot_summary = path.join(analyze_dir, 'plot_summary.py')
    generate_report = path.join(analyze_dir, 'generate_report.py')

    if not args.no_pre_setup:
        check_call(['python', analyze_pre_setup])

    plot_summary_cmd = ['python', plot_summary]
    generate_report_cmd = ['python', generate_report]

    if args.include_acklink:
        plot_summary_cmd.append('--include-acklink')
        generate_report_cmd.append('--include-acklink')

    if args.analyze_schemes:
        plot_summary_cmd += ['--analyze-schemes', args.analyze_schemes]
        generate_report_cmd += ['--analyze-schemes', args.analyze_schemes]

    check_call(plot_summary_cmd)
    check_call(generate_report_cmd)
def merge_tunnel_logs(self):
    """Merge per-flow tunnel ingress/egress logs into single per-flow
    logs, then merge all per-flow logs (plus the mahimahi link log when
    running locally) into the final datalink and acklink log files."""
    datalink_tun_logs = []
    acklink_tun_logs = []

    for i in xrange(self.flows):
        tun_id = i + 1  # tunnel IDs are 1-based
        if self.remote:
            # download logs from remote side; '%(log)s %(log)s' expands
            # to the same path as scp source and local destination
            scp_cmd = 'scp -C %s:' % self.rd['addr']
            scp_cmd += '%(log)s %(log)s'

            # which logs live on the remote side depends on which side
            # was the sender
            if self.sender_side == 'remote':
                check_call(scp_cmd % {'log': self.acklink_ingress_logs[i]},
                           shell=True)
                check_call(scp_cmd % {'log': self.datalink_egress_logs[i]},
                           shell=True)
            else:
                check_call(scp_cmd % {'log': self.datalink_ingress_logs[i]},
                           shell=True)
                check_call(scp_cmd % {'log': self.acklink_egress_logs[i]},
                           shell=True)

        # unique names so concurrent/repeated runs don't collide
        uid = uuid.uuid4()
        datalink_tun_log = (
            '/tmp/pantheon-tmp/%s_flow%s_uid%s.log.merged' %
            (self.datalink_name, tun_id, uid))
        acklink_tun_log = (
            '/tmp/pantheon-tmp/%s_flow%s_uid%s.log.merged' %
            (self.acklink_name, tun_id, uid))

        # merge this flow's ingress and egress logs into one log
        cmd = ['merge-tunnel-logs', 'single',
               '-i', self.datalink_ingress_logs[i],
               '-e', self.datalink_egress_logs[i],
               '-o', datalink_tun_log]
        check_call(cmd)

        cmd = ['merge-tunnel-logs', 'single',
               '-i', self.acklink_ingress_logs[i],
               '-e', self.acklink_egress_logs[i],
               '-o', acklink_tun_log]
        check_call(cmd)

        datalink_tun_logs.append(datalink_tun_log)
        acklink_tun_logs.append(acklink_tun_log)

    # merge all per-flow datalink logs into the final datalink log;
    # include the mahimahi link log only for local (emulated) runs
    cmd = ['merge-tunnel-logs', 'multiple', '-o', self.datalink_log_path]
    if not self.remote:
        cmd += ['--link-log', self.mm_datalink_log]
    cmd += datalink_tun_logs
    check_call(cmd)

    # same for the acklink direction
    cmd = ['merge-tunnel-logs', 'multiple', '-o', self.acklink_log_path]
    if not self.remote:
        cmd += ['--link-log', self.mm_acklink_log]
    cmd += acklink_tun_logs
    check_call(cmd)