def run_tests2():
    """Build all_tests.sln via premake + devenv, then run every test exe.

    Returns None on success, or an error-message string describing the
    first failure. Changes the process working directory as it goes.
    """
    if not os.path.exists("premake4.lua"):
        return "premake4.lua doesn't exist in current directory (%s)" % os.getcwd()
    err = run_premake()
    if err is not None:  # was `!= None`
        return err
    p = os.path.join("vs-premake", "all_tests.sln")
    if not os.path.exists(p):
        return "%s doesn't exist" % p
    os.chdir("vs-premake")
    try:
        util.kill_msbuild()
    except Exception:  # was a bare except: don't swallow KeyboardInterrupt
        return "util.kill_msbuild() failed"
    try:
        (out, err, errcode) = util.run_cmd("devenv", "all_tests.sln", "/build", "Release")
        if errcode != 0:
            return "devenv.exe failed to build all_tests.sln\n" + fmt_out_err(out, err)
    except Exception:  # was a bare except
        return "devenv.exe not found"
    p = os.path.join("..", "obj-rel")
    os.chdir(p)
    test_files = [f for f in os.listdir(".") if is_test_exe(f)]
    print("Running %d test executables" % len(test_files))
    for f in test_files:
        try:
            (out, err, errcode) = util.run_cmd(f)
            if errcode != 0:
                return "%s failed with:\n%s" % (f, fmt_out_err(out, err))
            print(fmt_out_err(out, err))
        except Exception:  # was a bare except
            return "%s failed to run" % f
    return None
def siamese_seq2seq_data_pretrain(param_dic, args, data_dir):
    # Generate seq2seq (cgk) pretraining pairs by shelling out to
    # simulate_data.py; when training and n_clusters_validation > 0,
    # also generates a validation pair file.
    logPrint('siamese_seq2seq_data_pretrain')
    seq2seq_pair = '%s/pretrain/seq2seq_cgk.txt' % data_dir #seq2seq_cgk of seq2seq architecture
    cmd = 'python simulate_data.py gen_seq2seq '+\
          '--output %s '%seq2seq_pair+\
          '--seq_type %s '%param_dic['seq_type']+\
          '--seq2seq_type %s '%param_dic['seq2seq_type']+\
          '--num %s '%param_dic['n_clusters']+\
          '--length %s '%param_dic['cluster_len']
    #pdb.set_trace()
    run_cmd(cmd)
    if args.purpose == 'train' and param_dic['n_clusters_validation'] > 0:
        #generate validation data
        seq2seq_pair_validation = '%s/pretrain/seq2seq_cgk_validation.txt' % data_dir
        cmd = 'python simulate_data.py gen_seq2seq '+\
              '--output %s '%seq2seq_pair_validation+\
              '--seq_type %s '%param_dic['seq_type']+\
              '--seq2seq_type %s '%param_dic['seq2seq_type']+\
              '--num %s '%param_dic['n_clusters_validation']+\
              '--length %s '%param_dic['cluster_len']
        #pdb.set_trace()
        run_cmd(cmd)
    return
def jekyll_build(checkout_path):
    ''' Run `jekyll build` for a checkout, skipping when already built.

        Returns the path of the generated _site directory. Sidecar files
        (<checkout>.built-hash / <checkout>.commit-hash) record what was
        last built; the build is skipped when they match.
        NOTE(review): assumes .commit-hash exists whenever .built-hash
        does -- confirm against the checkout code path.
    '''
    checkout_lock = checkout_path + '.jekyll-lock'
    jekyll_path = join(checkout_path, '_site')
    built_hash_file = checkout_path + '.built-hash'
    hash_file = checkout_path + '.commit-hash'
    if exists(jekyll_path) and is_fresh(jekyll_path):
        return jekyll_path
    with locked_file(checkout_lock):
        do_build = True
        if exists(built_hash_file):
            # Close file handles deterministically (was bare open().read()).
            with open(built_hash_file) as f:
                built_hash = f.read().strip()
            with open(hash_file) as f:
                commit_hash = f.read().strip()
            if built_hash == commit_hash:
                jlogger.debug('Skipping build to ' + jekyll_path)
                do_build = False
        if do_build:
            jlogger.info('Building jekyll ' + jekyll_path)
            run_cmd(('jekyll', 'build'), checkout_path)
            if exists(hash_file):
                copyfile(hash_file, built_hash_file)
        touch(jekyll_path)
    return jekyll_path
def git_checkout(repo_path, checkout_path, ref):
    '''
    Check out a git repository to a given reference and path.

    This function is assumed to be run in a lock.
    '''
    jlogger.info('Checking out to ' + checkout_path)
    if not exists(checkout_path):
        mkdir(checkout_path)
    # Sidecar file recording the commit last checked out to checkout_path.
    hash_file = checkout_path + '.commit-hash'
    commit_hash = get_ref_sha(repo_path, ref)
    do_checkout = True
    if exists(hash_file):
        previous_hash = open(hash_file).read().strip()
        if previous_hash == commit_hash:
            # Working tree already matches the requested commit.
            jlogger.debug('Skipping checkout to '+checkout_path)
            do_checkout = False
    if do_checkout:
        run_cmd(('git', '--work-tree='+checkout_path, 'checkout', ref, '--', '.'), repo_path)
    touch(checkout_path)
    # Python 2 print-chevron: writes commit_hash plus trailing newline.
    with open(hash_file, 'w') as file:
        print >> file, commit_hash
def get_cert(site):
    """Retrieve an SSL certificate for a virtual host via the KeyTalk client.

    Returns the path of the newest *.pem file in KEYSTORE_DIR. Raises a
    descriptive Exception for known authentication failures, re-raises
    anything else.
    """
    # Shell-quoted copy of site, used for command-line interpolation.
    quoted_site = shellquoted_site(site)
    Logger.info(
        'Retrieving SSL certificate for virtual host at {VHost}. Provider {KeyTalkProvider}, service {KeyTalkService}, user {KeyTalkUser}'
        .format(**site))
    cmd = KTCLIENT_APP_PATH + \
        ' --provider {KeyTalkProvider} --service {KeyTalkService} --user {KeyTalkUser}'.format(
            **quoted_site)
    if site['KeyTalkPassword'] is not None:
        cmd += ' --password {KeyTalkPassword}'.format(**quoted_site)
    try:
        # Password is censored from any logged command text.
        util.run_cmd(cmd, Logger, censored_text_list=[site['KeyTalkPassword']])
    except util.CmdFailedException as ex:
        if ex.retval == util.AUTH_DELAY:
            raise Exception(
                'Authentication to service "{}" of provider "{}" unsuccessful. Invalid credentials, delay before reattempt possible, message: "{}" "{}"'
                .format(site['KeyTalkService'], site['KeyTalkProvider'], ex.stderr, ex.stdout))
        elif ex.retval == util.AUTH_USER_LOCKED:
            raise Exception(
                'Authentication to service "{}" of provider "{}" unsuccessful. User locked out, message: "{}" "{}"'
                .format(site['KeyTalkService'], site['KeyTalkProvider'], ex.stderr, ex.stdout))
        elif ex.retval == util.PASSWD_EXPIRED:
            raise Exception(
                'Authentication to service "{}" of provider "{}" unsuccessful. Password expired, message: "{}" "{}"'
                .format(site['KeyTalkService'], site['KeyTalkProvider'], ex.stderr, ex.stdout))
        else:
            raise
    # Newest .pem is assumed to be the one just retrieved -- NOTE(review):
    # possible race if certificates are retrieved concurrently; confirm.
    pem_cert_key_path = max(glob.glob(KEYSTORE_DIR + '/*.pem'), key=os.path.getctime)
    Logger.debug('Retrieved KeyTalk certificate at ' + pem_cert_key_path)
    return pem_cert_key_path
def invoke_fun(name, data, expected):
    """Invoke service `name` with `data` via the CLI and assert the last
    output line equals str(expected).

    Numeric payloads are sent with --json, everything else with --text.
    """
    # Collapse the two nearly-identical branches into one command build.
    is_numeric = str(data).isdigit()
    flag = "--json" if is_numeric else "--text"
    payload = str(data) if is_numeric else data
    output = util.run_cmd([util.cli, "service", "invoke", name, flag,
                           "--", "-w", "\\n", "-d", payload])
    result = output[-1]  # was output[len(output)-1]
    assert result == str(expected)
def run_assembler(assembler_name, read_alignment, res_gtf, params=None):
    """Invoke a transcript assembler (e.g. stringtie) on a read alignment.

    Args:
        assembler_name: which assembler to use
        read_alignment: absolute path of read alignment file
        res_gtf: absolute path to store assembly result
        params: optional dict mapping param strings (e.g. -h) to values

    Returns:
        res_gtf, after the assembler has written its GTF output there.
    """
    cmd = get_default_cmds(assembler_name, read_alignment, res_gtf)
    if params is not None:
        cmd += util.params_dic2str(params)
    util.run_cmd(cmd)
    util.logging('%s written'%(res_gtf))
    return res_gtf
def git_checkout(repo_path, checkout_path, ref):
    '''
    Check out a git repository to a given reference and path.

    This function is assumed to be run in a lock.
    '''
    jlogger.info('Checking out to ' + checkout_path)
    if not exists(checkout_path):
        mkdir(checkout_path)
    # Sidecar file recording the commit last checked out to checkout_path.
    hash_file = checkout_path + '.commit-hash'
    commit_hash = get_ref_sha(repo_path, ref)
    do_checkout = True
    if exists(hash_file):
        previous_hash = open(hash_file).read().strip()
        if previous_hash == commit_hash:
            # Working tree already matches the requested commit.
            jlogger.debug('Skipping checkout to ' + checkout_path)
            do_checkout = False
    if do_checkout:
        run_cmd(('git', '--work-tree=' + checkout_path, 'checkout', ref, '--', '.'), repo_path)
    touch(checkout_path)
    # Python 2 print-chevron: writes commit_hash plus trailing newline.
    with open(hash_file, 'w') as file:
        print >> file, commit_hash
def is_apache_running():
    """Return True iff an `httpd` process is currently running."""
    try:
        util.run_cmd("pgrep -x httpd")
    except util.CmdFailedException:
        # pgrep exits non-zero when no process matches.
        return False
    else:
        return True
def update_gem5_disk_image():
    # Loop-mount the gem5 disk image, copy the build folder onto it,
    # then unmount and remove the temporary mount point.
    print(' * Updating GEM5 disk image')
    image_path = configuration_gem5.disk_image_path
    image_folder = configuration_gem5.disk_image_mount_folder
    build_folder = configuration_gem5.build_folder
    # Guard: refuse to proceed if the image is already mounted somewhere.
    cmd = 'mount | grep %(image_folder)s' % locals()
    if util.run_cmd(cmd) == 0:
        raise Exception('Gem5 image already mounted')
    if not os.path.isdir(image_folder):
        os.makedirs(image_folder)
    # offset=32256 presumably skips the MBR/partition table to the first
    # partition (63 * 512 bytes) -- confirm against the image layout.
    cmd = 'sudo mount -o loop,offset=32256 %(image_path)s %(image_folder)s' % locals()
    if util.run_cmd(cmd) != 0:
        raise Exception('Unable to mount image')
    cmd = 'sudo cp -r %(build_folder)s %(image_folder)s' % locals()
    if util.run_cmd(cmd) != 0:
        raise Exception('Unable to copy build folder')
    if util.run_cmd('sudo umount %(image_folder)s' % locals()) != 0:
        raise Exception('Unable to umount image')
    os.rmdir(image_folder)
def stop_and_get_result(self): """ Returns the result as a TcpdumpResult object. """ util.run_cmd('pkill tcpdump').wait() # Parse the number of packets dropped by the kernel. logf = open('/tmp/tcpdump.log') result = TcpdumpResult() for line in logf: r = re.search('(\d+) packets received by filter', line) if r: result.recvd_pkt_count = int(r.group(1)) r = re.search('(\d+) packets dropped by kernel', line) if r: result.dropped_pkt_count = int(r.group(1)) logf.close() # Displays the result of tcpdump if self.config.verbose: print 'TCPDUMP - received packets:', print result.recvd_pkt_count print 'dropped packets:', print result.dropped_pkt_count return result
def test_removing_simple_package(self):
    """Installing `pkg-` (trailing dash) removes an installed package."""
    out1, return_code1, err1 = run_cmd("apt-get -y install rolldice")
    out2, return_code2, err2 = run_cmd("apt-get -y install rolldice-")
    # `is None`, not `== None` (PEP 8)
    assert err2 is None and return_code2 == 0
    out3 = run_cmd("dpkg --status rolldice | grep ^Status")[0]
    assert "is not installed" in "\n".join(
        list(out3)
    ), "Expected 'is not installed' status after success removing package"
def test_instal_simple_package_with_another_package(self):
    """Installing two packages in one apt-get call installs both."""
    out1, return_code1, err1 = run_cmd(
        "apt-get -y install rolldice nsnake")
    # `is None`, not `== None` (PEP 8)
    assert err1 is None and return_code1 == 0
    out2 = run_cmd("dpkg --status rolldice | grep ^Status")[0]
    assert "ok installed" in "\n".join(list(out2))
    out3 = run_cmd("dpkg --status nsnake | grep ^Status")[0]
    assert "ok installed" in "\n".join(list(out3))
def test_instal_simple_package_version(self):
    """Installing pkg=version pins that exact version."""
    out1, return_code1, err1 = run_cmd(
        "apt-get -y install rolldice=1.10-5")
    # `is None`, not `== None` (PEP 8)
    assert err1 is None and return_code1 == 0
    out2 = run_cmd("dpkg --status rolldice | grep ^Status")[0]
    assert "ok installed" in list(out2)[0]
    out3 = run_cmd("dpkg --status rolldice | grep ^Version")[0]
    assert "1.10-5" in list(out3)[0]
def run_step(step): '''run one step''' # remove old logs util.run_cmd('/bin/rm -f logs/*.BIN logs/LASTLOG.TXT') if step == "prerequisites": return test_prerequisites() if step == 'build.ArduPlane': return util.build_SIL('ArduPlane', j=opts.j) if step == 'build.APMrover2': return util.build_SIL('APMrover2', j=opts.j) if step == 'build.ArduCopter': return util.build_SIL('ArduCopter', j=opts.j) if step == 'defaults.ArduPlane': return get_default_params('ArduPlane') if step == 'defaults.ArduCopter': return get_default_params('ArduCopter') if step == 'defaults.APMrover2': return get_default_params('APMrover2') if step == 'fly.ArduCopter': return arducopter.fly_ArduCopter(viewerip=opts.viewerip, map=opts.map) if step == 'fly.CopterAVC': return arducopter.fly_CopterAVC(viewerip=opts.viewerip, map=opts.map) if step == 'fly.ArduPlane': return arduplane.fly_ArduPlane(viewerip=opts.viewerip, map=opts.map) if step == 'drive.APMrover2': return apmrover2.drive_APMrover2(viewerip=opts.viewerip, map=opts.map) if step == 'build.All': return build_all() if step == 'build.Binaries': return build_binaries() if step == 'build.DevRelease': return build_devrelease() if step == 'build.Examples': return build_examples() if step == 'build.Parameters': return build_parameters() if step == 'convertgpx': return convert_gpx() raise RuntimeError("Unknown step %s" % step)
def run_step(step): '''run one step''' # remove old logs util.run_cmd('/bin/rm -f logs/*.BIN logs/LASTLOG.TXT') if step == "prerequisites": return test_prerequisites() if step == 'build.ArduPlane': return util.build_SIL('ArduPlane') if step == 'build.APMrover2': return util.build_SIL('APMrover2') if step == 'build.ArduCopter': return util.build_SIL('ArduCopter') if step == 'defaults.ArduPlane': return get_default_params('ArduPlane') if step == 'defaults.ArduCopter': return get_default_params('ArduCopter') if step == 'defaults.APMrover2': return get_default_params('APMrover2') if step == 'fly.ArduCopter': return arducopter.fly_ArduCopter(viewerip=opts.viewerip, map=opts.map) if step == 'fly.CopterAVC': return arducopter.fly_CopterAVC(viewerip=opts.viewerip, map=opts.map) if step == 'fly.ArduPlane': return arduplane.fly_ArduPlane(viewerip=opts.viewerip, map=opts.map) if step == 'drive.APMrover2': return apmrover2.drive_APMrover2(viewerip=opts.viewerip, map=opts.map) if step == 'build.All': return build_all() if step == 'build.Binaries': return build_binaries() if step == 'build.DevRelease': return build_devrelease() if step == 'build.Examples': return build_examples() if step == 'build.Parameters': return build_parameters() if step == 'convertgpx': return convert_gpx() raise RuntimeError("Unknown step %s" % step)
def start(self):
    """
    Sniff traffic. Save the text output to check for kernel-dropped packets.
    """
    # NOTE(review): several positional fragments are passed to
    # util.run_cmd; presumably it concatenates them into one shell
    # command string -- confirm against util's signature.
    util.run_cmd('tcpdump -i ', self.config.sniff_iface, ' -vnnxStt -s 96 -w ', self.config.tmp_pcap_file, ' "%s" > /tmp/tcpdump.log 2>&1' % self._filter)
    # Give tcpdump time to start capturing before traffic begins.
    time.sleep(2)
def reload_apache():
    """Reload apache2 if it is active; silently no-op when inactive."""
    try:
        util.run_cmd('service apache2 status', Logger)
    except util.CmdFailedException as ex:
        # exit status 3 == service inactive, nothing to reload
        if ex.retval != 3:
            raise
        return
    util.run_cmd('service apache2 reload', Logger)
def test_cmd(self):
    """run_cmd returns a non-None tuple for both list- and string-form
    commands; last element is the return code."""
    for command in (['ping', 'www.baidu.com', '-c', '3'],
                    "ping www.baidu.com1 -c 3"):
        ret = run_cmd(command)
        self.assertIsNotNone(ret)
        retcode = ret[-1]
        print("retcode:", retcode)
def extract_refs(src_path):
    # The extraction library is python2.7-only, so run the wrapper
    # script as a subprocess and pass results through a temp JSON file.
    dst_path = util.get_tmp_file(suffix='.json')
    cmd = [cfg.extract_refs_script_path, src_path, dst_path]
    util.run_cmd(cmd)
    return util.load_json(dst_path)
def build_gem5_scons(source_dir, build_dir, bin, opt_level, targets):
    """Build gem5 with scons inside source_dir, then copy the resulting
    binary into build_dir once per target name."""
    print(' building: ' + ' '.join(targets))
    os.chdir(source_dir)
    build_cmd = 'scons -j 7 ' + bin
    if util.run_cmd(build_cmd) != 0:
        raise Exception('Unable to build atomic ' + build_cmd)
    for target in targets:
        copy_cmd = 'cp ' + bin + ' ' + build_dir + 'gem5.' + opt_level + '.' + target
        if util.run_cmd(copy_cmd) != 0:
            raise Exception('Copy failure ' + copy_cmd)
def test_instal_simple_package_latest_version(self):
    """Plain install picks the apt-cache Candidate version."""
    out1, return_code1, err1 = run_cmd("apt-cache policy rolldice")
    # `is None`, not `== None` (PEP 8)
    assert err1 is None and return_code1 == 0
    latest_version = re.findall(re.compile("Candidate: (.+)\n"),
                                "\n".join(list(out1)))[0]
    out2, return_code2, err2 = run_cmd("apt-get -y install rolldice")
    assert err2 is None and return_code2 == 0
    out2 = run_cmd("dpkg --status rolldice | grep ^Version")[0]
    assert str(latest_version) in "\n".join(
        list(out2)
    ), "Expect that candidate version and installed version has been matched"
def test_confirm_removing_simple_package(self, full_clean):
    """Interactive `apt-get install pkg-` removes after 'Y' confirmation."""
    out1, return_code1, err1 = run_cmd("apt-get -y install rolldice")
    out2, err2 = run_cmd_with_interaction("apt-get install rolldice-", "Y")
    # `is None` / `is not None`, not `==`/`!= None` (PEP 8)
    assert err2 is None
    assert re.search(
        re.compile(
            "The following packages will be REMOVED:(\s)*\n(\s)*rolldice"),
        out2.decode()) is not None
    out3 = run_cmd("dpkg --status rolldice | grep ^Status")[0]
    assert "is not installed" in "\n".join(
        list(out3)
    ), "Expected 'is not installed' status after confirm removing package"
def test_dump_to_stdout(self, model_name):
    """
    Try dumping json to stdout

    Args:
        options (arrayof str): commandline options for command
        model_name (str): name of file from which to dump json
    """
    model_file = os.path.join(util.MODELS_DIR, model_name)
    self.assertTrue(os.path.exists(model_file))
    cmd = [self.script, model_file]
    print(cmd)
    # Expect a clean exit and stdout that parses as valid JSON.
    util.run_cmd(self, cmd).expect_exit_code(0).expect_stdout(
        lambda o: is_valid_json('\n'.join(o)))
def is_apache_running():
    """Return True iff this OS family's Apache daemon is running.

    Returns None (falsy) when os_version is not a recognized
    distribution, matching the original implicit fall-through.
    """
    # Map distribution family -> process name; removes the duplicated
    # try/except blocks of the original.
    if os_version in ("RedHatEnterpriseServer", "CentOS"):
        process_name = "httpd"
    elif os_version in ("Debian", "Ubuntu"):
        process_name = "apache2"
    else:
        return None
    try:
        util.run_cmd("pgrep -x " + process_name)
    except util.CmdFailedException:
        return False
    return True
def build_release(stats, ver):
    # Release build: clean output dirs, run the nmake release target,
    # save the build log, record binary sizes into `stats`, then build
    # the installer. Mutates `stats` in place; returns None.
    config = "CFG=rel"
    obj_dir = "obj-rel"
    extcflags = "EXTCFLAGS=-DSVN_PRE_RELEASE_VER=%s" % ver
    platform = "PLATFORM=X86"
    shutil.rmtree(obj_dir, ignore_errors=True)
    shutil.rmtree(os.path.join("mupdf", "generated"), ignore_errors=True)
    (out, err, errcode) = run_cmd("nmake", "-f", "makefile.msvc", config, extcflags, platform, "all_sumatrapdf")
    log_path = os.path.join(get_logs_cache_dir(), ver + "_rel_log.txt")
    build_log = out + "\n====STDERR:\n" + err
    build_log = strip_empty_lines(build_log)
    open(log_path, "w").write(build_log)
    stats.rel_build_log = ""
    stats.rel_failed = False
    if errcode != 0:
        # Keep the full build log on stats only when the build failed.
        stats.rel_build_log = build_log
        stats.rel_failed = True
        return
    stats.rel_sumatrapdf_exe_size = file_size_in_obj("SumatraPDF.exe")
    stats.rel_sumatrapdf_no_mupdf_exe_size = file_size_in_obj("SumatraPDF-no-MuPDF.exe")
    stats.rel_libmupdf_dll_size = file_size_in_obj("libmupdf.dll")
    stats.rel_nppdfviewer_dll_size = file_size_in_obj("npPdfViewer.dll")
    stats.rel_pdffilter_dll_size = file_size_in_obj("PdfFilter.dll")
    stats.rel_pdfpreview_dll_size = file_size_in_obj("PdfPreview.dll")
    build_installer_data(obj_dir)
    run_cmd_throw("nmake", "-f", "makefile.msvc", "Installer", config, platform, extcflags)
    p = os.path.join(obj_dir, "Installer.exe")
    stats.rel_installer_exe_size = file_size(p)
def build_parameters():
    """Run the param_parse.py script; return True on success."""
    print("Running param_parse.py")
    script = util.reltopdir("Tools/autotest/param_metadata/param_parse.py")
    if util.run_cmd(script, dir=util.reltopdir(".")) == 0:
        return True
    print("Failed param_parse.py")
    return False
def build_examples():
    """Run the build_examples.sh script; return True on success."""
    print("Running build_examples.sh")
    script = util.reltopdir("Tools/scripts/build_examples.sh")
    if util.run_cmd(script, dir=util.reltopdir(".")) == 0:
        return True
    print("Failed build_examples.sh")
    return False
def build_parameters():
    '''Run the param_parse.py script; return True on success.'''
    print("Running param_parse.py")
    script = util.reltopdir('Tools/autotest/param_metadata/param_parse.py')
    if util.run_cmd(script, dir=util.reltopdir('.')) == 0:
        return True
    print("Failed param_parse.py")
    return False
def get_instance_count(self, service, namespace="default"):
    """Return the deployment's replica count (column 4 of `kubectl get
    deployment`) as a float, or 0 when the command yields no output."""
    cmd = ("kubectl get deployment " + service.name +
           " | tail -n +2 | awk '{print $4}'")
    output = util.run_cmd(cmd, self.timeout)
    return float(output) if output != "" else 0
def is_vs2008():
    """Return True iff vcbuild.exe runs successfully (VS2008 toolchain).

    vcbuild.exe no longer exists for VS2010 and later.
    """
    try:
        (out, err, errcode) = util.run_cmd("vcbuild", "/help")
        return errcode == 0
    except Exception:  # was a bare except: vcbuild not on PATH
        return False
def test_email(self):
    """End-to-end check: send a mail through the local SMTP server and
    verify subject, body and both attachments appear in /var/mail."""
    # given
    mailbox_dir = '/var/mail'
    random_string = str(uuid.uuid4())
    subject = 'Test Mail ' + random_string
    msg_body = 'Message body: ' + random_string
    attachments = [('attachment1-name-' + random_string, 'attachment1-body'),
                   ('attachment2-name-' + random_string, 'attachment2-body')]
    # when
    util.send_email(
        smtp_server_addr='localhost',
        sender='root@localhost',
        recipients=['root@localhost'],
        subject=subject,
        message=msg_body,
        attachments=attachments)
    # then
    time.sleep(2)  # give the local MTA time to deliver
    self.assertTrue(os.listdir(mailbox_dir))
    # Most recently modified file in /var/mail is assumed to be the
    # delivered mailbox -- NOTE(review): confirm no other mail traffic.
    mailbox_file_name = util.run_cmd('ls -Art /var/mail| tail -n 1')
    mail_file = mailbox_dir + '/' + mailbox_file_name
    mails = open(mail_file).read()
    self.assertTrue('From: root@localhost' in mails)
    self.assertTrue('To: root@localhost' in mails)
    self.assertTrue('Subject: ' + subject in mails)
    self.assertTrue(msg_body in mails)
    self.assertTrue('filename=\\"{}\\"";'.format(attachments[0][0]) in mails)
    self.assertTrue('filename=\\"{}\\"";'.format(attachments[1][0]) in mails)
def __init__(self):
    """Record the build date and current git HEAD; start with empty
    test and file lists."""
    self.date = time.asctime()
    head_cmd = 'git rev-parse HEAD'
    self.githash = util.run_cmd(head_cmd, output=True,
                                dir=util.reltopdir('.')).strip()
    self.tests = []
    self.files = []
def build_all():
    '''Run the build_all.sh script; return True on success.'''
    print("Running build_all.sh")
    script = util.reltopdir('Tools/scripts/build_all.sh')
    if util.run_cmd(script, dir=util.reltopdir('.')) == 0:
        return True
    print("Failed build_all.sh")
    return False
def verify_not_tagged_yet(ver): out, err, errcode = run_cmd( "svn", "info", "https://sumatrapdf.googlecode.com/svn/tags/%srel" % ver) #print("out: '%s'\nerr:'%s'\nerrcode:%d" % (out, err, errcode)) assert errcode == 1, "out: '%s'\nerr:'%s'\nerrcode:%d" % (out, err, errcode)
def build_examples():
    '''Run the build_examples.sh script; return True on success.'''
    print("Running build_examples.sh")
    script = util.reltopdir('Tools/scripts/build_examples.sh')
    if util.run_cmd(script, dir=util.reltopdir('.')) == 0:
        return True
    print("Failed build_examples.sh")
    return False
def layout():
    # Apply a saved tmux pane layout chosen by environment and pane count.
    # The following command does not work well
    # select-layout tiled => Always splits a window vertically first, so does
    # not save the direction.
    count, w, h = util.display(['window_panes', 'client_width', 'client_height'])
    env = util.harukam_env()
    # env -> {pane count -> serialized tmux layout string}. The strings
    # are opaque `tmux list-windows` layout dumps captured per machine.
    m = {
        "HOME_DESKTOP": {
            4: "0c9d,272x64,0,0{136x64,0,0[136x32,0,0,14,136x31,0,33,17],135x64,137,0[135x32,137,0,15,135x31,137,33,16]}",
            6: "b6cc,272x64,0,0{90x64,0,0[90x32,0,0,6,90x31,0,33,4],90x64,91,0[90x32,91,0,7,90x31,91,33,2],90x64,182,0[90x32,182,0,5,90x31,182,33,3]}"
        },
        "HOME_ASUS": {
            4: "90ea,272x62,0,0{136x62,0,0[136x31,0,0,13,136x30,0,32,16],135x62,137,0[135x31,137,0,14,135x30,137,32,15]}",
            6: "188a,272x62,0,0{90x62,0,0[90x31,0,0,7,90x30,0,32,10],90x62,91,0[90x31,91,0,8,90x30,91,32,11],90x62,182,0[90x31,182,0,9,90x30,182,32,12]}"
        },
        "HOME_MAC": {
            4: "49f3,238x54,0,0{119x54,0,0[119x27,0,0,13,119x26,0,28,16],118x54,120,0[118x27,120,0,14,118x26,120,28,15]}",
            6: "9de9,238x54,0,0{78x54,0,0[78x27,0,0,7,78x26,0,28,10],78x54,79,0[78x27,79,0,8,78x26,79,28,11],80x54,158,0[80x27,158,0,9,80x26,158,28,12]}"
        },
        "OFFICE": {
            4: "93cb,245x79,0,0{122x79,0,0[122x39,0,0,13,122x39,0,40,16],122x79,123,0[122x39,123,0,14,122x39,123,40,15]}",
            6: "dddc,245x79,0,0{81x79,0,0[81x39,0,0,7,81x39,0,40,10],81x79,82,0[81x39,82,0,8,81x39,82,40,11],81x79,164,0[81x39,164,0,9,81x39,164,40,12]}"
        }
    }
    # Bail out (exit 0) rather than raise for unsupported configurations.
    util.exit0_ifnot(env, "no env")
    util.exit0_ifnot(env in m, "unknown env: " + env)
    util.exit0_ifnot(count in m[env], "not supported count: " + str(count))
    layout = m[env][count]
    exitcode, _ = util.run_cmd(['tmux', 'select-layout', layout])
    util.exit0_ifnot(exitcode == 0, "select-layout failed")
def parse_pkt(self, pkt_func): """ Loops to parse output from tcpdump. An example would be: [recvd_time ] [ ] <- (flow_id + pktgen.MIN_PORT) 1329098408.055825 IP 192.168.1.20.10007 > 192.168.1.1.9: UDP, length 22 0x0000: 4500 0032 066e 0000 2011 10e8 c0a8 0114 <- ignore 0x0010: c0a8 0101 2717 0009 001e 0000 be9b e955 <- ignore 0x0020: 0000 066f 4f38 6ea6 000e 4402 0000 0000 [seq_num] [tvsec ] [tvusec ] ... the rest of the lines can be ignored Each time a new packet arrives, invokes the pkt_func callback function. The pkt_func should have arguments (flow_id, seq_number, sent_time, recvd_time). This allows users to handle incoming packets, based on these four parameters, accordingly. """ # Initialize fields to extract. recvd_time = flow_id = seq_num = tvsec = tvusec = None # Regex applied on udp header to extract recvd_time and flow_id. regex_udp = re.compile('(\d+\.\d+) IP .*\.(\d+) >') # Regex applied on the pktgen payload. regex_pktgen = re.compile('0x0020:\s+(.{10})(.{10})(.{10})') # Parse with tcpdump -r p_tcpdump = util.run_cmd('tcpdump -nnxStt -r ', self.config.tmp_pcap_file, stdout=subprocess.PIPE) for line in p_tcpdump.stdout: re_udp = regex_udp.search(line) if re_udp: recvd_time = float(re_udp.group(1)) flow_id = int(re_udp.group(2)) - pktgen.Pktgen.MIN_PORT continue re_pktgen = regex_pktgen.search(line) if re_pktgen: # Here, the seq_num is a global value. We need to convert it to # a per-flow sequence number. seq_num = util.hex_to_int(re_pktgen.group(1)) seq_num = seq_num / self.config.flow_count # Convert the recvd timestamp to float. tvsec = util.hex_to_int(re_pktgen.group(2)) tvusec = util.hex_to_int(re_pktgen.group(3)) sent_time = tvsec + tvusec / 1000000.0 # We should have obtained all necessary fields to form a packet. assert None not in (recvd_time, flow_id) pkt_func(flow_id, seq_num, sent_time, recvd_time) # Reset all fields. recvd_time = flow_id = seq_num = tvsec = tvusec = None
def run_premake(action="vs2010"):
    """Run premake4 with the given action.

    Returns None on success, or an error-message string (combined
    stdout+stderr on non-zero exit, or a hint when premake4 is missing).
    """
    try:
        (out, err, errcode) = util.run_cmd("premake4", action)
        if errcode != 0:
            return out + err
    except Exception:  # was a bare except: premake4 not on PATH
        return "premake4.exe not in %PATH%"
    return None
def build_clean(ver):
    # Clean release build: wipe obj-rel and generated mupdf sources,
    # then run the nmake release target. Build output is discarded.
    config = "CFG=rel"
    obj_dir = "obj-rel"
    extcflags = "EXTCFLAGS=-DSVN_PRE_RELEASE_VER=%s" % str(ver)
    platform = "PLATFORM=X86"
    shutil.rmtree(obj_dir, ignore_errors=True)
    shutil.rmtree(os.path.join("mupdf", "generated"), ignore_errors=True)
    (out, err, errcode) = util.run_cmd("nmake", "-f", "makefile.msvc", config, extcflags, platform, "all_sumatrapdf")
def build_mac():
    # Run the mac build script; on failure, print the error and try to
    # suggest a fix for known missing-dependency messages, then exit.
    (out, err, errcode) = util.run_cmd("./build.sh")
    if errcode != 0:
        print_error(out, err, errcode)
        # trying to be helpful and tell user how to resolve specific problems
        # TODO: also detect lack of pcre
        if "No package 'liblzma' found" in err:
            fatal("\nIf you're using homebrew, you need to install xz package to get liblzma\nRun: brew install xz")
        sys.exit(1)
def verify_efi_present():
    """Exit the process unless a working efi.exe is found on %PATH%."""
    try:
        (out, err, errcode) = util.run_cmd("efi.exe")
    except Exception:  # was a bare except: efi.exe missing / not runnable
        print("Must have efi.exe in the %PATH%!!!")
        sys.exit(1)
    # Sanity-check the tool actually printed its usage banner.
    if "Usage:" not in out:
        print("efi.exe created unexpected output:\n%s" % out)
        sys.exit(1)
def convert_gpx():
    '''convert any tlog files to GPX and KML'''
    import glob
    mavlog = glob.glob("buildlogs/*.tlog")
    for m in mavlog:
        util.run_cmd(util.reltopdir("../mavlink/pymavlink/tools/mavtogpx.py") + " --nofixcheck " + m)
        gpx = m + '.gpx'
        kml = m + '.kml'
        # checkfail=False: conversion tools may be absent; best-effort only.
        util.run_cmd('gpsbabel -i gpx -f %s -o kml,units=m,floating=1,extrude=1 -F %s' % (gpx, kml), checkfail=False)
        util.run_cmd('zip %s.kmz %s.kml' % (m, m), checkfail=False)
        util.run_cmd("mavflightview.py --imagefile=%s.png %s" % (m,m))
    return True
def convert_gpx():
    """convert any tlog files to GPX and KML"""
    import glob
    mavlog = glob.glob(util.reltopdir("../buildlogs/*.tlog"))
    for m in mavlog:
        util.run_cmd(util.reltopdir("../mavlink/pymavlink/tools/mavtogpx.py") + " --nofixcheck " + m)
        gpx = m + ".gpx"
        kml = m + ".kml"
        # checkfail=False: conversion tools may be absent; best-effort only.
        util.run_cmd("gpsbabel -i gpx -f %s -o kml,units=m,floating=1,extrude=1 -F %s" % (gpx, kml), checkfail=False)
        util.run_cmd("zip %s.kmz %s.kml" % (m, m), checkfail=False)
        util.run_cmd(util.reltopdir("../MAVProxy/tools/mavflightview.py") + " --imagefile=%s.png %s" % (m, m))
    return True
def build_all():
    '''run the build_all.sh script, optionally in incremental mode.

    Returns True on success, False on failure.
    '''
    print("Running build_all.sh")
    flags = ""
    if opts.incremental:  # was `== True`; truthiness is the idiom
        print("Building for incremental")
        flags = "-i"
    if util.run_cmd(util.reltopdir('Tools/scripts/build_all.sh') + " " + flags,
                    dir=util.reltopdir('.')) != 0:
        print("Failed build_all.sh")
        return False
    return True
def main():
    # Analyze build: clean output dirs, run nmake with WITH_ANALYZE=yes,
    # and pretty-print the analyzer warnings from the build output.
    verify_started_in_right_directory()
    config = "CFG=rel"
    obj_dir = "obj-rel"
    ver = "1000" # doesn't matter what version we claim
    extcflags = "EXTCFLAGS=-DSVN_PRE_RELEASE_VER=%s" % ver
    platform = "PLATFORM=X86"
    shutil.rmtree(obj_dir, ignore_errors=True)
    shutil.rmtree(os.path.join("mupdf", "generated"), ignore_errors=True)
    (out, err, errcode) = run_cmd("nmake", "-f", "makefile.msvc", "WITH_ANALYZE=yes", config, extcflags, platform, "all_sumatrapdf")
    pretty_print_errors(out)
def checkpath(path, srcpath=None):
    """Run the check command for `path`; parse, expand, and remap its output.

    Returns the remapped result as a list. A non-zero exit status is
    logged as critical but parsing still proceeds (preserves original
    behavior). NOTE(review): `srcpath` is unused here beyond appearing
    in the locals() debug dump -- confirm whether it is still needed.
    """
    LOG.debug('%s', locals())
    # Build the command once (was computed twice: once for logging,
    # once for execution).
    cmd = get_cmd(path)
    LOG.debug(cmd)
    status, lines = run_cmd(cmd)
    LOG.debug('=> status=%s, lines=%s', status, lines)
    if status != 0:  # was `status not in (0,)`
        LOG.critical('=> error status %s', status)
    parsed = parse(lines)
    exp = expand(parsed)
    out = remap(exp)
    out = list(out)
    LOG.debug('=> parse=%s', out)
    return out
def build_devrelease():
    '''Run the build_devrelease.sh script; return True on success.'''
    print("Running build_devrelease.sh")
    import shutil
    # The script switches git branches, which could rewrite it mid-run,
    # so execute a copy instead of the checked-out original.
    orig = util.reltopdir('Tools/scripts/build_devrelease.sh')
    copy = util.reltopdir('./build_devrelease.sh')
    shutil.copyfile(orig, copy)
    shutil.copymode(orig, copy)
    if util.run_cmd(copy, dir=util.reltopdir('.')) == 0:
        return True
    print("Failed build_devrelease.sh")
    return False
def check_logs(step):
    '''check for log files from a step'''
    print("check step: ", step)
    # Only fly.* and drive.* steps produce flight logs worth collecting.
    if step.startswith('fly.'):
        vehicle = step[4:]
    elif step.startswith('drive.'):
        vehicle = step[6:]
    else:
        return
    # Move captured dataflash logs into buildlogs, prefixed by vehicle.
    logs = glob.glob("logs/*.BIN")
    for log in logs:
        bname = os.path.basename(log)
        newname = "buildlogs/%s-%s" % (vehicle, bname)
        print("Renaming %s to %s" % (log, newname))
        os.rename(log, newname)
    # Preserve any core dump (and matching ELF binaries) for post-mortem.
    corefile = "core"
    if os.path.exists(corefile):
        newname = "buildlogs/%s.core" % vehicle
        print("Renaming %s to %s" % (corefile, newname))
        os.rename(corefile, newname)
        util.run_cmd('/bin/cp A*/A*.elf ../buildlogs', dir=util.reltopdir('.'))
def git_fetch(repo_path, ref, sha):
    '''
    Run `git fetch` inside a local git repository.
    '''
    jlogger.info('Fetching in ' + repo_path)
    try:
        found_sha = get_ref_sha(repo_path, ref)
    except RuntimeError:
        #
        # Account for a missing ref by performing a complete fetch.
        #
        jlogger.debug('Complete fetch in '+repo_path)
        run_cmd(('git', 'fetch'), repo_path)
        found_sha = get_ref_sha(repo_path, ref)
    if sha != found_sha:
        run_cmd(('git', 'fetch'), repo_path)
    else:
        jlogger.debug('Skipping fetch in '+repo_path)
    touch(repo_path)
def build_binaries():
    """Run the build_binaries.sh script; return True on success."""
    print("Running build_binaries.sh")
    import shutil
    # The script switches git branches, which could rewrite it mid-run,
    # so execute a copy instead of the checked-out original.
    orig = util.reltopdir("Tools/scripts/build_binaries.sh")
    copy = util.reltopdir("./build_binaries.sh")
    shutil.copyfile(orig, copy)
    shutil.copymode(orig, copy)
    if util.run_cmd(copy, dir=util.reltopdir(".")) == 0:
        return True
    print("Failed build_binaries.sh")
    return False
def build_analyze(stats, ver):
    # Build with MSVC static analysis enabled; record analyzer stdout on
    # `stats` and save the combined stdout/stderr log to the cache dir.
    config = "CFG=rel"
    obj_dir = "obj-rel"
    extcflags = "EXTCFLAGS=-DSVN_PRE_RELEASE_VER=%s" % ver
    platform = "PLATFORM=X86"
    shutil.rmtree(obj_dir, ignore_errors=True)
    shutil.rmtree(os.path.join("mupdf", "generated"), ignore_errors=True)
    (out, err, errcode) = run_cmd("nmake", "-f", "makefile.msvc", "WITH_ANALYZE=yes", config, extcflags, platform, "all_sumatrapdf")
    stats.analyze_out = out
    log_path = os.path.join(get_logs_cache_dir(), ver + "_analyze_log.txt")
    s = out + "\n====STDERR:\n" + err
    open(log_path, "w").write(strip_empty_lines(s))
def run_ag_and_verify_results(cmd_info):
    # Run ag with the given arguments and compare normalized stdout
    # against the expected output; fatal() on any error or mismatch.
    args = [ag_exe_path()] + cmd_info.cmd.split()
    (stdout, stderr, errcmd) = util.run_cmd(*args)
    if errcmd != 0:
        fatal("Error %d. Stdout:\n'%s'\n Stderr:\n'%s'\n" % (errcmd, stdout, stderr))
    if stderr != "":
        fatal("Non-empty stderr. Stdout:\n'%s'\n Stderr:\n'%s'\n" % (stdout, stderr))
    # TODO: don't know why there's 0 at the end of stdout, so strip it
    if len(stdout) > 0 and stdout[-1] == chr(0):
        stdout = stdout[:-1]
    # Normalize both sides and drop a single trailing newline before compare.
    result = util.normalize_str(stdout)
    if len(result) > 0 and result[-1] == '\n':
        result = result[:-1]
    expected = util.normalize_str(cmd_info.expected)
    if result != expected:
        fatal("Unexpected value. Stdout:\n'%s'\nExpected:\n'%s'\n" % (result, expected))
def convert_gpx():
    '''convert any mavlog files to GPX and KML'''
    import glob
    mavlog = glob.glob(util.reltopdir("../buildlogs/*.mavlog"))
    for m in mavlog:
        util.run_cmd(util.reltopdir("../mavlink/pymavlink/examples/mavtogpx.py") + " --nofixcheck " + m)
        gpx = m + '.gpx'
        kml = m + '.kml'
        # checkfail=False: conversion tools may be absent; best-effort only.
        util.run_cmd('gpsbabel -i gpx -f %s -o kml,units=m,floating=1,extrude=1 -F %s' % (gpx, kml), checkfail=False)
        util.run_cmd('zip %s.kmz %s.kml' % (m, m), checkfail=False)
    return True
def execute(self):
    """Verify the local public key is in authorized_keys and that
    authorized_keys contains no invalid or broken entries.

    Appends a description to self.error_message and returns False on
    the first failure; returns True when both checks pass.
    """
    pub_key = capture('cat ~/.ssh/id_rsa.pub').replace('\n','')
    grep_cmd = "grep -F \"%s\" ~/.ssh/authorized_keys > /dev/null 2>&1" % pub_key
    if run_cmd(grep_cmd) != 0:
        self.error_message+=" Error: ~/.ssh/id_rsa.pub not found in ~/.ssh/authorized_keys.\n"
        return False
    validate_cmd = "awk '{if ($1!=\"ssh-dss\" && $1!=\"ssh-rsa\" || NF <= 1) print $0}' ~/.ssh/authorized_keys"
    if capture(validate_cmd):
        self.error_message+=" Error: ~/.ssh/authorized_keys includes invalid or broken key(s).\n"
        return False
    return True