def setup_go(targ):
  """Set up go-specific stuff.

  Clones the gofrontend repo (if not already present), drops a
  .clang-format file into it, then symlinks gcc/go/gofrontend and the
  libgo contents in the 'targ' tree at the gofrontend checkout.
  """
  if os.path.exists("gofrontend"):
    u.verbose(0, "... 'gofrontend' already exists, skipping clone")
    return
  docmd("git clone https://go.googlesource.com/gofrontend")
  dochdir("gofrontend")
  # Seed the checkout with the canned clang-format settings.
  try:
    with open("./.clang-format", "w") as wf:
      wf.write(clang_format_contents)
      wf.write("\n")
  except IOError:
    u.error("open/write failed for .clang-format")
  dochdir("..")
  dochdir(targ)
  # Replace in-tree gofrontend/libgo with links into the fresh clone.
  docmd("rm -rf gcc/go/gofrontend")
  docmd("ln -s ../../../gofrontend/go gcc/go/gofrontend")
  docmd("rm -rf libgo")
  docmd("mkdir libgo")
  if flag_dryrun:
    u.verbose(0, "for f in GOFRONTEND/libgo/*; "
              "do ln -s $f libgo/`basename $f`; done")
  else:
    libgo = "../gofrontend/libgo"
    for item in os.listdir(libgo):
      docmd("ln -s ../../gofrontend/libgo/%s libgo/%s" % (item, item))
  dochdir("..")
def parse_env_options():
  """Parse wrapper options supplied via GOLLVM_WRAP_OPTIONS."""
  global flag_echo, flag_dryrun, flag_nollvm, flag_trace_llinvoc
  optstr = os.getenv("GOLLVM_WRAP_OPTIONS")
  if not optstr:
    return
  try:
    optlist, _ = getopt.getopt(optstr.split(), "detDG")
  except getopt.GetoptError as err:
    # unrecognized option
    usage(str(err))
  for opt, _ in optlist:
    if opt == "-d":
      u.increment_verbosity()
    elif opt == "-D":
      flag_dryrun = True
    elif opt == "-G":
      flag_nollvm = True
    elif opt == "-e":
      flag_echo = True
    elif opt == "-t":
      flag_trace_llinvoc = True
  u.verbose(1, "env var options parsing complete")
def mksnap_subcommand(volname, snapname):
  """Snapshot an existing BTRFS subvolume or snapshot."""
  # Locate the btrfs root for the current dir.
  ssdroot = u.determine_btrfs_ssdroot(os.getcwd())
  u.verbose(1, "ssdroot=%s" % ssdroot)
  # Normalize both names relative to the root.
  volname = normalize(ssdroot, volname)
  snapname = normalize(ssdroot, snapname)
  srcpath = "%s/%s" % (ssdroot, volname)
  # Source volume must exist.
  if not os.path.exists(srcpath):
    u.error("unable to locate existing subvolume %s" % srcpath)
  # Destination must not already exist.
  destpath = "%s/%s" % (ssdroot, snapname)
  if os.path.exists(destpath):
    u.error("path %s already exists -- can't create" % destpath)
  # Take the snapshot and fix up ownership/permissions.
  u.docmd("sudo btrfs subvolume snapshot %s %s" % (srcpath, destpath))
  repair(destpath)
  sys.stderr.write("... new snapshot %s created\n" % destpath)
def perform():
  """Main driver routine.

  Runs every requested (or enabled) build, then prints a pass/fail
  summary.
  """
  save_environment()
  munge_makefiles_if_needed()
  # Explicit target list takes precedence; otherwise run all enabled builds.
  if flag_targets:
    allbuilds = flag_targets
  else:
    builds = []
    for abuild, val in available_builds.iteritems():
      if val:
        builds.append(abuild)
    allbuilds = sorted(builds)
  passed = []
  failed = []
  for build_item in allbuilds:
    u.verbose(0, "starting build for '%s'" % build_item)
    rc = perform_build(build_item)
    if rc != 0:
      if flag_exit_on_err:
        # NOTE(review): this only *reports* an early exit -- the loop still
        # proceeds to the next build item; confirm whether a real break/exit
        # was intended here.
        u.verbose(0, "early exit due to build failure")
      failed.append(build_item)
    else:
      passed.append(build_item)
  print "Summary of results:"
  if passed:
    print "passed: %s" % " ".join(passed)
  if failed:
    print "failed: %s" % " ".join(failed)
def grab_current_sha():
  """Record the current HEAD sha in the global current_sha."""
  global current_sha
  out = u.docmdlines("git log --no-abbrev-commit --pretty=oneline -1")
  # First whitespace-separated field of the one-line log is the sha.
  current_sha = out[0].split()[0]
  u.verbose(1, "current sha: %s" % current_sha)
def process_commit(idx, branchname, githash, comment):
  """Process a commit by hash.

  Writes a per-commit file under /tmp containing the comment, the
  'git log --name-only' summary, and the full diff against the parent
  commit. Appends the file name to files_emitted.
  """
  tag = ""
  if flag_tag:
    tag = ".tag=%s" % flag_tag
  fn = "/tmp/item%d.branch%s%s.commit%s.txt" % (idx, branchname, tag, githash)
  if flag_dryrun:
    u.verbose(0, "<dryrun: emit diff for "
              "%s^ %s to %s>" % (githash, githash, fn))
    return
  files_emitted.append(fn)
  try:
    outf = open(fn, "w")
  except IOError as e:
    # u.error is fatal, so outf is never used uninitialized below.
    u.error("unable to open %s: %s" % (fn, e.strerror))
  outf.write("// comment: %s\n" % comment)
  outf.write("//\n")
  # Header: one-commit log with the list of touched files.
  lines = u.docmdlines("git log --name-only -1 %s" % githash)
  if not lines:
    u.error("empty output from 'git log --name-only -1 %s'" % githash)
  for line in lines:
    outf.write(line)
    outf.write("\n")
  outf.write("--------------------------------------------------------------\n")
  # Body: diff against the parent commit.
  lines = u.docmdlines("git diff %s^ %s" % (githash, githash))
  if not lines:
    u.error("empty output from 'git diff %s^ %s'" % (githash, githash))
  for line in lines:
    outf.write(line)
    outf.write("\n")
  outf.close()
  u.verbose(1, "wrote %d diff lines to %s" % (len(lines), fn))
def save_temps(tfile, revision):
  """Save copy of temp file."""
  # Stash the temp file under /tmp tagged with the revision number.
  dest = "/tmp/R%d.%s" % (revision, scrub_filename(flag_target_file))
  u.docmd("cp %s %s" % (tfile.name, dest))
  u.verbose(0, "... saved revision %d copy of "
            "%s into %s" % (revision, flag_target_file, dest))
def collect_files():
  """Collect files of interest from src."""
  exceptions = read_exceptions()
  # Keep only names that match the file regex and are not excepted.
  selected = [item for item in os.listdir(flag_source_dir)
              if file_regex.match(item) and item not in exceptions]
  files_to_examine.extend(selected)
  u.verbose(1, "found %d items in src dir" % len(files_to_examine))
def parse_args():
  """Command line argument parsing.

  Options: -d bump verbosity, -D dest dir, -S source dir,
  -C disable dry-run (actually copy), -R swap source and dest.
  """
  global flag_dryrun, flag_reverse
  global flag_source_dir, flag_dest_dir
  try:
    optlist, _ = getopt.getopt(sys.argv[1:], "dD:S:CR")
  except getopt.GetoptError as err:
    # unrecognized option
    usage(str(err))
  for opt, arg in optlist:
    if opt == "-d":
      u.increment_verbosity()
    elif opt == "-D":
      flag_dest_dir = arg
    elif opt == "-S":
      flag_source_dir = arg
    elif opt == "-C":
      # -C turns off dry-run mode.
      flag_dryrun = False
    elif opt == "-R":
      flag_reverse = True
  # Both dirs must exist and be directories.
  if not os.path.exists(flag_source_dir):
    usage("source dir %s does not exist" % flag_source_dir)
  if not os.path.isdir(flag_source_dir):
    usage("source dir %s is not a directory" % flag_source_dir)
  if not os.path.exists(flag_dest_dir):
    usage("dest dir %s does not exist" % flag_dest_dir)
  if not os.path.isdir(flag_dest_dir):
    usage("dest dir %s is not a directory" % flag_dest_dir)
  # -R reverses the direction of the copy.
  if flag_reverse:
    flag_source_dir, flag_dest_dir = flag_dest_dir, flag_source_dir
  u.verbose(1, "src dir: %s" % flag_source_dir)
  u.verbose(1, "dst dir: %s" % flag_dest_dir)
def parse_args():
  """Command line argument parsing.

  Options: -h help, -d verbosity, -D dry run, -S skip cleanup,
  -F add explicit cmake file. Requires a single positional mode
  argument, 'pre' or 'post'.
  """
  global flag_dryrun, flag_mode, flag_noclean
  try:
    optlist, args = getopt.getopt(sys.argv[1:], "hdDSF:")
  except getopt.GetoptError as err:
    # unrecognized option
    usage(str(err))
  for opt, arg in optlist:
    if opt == "-d":
      u.increment_verbosity()
    elif opt == "-h":
      usage()
    elif opt == "-D":
      flag_dryrun = True
    elif opt == "-S":
      flag_noclean = True
    elif opt == "-F":
      # Explicitly-listed cmake file; must exist.
      if os.path.exists(arg):
        u.verbose(0, "adding %s to cmakefiles dict" % arg)
        cmakefiles[arg] = 1
      else:
        u.error("-F arg %s doesn't seem to exist" % arg)
  # Check for mode
  if len(args) != 1:
    usage("supply a single mode argument (either 'pre' or 'post')")
  if args[0] == "pre":
    flag_mode = "pre"
  elif args[0] == "post":
    flag_mode = "post"
  else:
    usage("unknown mode argument %s" % args[0])
def grab_addr_from_symtab(func, tgt):
  """Grab starting address and size from ELF symtab or dynsym.

  Scans objdump -t (and -T, when tgt has a dynamic section) output for
  'func'. Returns (start, end) addresses, or (0, 0) if the symbol is
  not found or its address/size fields are malformed.
  """
  flavs = ["-t"]
  if has_dynamic_section(tgt):
    flavs.append("-T")
  staddr = 0
  enaddr = 0
  for flav in flavs:
    u.verbose(1, "looking for %s in output of "
              "objdump %s %s" % (func, flav, tgt))
    lines = u.docmdlines("objdump %s %s" % (flav, tgt))
    hexstaddr = None
    hexsize = None
    for line in lines:
      hexstaddr, hexsize = grabaddrsize(line, func)
      if hexstaddr:
        break
    if not hexstaddr:
      # Not in this table; try the next flavor (if any).
      continue
    try:
      staddr = int(hexstaddr, 16)
      size = int(hexsize, 16)
      enaddr = staddr + size
    except ValueError:
      u.verbose(0, "... malformed staddr/size (%s, %s) "
                "for %s, skipping" % (hexstaddr, hexsize, func))
      return 0, 0
  return staddr, enaddr
def collect_ranged_items(lm, dies):
  """Collect items that have start/end ranges.

  Walks the DIE table 'dies' (offset -> lines) and returns a list of
  (name, lo, hi) tuples for DIEs carrying an explicit PC range. DIEs
  that reference a DWARF range list instead are queued and resolved by
  postprocess_rangerefs.
  """
  results = []
  # Ranges refs (stored as decimal offset)
  rlrefs = defaultdict(list)
  for off, lines in dies.items():
    _, tag, attrs = expand_die(lines)
    # Does it have a PC range?
    lodec, hidec = get_pc_range(attrs)
    if lodec != -1 and hidec != -1:
      # Success. Call a helper to collect more info, and add to list
      name = collect_die_nametag(attrs, off, tag, dies)
      tup = (name, lodec, hidec)
      results.append(tup)
      continue
    # Reference to range list? Store for later processing if so
    rlref = grab_hex_attr(attrs, "DW_AT_ranges")
    if rlref != -1:
      u.verbose(1, "queued rref=%x tag=%s off=%x in rlrefs" % (rlref, tag, off))
      tup = (attrs, off, tag)
      rlrefs[rlref].append(tup)
  if rlrefs:
    results = postprocess_rangerefs(lm, rlrefs, dies, results)
  return results
def examinefile(filename):
  """Perform symbol analysis on specified file.

  Resolves 'filename' against all_loadmodules (falling back to a
  cwd-relative full path), then examines its shared-library deps and
  recursively visits newly discovered dependency paths.
  """
  u.verbose(2, "examinefile(%s)" % filename)
  if filename not in all_loadmodules:
    fullpath = os.path.join(os.getcwd(), filename)
    if fullpath in all_loadmodules:
      filename = fullpath
    else:
      u.warning("unable to visit %s (not "
                "in %s out)" % (filename, flag_filemode))
      return
  # Value 1 in all_loadmodules appears to mark an already-processed
  # module (set below and in the worklist loop).
  if all_loadmodules[filename] == 1:
    return
  if not in_symbols_dir(filename):
    u.warning("%s: does not appear to be in "
              "%s/symbols directory? skipping" % (filename, apo))
    return
  soname = examine_deps(filename)
  if not soname:
    all_loadmodules[filename] = 1
    return
  # Queue up unvisited dependency paths, marking them visited first.
  worklist = []
  ddict = depends[soname]
  for dep in ddict:
    pdict = base_to_paths[dep]
    for path in pdict:
      if path in all_loadmodules and all_loadmodules[path] == 0:
        all_loadmodules[path] = 1
        worklist.append(path)
  for item in worklist:
    examine_deps(item)
def grabaddrsize(line, func):
  """Grab address and size from objdump line if sym matches.

  Returns (hexaddr, hexsize) strings when the line's symbol name is
  'func', otherwise (None, None).
  """
  #
  # ELF symtab examples:
  #
  # 000000000052b410 l F .text 0000000000000010 .hidden stat64
  # 000000000000e990 g F .text 0000000000000008 _Unwind_SetIP
  #
  # Dynamic table examples:
  #
  # 000000000000e990 g DF .text 0000000000000008 GCC_3.0 _Unwind_SetIP
  # 0000000000520c70 g DF .text 0000000000000043 Base sinl
  #
  patterns = [re.compile(r"^(\S+)\s.+\.text\s+(\S+)\s+(\S+)$"),
              re.compile(r"^(\S+)\s.+\.text\s+(\S+)\s+\S+\s+(\S+)$")]
  hexstaddr = None
  hexsize = None
  for pat in patterns:
    m = pat.match(line)
    if not m:
      continue
    name = m.group(3)
    u.verbose(2, "=-= name is %s" % name)
    if name == func:
      # Found
      hexstaddr = m.group(1)
      hexsize = m.group(2)
      break
  if hexstaddr and hexsize == "00000000":
    u.warning("warning -- malformed hexsize for func %s" % func)
    hexsize = "4"
  return (hexstaddr, hexsize)
def remove_from_file_if_present(mfile, todel):
  """Remove specified line from makefile if present.

  Copies mfile to mfile.munged, filtering out any line whose stripped
  text equals 'todel'. If such a line was found, the munged copy
  replaces the original and True is returned; otherwise returns False.
  """
  if not os.path.exists(mfile):
    u.error("bad entry in munge makefile table-- %s "
            "does not appear to exist" % mfile)
  mfile_new = "%s.munged" % mfile
  found = False
  u.verbose(2, "examining %s in remove munge" % mfile)
  with open(mfile, "r") as rf:
    with open(mfile_new, "w") as wf:
      lines = rf.readlines()
      linecount = 0
      for line in lines:
        linecount += 1
        sline = line.strip()
        if sline == todel:
          found = True
          u.verbose(2, "found todel %s at line %d "
                    "in %s" % (todel, linecount, mfile))
          continue
        wf.write(line)
  # NOTE(review): when the line is not found, the .munged copy is left
  # on disk -- confirm this is intended.
  if found:
    docmd("mv -f %s %s" % (mfile_new, mfile))
    return True
  return False
def perform():
  """Main driver routine.

  Parses 'usb-devices' output to find the device whose SerialNumber
  matches flag_serial, then issues a reset ioctl to the corresponding
  /dev/bus/usb/BBB/DDD node.
  """
  lines = u.docmdlines("usb-devices")
  # T: lines carry bus/device numbers; S: lines carry the serial.
  dmatch = re.compile(r"^\s*T:\s*Bus\s*=\s*(\d+)\s+.*\s+Dev#=\s*(\d+).*$")
  smatch = re.compile(r"^\s*S:\s*SerialNumber=(.*)$")
  device = None
  found = False
  for line in lines:
    m = dmatch.match(line)
    if m:
      p1 = int(m.group(1))
      p2 = int(m.group(2))
      device = "/dev/bus/usb/%03d/%03d" % (p1, p2)
      u.verbose(1, "setting device: %s" % device)
      continue
    m = smatch.match(line)
    if m:
      ser = m.group(1)
      if ser == flag_serial:
        # NOTE(review): assumes a T: line always precedes the matching
        # S: SerialNumber line; 'device' would still be None otherwise.
        u.verbose(1, "matched serial, invoking reset")
        issue_ioctl_to_device(device)
        found = True
        break
  if not found:
    u.error("unable to locate device with serial number %s" % flag_serial)
def do_setup_cmake(targdir):
  """Run cmake in each of the bin dirs.

  Creates a build.<flavor> dir per cmake flavor under targdir and runs
  cmake in each -- serially, or via a multiprocessing pool when
  flag_parallel is set (and not a dry run).
  """
  dochdir(ssdroot)
  dochdir(targdir)
  pool = None
  if flag_parallel:
    nworkers = len(cmake_flavors)
    pool = multiprocessing.Pool(processes=nworkers)
  results = []
  for flav in cmake_flavors:
    docmd("mkdir build.%s" % flav)
    dochdir("build.%s" % flav)
    emit_rebuild_scripts(flav, targdir)
    cmake_cmd = emit_cmake_cmd_script(flav, targdir)
    if flag_parallel and not flag_dryrun:
      u.verbose(0, "...kicking off cmake for %s in parallel..." % flav)
      builddir = "%s/%s/build.%s" % (ssdroot, targdir, flav)
      r = pool.apply_async(run_cmake, [builddir, cmake_cmd])
      results.append(r)
    else:
      doscmd(cmake_cmd)
    dochdir("..")
  # Harvest async results (loop is a no-op in the serial case).
  nr = len(results)
  rc = 0
  for idx in range(0, nr):
    r = results[idx]
    u.verbose(1, "waiting on result %d" % idx)
    res = r.get(timeout=600)
    if res != 0:
      rc = 1
  if rc:
    u.error("one or more cmake cmds failed")
def emitdump(passname, funcname, looplab, lines):
  """Emit single dump for module/pass or fn/pass.

  Writes 'lines' to a file named "<tag>[:L<looplab>]:<passname>:<ver>"
  in flag_outdir, where tag is funcname or "__module__". Returns the
  dump name, or None when 'lines' is empty.
  """
  u.verbose(2, "emitdump(%s,%s,%s,lines=%d)" % (passname, funcname,
                                                looplab, len(lines)))
  if not lines:
    return
  tag = funcname
  if not funcname:
    tag = "__module__"
  if looplab:
    dump = "%s:L%s:%s" % (tag, looplab, passname)
  else:
    dump = "%s:%s" % (tag, passname)
  # Version each dump so repeated runs of the same pass don't collide.
  dumpver = dumps[dump]
  dumps[dump] += 1
  dumpname = "%s:%d" % (dump, dumpver)
  ofname = os.path.join(flag_outdir, dumpname)
  try:
    with open(ofname, "w") as wf:
      for line in lines:
        wf.write(line)
  except IOError:
    u.error("open failed for %s" % ofname)
  u.verbose(1, "emitted dump %d of %d "
            "lines to %s" % (dumpver, len(lines), ofname))
  # book-keeping
  dumpidx = len(alldumps)
  alldumps.append(dumpname)
  if funcname:
    funcdumps[funcname].append(dumpidx)
  return dumpname
def examine_files():
  """Look at each file, reporting how many were copied."""
  total_copied = sum(examine_file(afile) for afile in files_to_examine)
  if total_copied:
    u.verbose(0, "... %d file(s) copied" % total_copied)
def parse_args():
  """Command line argument parsing.

  Accepts only -d (verbosity). Also verifies adb is available and
  records the connected device flavor in the global 'whichdev'.
  """
  global whichdev
  try:
    optlist, args = getopt.getopt(sys.argv[1:], "d")
  except getopt.GetoptError as err:
    # unrecognized option
    usage(str(err))
  for opt, _ in optlist:
    if opt == "-d":
      u.increment_verbosity()
  if args:
    usage("unrecognized arg")
  # Check to make sure we can run adb
  u.doscmd("which adb")
  # Collect device flavor
  lines = u.docmdlines("whichdevice.sh")
  if len(lines) != 1:
    u.error("unexpected output from whichdevice.sh")
  whichdev = lines[0].strip()
  u.verbose(1, "device: %s" % whichdev)
def emit(v, resdict, voldict, lev):
  """Emit results for volume v and children.

  Writes the disk-usage figure and, if available, the repo-status
  report for 'v' to outf at indent level 'lev', deleting the temp
  files as they are consumed, then recurses into subvolumes.
  """
  u.verbose(1, "emit for volsnap %s" % v)
  apair = resdict[v]
  # Disk space summary
  dutf = apair[0]
  amt = "<unknown>"
  with open(dutf, "r") as rdf:
    lines = rdf.readlines()
    amtl = lines[0].strip()
    a = amtl.split()
    amt = a[0]  # first field of the 'du -sh' line
  os.unlink(dutf)
  indlev(lev)
  outf.write("%s: %s\n" % (v, amt))
  # Repo status summary
  rptf = apair[1]
  if rptf:
    with open(rptf, "r") as rrf:
      lines = rrf.readlines()
      indlev(lev+2)
      outf.write("----------------------------------------\n")
      emitlines(lines, lev+2)
      indlev(lev+2)
      outf.write("----------------------------------------\n")
    os.unlink(rptf)
  # Now any subvolumes
  subdict = voldict[v]
  for sv in sorted(subdict.keys()):
    u.verbose(1, "considering sv %s" % sv)
    if sv in resdict:
      emit(sv, resdict, voldict, lev+4)
def wrapup():
  """Emit stats."""
  if not symbolized_oat_size:
    return
  delta = symbolized_oat_size - orig_oat_size
  if not delta:
    return
  # Report growth as both bytes and a percentage of the original size.
  frac = 100.0 * (1.0 * delta) / (1.0 * orig_oat_size)
  u.verbose(0, "total expansion of %d bytes %f percent "
            "from symbolization" % (delta, frac))
def emit():
  """Emit output DOT."""
  # Default to stdout; switch to the output file when one was requested.
  fh = sys.stdout
  if flag_outfile:
    u.verbose(1, "opening %s" % flag_outfile)
    fh = open(flag_outfile, "w")
  emit_to_file(fh)
def examine_file(filename):
  """Perform symbol analysis on specified file."""
  # Only files under the symbols directory are eligible.
  if in_symbols_dir(filename):
    u.verbose(1, "visiting file %s" % filename)
    examine_sections(filename)
    return
  u.warning("%s: does not appear to be in "
            "%s/symbols directory? skipping" % (filename, apo))
def cleanup():
  """Remove token file, honoring the -S (no-clean) and dry-run flags."""
  if flag_dryrun:
    # Bug fix: previously the dry-run branch only printed and then fell
    # through to the unlink below, deleting the token even in dry-run mode.
    u.verbose(0, "removing %s" % flag_tokenfile)
    return
  if flag_noclean:
    u.verbose(0, "skipping cleanup since -S specified")
    return
  os.unlink(flag_tokenfile)
def setup_binutils():
  """Set up binutils."""
  if os.path.exists("binutils"):
    u.verbose(0, "... 'binutils' already exists, skipping clone")
    return
  # Prefer the github mirror when mirrors were requested.
  if flag_use_mirrors:
    repo = "https://github.com/bminor/binutils-gdb"
  else:
    repo = "git://sourceware.org/git/binutils-gdb.git"
  docmd("git clone --depth 1 %s binutils" % repo)
def disdump(producer):
  """Dump a bitcode file to a .ll file."""
  dumpfile = emitted_path(producer, "ll")
  bcfile = emitted_path(producer, "bc")
  rc = docmdnf("%s %s -o %s " % (toolpaths["llvm-dis"], bcfile, dumpfile))
  if rc != 0:
    # Non-fatal: just note the failure at verbose level 1.
    u.verbose(1, "llvm-dis returns %d" % rc)
def setup_prereqs(targ):
  """Set up prerequisites (gmp etc.) inside the target tree."""
  dochdir(targ)
  if os.path.exists("gmp"):
    u.verbose(0, "... 'gmp' already exists, skipping clone")
  else:
    docmd("sh contrib/download_prerequisites")
  dochdir("..")
def read_device_info():
  """Read info from environment about connected devices.

  Parses DEVTAGS ("tag:serial" chunks) into tag_to_serial and
  CODENAMETOTAG ("codename:tag" chunks) into codename_to_tag, then
  cross-checks them. Returns True when the tables are usable for
  flashing, False otherwise.
  """
  devtags = os.environ["DEVTAGS"]
  dtv = "'DEVTAGS' environment variable"
  if not devtags:
    u.warning("no setting for %s -- "
              "unable to flash to device(s)" % dtv)
    return False
  codenametotag = os.environ["CODENAMETOTAG"]
  cnv = "'CODENAMETOTAG' environment variable"
  # Bug fix: this check previously re-tested 'devtags' instead of
  # 'codenametotag', so an empty CODENAMETOTAG slipped through.
  if not codenametotag:
    u.warning("no setting for %s -- "
              "unable to flash to device(s)" % cnv)
    return False
  # DEVTAGS: each chunk is tag:serial.
  chunks = devtags.split()
  for chunk in chunks:
    pair = chunk.split(":")
    if len(pair) != 2:
      u.warning("malformed chunk %s in %s "
                "(skipping)" % (chunk, dtv))
      continue
    tag = pair[0]
    serial = pair[1]
    u.verbose(2, "tag %s serial %s" % (tag, serial))
    tag_to_serial[tag] = serial
  if not tag_to_serial:
    u.warning("malformed %s: no devices" % dtv)
    return False
  # CODENAMETOTAG: each chunk is codename:tag.
  chunks = codenametotag.split()
  for chunk in chunks:
    pair = chunk.split(":")
    if len(pair) != 2:
      u.warning("malformed chunk %s in %s "
                "(skipping)" % (chunk, cnv))
      continue
    codename = pair[0]
    tag = pair[1]
    u.verbose(2, "codename %s tag %s" % (codename, tag))
    codename_to_tag[codename] = tag
  if not codename_to_tag:
    u.warning("malformed %s: no devices" % cnv)
    return False
  # Reading complete. Now match things up to make sure
  # that we have at least some correspondence.
  found_count = 0
  for codename, tag in codename_to_tag.iteritems():
    if tag not in tag_to_serial:
      u.warning("CODENAMETOTAG mentions tag %s, which "
                "does not appear in DEVTAGS" % tag)
    else:
      found_count += 1
  if found_count == 0:
    u.warning("no devices mentioned in CODENAMETOTAG "
              "are listed in DEVTAGS")
    return False
  return True
def parse_args():
  """Command line argument parsing.

  Options: -d verbosity, -e echo, -D dry run (implies echo),
  -i colon-separated input files, -o output dir, -p pprof path,
  -t tag, -b binary. The -i/-o/-t/-b options are mandatory.
  """
  global flag_echo, flag_dryrun, flag_infiles, flag_outdir, flag_tag
  global flag_binary, flag_pprof_path
  try:
    optlist, args = getopt.getopt(sys.argv[1:], "deDi:o:t:p:b:")
  except getopt.GetoptError as err:
    # unrecognized option
    usage(str(err))
  if args:
    usage("unknown extra args: %s" % " ".join(args))
  for opt, arg in optlist:
    if opt == "-d":
      u.increment_verbosity()
    elif opt == "-D":
      u.verbose(0, "+++ dry run mode")
      flag_dryrun = True
      flag_echo = True
    elif opt == "-e":
      flag_echo = True
    elif opt == "-i":
      # -i takes a colon-separated list of input files.
      infiles = arg.split(":")
      u.verbose(1, "%d input files" % len(infiles))
      for inf in infiles:
        if not os.path.exists(inf):
          usage("unable to access -i input file %s" % inf)
        flag_infiles.append(inf)
    elif opt == "-o":
      if not os.path.exists(arg):
        usage("unable to access -o argument %s" % arg)
      if not os.path.isdir(arg):
        usage("-o argument %s not a directory" % arg)
      flag_outdir = arg
    elif opt == "-p":
      if not os.path.exists(arg):
        usage("unable to access -p argument %s" % arg)
      flag_pprof_path = arg
    elif opt == "-t":
      flag_tag = arg
    elif opt == "-b":
      if not os.path.exists(arg):
        usage("unable to access -b argument %s" % arg)
      flag_binary = arg
  # All of the following are required.
  if not flag_infiles:
    usage("supply input files with -i")
  if not flag_outdir:
    usage("supply output dir with -o")
  if not flag_tag:
    usage("supply tag with -t")
  if not flag_binary:
    usage("supply executable path with -b")
def shell_is_bash():
  """Return TRUE if the shell being used is bash."""
  shell = os.environ.get("SHELL")
  if shell is None:
    u.warning("no definition for SHELL in environment (?)")
    return False
  u.verbose(1, "SHELL set to: %s" % shell)
  # Equivalent to matching the path against ^.*/bash$.
  return shell.endswith("/bash")
def do_gollvm_clean():
  """Clean a gollv build dir."""
  libgodir = "tools/gollvm/libgo"
  u.verbose(1, "visiting %s" % libgodir)
  do_clean(libgodir)
  # Remove any built gotools binaries as well.
  for tool in ("vet", "test2json", "buildid", "go", "gofmt", "cgo"):
    path = "tools/gollvm/gotools/" + tool
    if not os.path.exists(path):
      continue
    u.verbose(1, "cleaning %s" % path)
    if not flag_dryrun:
      os.unlink(path)
def postprocess(scraper):
  """Postprocess contents of scraped target web page.

  Dumps scraper state at nonzero verbosity, then downloads every blob
  recorded in the scraper's blob table.
  """
  if u.verbosity_level() > 0:
    sys.stderr.write("dump of scraper state\n")
    scraper.dump()
  blobtable = scraper.blobtable()
  version = scraper.version()
  for device, rows in blobtable.iteritems():
    # Bug fix: 'idx' was initialized to 0 but never incremented, so the
    # verbose line always reported idx=0; enumerate fixes the count.
    for idx, r in enumerate(rows):
      u.verbose(1, "device=%s idx=%d blob=%s\n" % (device, idx, r[2]))
      download_blob(device, version, r[2])
def open_pprof_output():
  """Open pprof script output file.

  Returns a (tempfile, script file handle) pair, or (None, None) in
  dry-run mode. The script is seeded with a shell header and an empty
  PIDS variable.
  """
  if flag_dryrun:
    return (None, None)
  try:
    outf = tempfile.NamedTemporaryFile(mode="w", delete=True)
    ppo = open(outf.name, "w")
  except IOError:
    # Bug fix: this used u.verbose, which let execution continue with
    # 'ppo' unbound and crash on the write below; make the failure fatal.
    u.error("open failed for pprof script output temp file")
  ppo.write("#!/bin/sh\n")
  ppo.write("PIDS=\n")
  return (outf, ppo)
def form_golibargs(driver):
  """Form correct go library args."""
  # Install root is two levels up from the driver binary.
  installdir = os.path.dirname(os.path.dirname(driver))
  cmd = "find %s/lib64 -name runtime.gox -print" % installdir
  lines = u.docmdlines(cmd)
  if not lines:
    u.error("no output from %s -- bad gccgo install dir?" % cmd)
  libdir = os.path.dirname(lines[0])
  u.verbose(1, "libdir is %s" % libdir)
  return ["-L", libdir]
def find_cmakefiles():
  """Locate files to consider (no-op if already populated)."""
  if cmakefiles:
    return
  lines = u.docmdlines("find . -name \"*.cmake\" -print "
                       "-o -name CMakeLists.txt -print")
  for line in lines:
    path = line.strip()
    if not path:
      continue
    u.verbose(2, "adding %s to cmakefiles" % path)
    cmakefiles[path] = 1
def perform(): """Main driver routine.""" # Step 1: dump only compilation unit info. cmd = ("%s --dwarf=info " "--dwarf-depth=0 %s" % (flag_objdump, flag_loadmodule)) u.verbose(1, "running: %s" % cmd) lines = u.docmdlines(cmd) cre = re.compile(r"^\s*Compilation Unit \@ offset 0x(\S+)\:\s*$") units = 0 lo = -1 hi = -1 maxoff = -1 selectoff = -1 for line in lines: m = cre.match(line) if m: binoff = int(m.group(1), 16) if binoff <= flag_offset_to_find: lo = units selectoff = binoff if binoff > flag_offset_to_find: hi = units break maxoff = binoff units += 1 if units == 0 or lo == -1: u.warning("no DWARF compile units in %s, dump aborted" % flag_loadmodule) return if hi == -1: u.warning("could not find CU with offset higher than %x; " "dumping last CU at offset %x" % (flag_offset_to_find, maxoff)) # Step 2: issue the dump cmd = ("%s --dwarf=info " "--dwarf-start=%d %s" % (flag_objdump, selectoff, flag_loadmodule)) u.verbose(1, "dump cmd is: %s" % cmd) args = shlex.split(cmd) mypipe = subprocess.Popen(args, stdout=subprocess.PIPE) cure = re.compile(r"^.+\(DW_TAG_compile_unit\).*") ncomps = 0 while True: line = mypipe.stdout.readline() if not line: break m = cure.match(line) if m: ncomps += 1 if ncomps > 1: break sys.stdout.write(line)
def parse_args():
  """Parse command line arguments for the script.

  Options: -d verbosity, -s save temps, -r previous revision,
  -c diff command, -R L:R (or L:PREV) revision pair. A single target
  file (or URL, with -R only) is required.
  """
  global flag_target_file, flag_prev_revision, target_is_url
  global flag_revision_pair, flag_diff_cmd, flag_save_temps
  try:
    optlist, args = getopt.getopt(sys.argv[1:], "dsr:R:c:")
  except getopt.GetoptError as err:
    # unrecognized option
    usage(str(err))
  for opt, arg in optlist:
    if opt == "-d":
      u.increment_verbosity()
      u.verbose(1, "debug level now %d" % u.verbosity_level())
    elif opt == "-r":
      flag_prev_revision = int(arg)
    elif opt == "-s":
      flag_save_temps = True
    elif opt == "-c":
      flag_diff_cmd = arg
    elif opt == "-R":
      # -R takes "left:right"; "left:PREV" means (left, left-1).
      plist = arg.split(":")
      if plist[1] == "PREV":
        lr = int(plist[0])
        p = (lr, lr - 1)
      else:
        p = (int(plist[0]), int(plist[1]))
      if p[0] <= 0:
        usage("specify positive left revision with -R")
      if p[1] <= 0:
        usage("specify positive right revision with -R")
      if p[0] == p[1]:
        usage("specify different revisions with -R")
      u.verbose(1, "revision pair: %d %d" % (p[0], p[1]))
      flag_revision_pair = p
  if not args or len(args) != 1:
    u.error("supply single file to diff")
  flag_target_file = args[0]
  # URL targets (scheme://...) are only usable with an explicit -R pair.
  um = re.compile(r"^.+://.+$")
  res = um.match(flag_target_file)
  if res:
    target_is_url = True
    if not flag_revision_pair:
      usage("URL target can only be used with -R option.")
  else:
    if not os.path.exists(flag_target_file):
      u.error("target file %s does not appear to exist" % flag_target_file)
    if not os.path.isfile(flag_target_file):
      u.error("target file %s is not a file" % flag_target_file)
def create_or_check_link(src, dst):
  """Create or check a symbolic link."""
  if not os.path.exists(dst):
    u.verbose(0, "... creating link %s -> %s" % (dst, src))
    os.symlink(src, dst)
    return
  # Destination exists: verify it is a link pointing at src.
  u.verbose(0, "... verifying link %s -> %s" % (dst, src))
  if not os.path.islink(dst):
    u.error("can't proceed: %s exists but is not a link" % dst)
  ltarget = os.readlink(dst)
  if ltarget != src:
    u.error("can't proceed: %s exists but points to %s "
            "instead of %s" % (dst, ltarget, src))
def perform():
  """Top level driver routine."""
  if not os.path.exists(".git"):
    u.error("unable to locate top level .git in current dir")
  # Deepest-first find, reversed so outer repos come first.
  repos = u.docmdlines("find . -depth -name .git -print")
  repos.reverse()
  for repo in repos:
    u.verbose(1, "checking %s" % repo)
    do_check(repo)
  for repo in repos:
    u.verbose(1, "visiting %s" % repo)
    do_fetch(repo)
def create_token():
  """Deposit token file.

  Records each cmake file's access and modification times, one
  "<path> <atime> <mtime>" line per file, so they can be restored
  later.
  """
  u.verbose(0, "creating token %s" % flag_tokenfile)
  try:
    with open(flag_tokenfile, "w") as tf:
      for f in sorted(cmakefiles):
        st = os.stat(f)
        u.verbose(
            2, "storing %s at=%d mt=%d "
            "to token" % (f, st.st_atime, st.st_mtime))
        tf.write("%s %d %d\n" % (f, st.st_atime, st.st_mtime))
  except IOError:
    u.error("unable to write to %s" % flag_tokenfile)
def ppo_append(ppo, cmd, outf):
  """Append cmd to ppo command file, backgrounded with redirections."""
  global ppolines
  # Escape dollar signs so the shell doesn't expand them.
  cmd = cmd.replace(r"$", r"\$")
  if flag_dryrun:
    u.verbose(0, "%s" % cmd)
    return
  suffix = "%d.%d" % (ppolines, len(cmd))
  errf = "/tmp/ppo-err." + suffix
  if not outf:
    outf = "/tmp/ppo-out." + suffix
  ppolines += 1
  # Launch in the background and record the pid plus its error file.
  ppo.write("%s 1> %s 2> %s &\n" % (cmd, outf, errf))
  ppo.write("PIDS=\"$PIDS $!:%s\"\n" % errf)
def do_subvol_create():
  """Create new LNT/testsuite trunk subvolume."""
  ssdroot = u.determine_btrfs_ssdroot(os.getcwd())
  docmd("snapshotutil.py mkvol %s" % flag_subvol)
  dochdir(ssdroot)
  dochdir(flag_subvol)
  # Clone LNT and the test suite into the new subvolume.
  u.verbose(1, "cloning LNT")
  doscmd("git clone %s" % lnt_git)
  u.verbose(1, "cloning test suite")
  doscmd("git clone %s" % testsuite_git)
  # Set up a virtualenv and install LNT into it.
  doscmd("virtualenv virtualenv")
  doscmd("./virtualenv/bin/python ./lnt/setup.py develop")
def examine(afile): """Dump go exports for specified file.""" objfile = afile arcmd = "ar t %s" % afile # Run 'ar' command, suppressing error output. If # if succeeds, then continue on the ar path, otherwise # treat input as an object. if u.doscmd(arcmd, True, True): # Handle archives lines = u.docmdlines(arcmd, True) if not lines: u.warning("skipping %s, can't index archive %s", afile) return # Extract elem from archive elem = lines[0].strip() u.verbose(1, "%s contains %s" % (afile, elem)) rc = u.docmdnf("ar x %s %s" % (afile, elem)) if rc: u.warning("skipping %s, can't extract object" % afile) return objfile = elem gexptemp = tempfile.NamedTemporaryFile(mode="w", prefix="go_export", delete=True) # Handle objects cmd = ("objcopy -O binary --only-section=.go_export " "--set-section-flags .go_export=alloc %s " "%s" % (objfile, gexptemp.name)) rc = u.docmdnf(cmd) if rc: u.warning("skipping %s, can't extract export " "data (cmd failed: %s)" % (objfile, cmd)) return try: inf = open(gexptemp.name, "rb") except IOError as e: u.error("unable to open tempfile %s: " "%s" % (gexptemp.name, e.strerror)) print "== %s ==" % afile lines = inf.readlines() if not lines: u.warning("skipping %s, no .go_export section present" % objfile) for line in lines: print line.strip() inf.close() if objfile != afile: os.unlink(objfile)
def examine_file(f):
  """Examine and copy a file if it needs copying.

  Returns 1 when the file needed copying (missing from dest or
  checksum mismatch), 0 otherwise.
  """
  sfile = os.path.join(flag_source_dir, f)
  if not os.path.exists(sfile):
    u.warning("file %s does not exist in src dir -- skipping" % f)
    return 0
  dfile = os.path.join(flag_dest_dir, f)
  if not os.path.exists(dfile):
    u.verbose(1, "file %s does not exist in dest dir" % f)
    docopy = True
  else:
    scksum = checksum_file(sfile)
    dcksum = checksum_file(dfile)
    docopy = scksum != dcksum
    if docopy:
      u.verbose(
          1, "checksum mismatch (%s vs %s) "
          "on file %s" % (scksum, dcksum, f))
  if not docopy:
    return 0
  if flag_dryrun:
    u.verbose(0, "dryrun: cp %s %s" % (sfile, dfile))
  else:
    u.verbose(0, "cp %s %s" % (sfile, dfile))
    u.docmd("cp %s %s" % (sfile, dfile))
    u.docmd("chmod 0755 %s" % dfile)
  return 1
def archive():
  """Archive modifications.

  Captures a diff (sha range, branch-vs-master, or staged), recent log,
  and copies of modified files into flag_destdir.
  """
  # Pick the diff flavor based on which flags were supplied.
  if flag_oldsha:
    diffcmd = "git diff %s..%s" % (flag_oldsha, flag_newsha)
  elif flag_branch_to_diff:
    diffcmd = "git diff %s master" % flag_branch_to_diff
  else:
    diffcmd = "git diff --cached"
  docmdout(diffcmd, "%s/git.diff.txt" % flag_destdir)
  docmdout("git log -10", "%s/git.log10.txt" % flag_destdir)
  grab_current_sha()
  emit_deletions_and_renames()
  nf = emit_modified_files()
  u.verbose(0, "... diff, log, and %d files copied" % nf)
def restore_mtimes():
  """Restore mtimes from tokenfile.

  Each token file line has the form "<path> <atime> <mtime>" (as
  written by create_token). Returns the number of files restored.
  """
  u.verbose(1, "reading token file %s" % flag_tokenfile)
  restored = 0
  try:
    with open(flag_tokenfile, "r") as tf:
      pat = re.compile(r"^\s*(\S+)\s+(\d+)\s+(\d+)\s*$")
      lines = tf.readlines()
      for line in lines:
        m = pat.match(line)
        if not m:
          u.error("pattern match failed on token file line %s" % line)
        f = m.group(1)
        st = os.stat(f)
        u.verbose(
            2, "before restore for %s, at=%d "
            "mt=%d" % (f, st.st_atime, st.st_mtime))
        # Bug fix: the token stores atime then mtime, but the groups were
        # previously read in the opposite order, so os.utime received the
        # access and modification times swapped.
        at = int(m.group(2))
        mt = int(m.group(3))
        newtimes = (at, mt)
        os.utime(f, newtimes)
        u.verbose(2, "restoring at=%d mt=%d for %s" % (at, mt, f))
        st = os.stat(f)
        u.verbose(
            2, "after restore for %s, at=%d "
            "mt=%d" % (f, st.st_atime, st.st_mtime))
        restored += 1
  except IOError:
    u.error("unable to read token file %s" % flag_tokenfile)
  return restored
def setup_build_dirs(targ):
  """Set up build_dirs.

  Configures a gold-enabled binutils build dir, then one GCC build dir
  per entry in build_flavors, skipping any dir that already exists.
  """
  root = os.getcwd()
  bb = "build-binutils"
  if os.path.exists(bb):
    u.verbose(0, "... binutils build dir '%s' already exists, "
              "skipping setup" % bb)
  else:
    os.mkdir("build-binutils")
    dochdir("build-binutils")
    u.verbose(0, "... running configure in build dir 'build-binutils'")
    doscmd("../binutils/configure --prefix=%s/binutils-cross "
           "--enable-gold=default --enable-plugins" % root)
    dochdir("..")
  # One configured build dir per flavor; each flavor supplies its own
  # install prefix and extra configure args.
  for b, d in build_flavors.iteritems():
    if os.path.exists(b):
      u.verbose(0, "... build dir '%s' already exists, skipping setup" % b)
      continue
    prefix = d["prefix"]
    extra = d["extra"]
    os.mkdir(b)
    dochdir(b)
    u.verbose(0, "... running configure in build dir '%s'" % b)
    doscmd("../%s/configure --prefix=%s/%s "
           "--enable-languages=c,c++,go --enable-libgo "
           "--disable-bootstrap --with-ld=%s/binutils-cross/bin/ld.gold "
           "%s" % (targ, root, prefix, root, extra))
    dochdir("..")
def install_shim(scriptpath): """Install shim into gccgo install dir.""" # Make sure we're in the right place (gccgo install dir) if not os.path.exists("bin"): usage("expected to find bin subdir") if not os.path.exists("lib64/libgo.so"): usage("expected to find lib64/libgo.so") if not os.path.exists("bin/gccgo"): usage("expected to find bin/gccgo") # Copy script, or update if already in place. docmd("cp %s bin" % scriptpath) sdir = os.path.dirname(scriptpath) docmd("cp %s/script_utils.py bin" % sdir) # Test to see if script installed already cmd = "file bin/gccgo" lines = u.docmdlines(cmd) if not lines: u.error("no output from %s -- bad gccgo install dir?" % cmd) else: reg = re.compile(r"^.+ ELF .+$") m = reg.match(lines[0]) if not m: u.warning("wrapper appears to be installed already in this dir") return # Move aside the real gccgo binary docmd("mv bin/gccgo bin/gccgo.real") # Emit a script into gccgo sys.stderr.write("emitting wrapper script into bin/gccgo\n") if not flag_dryrun: try: with open("./bin/gccgo", "w") as wf: here = os.getcwd() wf.write("#!/bin/sh\n") wf.write("P=%s/bin/gollvm-wrap.py\n" % here) wf.write("exec python ${P} \"$@\"\n") except IOError: u.error("open/write failed for bin/gccgo wrapper") docmd("chmod 0755 bin/gccgo") # Success u.verbose(0, "wrapper installed successfully") # Done return 0
def perform(): """Main driver routine.""" # Volumes volumes = {} # Snapshots snapshots = {} # Key is vol, value is dictionary of subvolumes voldict = defaultdict(lambda: defaultdict(int)) # Multiprocessing pool nworkers = 8 pool = multiprocessing.Pool(processes=nworkers) # Get info on volumes collect_subvolumes_and_snapshots(volumes, snapshots, voldict) # Kick off job for each volume, then snapshot results = [] snapvols = [] for v in volumes: u.verbose(1, "enqueue job for vol %s" % v) r = pool.apply_async(process_volsnap, [v]) results.append(r) snapvols.append(v) if flag_shortrun: break for sv in snapshots: u.verbose(1, "enqueue job for snap %s" % sv) r = pool.apply_async(process_volsnap, [sv]) results.append(r) snapvols.append(sv) if flag_shortrun: break # Collect results resdict = {} nr = len(results) for idx in range(0, nr): r = results[idx] v = snapvols[idx] u.verbose(1, "waiting on result %d %s" % (idx, v)) pair = r.get(timeout=200) resdict[v] = pair # Emit results for v in volumes: if v in resdict: emit(v, resdict, voldict, 0) outf.close() if flag_email_dest: cmd = ("sendgmr --to=%s --body_file=%s " "--subject='repo status " "summary'" % (whoami, flag_outfile)) u.verbose(1, "email cmd is: %s" % cmd) u.docmd(cmd)
def collect_all_loadmodules():
  """Collect names of all loadmodules in $ANDROID_PRODUCT_OUT/symbols/system."""
  cmd = "find %s/symbols/system -type f -print" % apo
  u.verbose(1, "find cmd: %s" % cmd)
  proc = subprocess.Popen(shlex.split(cmd), stdout=subprocess.PIPE)
  stdout, _ = proc.communicate()
  if proc.returncode != 0:
    u.error("command failed (rc=%d): cmd was %s" % (proc.returncode, cmd))
  # Decode using the locale's preferred encoding, one path per line.
  text = stdout.decode(locale.getdefaultlocale()[1])
  found = text.strip().split("\n")
  u.verbose(1, "found a total of %d libs" % len(found))
  return sorted(found)
def process_volsnap(v):
  """Examine a given subvolume or snapshot.

  Runs 'du -sh' (and 'repo status' if the volume holds a .repo dir),
  capturing output into temp files; returns the (du_file, repo_file)
  pair, where repo_file is None for non-repo volumes.
  """
  safename = re.sub("/", "_", v)
  dufile = "/tmp/ssnap-%s-%s-du.txt" % (whoami, safename)
  os.chdir(v)
  u.verbose(1, "collecting disk for %s into %s" % (v, dufile))
  u.docmdout("du -sh", dufile)
  rpfile = None
  if os.path.exists(os.path.join(v, ".repo")):
    rpfile = "/tmp/ssnap-%s-%s-rpstat.txt" % (whoami, safename)
    u.verbose(1, "collecting rpstat for %s into %s" % (v, rpfile))
    u.docmdout("repo status", rpfile)
  return (dufile, rpfile)
def disas(func, addr, tgt):
  """Disassemble a specified function."""
  staddr, enaddr = grab_addr_from_symtab(func, addr, tgt)
  if staddr == 0:
    u.verbose(0, "... could not find %s in "
              "output of objdump, skipping" % what(func, addr))
    return
  cmd = ("objdump --no-show-raw-insn --wide -dl "
         "--start-address=0x%x "
         "--stop-address=0x%x %s" % (staddr, enaddr, tgt))
  # Plain disassembly unless DWARF post-processing was requested.
  if not flag_dwarf_cu or flag_dryrun:
    docmd(cmd)
  else:
    dodwarf(u.docmdlines(cmd), tgt)
def inspect_path():
  """Inspect path components.

  Walks each directory on $PATH and prints every file whose name
  matches flag_text (treated as a regular expression fragment).
  """
  if "PATH" not in os.environ:
    u.error("no definition for PATH in environment (?)")
  path = os.environ["PATH"]
  u.verbose(1, "PATH set to: %s" % path)
  path_directories = path.split(":")
  # NOTE(review): flag_text is embedded unescaped, so regex
  # metacharacters in it are interpreted -- presumably intentional.
  matcher = re.compile(r"^.*%s.*$" % flag_text)
  for d in path_directories:
    u.verbose(2, "+ considering dir %s" % d)
    if os.path.isdir(d):
      for filename in os.listdir(d):
        m = matcher.match(filename)
        if m is not None:
          # Parenthesized so this is valid under both the Python 2
          # print statement and the Python 3 print function (the bare
          # 'print x' form is a syntax error in Python 3).
          print("%s/%s" % (d, filename))
def locate_binaries(clangcmd):
  """Locate executables of interest.

  Populates the global 'toolpaths' dict with paths to clang, opt, llc,
  and llvm-dis derived from clangcmd. If clangcmd has no directory
  component, 'which' is consulted to locate the bin dir. If clang is
  versioned (e.g. clang-3.9), llvm-dis is versioned to match. Warns
  about any resolved tool that is missing or not executable.
  """
  global toolpaths
  # Figure out what to invoke.
  u.verbose(1, "clangcmd is %s" % clangcmd)
  toolpaths["clang"] = clangcmd
  reg = re.compile("(^.*)/(.*)$")
  m = reg.match(clangcmd)
  bindir = None
  clcmd = None
  if m:
    bindir = m.group(1)
    clcmd = m.group(2)
  else:
    if not flag_dryrun:
      lines = u.docmdlines("which %s" % clangcmd)
      if not lines:
        u.error("which %s returned empty result" % clangcmd)
      clangbin = lines[0].strip()
      bindir = os.path.dirname(clangbin) + "/"
      clcmd = os.path.basename(clangbin)
      u.verbose(1, "clang bindir is %s" % bindir)
    else:
      bindir = ""
      # Bug fix: clcmd was previously left as None on this path,
      # making the os.path.join below raise TypeError on a dry run
      # with a bare (slash-free) clang command.
      clcmd = clangcmd
  toolpaths["clang"] = os.path.join(bindir, clcmd)
  toolpaths["opt"] = os.path.join(bindir, "opt")
  toolpaths["llc"] = os.path.join(bindir, "llc")
  toolpaths["llvm-dis"] = os.path.join(bindir, "llvm-dis")
  if flag_dryrun:
    return
  # If clang is versioned, then version llvm-dis. Raw string avoids the
  # invalid '\-' escape warning the old pattern produced on Python 3.
  reg2 = re.compile(r"^.+(-\d\.\d)$")
  m2 = reg2.match(clangcmd)
  if m2:
    toolpaths["llvm-dis"] = os.path.join(bindir,
                                         "llvm-dis%s" % m2.group(1))
  # Check for existence and executability.
  tocheck = ["clang", "opt", "llc", "llvm-dis"]
  for tc in tocheck:
    path = toolpaths[tc]
    if not os.path.exists(path):
      u.warning("can't access binary %s at path %s" % (tc, path))
    if not os.access(path, os.X_OK):
      u.warning("no execute permission on binary %s" % path)
def examinefile(filename):
  """Perform symbol analysis on specified file.

  Skips files outside the symbols dir, files with no section contents,
  and files that are already stripped; otherwise accumulates per-section
  sizes into the global 'allsecsizes' and runs symbol examination.
  """
  if not in_symbols_dir(filename):
    u.warning("%s: does not appear to be in "
              "%s/symbols directory? skipping" % (filename, apo))
    return
  secsizes = examine_sections(filename)
  if not secsizes:
    u.verbose(1, "skipping file %s, no contents" % filename)
    return
  if file_is_stripped(secsizes):
    u.verbose(1, "skipping file %s, already stripped" % filename)
    return
  # items() instead of iteritems(): works on both Python 2 and 3
  # (iteritems was removed in Python 3).
  for secname, secsize in secsizes.items():
    allsecsizes[secname] += secsize
  examine_symbols(filename, secsizes)
def install_blob(blob, devdir):
  """Install a single blob.

  The blob tarball in devdir must contain exactly one file; it is
  unpacked and handed to the installer.
  """
  # Determine blob name.
  blobpath = "%s/%s" % (devdir, blob)
  contents = u.docmdlines("tar tzf %s" % blobpath)
  if len(contents) != 1:
    u.error("error while examining blob %s: expected single file" % blob)
  # Unpack.
  u.verbose(1, "unpacking blob %s" % blob)
  u.docmd("tar xzf %s" % blobpath)
  # Invoke installer on the single extracted file.
  u.docmd("blobinstall.py %s" % contents[0])
def do_gccgo_clean():
  """Clean a gccgo build dir."""
  if not os.path.exists("config.log"):
    u.error("no 'config.log' here -- needs to be run in GCC build dir")
  # find -depth emits deepest-first; reverse so we visit shallowest-first.
  libgodirs = u.docmdlines("find . -depth -name libgo -print")
  libgodirs.reverse()
  for lgd in libgodirs:
    u.verbose(1, "visiting %s" % lgd)
    do_clean(lgd)
  # Remove the gotools binaries as well.
  for tool in ["vet", "test2json", "buildid", "go", "gofmt", "cgo"]:
    p = "gotools/" + tool
    if not flag_dryrun and os.path.exists(p):
      u.verbose(1, "cleaning %s" % p)
      os.unlink(p)
def parse_args():
  """Command line argument parsing.

  Sets the various flag_* globals from sys.argv; in non-toplevel mode a
  single directory argument is required and stored in flag_subdir.
  """
  global flag_subdir, flag_strace, flag_toplevel, flag_dryrun
  global flag_showcommands, flag_dependencies, flag_parfactor
  global flag_dashk, flag_checkbuild
  try:
    optlist, args = getopt.getopt(sys.argv[1:], "adkstTDSx:j:")
  except getopt.GetoptError as err:
    # unrecognized option
    usage(str(err))
  for opt, arg in optlist:
    if opt == "-d":
      u.increment_verbosity()
    elif opt == "-a":
      flag_dependencies = True
    elif opt == "-k":
      flag_dashk = "-k"
    elif opt == "-s":
      flag_showcommands = " showcommands"
    elif opt == "-t":
      flag_toplevel = True
    elif opt == "-T":
      # -T implies -t plus a checkbuild.
      flag_toplevel = True
      flag_checkbuild = True
    elif opt == "-D":
      flag_dryrun = True
    elif opt == "-S":
      flag_strace = "strace -f -o trace.txt "
    elif opt == "-x":
      u.verbose(0, "adding extra make arg %s" % arg)
      flag_extra_make_args.append(arg)
    elif opt == "-j":
      flag_parfactor = int(arg)
  # Validate the positional-argument / flag combinations.
  if flag_toplevel:
    if flag_dependencies:
      usage("specify at most one of -t, -a")
    return
  if not args:
    usage("supply dirname arg")
  if len(args) != 1:
    usage("supply single dirname arg")
  if flag_extra_make_args:
    usage("-x option can only be supplied with -t")
  flag_subdir = args[0]
def run_objdump_cmd(cargs, filename):
  """Run objdump with specified args, returning list of lines."""
  # Lazily pick the right objdump for this file if not yet chosen.
  if not objdump_cmd:
    determine_objdump(filename)
  cmd = "%s %s %s" % (objdump_cmd, cargs, filename)
  u.verbose(2, "objdump cmd: %s" % cmd)
  proc = subprocess.Popen(shlex.split(cmd), stdout=subprocess.PIPE)
  out, _ = proc.communicate()
  if proc.returncode != 0:
    u.error("command failed (rc=%d): cmd was %s" % (proc.returncode, cmd))
  # Decode with the locale's preferred encoding and split into lines.
  decoded = out.decode(locale.getdefaultlocale()[1])
  return decoded.strip().split("\n")