def process_commit(idx, branchname, githash, comment):
  """Process a commit by hash.

  Writes a /tmp report file for one commit containing its one-line
  comment, the 'git log --name-only' entry, and the full diff against
  its parent.  In dry-run mode only reports what would be written.

  Args:
    idx: ordinal position of the commit (used in the output file name).
    branchname: branch the commit lives on (used in the file name).
    githash: hash of the commit to process.
    comment: one-line commit comment, written as a header.
  """
  tag = ""
  if flag_tag:
    tag = ".tag=%s" % flag_tag
  fn = "/tmp/item%d.branch%s%s.commit%s.txt" % (idx, branchname, tag, githash)
  if flag_dryrun:
    u.verbose(0, "<dryrun: emit diff for "
              "%s^ %s to %s>" % (githash, githash, fn))
    return
  # Record the file for the index emitted later by the driver.
  files_emitted.append(fn)
  try:
    outf = open(fn, "w")
  except IOError as e:
    u.error("unable to open %s: %s" % (fn, e.strerror))
  outf.write("// comment: %s\n" % comment)
  outf.write("//\n")
  # Section 1: log entry with the list of files touched.
  lines = u.docmdlines("git log --name-only -1 %s" % githash)
  if not lines:
    u.error("empty output from 'git log --name-only -1 %s'" % githash)
  for line in lines:
    outf.write(line)
    outf.write("\n")
  outf.write("--------------------------------------------------------------\n")
  # Section 2: full diff of the commit against its parent.
  lines = u.docmdlines("git diff %s^ %s" % (githash, githash))
  if not lines:
    u.error("empty output from 'git diff %s^ %s'" % (githash, githash))
  for line in lines:
    outf.write(line)
    outf.write("\n")
  outf.close()
  u.verbose(1, "wrote %d diff lines to %s" % (len(lines), fn))
def process_commit(idx, branchname, githash, comment):
  """Emit a per-commit report file: comment header, log entry, full diff."""
  tag = ".tag=%s" % flag_tag if flag_tag else ""
  fn = "/tmp/item%d.branch%s%s.commit%s.txt" % (idx, branchname, tag, githash)
  if flag_dryrun:
    u.verbose(
        0, "<dryrun: emit diff for "
        "%s^ %s to %s>" % (githash, githash, fn))
    return
  files_emitted.append(fn)
  try:
    outf = open(fn, "w")
  except IOError as e:
    u.error("unable to open %s: %s" % (fn, e.strerror))

  def emit(cmd):
    # Run cmd, die on empty output, copy output lines into the report.
    cmdlines = u.docmdlines(cmd)
    if not cmdlines:
      u.error("empty output from '%s'" % cmd)
    for cmdline in cmdlines:
      outf.write(cmdline)
      outf.write("\n")
    return cmdlines

  outf.write("// comment: %s\n" % comment)
  outf.write("//\n")
  emit("git log --name-only -1 %s" % githash)
  outf.write(
      "--------------------------------------------------------------\n")
  difflines = emit("git diff %s^ %s" % (githash, githash))
  outf.close()
  u.verbose(1, "wrote %d diff lines to %s" % (len(difflines), fn))
def perform(): """Main driver routine.""" #tf = tempfile.NamedTemporaryFile(mode="w", delete=True) lines = u.docmdlines("git status -sb") if not lines: u.error("empty output from git status -sb") brnreg = re.compile(r"^## (\S+)\.\.(\S+) \[ahead (\d+)\]\s*$") m = brnreg.match(lines[0]) if not m: u.error("can't pattern match output of git status -sb: %s" % lines[0]) branchname = m.group(1).strip(".") commits = int(m.group(3)) u.verbose(1, "branch is: %s commits: %d" % (branchname, commits)) # Grab info on commits lines = u.docmdlines("git log --oneline -%d" % commits) if not lines: u.error("empty output from 'git log --oneline'") # Process commits in reverse order firsthash = None lasthash = None creg = re.compile(r"^(\S+) (\S.+)$") lines.reverse() idx = 0 for cl in lines: idx += 1 m = creg.match(cl) if not m: u.error("can't pattern match git log output: %s" % cl) githash = m.group(1) lasthash = githash if not firsthash: firsthash = githash comment = m.group(2) u.verbose(1, "processing hash %s comment %s" % (githash, comment)) process_commit(idx, branchname, githash, comment) # Emit index file n = len(files_emitted) + 1 fn = "/tmp/item%d.branch=%s.index.txt" % (n, branchname) try: outf = open(fn, "w") except IOError as e: u.error("unable to open %s: %s" % (fn, e.strerror)) outf.write("Files emitted:\n\n") outf.write(" ".join(files_emitted)) outf.write("\n\nBranch log:\n\n") u.verbose(1, "index diff cmd hashes: %s %s" % (firsthash, lasthash)) lines = u.docmdlines("git log --name-only -%d HEAD" % len(files_emitted)) for line in lines: outf.write(line) outf.write("\n") outf.close() u.verbose(0, "... index file emitted to %s\n" % fn)
def perform(): """Main driver routine.""" #tf = tempfile.NamedTemporaryFile(mode="w", delete=True) lines = u.docmdlines("git status -sb") if not lines: u.error("empty output from git status -sb") brnreg = re.compile(r"^## (\S+)\.\.(\S+) \[ahead (\d+)\]\s*$") m = brnreg.match(lines[0]) if not m: u.error("can't pattern match output of git status -sb: %s" % lines[0]) branchname = m.group(1).strip(".") commits = int(m.group(3)) u.verbose(1, "branch is: %s commits: %d" % (branchname, commits)) # Grab info on commits lines = u.docmdlines("git log --oneline -%d" % commits) if not lines: u.error("empty output from 'git log --oneline'") # Process commits in reverse order firsthash = None lasthash = None creg = re.compile(r"^(\S+) (\S.+)$") lines.reverse() idx = 0 for cl in lines: idx += 1 m = creg.match(cl) if not m: u.error("can't pattern match git log output: %s" % cl) githash = m.group(1) lasthash = githash if not firsthash: firsthash = githash comment = m.group(2) u.verbose(1, "processing hash %s comment %s" % (githash, comment)) process_commit(idx, branchname, githash, comment) # Emit index file n = len(files_emitted) + 1 fn = "/tmp/item%d.branch=%s.index.txt" % (n, branchname) try: outf = open(fn, "w") except IOError as e: u.error("unable to open %s: %s" % (fn, e.strerror)) outf.write("Files emitted:\n\n") outf.write(" ".join(files_emitted)) outf.write("\n\nBranch log:\n\n") u.verbose(1, "index diff cmd hashes: %s %s" % (firsthash, lasthash)) lines = u.docmdlines("git log --name-only -%d HEAD" % len(files_emitted)) for line in lines: outf.write(line) outf.write("\n") outf.close() u.verbose(0, "... index file emitted to %s\n" % fn)
def grab_current_sha():
  """Record the full hash of HEAD in the global 'current_sha'."""
  global current_sha
  # The hash is the first whitespace-delimited token of the oneline entry.
  out = u.docmdlines("git log --no-abbrev-commit --pretty=oneline -1")
  current_sha = out[0].split()[0]
  u.verbose(1, "current sha: %s" % current_sha)
def grab_addr_from_symtab(func, addr, tgt):
  """Grab starting address and size from ELF symtab or dynsym.

  Scans 'objdump -t' output (and 'objdump -T' as well when tgt has a
  dynamic section) for the symbol of interest, returning a
  (start, end) address pair, or (0, 0) when the address/size fields
  cannot be parsed.  If both table flavors match, the -T result wins.
  """
  flavs = ["-t"]
  if has_dynamic_section(tgt):
    flavs.append("-T")
  staddr = 0
  enaddr = 0
  for flav in flavs:
    u.verbose(1, "looking for %s in output of "
              "objdump %s %s" % (what(func, addr), flav, tgt))
    lines = u.docmdlines("objdump %s %s" % (flav, tgt))
    hexstaddr = None
    hexsize = None
    for line in lines:
      # grabaddrsize yields non-None fields once the symbol is found.
      hexstaddr, hexsize = grabaddrsize(line, func, addr)
      if hexstaddr:
        break
    if not hexstaddr:
      continue
    try:
      staddr = int(hexstaddr, 16)
      size = int(hexsize, 16)
      enaddr = staddr + size
    except ValueError:
      u.verbose(0, "... malformed staddr/size (%s, %s) "
                "for %s, skipping" % (hexstaddr, hexsize, func))
      return 0, 0
  return staddr, enaddr
def human_dmesg(): """Post-process dmesg output to yield human-readable dates.""" now = datetime.now() uptime_diff = None try: with open("/proc/uptime") as f: uptime_diff = f.read().strip().split()[0] except IndexError: return try: uptime = now - timedelta(seconds=int(uptime_diff.split(".")[0]), microseconds=int(uptime_diff.split(".")[1])) print "uptime is" print uptime except IndexError: return dmesg_data = u.docmdlines("dmesg") unmatched = 0 matched = 0 for line in dmesg_data: if not line: continue match = _dmesg_line_regex.match(line) if match: seconds = int(match.groupdict().get("time", "").split(".")[0]) nanoseconds = int(match.groupdict().get("time", "").split(".")[1]) microseconds = int(round(nanoseconds * 0.001)) line = match.groupdict().get("line", "") t = uptime + timedelta(seconds=seconds, microseconds=microseconds) print "[%s]%s" % (t.strftime(_datetime_format), line) matched += 1 else: unmatched += 1 if unmatched > matched/2: print "matched %d unmatched %d" % (matched, unmatched)
def grab_current_sha():
  """Grab current sha for repo.

  Stores the full (non-abbreviated) hash of HEAD into the module-level
  'current_sha' variable.
  """
  global current_sha
  lines = u.docmdlines("git log --no-abbrev-commit --pretty=oneline -1")
  # First whitespace-separated token of the oneline entry is the hash.
  ar = lines[0].split()
  current_sha = ar[0]
  u.verbose(1, "current sha: %s" % current_sha)
def perform():
  """Main driver routine.

  Collects local branch names from 'git branch' (skipping master),
  validates any explicitly requested branches, pulls master, and then
  visits each branch of interest.
  """
  global flag_branches
  # Run 'git branch'
  lines = u.docmdlines("git branch", True)
  if not lines:
    u.error("not currently in git workspace")
  # Matches "  name" and "* name" (the current-branch marker).
  reg = re.compile(r"^[\*\s]*(\S+)\s*$")
  # Interpret output of git branch
  branches = {}
  for l in lines:
    u.verbose(3, "line is: =%s=" % l)
    m = reg.match(l)
    if not m:
      u.error("internal error: unable to match "
              "'git branch' on: %s" % l)
    bname = m.group(1)
    if bname == "master":
      continue
    u.verbose(2, "capturing local branch: %s" % bname)
    branches[bname] = 1
  # Did we see branches of interest?
  if flag_branches:
    for b in flag_branches:
      if b not in branches:
        u.error("specified branch %s not present "
                "in output of 'git branch'" % b)
  # -A mode: operate on every local branch we found.
  if flag_allbranches:
    flag_branches = branches
  u.verbose(1, "pulling master")
  docmd("git checkout master")
  docmd("git pull")
  for b in flag_branches:
    visit_branch(b)
def human_dmesg(): """Post-process dmesg output to yield human-readable dates.""" now = datetime.now() uptime_diff = None try: with open("/proc/uptime") as f: uptime_diff = f.read().strip().split()[0] except IndexError: return try: uptime = now - timedelta(seconds=int(uptime_diff.split(".")[0]), microseconds=int(uptime_diff.split(".")[1])) print "uptime is" print uptime except IndexError: return dmesg_data = u.docmdlines("dmesg") unmatched = 0 matched = 0 for line in dmesg_data: if not line: continue match = _dmesg_line_regex.match(line) if match: seconds = int(match.groupdict().get("time", "").split(".")[0]) nanoseconds = int(match.groupdict().get("time", "").split(".")[1]) microseconds = int(round(nanoseconds * 0.001)) line = match.groupdict().get("line", "") t = uptime + timedelta(seconds=seconds, microseconds=microseconds) print "[%s]%s" % (t.strftime(_datetime_format), line) matched += 1 else: unmatched += 1 if unmatched > matched / 2: print "matched %d unmatched %d" % (matched, unmatched)
def perform():
  """Main driver routine.

  Parses 'git branch' output to build the set of local non-master
  branches, checks that any branches named on the command line exist,
  refreshes master via pull, and visits each selected branch.
  """
  global flag_branches
  # Run 'git branch'
  lines = u.docmdlines("git branch", True)
  if not lines:
    u.error("not currently in git workspace")
  # Accepts both "  name" and "* name" forms.
  reg = re.compile(r"^[\*\s]*(\S+)\s*$")
  # Interpret output of git branch
  branches = {}
  for l in lines:
    u.verbose(3, "line is: =%s=" % l)
    m = reg.match(l)
    if not m:
      u.error("internal error: unable to match "
              "'git branch' on: %s" % l)
    bname = m.group(1)
    if bname == "master":
      continue
    u.verbose(2, "capturing local branch: %s" % bname)
    branches[bname] = 1
  # Did we see branches of interest?
  if flag_branches:
    for b in flag_branches:
      if b not in branches:
        u.error("specified branch %s not present "
                "in output of 'git branch'" % b)
  # All-branches mode replaces the explicit selection wholesale.
  if flag_allbranches:
    flag_branches = branches
  u.verbose(1, "pulling master")
  docmd("git checkout master")
  docmd("git pull")
  for b in flag_branches:
    visit_branch(b)
def perform():
  """Main driver routine.

  Scans 'usb-devices' output for the device whose SerialNumber matches
  flag_serial and issues a reset ioctl to the corresponding
  /dev/bus/usb node.  Errors out if no match is found.
  """
  lines = u.docmdlines("usb-devices")
  # "T:" topology lines carry bus/device numbers; "S:" lines carry
  # string descriptors such as SerialNumber.
  dmatch = re.compile(r"^\s*T:\s*Bus\s*=\s*(\d+)\s+.*\s+Dev#=\s*(\d+).*$")
  smatch = re.compile(r"^\s*S:\s*SerialNumber=(.*)$")
  device = None
  found = False
  for line in lines:
    m = dmatch.match(line)
    if m:
      p1 = int(m.group(1))
      p2 = int(m.group(2))
      device = "/dev/bus/usb/%03d/%03d" % (p1, p2)
      u.verbose(1, "setting device: %s" % device)
      continue
    m = smatch.match(line)
    if m:
      ser = m.group(1)
      if ser == flag_serial:
        # Fix: previously a serial match before any "T:" line passed
        # device=None straight into the ioctl helper.
        if device is None:
          u.error("matched serial %s but no device "
                  "path established yet" % flag_serial)
        u.verbose(1, "matched serial, invoking reset")
        issue_ioctl_to_device(device)
        found = True
        break
  if not found:
    u.error("unable to locate device with serial number %s" % flag_serial)
def grab_addr_from_symtab(func, tgt):
  """Grab starting address and size from ELF symtab or dynsym.

  Looks for 'func' in 'objdump -t' output (plus 'objdump -T' when tgt
  has a dynamic section) and returns a (start, end) address pair, or
  (0, 0) if the fields cannot be parsed.  When both table flavors
  contain the symbol, the -T result overwrites the -t result.
  """
  flavs = ["-t"]
  if has_dynamic_section(tgt):
    flavs.append("-T")
  staddr = 0
  enaddr = 0
  for flav in flavs:
    u.verbose(1, "looking for %s in output of "
              "objdump %s %s" % (func, flav, tgt))
    lines = u.docmdlines("objdump %s %s" % (flav, tgt))
    hexstaddr = None
    hexsize = None
    for line in lines:
      # Non-None return fields signal the symbol line was found.
      hexstaddr, hexsize = grabaddrsize(line, func)
      if hexstaddr:
        break
    if not hexstaddr:
      continue
    try:
      staddr = int(hexstaddr, 16)
      size = int(hexsize, 16)
      enaddr = staddr + size
    except ValueError:
      u.verbose(0, "... malformed staddr/size (%s, %s) "
                "for %s, skipping" % (hexstaddr, hexsize, func))
      return 0, 0
  return staddr, enaddr
def parse_args():
  """Parse the command line (-d bumps verbosity) and probe the device."""
  global whichdev
  try:
    opts, leftover = getopt.getopt(sys.argv[1:], "d")
  except getopt.GetoptError as err:
    usage(str(err))  # unrecognized option
  for flag, _ in opts:
    if flag == "-d":
      u.increment_verbosity()
  if leftover:
    usage("unrecognized arg")
  # Verify adb is available before doing anything else.
  u.doscmd("which adb")
  # Ask the helper script which device flavor we're driving.
  devlines = u.docmdlines("whichdevice.sh")
  if len(devlines) != 1:
    u.error("unexpected output from whichdevice.sh")
  whichdev = devlines[0].strip()
  u.verbose(1, "device: %s" % whichdev)
def setup():
  """Perform assorted setups prior to main part of run.

  Verifies required tools (adb, dx, javac) are runnable, queries the
  attached device's flavor and CPU architecture, checks 32/64-bit
  support, and captures the lunch-provided environment variables.
  """
  global abt, apo, whichdev, cpu_arch, dxpath
  # Check to make sure we can run adb, etc
  u.doscmd("which adb")
  rc = u.docmdnf("which dx")
  if rc != 0:
    # No dx on PATH; fall back to the prebuilt copy.
    u.doscmd("which prebuilts/sdk/tools/dx")
    dxpath = "prebuilts/sdk/tools/dx"
  u.doscmd("which javac")
  # Collect device flavor
  lines = u.docmdlines("whichdevice.sh")
  if len(lines) != 1:
    u.error("unexpected output from whichdevice.sh")
  whichdev = lines[0].strip()
  u.verbose(1, "device: %s" % whichdev)
  bitness = 32
  cpu_tup_idx = 0
  if flag_64bit:
    bitness = 64
    cpu_tup_idx = 1
  # Figure out what architecture we're working with,
  # and make sure it supports the requested mode (32 or 64 bit)
  output = u.docmdlines("adb shell uname -m")
  tag = output[0].strip()
  if tag not in uname_to_cpu_arch:
    u.error("internal error: unsupported output %s from "
            "from uname -m -- please update script" % tag)
  # Tuple holds (32-bit arch, 64-bit arch); empty slot => unsupported.
  tup = uname_to_cpu_arch[tag]
  cpu_arch = tup[cpu_tup_idx]
  if not cpu_arch:
    u.error("%d-bit support not available on "
            "this arch (uname -m: %s)" % (bitness, tag))
  # Did we run lunch?
  abt = os.getenv("ANDROID_BUILD_TOP")
  if abt is None:
    u.error("ANDROID_BUILD_TOP not set (did you run lunch?)")
  apo = os.getenv("ANDROID_PRODUCT_OUT")
  if apo is None:
    u.error("ANDROID_PRODUCT_OUT not set (did you run lunch?)")
  u.verbose(1, "ANDROID_PRODUCT_OUT: %s" % apo)
def setup():
  """Perform assorted setups prior to main part of run.

  Confirms the required toolchain binaries exist, determines device
  flavor and CPU architecture (validating 32/64-bit support for the
  requested mode), and reads the lunch environment variables into
  module globals.
  """
  global abt, apo, whichdev, cpu_arch, dxpath
  # Check to make sure we can run adb, etc
  u.doscmd("which adb")
  rc = u.docmdnf("which dx")
  if rc != 0:
    # dx not on PATH -- use the prebuilt location instead.
    u.doscmd("which prebuilts/sdk/tools/dx")
    dxpath = "prebuilts/sdk/tools/dx"
  u.doscmd("which javac")
  # Collect device flavor
  lines = u.docmdlines("whichdevice.sh")
  if len(lines) != 1:
    u.error("unexpected output from whichdevice.sh")
  whichdev = lines[0].strip()
  u.verbose(1, "device: %s" % whichdev)
  bitness = 32
  cpu_tup_idx = 0
  if flag_64bit:
    bitness = 64
    cpu_tup_idx = 1
  # Figure out what architecture we're working with,
  # and make sure it supports the requested mode (32 or 64 bit)
  output = u.docmdlines("adb shell uname -m")
  tag = output[0].strip()
  if tag not in uname_to_cpu_arch:
    u.error("internal error: unsupported output %s from "
            "from uname -m -- please update script" % tag)
  # Index 0 of the tuple is the 32-bit arch name, index 1 the 64-bit one.
  tup = uname_to_cpu_arch[tag]
  cpu_arch = tup[cpu_tup_idx]
  if not cpu_arch:
    u.error("%d-bit support not available on "
            "this arch (uname -m: %s)" % (bitness, tag))
  # Did we run lunch?
  abt = os.getenv("ANDROID_BUILD_TOP")
  if abt is None:
    u.error("ANDROID_BUILD_TOP not set (did you run lunch?)")
  apo = os.getenv("ANDROID_PRODUCT_OUT")
  if apo is None:
    u.error("ANDROID_PRODUCT_OUT not set (did you run lunch?)")
  u.verbose(1, "ANDROID_PRODUCT_OUT: %s" % apo)
def check_btrfs(rdir):
  """Verify that 'rdir' resides on a BTRFS filesystem; die otherwise."""
  # stat -f %T prints the filesystem type name.
  fstype = u.docmdlines("stat -f --printf=%%T %s" % rdir)
  if not fstype:
    u.error("internal error-- could not determine FS type for dir %s" % rdir)
  if fstype[0] != "btrfs":
    u.error("FS type for %s is %s, not btrfs (can't "
            "proceed)" % (rdir, fstype[0]))
def collect_propval(propname, serial):
  """Collect value for a given system property.

  Runs 'getprop' on the device with the given serial and returns the
  value for 'propname', or None when the property is not present.
  """
  # getprop output lines look like "[name]: [value]".
  regex = re.compile(r"\[(\S+)\]\:\s+\[(.+)\]\s*$")
  for propline in u.docmdlines("adb -s %s shell getprop" % serial):
    hit = regex.match(propline)
    if hit and hit.group(1) == propname:
      return hit.group(2)
  return None
def check_btrfs(rdir):
  """Check to make sure that 'rdir' is a BTRFS filesystem.

  Errors out (does not return) if the filesystem type cannot be
  determined or is not btrfs.
  """
  # stat -f %T reports the filesystem type name.
  outlines = u.docmdlines("stat -f --printf=%%T %s" % rdir)
  if not outlines:
    u.error("internal error-- could not determine FS type for dir %s" % rdir)
  if outlines[0] != "btrfs":
    u.error("FS type for %s is %s, not btrfs (can't "
            "proceed)" % (rdir, outlines[0]))
def do_check(subdir):
  """Make sure this repo has the master branch checked out.

  Changes into the repo containing 'subdir', verifies HEAD is 'master'
  (erroring out otherwise), and restores the original directory.
  """
  here = os.getcwd()
  dn = os.path.dirname(subdir)
  dochdir(dn)
  lines = u.docmdlines("git rev-parse --abbrev-ref HEAD")
  # Robustness fix: guard against empty rev-parse output before
  # indexing lines[0].
  if not lines:
    u.error("error: no output from git rev-parse in %s" % dn)
  if lines[0] != "master":
    # Message fix: closing paren was missing in the original.
    u.error("error: repo at %s not on master "
            "branch (on '%s' instead)" % (dn, lines[0]))
  dochdir(here)
def do_check(subdir):
  """Make sure this repo has the master branch checked out.

  Temporarily chdirs into the repo holding 'subdir' to inspect HEAD;
  u.error (which terminates) fires if HEAD is not 'master'.
  """
  here = os.getcwd()
  dn = os.path.dirname(subdir)
  dochdir(dn)
  lines = u.docmdlines("git rev-parse --abbrev-ref HEAD")
  # Robustness fix: empty command output previously caused an
  # IndexError on lines[0].
  if not lines:
    u.error("error: no output from git rev-parse in %s" % dn)
  if lines[0] != "master":
    # Message fix: balanced the unclosed paren after "instead".
    u.error("error: repo at %s not on master "
            "branch (on '%s' instead)" % (dn, lines[0]))
  dochdir(here)
def restore_single_makefile(mfile):
  """Insure that specified makefile is unmunged.

  Checks the file's diff status inside the 'build' repo and restores
  it via 'git checkout' when needed.  Temporarily chdirs into 'build'.
  """
  u.verbose(1, "examining makefile %s for restore" % mfile)
  components = mfile.split("/")
  # Path relative to the 'build' repo (drop the leading component).
  bpath = "/".join(components[1:])
  os.chdir("build")
  # NOTE(review): with --exit-code, git diff exits nonzero when the file
  # differs; presumably docmdlines(..., True) returns no lines on that
  # nonzero exit, which is what triggers the restore below -- confirm
  # against u.docmdlines' failure semantics.
  lines = u.docmdlines("git diff --exit-code --name-status %s" % bpath, True)
  if not lines:
    u.verbose(1, "restoring munged makefile %s" % mfile)
    docmd("git checkout %s" % bpath)
  os.chdir("..")
def parse_args():
  """Command line argument parsing.

  Options:
    -d        increase verbosity
    -T <tool> objdump variant to use
    -x 0x<N>  DIE offset of interest (hex literal)
    -m <path> load module to inspect (must exist)

  Sets flag_offset_to_find, flag_loadmodule, and flag_objdump;
  if -T is omitted, picks gobjdump on Darwin and objdump elsewhere.
  """
  global flag_offset_to_find, flag_loadmodule, flag_objdump
  try:
    optlist, args = getopt.getopt(sys.argv[1:], "dm:x:T:")
  except getopt.GetoptError as err:
    # unrecognized option
    usage(str(err))
  if args:
    usage("unknown extra args")
  for opt, arg in optlist:
    if opt == "-d":
      u.increment_verbosity()
    elif opt == "-T":
      flag_objdump = arg
    elif opt == "-x":
      r = re.compile(r"^0x(\S+)$")
      m = r.match(arg)
      if not m:
        usage(
            "supply argument of the form 0x<hexliteral> to -x option")
      hexdigits = m.group(1)
      try:
        v = int(hexdigits, 16)
      except ValueError:
        usage(
            "supply argument of the form 0x<hexliteral> to -x option")
      u.verbose(
          1, "restricting output to compunit "
          "containing DIE offset %x\n" % v)
      flag_offset_to_find = v
    elif opt == "-m":
      if not os.path.exists(arg):
        usage("argument '%s' to -m option does not exist" % arg)
      flag_loadmodule = arg
  # Make sure at least one function, loadmodule
  if not flag_loadmodule:
    usage("specify loadmodule -m")
  # NOTE(review): a legitimate offset of 0x0 is indistinguishable from
  # "not supplied" here -- consider an 'is None' test if the flag's
  # default is None.
  if not flag_offset_to_find:
    usage("specify offset to find with -x")
  # Pick objdump variant based on Os.
  if not flag_objdump:
    lines = u.docmdlines("uname")
    if not lines:
      u.error("unable to run/interpret 'uname'")
    if lines[0] == "Darwin":
      flag_objdump = "gobjdump"
    else:
      flag_objdump = "objdump"
def process_commit(idx, branchname, githash, comment, summaryf):
  """Process a commit by hash.

  Checks out the commit, writes its log entry and full diff to a /tmp
  report file, then runs the configured test actions (a script and/or
  'go test' on listed packages), recording results via dotestaction.

  Args:
    idx: ordinal position of the commit (used in the file name).
    branchname: branch the commit lives on.
    githash: commit hash to process (checked out before testing).
    comment: one-line commit comment for the report header.
    summaryf: open summary file passed through to dotestaction.
  """
  tag = ""
  if flag_tag:
    tag = ".tag=%s" % flag_tag
  fn = "/tmp/item%d.branch%s%s.commit%s.txt" % (idx, branchname, tag, githash)
  if flag_dryrun:
    u.verbose(
        0, "<dryrun: run %s for %s to %s>" % (flag_script_to_run, githash, fn))
    return
  files_emitted.append(fn)
  # Test actions below run against this checked-out commit.
  doscmd("git checkout %s" % githash)
  try:
    outf = open(fn, "w")
  except IOError as e:
    u.error("unable to open %s: %s" % (fn, e.strerror))
  outf.write("// comment: %s\n" % comment)
  outf.write("//\n")
  lines = u.docmdlines("git log --name-only -1 %s" % githash)
  if not lines:
    u.error("empty output from 'git log --name-only -1 %s'" % githash)
  for line in lines:
    outf.write(line)
    outf.write("\n")
  outf.write(
      "--------------------------------------------------------------\n")
  lines = u.docmdlines("git diff %s^ %s" % (githash, githash))
  if not lines:
    u.error("empty output from 'git diff %s^ %s'" % (githash, githash))
  for line in lines:
    outf.write(line)
    outf.write("\n")
  u.verbose(1, "wrote %d diff lines to %s" % (len(lines), fn))
  # Run configured test actions, appending output to the report.
  if flag_script_to_run:
    dotestaction("bash %s" % flag_script_to_run, githash, outf, idx, summaryf)
  for pk in flag_pkgtests:
    dotestaction("go test %s" % pk, githash, outf, idx, summaryf)
  outf.close()
def find_cmakefiles():
  """Populate the 'cmakefiles' cache with cmake files under cwd."""
  if cmakefiles:
    # Already populated by an earlier call.
    return
  findcmd = ("find . -name \"*.cmake\" -print "
             "-o -name CMakeLists.txt -print")
  for rawline in u.docmdlines(findcmd):
    path = rawline.strip()
    if path:
      u.verbose(2, "adding %s to cmakefiles" % path)
      cmakefiles[path] = 1
def rmvolsnap(volsnapname, which):
  """Remove an existing btrfs snapshot or subvolume.

  Args:
    volsnapname: snapshot/subvolume name (normalized against ssd root).
    which: "snapshot" or "volume"; used only for sanity warnings.
  """
  # Determine /ssd root
  ssdroot = u.determine_btrfs_ssdroot(os.getcwd())
  u.verbose(1, "ssdroot=%s" % ssdroot)
  # Normalize snap name
  volsnapname = normalize(ssdroot, volsnapname)
  # Check for existence
  oldvolsnap = "%s/%s" % (ssdroot, volsnapname)
  if not os.path.exists(oldvolsnap):
    u.error("unable to locate existing %s %s" % (which, oldvolsnap))
  # Determine whether there is a parent uuid: "-" means a plain
  # subvolume, anything else means a snapshot.
  isvol = -1
  showlines = u.docmdlines("sudo btrfs subvolume show %s" % oldvolsnap)
  if not showlines:
    u.error("unable to get subvolume info for %s" % oldvolsnap)
  matcher = re.compile(r"^\s*Parent uuid\:\s+(\S+).*$")
  for line in showlines:
    m = matcher.match(line)
    if m:
      puid = m.group(1)
      if puid == "-":
        isvol = 1
      else:
        isvol = 0
  u.verbose(2, "isvol=%d for %s" % (isvol, oldvolsnap))
  if isvol == -1:
    u.warning("unable to determine snapshot/subvolume status for %s" %
              oldvolsnap)
  elif isvol == 0:
    if which == "volume":
      u.warning("%s appears to be snapshot, not subvolume" % oldvolsnap)
  else:
    if which == "snapshot":
      u.warning("%s appears to be subvolume, not snapshot" % oldvolsnap)
  # Here goes
  rc = u.docmdnf("sudo btrfs subvolume delete %s" % oldvolsnap)
  if rc != 0:
    # Couldn't delete the subvolume. Suggest running lsof
    sys.stderr.write(
        "** deletion failed -- trying to determine open file:\n")
    sys.stderr.write(" lsof +D %s\n" % oldvolsnap)
    u.docmdnf("lsof +D %s\n" % oldvolsnap)
    # Fix: use sys.exit rather than the site-injected interactive
    # 'exit' builtin, which scripts should not rely on.
    sys.exit(1)
  sys.stderr.write("... %s %s deleted\n" % (which, oldvolsnap))
def form_golibargs(driver):
  """Form correct go library args.

  Given the gccgo driver path, locate the lib64 directory containing
  runtime.gox and return the corresponding -L linker arguments.
  """
  installdir = os.path.dirname(os.path.dirname(driver))
  cmd = "find %s/lib64 -name runtime.gox -print" % installdir
  hits = u.docmdlines(cmd)
  if not hits:
    u.error("no output from %s -- bad gccgo install dir?" % cmd)
  libdir = os.path.dirname(hits[0])
  u.verbose(1, "libdir is %s" % libdir)
  return ["-L", libdir]
def find_cmakefiles():
  """Locate files to consider.

  Fills the module-level 'cmakefiles' dict with every *.cmake file and
  CMakeLists.txt found under the current directory.  No-op if the dict
  is already populated.
  """
  if cmakefiles:
    return
  lines = u.docmdlines("find . -name \"*.cmake\" -print "
                       "-o -name CMakeLists.txt -print")
  for line in lines:
    f = line.strip()
    if not f:
      continue
    u.verbose(2, "adding %s to cmakefiles" % f)
    cmakefiles[f] = 1
def form_golibargs(driver):
  """Form correct go library args.

  Starting from the gccgo driver path, finds the lib64 directory that
  contains runtime.gox and returns ["-L", <that dir>] for the link.
  """
  # Two dirnames: strip the binary name, then the bin/ directory.
  ddir = os.path.dirname(driver)
  bdir = os.path.dirname(ddir)
  cmd = "find %s/lib64 -name runtime.gox -print" % bdir
  lines = u.docmdlines(cmd)
  if not lines:
    u.error("no output from %s -- bad gccgo install dir?" % cmd)
  line = lines[0]
  rdir = os.path.dirname(line)
  u.verbose(1, "libdir is %s" % rdir)
  return ["-L", rdir]
def has_dynamic_section(tgt):
  """Figure out if a given executable has a dynamic section."""
  u.verbose(1, "running objdump -h %s" % tgt)
  # Section-header rows: index, name, then size/address columns.
  section_re = re.compile(r"^\s*\d+\s+\.dynamic\s.+$")
  for hdrline in u.docmdlines("objdump -h %s" % tgt):
    if section_re.match(hdrline):
      u.verbose(1, "%s has .dynamic section" % tgt)
      return True
  u.verbose(1, "%s does not have a .dynamic section" % tgt)
  return False
def has_dynamic_section(tgt):
  """Figure out if a given executable has a dynamic section.

  Returns True when 'objdump -h' lists a .dynamic section for tgt,
  False otherwise.
  """
  u.verbose(1, "running objdump -h %s" % tgt)
  lines = u.docmdlines("objdump -h %s" % tgt)
  # Section table rows begin with an index number then the name.
  dreg = re.compile(r"^\s*\d+\s+\.dynamic\s.+$")
  for line in lines:
    m = dreg.match(line)
    if m:
      u.verbose(1, "%s has .dynamic section" % tgt)
      return True
  u.verbose(1, "%s does not have a .dynamic section" % tgt)
  return False
def rmvolsnap(volsnapname, which):
  """Remove an existing btrfs snapshot or subvolume.

  Sanity-checks that the object's snapshot-vs-subvolume status matches
  'which' (warning on mismatch), then deletes it; on failure, suggests
  an lsof command and terminates.
  """
  # Determine /ssd root
  ssdroot = u.determine_btrfs_ssdroot(os.getcwd())
  u.verbose(1, "ssdroot=%s" % ssdroot)
  # Normalize snap name
  volsnapname = normalize(ssdroot, volsnapname)
  # Check for existence
  oldvolsnap = "%s/%s" % (ssdroot, volsnapname)
  if not os.path.exists(oldvolsnap):
    u.error("unable to locate existing %s %s" % (which, oldvolsnap))
  # Parent uuid of "-" indicates a subvolume; otherwise a snapshot.
  isvol = -1
  showlines = u.docmdlines("sudo btrfs subvolume show %s" % oldvolsnap)
  if not showlines:
    u.error("unable to get subvolume info for %s" % oldvolsnap)
  matcher = re.compile(r"^\s*Parent uuid\:\s+(\S+).*$")
  for line in showlines:
    m = matcher.match(line)
    if m:
      puid = m.group(1)
      if puid == "-":
        isvol = 1
      else:
        isvol = 0
  u.verbose(2, "isvol=%d for %s" % (isvol, oldvolsnap))
  if isvol == -1:
    u.warning("unable to determine snapshot/subvolume status for %s" %
              oldvolsnap)
  elif isvol == 0:
    if which == "volume":
      u.warning("%s appears to be snapshot, not subvolume" % oldvolsnap)
  else:
    if which == "snapshot":
      u.warning("%s appears to be subvolume, not snapshot" % oldvolsnap)
  # Here goes
  rc = u.docmdnf("sudo btrfs subvolume delete %s" % oldvolsnap)
  if rc != 0:
    # Couldn't delete the subvolume. Suggest running lsof
    sys.stderr.write("** deletion failed -- trying to determine open file:\n")
    sys.stderr.write(" lsof +D %s\n" % oldvolsnap)
    u.docmdnf("lsof +D %s\n" % oldvolsnap)
    # Fix: sys.exit instead of the interactive-only 'exit' builtin.
    sys.exit(1)
  sys.stderr.write("... %s %s deleted\n" % (which, oldvolsnap))
def perform(): """Main driver routine.""" # Step 1: dump only compilation unit info. cmd = ("%s --dwarf=info " "--dwarf-depth=0 %s" % (flag_objdump, flag_loadmodule)) u.verbose(1, "running: %s" % cmd) lines = u.docmdlines(cmd) cre = re.compile(r"^\s*Compilation Unit \@ offset 0x(\S+)\:\s*$") units = 0 lo = -1 hi = -1 maxoff = -1 selectoff = -1 for line in lines: m = cre.match(line) if m: binoff = int(m.group(1), 16) if binoff <= flag_offset_to_find: lo = units selectoff = binoff if binoff > flag_offset_to_find: hi = units break maxoff = binoff units += 1 if units == 0 or lo == -1: u.warning("no DWARF compile units in %s, dump aborted" % flag_loadmodule) return if hi == -1: u.warning("could not find CU with offset higher than %x; " "dumping last CU at offset %x" % (flag_offset_to_find, maxoff)) # Step 2: issue the dump cmd = ("%s --dwarf=info " "--dwarf-start=%d %s" % (flag_objdump, selectoff, flag_loadmodule)) u.verbose(1, "dump cmd is: %s" % cmd) args = shlex.split(cmd) mypipe = subprocess.Popen(args, stdout=subprocess.PIPE) cure = re.compile(r"^.+\(DW_TAG_compile_unit\).*") ncomps = 0 while True: line = mypipe.stdout.readline() if not line: break m = cure.match(line) if m: ncomps += 1 if ncomps > 1: break sys.stdout.write(line)
def collect_files_to_symbolize(location):
  """Generate list of OAT files to symbolize."""
  # The find sweeps up everything, including things we don't want
  # to look at (ex: boot.art) -- filter those back out here.
  bootart = re.compile(r"^.+@boot\.art$")
  found = u.docmdlines("adb shell find %s -type f -print" % location)
  return [f.strip() for f in found if not bootart.match(f.strip())]
def perform():
  """Top level driver routine: check every nested repo, then fetch each."""
  if not os.path.exists(".git"):
    u.error("unable to locate top level .git in current dir")
  # find -depth lists deepest entries first; reverse for shallowest-first.
  repos = u.docmdlines("find . -depth -name .git -print")
  repos.reverse()
  for repo in repos:
    u.verbose(1, "checking %s" % repo)
    do_check(repo)
  for repo in repos:
    u.verbose(1, "visiting %s" % repo)
    do_fetch(repo)
def collect_files_to_symbolize(location): """Generate list of OAT files to symbolize.""" # This will hoover up everything, including things we don't want # to look at (ex: boot.art) lines = u.docmdlines("adb shell find %s -type f -print" % location) files = [] regex = re.compile(r"^.+@boot\.art$") for line in lines: afile = line.strip() if regex.match(afile): continue files.append(afile) return files
def examine(afile): """Dump go exports for specified file.""" objfile = afile arcmd = "ar t %s" % afile # Run 'ar' command, suppressing error output. If # if succeeds, then continue on the ar path, otherwise # treat input as an object. if u.doscmd(arcmd, True, True): # Handle archives lines = u.docmdlines(arcmd, True) if not lines: u.warning("skipping %s, can't index archive %s", afile) return # Extract elem from archive elem = lines[0].strip() u.verbose(1, "%s contains %s" % (afile, elem)) rc = u.docmdnf("ar x %s %s" % (afile, elem)) if rc: u.warning("skipping %s, can't extract object" % afile) return objfile = elem gexptemp = tempfile.NamedTemporaryFile(mode="w", prefix="go_export", delete=True) # Handle objects cmd = ("objcopy -O binary --only-section=.go_export " "--set-section-flags .go_export=alloc %s " "%s" % (objfile, gexptemp.name)) rc = u.docmdnf(cmd) if rc: u.warning("skipping %s, can't extract export " "data (cmd failed: %s)" % (objfile, cmd)) return try: inf = open(gexptemp.name, "rb") except IOError as e: u.error("unable to open tempfile %s: " "%s" % (gexptemp.name, e.strerror)) print "== %s ==" % afile lines = inf.readlines() if not lines: u.warning("skipping %s, no .go_export section present" % objfile) for line in lines: print line.strip() inf.close() if objfile != afile: os.unlink(objfile)
def install_shim(scriptpath): """Install shim into gccgo install dir.""" # Make sure we're in the right place (gccgo install dir) if not os.path.exists("bin"): usage("expected to find bin subdir") if not os.path.exists("lib64/libgo.so"): usage("expected to find lib64/libgo.so") if not os.path.exists("bin/gccgo"): usage("expected to find bin/gccgo") # Copy script, or update if already in place. docmd("cp %s bin" % scriptpath) sdir = os.path.dirname(scriptpath) docmd("cp %s/script_utils.py bin" % sdir) # Test to see if script installed already cmd = "file bin/gccgo" lines = u.docmdlines(cmd) if not lines: u.error("no output from %s -- bad gccgo install dir?" % cmd) else: reg = re.compile(r"^.+ ELF .+$") m = reg.match(lines[0]) if not m: u.warning("wrapper appears to be installed already in this dir") return # Move aside the real gccgo binary docmd("mv bin/gccgo bin/gccgo.real") # Emit a script into gccgo sys.stderr.write("emitting wrapper script into bin/gccgo\n") if not flag_dryrun: try: with open("./bin/gccgo", "w") as wf: here = os.getcwd() wf.write("#!/bin/sh\n") wf.write("P=%s/bin/gollvm-wrap.py\n" % here) wf.write("exec python ${P} \"$@\"\n") except IOError: u.error("open/write failed for bin/gccgo wrapper") docmd("chmod 0755 bin/gccgo") # Success u.verbose(0, "wrapper installed successfully") # Done return 0
def visit_branch(b):
  """Rebase the given branch against its upstream, then delete it."""
  docmd("git checkout %s" % b)
  # A branch with no upstream configured can't be rebased -- skip it.
  upstream = u.docmdlines(
      "git rev-parse --symbolic-full-name --abbrev-ref @{u}", True)
  if not upstream:
    u.warning("no upstream branch set for branch %s, skipping" % b)
    return
  docmd("git rebase")
  docmd("git checkout master")
  doscmd("git branch -d %s" % b, True)
def disas(func, addr, tgt):
  """Disassemble a specified function.

  Looks up the function's address range in the symbol table and runs
  objdump over just that range; a (0, 0) result means the symbol was
  not found and the function is skipped with a message.
  """
  staddr, enaddr = grab_addr_from_symtab(func, addr, tgt)
  if staddr == 0:
    u.verbose(0, "... could not find %s in "
              "output of objdump, skipping" % what(func, addr))
    return
  cmd = ("objdump --no-show-raw-insn --wide -dl "
         "--start-address=0x%x "
         "--stop-address=0x%x %s" % (staddr, enaddr, tgt))
  if flag_dwarf_cu and not flag_dryrun:
    # Capture output so the DWARF post-processing pass can consume it.
    lines = u.docmdlines(cmd)
    dodwarf(lines, tgt)
  else:
    docmd(cmd)
def locate_binaries(clangcmd):
  """Locate executables of interest.

  Populates the module-level 'toolpaths' dict with paths for clang,
  opt, llc, and llvm-dis, derived from 'clangcmd' (an explicit path or
  a bare name resolved via 'which'), then checks each for existence
  and execute permission.
  """
  global toolpaths
  # Figure out what to invoke
  u.verbose(1, "clangcmd is %s" % clangcmd)
  toolpaths["clang"] = clangcmd
  reg = re.compile(r"(^.*)/(.*)$")
  m = reg.match(clangcmd)
  bindir = None
  clcmd = None
  if m:
    bindir = m.group(1)
    clcmd = m.group(2)
  else:
    if not flag_dryrun:
      lines = u.docmdlines("which %s" % clangcmd)
      if not lines:
        u.error("which %s returned empty result" % clangcmd)
      clangbin = lines[0].strip()
      bindir = os.path.dirname(clangbin) + "/"
      clcmd = os.path.basename(clangbin)
      u.verbose(1, "clang bindir is %s" % bindir)
    else:
      bindir = ""
      # Fix: clcmd was left None here, making the os.path.join below
      # raise TypeError in dryrun mode with an un-slashed clangcmd.
      clcmd = clangcmd
  toolpaths["clang"] = os.path.join(bindir, clcmd)
  toolpaths["opt"] = os.path.join(bindir, "opt")
  toolpaths["llc"] = os.path.join(bindir, "llc")
  toolpaths["llvm-dis"] = os.path.join(bindir, "llvm-dis")
  if flag_dryrun:
    return
  # If clang is versioned, then version llvm-dis.
  # Fix: raw string -- the old non-raw literal relied on the invalid
  # "\-" escape sequence.
  reg2 = re.compile(r"^.+(-\d\.\d)$")
  m2 = reg2.match(clangcmd)
  if m2:
    toolpaths["llvm-dis"] = os.path.join(bindir, "llvm-dis%s" % m2.group(1))
  # Check for existence and executability
  tocheck = ["clang", "opt", "llc", "llvm-dis"]
  for tc in tocheck:
    path = toolpaths[tc]
    if not os.path.exists(path):
      u.warning("can't access binary %s at path %s" % (tc, path))
    if not os.access(path, os.X_OK):
      u.warning("no execute permission on binary %s" % path)
def install_blob(blob, devdir):
    """Unpack and install a single blob located in 'devdir'.

    The blob must be a gzipped tar archive containing exactly one file;
    that file is extracted and handed to the installer script.
    """
    # Determine blob name
    blobpath = "%s/%s" % (devdir, blob)
    contents = u.docmdlines("tar tzf %s" % blobpath)
    if len(contents) != 1:
        u.error("error while examining blob %s: expected single file" % blob)
    # Unpack
    u.verbose(1, "unpacking blob %s" % blob)
    u.docmd("tar xzf %s" % blobpath)
    # Invoke installer on the single extracted file.
    u.docmd("blobinstall.py %s" % contents[0])
def do_gccgo_clean():
    """Clean a gccgo build dir.

    Must be run from a GCC build directory (checked via config.log).
    Cleans every libgo directory found, then removes the gotools
    binaries (unless this is a dry run).
    """
    if not os.path.exists("config.log"):
        u.error("no 'config.log' here -- needs to be run in GCC build dir")
    # Process libgo dirs in reverse of the 'find -depth' ordering.
    for lgd in reversed(u.docmdlines("find . -depth -name libgo -print")):
        u.verbose(1, "visiting %s" % lgd)
        do_clean(lgd)
    for tool in ("vet", "test2json", "buildid", "go", "gofmt", "cgo"):
        p = "gotools/" + tool
        if not flag_dryrun and os.path.exists(p):
            u.verbose(1, "cleaning %s" % p)
            os.unlink(p)
def find_ssdroots():
    """Return a list of all BTRFS filesystems mounted.

    Parses 'mount -l -t btrfs' output; errors out if no BTRFS mount
    points can be found at all.
    """
    matcher = re.compile(r"^\S+ on (\S+) ")
    rootlist = []
    for entry in u.docmdlines("mount -l -t btrfs"):
        m = matcher.match(entry)
        if m:
            rootlist.append(m.group(1))
        else:
            u.warning("warning: pattern match failed for "
                      "output of mount -l: %s" % entry)
    if not rootlist:
        u.error("unable to locate any BTRFS mounts "
                "from 'mount -l -t btrfs' -- aborting")
    return rootlist
def visit(filename):
    """Examine specified file.

    Reads the ELF .comment section of 'filename' via readelf, records
    any GCC/clang compiler version stamps found there in the global
    'versioncount' table, and prints a per-file summary line.
    """
    if not os.path.exists(filename):
        u.warning("unable to access file '%s', skipping" % filename)
        return
    u.verbose(1, "about to invoke readelf")
    lines = u.docmdlines("readelf -p .comment %s" % filename, True)
    if not lines:
        u.warning("unable to extract comment from %s, skipping" % filename)
        return
    # readelf emits entries of the form "  [ N]  <comment text>".
    matcher1 = re.compile(r"^\s*\[\s*\d+\]\s+(\S.+)$")
    matcher2 = re.compile(r"^GCC\:.+$")
    matcher3 = re.compile(r"^clang version \d.*$")
    res = ""
    sep = ""
    found = False
    comms = {}
    for line in lines:
        u.verbose(2, "line is %s" % line)
        m = matcher1.match(line)
        if not m:
            continue
        found = True
        comm = m.group(1).strip()
        u.verbose(1, "comm is %s" % comm)
        if comm in comms:
            # Skip duplicate comment strings within one file.
            continue
        comms[comm] = 1
        # A comment is either a GCC stamp ("GCC:...") or a clang stamp
        # ("clang version N..."), never both, so the two regex cases
        # share one body (this was previously duplicated verbatim).
        if matcher2.match(comm) or matcher3.match(comm):
            versioncount[comm] += 1
            res += sep + comm
            sep = ", "
    if not found:
        res = "<comment not found>"
        versioncount[res] += 1
    elif not res:
        res = "<unknown>"
        versioncount[res] += 1
    # Parenthesized form works under both Python 2 and Python 3; the
    # original bare 'print' statement is a syntax error on Python 3.
    print("%s: %s" % (filename, res))
def determine_objdump(filename):
    """Figure out what flavor of object dumper we should use.

    Inspects 'file' output for the binary and sets the global
    'objdump_cmd' to the matching architecture-specific objdump;
    errors out if no pattern matches.
    """
    global objdump_cmd
    flavor_table = [
        (re.compile(r".*ELF.+ARM aarch64"), "aarch64-linux-android-objdump"),
        (re.compile(r".*ELF.+ARM"), "arm-linux-androideabi-objdump"),
        (re.compile(r".*ELF.+x86\-64"), "objdump"),
        (re.compile(r".*ELF.+Intel"), "objdump"),
    ]
    for line in u.docmdlines("file %s" % filename):
        for pattern, cmd in flavor_table:
            if pattern.match(line):
                objdump_cmd = cmd
                return
    u.error("unable to determine objdump flavor to use on %s" % filename)
def disas(func, repo, tag, fn, ppo, perf_work):
    """Disassemble a specified function.

    Locates 'func' in the symbol table of the Go compiler binary under
    'repo', then queues (via ppo_append) an objdump disassembly of that
    address range plus a pprof disassembly, recording both output files
    in the global 'generated_reports' table.
    """
    # Symbol table line format, e.g.:
    # 00691d40 g F .text 00000632 ssa.applyRewrite
    symrx = re.compile(r"^(\S+)\s.+\s(\S+)\s+(\S+)$")
    tgt = "%s/pkg/tool/linux_amd64/compile" % repo
    u.verbose(1, "looking for %s in output of objdump -t %s" % (func, tgt))
    if flag_dryrun:
        return
    hexstaddr = None
    hexsize = None
    for line in u.docmdlines("objdump -t %s" % tgt):
        m = symrx.match(line)
        if m and m.group(3) == func:
            # Found
            hexstaddr, hexsize = m.group(1), m.group(2)
            break
    if not hexstaddr:
        u.verbose(0, "... could not find %s in "
                  "output of objdump, skipping" % func)
        return
    try:
        staddr = int(hexstaddr, 16)
        enaddr = staddr + int(hexsize, 16)
    except ValueError:
        u.verbose(0, "... malformed staddr/size (%s, %s) "
                  "for %s, skipping" % (hexstaddr, hexsize, func))
        return
    asm_file = "%s/asm%d.%s.txt" % (perf_work, fn, tag)
    ppo_append(ppo,
               "objdump -dl --start-address=0x%x "
               "--stop-address=0x%x %s" % (staddr, enaddr, tgt),
               asm_file)
    generated_reports[asm_file] = 1
    pprof_file = "%s/pprofdis%d.%s.txt" % (perf_work, fn, tag)
    ppo_append(ppo, "pprof --disasm=%s perf.data.%s " % (func, tag),
               pprof_file)
    generated_reports[pprof_file] = 1