def __setup_value(self):
    """Clone (or archive-export) the git repository into the experiment's
    temporary directory and return a Directory for the working tree."""
    # The clone URL may itself be a parameter object (exposes .path);
    # register it as a subobject and use its filesystem path.
    if "path" in dir(self.__clone_url):
        self.subobjects["clone-url"] = self.__clone_url
        self.__clone_url = self.__clone_url.path
    logging.info("copying git archive %s", self.__clone_url)
    with self.tmp_directory as d:
        os.mkdir(self.name)
        if self.__shallow:
            # Shallow mode: export only the tree of one ref, no history.
            cmd = "cd '%s' && git archive --format=tar --remote=%s %s | tar x"
            args = (self.name, self.__clone_url, self.__ref)
        else:
            cmd = "git clone %s %s"
            args = (self.__clone_url, self.name)
        (lines, ret) = shell(cmd, *args)
        if ret != 0:
            print("\n".join(lines))
            sys.exit(-1)
        if not self.__shallow:
            # Full clone: additionally fetch the requested ref and check
            # it out (gc first to keep the copied repository compact).
            cmd = "cd %s && git gc && git fetch %s %s && git checkout FETCH_HEAD"
            args = (self.name, self.__clone_url, self.__ref)
            (lines, ret) = shell(cmd, *args)
            if ret != 0:
                print("\n".join(lines))
                sys.exit(-1)
        return Directory(os.path.abspath(self.name))
def call_configure(self, path):
    """Run the project-specific configure step inside `path`.

    Raises RuntimeError when the project is not one of the known ones.
    """
    if self.project_name() == "postgresql":
        shell("cd %s; mkdir -p build; cd build; ../configure --enable-depend", path)
    elif self.project_name() in ("musl", "bash"):
        shell_failok("cd %s; ./configure", path)
    elif self.project_name() in ("samba",):
        # BUG FIX: was `in ("samba")` -- a plain string, i.e. a substring
        # test, not tuple membership.
        # Samba does not do optimizations if the argv[0] of the compiler
        # is unknown. The default is -O2 for gcc. Therefore, we also use that.
        shell_failok("cd %s; ADDITIONAL_CFLAGS=-O2 ./buildtools/bin/waf configure", path)
    elif self.project_name() in ("cpython",):
        shell("cd %s; mkdir -p build build/Modules;", path)
        shell("cd %s; cp -u Modules/Setup.dist build/Modules/Setup", path)
        shell("cd %s; cd build; ../configure", path)
    elif self.project_name() in ('mbedtls',):
        # BUG FIX: was `in ('mbedtls')` -- string, not a one-element tuple.
        shell("cd %s; mkdir -p build; cd build; cmake .. -DCMAKE_C_COMPILER=$CC -DENABLE_PROGRAMS=OFF", path)
    elif self.project_name() in ('lua',):
        # This is an ugly hack to make it possible to override the
        # CC variable from the outside.
        with open("%s/makefile" % path) as fd:
            content = fd.readlines()
        # BUG FIX: `content += "\nCC=..."` extended the list character by
        # character (only worked by accident through "".join); append the
        # line as one string.
        content.append("\nCC=%s\n" % (os.environ["CC"]))
        with open("%s/makefile" % path, "w") as fd:
            fd.write("".join(content))
    else:
        raise RuntimeError("Not a valid project")
def __setup_value(self):
    """Clone (or archive-export) the git repository into the experiment's
    temporary directory and return a Directory for the working tree."""
    # The clone URL may itself be a parameter object (exposes .path);
    # register it as a subobject and use its filesystem path.
    if "path" in dir(self.__clone_url):
        self.subobjects["clone-url"] = self.__clone_url
        self.__clone_url = self.__clone_url.path
    logging.info("copying git archive %s", self.__clone_url)
    with self.tmp_directory as d:
        os.mkdir(self.name)
        if self.__shallow:
            # Shallow mode: export only the tree of one ref, no history.
            cmd = "cd '%s' && git archive --format=tar --remote=%s %s | tar x"
            args = (self.name, self.__clone_url, self.__ref)
        else:
            cmd = "git clone %s %s"
            args = (self.__clone_url, self.name)
        (lines, ret) = shell(cmd, *args)
        if ret != 0:
            print("\n".join(lines))
            sys.exit(-1)
        if not self.__shallow:
            # Full clone: additionally fetch the requested ref and check
            # it out (gc first to keep the copied repository compact).
            cmd = "cd %s && git gc && git fetch %s %s && git checkout FETCH_HEAD"
            args = (self.name, self.__clone_url, self.__ref)
            (lines, ret) = shell(cmd, *args)
            if ret != 0:
                print("\n".join(lines))
                sys.exit(-1)
        return Directory(os.path.abspath(self.name))
def shell(self, command, *args):
    """Like :func:`~versuchung.execute.shell`, but logs the start and stop
    of the process in the ``".events"``-file."""
    # Quote each argument for the *logged* command line (the actual call
    # below passes the raw args through).  BUG FIX: the old replacement
    # "\'" is identical to "'" and escaped nothing; use the POSIX-shell
    # idiom '\'' to escape embedded single quotes.
    _args = ["'%s'" % x.replace("'", "'\\''") for x in args]
    _command = command % tuple(_args)
    start = self.event("process started", _command)
    shell(command, *args)
    stop = self.event("process finished", _command)
    self.event("process duration", _command, stop - start)
def shell(self, command, *args):
    """Like :func:`~versuchung.execute.shell`, but logs the start and stop
    of the process in the ``".events"``-file."""
    # Quote each argument for the *logged* command line (the actual call
    # below passes the raw args through).  BUG FIX: the old replacement
    # "\'" is identical to "'" and escaped nothing; use the POSIX-shell
    # idiom '\'' to escape embedded single quotes.
    _args = ["'%s'" % x.replace("'", "'\\''") for x in args]
    _command = command % tuple(_args)
    start = self.event("process started", _command)
    shell(command, *args)
    stop = self.event("process finished", _command)
    self.event("process duration", _command, stop - start)
def call_reconfigure(self, path):
    """Re-run the configuration step of an already configured project tree."""
    project = self.project_name()
    if project in ('lua', 'mbedtls'):
        # These two are simply configured from scratch again.
        self.call_configure(path)
    if project in ('cpython',):
        shell("cd %s; mkdir -p build/Modules; cp -u Modules/Setup.dist build/Modules/Setup", path)
        shell_failok("cd %s/build; make config.status;", path)
    if project == "postgresql":
        shell_failok("cd %s/build; make config.status", path)
    if project == "bash":
        shell_failok("cd %s; make config.status", path)
    if project == "samba":
        shell_failok("cd %s; ADDITIONAL_CFLAGS=-O2 ./buildtools/bin/waf reconfigure", path)
def gather_global_hashes(local_hashes, occurred_errors):
    # Run the external clang-hash-global tool once per locally hashed
    # symbol; failures are recorded per commit instead of raised.
    # NOTE(review): `src_path`, `self` and `commit` are free variables --
    # this function only works as a closure inside the surrounding
    # method; it cannot be used standalone.
    # NOTE(review): `global_hashes` is never populated, so the returned
    # dict is always empty -- presumably the tool output was meant to be
    # collected here.  TODO confirm.
    global_hashes = {}
    for symbol in local_hashes:
        symbol = symbol.split(':')[1]  # Remove the prefix ('function:' etc.)
        try:
            shell("cd %s; %s/clang-hash-global --definition %s",
                  src_path, self.inputs.clang_hash.path, symbol)
        except Exception as e:
            occurred_errors[commit[0]] = e  # don't raise exception
    return global_hashes
def path(self):
    """Decompress file into the temporary directory and return path to
    this location."""
    assert self.tmp_directory is not None, \
        "Can gunzip file only as part of an active experiment"
    path = File.path.fget(self)
    # BUG FIX: `path.rstrip(".gz")` strips any run of the characters
    # '.', 'g', 'z' from the end (e.g. "logz.gz" -> "lo"); cut the
    # suffix explicitly instead.
    if path.endswith(".gz"):
        base = os.path.basename(path[:-len(".gz")])
    else:
        base = os.path.basename(path)
    filename = os.path.join(self.tmp_directory.path, self.name + "_" + base)
    # Decompress only once; later calls reuse the cached copy.
    if not os.path.exists(filename):
        shell("gunzip < %s > %s", path, filename)
    return filename
def path(self):
    """Decompress file into the temporary directory and return path to
    this location."""
    assert self.tmp_directory is not None, \
        "Can gunzip file only as part of an active experiment"
    path = File.path.fget(self)
    # BUG FIX: `path.rstrip(".gz")` strips any run of the characters
    # '.', 'g', 'z' from the end (e.g. "logz.gz" -> "lo"); cut the
    # suffix explicitly instead.
    if path.endswith(".gz"):
        base = os.path.basename(path[:-len(".gz")])
    else:
        base = os.path.basename(path)
    filename = os.path.join(self.tmp_directory.path, self.name + "_" + base)
    # Decompress only once; later calls reuse the cached copy.
    if not os.path.exists(filename):
        shell("gunzip < %s > %s", path, filename)
    return filename
def __references(self, prefix_filter=None, regex_filter=None):
    """List remote references as a dict {refname: commit-hash}.

    prefix_filter: keep only refs starting with this prefix, which is
        also stripped from the returned names.
    regex_filter: keep only (shortened) refnames matching this regex.
    Raises RuntimeError when `git ls-remote` fails.
    """
    (lines, ret) = shell("git ls-remote %s 'refs/*'", self.__clone_url,
                         stderr=sys.stderr)
    # BUG FIX: dropped the dead `or lines == 0` -- that compared the
    # list of output lines against an int and could never be True.
    if ret != 0:
        print("\n".join(lines))
        raise RuntimeError("Could not list references in repository")
    ret = {}
    for line in lines:
        commit_hash, refname = line.strip().split("\t", 1)
        # Apply regex and prefix filter
        if prefix_filter:
            if not refname.startswith(prefix_filter):
                continue
            # Dereferenced tag entries ("<tag>^{}") duplicate the tag; skip.
            if refname.startswith("refs/tags/") and refname.endswith("^{}"):
                continue
            # Shorten the Prefix
            refname = refname[len(prefix_filter):]
        if regex_filter and not re.match(regex_filter, refname):
            continue
        ret[refname] = commit_hash
    return ret
def __setup_value(self):
    """Extract the (possibly parameter-provided) tar archive into the
    experiment's temporary directory; return the extracted Directory."""
    # The filename may itself be a parameter object (exposes .path).
    if "path" in dir(self.__filename):
        self.subobjects["filename"] = self.__filename
        self.__filename = self.__filename.path
    self.__filename = os.path.abspath(self.__filename)
    fn = self.__filename
    # Pick tar's decompression flag from the file name.
    extract_mode = ""
    if "tar.gz" in fn or "tgz" in fn:
        extract_mode = "z"
    if "tar.bz2" in fn or "bzip2" in fn:
        extract_mode = "j"
    if "tar.xz" in fn or "txz" in fn:
        extract_mode = "J"
    with self.tmp_directory as d:
        try:
            os.mkdir(self.name)
        except OSError:
            # ignore errors if the directory should already exist for some reason
            pass
        with Directory(self.name) as d2:
            dirname = os.path.abspath(".")
            (out, ret) = shell("tar %sxvf %s", extract_mode, fn)
            if ret != 0:
                raise RuntimeError("Extracting of %s failed" % fn)
            # If the whole archive lives in a single top-level directory
            # (the shortest "/"-terminated entry that prefixes all
            # others), return that directory instead of the extraction
            # root.
            cd = None
            for line in out:
                if (cd == None or len(line) < len(cd)) and line.endswith("/"):
                    cd = line
            if cd and all([x.startswith(cd) for x in out]):
                dirname = cd
            return Directory(os.path.abspath(dirname))
def __setup_value(self):
    """Extract the (possibly parameter-provided) tar archive into the
    experiment's temporary directory; return the extracted Directory."""
    # The filename may itself be a parameter object (exposes .path).
    if "path" in dir(self.__filename):
        self.subobjects["filename"] = self.__filename
        self.__filename = self.__filename.path
    self.__filename = os.path.abspath(self.__filename)
    fn = self.__filename
    # Pick tar's decompression flag from the file name.
    # BUG FIX: gzip archives used mode "x" with the template
    # "tar %szvf", i.e. "tar xzvf" for gzip but the broken "tar jzvf"
    # for bzip2.  Use the compression letter as the mode and a plain
    # "tar %sxvf" template, like the sibling implementation; also
    # support xz for consistency.
    extract_mode = ""
    if "tar.gz" in fn or "tgz" in fn:
        extract_mode = "z"
    if "tar.bz2" in fn or "bzip2" in fn:
        extract_mode = "j"
    if "tar.xz" in fn or "txz" in fn:
        extract_mode = "J"
    with self.tmp_directory as d:
        try:
            os.mkdir(self.name)
        except OSError:
            # ignore errors if the directory should already exist for some reason
            pass
        with Directory(self.name) as d2:
            dirname = os.path.abspath(".")
            (out, ret) = shell("tar %sxvf %s", extract_mode, fn)
            if ret != 0:
                raise RuntimeError("Extracting of %s failed" % fn)
            # If the whole archive lives in a single top-level directory,
            # return that directory instead of the extraction root.
            cd = None
            for line in out:
                if (cd == None or len(line) < len(cd)) and line.endswith("/"):
                    cd = line
            if cd and all([x.startswith(cd) for x in out]):
                dirname = cd
            return Directory(os.path.abspath(dirname))
def rebuild(self, path, info, fail_ok=False):
    """Rebuild the project in `path` once and record measurements in `info`.

    Depending on self.mode, also collects ccache statistics and/or the
    clang-hash plugin log.  Returns the (mutated) `info` dict.  With
    fail_ok=False a failed build re-raises CommandFailed.
    """
    if "ccache" in self.mode.value:
        # Reset ccache statistics so the hit/miss counts below reflect
        # only this rebuild.
        shell("ccache --zero-stats")
    if "clang-hash" in self.mode.value:
        # The clang-hash plugin reports into this log file via the
        # environment variables below.
        hash_log = tempfile.NamedTemporaryFile()
        os.environ["CLANG_HASH_LOGFILE"] = hash_log.name
        os.environ["CLANG_HASH_OUTPUT_DIR"] = path  # TODO: create new mode 'collect'
        os.environ["HASH_VERBOSE"] = '1'
    # Recompile!
    start_time = time.time()
    try:
        ret = self.call_make(path)
    except CommandFailed as e:
        if not fail_ok:
            raise
        else:
            # Record the failure but keep going.
            info['failed'] = True
            ret = ("", 1)
    end_time = time.time()
    # Build time is stored in nanoseconds.
    build_time = int((end_time - start_time) * 1e9)
    info['build-time'] = build_time
    #info['build-log'] = ret[0]
    # Record Cache misses and hits
    if "ccache" in self.mode.value:
        ccache_hits, ccache_misses, log = self.ccache_hits()
        info['ccache-log'] = log
        info['ccache-hits'] = ccache_hits
        info['ccache-misses'] = ccache_misses
    if "clang-hash" in self.mode.value:
        log = hash_log.read()
        info['clang-hash-log'] = log
        # Presumably the plugin writes one 'H' per hash hit and one 'M'
        # per miss -- confirm against the plugin's logging format.
        info['clang-hash-hits'] = log.count("H")
        info['clang-hash-misses'] = log.count("M")
        hash_log.close()
    logging.info("Rebuild done[%s]: %s s; failed=%s",
                 info.get("filename") or info.get("commit"),
                 build_time / 1e9,
                 info.get("failed", False))
    return info
def checkout(self, ref):
    """Check out `ref` in the working tree and refresh the cached hash."""
    cmd = "cd '%s' && git checkout '%s'" % (self.value.path, ref)
    (lines, ret) = shell(cmd)
    # BUG FIX: dropped the dead `or lines == 0` -- it compared a list
    # against an int and could never be True (git checkout reports on
    # stderr, so empty stdout is normal).
    if ret != 0:
        print("\n".join(lines))
        sys.exit(-1)
    self.__ref = ref
    self.checkout_hash()
def run(self):
    """Build the clang-hash plugin, then measure a fresh build of the
    project followed by one incremental rebuild per touched source file."""
    # Determine the mode
    modes = ('normal', 'ccache', 'clang-hash')
    if not self.mode.value in modes:
        raise RuntimeError("Mode can only be one of: %s" % modes)
    logging.info("Build the Clang-Hash Plugin")
    with self.clang_hash as cl_path:
        shell("cd %s; mkdir build; cd build; cmake ..; make -j 4", cl_path)
    # Project name
    logging.info("Cloning project... %s", self.project_name())
    self.build_info = {"project-name": self.project_name(),
                       "commit-hash": self.metadata["project-hash"],
                       'builds': []}
    with self.project as src_path:
        # First, we redirect all calls to the compiler to our
        # clang hash wrapper
        self.setup_compiler_paths(cl_path)
        # Count the number of files
        sources = list(self.get_sources(src_path))
        nr_files = len(sources)
        logging.info("#files: %d", nr_files)
        self.build_info['file-count'] = nr_files
        # Initial build of the given project
        self.call_configure(src_path)
        info = {"filename": "FRESH_BUILD"}
        self.rebuild(src_path, info)
        self.build_info["builds"].append(info)
        # Iterate over all files: touch each one and measure the
        # incremental rebuild it triggers.
        for fn in sources:
            self.touch(fn)
            info = {"filename": fn}
            self.rebuild(src_path, info)
            self.build_info["builds"].append(info)
    # Output the summary of this build into the statistics file.
    with open(self.stats.path, "w+") as fd:
        fd.write(repr(self.build_info))
def ccache_hits(self):
    """Parse `ccache -s` output; return (hits, misses, raw statistics text)."""
    (lines, _) = shell("ccache -s")
    hits = 0
    misses = 0
    for entry in lines:
        # "cache hit rate" lines are percentages, not counts -- skip them;
        # counter lines look like "cache hit (direct)   N".
        if "cache hit" in entry and "rate" not in entry:
            hits += int(entry[entry.index(")") + 1:].strip())
        if "cache miss" in entry:
            misses += int(entry[entry.index("miss") + 4:].strip())
    return hits, misses, "\n".join(lines)
def run(self):
    """Build the ERIKA coptermock example and trace it under the FAIL*
    Bochs runner."""
    logging.info("Cloning ERIKA...")
    with self.erika as erika_path:
        shell("cd %s/examples/x86/coptermock-isorc; make", erika_path)
        # Preserve the built ISO image and ELF binary as experiment outputs.
        self.iso.copy_contents(os.path.join(erika_path, "examples/x86/coptermock-isorc/Debug/erika.iso"))
        self.elf.copy_contents(os.path.join(erika_path, "examples/x86/coptermock-isorc/Debug/Debug/out.elf"))
    # Run the image under the Bochs/FAIL* runner; the instruction trace
    # (trace.pb) ends up in the trace result directory.
    shell(("cd %(resultdir)s; python %(bochs)s -F 50 -i %(iso)s -e %(elf)s -f %(fail)s"
           + " -m 8 -1 -- -Wf,--end-symbol=test_finish -Wf,--start-symbol=EE_oo_StartOS"
           + " -Wf,--trace-file=trace.pb -Wf,--save-symbol=EE_oo_StartOS")
          % {"resultdir": self.trace.path,
             "bochs": self.bochs_runner.path,
             "iso": self.iso.path,
             "elf": self.elf.path,
             "fail": self.erika_tracing.path})
def run(self):
    """Import the recorded trace with several importer configurations,
    then prune equivalent pilots."""
    variant = "erika/error-hook"
    # One import-trace run per (label, importer, extra args) combination.
    for (label, importer, importer_args) in [
            ("mem", "MemoryImporter", []),
            ("regs", "RegisterImporter", []),
            ("ip", "RegisterImporter", ["--no-gp", "--ip"]),
            ("flags", "RegisterImporter", ["--no-gp", "--flags"]),
    ]:
        benchmark = label
        logging.info("Importing coredos/%s", benchmark)
        cmdline = "%(path)s/import-trace -v %(variant)s -b %(benchmark)s -i %(importer)s "\
            + "-t %(trace)s -e %(elf)s %(args)s"
        shell(cmdline %
              {"path": self.fail_tool_dir.path,
               "variant": variant,
               "benchmark": benchmark,
               "importer": importer,
               "trace": os.path.join(self.trace.trace.path, "trace.pb"),
               "elf": self.trace.elf.path,
               "args": " ".join(importer_args)})
    # Prune the whole variant ('%' matches every benchmark).
    shell("%s/prune-trace -v %s -b %% -p basic --overwrite",
          self.fail_tool_dir.path, variant)
def tags(self, regex=None):
    """Return the repository's tag names, optionally filtered by `regex`.

    Dereferenced tag entries ("<tag>^{}") are skipped.
    """
    cmd = "git ls-remote %s refs/tags/* | cut -f2 | cut -d '/' -f3" % (self.__clone_url)
    (lines, ret) = shell(cmd)
    # BUG FIX: dropped the dead `or lines == 0` -- a list compared
    # against an int is never equal.
    if ret != 0:
        print("\n".join(lines))
        sys.exit(-1)
    ret = []
    for line in lines:
        if line.endswith("^{}"):
            continue
        # Simplified from `regex is None or (regex is not None and ...)`.
        if regex is None or re.match(regex, line):
            ret.append(line)
    return ret
def checkout_hash(self):
    """Return the hash of the HEAD commit hash as string.

    The value is cached; falls back to the ref name itself when the
    remote reports no hash for it.
    """
    if not self.__hash:
        cmd = "git ls-remote %s %s" % (self.__clone_url, self.__ref)
        (lines, ret) = shell(cmd)
        # BUG FIX: `lines == 0` compared a list against an int and was
        # never True; `git ls-remote` exits 0 with empty output for an
        # unknown ref, which previously crashed on lines[0] below.
        if ret != 0 or not lines:
            print("\n".join(lines))
            sys.exit(-1)
        self.__hash = lines[0].split("\t")[0]
        if self.__hash == "":
            self.__hash = self.__ref
    return self.__hash
def checkout_hash(self):
    """Return the hash of the HEAD commit hash as string.

    The value is cached; falls back to the ref name itself when the
    remote reports no hash for it.
    """
    if not self.__hash:
        cmd = "git ls-remote %s %s" % (self.__clone_url, self.__ref)
        (lines, ret) = shell(cmd)
        # BUG FIX: `lines == 0` compared a list against an int and was
        # never True; `git ls-remote` exits 0 with empty output for an
        # unknown ref, which previously crashed on lines[0] below.
        if ret != 0 or not lines:
            print("\n".join(lines))
            sys.exit(-1)
        self.__hash = lines[0].split("\t")[0]
        if self.__hash == "":
            self.__hash = self.__ref
    return self.__hash
def run(self):
    # Smoke-test for the shell() helper; events are tracked in self.path.
    shell.track(self.path)
    shell("date")
    try:
        shell("/bin/false")  # should always raise the exception
        assert False
    except CommandFailed:
        pass  # this must not fail the experiment
    # shell_failok swallows the non-zero exit status.
    shell_failok("/bin/false")
    # Dict-style %-interpolation of shell arguments.
    assert (['2 23'], 0) == shell("echo %(foo)s %(bar)s",
                                  {"foo": "2", "bar": "23"})
    # NOTE(review): `experiment_file` is not defined in this block --
    # presumably a module-level name of the surrounding script; confirm.
    shell("cat %s", experiment_file)
def build_parent(self, commit, from_scratch=False):
    """Check out and build the parent of `commit`; return True on success.

    With from_scratch=True the tree is cleaned and fully re-configured;
    otherwise the existing build directory is reused (falling back to a
    clean build if that fails).
    """
    def eq_hash(a, b):
        # Revisions are equal when one is a prefix of the other (short
        # vs. full hash).  Empty input yields None (falsy, "unknown").
        if len(a) == 0 or len(b) == 0:
            return
        if len(a) > len(b):
            return a.startswith(b)
        else:
            return b.startswith(a)
    src_path = self.project.path
    if from_scratch:
        # Drop all untracked files except the clang-hash artifacts.
        shell("cd %s; git clean -dfx -e '*.hash' -e '*.hash.copy'", src_path)
        logging.info("Parent [%s^]: clean build", commit)
        shell("cd %s; git reset --hard %s^", src_path, commit)
        info = {"commit": commit + "^"}
        self.call_configure(src_path)
        self.rebuild(src_path, info, True)
        # Did initial commit fail? Try again
        if info.get("failed"):
            logging.info("Parent[%s^]: failed", commit)
            return False
        return True
    else:
        (lines, _) = shell("cd %s; git rev-parse %s^", src_path, commit)
        parent_revision = lines[0].strip()
        if self.current_revision and eq_hash(self.current_revision, parent_revision):
            # Parent is already checked out and built -- nothing to do.
            logging.info("Parent[%s^]: resuse good parent", commit)
            return True
        else:
            logging.info("Parent[%s^]: resuse similar build directory", commit)
            shell("cd %s; git reset --hard %s^", src_path, commit)
            info = {"commit": commit + "^"}
            self.call_reconfigure(src_path)
            self.rebuild(src_path, info, True)
            # If the reused build directory did not work out, retry with
            # a completely clean tree.
            if info.get("failed"):
                return self.build_parent(commit, from_scratch=True)
            return True
def run(self):
    # Smoke-test for the shell() helper; events are tracked in self.path.
    shell.track(self.path)
    shell("date")
    try:
        shell("/bin/false")  # should always raise the exception
        assert False
    except CommandFailed:
        pass  # this must not fail the experiment
    # shell_failok swallows the non-zero exit status.
    shell_failok("/bin/false")
    # Dict-style %-interpolation of shell arguments.
    assert (['2 23'], 0) == shell("echo %(foo)s %(bar)s",
                                  {"foo": "2", "bar": "23"})
    shell("cat %s", __file__)
def checkout(self, branch=None, tag=None):
    """Check out a visible branch or tag; return the tuple (ref, hash).

    The branch/tag must have been made visible via the
    GitArchive(branches=.../tags=...) parameters; raises RuntimeError
    otherwise, or when the git checkout itself fails.
    """
    if branch:
        visible_branches = self.__metadata.get("branches", {})
        if branch not in visible_branches:
            raise RuntimeError("Branch `%s' is not visible, please parametrize GitArchive(branches=...) correctly" % branch)
        self.__ref = "refs/heads/" + branch
        self.__hash = visible_branches[branch]
    if tag:
        visible_tags = self.__metadata.get("tags", {})
        if tag not in visible_tags:
            raise RuntimeError("Tag `%s' is not visible, please parametrize GitArchive(tags=...) correctly" % tag)
        self.__ref = "refs/tags/" + tag
        self.__hash = visible_tags[tag]
    if not self.__ref:
        raise RuntimeError("GitArchive.checkout() requires branch or tag parameter")
    cmd = "cd '%s' && git checkout %s"
    (lines, ret) = shell(cmd, self.value.path, self.__ref, stderr=sys.stderr)
    if ret != 0:
        print("\n".join(lines))
        raise RuntimeError("GitArchive.checkout(%s) failed" % self.__ref)
    return (self.__ref, self.__hash)
def __setup_value(self):
    """Clone (or archive-export) the git repository into the experiment's
    temporary directory, fetch all visible branches/tags, and return a
    Directory for the working tree."""
    # The clone URL may itself be a parameter object (exposes .path).
    if "path" in dir(self.__clone_url):
        self.subobjects["clone-url"] = self.__clone_url
        self.__clone_url = self.__clone_url.path
    logging.info("copying git archive %s", self.__clone_url)
    with self.tmp_directory as d:
        os.mkdir(self.name)
        if self.__shallow:
            # Shallow mode: export only the tree of one ref, no history.
            cmd = "cd '%s' && git archive --format=tar --remote=%s %s | tar x"
            args = (self.name, self.__clone_url, self.__ref)
        else:
            cmd = "git clone %s %s"
            args = (self.__clone_url, self.name)
        (lines, ret) = shell(cmd, *args, stderr=sys.stderr)
        if ret != 0:
            print("\n".join(lines))
            sys.exit(-1)
        if not self.__shallow:
            # Full clone: fetch and check out the requested ref.
            cmd = "cd %s && git gc && git fetch %s %s && git checkout FETCH_HEAD"
            args = (self.name, self.__clone_url, self.__ref)
            (lines, ret) = shell(cmd, *args, stderr=sys.stderr)
            if ret != 0:
                print("\n".join(lines))
                sys.exit(-1)
        # Fetch all visible branches and tags
        for branch in self.__metadata.get("branches", {}):
            cmd = "cd %s && git fetch %s refs/heads/%s && git update-ref refs/heads/%s FETCH_HEAD"
            shell(cmd, self.name, self.__clone_url, branch, branch, stderr=sys.stderr)
        for tag in self.__metadata.get("tags", {}):
            cmd = "cd %s && git fetch %s refs/tags/%s && git update-ref refs/tags/%s FETCH_HEAD"
            shell(cmd, self.name, self.__clone_url, tag, tag, stderr=sys.stderr)
        return Directory(os.path.abspath(self.name))
def run(self):
    """Run a fixed command sequence: sleep, a burst of many short-lived
    subcommands, and another sleep."""
    for cmd in ("sleep 0.5",
                "seq 1 100 | while read a; do echo > /dev/null; done",
                "sleep 0.5"):
        shell(cmd)
def after_experiment_run(self, parameter_type):
    """After an output run, gzip the file's contents in place (the file
    keeps its name but becomes gzip-compressed)."""
    File.after_experiment_run(self, parameter_type)
    if parameter_type != "output":
        return
    # Compress into a sibling ".1" file first, then move it over the original.
    shell("gzip -c %s > %s.1", self.path, self.path)
    shell("mv %s.1 %s", self.path, self.path)
def run(self):
    """Run a fixed command sequence: sleep, a burst of many short-lived
    subcommands, and another sleep."""
    for cmd in ("sleep 0.5",
                "seq 1 100 | while read a; do echo > /dev/null; done",
                "sleep 0.5"):
        shell(cmd)
def call_make(self, path):
    """Invoke make for the project, honoring its out-of-tree build directory.

    Returns the (output-lines, exit-status) tuple from shell().
    """
    jobs = str(self.jobs.value)
    if self.project_name() in ("mbedtls", "cpython", "postgresql"):
        # These projects build inside a separate build/ subdirectory.
        return shell("cd %s/build; make -k -j %s", path, jobs)
    return shell("cd %s; make -k -j %s", path, jobs)
def run(self):
    """Rebuild the project commit by commit, comparing local and global
    clang-hash values between each commit and its parent.

    NOTE(review): this block uses Python 2 syntax (`print` statements,
    `dict.iteritems`); it cannot run under Python 3 as-is.
    """
    # Determine the mode
    modes = ('normal', 'ccache', 'clang-hash', 'ccache-clang-hash')
    if not self.mode.value in modes:
        raise RuntimeError("Mode can only be one of: %s" % modes)
    logging.info("Build the Clang-Hash Plugin")
    with self.clang_hash as cl_path:
        shell("cd %s; mkdir build; cd build; cmake .. -DCMAKE_BUILD_TYPE=Release; make -j 4", cl_path)
        shell("strip %s/build/clang-plugin/*.so", cl_path)
    # Project name
    logging.info("Cloning project... %s", self.project_name())
    self.build_info = {"project-name": self.project_name(),
                       "commit-hash": self.metadata["project-hash"],
                       'builds': []}
    with self.project as src_path:
        # One record per commit: "<hash> <parent> <summary>", oldest first.
        (commits, _) = shell("cd %s; git log --no-merges --oneline --topo-order --format='%%H %%P %%s'", src_path)
        # [0] is hash. [1] is parent, [2] rest
        commits = [x.split(" ", 2) for x in reversed(commits)]
        commits = commits[-self.commits.value:]
        self.current_revision = None
        # First, we redirect all calls to the compiler to our
        # clang hash wrapper
        self.setup_compiler_paths(cl_path)
        time = 0
        last_failed = True
        nr_of_commits = len(commits)
        original_commits = commits[:]
        occurred_errors = {}  # map commit -> [error strings]

        def gather_local_hashes(src_path):
            # Collect the per-element hashes the clang-hash wrapper wrote
            # into the build tree, keyed by element name.
            remove_keys = ['project', 'return-code', 'start-time', 'run_id',
                           'compile-duration', 'processed-bytes',
                           'hash-duration', 'hash-start-time',
                           'object-file-size']  # TODO: ofile-size useful?
            hashes = read_hash_directory(src_path, remove_keys)
            local_hashes = {}
            for entry in hashes:
                element_hashes = entry['element-hashes']
                for element in element_hashes:
                    local_hashes[element[0]] = element[1]
            return local_hashes

        def gather_global_hashes(local_hashes, occurred_errors):
            # Run clang-hash-global per symbol; failures are recorded,
            # not raised.  NOTE(review): the returned dict is never
            # filled -- it is always empty; presumably the tool output
            # was meant to be collected here.  TODO confirm.
            global_hashes = {}
            for symbol in local_hashes:
                symbol = symbol.split(':')[1]  # Remove the prefix ('function:' etc.)
                try:
                    shell("cd %s; %s/clang-hash-global --definition %s",
                          src_path, self.inputs.clang_hash.path, symbol)
                except Exception as e:
                    occurred_errors[commit[0]] = e  # don't raise exception
            return global_hashes

        def add_additional_commit_info_to(info):
            # Pipe `git show` through diffstat and parse its summary line
            # plus the per-file change counts into `info`.
            gitshow = subprocess.Popen(["git", "show"], stdout=subprocess.PIPE)
            dstat_out = subprocess.check_output(('diffstat'), stdin=gitshow.stdout)
            gitshow.wait()
            lines = dstat_out.split('\n')
            # Find the last non-empty line: diffstat's summary.
            index = -1
            while lines[index] == '':
                index -= 1
            last_line = lines[index]
            changedInsertionsDeletions = [int(s) for s in last_line.split() if s.isdigit()]
            if "insertion" in last_line:
                info['insertions'] = changedInsertionsDeletions[1]
                if "deletion" in last_line:
                    info['deletions'] = changedInsertionsDeletions[2]
            elif "deletion" in last_line:
                info['deletions'] = changedInsertionsDeletions[1]
            # Get changed files
            changed_files = {}
            for line in lines:
                if '|' in line:
                    elems = line.split()
                    assert elems[1] == '|'
                    filename = elems[0]
                    nr_of_changes = int(elems[2])
                    changed_files[filename] = nr_of_changes
            assert len(changed_files) == changedInsertionsDeletions[0]
            info['changes'] = changed_files

        while commits:
            # Search for a child of the current revision
            commit = None
            if self.current_revision:
                for idx in range(0, len(commits)):
                    if commits[idx][1] == self.current_revision:
                        commit = commits[idx]
                        del commits[idx]
                        break
            # No Child found -> Take the first one.
            if not commit:
                commit = commits.pop(0)
            info = {"commit": commit[0],
                    "parent": commit[1],
                    "summary": commit[2]}
            # First, we build the parent. In a total linear
            # history, this is a NOP. Otherwise, we try to reset
            # to the actual parent, and rebuild the project. This
            # may fail, since the current commit might fix this.
            ret = self.build_parent(commit[0], from_scratch=last_failed)
            info['parent-ok'] = ret
            parent_info = {}
            add_additional_commit_info_to(parent_info)
            info['parent-info'] = parent_info
            # Gather hashes of parent
            parent_local_hashes = gather_local_hashes(src_path)
            parent_global_hashes = gather_global_hashes(parent_local_hashes,
                                                        occurred_errors)
            #info['parent-local-hashes'] = parent_local_hashes
            #info['parent-global-hashes'] = parent_global_hashes
            # Change to the ACTUAL commit.
            shell("cd %s; git reset --hard %s", src_path, commit[0])
            add_additional_commit_info_to(info)
            # Call reconfigure, and then go on building the commit.
            self.call_reconfigure(src_path)
            if os.path.exists("/tmp/clang-hash.log"):
                os.unlink("/tmp/clang-hash.log")
            # Rebuild and Measure
            self.rebuild(src_path, info, fail_ok=True)
            # Don't need those atm
            del info['clang-hash-hits']
            del info['clang-hash-misses']
            # Gather hashes
            local_hashes = gather_local_hashes(src_path)
            global_hashes = gather_global_hashes(local_hashes, occurred_errors)
            #info['local-hashes'] = local_hashes
            #info['global-hashes'] = global_hashes
            # Compare hashes/search for changed hashes
            # The parent's global hashes are copied to find removed symbols
            changed_symbols = {}
            parent_hashes = deepcopy(parent_global_hashes)
            for symbol, global_hash in global_hashes.iteritems():
                parent_global_hash = parent_hashes.pop(symbol, None)
                if global_hash != parent_global_hash:
                    # Store it as [before, after]
                    changed_symbols[symbol] = [parent_global_hash, global_hash]
            # Add removed symbols
            for symbol, parent_global_hash in parent_hashes.iteritems():
                changed_symbols[symbol] = [parent_global_hash, None]
            # Compare hashes/search for changed hashes
            # The parent's local hashes are copied to find removed symbols
            local_changed_symbols = {}
            parent_hashes = deepcopy(parent_local_hashes)
            for symbol, local_hash in local_hashes.iteritems():
                parent_local_hash = parent_hashes.pop(symbol, None)
                if local_hash != parent_local_hash:
                    # Store it as [before, after]
                    local_changed_symbols[symbol] = [parent_local_hash, local_hash]
            # Add removed symbols
            for symbol, parent_local_hash in parent_hashes.iteritems():
                local_changed_symbols[symbol] = [parent_local_hash, None]
            info['changed-symbols'] = changed_symbols
            #info['local-changed-symbols'] = local_changed_symbols
            info['local-changed-sym-count'] = len(local_changed_symbols)
            # TODO: add more analysis
            # TODO: for each changed local hash, the symbol's global hash should also change...
            # check every symbol for changed global hash\
            # also check the commits, if the correct ones are used...
            if os.path.exists("/tmp/clang-hash.log") and not info.get("failed"):
                with open("/tmp/clang-hash.log") as fd:
                    self.clang_hash_log.value += fd.read()
            self.build_info["builds"].append(info)
            if not info.get("failed"):
                time += info['build-time'] / 1e9
                # Build was good. Remember that.
                self.current_revision = commit[0]
                last_failed = False
            else:
                self.current_revision = None
                last_failed = True
    logging.info("Rebuild for %d commits takes %f minutes",
                 self.commits.value, time / 60.)
    print "\n\noccurred errors:\n"
    print occurred_errors
    print "\n\nchanged symbols:\n"
    print changed_symbols
    print "\n\nlocal changed symbols:\n"
    print local_changed_symbols
    print "\n\n\n"
    if len(changed_symbols) or len(local_changed_symbols):
        print "!!! success: found one !!!"
    # Output the summary of this build into the statistics file.
    with open(self.stats.path, "w+") as fd:
        fd.write(repr(self.build_info))
def run(self):
    """Rebuild the project commit by commit (children preferred, so a
    linear history needs no parent rebuilds) and record per-commit build
    information in self.build_info."""
    # Determine the mode
    modes = ('normal', 'ccache', 'clang-hash', 'ccache-clang-hash')
    if not self.mode.value in modes:
        raise RuntimeError("Mode can only be one of: %s" % modes)
    logging.info("Build the Clang-Hash Plugin")
    with self.clang_hash as cl_path:
        shell("cd %s; mkdir build; cd build; cmake .. -DCMAKE_BUILD_TYPE=Release; make -j 4", cl_path)
        # BUG FIX: the glob said "clang-plguin", so strip never matched
        # the built plugin.
        shell("strip %s/build/clang-plugin/*.so", cl_path)
    # Project name
    logging.info("Cloning project... %s", self.project_name())
    self.build_info = {"project-name": self.project_name(),
                       "commit-hash": self.metadata["project-hash"],
                       'builds': []}
    with self.project as src_path:
        # One record per commit: "<hash> <parent> <summary>", oldest first.
        (commits, _) = shell("cd %s; git log --no-merges --oneline --topo-order --format='%%H %%P %%s'", src_path)
        # [0] is hash. [1] is parent, [2] rest
        commits = [x.split(" ", 2) for x in reversed(commits)]
        commits = commits[-self.commits.value:]
        self.current_revision = None
        # First, we redirect all calls to the compiler to our
        # clang hash wrapper
        self.setup_compiler_paths(cl_path)
        time = 0
        last_failed = True
        while commits:
            # Search for a child of the current revision
            commit = None
            if self.current_revision:
                for idx in range(0, len(commits)):
                    if commits[idx][1] == self.current_revision:
                        commit = commits[idx]
                        del commits[idx]
                        break
            # No Child found -> Take the first one.
            if not commit:
                commit = commits.pop(0)
            # Bash initial commit
            if commit[0] == "726f63884db0132f01745f1fb4465e6621088ccf":
                continue
            info = {"commit": commit[0],
                    "parent": commit[1],
                    "summary": commit[2]}
            # Somehow this commit in musl is weird. It behaves
            # totally different, if build with a fresh parent and
            # a non-fresh parent. With this we are on the safe side.
            if commit[0] == "416d1c7a711807384cc21a18163475cf757bbcb5":
                last_failed = True
            # First, we build the parent. In a total linear
            # history, this is a NOP. Otherwise, we try to reset
            # to the actual parent, and rebuild the project. This
            # may fail, since the current commit might fix this.
            ret = self.build_parent(commit[0], from_scratch=last_failed)
            info['parent-ok'] = ret
            # Change to the ACTUAL commit. Call reconfigure, and
            # then go on building the commit.
            shell("cd %s; git reset --hard %s", src_path, commit[0])
            self.call_reconfigure(src_path)
            if os.path.exists("/tmp/clang-hash.log"):
                os.unlink("/tmp/clang-hash.log")
            # Rebuild and Measure
            self.rebuild(src_path, info, fail_ok=True)
            if os.path.exists("/tmp/clang-hash.log") and not info.get("failed"):
                with open("/tmp/clang-hash.log") as fd:
                    self.clang_hash_log.value += fd.read()
            self.build_info["builds"].append(info)
            if not info.get("failed"):
                time += info['build-time'] / 1e9
                # Build was good. Remember that.
                self.current_revision = commit[0]
                last_failed = False
            else:
                self.current_revision = None
                last_failed = True
    logging.info("Rebuild for %d commits takes %f minutes",
                 self.commits.value, time / 60.)
    # Output the summary of this build into the statistics file.
    with open(self.stats.path, "w+") as fd:
        fd.write(repr(self.build_info))
def value(self):
    """Return the file's value, decompressing the original gzip file into
    place first if it has not been unpacked yet."""
    target = File.path.fget(self)
    needs_unpack = (self.parameter_type == "input"
                    and not os.path.exists(target))
    if needs_unpack:
        shell("gunzip < %s > %s", self.__original_filename, target)
    return File.value.fget(self)
def value(self):
    """Return the file's value, decompressing the original gzip file into
    place first if it has not been unpacked yet."""
    target = File.path.fget(self)
    needs_unpack = (self.parameter_type == "input"
                    and not os.path.exists(target))
    if needs_unpack:
        shell("gunzip < %s > %s", self.__original_filename, target)
    return File.value.fget(self)
def run(self):
    """Record shell events to self.path and run two smoke-test commands."""
    shell.track(self.path)
    for cmd, cmd_args in (("echo 1", ()),
                          ("cd %s && test -x ./sh", ("/bin",))):
        shell(cmd, *cmd_args)
def execute(self, cmdline, *args):
    """Does start the executable with meth:`versuchung.execute.shell`
    and args, which is of type list, as arguments."""
    from versuchung.execute import shell
    full_cmdline = "%s %s" % (self.path, cmdline)
    shell(full_cmdline, *args)
def execute(self, cmdline, *args):
    """Does start the executable with meth:`versuchung.execute.shell`
    and args, which is of type list, as arguments."""
    from versuchung.execute import shell
    full_cmdline = "%s %s" % (self.path, cmdline)
    shell(full_cmdline, *args)
def after_experiment_run(self, parameter_type):
    """After an output run, gzip the file's contents in place (the file
    keeps its name but becomes gzip-compressed)."""
    File.after_experiment_run(self, parameter_type)
    if parameter_type != "output":
        return
    # Compress into a sibling ".1" file first, then move it over the original.
    shell("gzip -c %s > %s.1", self.path, self.path)
    shell("mv %s.1 %s", self.path, self.path)