Exemple #1
0
    def _flush():
        """Wait for pending device operations to finish and flush to disk.

        Runs ``udevadm settle`` so outstanding udev events (e.g. iscsi
        device setup) complete; the command's output is discarded by
        redirecting it to /dev/null instead of the screen.
        """
        with open(os.devnull, 'wb') as null_fh:
            process.call(["udevadm", "settle"],
                         stdout=null_fh, stderr=null_fh)
def resolve_document_using_git(git, docref):
    """Build a JSON-serializable document for *docref* by querying git.

    ``git`` is the argv prefix used to invoke git.  Supported docref
    kinds: "branches", "branch", "commit", "tree" and "blob"; any other
    kind raises NotImplementedError.

    Fixes: removed a dead ``get`` lambda that was immediately shadowed
    by the second definition, and an unused ``id`` local that shadowed
    the builtin.
    """
    document = docref_to_dict(docref)
    kind = docref.kind
    # Fetch a single commit format attribute (e.g. "%an") for this docref.
    get = lambda a: git_show(git, docref.name, a)
    if kind == "branches":
        branches = [trim(a, prefix="  remotes/origin/")
                    for a in read_lines(call(git + ["branch", "-a"]))]
        document["branches"] = []
        for branch in branches:
            document["branches"].append(docref_to_dict(BranchDocref(branch)))
    elif kind == "branch":
        # Resolve the branch name to the sha of its tip commit.
        sha = get1(
            read_lines(
                call(git + ["rev-parse", "remotes/origin/" + docref.name])))
        document["commit"] = docref_to_dict(ShaDocRef("commit", sha))
    elif kind == "commit":
        document.update(
            {"author": {"name": get("%an"),
                        "email": get("%ae"),
                        "date": get("%ai")},
             "committer": {"name": get("%cn"),
                           "email": get("%ce"),
                           "date": get("%ci")},
             "message": get("%B"),
             "tree": docref_to_dict(ShaDocRef("tree", get("%T"))),
             "parents": [],
             })
        # %P is a space-separated parent sha list; empty for root commits.
        for p in sorted(get("%P").split(" ")):
            if p == "":
                continue
            document["parents"].append(
                docref_to_dict(ShaDocRef("commit", p)))
    elif kind == "tree":
        document["children"] = []
        # ls-tree lines look like: "<mode> <kind> <sha>\t<basename>".
        for line in read_lines(call(git + ["ls-tree", docref.name])):
            child_mode, child_kind, rest = line.split(" ", 2)
            child_sha, child_basename = rest.split("\t", 1)
            ref = {"child": docref_to_dict(ShaDocRef(child_kind, child_sha)),
                   "basename": child_basename,
                   "mode": octal_to_symbolic_mode(child_mode)}
            document["children"].append(ref)
        document["children"].sort(key=lambda a: a["child"]["sha"])
    elif kind == "blob":
        blob = call(git + ["show", docref.name], do_crlf_fix=False)
        # Store text blobs raw; anything else base64-encoded.
        if is_text(blob):
            document["encoding"] = "raw"
            document["raw"] = blob
        else:
            document["encoding"] = "base64"
            document["base64"] = base64.b64encode(blob)
    else:
        raise NotImplementedError(kind)
    return document
Exemple #3
0
def notify(*, away=False, back=False):
    """Post a desktop notification marking the user as away or back.

    Exactly one of *away*/*back* must be truthy; *away* wins when both
    are set (matching the original if/else ordering).
    """
    assert away or back
    message = "Marked as Away" if away else "Marked as Back"
    call([
        cfg.terminal_notifier,
        "-message", message,
        "-title", "Time Tracker",
        "-sender", "uno.glob.timetracker",
    ])
Exemple #4
0
def reset_and_run_testcase(testcase_id, category, release):
    """Resets the chromium repo and runs the testcase."""

    # Remove build output and the clusterfuzz cache so the run starts clean.
    delete_if_exists(CHROMIUM_OUT)
    delete_if_exists(CLUSTERFUZZ_CACHE_DIR)
    process.call('git checkout -f HEAD', cwd=CHROMIUM_SRC)

    # Clean untracked files. Because untracked files in submodules are not removed
    # with `git checkout -f HEAD`.
    process.call('git clean -d -f -f', cwd=CHROMIUM_SRC)

    version = prepare_binary_and_get_version(release)
    update_auth_header()
    # Report the reproduction outcome (return value of run_testcase)
    # together with the binary version.
    stackdriver_logging.send_run(testcase_id, category, version, release,
                                 run_testcase(testcase_id))
Exemple #5
0
def copy_dir_if_not_exist(orig, target):
    """function to copy missing directories from one location to another
    should only be used when syncing a directory structure from iso
    to disk like /var/log
    use case -- upgrade adds some service which logs to /var/log/<service>
    need to have the directory created, but it's not on iso upgrade

    Fixes: '&>/dev/null' is bash-only -- under plain sh it backgrounds
    the cp and redirects nothing; paths are now shell-quoted so names
    with spaces/metacharacters can't break (or inject into) the command.
    """
    import pipes  # local import; shlex.quote is not available on Python 2

    for f in os.listdir(orig):
        src = os.path.join(orig, f)
        dst = os.path.join(target, f)
        if os.path.isdir(src):
            if not os.path.exists(dst):
                # Portable sh redirection instead of bash-only '&>'.
                process.call("cp -av %s %s >/dev/null 2>&1"
                             % (pipes.quote(src), pipes.quote(dst)),
                             shell=True)
            else:
                # Directory already exists; recurse to fill in missing
                # subdirectories.
                copy_dir_if_not_exist(src, dst)
Exemple #6
0
 def hg(argv, **kwargs):
     """Run an hg command (cwd defaults to hg_path, do_print to True).

     When a username is configured, commits get ui.username forced via
     --config so the author is deterministic.
     """
     kwargs.setdefault("cwd", hg_path)
     kwargs.setdefault("do_print", True)
     cmd = ["hg"]
     is_commit = argv[0] in ("commit", "ci")
     if username is not None and is_commit:
         cmd += ["--config", "ui.username=%s" % (username,)]
     return call(cmd + argv, **kwargs)
Exemple #7
0
    def update(self):
        """Re-read the CIB XML and refresh the cached element handles."""
        raw_cib = process.call(CIB.GET_CIB_CMD.split(" "))
        self._cib_el = ET.fromstring(raw_cib)
        self._state_el = CIB.get_real_time_state()

        # Cache the frequently used sections of the configuration tree.
        find = self._cib_el.find
        self._nodes_el = find(CIB.NODES_XPATH)
        self._resources_el = find(CIB.RESOURCES_XPATH)
        self._constraints_el = find(CIB.CONSTRAINTS_XPATH)
Exemple #8
0
def couchapp(url, local_path):
    """Export the couchapp at *local_path* and push its design doc to *url*."""
    url = url.encode("ascii")
    local_path = os.path.abspath(local_path)
    exported = call(["couchapp", "push", "--export"],
                    stderr=None, cwd=local_path)
    # couchapp prints a stray line before the design document, so keep
    # everything from the first '{' onwards.
    design_json = exported[exported.find("{"):]
    put_update(url, lambda a=None: json.loads(design_json))
def git_show(git, sha, attr):
    """Return the commit format attribute *attr* (e.g. "%an") for *sha*.

    The value is bracketed with marker strings in the --format template
    and sliced back out, so surrounding noise in the output is ignored.
    """
    marker_a = "#start#"
    marker_b = "#end#"
    fmt = "--format=format:%s%s%s" % (marker_a, attr, marker_b)
    out = call(git + ["show", fmt, "--quiet", sha], do_check=False)
    lo = out.index(marker_a) + len(marker_a)
    hi = out.rindex(marker_b)
    assert lo <= hi, out
    return out[lo:hi]
Exemple #10
0
def run_testcase(testcase_id):
    """Attempts to reproduce a testcase."""
    try:
        # NOTE(review): `env` here looks like it replaces the whole
        # environment rather than extending os.environ (only PATH is
        # carried over) -- confirm against process.call's semantics.
        process.call('/python-daemon/clusterfuzz reproduce %s' % testcase_id,
                     cwd=HOME,
                     env={
                         'CF_QUIET': '1',
                         'USER': '******',
                         'CHROMIUM_SRC': CHROMIUM_SRC,
                         'GOMA_GCE_SERVICE_ACCOUNT': 'default',
                         'PATH': '%s:%s' % (os.environ['PATH'], DEPOT_TOOLS)
                     })
        success = True
    except subprocess.CalledProcessError:
        # Non-zero exit from the reproduce command means no repro.
        success = False

    # Remember the outcome so later runs can consult the cache.
    TESTCASE_CACHE[testcase_id] = success
    return success
def git_show(git, sha, attr):
    """Return the commit format attribute *attr* (e.g. "%an") for *sha*.

    The value is bracketed with marker strings in the --format template
    and sliced back out of the output.

    Fixes: the blanket except used to discard the underlying error; the
    original failure is now included in the re-raised message so the
    cause (bad sha, missing marker, ...) is not lost.
    """
    try:
        start = "#start#"
        end = "#end#"
        out = call(git + ["show", 
                          "--format=format:%s%s%s" % (start, attr, end),
                          "--quiet", sha], do_check=False)
        sindex = out.index(start) + len(start)
        # TODO: Shouldn't need these magic markers
        eindex = out.index(end)
        assert sindex <= eindex, out
        return out[sindex:eindex]
    except Exception as e:
        raise Exception("Unable to get git attribute %r from %r: %s"
                        % (attr, sha, e))
Exemple #12
0
def build_master_and_get_version():
    """Checks out the latest master build and creates a new binary."""
    # Clone only on first run; afterwards fetch + hard checkout suffices.
    if not os.path.exists(TOOL_SOURCE):
        process.call(
            'git clone https://github.com/google/clusterfuzz-tools.git',
            cwd=HOME)
    process.call('git fetch', cwd=TOOL_SOURCE)
    process.call('git checkout origin/master -f', cwd=TOOL_SOURCE)
    # Build the pex with pants; HOME is pinned for the build environment.
    process.call('./pants binary tool:clusterfuzz-ci',
                 cwd=TOOL_SOURCE,
                 env={'HOME': HOME})

    # Replace any previously installed binary with the fresh build.
    delete_if_exists(BINARY_LOCATION)
    shutil.copy(os.path.join(TOOL_SOURCE, 'dist', 'clusterfuzz-ci.pex'),
                BINARY_LOCATION)

    # The full SHA is too long and unpleasant to show in logs. So, we use the
    # first 7 characters of the SHA instead.
    return process.call('git rev-parse HEAD', capture=True,
                        cwd=TOOL_SOURCE).strip()[:7]
Exemple #13
0
def get_release_file(base_url):
    # Fetch a release file, trying each PORTS_MIRRORS url rewrite and
    # falling back to the .bz2 / .gz variants of the url.
    # (This copy is truncated here; a complete version of the same
    # fallback chain appears later in the file.)
    for src, dst in [("", "")] + list(PORTS_MIRRORS):
        url = base_url
        if url.startswith(src):
            url = dst + trim(url, prefix=src)
        try:
            file_data = get(url).content
        except Exception, e:
            try:
                file_data_bz2 = get(url + ".bz2").content
            except Exception, e:
                try:
                    file_data_gz = get(url + ".gz").content
                except Exception, e:
                    continue
                else:
                    # FIXME: in this branch only file_data_gz was fetched;
                    # file_data_bz2 is unbound here (NameError). This
                    # should decompress file_data_gz instead.
                    file_data = call(["gunzip"], 
                                     stdin_data=file_data_bz2,
                                     do_crlf_fix=False)
Exemple #14
0
def reboot():
    """Reboot the system
    """
    # Delegates to the system `reboot` binary.
    process.call(["reboot"])
Exemple #15
0
def ubuntu_to_hg(hg_path, username, do_development=False):
    """Mirror Ubuntu meta-release metadata into a mercurial repository at
    *hg_path*, one named branch per release codename; branches for
    releases that are no longer listed as supported get closed at the end.
    Development releases are only synced when *do_development* is set.
    """

    # Run hg inside hg_path; force ui.username on commits when configured.
    def hg(argv, **kwargs):
        kwargs.setdefault("cwd", hg_path)
        kwargs.setdefault("do_print", True)
        prefix = ["hg"]
        if username is not None and argv[0] in ("commit", "ci"):
            prefix.extend(["--config", "ui.username=%s" % (username,)])
        return call(prefix + argv, **kwargs)

    with with_ubuntu_keyring() as gpg:
        meta_release_data = get(
            join(BASE_URL, "meta-release-development")).content
        meta_release = parse_control_file(meta_release_data)
        group_by(meta_release, lambda r: r["dist"])
        if not os.path.exists(hg_path):
            os.makedirs(hg_path)
            hg(["init"])
        # Existing branch names: first whitespace-separated token per line.
        branches = set([a.split()[0] for a in read_lines(hg(["branches"]))])
        ok_branches = set()
        seen_supported_non_lts = False
        for release in meta_release:
            branch = "ubuntu_codename_%s" % (release["dist"],)
            is_lts = "LTS" in release["version"]
            is_supported = release["supported"] == "1"
            if is_supported and not is_lts:
                seen_supported_non_lts = True
            # Unsupported releases after the newest supported non-LTS are
            # treated as "development" ones.
            is_development = not is_supported and seen_supported_non_lts
            if not is_supported and not is_development:
                continue
            ok_branches.add(branch)
            if is_development and not do_development:
                continue
            done = set()
            # New branch: start from the null revision; otherwise update
            # onto the existing branch head.
            if branch not in branches:
                hg(["update", "--clean", "--rev", "00"])
                hg(["branch", "--force", branch])
            else:
                hg(["update", "--clean", branch])
            # Remove any untracked files left in the working copy.
            hg(["--config", "extensions.purge=", "purge", "--all"])
            release_gpg_path = os.path.join(hg_path, "Release.gpg")
            release_path = os.path.join(hg_path, "Release")
            old_sha1sums = {}
            release_gpg_data = get(release["release-file"] + ".gpg").content
            if os.path.exists(release_gpg_path):
                # Unchanged signature implies an unchanged release: skip.
                if release_gpg_data == read_file(release_gpg_path):
                    continue
                release_data = read_file(release_path)
                old_sha1sums = get_release_sha1sums(release_data)
                old_sha1sums["Release"] = hashlib.sha1(
                    release_data).hexdigest()
                old_sha1sums["Release.gpg"] = hashlib.sha1(
                    release_gpg_data).hexdigest()
                # for relpath in sorted(old_sha1sums):
                #     if posixpath.dirname(relpath) == "Index":
                #         index_data = read_file(os.path.join(hg_path, relpath))
                #         child_sha1sums = get_release_sha1sums(index_data)
                #         for relpath2 in sorted(child_sha1sums):
                #             relpath3 = posixpath.join(
                #                 posixpath.dirname(relpath), relpath2)
                #             old_sha1sums[relpath3] = child_sha1sums[relpath2]
            release_data = get(release["release-file"]).content
            with open(release_gpg_path, "wb") as fh:
                fh.write(release_gpg_data)
            # FIXME: the two done.add() labels below appear swapped
            # relative to the file just written; harmless in practice
            # since both names end up in `done`.
            done.add("Release")
            with open(release_path, "wb") as fh:
                fh.write(release_data)
            done.add("Release.gpg")
            # Verify the detached signature against the keyring.
            gpg(["--verify", release_gpg_path, release_path])
            new_sha1sums = get_release_sha1sums(release_data)
            new_sha1sums["Release.gpg"] = hashlib.sha1(
                release_gpg_data).hexdigest()
            new_sha1sums["Release"] = hashlib.sha1(
                release_data).hexdigest()
            # for relpath in sorted(new_sha1sums):
            #     if posixpath.basename(relpath) == "Index":
            #         if new_sha1sums[relpath] == old_sha1sums.get(relpath):
            #             index_data = read_file(os.path.join(hg_path, relpath))
            #         else:
            #             index_data = get(
            #                 posixpath.join(
            #                     posixpath.dirname(release["Release-File"]),
            #                     relpath)).content
            #             sha1sum = hashlib.sha1(index_data).hexdigest()
            #             if sha1sum != new_sha1sums[relpath]:
            #                 raise Exception("sha1sum mismatch for %r: "
            #                                 "got %s expecting %s"
            #                                 % (url, sha1sum, 
            #                                    new_sha1sums[relpath]))
            #             index_path = os.path.join(hg_path, relpath)
            #             if not os.path.exists(os.path.dirname(index_path)):
            #                 os.makedirs(os.path.dirname(index_path))
            #             with open(index_path, "wb") as fh:
            #                 fh.write(index_data)
            #         done.add(relpath)
            #         child_sha1sums = get_release_sha1sums(index_data)
            #         for relpath2 in sorted(child_sha1sums):
            #             relpath3 = posixpath.join(
            #                 posixpath.dirname(relpath), relpath2)
            #             new_sha1sums[relpath3] = child_sha1sums[relpath2]
            # Delete files that vanished from the new release.
            for relpath in old_sha1sums:
                if relpath in new_sha1sums:
                    continue
                file_path = os.path.join(hg_path, relpath)
                call(["rm", "-rf", "--one-file-system", file_path])
            # Fetch files that are new or whose checksum changed.
            for relpath in new_sha1sums:
                if relpath in old_sha1sums:
                    if new_sha1sums[relpath] == old_sha1sums[relpath]:
                        continue
                # Skip compressed variants when the uncompressed file is
                # also listed.
                if (relpath.endswith(".gz") 
                        and trim(relpath, suffix=".gz") in new_sha1sums):
                    continue
                if (relpath.endswith(".bz2") 
                        and trim(relpath, suffix=".bz2") in new_sha1sums):
                    continue
                if relpath in done:
                    continue
                file_path = os.path.join(hg_path, relpath)
                file_data = get_release_file(
                    posixpath.join(
                        posixpath.dirname(release["release-file"]), relpath))
                sha1sum = hashlib.sha1(file_data).hexdigest()
                if sha1sum != new_sha1sums[relpath]:
                    # FIXME: `url` is not defined in this scope -- this
                    # raise would itself fail with NameError; it probably
                    # should reference relpath or the fetched url.
                    raise Exception("sha1sum mismatch for %r: "
                                    "got %s expecting %s"
                                    % (url, sha1sum, new_sha1sums[relpath]))
                if not os.path.exists(os.path.dirname(file_path)):
                    os.makedirs(os.path.dirname(file_path))
                with open(file_path, "wb") as fh:
                    fh.write(file_data)
            hg(["addremove"])
            # Only commit when something actually changed.
            if len(read_lines(hg(["status"]))) > 0:
                hg(["commit", "-m", "Update from upstream"])
        # Close branches for releases no longer present/supported.
        for branch in branches:
            if branch == "default" or branch in ok_branches:
                continue
            hg(["update", "--clean", branch])
            hg(["commit", "--close-branch", 
                "-m", "Closing unsupported release"])
def git_to_couchdb(cache_root, git_url, couchdb_url):
    """Mirror a git repository into couchdb documents.

    When *git_url* is given, a local cache clone under *cache_root* is
    created/refreshed and used; otherwise plain ``git`` runs in the
    current directory.
    """
    if git_url is None:
        git = ["git"]
    else:
        cache_dir = os.path.join(cache_root, encode_as_c_identifier(git_url))
        # argv prefix that cd's into the cache dir before exec'ing git,
        # so every later `call(git + [...])` runs inside the cache clone.
        git = ["bash", "-c", 'cd "$1" && shift && exec "$@"', "-", cache_dir, 
               "git"]
        call(["mkdir", "-p", cache_dir])
        call(git + ["init"])
        # Drop any stale remotes, then point "origin" at git_url.
        for r in read_lines(call(git + ["remote"])):
            call(git + ["remote", "rm", r])
        call(git + ["remote", "add", "origin", git_url])
        call(git + ["fetch", "origin"], stdout=None, stderr=None)
    resolve_document = lambda d: resolve_document_using_git(git, d)
    fetch_all(resolve_document, couchdb_url, [BRANCHES_DOCREF])
Exemple #17
0
 def restart(self):
     # Restart the sshd service, discarding its output.
     # NOTE(review): '&>' is bash-specific; if process.call runs this via
     # plain sh the command would be backgrounded instead -- confirm.
     self.logger.debug("Restarting SSH")
     process.call("service sshd restart &>/dev/null")
Exemple #18
0
            file_data = get(url).content
        except Exception, e:
            # Plain fetch failed: try the .bz2 variant, then .gz.
            try:
                file_data_bz2 = get(url + ".bz2").content
            except Exception, e:
                try:
                    file_data_gz = get(url + ".gz").content
                except Exception, e:
                    continue
                else:
                    # FIXME: only file_data_gz was fetched in this branch;
                    # file_data_bz2 is unbound here (NameError) -- should
                    # decompress file_data_gz. Also note there is no
                    # `break` after this, so the loop's else-clause can
                    # still raise even though file_data was obtained.
                    file_data = call(["gunzip"], 
                                     stdin_data=file_data_bz2,
                                     do_crlf_fix=False)
            else:
                # .bz2 fetch succeeded: decompress and stop trying mirrors.
                file_data = call(["bzip2", "-d"], 
                                 stdin_data=file_data_bz2,
                                 do_crlf_fix=False)
                break
        else:
            # Plain fetch succeeded.
            break
    else:
        # Every mirror and compressed variant failed.
        raise Exception("Failed to fetch %r including gz and "
                        "bz2 variants and ports mirror"
                        % (base_url,))
    return file_data

@contextlib.contextmanager
def with_ubuntu_keyring():
    keyrings = [
        "/usr/share/keyrings/ubuntu-master-keyring.gpg",
        "/usr/share/keyrings/ubuntu-archive-keyring.gpg",
Exemple #19
0
def poweroff():
    """Poweroff the system
    """
    # Delegates to the system `poweroff` binary.
    process.call(["poweroff"])
Exemple #20
0
 def get_real_time_state():
     # Run CIB.GET_REALTIME_STATUS_CMD and return its output parsed as an
     # XML element tree root.
     xml_str = process.call(CIB.GET_REALTIME_STATUS_CMD.split(" "))
     return ET.fromstring(xml_str)
def resolve_document_using_git(git, docref):
    """Build a JSON-serializable document for *docref* by querying git.

    ``git`` is the argv prefix used to invoke git.  Supported docref
    kinds: "branches", "branch", "commit", "tree" and "blob"; any other
    kind raises NotImplementedError.

    Fixes: removed an unused ``id`` local that shadowed the builtin.
    """
    document = docref_to_dict(docref)
    kind = docref.kind
    # Fetch a single commit format attribute (e.g. "%an") for this docref.
    get = lambda a: git_show(git, docref.name, a)
    if kind == "branches":
        branches = set()
        # Parse `git branch -a` output: two-space prefix for ordinary
        # branches, "* " for the current one; "A -> B" lines are symrefs.
        for line in read_lines(call(git + ["branch", "-a"])):
            if line.startswith("  "):
                line = trim(line, prefix="  ")
            elif line.startswith("* "):
                line = trim(line, prefix="* ")
                if line == "(no branch)":
                    continue
            else:
                raise NotImplementedError(line)
            if " -> " in line:
                ba, bb = line.split(" -> ", 1)
                branches.add(posixpath.basename(ba))
                branches.add(posixpath.basename(bb))
            else:
                branches.add(posixpath.basename(line))
        branches = list(sorted(b for b in branches if b != "HEAD"))
        document["branches"] = []
        for branch in branches:
            document["branches"].append(docref_to_dict(BranchDocref(branch)))
    elif kind == "branch":
        # Resolve the branch name to the sha of its tip commit.
        sha = get1(
            read_lines(
                call(git + ["rev-parse", docref.name])))
        document["commit"] = docref_to_dict(ShaDocRef("commit", sha))
    elif kind == "commit":
        document.update(
            {"author": {"name": get("%an"),
                        "email": get("%ae"),
                        "date": get("%ai")},
             "committer": {"name": get("%cn"),
                           "email": get("%ce"),
                           "date": get("%ci")},
             "message": get("%B"),
             "tree": docref_to_dict(ShaDocRef("tree", get("%T"))),
             "parents": [],
             })
        # %P is a space-separated parent sha list; empty for root commits.
        for p in sorted(get("%P").split(" ")):
            if p == "":
                continue
            document["parents"].append(
                docref_to_dict(ShaDocRef("commit", p)))
    elif kind == "tree":
        document["children"] = []
        # ls-tree lines look like: "<mode> <kind> <sha>\t<basename>".
        for line in read_lines(call(git + ["ls-tree", docref.name])):
            child_mode, child_kind, rest = line.split(" ", 2)
            child_sha, child_basename = rest.split("\t", 1)
            ref = {"child": docref_to_dict(ShaDocRef(child_kind, child_sha)),
                   "basename": child_basename,
                   "mode": octal_to_symbolic_mode(child_mode)}
            document["children"].append(ref)
        document["children"].sort(key=lambda a: a["child"]["sha"])
    elif kind == "blob":
        blob = call(git + ["show", docref.name], do_crlf_fix=False)
        # Store text blobs raw; anything else base64-encoded.
        if is_text(blob):
            document["encoding"] = "raw"
            document["raw"] = blob
        else:
            document["encoding"] = "base64"
            document["base64"] = base64.b64encode(blob)
    else:
        raise NotImplementedError(kind)
    return document
Exemple #22
0
 def gpg(argv, **kwargs):
     # Run gpg against the temporary keyring homedir (temp_dir closure).
     return call(["gpg", "--homedir", temp_dir] + argv, **kwargs)
Exemple #23
0
def main(argv):
    """Render an apt sources.list from DEFAULT_CONFIG merged with an
    optional JSON argument, and optionally install it (plus an apt proxy
    config) system-wide via sudo."""
    parser = optparse.OptionParser(__doc__)
    parser.add_option("--install", dest="do_install",
                      default=False, action="store_true")
    parser.add_option("--skip-proxy-install", dest="do_proxy_install",
                      default=True, action="store_false")
    parser.add_option("--skip-blank-sources-list-install", dest="do_blank",
                      default=True, action="store_false")
    parser.add_option("--proxy", dest="proxy", 
                      default="http://ubuntu.devel.cmedltd.com:3142/")
    parser.add_option("--no-proxy", dest="proxy", action="store_const",
                      const=None)
    parser.add_option("--proxy-basename", dest="proxy_basename",
                      default="02-generated-proxy")
    parser.add_option("--basename", dest="basename", default="generated")
    options, args = parser.parse_args(argv)
    # Optional single positional argument: a JSON config overlay.
    custom_json = json.dumps({})
    if len(args) > 0:
        custom_json = args.pop(0)
    if len(args) > 0:
        parser.error("Unexpected: %r" % (args,))
    # Round-trip defaults through JSON so we work on a deep copy.
    default_json = json.dumps(DEFAULT_CONFIG)
    defaults = json.loads(default_json)
    custom = json.loads(custom_json)
    for key, value in defaults.items():
        custom.setdefault(key, value)
    if custom["distribution"] is None:
        # Fall back to the running system's release codename.
        custom["distribution"] = trim(
            call(["lsb_release", "--short", "--codename"]), suffix="\r\n")
    output = render_to_sources_list(custom)
    if options.do_install:
        # Basenames become path components below; reject separators/NULs.
        assert "\0" not in options.basename and "/" not in options.basename,\
            repr(options.basename)
        file_data = {
            "/etc/apt/sources.list.d/%s.list" % (options.basename,): output}
        if options.do_blank:
            file_data["/etc/apt/sources.list"] = BLANK_SOURCES
        if options.do_proxy_install:
            if options.proxy is None:
                proxy_data = "# No proxy\r\n"
            else:
                assert '"' not in options.proxy, repr(options.proxy)
                proxy_data = ('Acquire::HTTP { Proxy::"%s"; }\r\n'
                              % (options.proxy,))
            assert ("\0" not in options.proxy_basename 
                    and "/" not in options.proxy_basename), \
                    repr(options.proxy_basename)
            file_data["/etc/apt/apt.conf.d/%s" % (options.proxy_basename,)] = \
                proxy_data
        # Write the files as root: feed the {path: data} dict as JSON on
        # stdin to a small inline python program running under sudo.
        child = subprocess.Popen(["sudo", "python", "-c", """\
assert __name__ == "__main__"
import sys
assert len(sys.argv) == 1
import json
file_data = json.loads(sys.stdin.read())
for file_path, data in sorted(file_data.items()):
    fh = open(file_path, "wb")
    try:
        fh.write(data)
    finally:
        fh.close()
"""], stdin=subprocess.PIPE)
        child.communicate(json.dumps(file_data))
        assert child.returncode == 0, child.returncode
    # Always echo the rendered sources.list to stdout.
    sys.stdout.write(output)
Exemple #24
0
def get_supported_jobtypes():
    """Returns a hash of supported job types."""
    out = process.call(build_command('supported_job_types'), capture=True)
    # SECURITY: yaml.load without an explicit Loader can construct
    # arbitrary Python objects; prefer yaml.safe_load if the output is
    # plain data.
    result = yaml.load(out)
    # The same document also carries the binary version; drop it here.
    result.pop('Version', None)
    return result
Exemple #25
0
 def __call__(self, cb):
     # Stash the caller's callback, then start the command; process.call
     # is given self._cb, which presumably forwards completion to
     # self.usercb -- confirm against _cb's definition.
     self.usercb = cb
     process.call(self.cline, loc=self.loc, cb=self._cb, context=self.context, **self.kwargs)
Exemple #26
0
 def restart(self):
     # Restart the sshd service, discarding its output.
     # NOTE(review): '&>' is bash-specific; if process.call runs this via
     # plain sh the command would be backgrounded instead -- confirm.
     self.logger.debug("Restarting SSH")
     process.call("service sshd restart &>/dev/null")
Exemple #27
0
 def cleanup(self, id):
     # Run the CIB cleanup command for resource *id*.
     # (Parameter name shadows the builtin `id`, but renaming it would
     # break keyword callers.)
     process.call(CIB.CLEANUP_CMD.format(id=id).split(" "))
Exemple #28
0
def get_binary_version():
    """Returns the version of the binary."""
    out = process.call(build_command('supported_job_types'), capture=True)
    # SECURITY: yaml.load without an explicit Loader can construct
    # arbitrary objects; prefer yaml.safe_load if the output is plain data.
    return yaml.load(out)['Version']
 def bbcall(argv, **kwargs):
     """Run *argv* on the basebox by wrapping it in an ssh argv."""
     kwargs.setdefault("do_print", True)
     ssh_argv = basebox.get_ssh_argv(config, argv)
     return call(ssh_argv, **kwargs)