Example #1
def download_source(path):
    """Download sources for the package whose abs dir is path.

    If sources are not downloaded for some reason, a file called
    /sources/$PKGNAME.log will contain more information about the
    reason.
    """

    abs_name = os.path.basename(path)

    source_dir = os.path.join("/sources", abs_name)
    shutil.copytree(path, source_dir)
    recursive_chown(source_dir)

    # makepkg must be run in the directory where the PKGBUILD is.
    # However, we cannot use os.chdir because the cwd is process-wide,
    # not thread-local, and we're using threading rather than
    # multiprocessing.  Therefore, pass a new cwd to the subprocess.
    #
    # We want to download and extract the source, but not build it.
    # PKGBUILDs also have a prepare() function that gets executed
    # after extraction but before building; we shouldn't run that
    # function either, since it might need some of the packages in
    # `makedepends` to be installed. We should avoid installing those,
    # since we would need to lock otherwise (only one Pacman can be
    # running at a time).
    #
    # We don't pass --syncdeps (so that dependencies don't get
    # installed), but we do pass --nodeps (so that makepkg doesn't
    # error out when it notices that not all dependencies are
    # installed); this is fine because we're not building anything.
    # Anything that is required for
    # downloading and source extraction should already be installed by
    # the create_base_image/main.sh script.
    #
    # GPG and SHA take forever, and there's a serious risk that we fill
    # up the container's process table with unreaped children if we do
    # integrity checks; so don't bother.
    cmd = ("sudo -u tuscan makepkg --noprepare --nobuild --nocheck"
           " --nodeps --noarchive --skipinteg --nocolor --nosign")
    proc = subprocess.Popen(cmd.split(),
                            stdout=subprocess.PIPE,
                            stderr=subprocess.STDOUT,
                            cwd=source_dir)
    try:
        out, _ = proc.communicate(timeout=1800)
    except subprocess.TimeoutExpired:
        report_failure("sources", "timed out", abs_name)
        return
    output = codecs.decode(out, errors="replace")
    if proc.returncode:
        shutil.rmtree(source_dir)
        report_failure("sources", output, abs_name)
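The comment in this example about passing cwd to the subprocess instead of calling os.chdir() is worth illustrating: the working directory is per-process, so threads that call chdir would race with each other, while cwd= only affects the child. A minimal sketch of the pattern (the directory names are made up for illustration):

import subprocess
import threading

def run_in(directory):
    # Each child process gets its own working directory via cwd=;
    # the parent's cwd is never modified, so threads don't race on it.
    subprocess.run(["ls"], cwd=directory, check=True)

threads = [threading.Thread(target=run_in, args=(d,))
           for d in ("/tmp", "/var", "/usr")]
for t in threads:
    t.start()
for t in threads:
    t.join()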
Example #2
def get_package_source_dir(args):
    """Get the sources for a package.

    PKGBUILDs contain instructions for downloading sources and then
    building them. We don't want to download the sources before every
    build, so this function downloads sources and stores them in a
    standard location so that they can be copied later rather than
    re-downloaded.

    If this function returns successfully, the abs directory for the
    package will have been copied to sources_directory, and the sources
    for that package will have been downloaded into it.
    """
    if not os.path.isdir(args.abs_dir):
        die(Status.failure,
            "Could not find abs directory for dir '%s'" % args.abs_dir)
    shutil.copytree(args.abs_dir, args.permanent_source_dir)
    recursive_chown(args.permanent_source_dir)
    os.chdir(args.permanent_source_dir)

    # The --nobuild flag to makepkg causes it to download sources, but
    # not build them.
    command = (
        "sudo -u tuscan makepkg --nobuild --syncdeps "
        "--skipinteg --skippgpcheck --skipchecksums "
        "--noconfirm --nocolor --log --noprogressbar "
        "--nocheck --nodeps"
    )
    time = timestamp()
    cp = subprocess.run(command.split(),
                        stdout=subprocess.PIPE,
                        stderr=subprocess.STDOUT,
                        universal_newlines=True)
    log("command", command, cp.stdout.splitlines(), time)

    success = False
    if cp.returncode:
        shutil.rmtree(args.permanent_source_dir)
    else:
        success = True

    return success
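Since this function exists to populate a permanent source cache, a caller would normally check whether the cache is already populated before invoking it. A hedged sketch of such a caller (ensure_sources is hypothetical; only the attribute names come from the example above):

import os

def ensure_sources(args):
    # Reuse previously downloaded sources if the permanent directory
    # already exists; otherwise download them exactly once.
    if os.path.isdir(args.permanent_source_dir):
        return True
    return get_package_source_dir(args)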
Example #3
def copy_and_build(args):
    try:
        shutil.copytree(args.permanent_source_dir, args.build_dir)
    except shutil.Error as e:
        # e.args will be a list, containing a single list of 3-tuples.
        # We are interested in the third item of each tuple.
        errors = [err[2] for err in e.args[0]]
        die(Status.failure,
            "No source directory in source volume: %s" %
            args.permanent_source_dir,
            output=errors)
    recursive_chown(args.build_dir)
    os.chdir(args.build_dir)

    proc = subprocess.Popen(["/usr/bin/sloccount", "src"],
                            stdout=subprocess.PIPE,
                            stderr=subprocess.STDOUT)
    out, _ = proc.communicate()
    output = codecs.decode(out, errors="replace")
    if proc.returncode:
        log("die", "SLOCCount failed", output.splitlines())
    else:
        log_sloc(output.splitlines())

    # Add the --host option to invocations of ./configure
    with open("PKGBUILD", encoding="utf-8") as f:
        pkgbuild = f.read().splitlines()

    pkgbuild = [
        re.sub(r"configure\s", "configure --host x86_64-unknown-linux ",
               line) for line in pkgbuild
    ]

    with open("PKGBUILD", "w", encoding="utf-8") as f:
        f.write("\n".join(pkgbuild))

    # The difference between this invocation and the one in
    # get_package_source_dir() is the --noextract flag. Sources should
    # already have been downloaded and extracted by
    # get_package_source_dir(), so we just want to build them.
    #
    # Also, the invocation in get_package_source_dir has the --nodeps
    # option, since we just wanted to download sources there. Here, we
    # do want to install dependencies (from our local toolchain
    # repository), so don't pass the --nodeps flag.
    if args.env_vars is None:
        args.env_vars = []

    command_env = os.environ.copy()
    for pair in args.env_vars:
        var, val = pair.split("=", 1)
        command_env[var] = val

    command = (
        "sudo -u tuscan " + " ".join(args.env_vars) + " bear makepkg --noextract --syncdeps"
        " --skipinteg --skippgpcheck --skipchecksums"
        " --noconfirm --nocolor --log --noprogressbar"
        " --nocheck"
    )
    time = timestamp()

    proc = subprocess.Popen(command.split(),
                            stdout=subprocess.PIPE,
                            stderr=subprocess.STDOUT,
                            env=command_env)

    stdout_data, _ = proc.communicate()
    output = codecs.decode(stdout_data, errors="replace")

    log("command", command, output.splitlines(), time)

    # Pick up output left by bear
    if os.path.exists("compile_commands.json"):
        with open("compile_commands.json") as f:
            bear_output = json.load(f)
        log("bear", "bear", output=bear_output)
    else:
        log("die", "No bear output found in dir '%s'" % os.getcwd())

    native_tools = {}
    for native in glob("/tmp/tuscan-native-*"):
        with open(native) as f:
            tool = f.readlines()
        if not tool:
            continue
        tool = tool[0].strip()
        if tool not in native_tools:
            native_tools[tool] = 0
        native_tools[tool] += 1
    if native_tools:
        log("native_tools", "native_tools", native_tools)

    return proc.returncode
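The compile_commands.json file that bear leaves behind is a JSON compilation database: an array of objects with "directory", "file" and either "command" (a string) or "arguments" (a list) keys, one per intercepted compiler invocation. A small illustrative helper, assuming that format, which counts invocations per compiler binary (summarize_compile_commands is not part of the project):

import json
from collections import Counter

def summarize_compile_commands(path="compile_commands.json"):
    # Count how many intercepted compile commands used each compiler.
    with open(path) as f:
        entries = json.load(f)
    binaries = Counter()
    for entry in entries:
        # Older bear versions emit "command"; newer ones may emit
        # "arguments" instead.
        if "command" in entry:
            binaries[entry["command"].split()[0]] += 1
        elif "arguments" in entry:
            binaries[entry["arguments"][0]] += 1
    return dict(binaries)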
Example #4
def toolchain_specific_setup(args):
    log("info", "Running android-specific setup")

    if not os.path.isdir("/sysroot"):
        os.mkdir("/sysroot")
    recursive_chown("/sysroot")

    # wget and curl output unsuitable progress bars even when not
    # connected to a TTY. Turn them off.
    with open("/etc/wgetrc", "a") as f:
        print("verbose = off", file=f)

    with open("/etc/.curlrc", "a") as f:
        print("silent", file=f)
        print("show-error", file=f)

    log("info", "Downloading & unpacking NDK")
    os.chdir("/home/tuscan")

    setup_file = "/home/tuscan/ndk.bin"

    cmd = ("wget -O %s"
           " http://dl.google.com/android/ndk/android-"
           "ndk-r10e-linux-x86_64.bin" % (setup_file))
    run_cmd(cmd)

    cmd = "chmod +x " + setup_file
    run_cmd(cmd)

    run_cmd(setup_file, output=False)

    log("info", "Setting up toolchain")

    cmd = ("/home/tuscan/android-ndk-r10e/build/tools/"
           "make-standalone-toolchain.sh"
           " --arch=arm --platform=android-21 "
           " --install-dir=" + "/sysroot")
    run_cmd(cmd)

    cmd = "chown -R tuscan: " + "/sysroot"
    run_cmd(cmd, as_root=True)

    cmd = "chown -R tuscan: /home/tuscan/android-ndk-r10e"
    run_cmd(cmd, as_root=True)

    bindirs = [
        "/sysroot/bin", "/sysroot/libexec/gcc/arm-linux-androideabi/4.8"
    ]
    for d in bindirs:
        for f in os.listdir(d):
            f = os.path.join(d, f)
            cmd = "chmod a+rx %s" % f
            run_cmd(cmd, as_root=True)

    for f in os.listdir("/sysroot"):
        if os.path.isdir(os.path.join("/sysroot", f)):
            shutil.copytree(os.path.join("/sysroot", f),
                            os.path.join("/toolchain_root", f))
        elif os.path.isfile(os.path.join("/sysroot", f)):
            shutil.copy(os.path.join("/sysroot", f), "/toolchain_root")
    recursive_chown("/toolchain_root")
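run_cmd() is used throughout this example but not defined in it. Purely as an assumption about its shape (a command string, an output flag, and an as_root flag), a minimal sketch might look like the following; it is a guess, not the project's actual helper:

import subprocess

def run_cmd(cmd, output=True, as_root=False):
    # Hypothetical sketch: run a command string, optionally via sudo,
    # and capture stdout+stderr when output is requested.
    argv = cmd.split()
    if as_root:
        argv = ["sudo"] + argv
    cp = subprocess.run(
        argv,
        stdout=subprocess.PIPE if output else subprocess.DEVNULL,
        stderr=subprocess.STDOUT,
        universal_newlines=True)
    return cp.returncode, cp.stdout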
Example #5
def main():
    """Install vanilla bootstrap packages from local mirror.

    Installing all the bootstrap packages is a lengthy (and highly
    disk-IO bound, thus serializing) procedure, so it's best to do it
    only once. Instead of having each container that runs the
    make_package stage install the bootstrap packages, we install the
    bootstrap packages in this container and then base the make_package
    containers on this container's image.
    """
    parser = get_argparser()
    args = parser.parse_args()

    # GPG takes time. Remove package signature checks.
    lines = []
    with open("/etc/pacman.conf") as f:
        for line in f:
            if re.search("SigLevel", line):
                lines.append("SigLevel = Never")
            else:
                lines.append(line.strip())
    with open("/etc/pacman.conf", "w") as f:
        for line in lines:
            print(line.strip(), file=f)

    name_data_file = os.path.join(args.shared_directory,
                                  "get_base_package_names", "latest",
                                  "names.json")

    with open(name_data_file) as f:
        name_data = json.load(f)
    bootstrap_packs = (name_data["base"] + name_data["base_devel"] +
                       name_data["tools"] + ["sloccount"])

    cmd = "pacman -S --needed --noconfirm %s" % " ".join(set(bootstrap_packs))
    proc = subprocess.Popen(cmd.split(),
                            stdout=subprocess.PIPE,
                            stderr=subprocess.STDOUT)
    out, _ = proc.communicate()
    out = codecs.decode(out, errors="replace")
    if proc.returncode:
        log("die", cmd, out.splitlines())
        exit(1)
    else:
        log("command", cmd, out.splitlines())

    # When building red, we need to supply it with a list of defines
    # suitable for this toolchain. Construct those defines here and
    # write out the PKGBUILD with those defines.

    with open("/build/tool_redirect_rules.yaml") as f:
        transforms = yaml.safe_load(f)

    log("info",
        "Before rules %s" % yaml.dump(transforms, default_flow_style=False))

    for tool in transforms["overwrite"]:
        transforms["replacements"][tool] = tool

    log("info",
        "After rules %s" % yaml.dump(transforms, default_flow_style=False))

    defines = []

    for tool, replacement in transforms["replacements"].items():
        # The tool & replacement will be written just like the name of
        # the tool binary, e.g. "scan-view", "clang++", etc. These are
        # not valid identifiers (because they contain - or +), so the
        # libred cmake define variable will write them as SCAN_VIEW and
        # CLANGPP. Do that transformation here, but leave the name of
        # the original tool intact.
        var_name = re.sub("-", "_", tool)
        var_name = re.sub(r"\+\+", "pp", var_name)
        var_name = var_name.upper()

        path = os.path.join(transforms["bin-dir"],
                            "%s%s" % (transforms["prefix"], replacement))
        defines.append('-DRED_%s="%s"' % (var_name, path))

        log("info", "Redirecting %s to %s" % (var_name, path))

    if transforms["bin-dir"]:
        defines.append('-DRED_ENSURE_PATH="%s"' % transforms["bin-dir"])

    jinja = jinja2.Environment(loader=jinja2.FileSystemLoader(["/build"]))
    pkgbuild_temp = jinja.get_template("red-PKGBUILD")
    pkgbuild = pkgbuild_temp.render(defines=(" ".join(defines)))

    with open("/build/PKGBUILD", "w") as f:
        f.write(pkgbuild)

    log("info", "Generated PKGBUILD for red", output=pkgbuild.splitlines())

    # Build and install red
    with tempfile.TemporaryDirectory() as d:
        red_tar = os.path.join(d, "red.tar.xz")
        with tarfile.open(red_tar, "w:xz") as tar:
            tar.add("/red", arcname="red")
        shutil.copyfile("/build/PKGBUILD", os.path.join(d, "PKGBUILD"))
        shutil.chown(d, user="tuscan")
        os.chdir(d)
        cmd = "sudo -u tuscan makepkg --nocolor"
        cp = subprocess.run(cmd.split(),
                            stdout=subprocess.PIPE,
                            stderr=subprocess.STDOUT,
                            universal_newlines=True)
        if cp.returncode:
            log("die", cmd, cp.stdout.splitlines())
            exit(1)
        else:
            log("command", cmd, cp.stdout.splitlines())
        package = glob("red*.pkg.tar.xz")
        if len(package) != 1:
            log("die", "Expected exactly one red package", package)
            exit(1)
        cmd = "pacman -U --noconfirm %s" % package[0]
        cp = subprocess.run(cmd.split(),
                            stdout=subprocess.PIPE,
                            stderr=subprocess.STDOUT,
                            universal_newlines=True)
        if cp.returncode:
            log("die", cmd, cp.stdout.splitlines())
            exit(1)
        else:
            log("command", cmd, cp.stdout.splitlines())

    if not os.path.isdir("/toolchain_root"):
        log("die", "/toolchain_root is not mounted")
        exit(1)

    if os.listdir("/toolchain_root"):
        log("info", ("Skipping toolchain-specific setup as "
                     "/toolchain_root contains files. Listing:"),
            output=list(os.listdir("/toolchain_root")))
    else:
        log("info", ("/toolchain_root is empty, performing "
                     "toolchain-specific setup"),
            output=list(os.listdir("/toolchain_root")))
        setup.toolchain_specific_setup(args)

    recursive_chown("/toolchain_root")

    exit(0)
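The tool-name transformation described in the loop above ("scan-view" becomes SCAN_VIEW, "clang++" becomes CLANGPP) is self-contained enough to pull out and check in isolation; a small sketch mirroring the same re.sub and upper() steps (define_name is illustrative only):

import re

def define_name(tool):
    # "scan-view" -> "SCAN_VIEW", "clang++" -> "CLANGPP"
    name = re.sub("-", "_", tool)
    name = re.sub(r"\+\+", "pp", name)
    return name.upper()

assert define_name("scan-view") == "SCAN_VIEW"
assert define_name("clang++") == "CLANGPP"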
Example #6
def copy_and_build(args):
    try:
        shutil.copytree(args.permanent_source_dir, args.build_dir)
    except shutil.Error as e:
        # e.args will be a list, containing a single list of 3-tuples.
        # We are interested in the third item of each tuple.
        errors = [err[2] for err in e.args[0]]
        die(Status.failure,
            "No source directory in source volume: %s" %
            args.permanent_source_dir,
            output=errors)
    recursive_chown(args.build_dir)
    os.chdir(args.build_dir)

    # Add the --host option to invocations of ./configure
    with open("PKGBUILD", encoding="utf-8", errors="ignore") as f:
        pkgbuild = f.read().splitlines()

    if args.toolchain == "android":
        pkgbuild = [
            re.sub(r"configure\s", ("configure --build=x86_64-unknown-linux "
                                    "--host=arm-linux-androideabi "), line)
            for line in pkgbuild
        ]
    else:
        pkgbuild = [
            re.sub(r"configure\s", "configure --host=x86_64-unknown-linux ",
                   line) for line in pkgbuild
        ]

    with open("PKGBUILD", "w", encoding="utf-8") as f:
        f.write("\n".join(pkgbuild))

    # This invocation of makepkg has the --noextract flag, because
    # sources should already have been extracted during the creation of
    # the base image (see stages/create_base_image/getpkgs.py). We still
    # need to perform all other stages of package building, including
    # the prepare() function that is called just before the build()
    # function.
    #
    # The invocation also has the --syncdeps flag; this is fine, because
    # anything that this package depends on should already have been
    # built and its hybrid package will have been installed.
    if args.env_vars is None:
        args.env_vars = []

    command_env = os.environ.copy()
    for pair in args.env_vars:
        var, val = pair.split("=", 1)
        command_env[var] = val

    command = ("sudo -u tuscan " + " ".join(args.env_vars) +
               " red makepkg --noextract --syncdeps"
               " --skipinteg --skippgpcheck --skipchecksums"
               " --noconfirm --nocolor --log --noprogressbar"
               " --nocheck")
    time = timestamp()

    proc = subprocess.Popen(command.split(),
                            stdout=subprocess.PIPE,
                            stderr=subprocess.STDOUT,
                            env=command_env)

    stdout_data, _ = proc.communicate()
    output = codecs.decode(stdout_data, errors="replace")

    log("command", command, output.splitlines(), time)

    # Measure LOC
    loc_proc = subprocess.Popen(
        ["/usr/bin/sloccount", "--addlang", "makefile", "src"],
        stdout=subprocess.PIPE,
        stderr=subprocess.STDOUT)
    out, _ = loc_proc.communicate()
    output = codecs.decode(out, errors="replace")
    if loc_proc.returncode:
        log("die", "SLOCCount failed", output.splitlines())
    else:
        log_sloc(output.splitlines())

    # Pick up output left by red
    try:
        if os.path.exists("compile_commands.json"):
            with open("compile_commands.json") as f:
                red_output = json.load(f)
            log("red", "red", output=red_output)
        else:
            log("die", "No red output found in dir '%s'" % os.getcwd())
    except json.decoder.JSONDecodeError as e:
        log("red", "red", output=[])

    red_errors = []
    for native in glob("/tmp/red-error-*"):
        with open(native) as f:
            lines = f.readlines()
        red_errors.append({
            "category": lines[0].strip(),
            "pid": lines[1].strip(),
            "info": "\n".join(lines[2:])
        })
        os.unlink(native)

    log("red_errors", "red_errors", output=red_errors)

    return proc.returncode
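The PKGBUILD rewriting at the top of this function is a plain regex substitution; running it on an invented sample line shows what it does to a typical ./configure invocation in the android case:

import re

line = "./configure --prefix=/usr"
rewritten = re.sub(r"configure\s",
                   ("configure --build=x86_64-unknown-linux "
                    "--host=arm-linux-androideabi "), line)
print(rewritten)
# ./configure --build=x86_64-unknown-linux --host=arm-linux-androideabi --prefix=/usr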
Example #7
def main():
    parser = get_argparser()
    parser.add_argument("--abs-dir", dest="abs_dir", required=True)
    parser.add_argument("--sysroot", default="sysroot")
    args = parser.parse_args()
    args.mirror_directory = "/mirror"

    os.nice(10)

    dump_build_information(args)

    pkg_dir = os.path.basename(args.abs_dir)

    if pkg_dir in os.listdir(args.sources_directory):
        args.permanent_source_dir = os.path.join(args.sources_directory,
                                                 pkg_dir)
    else:
        die(
            Status.failure, "No source directory in source volume: %s" %
            args.sources_directory)

    args.build_dir = os.path.join("/tmp", pkg_dir)

    set_local_repository_location(args.toolchain_directory,
                                  toolchain_repo_name())

    if not os.path.isdir("/sysroot"):
        os.makedirs("/sysroot")

    copied_files = []
    existing_files = []

    for d in os.listdir("/toolchain_root"):
        base = os.path.basename(d)
        src = os.path.join("/toolchain_root", d)
        dst = os.path.join("/sysroot", base)

        # This can happen if we built the toolchain for the first time
        # on this run. If we're using a pre-built toolchain, the file
        # won't exist.
        if os.path.lexists(dst):
            existing_files.append(dst)
            continue

        if os.path.isfile(src):
            copied_files.append((src, dst))
            shutil.copyfile(src, dst)
        elif os.path.isdir(src):
            copied_files.append((src, dst))
            shutil.copytree(src, dst)

    copied_files = ["%s  -->  %s" % (src, dst) for (src, dst) in copied_files]
    if copied_files:
        log("info",
            "Copied permanent toolchain into container-local sysroot",
            output=copied_files)
    if existing_files:
        log("info",
            "There were existing files in /sysroot, using those",
            output=existing_files)

    recursive_chown("/sysroot")

    result = copy_and_build(args)
    if result:
        die(Status.failure)

    paths_to_packages = create_hybrid_packages(args)

    if not paths_to_packages:
        die(Status.failure, "No hybrid packages were created.")

    for path in paths_to_packages:
        add_package_to_toolchain_repo(path, args.toolchain_directory)

    die(Status.success)
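die() and Status appear in several of these examples but are never defined in them. Purely as an assumption about the interface they imply (a status enum plus an optional message and output lines), a minimal sketch could be:

import sys
from enum import Enum

class Status(Enum):
    success = 0
    failure = 1

def die(status, message=None, output=None):
    # Hypothetical sketch: report a final message and exit with a code
    # derived from the status. The real helper presumably routes the
    # message through the project's log() function rather than stderr.
    if message is not None:
        print(message, file=sys.stderr)
    for line in output or []:
        print(line, file=sys.stderr)
    sys.exit(0 if status is Status.success else 1)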