Example #1
def main():
    parser = get_argparser()
    parser.add_argument("--abs-dir", dest="abs_dir", required=True)
    args = parser.parse_args()
    args.mirror_directory = "/mirror"

    dump_build_information(args)

    pkg_dir = os.path.basename(args.abs_dir)

    args.permanent_source_dir = os.path.join(args.sources_directory, pkg_dir)
    args.build_dir = os.path.join("/tmp", pkg_dir)

    sanity_checks(args)

    initialize_repositories(args)

    result = copy_and_build(args)
    if result:
        die(Status.failure)

    paths_to_packages = create_hybrid_packages(args)

    if not paths_to_packages:
        die(Status.failure, "No hybrid packages were created.")

    for path in paths_to_packages:
        add_package_to_toolchain_repo(path, args.toolchain_directory)

    die(Status.success)
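
The die and Status helpers used above come from elsewhere in the project and are not shown in the snippet. A minimal sketch of what they might look like, purely as an assumption to make the control flow concrete:

# Hypothetical sketch; the real die/Status helpers live elsewhere in
# the project and may differ.
import enum
import json
import sys


class Status(enum.Enum):
    success = "success"
    failure = "failure"


def die(status, message=None):
    # Emit a final machine-readable status, then exit accordingly.
    print(json.dumps({"status": status.value, "message": message}))
    sys.exit(0 if status == Status.success else 1)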
Example #2
def main():
    """Create empty local toolchain repository.

    Only one instance of this stage should be run, and it should run
    before any instances of the make_package stage run. We don't want to
    be creating the local repository several times concurrently.
    """
    parser = get_argparser()
    args = parser.parse_args()

    # We can't create an empty database. We need to add a fake package
    # to a new database, then remove it again.

    path = "/tmp/dummy_pack"
    pkg_name = "dummy-pack"

    os.makedirs(path, exist_ok=True)
    pkg_info = textwrap.dedent("""\
          # Generated by makepkg 4.1.2
          # using fakeroot version 1.20
          # Mon Oct 21 14:19:27 UTC 2013
          pkgname = %s
          pkgver = 1.0.0-0
          url = abc.xyz
          builddate = 1382364167
          packager = bog
          size = 1000000
          arch = any
          """) % pkg_name

    log("info", "Writing fake package .PKGINFO:",
        pkg_info.splitlines())

    with open(os.path.join(path, ".PKGINFO"), "w") as f:
        print(pkg_info, file=f)

    log("info", "Building fake package")
    pkg = create_package(path, pkg_name, args)
    log("info", "Initializing toolchain repo")
    add_package_to_toolchain_repo(pkg, args.toolchain_directory,
                                  remove_name=pkg_name)
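
add_package_to_toolchain_repo is not shown here. Given the docstring's trick (add a fake package, then remove it again), one plausible implementation wraps pacman's repo-add and repo-remove tools; a hedged sketch, with the database file name being an assumption:

# Hypothetical sketch of add_package_to_toolchain_repo, assuming the
# repository database is maintained with pacman's repo-add/repo-remove.
import os
import subprocess


def add_package_to_toolchain_repo(pkg, toolchain_directory,
                                  remove_name=None):
    db = os.path.join(toolchain_directory, "toolchain.db.tar.gz")
    # repo-add creates the database if it does not exist yet.
    subprocess.run(["repo-add", db, pkg], check=True)
    if remove_name:
        # Removing the dummy entry leaves an empty but valid database.
        subprocess.run(["repo-remove", db, remove_name], check=True)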
Example #3
def main():
    parser = get_argparser()
    args = parser.parse_args()

    name_data = {}

    name_data["base"] = package_list("base")
    name_data["base_devel"] = package_list("base-devel")
    name_data["tools"] = tools()

    with OutputDirectory(args) as out_dir:
        with open(out_dir + "/names.json", "w") as names:
            json.dump(name_data, names)

    # Touch-files need to be created for each of these packages, outside
    # the container (in the results directory). So print their names to
    # stdout and let the top-level script outside the container create
    # the touch-files.
    for cat in ["base", "base_devel", "tools"]:
        for pkg in name_data[cat]:
            print(pkg + ".json")
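
The names printed above are consumed by a top-level script outside the container. A minimal sketch of that script's side of the contract, where the results directory and the way the stage is invoked are assumptions:

# Hypothetical consumer: collect package names from the stage's stdout
# and create an empty touch-file for each name in the results directory.
import os
import subprocess

results_dir = "results"  # assumed location of the results directory
cp = subprocess.run(["./run_stage.sh", "get_base_package_names"],
                    stdout=subprocess.PIPE, universal_newlines=True,
                    check=True)
for name in cp.stdout.splitlines():
    # Create the touch-file empty if it does not already exist.
    open(os.path.join(results_dir, name), "a").close()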
Example #4
def main():
    """This script should be run inside a container."""

    parser = get_argparser()
    args = parser.parse_args()

    name_data_file = os.path.join(args.shared_directory,
                                  "get_base_package_names", "latest",
                                  "names.json")
    with open(name_data_file) as f:
        name_data = json.load(f)

    with open("/build/provides.json") as f:
        provides = json.load(f)

    abss = glob.glob("/var/abs/*/*")

    # Build a list of package information. This involves interpreting
    # Bash files, so split across multiple processes for speed.
    man = multiprocessing.Manager()
    global_infos = man.list()

    curry_gather = functools.partial(gather_package_data,
                                     name_data=name_data,
                                     args=args,
                                     global_infos=global_infos,
                                     provides=provides)

    with multiprocessing.Pool(multiprocessing.cpu_count()) as p:
        p.map(curry_gather, abss)

    # Back to sequential mode. Do various cleanups on the list of
    # package information

    infos = [info for info in global_infos]
    resolve_provides(infos, provides)
    circulars = infos_with_circular_deps(infos, name_data)
    infos = drop_excluded(infos, circulars, name_data)

    infos = drop_tools(infos, name_data)

    # Finally, get a list of builds and write them out.
    builds = build_triples(infos, args)

    ninja = ninja_syntax.Writer(sys.stdout, 72)

    ninja.rule(
        "makepkg",
        (
            "container_build_dir/package_build_wrapper.py"
            " --shared-directory {shared_directory}"
            " --shared-volume {shared_volume}"
            " --toolchain-directory {toolchain_directory}"
            " --toolchain-volume {toolchain_volume}"
            " --toolchain {toolchain}"
            " --output-directory {output_directory}"
            # ${in} and ${out} are NOT format strings, they need to
            # be written out like this to the ninja file. So (python)
            # escape by using double-curly brackets
            " --abs-dir ${{in}}"
            " ${{out}}").format(shared_directory=args.shared_directory,
                                shared_volume=args.shared_volume,
                                output_directory=args.output_directory,
                                toolchain_directory=args.toolchain_directory,
                                toolchain_volume=args.toolchain_volume,
                                toolchain=args.toolchain),
        description="Building '${in}'")

    for outs, rule, ins in builds:
        ninja.build(outs, rule, ins)

    sys.stdout.flush()
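
The double-curly-bracket escaping in the rule above is worth seeing in isolation: str.format() turns {{ and }} into literal braces, so ${in} and ${out} survive into the ninja file as ninja variables. A quick self-contained demonstration:

# format() substitutes {toolchain} but collapses ${{in}}/${{out}} to
# the literal ${in}/${out} that ninja expects.
template = "wrapper.py --abs-dir ${{in}} --toolchain {toolchain} ${{out}}"
print(template.format(toolchain="android"))
# Output: wrapper.py --abs-dir ${in} --toolchain android ${out}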
Example #5
def main():
    """Install vanilla bootstrap packages from local mirror.

    Installing all the bootstrap packages is a lengthy (and highly
    disk-IO bound, thus serializing) procedure, so it's best to do it
    only once. Instead of having each container running the make_package
    stage installing the bootstrap packages, we install the bootstrap
    packages in this container and then base the make_package containers
    on the image of this container.
    """
    parser = get_argparser()
    args = parser.parse_args()

    # GPG takes time. Remove package signature checks.
    lines = []
    with open("/etc/pacman.conf") as f:
        for line in f:
            if re.search("SigLevel", line):
                lines.append("SigLevel = Never")
            else:
                lines.append(line.strip())
    with open("/etc/pacman.conf", "w") as f:
        for line in lines:
            print(line.strip(), file=f)

    name_data_file = os.path.join(args.shared_directory,
                                  "get_base_package_names", "latest",
                                  "names.json")

    with open(name_data_file) as f:
        name_data = json.load(f)
    bootstrap_packs = (name_data["base"] + name_data["base_devel"] +
                       name_data["tools"] + ["sloccount"])

    vanilla = "file://" + args.mirror_directory + "/$repo/os/$arch"
    log("info", "Printing %s to mirrorlist" % vanilla)
    with open("/etc/pacman.d/mirrorlist", "w") as f:
        print("Server = " + vanilla, file=f)

    cmd = "pacman -Syy --noconfirm"
    time = timestamp()
    cp = subprocess.run(cmd.split(), stdout=subprocess.PIPE,
                        stderr=subprocess.STDOUT, universal_newlines=True)
    log("command", cmd, cp.stdout.splitlines(), time)
    if cp.returncode:
        exit(1)

    cmd = "pacman -Su --noconfirm " + " ".join(bootstrap_packs)
    time = timestamp()
    cp = subprocess.run(cmd.split(), stdout=subprocess.PIPE,
                        stderr=subprocess.STDOUT, universal_newlines=True)
    log("command", cmd, cp.stdout.splitlines(), time)
    if cp.returncode:
        exit(1)

    run_cmd("useradd -m -s /bin/bash tuscan", as_root=True)

    # User 'tuscan' needs to be able to use sudo without being harassed
    # for passwords, and so does root (to su into tuscan).
    with open("/etc/sudoers", "a") as f:
        print("tuscan ALL=(ALL) NOPASSWD: ALL", file=f)
        print("root ALL=(ALL) NOPASSWD: ALL", file=f)

    # Download and install bear
    with tempfile.TemporaryDirectory() as d:
        url = ("https://github.com/karkhaz/Bear/blob/master/"
               "bear-2.1.5-1-x86_64.pkg.tar.xz?raw=true")
        response = urllib.request.urlopen(url)
        tar_file = response.read()
        pkg_name = "bear.pkg.tar.xz"
        with open(os.path.join(d, pkg_name), "wb") as f:
            f.write(tar_file)
        os.chdir(d)
        cmd = "pacman -U --noconfirm %s" % pkg_name
        cp = subprocess.run(cmd.split(), stdout=subprocess.PIPE,
                            stderr=subprocess.STDOUT,
                            universal_newlines=True)
        log("command", cmd, cp.stdout.splitlines())
        if cp.returncode:
            exit(1)

    os.mkdir("/toolchain_root")
    shutil.chown("/toolchain_root", "tuscan")

    # Replace native tools with thin wrappers
    with open("/build/tool_redirect_rules.yaml") as f:
        transforms = yaml.safe_load(f)
    execs = transforms["overwrite"] + list(transforms["replacements"].keys())
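    # Removing one occurrence of each unique name leaves only the
    # extras, so anything still left in execs was listed twice.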
    for e in set(execs):
        execs.remove(e)
    if execs:
        log("error",
            "The following executables have been specified twice "
            "in tool_redirect_rules.yaml: %s" % str(execs))
        exit(1)

    for e in transforms["overwrite"]:
        transforms["replacements"][e] = e
    transforms.pop("overwrite", None)

    jinja = jinja2.Environment(loader=jinja2.FileSystemLoader(["/build"]))

    wrapper_temp = jinja.get_template("tool_wrapper.c")

    with tempfile.TemporaryDirectory() as tmp_dir:
        for native, toolchain in transforms["replacements"].items():
            wrapper = wrapper_temp.render(
                native_program=native,
                toolchain_prefix=transforms["prefix"],
                toolchain_program=toolchain)

            with tempfile.NamedTemporaryFile("w", suffix=".c") as temp:
                temp.write(wrapper)
                temp.flush()
                cmd = "gcc -o %s %s" % (os.path.join(tmp_dir, native), temp.name)
                proc = subprocess.Popen(cmd.split(),
                                        stdout=subprocess.PIPE,
                                        stderr=subprocess.STDOUT,
                                        universal_newlines=True)
                out, _ = proc.communicate()
                if proc.returncode:
                    body = "%s\n%s\n%s" % (cmd, out, wrapper)
                    log("error", "Failed to compile compiler wrapper", body=body)
                    exit(1)
        for wrapper in os.listdir(tmp_dir):
            shutil.move(os.path.join(tmp_dir, wrapper),
                        os.path.join("/usr/bin", wrapper))

    setup.toolchain_specific_setup(args)

    exit(0)
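
The duplicate check above (remove one occurrence of each unique name and see what is left) works but is easy to misread. An equivalent formulation with collections.Counter, shown only as an alternative sketch:

# Equivalent duplicate detection using collections.Counter.
from collections import Counter

execs = ["gcc", "ld", "gcc"]  # example input
duplicates = [e for e, n in Counter(execs).items() if n > 1]
if duplicates:
    print("Specified twice: %s" % duplicates)  # -> Specified twice: ['gcc']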
Example #6
def main():
    """Install vanilla bootstrap packages from local mirror.

    Installing all the bootstrap packages is a lengthy (and highly
    disk-IO bound, thus serializing) procedure, so it's best to do it
    only once. Instead of having each container running the make_package
    stage installing the bootstrap packages, we install the bootstrap
    packages in this container and then base the make_package containers
    on the image of this container.
    """
    parser = get_argparser()
    args = parser.parse_args()

    # GPG takes time. Remove package signature checks.
    lines = []
    with open("/etc/pacman.conf") as f:
        for line in f:
            if re.search("SigLevel", line):
                lines.append("SigLevel = Never")
            else:
                lines.append(line.strip())
    with open("/etc/pacman.conf", "w") as f:
        for line in lines:
            print(line.strip(), file=f)

    name_data_file = os.path.join(args.shared_directory,
                                  "get_base_package_names", "latest",
                                  "names.json")

    with open(name_data_file) as f:
        name_data = json.load(f)
    bootstrap_packs = (name_data["base"] + name_data["base_devel"] +
                       name_data["tools"] + ["sloccount"])

    cmd = "pacman -S --needed --noconfirm %s" % " ".join(set(bootstrap_packs))
    proc = subprocess.Popen(cmd.split(),
                            stdout=subprocess.PIPE,
                            stderr=subprocess.STDOUT)
    out, _ = proc.communicate()
    out = codecs.decode(out, errors="replace")
    if proc.returncode:
        log("die", cmd, out.splitlines())
        exit(1)
    else:
        log("command", cmd, out.splitlines())

    # When building red, we need to supply it with a list of defines
    # suitable for this toolchain. Construct those defines here and
    # write out the PKGBUILD with those defines.

    with open("/build/tool_redirect_rules.yaml") as f:
        transforms = yaml.safe_load(f)

    log("info",
        "Before rules %s" % yaml.dump(transforms, default_flow_style=False))

    for tool in transforms["overwrite"]:
        transforms["replacements"][tool] = tool

    log("info",
        "After rules %s" % yaml.dump(transforms, default_flow_style=False))

    defines = []

    for tool, replacement in transforms["replacements"].items():
        # The tool & replacement will be written just like the name of
        # the tool binary, e.g. "scan-view", "clang++", etc. These are
        # not valid identifiers (because they contain - or +), so the
        # libred cmake define variable will write them as SCAN_VIEW and
        # CLANGPP. Do that transformation here, but leave the name of
        # the original tool intact.
        var_name = re.sub("-", "_", tool)
        var_name = re.sub(r"\+\+", "pp", var_name)
        var_name = var_name.upper()

        path = os.path.join(transforms["bin-dir"],
                            "%s%s" % (transforms["prefix"], replacement))
        defines.append('-DRED_%s="%s"' % (var_name, path))

        log("info", "Redirecting %s to %s" % (var_name, path))

    if transforms["bin-dir"]:
        defines.append('-DRED_ENSURE_PATH="%s"' % transforms["bin-dir"])

    jinja = jinja2.Environment(loader=jinja2.FileSystemLoader(["/build"]))
    pkgbuild_temp = jinja.get_template("red-PKGBUILD")
    pkgbuild = pkgbuild_temp.render(defines=(" ".join(defines)))

    with open("/build/PKGBUILD", "w") as f:
        f.write(pkgbuild)

    log("info", "Generated PKGBUILD for red", output=pkgbuild.splitlines())

    # Build and install red
    with tempfile.TemporaryDirectory() as d:
        red_tar = os.path.join(d, "red.tar.xz")
        with tarfile.open(red_tar, "w:xz") as tar:
            tar.add("/red", arcname="red")
        shutil.copyfile("/build/PKGBUILD", os.path.join(d, "PKGBUILD"))
        shutil.chown(d, user="tuscan")
        os.chdir(d)
        cmd = "sudo -u tuscan makepkg --nocolor"
        cp = subprocess.run(cmd.split(),
                            stdout=subprocess.PIPE,
                            stderr=subprocess.STDOUT,
                            universal_newlines=True)
        if cp.returncode:
            log("die", cmd, cp.stdout.splitlines())
            exit(1)
        else:
            log("command", cmd, cp.stdout.splitlines())
        package = glob("red*.pkg.tar.xz")
        if len(package) != 1:
            log("die", "Expected exactly one red package", package)
            exit(1)
        cmd = "pacman -U --noconfirm %s" % package[0]
        cp = subprocess.run(cmd.split(),
                            stdout=subprocess.PIPE,
                            stderr=subprocess.STDOUT,
                            universal_newlines=True)
        if cp.returncode:
            log("die", cmd, cp.stdout.splitlines())
            exit(1)
        else:
            log("command", cmd, cp.stdout.splitlines())

    if not os.path.isdir("/toolchain_root"):
        log("die", "/toolchain_root is not mounted")
        exit(1)

    if os.listdir("/toolchain_root"):
        log("info", ("Skipping toolchain-specific setup as "
                     "/toolchain_root contains files. Listing:"),
            output=list(os.listdir("/toolchain_root")))
    else:
        log("info", ("/toolchain_root is empty, performing "
                     "toolchain-specific setup"),
            output=list(os.listdir("/toolchain_root")))
        setup.toolchain_specific_setup(args)

    recursive_chown("/toolchain_root")

    exit(0)
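
The tool-name mangling above is easiest to check against concrete inputs: "scan-view" should become SCAN_VIEW and "clang++" should become CLANGPP:

# Worked examples for the identifier transformation used above.
import re

for tool in ["scan-view", "clang++"]:
    var_name = re.sub("-", "_", tool)
    var_name = re.sub(r"\+\+", "pp", var_name)
    print(var_name.upper())
# Prints:
#   SCAN_VIEW
#   CLANGPP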
Example #7
def main():
    parser = get_argparser()
    parser.add_argument("--abs-dir", dest="abs_dir", required=True)
    parser.add_argument("--sysroot", default="sysroot")
    args = parser.parse_args()
    args.mirror_directory = "/mirror"

    os.nice(10)

    dump_build_information(args)

    pkg_dir = os.path.basename(args.abs_dir)

    if pkg_dir in os.listdir(args.sources_directory):
        args.permanent_source_dir = os.path.join(args.sources_directory,
                                                 pkg_dir)
    else:
        die(Status.failure,
            "No source directory in source volume: %s" %
            args.sources_directory)

    args.build_dir = os.path.join("/tmp", pkg_dir)

    set_local_repository_location(args.toolchain_directory,
                                  toolchain_repo_name())

    if not os.path.isdir("/sysroot"):
        os.makedirs("/sysroot")

    copied_files = []
    existing_files = []

    for d in os.listdir("/toolchain_root"):
        base = os.path.basename(d)
        src = os.path.join("/toolchain_root", d)
        dst = os.path.join("/sysroot", base)

        # The destination can already exist if we built the toolchain
        # for the first time on this run. If we're using a pre-built
        # toolchain, the file won't exist.
        if os.path.lexists(dst):
            existing_files.append(dst)
            continue

        if os.path.isfile(src):
            copied_files.append((src, dst))
            shutil.copyfile(src, dst)
        elif os.path.isdir(src):
            copied_files.append((src, dst))
            shutil.copytree(src, dst)

    copied_files = ["%s  -->  %s" % (src, dst) for (src, dst) in copied_files]
    if copied_files:
        log("info",
            "Copied permanent toolchain into container-local sysroot",
            output=copied_files)
    if existing_files:
        log("info",
            "There were existing files in /sysroot, using those",
            output=existing_files)

    recursive_chown("/sysroot")

    result = copy_and_build(args)
    if result:
        die(Status.failure)

    paths_to_packages = create_hybrid_packages(args)

    if not paths_to_packages:
        die(Status.failure, "No hybrid packages were created.")

    for path in paths_to_packages:
        add_package_to_toolchain_repo(path, args.toolchain_directory)

    die(Status.success)
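
recursive_chown is another project helper that never appears in these snippets. A plausible sketch, assuming it hands the whole tree to the unprivileged 'tuscan' user created during setup:

# Hypothetical sketch of recursive_chown; the real helper may differ.
import os
import shutil


def recursive_chown(path, user="tuscan"):
    shutil.chown(path, user=user)
    for dirpath, dirnames, filenames in os.walk(path):
        for name in dirnames + filenames:
            shutil.chown(os.path.join(dirpath, name), user=user)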
Example #8
def main():
    """This script should be run inside a container."""

    parser = get_argparser()
    args = parser.parse_args()

    name_data_file = os.path.join(args.shared_directory,
            "get_base_package_names", "latest", "names.json")
    with open(name_data_file) as f:
        name_data = json.load(f)

    with open("/build/provides.json") as f:
        provides = json.load(f)

    abss = glob.glob("/var/abs/*/*")

    # Build a list of package information. This involves interpreting
    # Bash files, so split across multiple processes for speed.
    man = multiprocessing.Manager()
    global_infos = man.list()

    curry_gather = functools.partial(gather_package_data,
            name_data=name_data, args=args, global_infos=global_infos,
            provides=provides)

    with multiprocessing.Pool(multiprocessing.cpu_count()) as p:
        p.map(curry_gather, abss)

    # Back to sequential mode. Do various cleanups on the list of
    # package information

    infos = [info for info in global_infos]
    resolve_provides(infos, provides)
    circulars = infos_with_circular_deps(infos, name_data)
    infos = drop_excluded(infos, circulars, name_data)

    infos = drop_tools(infos, name_data)

    # Finally, get a list of builds and write them out.
    builds = build_triples(infos, args)

    ninja = ninja_syntax.Writer(sys.stdout, 72)

    ninja.rule("makepkg",
               ("container_build_dir/package_build_wrapper.py"
               " --shared-directory {shared_directory}"
               " --shared-volume {shared_volume}"
               " --sources-directory {sources_directory}"
               " --sources-volume {sources_volume}"
               " --toolchain-directory {toolchain_directory}"
               " --toolchain-volume {toolchain_volume}"
               " --toolchain {toolchain}"
               " --output-directory {output_directory}"
               # ${in} and ${out} are NOT format strings, they need to
               # be written out like this to the ninja file. So (python)
               # escape by using double-curly brackets
               " --abs-dir ${{in}}"
               " ${{out}}"
    ).format(shared_directory=args.shared_directory,
             shared_volume=args.shared_volume,
             sources_directory=args.sources_directory,
             sources_volume=args.sources_volume,
             output_directory=args.output_directory,
             toolchain_directory=args.toolchain_directory,
             toolchain_volume=args.toolchain_volume,
             toolchain=args.toolchain),
    description="Building '${in}'")

    for outs, rule, ins in builds:
        ninja.build(outs, rule, ins)

    sys.stdout.flush()
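
ninja_syntax.Writer simply serializes rules and build statements to a file object, so the output of code like the above is easy to preview with an in-memory buffer (assuming the ninja_syntax module from the Ninja project is importable):

# Preview what ninja_syntax.Writer emits, using io.StringIO instead of
# sys.stdout.
import io
import ninja_syntax

buf = io.StringIO()
ninja = ninja_syntax.Writer(buf, 72)
ninja.rule("makepkg", "wrapper.py --abs-dir ${in} ${out}",
           description="Building '${in}'")
ninja.build(["core/gcc.json"], "makepkg", ["/var/abs/core/gcc"])
print(buf.getvalue())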