def build_gcc(client, image, host_platform):
    """Build GCC in the Docker image."""
    gcc_archive = download_entry("gcc", DOWNLOADS_PATH)
    gmp_archive = download_entry("gmp", DOWNLOADS_PATH)
    isl_archive = download_entry("isl", DOWNLOADS_PATH)
    mpc_archive = download_entry("mpc", DOWNLOADS_PATH)
    mpfr_archive = download_entry("mpfr", DOWNLOADS_PATH)

    with build_environment(client, image) as build_env:
        install_sccache(build_env)

        log("copying archives to container...")
        for a in (gcc_archive, gmp_archive, isl_archive, mpc_archive, mpfr_archive):
            build_env.copy_file(a)

        build_env.copy_file(toolchain_archive_path("binutils", host_platform))
        build_env.copy_file(SUPPORT / "build-gcc.sh")

        env = {
            "BINUTILS_VERSION": DOWNLOADS["binutils"]["version"],
            "GCC_VERSION": DOWNLOADS["gcc"]["version"],
            "GMP_VERSION": DOWNLOADS["gmp"]["version"],
            "ISL_VERSION": DOWNLOADS["isl"]["version"],
            "MPC_VERSION": DOWNLOADS["mpc"]["version"],
            "MPFR_VERSION": DOWNLOADS["mpfr"]["version"],
        }
        add_env_common(env)

        build_env.run("build-gcc.sh", environment=env)

        build_env.get_tools_archive(
            toolchain_archive_path("gcc", host_platform), "host"
        )
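# Illustrative invocation (a sketch, not part of this module). The Docker
# client construction, image id, and platform string below are assumptions;
# the repo's CLI normally resolves these before calling build_gcc():
#
#   import docker
#
#   client = docker.from_env()
#   build_gcc(client, image="<toolchain image id>", host_platform="linux64")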
def build_clang(client, image, host_platform):
    """Build Clang in the Docker image."""
    if "linux" in host_platform:
        cmake_archive = download_entry("cmake-linux-bin", DOWNLOADS_PATH)
        ninja_archive = download_entry("ninja-linux-bin", DOWNLOADS_PATH)
    elif "macos" in host_platform:
        cmake_archive = download_entry("cmake-macos-bin", DOWNLOADS_PATH)
        ninja_archive = download_entry("ninja-macos-bin", DOWNLOADS_PATH)
    else:
        raise Exception("unhandled host platform: %s" % host_platform)

    clang_archive = download_entry("clang", DOWNLOADS_PATH)
    clang_rt_archive = download_entry("clang-compiler-rt", DOWNLOADS_PATH)
    lld_archive = download_entry("lld", DOWNLOADS_PATH)
    llvm_archive = download_entry("llvm", DOWNLOADS_PATH)
    libcxx_archive = download_entry("libc++", DOWNLOADS_PATH)
    libcxxabi_archive = download_entry("libc++abi", DOWNLOADS_PATH)

    with build_environment(client, image) as build_env:
        install_sccache(build_env)

        log("copying archives to container...")
        for a in (
            cmake_archive,
            ninja_archive,
            clang_archive,
            clang_rt_archive,
            lld_archive,
            llvm_archive,
            libcxx_archive,
            libcxxabi_archive,
        ):
            build_env.copy_file(a)

        tools_path = "clang-%s" % host_platform
        build_sh = "build-clang-%s.sh" % host_platform
        binutils = install_binutils(host_platform)
        gcc = binutils

        env = {
            "CLANG_COMPILER_RT_VERSION": DOWNLOADS["clang-compiler-rt"]["version"],
            "CLANG_VERSION": DOWNLOADS["clang"]["version"],
            "CMAKE_VERSION": DOWNLOADS["cmake-linux-bin"]["version"],
            "COMPILER_RT_VERSION": DOWNLOADS["clang-compiler-rt"]["version"],
            "GCC_VERSION": DOWNLOADS["gcc"]["version"],
            "LIBCXX_VERSION": DOWNLOADS["libc++"]["version"],
            "LIBCXXABI_VERSION": DOWNLOADS["libc++abi"]["version"],
            "LLD_VERSION": DOWNLOADS["lld"]["version"],
            "LLVM_VERSION": DOWNLOADS["llvm"]["version"],
        }
        add_env_common(env)

        build_env.install_toolchain(BUILD, host_platform, binutils=binutils, gcc=gcc)

        build_env.copy_file(SUPPORT / build_sh)
        build_env.run(build_sh, environment=env)

        build_env.get_tools_archive(
            toolchain_archive_path("clang", host_platform), tools_path
        )
def python_build_info(
    build_env,
    version,
    platform,
    musl,
    optimizations,
    config_c_in,
    setup_dist,
    setup_local,
    libressl=False,
):
    """Obtain build metadata for the Python distribution."""

    log("resolving Python distribution build info")

    bi = {"core": {"objs": [], "links": []}, "extensions": {}}

    binary_suffix = ""

    if platform == "linux64":
        bi["core"]["static_lib"] = (
            "install/lib/python{version}/config-{version}{binary_suffix}-x86_64-linux-gnu/"
            "libpython{version}{binary_suffix}.a".format(
                version=version, binary_suffix=binary_suffix
            )
        )

        if not musl:
            bi["core"]["shared_lib"] = "install/lib/libpython%s%s.so.1.0" % (
                version,
                binary_suffix,
            )

        if optimizations in ("lto", "pgo+lto"):
            object_file_format = "llvm-bitcode:%s" % DOWNLOADS["llvm"]["version"]
        else:
            object_file_format = "elf"
    elif platform == "macos":
        bi["core"]["static_lib"] = (
            "install/lib/python{version}/config-{version}{binary_suffix}-darwin/"
            "libpython{version}{binary_suffix}.a".format(
                version=version, binary_suffix=binary_suffix
            )
        )

        bi["core"]["shared_lib"] = "install/lib/libpython%s%s.dylib" % (
            version,
            binary_suffix,
        )

        if optimizations in ("lto", "pgo+lto"):
            object_file_format = "llvm-bitcode:%s" % DOWNLOADS["llvm"]["version"]
        else:
            object_file_format = "mach-o"
    else:
        raise Exception("unsupported platform: %s" % platform)

    bi["object_file_format"] = object_file_format

    # Object files for the core distribution are found by walking the
    # build artifacts.
    core_objs = set()
    modules_objs = set()

    for f in build_env.find_output_files("python/build", "*.o"):
        rel_path = pathlib.Path("build") / f

        if rel_path.parts[1] in ("Objects", "Parser", "Python"):
            core_objs.add(rel_path)

        if rel_path.parts[1] == "Modules":
            modules_objs.add(rel_path)

    for p in sorted(core_objs):
        log("adding core object file: %s" % p)
        bi["core"]["objs"].append(str(p))

    assert pathlib.Path("build/Modules/config.o") in modules_objs

    bi["inittab_object"] = "build/Modules/config.o"
    bi["inittab_source"] = "build/Modules/config.c"

    # TODO ideally we'd get these from the build environment.
    bi["inittab_cflags"] = ["-std=c99", "-DNDEBUG", "-DPy_BUILD_CORE"]

    libraries = set()

    for f in build_env.find_output_files("python/build/lib", "*.a"):
        # Strip "lib" prefix and ".a" suffix.
        libname = f[3:-2]
        libraries.add(libname)

    # Extension data is derived by "parsing" the Setup.dist and Setup.local
    # files.

    def process_setup_line(line, variant=None):
        d = parse_setup_line(line, variant)

        if not d:
            return

        extension = d["extension"]
        log("processing extension %s (variant %s)" % (extension, d["variant"]))

        objs = []

        for obj in sorted(d["posix_obj_paths"]):
            obj = pathlib.Path("build") / obj
            log("adding object file %s for extension %s" % (obj, extension))
            objs.append(str(obj))

            # Mark object file as used so we don't include it in the core
            # object files below. .remove() would be nicer, as we would catch
            # missing object files. But some sources (like math.c) are used by
            # multiple modules!
            modules_objs.discard(obj)

        links = []

        for framework in sorted(d["frameworks"]):
            log("adding framework %s for extension %s" % (framework, extension))
            links.append({"name": framework, "framework": True})

        for libname in sorted(d["links"]):
            log("adding library %s for extension %s" % (libname, extension))

            if libname in libraries:
                entry = {
                    "name": libname,
                    "path_static": "build/lib/lib%s.a" % libname,
                }
                links.append(entry)
            else:
                links.append({"name": libname, "system": True})

        if platform == "macos":
            # For some reason, Python's build system adds libintl as a link
            # against libpythonX.Y/pythonX.Y instead of the _locale extension
            # despite the _locale extension being the only user of its
            # symbols. We add libintl here to work around that.
            links.append({"name": "intl", "path_static": "build/lib/libintl.a"})

            # And symbols in our built libintl reference iconv symbols. So we
            # need to include that dependency as well.
            links.append({"name": "iconv", "system": True})

        entry = {
            "in_core": False,
            "init_fn": "PyInit_%s" % extension,
            "links": links,
            "objs": objs,
            "variant": d["variant"],
        }

        if libressl:
            ignore_keys = {"openssl"}
        else:
            ignore_keys = {"libressl"}

        add_licenses_to_extension_entry(entry, ignore_keys=ignore_keys)

        bi["extensions"].setdefault(extension, []).append(entry)

    found_start = False

    for line in setup_dist.splitlines():
        if not found_start:
            if line.startswith(b"PYTHONPATH="):
                found_start = True
                continue

            continue

        process_setup_line(line)

    for line in setup_local.splitlines():
        if line.startswith(b"*static*"):
            continue

        if line.startswith(b"*disabled*"):
            break

        process_setup_line(line)

    # Extension variants are denoted by the presence of
    # Modules/VARIANT-<extension>-<variant>.data files that describe the
    # extension. Find those files and process them.
    tf = build_env.get_output_archive("python/build/Modules", as_tar=True)

    for ti in tf:
        basename = os.path.basename(ti.name)

        if not basename.startswith("VARIANT-") or not basename.endswith(".data"):
            continue

        variant = basename[:-5].split("-")[2]
        line = tf.extractfile(ti).read().strip()
        process_setup_line(line, variant=variant)

    # There is also a set of built-in extensions defined in config.c.in which
    # aren't built using the Setup.* files and are part of the core libpython
    # distribution. Define extension entries for these so downstream consumers
    # can register their PyInit_ functions.
    for name, init_fn in sorted(config_c_in.items()):
        log("adding in-core extension %s" % name)
        bi["extensions"].setdefault(name, []).append(
            {
                "in_core": True,
                "init_fn": init_fn,
                "links": [],
                "objs": [],
                "variant": "default",
            }
        )

    with (SUPPORT / ("required-extensions.%s.%s" % (version, platform))).open(
        "r"
    ) as fh:
        required_extensions = {l.strip() for l in fh if l.strip()}

    for extension, entries in bi["extensions"].items():
        for entry in entries:
            entry["required"] = extension in required_extensions

    # Any paths left in modules_objs are not part of any extension and are
    # instead part of the core distribution.
    for p in sorted(modules_objs):
        log("adding core object file %s" % p)
        bi["core"]["objs"].append(str(p))

    return bi
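# For illustration only (the module name and values below are hypothetical,
# not read from an actual Setup.dist): a line such as
#
#   _sha256 sha256module.c
#
# would be parsed by parse_setup_line() into an extension named "_sha256"
# whose sole object file is build/Modules/sha256module.o, producing an
# extension entry roughly of the form:
#
#   {
#       "in_core": False,
#       "init_fn": "PyInit__sha256",
#       "links": [],
#       "objs": ["build/Modules/sha256module.o"],
#       "variant": "default",
#       "required": False,
#   }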