def _build(self):
    """Run ``make`` in the package source root, streaming output to the log."""
    # The configured make command may carry flags; split it shell-style.
    make_cmd = shlex.split(self.sh_get_command('make', relative_to='sourceroot'))
    tools.cmd(
        *make_cmd,
        cwd=str(self._srcroot),
        stdout=self._io.output.stream,
        stderr=subprocess.STDOUT,
    )
def install_build_deps(self, build, spec):
    """Install the build dependencies declared in *spec* using dnf builddep."""
    spec_root = build.get_spec_root(relative_to=None)
    tools.cmd(
        'dnf', 'builddep', '-y', spec,
        cwd=str(spec_root),
        stdout=build._io.output.stream,
        stderr=subprocess.STDOUT,
    )
def prepare(self) -> None:
    """Refresh Homebrew and install or upgrade every required host tool."""
    tools.cmd("brew", "update")
    # Upgrade the tool when it is already present, otherwise install it.
    upgrade_or_install = (
        'if brew ls --versions "$1"; then brew upgrade "$1"; '
        'else brew install "$1"; fi'
    )
    for host_tool in self._get_necessary_host_tools():
        tools.cmd("/bin/sh", "-c", upgrade_or_install, "--", host_tool)
def install_build_deps(self, build: rpmbuild.Build, spec: str) -> None:
    """Install the build dependencies declared in *spec* via yum-builddep."""
    spec_root = build.get_spec_root(relative_to="fsroot")
    tools.cmd(
        "yum-builddep", "-y", spec,
        cwd=str(spec_root),
        stdout=build._io.output.stream,
        stderr=subprocess.STDOUT,
    )
def _build(self) -> None:
    """Invoke make in the source root, forwarding its output to the build log."""
    argv = shlex.split(self.sh_get_command("make", relative_to="sourceroot"))
    tools.cmd(
        *argv,
        cwd=str(self._srcroot),
        stdout=self._io.output.stream,
        stderr=subprocess.STDOUT,
    )
def _apply_patches(self):
    """Apply each registered patch to the bundled third-party sources.

    Patches are resolved relative to the patches root and applied with
    ``patch -p1`` from the third-party source directory.
    """
    proot = self.get_patches_root(relative_to=None)
    patch_cmd = shlex.split(self.sh_get_command('patch'))
    sroot = self.get_dir('thirdparty', relative_to=None)
    for patchname in self._patches:
        patch = proot / patchname
        # Pass the patch path as str for consistency with the typed
        # variant of this method elsewhere in the file.
        tools.cmd(
            *(patch_cmd + ['-p1', '-i', str(patch)]),
            cwd=sroot,
        )
def _apply_patches(self) -> None:
    """Apply all registered patches to the bundled third-party sources."""
    patches_root = self.get_patches_root(relative_to="fsroot")
    base_cmd = shlex.split(self.sh_get_command("patch"))
    source_dir = self.get_dir("thirdparty", relative_to="fsroot")
    for name in self._patches:
        # patch -p1 strips the leading path component of each hunk.
        full_cmd = base_cmd + ["-p1", "-i", str(patches_root / name)]
        tools.cmd(*full_cmd, cwd=source_dir)
def _package(self):
    # Package the install image into the output directory: either copy a
    # single flat artifact verbatim, or bundle everything into a zip.
    pkg = self._root_pkg
    title = pkg.name
    image_root = self.get_image_root(relative_to='sourceroot')
    # Collect all regular files under the install image.  Paths come back
    # relative to the source root because of cwd=.
    files = tools.cmd(
        'find', image_root, '-type', 'f',
        cwd=str(self._srcroot),
    ).strip().split('\n')
    self._outputroot.mkdir(parents=True, exist_ok=True)
    version = pkg.pretty_version
    # Artifact suffix: revision, optionally extended by sub-distribution.
    suffix = self._revision
    if self._subdist:
        suffix = f'{suffix}.{self._subdist}'
    # Base artifact name, e.g. "<name><slot>_<version>_<suffix>".
    an = f'{title}{pkg.slot_suffix}_{version}_{suffix}'
    if pkg.get_package_layout(self) is packages.PackageFileLayout.FLAT:
        if len(files) == 1:
            # Single flat artifact: copy it out, keeping its extension.
            # NOTE(review): fn.suffix only keeps the last extension, so
            # e.g. ".tar.gz" becomes ".gz" — presumably acceptable here.
            fn = pathlib.Path(files[0])
            tools.cmd(
                'cp',
                str(self._srcroot / files[0]),
                f'{self._outputroot / an}{fn.suffix}',
            )
            return
        else:
            # Multiple flat files: zip them at the archive top level.
            with zipfile.ZipFile(
                self._outputroot / f'{an}.zip',
                mode='w',
                compression=zipfile.ZIP_DEFLATED,
            ) as z:
                for file in files:
                    z.write(
                        str(self._srcroot / file),
                        arcname=pathlib.Path(file).name,
                    )
    else:
        # Structured layout: keep paths relative to the image root,
        # nested under the artifact name (str / Path joins via
        # Path.__rtruediv__).
        with zipfile.ZipFile(
            self._outputroot / f'{an}.zip',
            mode='w',
            compression=zipfile.ZIP_DEFLATED,
        ) as z:
            for file in files:
                z.write(
                    str(self._srcroot / file),
                    arcname=(an / pathlib.Path(file).relative_to(image_root)),
                )
def _dpkg_buildpackage(self) -> None:
    """Install build prerequisites and run dpkg-buildpackage.

    Afterwards, rewrites ``.ddeb`` references in the produced ``.changes``
    files to ``.deb``, since Debian tooling (e.g. reprepro) expects plain
    ``.deb`` names for dbgsym packages.
    """
    env = os.environ.copy()
    env["DEBIAN_FRONTEND"] = "noninteractive"
    tools.cmd(
        "apt-get",
        "update",
        env=env,
        cwd=str(self._srcroot),
        stdout=self._io.output.stream,
        stderr=subprocess.STDOUT,
    )
    tools.cmd(
        "apt-get",
        "install",
        "-y",
        "--no-install-recommends",
        "equivs",
        "devscripts",
        env=env,
        cwd=str(self._srcroot),
        stdout=self._io.output.stream,
        stderr=subprocess.STDOUT,
    )
    # Build and install a dependency meta-package from debian/control.
    tools.cmd(
        "mk-build-deps",
        "-t",
        "apt-get -y --no-install-recommends",
        "-i",
        str(self._debroot / "control"),
        env=env,
        cwd="/tmp",
        stdout=self._io.output.stream,
        stderr=subprocess.STDOUT,
    )
    args = ["-us", "-uc", "--source-option=--create-empty-orig"]
    if not self._build_source:
        args.append("-b")
    tools.cmd(
        "dpkg-buildpackage",
        *args,
        cwd=str(self._srcroot),
        stdout=self._io.output.stream,
        stderr=subprocess.STDOUT,
    )
    # Ubuntu likes to call their dbgsym packages ddebs,
    # whereas Debian tools, including reprepro like it
    # to just be a .deb.
    for changes in self._pkgroot.glob("*.changes"):
        with open(changes, "r+t") as f:
            patched = f.read().replace(".ddeb", ".deb")
            f.seek(0)
            f.write(patched)
            # Fix: ".deb" is shorter than ".ddeb", so without truncate()
            # stale bytes from the old content would remain past the new
            # end of the file.
            f.truncate()
def resolve(cls, io, *, ref=None, version=None) -> 'BundledRustPackage':
    """Resolve a bundled Rust package from its VCS source.

    The version is taken from ``cargo pkgid`` output (the text after the
    last ``#``); the *version* argument is ignored and overwritten.
    """
    repo_dir = cls.resolve_vcs_source(io, ref=ref)
    pkgid = tools.cmd('cargo', 'pkgid', cwd=repo_dir).strip()
    # cargo pkgid output looks like "<url>#<version>".
    version = pkgid.rpartition('#')[2]
    return cls(version, source_version=ref or 'HEAD')
def supports_lto(self):
    """Report whether the system gcc is new enough for link-time optimization."""
    # LTO more-or-less stabilized in GCC 4.9.0.
    gcc_ver = tools.cmd('gcc', '--version')
    match = re.match(r'^gcc.*?(\d+(?:\.\d+)+)', gcc_ver, re.M)
    if match is None:
        raise RuntimeError(f'cannot determine gcc version:\n{gcc_ver}')
    version_tuple = tuple(int(part) for part in match.group(1).split('.'))
    return version_tuple >= (4, 9)
def supports_lto(self) -> bool:
    """Return True when the installed gcc supports stable LTO (>= 4.9)."""
    # LTO more-or-less stabilized in GCC 4.9.0.
    gcc_ver = tools.cmd("gcc", "--version")
    if m := re.match(r"^gcc.*?(\d+(?:\.\d+)+)", gcc_ver, re.M):
        parts = m.group(1).split(".")
        return tuple(int(p) for p in parts) >= (4, 9)
    raise RuntimeError(f"cannot determine gcc version:\n{gcc_ver}")
def apt_get_packages(self, name: str) -> list[poetry_pkg.Package]:
    """Look up *name* in the APT cache and return matching system packages."""
    system_name = PACKAGE_MAP.get(name, name)
    try:
        output = tools.cmd(
            "apt-cache", "policy", system_name, errors_are_fatal=False
        )
    except subprocess.CalledProcessError:
        return []
    policy = self._parse_apt_policy_output(output.strip())
    if not policy:
        return []
    # One SystemPackage per advertised version of each matching package.
    packages = []
    for pkgmeta in policy:
        for version in pkgmeta["versions"]:
            norm_version = _debian_version_to_pep440(version)
            packages.append(
                SystemPackage(
                    name,
                    norm_version,
                    pretty_version=version,
                    system_name=pkgmeta["name"],
                )
            )
    return packages
def apt_get_packages(self, name: str) -> list[poetry_pkg.Package]:
    """Look up *name* via ``yum list`` and return matching system packages."""
    system_name = PACKAGE_MAP.get(name, name)
    try:
        output = tools.cmd(
            "yum",
            "--showduplicates",
            "list",
            system_name,
            errors_are_fatal=False,
            hide_stderr=True,
        )
    except subprocess.CalledProcessError:
        return []
    policy = self._parse_yum_list_output(output.strip())
    if not policy:
        return []
    # One SystemPackage per version advertised by yum.
    return [
        SystemPackage(
            name,
            _rpm_version_to_pep440(version),
            pretty_version=version,
            system_name=system_name,
        )
        for version in policy["versions"]
    ]
def get_resource_path(
    self, build: targets.Build, resource: str
) -> pathlib.Path | None:
    """Resolve a named resource path; systemd units come from rpm's %_unitdir."""
    if resource != "systemd-units":
        return super().get_resource_path(build, resource)
    unitdir = tools.cmd("rpm", "--eval", "%_unitdir").strip()
    return pathlib.Path(unitdir)
def _fixup_rpath(self, image_root: pathlib.Path, binary_relpath: pathlib.Path) -> None:
    # Rewrite the ELF RPATH of the given binary so that entries pointing
    # inside the install image become $ORIGIN-relative, and entries
    # pointing outside the image are dropped.
    inst_prefix = self.get_full_install_prefix()
    full_path = image_root / binary_relpath
    # The path the binary will have once installed (image root maps to /).
    inst_path = pathlib.Path("/") / binary_relpath
    rpath_record = tools.cmd("patchelf", "--print-rpath", full_path).strip()
    rpaths = []
    if rpath_record:
        for entry in rpath_record.split(os.pathsep):
            entry = entry.strip()
            if not entry:
                continue
            if entry.startswith("$ORIGIN"):
                # rpath is already relative
                rpaths.append(entry)
            else:
                rpath = pathlib.Path(entry)
                if rpath.is_relative_to(inst_prefix):
                    # Convert an absolute in-image path into one relative
                    # to the binary's installed location.
                    rel_rpath = os.path.relpath(rpath, start=inst_path.parent)
                    rpaths.append(f"$ORIGIN/{rel_rpath}")
                else:
                    print(
                        f"RPATH {entry} points outside of install image, "
                        f"removing")
    if rpaths:
        new_rpath_record = os.pathsep.join(rp for rp in rpaths)
        # Only rewrite when the record actually changed.
        if new_rpath_record != rpath_record:
            tools.cmd(
                "patchelf",
                "--force-rpath",
                "--set-rpath",
                new_rpath_record,
                full_path,
            )
    elif rpath_record:
        # Every entry was dropped: clear the RPATH entirely.
        tools.cmd(
            "patchelf",
            "--remove-rpath",
            full_path,
        )
def get_shlib_refs(
    self,
    build: targets.Build,
    image_root: pathlib.Path,
    install_path: pathlib.Path,
    *,
    resolve: bool = True,
) -> tuple[set[pathlib.Path], set[pathlib.Path]]:
    """Return (shared libs, rpath entries) referenced by a Mach-O binary.

    Parses ``otool -l`` output with a small state machine over the
    LC_LOAD_DYLIB and LC_RPATH load commands.  With *resolve*, ``@rpath``
    dylib prefixes are stripped and ``@loader_path`` rpaths are rebased
    onto the install path.

    Fix: the original fell off the end without returning the collected
    sets, so callers received ``None`` despite the annotated tuple.
    """
    shlibs = set()
    rpaths = set()
    output = tools.cmd("otool", "-l", image_root / install_path)
    section_re = re.compile(r"^Section$", re.I)
    load_cmd_re = re.compile(r"^Load command (\d+)\s*$", re.I)
    lc_load_dylib_cmd_re = re.compile(r"^\s*cmd\s+LC_LOAD_DYLIB\s*$")
    lc_load_dylib_name_re = re.compile(r"^\s*name\s+([^(]+).*$")
    lc_rpath_cmd_re = re.compile(r"^\s*cmd\s+LC_RPATH\s*$")
    lc_rpath_path_re = re.compile(r"^\s*path\s+([^(]+).*$")
    state = "skip"
    for line in output.strip().split("\n"):
        line = line.strip()
        if not line:
            continue
        if state == "skip":
            if load_cmd_re.match(line):
                state = "load_cmd"
        elif state == "load_cmd":
            if lc_load_dylib_cmd_re.match(line):
                state = "lc_load_dylib"
            elif lc_rpath_cmd_re.match(line):
                state = "lc_rpath"
            elif section_re.match(line):
                state = "skip"
        elif state == "lc_load_dylib":
            if m := lc_load_dylib_name_re.match(line):
                dylib = pathlib.Path(m.group(1).strip())
                if dylib.parts[0] == "@rpath" and resolve:
                    dylib = pathlib.Path(*dylib.parts[1:])
                shlibs.add(dylib)
                state = "skip"
            elif section_re.match(line):
                state = "skip"
            elif load_cmd_re.match(line):
                state = "load_cmd"
        elif state == "lc_rpath":
            if m := lc_rpath_path_re.match(line):
                entry = m.group(1).strip()
                if entry.startswith("@loader_path") and resolve:
                    relpath = entry[len("@loader_path/") :]
                    rpath = (
                        pathlib.Path("/")
                        / install_path.parent
                        / relpath
                    )
                else:
                    rpath = pathlib.Path(entry)
                rpaths.add(rpath)
                state = "skip"
    # Missing in the original: hand the results back to the caller.
    return shlibs, rpaths
def shrinkwrap(self) -> None:
    """Pack the intermediate build output into a tarball in the output dir.

    The archive is created in the temp root and then copied into
    ``self._outputroot``.
    """
    if not self._outputroot.exists():
        self._outputroot.mkdir(parents=True, exist_ok=True)
    pkg = self._root_pkg
    pkg_name = pkg.name
    pkg_ver = pkg.version.to_string(short=False)
    tgt_ident = self.target.ident
    tarball = f"{pkg_name}__{pkg_ver}__{tgt_ident}.tar"
    tar = self.sh_get_command("tar")
    intermediates = self.get_intermediate_output_dir(relative_to="fsroot")
    shipment = str(self.get_temp_root(relative_to="fsroot") / tarball)
    tools.cmd(
        tar,
        "--transform",
        # Strip the leading "./" from member names.  (Fix: this was a
        # spurious f-string with no placeholders.)
        "flags=r;s|^\\./||",
        "-c",
        "-f",
        os.path.relpath(shipment, start=intermediates),
        ".",
        cwd=intermediates,
    )
    shutil.copy2(shipment, self._outputroot)
def get_dist(path: pathlib.Path) -> distlib.database.InstalledDistribution:
    """Generate dist-info for the project at *path* and load it via distlib."""
    with tempfile.TemporaryDirectory() as tmp:
        # Ask setup.py to emit the *.dist-info directory into tmp.
        tools.cmd(
            sys.executable, "setup.py", "dist_info", "--egg-base", tmp,
            cwd=path,
        )
        distinfos = glob.glob(str(pathlib.Path(tmp) / "*.dist-info"))
        if len(distinfos) > 1:
            raise RuntimeError(f"{path.name}/setup.py dist_info produced "
                               f"too many distinfos")
        if not distinfos:
            raise RuntimeError(
                f"{path.name}/setup.py dist_info did not produce "
                f"any distinfos")
        return distlib.database.InstalledDistribution(distinfos[0])
def get_dist(path):
    """Generate dist-info metadata for *path* and return it as a distribution."""
    with tempfile.TemporaryDirectory() as egg_base:
        tools.cmd(sys.executable, 'setup.py', 'dist_info',
                  '--egg-base', egg_base, cwd=path)
        found = glob.glob(str(pathlib.Path(egg_base) / '*.dist-info'))
        if not found:
            raise RuntimeError(
                f'{path.name}/setup.py dist_info did not produce '
                f'any distinfos')
        if len(found) > 1:
            raise RuntimeError(f'{path.name}/setup.py dist_info produced '
                               f'too many distinfos')
        return distlib.database.InstalledDistribution(found[0])
def _list_installed_files(self) -> list[pathlib.Path]:
    """Return installed files and symlinks, relative to the image root.

    Robustness fix: skip empty entries from the ``find`` output —
    splitting an empty listing yields ``['']``, and
    ``Path('').relative_to(image_root)`` would raise ValueError.
    """
    image_root = self.get_image_root(relative_to="sourceroot")
    find = self.sh_get_command("find", relative_to="sourceroot")
    listing = (tools.cmd(
        find,
        image_root,
        "-type",
        "f",
        "-o",
        "-type",
        "l",
        cwd=str(self._srcroot),
    ).strip().split("\n"))
    return [
        pathlib.Path(entry).relative_to(image_root)
        for entry in listing
        if entry
    ]
def _rpmbuild(self):
    # Install RPM build tooling, build and lint the package spec, then
    # copy the resulting RPMs/SRPMs into the output root (if configured).
    tools.cmd(
        'yum', 'install', '-y', 'rpm-build', 'rpmlint', 'yum-utils',
        stdout=self._io.output.stream,
        stderr=subprocess.STDOUT
    )
    # Let the target install the spec's declared build dependencies.
    self.target.install_build_deps(
        self, f'{self._root_pkg.name_slot}.spec')
    image_root = self.get_image_root(relative_to=None)
    args = [
        f'{self._root_pkg.name_slot}.spec',
        f'--define=%_topdir {self._srcroot}',
        f'--buildroot={image_root}',
        '--verbose',
    ]
    if self._build_source:
        # -ba builds both binary and source packages.
        args.append('-ba')
    else:
        # -bb builds binary packages only.
        args.append('-bb')
    tools.cmd(
        'rpmbuild', *args,
        cwd=str(self.get_spec_root(relative_to=None)),
        stdout=self._io.output.stream,
        stderr=subprocess.STDOUT)
    tools.cmd(
        'rpmlint', '-i', f'{self._root_pkg.name_slot}.spec',
        cwd=str(self.get_spec_root(relative_to=None)),
        stdout=self._io.output.stream,
        stderr=subprocess.STDOUT)
    if self._outputroot is not None:
        if not self._outputroot.exists():
            self._outputroot.mkdir(parents=True, exist_ok=True)
        # Architecture-specific binary RPMs live under RPMS/<arch>.
        rpms = self.get_dir('RPMS', relative_to=None) / platform.machine()
        for rpm in glob.glob(str(rpms / '*.rpm')):
            rpm = pathlib.Path(rpm)
            shutil.copy2(rpm, self._outputroot / rpm.name)
        # Source RPMs, if any, live under SRPMS.
        srpms = self.get_dir('SRPMS', relative_to=None)
        for rpm in glob.glob(str(srpms / '*.rpm')):
            rpm = pathlib.Path(rpm)
            shutil.copy2(rpm, self._outputroot / rpm.name)
def get_shlib_refs(
    self,
    build: Build,
    image_root: pathlib.Path,
    install_path: pathlib.Path,
    *,
    resolve: bool = True,
) -> tuple[set[pathlib.Path], set[pathlib.Path]]:
    """Return (needed shared libraries, rpath entries) of an ELF binary.

    Scans the .dynamic section of the given ELF binary to find which
    shared objects it needs and what the library runpath is.  We have to
    rely on parsing the output of readelf, as there seems to be no other
    reliable way to do this other than resorting to the use of complex
    ELF-parsing libraries, which might be buggier than binutils.

    Fix: the original ended without returning the collected sets even
    though the annotation promises a tuple; the missing return is added.
    """
    shlib_re = re.compile(r".*\(NEEDED\)\s+Shared library: \[([^\]]+)\]")
    rpath_re = re.compile(
        r".*\((?:RPATH|RUNPATH)\)\s+Library.*path: \[([^\]]+)\]"
    )
    shlibs = set()
    rpaths = set()
    output = tools.cmd("readelf", "-d", image_root / install_path)
    for line in output.strip().split("\n"):
        line = line.strip()
        if not line:
            continue
        if m := shlib_re.match(line):
            shlibs.add(pathlib.Path(m.group(1)))
        if m := rpath_re.match(line):
            for entry in m.group(1).split(os.pathsep):
                if entry.startswith("$ORIGIN") and resolve:
                    # $ORIGIN means the directory of the referring binary.
                    relpath = entry[len("$ORIGIN/") :]
                    rpath = (
                        pathlib.Path("/") / install_path.parent / relpath
                    )
                else:
                    rpath = pathlib.Path(entry)
                rpaths.add(rpath)
    return shlibs, rpaths
def _rpmbuild(self) -> None:
    # Install RPM build tooling, then build and lint the package spec.
    tools.cmd(
        "yum",
        "install",
        "-y",
        "rpm-build",
        "rpmlint",
        "yum-utils",
        stdout=self._io.output.stream,
        stderr=subprocess.STDOUT,
    )
    # Let the target install the spec's declared build dependencies.
    self.target.install_build_deps(  # type: ignore
        self, f"{self._root_pkg.name_slot}.spec")
    image_root = self.get_image_root(relative_to="fsroot")
    args = [
        f"{self._root_pkg.name_slot}.spec",
        f"--define=%_topdir {self._srcroot}",
        f"--buildroot={image_root}",
        "--verbose",
    ]
    if self._build_source:
        # -ba builds both binary and source packages.
        args.append("-ba")
    else:
        # -bb builds binary packages only.
        args.append("-bb")
    tools.cmd(
        "rpmbuild",
        *args,
        cwd=str(self.get_spec_root(relative_to="fsroot")),
        stdout=self._io.output.stream,
        stderr=subprocess.STDOUT,
    )
    tools.cmd(
        "rpmlint",
        "-i",
        f"{self._root_pkg.name_slot}.spec",
        cwd=str(self.get_spec_root(relative_to="fsroot")),
        stdout=self._io.output.stream,
        stderr=subprocess.STDOUT,
    )
def tarball(self, pkg, name_tpl: typing.Optional[str] = None, *,
            target_dir: pathlib.Path, io) -> pathlib.Path:
    # Produce a gzipped source tarball of the repository (including all
    # submodules) under target_dir and return the resulting path.
    self.download(io)
    repo = tools.git.repo(self.url)
    if name_tpl is None:
        name_tpl = f'{pkg.unique_name}{{part}}.tar{{comp}}'
    target_path = target_dir / name_tpl.format(part='', comp='')
    if self.ref is not None:
        ref = self.ref
    else:
        ref = 'HEAD'
    # Archive the superproject first; submodule contents are appended
    # afterwards since git archive does not recurse into them.
    repo.run('archive', f'--output={target_path}', '--format=tar',
             f'--prefix={pkg.unique_name}/', ref)
    submodules = repo.run('submodule', 'foreach', '--recursive').strip('\n')
    if submodules:
        submodules = submodules.split('\n')
        for submodule in submodules:
            # Each line of foreach output is "Entering '<path>'".
            path_m = re.match("Entering '([^']+)'", submodule)
            if not path_m:
                raise ValueError(
                    'cannot parse git submodule foreach output')
            path = path_m.group(1)
            module_repo = tools.git.Git(repo._work_dir / path)
            with tempfile.NamedTemporaryFile() as f:
                module_repo.run('archive', '--format=tar',
                                f'--output={f.name}',
                                f'--prefix={pkg.unique_name}/{path}/',
                                'HEAD')
                if platform.system() == 'Darwin':
                    # macOS (BSD) tar lacks --concatenate, so merge the
                    # submodule archive member-by-member with tarfile.
                    with tarfile.open(f.name) as modf, \
                            tarfile.open(target_path, 'a') as tf:
                        for m in modf.getmembers():
                            if m.issym():
                                # Skip broken symlinks.
                                target = os.path.normpath("/".join(
                                    filter(None, (
                                        os.path.dirname(m.name),
                                        m.linkname,
                                    ))))
                                try:
                                    modf.getmember(target)
                                except KeyError:
                                    continue
                            tf.addfile(m, modf.extractfile(m))
                else:
                    tools.cmd('tar', '--concatenate',
                              '--file', target_path, f.name)
    tools.cmd('gzip', target_path, cwd=target_dir)
    return pathlib.Path(f'{target_path}.gz')
def prepare(self) -> None:
    """Refresh the APT package index before any builds run."""
    tools.cmd("apt-get", "update")
def get_arch_libdir(self) -> pathlib.Path:
    """Return the Debian multiarch library directory for the host."""
    multiarch = tools.cmd("dpkg-architecture", "-qDEB_HOST_MULTIARCH")
    return pathlib.Path("/usr/lib") / multiarch.strip()
def get_sys_bindir(self) -> pathlib.Path:
    """Return the system binary directory as reported by rpm."""
    bindir = tools.cmd("rpm", "--eval", "%_bindir")
    return pathlib.Path(bindir.strip())
def get_arch_libdir(self) -> pathlib.Path:
    """Return the architecture library directory as reported by rpm."""
    libdir = tools.cmd("rpm", "--eval", "%_libdir")
    return pathlib.Path(libdir.strip())
def tarball(
    self,
    pkg: mpkg.BasePackage,
    name_tpl: typing.Optional[str] = None,
    *,
    target_dir: pathlib.Path,
    io: cleo_io.IO,
) -> pathlib.Path:
    # Produce a gzipped source tarball of the repository (including all
    # submodules) under target_dir and return the resulting path.
    self.download(io)
    repo = tools.git.repo(self.url)
    if name_tpl is None:
        name_tpl = f"{pkg.unique_name}{{part}}.tar{{comp}}"
    target_path = target_dir / name_tpl.format(part="", comp="")
    if self.ref is not None:
        ref = self.ref
    else:
        ref = "HEAD"
    # Archive the superproject first; submodules are appended afterwards
    # since git archive does not recurse into them.
    repo.run(
        "archive",
        f"--output={target_path}",
        "--format=tar",
        f"--prefix={pkg.unique_name}/",
        ref,
    )
    submodules = repo.run("submodule", "foreach", "--recursive").strip("\n")
    if submodules:
        for submodule in submodules.split("\n"):
            # Each line of foreach output is "Entering '<path>'".
            path_m = re.match("Entering '([^']+)'", submodule)
            if not path_m:
                raise ValueError(
                    "cannot parse git submodule foreach output")
            path = path_m.group(1)
            module_repo = tools.git.Git(repo._work_dir / path)
            with tempfile.NamedTemporaryFile() as f:
                module_repo.run(
                    "archive",
                    "--format=tar",
                    f"--output={f.name}",
                    f"--prefix={pkg.unique_name}/{path}/",
                    "HEAD",
                )
                if platform.system() == "Darwin":
                    # macOS (BSD) tar lacks --concatenate, so merge the
                    # submodule archive member-by-member with tarfile.
                    with tarfile.open(f.name) as modf, tarfile.open(
                            target_path, "a") as tf:
                        for m in modf.getmembers():
                            if m.issym():
                                # Skip broken symlinks.
                                target = os.path.normpath("/".join(
                                    filter(
                                        None,
                                        (
                                            os.path.dirname(m.name),
                                            m.linkname,
                                        ),
                                    )))
                                try:
                                    modf.getmember(target)
                                except KeyError:
                                    continue
                            tf.addfile(m, modf.extractfile(m))
                else:
                    tools.cmd(
                        "tar",
                        "--concatenate",
                        "--file",
                        target_path,
                        f.name,
                    )
    tools.cmd("gzip", target_path, cwd=target_dir)
    return pathlib.Path(f"{target_path}.gz")