def test_gpkg_missing_manifest_signature(self):
    """A gpkg whose Manifest has its PGP armor markers stripped must fail
    signature verification with InvalidSignature on decompress."""
    if sys.version_info.major < 3:
        self.skipTest("Not support Python 2")

    playground = ResolverPlayground(
        user_config={
            "make.conf": (
                'FEATURES="${FEATURES} binpkg-signing '
                'binpkg-request-signature"',
                'BINPKG_FORMAT="gpkg"',
            ),
        })
    tmpdir = tempfile.mkdtemp()

    try:
        settings = playground.settings
        gpg = GPG(settings)
        gpg.unlock()

        payload_dir = os.path.join(tmpdir, "orig/")
        os.makedirs(payload_dir)
        with open(os.path.join(payload_dir, "data"), "wb") as payload_file:
            payload_file.write(urandom(1048576))

        # Build a properly signed package first.
        signed_pkg = gpkg(settings, "test",
                          os.path.join(tmpdir, "test-1.gpkg.tar"))
        signed_pkg.compress(payload_dir, {})

        # Re-pack the archive, stripping the armor markers from the
        # Manifest so its embedded signature block is destroyed.
        with tarfile.open(os.path.join(tmpdir, "test-1.gpkg.tar"),
                          "r") as src_tar:
            with tarfile.open(os.path.join(tmpdir, "test-2.gpkg.tar"),
                              "w") as dst_tar:
                for member in src_tar.getmembers():
                    if member.name != "Manifest":
                        dst_tar.addfile(member, src_tar.extractfile(member))
                        continue
                    text = src_tar.extractfile(member).read().decode("UTF-8")
                    text = text.replace("-----BEGIN PGP SIGNATURE-----", "")
                    text = text.replace("-----END PGP SIGNATURE-----", "")
                    raw = text.encode("UTF-8")
                    # Keep the tar header consistent with the shrunken body.
                    member.size = len(raw)
                    dst_tar.addfile(member, io.BytesIO(raw))

        tampered_pkg = gpkg(settings, "test",
                            os.path.join(tmpdir, "test-2.gpkg.tar"))
        self.assertRaises(InvalidSignature, tampered_pkg.decompress,
                          os.path.join(tmpdir, "test"))
    finally:
        shutil.rmtree(tmpdir)
        playground.cleanup()
def test_gpkg_missing_signature(self):
    """A gpkg archive whose detached ``.sig`` members were removed must be
    rejected with MissingSignature when binpkg-request-signature is set."""
    if sys.version_info.major < 3:
        self.skipTest("Not support Python 2")

    playground = ResolverPlayground(
        user_config={
            "make.conf": (
                'FEATURES="${FEATURES} binpkg-signing '
                'binpkg-request-signature"',
                'BINPKG_FORMAT="gpkg"',
            ),
        })
    tmpdir = tempfile.mkdtemp()

    try:
        settings = playground.settings
        gpg = GPG(settings)
        gpg.unlock()

        orig_full_path = os.path.join(tmpdir, "orig/")
        os.makedirs(orig_full_path)

        data = urandom(1048576)
        with open(os.path.join(orig_full_path, "data"), "wb") as f:
            f.write(data)

        # Build a properly signed package first.
        binpkg_1 = gpkg(settings, "test",
                        os.path.join(tmpdir, "test-1.gpkg.tar"))
        binpkg_1.compress(orig_full_path, {})

        # Re-pack the archive, copying everything except the detached
        # signature files.  (The original used an empty "pass" branch
        # with an inverted condition; same behavior, direct condition.)
        with tarfile.open(os.path.join(tmpdir, "test-1.gpkg.tar"),
                          "r") as tar_1:
            with tarfile.open(os.path.join(tmpdir, "test-2.gpkg.tar"),
                              "w") as tar_2:
                for f in tar_1.getmembers():
                    if not f.name.endswith(".sig"):
                        tar_2.addfile(f, tar_1.extractfile(f))

        binpkg_2 = gpkg(settings, "test",
                        os.path.join(tmpdir, "test-2.gpkg.tar"))
        self.assertRaises(MissingSignature, binpkg_2.decompress,
                          os.path.join(tmpdir, "test"))
    finally:
        shutil.rmtree(tmpdir)
        playground.cleanup()
def test_gpkg_untrusted_signature(self):
    """A package signed with a key the verifying configuration does not
    trust must be rejected with InvalidSignature on decompress."""
    if sys.version_info.major < 3:
        self.skipTest("Not support Python 2")

    gpg_test_path = os.environ["PORTAGE_GNUPGHOME"]

    playground = ResolverPlayground(
        user_config={
            "make.conf": (
                'FEATURES="${FEATURES} binpkg-signing '
                'binpkg-request-signature"',
                'BINPKG_FORMAT="gpkg"',
                f'BINPKG_GPG_SIGNING_BASE_COMMAND="flock {gpg_test_path}/portage-binpkg-gpg.lock /usr/bin/gpg --sign --armor --batch --no-tty --yes --pinentry-mode loopback --passphrase GentooTest [PORTAGE_CONFIG]"',
                'BINPKG_GPG_SIGNING_DIGEST="SHA512"',
                f'BINPKG_GPG_SIGNING_GPG_HOME="{gpg_test_path}"',
                'BINPKG_GPG_SIGNING_KEY="0x8812797DDF1DD192"',
                'BINPKG_GPG_VERIFY_BASE_COMMAND="/usr/bin/gpg --verify --batch --no-tty --yes --no-auto-check-trustdb --status-fd 1 [PORTAGE_CONFIG] [SIGNATURE]"',
                f'BINPKG_GPG_VERIFY_GPG_HOME="{gpg_test_path}"',
            ),
        })
    tmpdir = tempfile.mkdtemp()

    try:
        settings = playground.settings
        gpg = GPG(settings)
        gpg.unlock()

        payload_dir = os.path.join(tmpdir, "orig/")
        os.makedirs(payload_dir)
        with open(os.path.join(payload_dir, "data"), "wb") as payload_file:
            payload_file.write(urandom(1048576))

        # Sign the package with the untrusted key configured above.
        writer = gpkg(settings, "test",
                      os.path.join(tmpdir, "test-1.gpkg.tar"))
        writer.compress(payload_dir, {})

        # Re-open the very same archive for decompression: the signature
        # verifies cryptographically but the key is not trusted here.
        reader = gpkg(settings, "test",
                      os.path.join(tmpdir, "test-1.gpkg.tar"))
        self.assertRaises(InvalidSignature, reader.decompress,
                          os.path.join(tmpdir, "test"))
    finally:
        shutil.rmtree(tmpdir)
        playground.cleanup()
class ResolverPlayground:
    """
    This class helps to create the necessary files on disk and
    the needed settings instances, etc. for the resolver to do
    its work.
    """

    # Config file names that callers may supply via the profile /
    # user_config / repo_configs constructor arguments.
    # NOTE(review): "layout.conf" is listed twice; harmless in a frozenset.
    config_files = frozenset((
        "eapi",
        "layout.conf",
        "make.conf",
        "modules",
        "package.accept_keywords",
        "package.keywords",
        "package.license",
        "package.mask",
        "package.properties",
        "package.provided",
        "packages",
        "package.unmask",
        "package.use",
        "package.use.aliases",
        "package.use.force",
        "package.use.mask",
        "package.use.stable.force",
        "package.use.stable.mask",
        "soname.provided",
        "unpack_dependencies",
        "use.aliases",
        "use.force",
        "use.mask",
        "layout.conf",
    ))

    # Template for generated metadata.xml files (%(flags)s is filled in
    # by callers).
    metadata_xml_template = """<?xml version="1.0" encoding="UTF-8"?>
<!DOCTYPE pkgmetadata SYSTEM "https://www.gentoo.org/dtd/metadata.dtd">
<pkgmetadata>
<maintainer type="person">
<email>[email protected]</email>
<description>Description of the maintainership</description>
</maintainer>
<longdescription>Long description of the package</longdescription>
<use>
%(flags)s
</use>
</pkgmetadata>
"""

    # Portage user-facing executables symlinked into EPREFIX/usr/bin.
    portage_bin = (
        "ebuild",
        "egencache",
        "emerge",
        "emerge-webrsync",
        "emirrordist",
        "glsa-check",
        "portageq",
        "quickpkg",
    )

    # Portage admin executables symlinked into EPREFIX/usr/sbin.
    portage_sbin = (
        "archive-conf",
        "dispatch-conf",
        "emaint",
        "env-update",
        "etc-update",
        "fixpackages",
        "regenworld",
    )

    # NOTE(review): the mutable default arguments below are shared across
    # calls; safe only as long as they are never mutated — consider
    # None-defaults if this constructor is ever changed.
    def __init__(
        self,
        ebuilds={},
        binpkgs={},
        installed={},
        profile={},
        repo_configs={},
        user_config={},
        sets={},
        world=[],
        world_sets=[],
        distfiles={},
        eclasses={},
        eprefix=None,
        targetroot=False,
        debug=False,
    ):
        """
        ebuilds: cpv -> metadata mapping simulating available ebuilds.
        installed: cpv -> metadata mapping simulating installed packages.
            If a metadata key is missing, it gets a default value.
        profile: settings defined by the profile.
        """
        self.debug = debug
        if eprefix is None:
            self.eprefix = normalize_path(tempfile.mkdtemp())

            # EPREFIX/bin is used by fake true_binaries. Real binaries
            # goes into EPREFIX/usr/bin
            eubin = os.path.join(self.eprefix, "usr", "bin")
            ensure_dirs(eubin)
            for x in self.portage_bin:
                os.symlink(os.path.join(PORTAGE_BIN_PATH, x),
                           os.path.join(eubin, x))

            eusbin = os.path.join(self.eprefix, "usr", "sbin")
            ensure_dirs(eusbin)
            for x in self.portage_sbin:
                os.symlink(os.path.join(PORTAGE_BIN_PATH, x),
                           os.path.join(eusbin, x))

            # System tools ebuilds need; resolved from the host PATH and
            # symlinked into the sandboxed prefix.
            essential_binaries = (
                "awk",
                "basename",
                "bzip2",
                "cat",
                "chgrp",
                "chmod",
                "chown",
                "comm",
                "cp",
                "egrep",
                "env",
                "find",
                "flock",
                "grep",
                "head",
                "install",
                "ln",
                "mkdir",
                "mkfifo",
                "mktemp",
                "mv",
                "readlink",
                "rm",
                "sed",
                "sort",
                "tar",
                "tr",
                "uname",
                "uniq",
                "xargs",
                "zstd",
            )

            # Exclude internal wrappers from PATH lookup.
            orig_path = os.environ["PATH"]
            included_paths = []
            for path in orig_path.split(":"):
                if path and not fnmatch.fnmatch(path, "*/portage/*/ebuild-helpers*"):
                    included_paths.append(path)
            try:
                os.environ["PATH"] = ":".join(included_paths)
                for x in essential_binaries:
                    path = find_binary(x)
                    if path is None:
                        raise portage.exception.CommandNotFound(x)
                    os.symlink(path, os.path.join(eubin, x))
            finally:
                # Always restore the caller's PATH, even on failure.
                os.environ["PATH"] = orig_path
        else:
            self.eprefix = normalize_path(eprefix)

        # Tests may override portage.const.EPREFIX in order to
        # simulate a prefix installation. It's reasonable to do
        # this because tests should be self-contained such that
        # the "real" value of portage.const.EPREFIX is entirely
        # irrelevant (see bug #492932).
        self._orig_eprefix = portage.const.EPREFIX
        portage.const.EPREFIX = self.eprefix.rstrip(os.sep)

        self.eroot = self.eprefix + os.sep
        if targetroot:
            self.target_root = os.path.join(self.eroot, "target_root")
        else:
            self.target_root = os.sep

        self.distdir = os.path.join(self.eroot, "var", "portage", "distfiles")
        self.pkgdir = os.path.join(self.eprefix, "pkgdir")

        self.vdbdir = os.path.join(self.eroot, "var/db/pkg")
        os.makedirs(self.vdbdir)

        if not debug:
            # Silence portage output while the playground is built.
            portage.util.noiselimit = -2

        self._repositories = {}
        # Make sure the main repo is always created
        self._get_repo_dir("test_repo")

        self._create_distfiles(distfiles)
        self._create_ebuilds(ebuilds)
        self._create_installed(installed)
        self._create_profile(ebuilds, eclasses, installed, profile,
                             repo_configs, user_config, sets)
        self._create_world(world, world_sets)

        self.settings, self.trees = self._load_config()

        # Lazily started by _create_binpkgs when gpkg signing is needed;
        # stopped again in cleanup().
        self.gpg = None
        self._create_binpkgs(binpkgs)
        self._create_ebuild_manifests(ebuilds)

        portage.util.noiselimit = 0

    def reload_config(self):
        """
        Reload configuration from disk, which is useful if it has
        been modified after the constructor has been called.
        """
        for eroot in self.trees:
            portdb = self.trees[eroot]["porttree"].dbapi
            portdb.close_caches()
        self.settings, self.trees = self._load_config()

    def _get_repo_dir(self, repo):
        """
        Create the repo directory if needed.

        Returns the repository's location path; also registers the first
        "test_repo" as the DEFAULT main repo.
        """
        if repo not in self._repositories:
            if repo == "test_repo":
                self._repositories["DEFAULT"] = {"main-repo": repo}

            repo_path = os.path.join(self.eroot, "var", "repositories", repo)
            self._repositories[repo] = {"location": repo_path}
            profile_path = os.path.join(repo_path, "profiles")

            try:
                os.makedirs(profile_path)
            except os.error:
                pass

            repo_name_file = os.path.join(profile_path, "repo_name")
            with open(repo_name_file, "w") as f:
                f.write("%s\n" % repo)

        return self._repositories[repo]["location"]

    def _create_distfiles(self, distfiles):
        """Write each name -> bytes entry of distfiles into DISTDIR."""
        os.makedirs(self.distdir)
        for k, v in distfiles.items():
            with open(os.path.join(self.distdir, k), "wb") as f:
                f.write(v)

    def _create_ebuilds(self, ebuilds):
        """Materialize each cpv -> metadata mapping as an .ebuild file in
        its repository, filling in default metadata values."""
        for cpv in ebuilds:
            a = Atom("=" + cpv, allow_repo=True)
            repo = a.repo
            if repo is None:
                repo = "test_repo"

            metadata = ebuilds[cpv].copy()
            copyright_header = metadata.pop("COPYRIGHT_HEADER", None)
            eapi = metadata.pop("EAPI", "0")
            misc_content = metadata.pop("MISC_CONTENT", None)
            metadata.setdefault("DEPEND", "")
            metadata.setdefault("SLOT", "0")
            metadata.setdefault("KEYWORDS", "x86")
            metadata.setdefault("IUSE", "")

            unknown_keys = set(metadata).difference(
                portage.dbapi.dbapi._known_keys)
            if unknown_keys:
                raise ValueError(
                    "metadata of ebuild '%s' contains unknown keys: %s"
                    % (cpv, sorted(unknown_keys)))

            repo_dir = self._get_repo_dir(repo)
            ebuild_dir = os.path.join(repo_dir, a.cp)
            ebuild_path = os.path.join(ebuild_dir,
                                       a.cpv.split("/")[1] + ".ebuild")
            try:
                os.makedirs(ebuild_dir)
            except os.error:
                pass

            with open(ebuild_path, "w") as f:
                if copyright_header is not None:
                    f.write(copyright_header)
                f.write('EAPI="%s"\n' % eapi)
                for k, v in metadata.items():
                    f.write('%s="%s"\n' % (k, v))
                if misc_content is not None:
                    f.write(misc_content)

    def _create_ebuild_manifests(self, ebuilds):
        """Generate Manifest files (digests) for all created ebuilds."""
        tmpsettings = config(clone=self.settings)
        tmpsettings["PORTAGE_QUIET"] = "1"
        for cpv in ebuilds:
            a = Atom("=" + cpv, allow_repo=True)
            repo = a.repo
            if repo is None:
                repo = "test_repo"

            repo_dir = self._get_repo_dir(repo)
            ebuild_dir = os.path.join(repo_dir, a.cp)
            ebuild_path = os.path.join(ebuild_dir,
                                       a.cpv.split("/")[1] + ".ebuild")

            portdb = self.trees[self.eroot]["porttree"].dbapi
            tmpsettings["O"] = ebuild_dir
            if not digestgen(mysettings=tmpsettings, myportdb=portdb):
                raise AssertionError("digest creation failed for %s"
                                     % ebuild_path)

    def _create_binpkgs(self, binpkgs):
        """Create binary packages (xpak or gpkg) under PKGDIR from the
        given cpv -> metadata entries."""
        # When using BUILD_ID, there can be mutiple instances for the
        # same cpv. Therefore, binpkgs may be an iterable instead of
        # a dict.
        items = getattr(binpkgs, "items", None)
        items = items() if items is not None else binpkgs
        binpkg_format = self.settings.get(
            "BINPKG_FORMAT", SUPPORTED_GENTOO_BINPKG_FORMATS[0])
        if binpkg_format == "gpkg":
            # gpkg packages are signed, so a GPG agent must be running.
            if self.gpg is None:
                self.gpg = GPG(self.settings)
                self.gpg.unlock()
        for cpv, metadata in items:
            a = Atom("=" + cpv, allow_repo=True)
            repo = a.repo
            if repo is None:
                repo = "test_repo"

            pn = catsplit(a.cp)[1]
            cat, pf = catsplit(a.cpv)
            metadata = metadata.copy()
            metadata.setdefault("SLOT", "0")
            metadata.setdefault("KEYWORDS", "x86")
            metadata.setdefault("BUILD_TIME", "0")
            metadata["repository"] = repo
            metadata["CATEGORY"] = cat
            metadata["PF"] = pf
            metadata["BINPKG_FORMAT"] = binpkg_format

            repo_dir = self.pkgdir
            category_dir = os.path.join(repo_dir, cat)
            if "BUILD_ID" in metadata:
                # Multi-instance layout: CATEGORY/PN/PF-BUILD_ID.<ext>
                if binpkg_format == "xpak":
                    binpkg_path = os.path.join(
                        category_dir, pn,
                        "%s-%s.xpak" % (pf, metadata["BUILD_ID"]))
                elif binpkg_format == "gpkg":
                    binpkg_path = os.path.join(
                        category_dir, pn,
                        "%s-%s.gpkg.tar" % (pf, metadata["BUILD_ID"]))
                else:
                    raise InvalidBinaryPackageFormat(binpkg_format)
            else:
                if binpkg_format == "xpak":
                    binpkg_path = os.path.join(category_dir, pf + ".tbz2")
                elif binpkg_format == "gpkg":
                    binpkg_path = os.path.join(category_dir, pf + ".gpkg.tar")
                else:
                    raise InvalidBinaryPackageFormat(binpkg_format)

            ensure_dirs(os.path.dirname(binpkg_path))
            if binpkg_format == "xpak":
                t = portage.xpak.tbz2(binpkg_path)
                t.recompose_mem(portage.xpak.xpak_mem(metadata))
            elif binpkg_format == "gpkg":
                t = portage.gpkg.gpkg(self.settings, a.cpv, binpkg_path)
                t.compress(os.path.dirname(binpkg_path), metadata)
            else:
                raise InvalidBinaryPackageFormat(binpkg_format)

    def _create_installed(self, installed):
        """Populate the vdb (/var/db/pkg) with entries simulating
        installed packages."""
        for cpv in installed:
            a = Atom("=" + cpv, allow_repo=True)
            repo = a.repo
            if repo is None:
                repo = "test_repo"

            vdb_pkg_dir = os.path.join(self.vdbdir, a.cpv)
            try:
                os.makedirs(vdb_pkg_dir)
            except os.error:
                pass

            metadata = installed[cpv].copy()
            metadata.setdefault("SLOT", "0")
            metadata.setdefault("BUILD_TIME", "0")
            metadata.setdefault("COUNTER", "0")
            metadata.setdefault("KEYWORDS", "~x86")

            # Vdb entries legitimately carry extra keys beyond the
            # ebuild-metadata set; ignore those when validating.
            unknown_keys = set(metadata).difference(
                portage.dbapi.dbapi._known_keys)
            unknown_keys.discard("BUILD_TIME")
            unknown_keys.discard("BUILD_ID")
            unknown_keys.discard("COUNTER")
            unknown_keys.discard("repository")
            unknown_keys.discard("USE")
            unknown_keys.discard("PROVIDES")
            unknown_keys.discard("REQUIRES")
            if unknown_keys:
                raise ValueError(
                    "metadata of installed '%s' contains unknown keys: %s"
                    % (cpv, sorted(unknown_keys)))

            metadata["repository"] = repo
            for k, v in metadata.items():
                with open(os.path.join(vdb_pkg_dir, k), "w") as f:
                    f.write("%s\n" % v)

            ebuild_path = os.path.join(vdb_pkg_dir,
                                       a.cpv.split("/")[1] + ".ebuild")
            with open(ebuild_path, "w") as f:
                f.write('EAPI="%s"\n' % metadata.pop("EAPI", "0"))
                for k, v in metadata.items():
                    f.write('%s="%s"\n' % (k, v))

            # environment.bz2 is just the bzip2-compressed ebuild text.
            env_path = os.path.join(vdb_pkg_dir, "environment.bz2")
            with bz2.BZ2File(env_path, mode="w") as f:
                with open(ebuild_path, "rb") as inputfile:
                    f.write(inputfile.read())

    def _create_profile(self, ebuilds, eclasses, installed, profile,
                        repo_configs, user_config, sets):
        """Create repository profiles, the user config directory (incl.
        make.conf), the default profile symlink, make.globals, sets
        configuration and the repoman share dir."""
        user_config_dir = os.path.join(self.eroot, USER_CONFIG_PATH)

        try:
            os.makedirs(user_config_dir)
        except os.error:
            pass

        for repo in self._repositories:
            if repo == "DEFAULT":
                continue

            repo_dir = self._get_repo_dir(repo)
            profile_dir = os.path.join(repo_dir, "profiles")
            metadata_dir = os.path.join(repo_dir, "metadata")
            os.makedirs(metadata_dir)

            # Create $REPO/profiles/categories
            categories = set()
            for cpv in ebuilds:
                ebuilds_repo = Atom("=" + cpv, allow_repo=True).repo
                if ebuilds_repo is None:
                    ebuilds_repo = "test_repo"
                if ebuilds_repo == repo:
                    categories.add(catsplit(cpv)[0])

            categories_file = os.path.join(profile_dir, "categories")
            with open(categories_file, "w") as f:
                for cat in categories:
                    f.write(cat + "\n")

            # Create $REPO/profiles/license_groups
            license_file = os.path.join(profile_dir, "license_groups")
            with open(license_file, "w") as f:
                f.write("EULA TEST\n")

            repo_config = repo_configs.get(repo)
            if repo_config:
                for config_file, lines in repo_config.items():
                    if config_file not in self.config_files and not any(
                            fnmatch.fnmatch(config_file,
                                            os.path.join(x, "*"))
                            for x in self.config_files):
                        raise ValueError("Unknown config file: '%s'"
                                         % config_file)

                    if config_file in ("layout.conf", ):
                        file_name = os.path.join(repo_dir, "metadata",
                                                 config_file)
                    else:
                        file_name = os.path.join(profile_dir, config_file)
                        if "/" in config_file and not os.path.isdir(
                                os.path.dirname(file_name)):
                            os.makedirs(os.path.dirname(file_name))

                    with open(file_name, "w") as f:
                        for line in lines:
                            f.write("%s\n" % line)
                        # Temporarily write empty value of masters until it becomes default.
                        # TODO: Delete all references to "# use implicit masters" when empty value becomes default.
                        if config_file == "layout.conf" and not any(
                                line.startswith(("masters =",
                                                 "# use implicit masters"))
                                for line in lines):
                            f.write("masters =\n")

            # Create $profile_dir/eclass (we fail to digest the ebuilds if it's not there)
            eclass_dir = os.path.join(repo_dir, "eclass")
            os.makedirs(eclass_dir)

            for eclass_name, eclass_content in eclasses.items():
                with open(os.path.join(eclass_dir,
                                       "{}.eclass".format(eclass_name)),
                          "wt") as f:
                    if isinstance(eclass_content, str):
                        eclass_content = [eclass_content]
                    for line in eclass_content:
                        f.write("{}\n".format(line))

            # Temporarily write empty value of masters until it becomes default.
            if not repo_config or "layout.conf" not in repo_config:
                layout_conf_path = os.path.join(repo_dir, "metadata",
                                                "layout.conf")
                with open(layout_conf_path, "w") as f:
                    f.write("masters =\n")

            if repo == "test_repo":
                # Create a minimal profile in /var/db/repos/gentoo
                sub_profile_dir = os.path.join(
                    profile_dir, "default", "linux", "x86", "test_profile")
                os.makedirs(sub_profile_dir)

                if not (profile and "eapi" in profile):
                    eapi_file = os.path.join(sub_profile_dir, "eapi")
                    with open(eapi_file, "w") as f:
                        f.write("0\n")

                make_defaults_file = os.path.join(sub_profile_dir,
                                                  "make.defaults")
                with open(make_defaults_file, "w") as f:
                    f.write('ARCH="x86"\n')
                    f.write('ACCEPT_KEYWORDS="x86"\n')

                use_force_file = os.path.join(sub_profile_dir, "use.force")
                with open(use_force_file, "w") as f:
                    f.write("x86\n")

                parent_file = os.path.join(sub_profile_dir, "parent")
                with open(parent_file, "w") as f:
                    f.write("..\n")

                if profile:
                    for config_file, lines in profile.items():
                        if config_file not in self.config_files:
                            raise ValueError("Unknown config file: '%s'"
                                             % config_file)

                        file_name = os.path.join(sub_profile_dir,
                                                 config_file)
                        with open(file_name, "w") as f:
                            for line in lines:
                                f.write("%s\n" % line)

                # Create profile symlink
                os.symlink(sub_profile_dir,
                           os.path.join(user_config_dir, "make.profile"))

        gpg_test_path = os.environ["PORTAGE_GNUPGHOME"]

        make_conf = {
            "ACCEPT_KEYWORDS": "x86",
            "BINPKG_GPG_SIGNING_BASE_COMMAND": f"flock {gpg_test_path}/portage-binpkg-gpg.lock /usr/bin/gpg --sign --armor --yes --pinentry-mode loopback --passphrase GentooTest [PORTAGE_CONFIG]",
            "BINPKG_GPG_SIGNING_GPG_HOME": gpg_test_path,
            "BINPKG_GPG_SIGNING_KEY": "0x5D90EA06352177F6",
            "BINPKG_GPG_VERIFY_GPG_HOME": gpg_test_path,
            "CLEAN_DELAY": "0",
            "DISTDIR": self.distdir,
            "EMERGE_WARNING_DELAY": "0",
            "FEATURES": "${FEATURES} binpkg-signing binpkg-request-signature "
            "gpg-keepalive",
            "PKGDIR": self.pkgdir,
            "PORTAGE_INST_GID": str(portage.data.portage_gid),
            "PORTAGE_INST_UID": str(portage.data.portage_uid),
            "PORTAGE_TMPDIR": os.path.join(self.eroot, "var/tmp"),
        }

        if os.environ.get("NOCOLOR"):
            make_conf["NOCOLOR"] = os.environ["NOCOLOR"]

        # Pass along PORTAGE_USERNAME and PORTAGE_GRPNAME since they
        # need to be inherited by ebuild subprocesses.
        if "PORTAGE_USERNAME" in os.environ:
            make_conf["PORTAGE_USERNAME"] = os.environ["PORTAGE_USERNAME"]
        if "PORTAGE_GRPNAME" in os.environ:
            make_conf["PORTAGE_GRPNAME"] = os.environ["PORTAGE_GRPNAME"]

        make_conf_lines = []
        for k_v in make_conf.items():
            make_conf_lines.append('%s="%s"' % k_v)

        # User-supplied make.conf lines come last so they can override
        # the defaults above.
        if "make.conf" in user_config:
            make_conf_lines.extend(user_config["make.conf"])

        if not portage.process.sandbox_capable or os.environ.get(
                "SANDBOX_ON") == "1":
            # avoid problems from nested sandbox instances
            make_conf_lines.append(
                'FEATURES="${FEATURES} -sandbox -usersandbox"')

        configs = user_config.copy()
        configs["make.conf"] = make_conf_lines

        for config_file, lines in configs.items():
            if config_file not in self.config_files:
                raise ValueError("Unknown config file: '%s'" % config_file)

            file_name = os.path.join(user_config_dir, config_file)
            with open(file_name, "w") as f:
                for line in lines:
                    f.write("%s\n" % line)

        # Create /usr/share/portage/config/make.globals
        make_globals_path = os.path.join(
            self.eroot, GLOBAL_CONFIG_PATH.lstrip(os.sep), "make.globals")
        ensure_dirs(os.path.dirname(make_globals_path))
        os.symlink(os.path.join(cnf_path, "make.globals"),
                   make_globals_path)

        # Create /usr/share/portage/config/sets/portage.conf
        default_sets_conf_dir = os.path.join(
            self.eroot, "usr/share/portage/config/sets")

        try:
            os.makedirs(default_sets_conf_dir)
        except os.error:
            pass

        provided_sets_portage_conf = os.path.join(
            cnf_path, "sets", "portage.conf")
        os.symlink(
            provided_sets_portage_conf,
            os.path.join(default_sets_conf_dir, "portage.conf"),
        )

        set_config_dir = os.path.join(user_config_dir, "sets")

        try:
            os.makedirs(set_config_dir)
        except os.error:
            pass

        for sets_file, lines in sets.items():
            file_name = os.path.join(set_config_dir, sets_file)
            with open(file_name, "w") as f:
                for line in lines:
                    f.write("%s\n" % line)

        if cnf_path_repoman is not None:
            # Create /usr/share/repoman
            repoman_share_dir = os.path.join(self.eroot, "usr", "share",
                                             "repoman")
            os.symlink(cnf_path_repoman, repoman_share_dir)

    def _create_world(self, world, world_sets):
        """Write the world and world_sets files under /var/lib/portage."""
        # Create /var/lib/portage/world
        var_lib_portage = os.path.join(self.eroot, "var", "lib", "portage")
        os.makedirs(var_lib_portage)

        world_file = os.path.join(var_lib_portage, "world")
        world_set_file = os.path.join(var_lib_portage, "world_sets")

        with open(world_file, "w") as f:
            for atom in world:
                f.write("%s\n" % atom)

        with open(world_set_file, "w") as f:
            for atom in world_sets:
                f.write("%s\n" % atom)

    def _load_config(self):
        """Build portage trees/settings from the playground's repositories
        and return (settings, trees) for the target eroot."""
        create_trees_kwargs = {}
        if self.target_root != os.sep:
            create_trees_kwargs["target_root"] = self.target_root

        # Serialize the repository layout as repos.conf-style text.
        env = {
            "PORTAGE_REPOSITORIES": "\n".join(
                "[%s]\n%s" % (
                    repo_name,
                    "\n".join("%s = %s" % (k, v)
                              for k, v in repo_config.items()),
                )
                for repo_name, repo_config in self._repositories.items())
        }

        if self.debug:
            env["PORTAGE_DEBUG"] = "1"

        trees = portage.create_trees(env=env, eprefix=self.eprefix,
                                     **create_trees_kwargs)

        for root, root_trees in trees.items():
            settings = root_trees["vartree"].settings
            settings._init_dirs()
            setconfig = load_default_config(settings, root_trees)
            root_trees["root_config"] = RootConfig(settings, root_trees,
                                                   setconfig)

        return trees[trees._target_eroot]["vartree"].settings, trees

    def run(self, atoms, options={}, action=None):
        """Run the resolver (or depclean/prune) in --pretend mode for the
        given atoms and return a ResolverPlayground*Result."""
        options = options.copy()
        options["--pretend"] = True
        if self.debug:
            options["--debug"] = True

        if action is None:
            if options.get("--depclean"):
                action = "depclean"
            elif options.get("--prune"):
                action = "prune"

        if "--usepkgonly" in options:
            options["--usepkg"] = True

        global_noiselimit = portage.util.noiselimit
        global_emergelog_disable = _emerge.emergelog._disable
        try:
            if not self.debug:
                portage.util.noiselimit = -2
            _emerge.emergelog._disable = True

            if action in ("depclean", "prune"):
                depclean_result = _calc_depclean(
                    self.settings,
                    self.trees,
                    None,
                    options,
                    action,
                    InternalPackageSet(initial_atoms=atoms,
                                       allow_wildcard=True),
                    None,
                )
                result = ResolverPlaygroundDepcleanResult(
                    atoms,
                    depclean_result.returncode,
                    depclean_result.cleanlist,
                    depclean_result.ordered,
                    depclean_result.req_pkg_count,
                    depclean_result.depgraph,
                )
            else:
                params = create_depgraph_params(options, action)
                success, depgraph, favorites = backtrack_depgraph(
                    self.settings, self.trees, options, params, action,
                    atoms, None)
                depgraph._show_merge_list()
                depgraph.display_problems()
                result = ResolverPlaygroundResult(atoms, success, depgraph,
                                                  favorites)
        finally:
            # Restore the global noise/log state no matter what happened.
            portage.util.noiselimit = global_noiselimit
            _emerge.emergelog._disable = global_emergelog_disable

        return result

    def run_TestCase(self, test_case):
        """Run every request of a ResolverPlaygroundTestCase; stops at the
        first request whose result does not match expectations."""
        if not isinstance(test_case, ResolverPlaygroundTestCase):
            raise TypeError(
                "ResolverPlayground needs a ResolverPlaygroundTestCase")
        for atoms in test_case.requests:
            result = self.run(atoms, test_case.options, test_case.action)
            if not test_case.compare_with_result(result):
                return

    def cleanup(self):
        """Stop the GPG agent, close portdb caches and (unless debugging)
        remove the temporary EROOT; restore portage.const.EPREFIX."""
        if self.gpg is not None:
            self.gpg.stop()
        for eroot in self.trees:
            portdb = self.trees[eroot]["porttree"].dbapi
            portdb.close_caches()
        if self.debug:
            print("\nEROOT=%s" % self.eroot)
        else:
            shutil.rmtree(self.eroot)
        if hasattr(self, "_orig_eprefix"):
            portage.const.EPREFIX = self._orig_eprefix
def test_gpkg_unknown_signature(self):
    """Replacing the Manifest with a clearsigned block from a key unknown
    to the verifier must make decompress raise InvalidSignature."""
    if sys.version_info.major < 3:
        self.skipTest("Not support Python 2")

    playground = ResolverPlayground(
        user_config={
            "make.conf": (
                'FEATURES="${FEATURES} binpkg-signing '
                'binpkg-request-signature"',
                'BINPKG_FORMAT="gpkg"',
            ),
        })
    tmpdir = tempfile.mkdtemp()

    try:
        settings = playground.settings
        gpg = GPG(settings)
        gpg.unlock()

        orig_full_path = os.path.join(tmpdir, "orig/")
        os.makedirs(orig_full_path)

        data = urandom(1048576)
        with open(os.path.join(orig_full_path, "data"), "wb") as f:
            f.write(data)

        # Build a properly signed package first.
        binpkg_1 = gpkg(settings, "test",
                        os.path.join(tmpdir, "test-1.gpkg.tar"))
        binpkg_1.compress(orig_full_path, {})

        # Re-pack the archive with its Manifest swapped for a canned
        # clearsigned block made by a key this verifier does not know.
        with tarfile.open(os.path.join(tmpdir, "test-1.gpkg.tar"),
                          "r") as tar_1:
            with tarfile.open(os.path.join(tmpdir, "test-2.gpkg.tar"),
                              "w") as tar_2:
                for f in tar_1.getmembers():
                    if f.name == "Manifest":
                        sig = b"""
-----BEGIN PGP SIGNED MESSAGE-----
Hash: SHA256

DATA test/image.tar.zst 1049649 BLAKE2B 3112adba9c09023962f26d9dcbf8e74107c05220f2f29aa2ce894f8a4104c3bb238f87095df73735befcf1e1f6039fc3abf4defa87e68ce80f33dd01e09c055a SHA512 9f584727f2e20a50a30e0077b94082c8c1f517ebfc9978eb3281887e24458108e73d1a2ce82eb0b59f5df7181597e4b0a297ae68bbfb36763aa052e6bdbf2c59
DATA test/image.tar.zst.sig 833 BLAKE2B 214724ae4ff9198879c8c960fd8167632e27982c2278bb873f195abe75b75afa1ebed4c37ec696f5f5bc35c3a1184b60e0b50d56695b072b254f730db01eddb5 SHA512 67316187da8bb6b7a5f9dc6a42ed5c7d72c6184483a97f23c0bebd8b187ac9268e0409eb233c935101606768718c99eaa5699037d6a68c2d88c9ed5331a3f73c
-----BEGIN PGP SIGNATURE-----

iNUEARYIAH0WIQSMe+CQzU+/D/DeMitA3PGOlxUHlQUCYVrQal8UgAAAAAAuAChp
c3N1ZXItZnByQG5vdGF0aW9ucy5vcGVucGdwLmZpZnRoaG9yc2VtYW4ubmV0OEM3
QkUwOTBDRDRGQkYwRkYwREUzMjJCNDBEQ0YxOEU5NzE1MDc5NQAKCRBA3PGOlxUH
lbmTAP4jdhMTW6g550/t0V7XcixqVtBockOTln8hZrZIQrjAJAD/caDkxgz5Xl8C
EP1pgSXXGtlUnv6akg/wueFJKEr9KQs=
=edEg
-----END PGP SIGNATURE-----
"""
                        data = io.BytesIO(sig)
                        # Keep the tar header size in sync with the
                        # replacement content.
                        f.size = len(sig)
                        tar_2.addfile(f, data)
                        data.close()
                    else:
                        tar_2.addfile(f, tar_1.extractfile(f))

        binpkg_2 = gpkg(settings, "test",
                        os.path.join(tmpdir, "test-2.gpkg.tar"))
        self.assertRaises(InvalidSignature, binpkg_2.decompress,
                          os.path.join(tmpdir, "test"))
    finally:
        shutil.rmtree(tmpdir)
        playground.cleanup()
def test_gpkg_invalid_signature(self):
    """Replacing the Manifest with a clearsigned block whose signature
    does not match the archive contents must raise InvalidSignature."""
    if sys.version_info.major < 3:
        self.skipTest("Not support Python 2")

    playground = ResolverPlayground(
        user_config={
            "make.conf": (
                'FEATURES="${FEATURES} binpkg-signing '
                'binpkg-request-signature"',
                'BINPKG_FORMAT="gpkg"',
            ),
        })
    tmpdir = tempfile.mkdtemp()

    try:
        settings = playground.settings
        gpg = GPG(settings)
        gpg.unlock()

        orig_full_path = os.path.join(tmpdir, "orig/")
        os.makedirs(orig_full_path)

        data = urandom(1048576)
        with open(os.path.join(orig_full_path, "data"), "wb") as f:
            f.write(data)

        # Build a properly signed package first.
        binpkg_1 = gpkg(settings, "test",
                        os.path.join(tmpdir, "test-1.gpkg.tar"))
        binpkg_1.compress(orig_full_path, {})

        # Re-pack the archive with its Manifest swapped for a canned
        # clearsigned block that does not match this archive's contents.
        with tarfile.open(os.path.join(tmpdir, "test-1.gpkg.tar"),
                          "r") as tar_1:
            with tarfile.open(os.path.join(tmpdir, "test-2.gpkg.tar"),
                              "w") as tar_2:
                for f in tar_1.getmembers():
                    if f.name == "Manifest":
                        sig = b"""
-----BEGIN PGP SIGNED MESSAGE-----
Hash: SHA512

DATA test/image.tar.zst 1049649 BLAKE2B 3112adba9c09023962f26d9dcbf8e74107c05220f2f29aa2ce894f8a4104c3bb238f87095df73735befcf1e1f6039fc3abf4defa87e68ce80f33dd01e09c055a SHA512 9f584727f2e20a50a30e0077b94082c8c1f517ebfc9978eb3281887e24458108e73d1a2ce82eb0b59f5df7181597e4b0a297ae68bbfb36763aa052e6bdbf2c59
DATA test/image.tar.zst.sig 833 BLAKE2B 214724ae4ff9198879c8c960fd8167632e27982c2278bb873f195abe75b75afa1ebed4c37ec696f5f5bc35c3a1184b60e0b50d56695b072b254f730db01eddb5 SHA512 67316187da8bb6b7a5f9dc6a42ed5c7d72c6184483a97f23c0bebd8b187ac9268e0409eb233c935101606768718c99eaa5699037d6a68c2d88c9ed5331a3f73c
-----BEGIN PGP SIGNATURE-----

iQIzBAEBCgAdFiEEBrOjEb13XCgNIqkwXZDqBjUhd/YFAmFazXEACgkQXZDqBjUh
d/YFZA//eiXkYAS2NKxim6Ppr1HcZdjU1f6H+zyQzC7OdPkAh7wsVXpSr1aq+giD
G4tNtI6nsFokpA5CMhDf+ffBofKmFY5plk9zyQHr43N/RS5G6pcb2LHk0mQqgIdB
EsZRRD75Na4uGDWjuNHRmsasPTsc9qyW7FLckjwUsVmk9foAoiLYYaTsilsEGqXD
Bl/Z6PaQXvdd8txbcP6dOXfhVT06b+RWcnHI06KQrmFkZjZQh/7bCIeCVwNbXr7d
Obo8SVzCrQbTONei57AkyuRfnPqBfP61k8rQtcDUmCckQQfyaRwoW2nDIewOPfIH
xfvM137to2GEI2RR1TpWmGfu3iQzgC71f4svdX9Tyi5N7aFmfud7LZs6/Un3IdVk
ZH9/AmRzeH6hKllqSv/6WuhjsTNvr0bOzGbskkhqlLga2tml08gHFYOMWRJb/bRz
N8FZMhHzFoc0hsG8SU9uC+OeW+y5NdqpbRnQwgABmAiKEpgAPnABTsr0HjyxvjY+
uCUdvMMHvnTxTjNEZ3Q+UQ2VsSoZzPbW9Y4PuM0XxxmTI8htdn4uIhy9dLNPsJmB
eTE8aov/1uKq9VMsYC8wcx5vLMaR7/O/9XstP+r6PaZwiLlyrKHGexV4O52sj6LC
qGAN3VUF+8EsdcsV781H0F86PANhyBgEYTGDrnItTGe3/vAPjCo=
=S/Vn
-----END PGP SIGNATURE-----
"""
                        data = io.BytesIO(sig)
                        # Keep the tar header size in sync with the
                        # replacement content.
                        f.size = len(sig)
                        tar_2.addfile(f, data)
                        data.close()
                    else:
                        tar_2.addfile(f, tar_1.extractfile(f))

        binpkg_2 = gpkg(settings, "test",
                        os.path.join(tmpdir, "test-2.gpkg.tar"))
        self.assertRaises(InvalidSignature, binpkg_2.decompress,
                          os.path.join(tmpdir, "test"))
    finally:
        shutil.rmtree(tmpdir)
        playground.cleanup()
def test_gpkg_get_metadata_url_unknown_signature(self):
    """Fetching metadata over HTTP from a gpkg whose metadata member was
    replaced by a signature from an unknown key must raise
    InvalidSignature."""
    if sys.version_info.major < 3:
        self.skipTest("Not support Python 2")
    if sys.version_info.major == 3 and sys.version_info.minor <= 6:
        self.skipTest("http server not support change root dir")

    playground = ResolverPlayground(
        user_config={
            "make.conf": (
                'BINPKG_COMPRESS="gzip"',
                'FEATURES="${FEATURES} binpkg-signing '
                'binpkg-request-signature"',
            ),
        })
    tmpdir = tempfile.mkdtemp()

    try:
        settings = playground.settings
        gpg = GPG(settings)
        gpg.unlock()

        # Try a handful of random ports; keep the server object alive
        # for the duration of the test.
        for _ in range(0, 5):
            port = random.randint(30000, 60000)
            try:
                server = self.start_http_server(tmpdir, port)
            except OSError:
                continue
            break
        else:
            # Previously a total bind failure fell through silently and
            # produced a confusing connection error later; fail fast.
            self.fail("could not bind http server to any tested port")

        orig_full_path = os.path.join(tmpdir, "orig/")
        os.makedirs(orig_full_path)

        with open(os.path.join(orig_full_path, "test"), "wb") as test_file:
            test_file.write(urandom(1048576))

        gpkg_file_loc = os.path.join(tmpdir, "test-1.gpkg.tar")
        test_gpkg = gpkg(settings, "test", gpkg_file_loc)

        meta = {
            "test1": b"{abcdefghijklmnopqrstuvwxyz, 1234567890}",
            "test2": urandom(102400),
        }

        test_gpkg.compress(os.path.join(tmpdir, "orig"), meta)

        # Re-pack the archive with the metadata member replaced by an
        # armored signature made by a key unknown to the verifier.
        with tarfile.open(os.path.join(tmpdir, "test-1.gpkg.tar"),
                          "r") as tar_1:
            with tarfile.open(os.path.join(tmpdir, "test-2.gpkg.tar"),
                              "w") as tar_2:
                for f in tar_1.getmembers():
                    if f.name == "test/metadata.tar.gz":
                        sig = b"""
-----BEGIN PGP SIGNATURE-----

iHUEABYIAB0WIQRVhCbPGi/rhGTq4nV+k2dcK9uyIgUCXw4ehAAKCRB+k2dcK9uy
IkCfAP49AOYjzuQPP0n5P0SGCINnAVEXN7QLQ4PurY/lt7cT2gEAq01stXjFhrz5
87Koh+ND2r5XfQsz3XeBqbb/BpmbEgo=
=sc5K
-----END PGP SIGNATURE-----
"""
                        data = io.BytesIO(sig)
                        # Keep the tar header size in sync with the
                        # replacement content.
                        f.size = len(sig)
                        tar_2.addfile(f, data)
                        data.close()
                    else:
                        tar_2.addfile(f, tar_1.extractfile(f))

        test_gpkg = gpkg(settings, "test")
        self.assertRaises(
            InvalidSignature,
            test_gpkg.get_metadata_url,
            "http://127.0.0.1:" + str(port) + "/test-2.gpkg.tar",
        )
    finally:
        shutil.rmtree(tmpdir)
        playground.cleanup()