def test_sandbox(self):
    """Sandboxed spawn should inject libsandbox.so through LD_PRELOAD."""
    script = self.generate_script("spawn-sandbox.sh", "echo $LD_PRELOAD")
    code, lines = spawn.spawn_get_output(script, spawn_type=spawn.spawn_sandbox)
    assert lines, "no output; exit code was %s; script location %s" % (code, script)
    preloaded = [os.path.basename(entry.strip()) for entry in lines[0].split()]
    assert "libsandbox.so" in preloaded
    os.unlink(script)
def regen(self, binary, basepath):
    """Regenerate the info 'dir' index under *basepath*.

    Runs *binary* (install-info style) per file and yields the paths whose
    run produced unexpected output — presumably failures; TODO confirm
    against callers.
    """
    ignores = ("dir", "dir.old")
    try:
        entries = listdir_files(basepath)
    except FileNotFoundError:
        return
    if self.should_skip_directory(basepath, entries):
        return
    # wipe old indexes.
    for stale in set(ignores).intersection(entries):
        os.remove(pjoin(basepath, stale))
    index = pjoin(basepath, 'dir')
    for entry in entries:
        if entry in ignores or entry.startswith("."):
            continue
        target = pjoin(basepath, entry)
        ret, data = spawn.spawn_get_output(
            [binary, '--quiet', target, '--dir-file', index],
            collect_fds=(1, 2), split_lines=False)
        # silent runs and known-benign messages are fine; anything else is reported
        benign = (not data or "already exists" in data
                  or "warning: no info dir entry" in data)
        if not benign:
            yield target
def _install_cmd(self):
    """Install files using `install` command.

    Coroutine: each send() provides an iterable of (source, dest) tuples
    of files to install.

    Raises:
        IpcCommandError on failure
    """
    while True:
        files = (yield)
        # `install` forcibly resolves symlinks so split them out
        files, symlinks = partition(
            files, predicate=lambda x: os.path.islink(x[0]))
        self.install_symlinks(symlinks)
        # group and install sets of files by destination to decrease `install` calls
        files = sorted(self._prefix_targets(files), key=itemgetter(1))
        for dest, files_group in itertools.groupby(files, itemgetter(1)):
            sources = list(path for path, _ in files_group)
            command = ['install'] + self.opts.insoptions + sources + [dest]
            ret, output = spawn.spawn_get_output(command, collect_fds=(2,))
            # BUGFIX: `install` exits nonzero on failure; the original
            # `if not ret:` raised on success and swallowed real errors.
            if ret:
                raise IpcCommandError('\n'.join(output), code=ret)
def test_get_output(self):
    """spawn_get_output must return (exit code, captured lines) for bash scripts."""
    filename = "spawn-getoutput.sh"
    cases = (
        (0, ["dar\n"], "echo dar\n", {}),
        (0, ["dar"], "echo -n dar", {}),
        (1, ["blah\n", "dar\n"], "echo blah\necho dar\nexit 1", {}),
        # stderr redirected away from the collected fd yields no output
        (0, [], "echo dar 1>&2", {"fd_pipes": {1: 1, 2: self.null}}),
    )
    for exit_code, expected, text, kwargs in cases:
        script = self.generate_script(filename, text)
        result = spawn.spawn_get_output(
            script, spawn_type=spawn.spawn_bash, **kwargs)
        assert result == (exit_code, expected)
        os.unlink(script)
def get_commit_hash(repo_location, commit='origin/HEAD'):
    """Retrieve a git repo's commit hash for a specific commit object.

    :param repo_location: path to the root of a git repo
    :param commit: commit-ish object to resolve
    :raises ValueError: if *repo_location* is not a git repo or the commit
        can't be resolved
    :return: the commit hash string
    """
    if not os.path.exists(pjoin(repo_location, '.git')):
        # previously raised a bare ValueError with no context at all
        raise ValueError(f'not a git repo: {repo_location!r}')
    ret, out = spawn_get_output(['git', 'rev-parse', commit], cwd=repo_location)
    # guard `not out` as well so an empty result raises ValueError
    # instead of IndexError below
    if ret != 0 or not out:
        raise ValueError(f'failed retrieving {commit} commit hash '
                         f'for git repo: {repo_location}')
    return out[0].strip()
def _fallback_file(path):
    """Classify *path* by shelling out to file(1) and return its description."""
    code, lines = spawn_get_output(["file", path])
    if code != 0:
        raise ValueError(
            f"file output was non zero- ret:{code!r} out:{lines!r}")
    text = ''.join(lines)
    # file(1) prefixes its report with "<path>: "; strip that off
    if text.startswith(path):
        text = text[len(path):]
        if text.startswith(":"):
            text = text[1:]
    return text
def feed(self, eclass):
    """Syntax-check an eclass via ``bash -n``, yielding a result on error."""
    ret, err = spawn_get_output(
        ['bash', '-n', eclass.path], collect_fds=(2, ))
    if ret != 0 and err:
        lineno = 0
        messages = []
        for entry in err:
            # bash emits "path: line N: message"
            path, location, msg = entry.split(': ', 2)
            lineno = location[5:]
            messages.append(msg.strip('\n'))
        yield EclassBashSyntaxError(lineno, ': '.join(messages), eclass=eclass)
def test_umask(self):
    """A spawned bash script should report the caller's current umask."""
    script = self.generate_script(
        "spawn-umask.sh", "#!%s\numask" % BASH_BINARY)
    try:
        old_umask = os.umask(0)
        if old_umask == 0:
            # crap.
            desired = 0o22
            os.umask(desired)
        else:
            desired = 0
        reported = spawn.spawn_get_output(script)[1][0].strip()
        # compare with leading zeros stripped so octal formatting doesn't matter
        assert str(desired).lstrip("0") == reported.lstrip("0")
    finally:
        os.umask(old_umask)
def _install_dirs_cmd(self):
    """Create directories using `install` command.

    Coroutine: each send() provides an iterable of paths where directories
    should be created.

    Raises:
        IpcCommandError on failure
    """
    while True:
        dirs = (yield)
        dirs = self._prefix_targets(dirs, files=False)
        command = ['install', '-d'] + self.opts.diroptions + list(dirs)
        ret, output = spawn.spawn_get_output(command, collect_fds=(2,))
        # BUGFIX: `install` exits nonzero on failure; the original
        # `if not ret:` raised on success and swallowed real errors.
        if ret:
            raise IpcCommandError('\n'.join(output), code=ret)
def is_usable_on_filepath(cls, path):
    """Return a sync URI tuple if *path* is a usable bzr checkout, else None."""
    bzr_path = os.path.join(path, '.bzr')
    if cls.disabled or not os.path.isdir(bzr_path):
        return None
    code, data = spawn_get_output([cls.binary, "info", path])
    if code != 0:
        # should alert the user somehow
        return None
    for raw in data:
        fields = raw.strip().split(":", 1)
        if len(fields) == 2 and fields[0] == 'parent branch':
            uri = f"bzr+{fields[1].strip()}"
            return (cls._rewrite_uri_from_stat(bzr_path, uri), )
    return None
def check_args(cls, parser, namespace):
    """Translate --commits into package restrictions from git diff output."""
    if not namespace.commits:
        return
    if namespace.targets:
        targets = ' '.join(namespace.targets)
        parser.error('--commits is mutually exclusive with '
                     f'target{_pl(namespace.targets)}: {targets}')
    repo = namespace.target_repo
    git_cmd = ['git', 'diff', 'origin', '--name-only'] + list(repo.categories)
    ret, out = spawn_get_output(git_cmd, cwd=repo.location)
    if ret != 0:
        parser.error(
            'git not available to determine targets for --commits')
    if not out:
        # no pkg changes exist
        parser.exit()
    # first two path components of each changed file form a cat/pkg atom
    pkgs = sorted(
        atom_cls(os.sep.join(line.split(os.sep, 2)[:2])) for line in out)
    combined_restrict = packages.OrRestriction(*pkgs)
    namespace.restrictions = [(base.package_scope, combined_restrict)]
def test_sandbox_empty_dir(self):
    """sandbox gets pissy if it's ran from a nonexistent dir

    this verifies our fix works.
    """
    script = self.generate_script("spawn-sandbox.sh", "echo $LD_PRELOAD")
    doomed = os.path.join(self.dir, "dar")
    os.mkdir(doomed)
    try:
        cwd = os.getcwd()
    except OSError:
        cwd = None
    try:
        # enter the dir, then pull it out from under ourselves
        os.chdir(doomed)
        os.rmdir(doomed)
        output = spawn.spawn_get_output(
            script, spawn_type=spawn.spawn_sandbox, cwd='/')[1][0]
        preloaded = [os.path.basename(token.strip()) for token in output.split()]
        assert "libsandbox.so" in preloaded
        os.unlink(script)
    finally:
        if cwd is not None:
            os.chdir(cwd)
def run(self, args, user=False):
    """Apply sets of patches via the configured patch command.

    :param args: iterable of (path, patches) pairs; *path* is the directory
        the patches came from (or None) and *patches* the patch file paths
    :param user: if True, these are user patches and messages go through the
        observer's warn channel instead of info
    :raises IpcCommandError: if a patch fails to apply or can't be read
    """
    if user:
        patch_type = 'user patches'
        output_func = self.observer.warn
    else:
        patch_type = 'patches'
        output_func = self.observer.info
    # capture both stdout and stderr from the patch process
    spawn_kwargs = {'collect_fds': (1, 2)}
    if self.op.userpriv:
        # drop privileges to the portage user when userpriv is enabled
        spawn_kwargs['uid'] = os_data.portage_uid
        spawn_kwargs['gid'] = os_data.portage_gid
    for path, patches in args:
        prefix = ''
        if path is not None:
            output_func(f'Applying {patch_type} from {path!r}:')
            # indent per-patch lines under the directory header
            prefix = ' '
        for patch in patches:
            if path is None:
                output_func(
                    f'{prefix}Applying {os.path.basename(patch)}...')
            else:
                output_func(f'{prefix}{os.path.basename(patch)}...')
            self.observer.flush()
            try:
                # feed the patch file to the patch command's stdin
                with open(patch) as f:
                    ret, output = spawn.spawn_get_output(
                        self.patch_cmd + self.patch_opts,
                        fd_pipes={0: f.fileno()}, **spawn_kwargs)
                    if ret:
                        filename = os.path.basename(patch)
                        msg = f'applying {filename!r} failed: {output[0]}'
                        raise IpcCommandError(msg, code=ret)
            except OSError as e:
                # open() failed; surface it as an IPC command error
                raise IpcCommandError(
                    f'failed reading patch file: {patch!r}: {e.strerror}')
def __init__(self, domain, pkg, verified_files, eclass_cache,
             observer=None, **kwargs):
    """
    :param pkg: :obj:`pkgcore.ebuild.ebuild_src.package` instance
        we'll be building
    :param domain_settings: dict bled down from the domain configuration;
        basically initial env
    :param eclass_cache: the :class:`pkgcore.ebuild.eclass_cache`
        we'll be using
    :param verified_files: mapping of fetchables mapped to their disk location
    """
    use = kwargs.get("use_override", pkg.use)
    domain_settings = domain.settings
    format.build.__init__(self, domain, pkg, verified_files, observer)
    ebd.__init__(
        self, pkg, initial_env=domain_settings,
        features=domain_settings["FEATURES"], **kwargs)
    self.env["FILESDIR"] = pjoin(os.path.dirname(pkg.ebuild.path), "files")
    self.eclass_cache = eclass_cache
    self.env["ECLASSDIR"] = eclass_cache.eclassdir
    # this needs to be deprecated and dropped from future EAPIs
    self.env["PORTDIR"] = eclass_cache.location
    self.run_test = self.feat_or_bool("test", domain_settings)
    self.allow_failed_test = self.feat_or_bool(
        "test-fail-continue", domain_settings)
    # RESTRICT=test wins; otherwise the test use flag must be enabled
    if "test" in self.restrict:
        self.run_test = False
    elif "test" not in use:
        if self.run_test:
            logger.warning(
                "disabling test for %s due to test use flag being disabled", pkg)
            self.run_test = False
    # XXX minor hack
    path = self.env["PATH"].split(":")
    # wire up distcc/ccache support: set the lowercase attr, export the
    # *_DIR var, and prepend the wrapper bin dir to PATH when enabled
    for s, default in (("DISTCC", ".distcc"), ("CCACHE", "ccache")):
        b = (self.feat_or_bool(s, domain_settings)
             and s not in self.restrict)
        setattr(self, s.lower(), b)
        if b:
            # looks weird I realize, but
            # pjoin("/foor/bar", "/barr/foo") == "/barr/foo"
            # and pjoin("/foo/bar", ".asdf") == "/foo/bar/.asdf"
            self.env.setdefault(s + "_DIR", pjoin(self.domain.tmpdir, default))
            # gentoo bug 355283
            libdir = self.env.get("ABI")
            if libdir is not None:
                libdir = self.env.get("LIBDIR_%s" % (libdir,))
                if libdir is not None:
                    libdir = self.env.get(libdir)
            if libdir is None:
                libdir = "lib"
            path.insert(0, "/usr/%s/%s/bin" % (libdir, s.lower()))
        else:
            # feature disabled; scrub any stale related env vars
            for y in ("_PATH", "_DIR"):
                if s + y in self.env:
                    del self.env[s+y]
    self.env["PATH"] = os.pathsep.join(path)
    # ordering must match appearance order in SRC_URI per PMS
    self.env["A"] = ' '.join(
        iter_stable_unique(x.filename for x in pkg.fetchables))
    if self.eapi.options.has_AA:
        # AA wants all fetchables, even conditional ones, from the raw pkg
        pkg = getattr(self.pkg, '_raw_pkg', self.pkg)
        self.env["AA"] = ' '.join(set(
            x.filename for x in iflatten_instance(
                pkg.fetchables, fetch.fetchable)))
    if self.eapi.options.has_KV:
        ret = spawn_get_output(['uname', '-r'])
        if ret[0] == 0:
            self.env["KV"] = ret[1][0].strip()
    if self.eapi.options.has_merge_type:
        self.env["MERGE_TYPE"] = "source"
    # available user patches for >= EAPI 6
    if self.eapi.options.user_patches:
        self.env["PKGCORE_USER_PATCHES"] = pkg.user_patches
    if self.setup_is_for_src:
        self.init_distfiles_env()
def __init__(self, pkg, initial_env=None, env_data_source=None, features=None,
             observer=None, clean=True, tmp_offset=None, use_override=None,
             allow_fetching=False):
    """
    :param pkg: :class:`pkgcore.ebuild.ebuild_src.package`
        instance this env is being setup for
    :param initial_env: initial environment to use for this ebuild
    :param env_data_source: a :obj:`snakeoil.data_source.base` instance
        to restore the environment from- used for restoring the
        state of an ebuild processing, whether for unmerging, or
        walking phases during building
    :param features: ebuild features, hold over from portage,
        will be broken down at some point
    """
    if use_override is not None:
        use = use_override
    else:
        use = pkg.use
    self.allow_fetching = allow_fetching
    # a subclass may have bound an observer before calling up to us
    if not hasattr(self, "observer"):
        self.observer = observer
    if not pkg.eapi.is_supported:
        raise TypeError(
            "package %s uses an unsupported eapi: %s" % (pkg, pkg.eapi))
    if initial_env is not None:
        # copy.
        self.env = dict(initial_env)
        for x in ("USE", "ACCEPT_LICENSE"):
            if x in self.env:
                del self.env[x]
    else:
        self.env = {}
    if "PYTHONPATH" in os.environ:
        self.env["PYTHONPATH"] = os.environ["PYTHONPATH"]
    if features is None:
        features = self.env.get("FEATURES", ())
    # XXX: note this is just EAPI 3 compatibility; not full prefix, soon..
    self.env["ROOT"] = self.domain.root
    self.prefix_mode = (
        pkg.eapi.options.prefix_capable or 'force-prefix' in features)
    self.env["PKGCORE_PREFIX_SUPPORT"] = 'false'
    self.prefix = '/'
    if self.prefix_mode:
        self.prefix = self.domain.prefix
        self.env['EPREFIX'] = self.prefix.rstrip('/')
        self.env['EROOT'] = abspath(
            pjoin(self.domain.root, self.prefix.lstrip('/'))).rstrip('/') + '/'
        self.env["PKGCORE_PREFIX_SUPPORT"] = 'true'
    # set the list of internally implemented EAPI specific functions that
    # shouldn't be exported
    if os.path.exists(pjoin(const.EBD_PATH, 'funcnames', str(pkg.eapi))):
        with open(pjoin(const.EBD_PATH, 'funcnames', str(pkg.eapi)), 'r') as f:
            eapi_funcs = f.readlines()
    else:
        ret, eapi_funcs = spawn_get_output(
            [pjoin(const.EBD_PATH, 'generate_eapi_func_list.bash'),
             str(pkg.eapi)])
        if ret != 0:
            raise Exception(
                "failed to generate list of EAPI %s specific functions"
                % str(pkg.eapi))
    self.env["PKGCORE_EAPI_FUNCS"] = ' '.join(x.strip() for x in eapi_funcs)
    self.env_data_source = env_data_source
    if (env_data_source is not None
            and not isinstance(env_data_source, data_source.base)):
        raise TypeError(
            "env_data_source must be None, or a pkgcore.data_source.base "
            "derivative: %s: %s" % (
                env_data_source.__class__, env_data_source))
    self.features = set(x.lower() for x in features)
    self.env["FEATURES"] = ' '.join(sorted(self.features))
    # anchored alternation over the pkg's effective IUSE; the replace keeps
    # any escaped '.*' wildcards functional
    iuse_effective_regex = (re.escape(x) for x in pkg.iuse_effective)
    iuse_effective_regex = "^(%s)$" % "|".join(iuse_effective_regex)
    iuse_effective_regex = iuse_effective_regex.replace("\\.\\*", ".*")
    self.env["PKGCORE_IUSE_EFFECTIVE"] = iuse_effective_regex
    expected_ebuild_env(pkg, self.env, env_source_override=self.env_data_source)
    self.env["PKGCORE_FINALIZED_RESTRICT"] = ' '.join(
        str(x) for x in pkg.restrict)
    self.restrict = pkg.restrict
    for x in ("sandbox", "userpriv"):
        setattr(self, x, self.feat_or_bool(x) and not (x in self.restrict))
    # dropping privileges is only possible when running as root
    if self.userpriv and os.getuid() != 0:
        self.userpriv = False
    if "PORT_LOGDIR" in self.env:
        self.logging = pjoin(
            self.env["PORT_LOGDIR"],
            "%s:%s:%s.log" % (
                pkg.cpvstr, self.__class__.__name__,
                time.strftime("%Y%m%d-%H%M%S", time.localtime())))
        del self.env["PORT_LOGDIR"]
    else:
        self.logging = False
    self.env["XARGS"] = xargs
    self.bashrc = self.env.pop("bashrc", ())
    self.pkg = pkg
    self.eapi = pkg.eapi
    # BUGFIX: py2 remnants replaced — dict.iteritems() and basestring don't
    # exist on py3; drop any env values that aren't plain strings
    wipes = [k for k, v in self.env.items()
             if not isinstance(v, str)]
    for k in wipes:
        del self.env[k]
    self.set_op_vars(tmp_offset)
    self.clean_at_start = clean
    self.clean_needed = False
def KV(self):
    """The version of the running kernel."""
    code, out = spawn_get_output(['uname', '-r'])
    if code != 0:
        raise ValueError('unknown kernel version')
    return out[0].strip()