def _start(self):
    """Prepare the build directories and queue the pkg_setup phase."""
    pkg = self.pkg
    scheduler = self.scheduler
    settings = self.settings
    # cleanup flag is passed through to prepare_build_dirs (0 here).
    cleanup = 0
    portage.prepare_build_dirs(pkg.root, settings, cleanup)
    # "A" was already computed/exported in the pkg config; reuse it to
    # populate the fake DISTDIR.
    alist = settings.configdict["pkg"].get("A", "").split()
    _prepare_fake_distdir(settings, alist)
    if eapi_exports_replace_vars(settings['EAPI']):
        # REPLACING_VERSIONS: versions of vartree matches for the same
        # slot atom or the exact cpv being built.
        vardb = pkg.root_config.trees['vartree'].dbapi
        settings["REPLACING_VERSIONS"] = " ".join(
            set(portage.versions.cpv_getversion(match)
                for match in vardb.match(pkg.slot_atom) +
                vardb.match('=' + pkg.cpv)))
    setup_phase = EbuildPhase(background=self.background,
        phase="setup", scheduler=scheduler,
        settings=settings)
    setup_phase.addExitListener(self._setup_exit)
    self._task_queued(setup_phase)
    # Setup phases are serialized through the scheduler.
    self.scheduler.scheduleSetup(setup_phase)
def _start(self):
    """Prepare build dirs, compute A/AA distfile lists and queue pkg_setup."""
    pkg = self.pkg
    scheduler = self.scheduler
    settings = self.settings
    cleanup = 0
    portage.prepare_build_dirs(pkg.root, settings, cleanup)
    portdb = pkg.root_config.trees['porttree'].dbapi
    ebuild_path = settings['EBUILD']
    # The ebuild path is <tree>/<category>/<package>/<pkg>.ebuild, so
    # three dirname calls recover the tree root.
    mytree = os.path.dirname(os.path.dirname(os.path.dirname(ebuild_path)))
    # "A": distfiles needed with the currently enabled USE flags;
    # "AA": fetch map with no useflags restriction (all distfiles).
    alist = portdb.getFetchMap(pkg.cpv, useflags=pkg.use.enabled, mytree=mytree)
    aalist = portdb.getFetchMap(pkg.cpv, mytree=mytree)
    settings.configdict["pkg"]["A"] = " ".join(alist)
    settings.configdict["pkg"]["AA"] = " ".join(aalist)
    _prepare_fake_distdir(settings, alist)
    if eapi_exports_replace_vars(settings['EAPI']):
        # REPLACING_VERSIONS: versions of vartree matches for the same
        # slot atom or the exact cpv being built.
        vardb = pkg.root_config.trees['vartree'].dbapi
        settings["REPLACING_VERSIONS"] = " ".join(
            set(portage.versions.cpv_getversion(match)
                for match in vardb.match(pkg.slot_atom) +
                vardb.match('=' + pkg.cpv)))
    setup_phase = EbuildPhase(background=self.background,
        phase="setup", scheduler=scheduler,
        settings=settings)
    setup_phase.addExitListener(self._setup_exit)
    self._current_task = setup_phase
    self.scheduler.scheduleSetup(setup_phase)
def _prefetch_exit(self, prefetcher):
    """Continue after the background prefetcher: fetch the binary package
    (via the scheduler's fetch queue when remote), then _fetcher_exit."""
    if self._was_cancelled():
        self.wait()
        return

    pkg = self.pkg
    pkg_count = self.pkg_count
    if not (self.opts.pretend or self.opts.fetchonly):
        self._build_dir.lock()
        # Initialize PORTAGE_LOG_FILE (clean_log won't work without it).
        portage.prepare_build_dirs(self.settings["ROOT"], self.settings, 1)
        # If necessary, discard old log so that we don't
        # append to it.
        self._build_dir.clean_log()
    fetcher = BinpkgFetcher(background=self.background,
        logfile=self.settings.get("PORTAGE_LOG_FILE"), pkg=self.pkg,
        pretend=self.opts.pretend, scheduler=self.scheduler)

    if self.opts.getbinpkg and self._bintree.isremote(pkg.cpv):
        msg = " --- (%s of %s) Fetching Binary (%s::%s)" %\
            (pkg_count.curval, pkg_count.maxval, pkg.cpv,
            fetcher.pkg_path)
        short_msg = "emerge: (%s of %s) %s Fetch" % \
            (pkg_count.curval, pkg_count.maxval, pkg.cpv)
        self.logger.log(msg, short_msg=short_msg)

        # Allow the Scheduler's fetch queue to control the
        # number of concurrent fetchers.
        fetcher.addExitListener(self._fetcher_exit)
        self._task_queued(fetcher)
        self.scheduler.fetch.schedule(fetcher)
        return

    # Local/non-remote case: skip the fetch queue entirely.
    self._fetcher_exit(fetcher)
def _start(self):
    """Prepare the build directories and queue the pkg_setup phase."""
    settings = self.settings
    portage.prepare_build_dirs(self.pkg.root, settings, 0)
    if eapi_exports_replace_vars(settings["EAPI"]):
        # Collect versions of installed matches (same slot atom, or the
        # exact cpv) that this build will replace.
        installed_db = self.pkg.root_config.trees["vartree"].dbapi
        matches = (installed_db.match(self.pkg.slot_atom)
            + installed_db.match("=" + self.pkg.cpv))
        replaced = {portage.versions.cpv_getversion(cpv) for cpv in matches}
        settings["REPLACING_VERSIONS"] = " ".join(replaced)
    phase = EbuildPhase(
        background=self.background,
        phase="setup",
        scheduler=self.scheduler,
        settings=settings,
    )
    phase.addExitListener(self._setup_exit)
    self._task_queued(phase)
    # pkg_setup phases are serialized through the scheduler.
    self.scheduler.scheduleSetup(phase)
def _start_unmerge(self, lock_task):
    """Begin unmerging once the build-dir lock task has completed."""
    self._assert_current(lock_task)
    if lock_task.cancelled:
        self._default_final_exit(lock_task)
        return

    # Raise any exception produced while acquiring the lock.
    lock_task.future.result()
    portage.prepare_build_dirs(
        settings=self.settings, cleanup=True)

    # Output only gets logged if it comes after prepare_build_dirs()
    # which initializes PORTAGE_LOG_FILE.
    retval, pkgmap = _unmerge_display(self.pkg.root_config,
        self.opts, "unmerge", [self.pkg.cpv], clean_delay=0,
        writemsg_level=self._writemsg_level)

    if retval != os.EX_OK:
        self._async_unlock_builddir(returncode=retval)
        return

    self._writemsg_level(">>> Unmerging %s...\n" % (self.pkg.cpv,),
        noiselevel=-1)
    self._emergelog("=== Unmerging... (%s)" % (self.pkg.cpv,))

    cat, pf = portage.catsplit(self.pkg.cpv)
    unmerge_task = MergeProcess(
        mycat=cat, mypkg=pf, settings=self.settings,
        treetype="vartree", vartree=self.pkg.root_config.trees["vartree"],
        scheduler=self.scheduler, background=self.background,
        mydbapi=self.pkg.root_config.trees["vartree"].dbapi,
        prev_mtimes=self.ldpath_mtimes,
        logfile=self.settings.get("PORTAGE_LOG_FILE"), unmerge=True)

    self._start_task(unmerge_task, self._unmerge_exit)
def _start(self):
    """Prepare build dirs, compute A/AA distfile lists and queue pkg_setup."""
    pkg = self.pkg
    scheduler = self.scheduler
    settings = self.settings
    cleanup = 0
    portage.prepare_build_dirs(pkg.root, settings, cleanup)
    portdb = pkg.root_config.trees['porttree'].dbapi
    ebuild_path = settings['EBUILD']
    # <tree>/<category>/<package>/<pkg>.ebuild -> three dirname calls
    # recover the tree root.
    mytree = os.path.dirname(os.path.dirname(
        os.path.dirname(ebuild_path)))
    # "A": distfiles needed with the enabled USE flags; "AA": fetch map
    # with no useflags restriction.
    alist = portdb.getFetchMap(pkg.cpv,
        useflags=pkg.use.enabled, mytree=mytree)
    aalist = portdb.getFetchMap(pkg.cpv, mytree=mytree)
    settings.configdict["pkg"]["A"] = " ".join(alist)
    settings.configdict["pkg"]["AA"] = " ".join(aalist)
    _prepare_fake_distdir(settings, alist)
    if eapi_exports_replace_vars(settings['EAPI']):
        # Versions of vartree matches (same slot atom or exact cpv) that
        # this build replaces.
        vardb = pkg.root_config.trees['vartree'].dbapi
        settings["REPLACING_VERSIONS"] = " ".join(
            set(portage.versions.cpv_getversion(match)
                for match in vardb.match(pkg.slot_atom) +
                vardb.match('=' + pkg.cpv)))
    setup_phase = EbuildPhase(background=self.background,
        phase="setup", scheduler=scheduler,
        settings=settings)
    setup_phase.addExitListener(self._setup_exit)
    self._current_task = setup_phase
    self.scheduler.scheduleSetup(setup_phase)
def _clean_exit(self, clean_phase):
    """After the clean phase: recreate build/image/info dirs, unpack the
    binary package's xpak metadata, record its md5, then extract the
    saved build environment."""
    if self._default_exit(clean_phase) != os.EX_OK:
        self._unlock_builddir()
        self.wait()
        return

    dir_path = self._build_dir.dir_path
    infloc = self._infloc
    pkg = self.pkg
    pkg_path = self._pkg_path

    dir_mode = 0o755
    for mydir in (dir_path, self._image_dir, infloc):
        portage.util.ensure_dirs(mydir, uid=portage.data.portage_uid,
            gid=portage.data.portage_gid, mode=dir_mode)

    # This initializes PORTAGE_LOG_FILE.
    portage.prepare_build_dirs(self.settings["ROOT"], self.settings, 1)
    self._writemsg_level(">>> Extracting info\n")

    pkg_xpak = portage.xpak.tbz2(self._pkg_path)
    # Record which essential keys the xpak archive failed to provide.
    check_missing_metadata = ("CATEGORY", "PF")
    missing_metadata = set()
    for k in check_missing_metadata:
        v = pkg_xpak.getfile(_unicode_encode(k,
            encoding=_encodings['repo.content']))
        if not v:
            missing_metadata.add(k)

    pkg_xpak.unpackinfo(infloc)
    # Fill in missing keys from the Package instance itself.
    for k in missing_metadata:
        if k == "CATEGORY":
            v = pkg.category
        elif k == "PF":
            v = pkg.pf
        else:
            continue

        f = codecs.open(_unicode_encode(os.path.join(infloc, k),
            encoding=_encodings['fs'], errors='strict'),
            mode='w', encoding=_encodings['content'],
            errors='replace')
        try:
            f.write(v + "\n")
        finally:
            f.close()

    # Store the md5sum in the vdb.
    f = codecs.open(_unicode_encode(os.path.join(infloc, 'BINPKGMD5'),
        encoding=_encodings['fs'], errors='strict'),
        mode='w', encoding=_encodings['content'], errors='strict')
    try:
        f.write(str(portage.checksum.perform_md5(pkg_path)) + "\n")
    finally:
        f.close()

    env_extractor = BinpkgEnvExtractor(background=self.background,
        scheduler=self.scheduler, settings=self.settings)

    self._start_task(env_extractor, self._env_extractor_exit)
def _start_fetcher(self, lock_task=None):
    """Fetch the binary package, optionally after a build-dir lock task.

    When called as an exit listener of the lock task, the build dir is
    prepared first; when called directly (pretend/fetchonly), the lock
    steps are skipped.
    """
    if lock_task is not None:
        self._assert_current(lock_task)
        # Raise any exception produced while acquiring the lock.
        lock_task.future.result()

        # Initialize PORTAGE_LOG_FILE (clean_log won't work without it).
        portage.prepare_build_dirs(self.settings["ROOT"], self.settings, 1)
        # If necessary, discard old log so that we don't
        # append to it.
        self._build_dir.clean_log()

    pkg = self.pkg
    pkg_count = self.pkg_count
    fetcher = BinpkgFetcher(background=self.background,
        logfile=self.settings.get("PORTAGE_LOG_FILE"), pkg=self.pkg,
        pretend=self.opts.pretend, scheduler=self.scheduler)

    if self.opts.getbinpkg and self._bintree.isremote(pkg.cpv):
        msg = " --- (%s of %s) Fetching Binary (%s::%s)" %\
            (pkg_count.curval, pkg_count.maxval, pkg.cpv,
            fetcher.pkg_path)
        short_msg = "emerge: (%s of %s) %s Fetch" % \
            (pkg_count.curval, pkg_count.maxval, pkg.cpv)
        self.logger.log(msg, short_msg=short_msg)

        # Allow the Scheduler's fetch queue to control the
        # number of concurrent fetchers.
        fetcher.addExitListener(self._fetcher_exit)
        self._task_queued(fetcher)
        self.scheduler.fetch.schedule(fetcher)
        return

    # Local/non-remote case: skip the fetch queue entirely.
    self._fetcher_exit(fetcher)
def _prefetch_exit(self, prefetcher):
    """Continue after the background prefetcher: fetch the binary package
    (via _start_task when remote), then continue in _fetcher_exit.

    Fix: prepare_build_dirs() must run BEFORE clean_log(), because
    clean_log() depends on PORTAGE_LOG_FILE, which prepare_build_dirs()
    initializes — in the previous order clean_log() was a no-op.
    """
    pkg = self.pkg
    pkg_count = self.pkg_count
    if not (self.opts.pretend or self.opts.fetchonly):
        self._build_dir.lock()
        # Initialize PORTAGE_LOG_FILE (clean_log won't work without it).
        portage.prepare_build_dirs(self.settings["ROOT"], self.settings, 1)
        # If necessary, discard old log so that we don't
        # append to it.
        self._build_dir.clean_log()
    fetcher = BinpkgFetcher(background=self.background,
        logfile=self.settings.get("PORTAGE_LOG_FILE"), pkg=self.pkg,
        pretend=self.opts.pretend, scheduler=self.scheduler)
    pkg_path = fetcher.pkg_path
    self._pkg_path = pkg_path

    if self.opts.getbinpkg and self._bintree.isremote(pkg.cpv):
        msg = " --- (%s of %s) Fetching Binary (%s::%s)" %\
            (pkg_count.curval, pkg_count.maxval, pkg.cpv, pkg_path)
        short_msg = "emerge: (%s of %s) %s Fetch" % \
            (pkg_count.curval, pkg_count.maxval, pkg.cpv)
        self.logger.log(msg, short_msg=short_msg)
        self._start_task(fetcher, self._fetcher_exit)
        return

    # Local/non-remote case: skip the fetch step entirely.
    self._fetcher_exit(fetcher)
def _start(self):
    """Prepare the build directories and queue the pkg_setup phase.

    Fix: removed the unused locals ``portdb`` and ``ebuild_path`` —
    leftovers from a variant that recomputed the fetch map; this version
    reuses the "A" value already present in the pkg config.
    """
    pkg = self.pkg
    scheduler = self.scheduler
    settings = self.settings
    cleanup = 0
    portage.prepare_build_dirs(pkg.root, settings, cleanup)
    # "A" was already computed/exported in the pkg config; reuse it to
    # populate the fake DISTDIR.
    alist = settings.configdict["pkg"].get("A", "").split()
    _prepare_fake_distdir(settings, alist)
    if eapi_exports_replace_vars(settings['EAPI']):
        # REPLACING_VERSIONS: versions of vartree matches for the same
        # slot atom or the exact cpv being built.
        vardb = pkg.root_config.trees['vartree'].dbapi
        settings["REPLACING_VERSIONS"] = " ".join(
            set(portage.versions.cpv_getversion(match)
                for match in vardb.match(pkg.slot_atom) +
                vardb.match('=' + pkg.cpv)))
    setup_phase = EbuildPhase(background=self.background,
        phase="setup", scheduler=scheduler,
        settings=settings)
    setup_phase.addExitListener(self._setup_exit)
    self._task_queued(setup_phase)
    self.scheduler.scheduleSetup(setup_phase)
def _install_exit(self, install_phase):
    """After the install phase, either re-run pkg_setup or finish.

    NOTE(review): when ${D} does not exist the method re-runs the setup
    phase (presumably to recreate the build environment) — confirm the
    intent against callers; otherwise it completes with EX_OK.
    """
    if self._default_exit(install_phase) != os.EX_OK:
        self.wait()
        return
    if not os.path.exists(self.settings["D"]):
        pkg = self.pkg
        scheduler = self.scheduler
        settings = self.settings
        cleanup = 1
        # This initializes PORTAGE_LOG_FILE.
        portage.prepare_build_dirs(pkg.root, settings, cleanup)
        setup_phase = EbuildPhase(background=self.background,
            pkg=pkg, phase="setup", scheduler=scheduler,
            settings=settings, tree=self._tree)
        setup_phase.addExitListener(self._setup_exit)
        self._current_task = setup_phase
        self.scheduler.scheduleSetup(setup_phase)
    else:
        self.returncode = os.EX_OK
        self.wait()
        return
def _prefetch_exit(self, prefetcher):
    """Continue after the background prefetcher: fetch the binary package
    (via _start_task when remote), then continue in _fetcher_exit."""
    pkg = self.pkg
    pkg_count = self.pkg_count
    if not (self.opts.pretend or self.opts.fetchonly):
        self._build_dir.lock()
        # Initialize PORTAGE_LOG_FILE (clean_log won't work without it).
        portage.prepare_build_dirs(self.settings["ROOT"], self.settings, 1)
        # If necessary, discard old log so that we don't
        # append to it.
        self._build_dir.clean_log()
    fetcher = BinpkgFetcher(background=self.background,
        logfile=self.settings.get("PORTAGE_LOG_FILE"), pkg=self.pkg,
        pretend=self.opts.pretend, scheduler=self.scheduler)
    pkg_path = fetcher.pkg_path
    self._pkg_path = pkg_path

    if self.opts.getbinpkg and self._bintree.isremote(pkg.cpv):
        msg = " --- (%s of %s) Fetching Binary (%s::%s)" %\
            (pkg_count.curval, pkg_count.maxval, pkg.cpv, pkg_path)
        short_msg = "emerge: (%s of %s) %s Fetch" % \
            (pkg_count.curval, pkg_count.maxval, pkg.cpv)
        self.logger.log(msg, short_msg=short_msg)
        self._start_task(fetcher, self._fetcher_exit)
        return

    # Local/non-remote case: skip the fetch step entirely.
    self._fetcher_exit(fetcher)
def _start(self):
    """Prepare the environment and start unmerging self.pkg.

    Finishes early with EX_OK when the package's vdb entry is already
    gone.
    """
    vardb = self.pkg.root_config.trees["vartree"].dbapi
    dbdir = vardb.getpath(self.pkg.cpv)
    if not os.path.exists(dbdir):
        # Apparently the package got uninstalled
        # already, so we can safely return early.
        self.returncode = os.EX_OK
        self._async_wait()
        return

    self.settings.setcpv(self.pkg)
    cat, pf = portage.catsplit(self.pkg.cpv)
    myebuildpath = os.path.join(dbdir, pf + ".ebuild")

    try:
        portage.doebuild_environment(myebuildpath, "prerm",
            settings=self.settings, db=vardb)
    except UnsupportedAPIException:
        # This is safe to ignore since this function is
        # guaranteed to set PORTAGE_BUILDDIR even though
        # it raises UnsupportedAPIException. The error
        # will be logged when it prevents the pkg_prerm
        # and pkg_postrm phases from executing.
        pass

    self._builddir_lock = EbuildBuildDir(
        scheduler=self.scheduler, settings=self.settings)
    self._builddir_lock.lock()

    portage.prepare_build_dirs(
        settings=self.settings, cleanup=True)

    # Output only gets logged if it comes after prepare_build_dirs()
    # which initializes PORTAGE_LOG_FILE.
    retval, pkgmap = _unmerge_display(self.pkg.root_config,
        self.opts, "unmerge", [self.pkg.cpv], clean_delay=0,
        writemsg_level=self._writemsg_level)

    if retval != os.EX_OK:
        self._builddir_lock.unlock()
        self.returncode = retval
        self._async_wait()
        return

    self._writemsg_level(">>> Unmerging %s...\n" % (self.pkg.cpv,),
        noiselevel=-1)
    self._emergelog("=== Unmerging... (%s)" % (self.pkg.cpv,))

    unmerge_task = MergeProcess(
        mycat=cat, mypkg=pf, settings=self.settings,
        treetype="vartree", vartree=self.pkg.root_config.trees["vartree"],
        scheduler=self.scheduler, background=self.background,
        mydbapi=self.pkg.root_config.trees["vartree"].dbapi,
        prev_mtimes=self.ldpath_mtimes,
        logfile=self.settings.get("PORTAGE_LOG_FILE"), unmerge=True)

    self._start_task(unmerge_task, self._unmerge_exit)
def _pre_clean_exit(self, pre_clean_phase):
    """After a successful pre-clean phase, start fetching distfiles."""
    if self._final_exit(pre_clean_phase) == os.EX_OK:
        # Re-run prepare_build_dirs for log handling.
        portage.prepare_build_dirs(self.pkg.root, self.settings, 1)
        fetch_task = EbuildFetcher(
            background=self.background,
            config_pool=self.config_pool,
            fetchall=self.opts.fetch_all_uri,
            fetchonly=self.opts.fetchonly,
            logfile=self.settings.get('PORTAGE_LOG_FILE'),
            pkg=self.pkg,
            scheduler=self.scheduler,
        )
        self._start_task(fetch_task, self._fetch_exit)
    else:
        self._unlock_builddir()
        self.wait()
def _start_unmerge(self, lock_task):
    """Begin unmerging once the build-dir lock task has completed."""
    self._assert_current(lock_task)
    if lock_task.cancelled:
        self._default_final_exit(lock_task)
        return

    # Raise any exception produced while acquiring the lock.
    lock_task.future.result()
    portage.prepare_build_dirs(settings=self.settings, cleanup=True)

    # Output only gets logged if it comes after prepare_build_dirs()
    # which initializes PORTAGE_LOG_FILE.
    retval, _ = _unmerge_display(
        self.pkg.root_config,
        self.opts,
        "unmerge",
        [self.pkg.cpv],
        clean_delay=0,
        writemsg_level=self._writemsg_level,
    )
    if retval != os.EX_OK:
        self._async_unlock_builddir(returncode=retval)
        return

    self._writemsg_level(">>> Unmerging %s...\n" % (self.pkg.cpv, ),
        noiselevel=-1)
    self._emergelog("=== Unmerging... (%s)" % (self.pkg.cpv, ))

    cat, pf = portage.catsplit(self.pkg.cpv)
    unmerge_task = MergeProcess(
        mycat=cat,
        mypkg=pf,
        settings=self.settings,
        treetype="vartree",
        vartree=self.pkg.root_config.trees["vartree"],
        scheduler=self.scheduler,
        background=self.background,
        mydbapi=self.pkg.root_config.trees["vartree"].dbapi,
        prev_mtimes=self.ldpath_mtimes,
        logfile=self.settings.get("PORTAGE_LOG_FILE"),
        unmerge=True,
    )
    self._start_task(unmerge_task, self._unmerge_exit)
def _pre_clean_exit(self, pre_clean_phase):
    """After pre-clean, fetch distfiles unless they are already present.

    An InvalidDependString from SRC_URI aborts the build with an error;
    when everything is already fetched, the scheduler's fetch queue is
    skipped entirely.
    """
    if self._default_exit(pre_clean_phase) != os.EX_OK:
        self._unlock_builddir()
        self.wait()
        return

    # for log handling
    portage.prepare_build_dirs(self.pkg.root, self.settings, 1)

    fetcher = EbuildFetcher(config_pool=self.config_pool,
        ebuild_path=self._ebuild_path,
        fetchall=self.opts.fetch_all_uri,
        fetchonly=self.opts.fetchonly,
        background=self.background,
        logfile=self.settings.get('PORTAGE_LOG_FILE'),
        pkg=self.pkg, scheduler=self.scheduler)

    try:
        already_fetched = fetcher.already_fetched(self.settings)
    except portage.exception.InvalidDependString as e:
        # Bad SRC_URI: report via elog and fail the build.
        msg_lines = []
        msg = "Fetch failed for '%s' due to invalid SRC_URI: %s" % \
            (self.pkg.cpv, e)
        msg_lines.append(msg)
        fetcher._eerror(msg_lines)
        portage.elog.elog_process(self.pkg.cpv, self.settings)
        self.returncode = 1
        self._current_task = None
        self._unlock_builddir()
        self.wait()
        return

    if already_fetched:
        # This case is optimized to skip the fetch queue.
        fetcher = None
        self._fetch_exit(fetcher)
        return

    # Allow the Scheduler's fetch queue to control the
    # number of concurrent fetchers.
    fetcher.addExitListener(self._fetch_exit)
    self._task_queued(fetcher)
    self.scheduler.fetch.schedule(fetcher)
def _pre_clean_exit(self, pre_clean_phase):
    """After pre-clean, asynchronously check whether distfiles are
    already fetched, then continue in _start_fetch."""
    if self._default_exit(pre_clean_phase) != os.EX_OK:
        self._async_unlock_builddir(returncode=self.returncode)
        return

    # Re-initialize build dirs for log handling.
    portage.prepare_build_dirs(self.pkg.root, self.settings, 1)

    fetch_task = EbuildFetcher(
        background=self.background,
        config_pool=self.config_pool,
        ebuild_path=self._ebuild_path,
        fetchall=self.opts.fetch_all_uri,
        fetchonly=self.opts.fetchonly,
        logfile=self.settings.get('PORTAGE_LOG_FILE'),
        pkg=self.pkg,
        scheduler=self.scheduler,
    )
    already_fetched_check = AsyncTaskFuture(
        future=fetch_task.async_already_fetched(self.settings))
    self._start_task(already_fetched_check,
        functools.partial(self._start_fetch, fetch_task))
def _prefetch_exit(self, prefetcher):
    """Continue after the background prefetcher: fetch the binary package
    (via the scheduler's fetch queue when remote), then _fetcher_exit."""
    if self._was_cancelled():
        self.wait()
        return

    pkg = self.pkg
    pkg_count = self.pkg_count
    if not (self.opts.pretend or self.opts.fetchonly):
        self._build_dir.lock()
        # Initialize PORTAGE_LOG_FILE (clean_log won't work without it).
        portage.prepare_build_dirs(self.settings["ROOT"], self.settings, 1)
        # If necessary, discard old log so that we don't
        # append to it.
        self._build_dir.clean_log()
    fetcher = BinpkgFetcher(background=self.background,
        logfile=self.settings.get("PORTAGE_LOG_FILE"), pkg=self.pkg,
        pretend=self.opts.pretend, scheduler=self.scheduler)
    pkg_path = fetcher.pkg_path
    self._pkg_path = pkg_path
    # This gives bashrc users an opportunity to do various things
    # such as remove binary packages after they're installed.
    self.settings["PORTAGE_BINPKG_FILE"] = pkg_path

    if self.opts.getbinpkg and self._bintree.isremote(pkg.cpv):
        msg = " --- (%s of %s) Fetching Binary (%s::%s)" %\
            (pkg_count.curval, pkg_count.maxval, pkg.cpv, pkg_path)
        short_msg = "emerge: (%s of %s) %s Fetch" % \
            (pkg_count.curval, pkg_count.maxval, pkg.cpv)
        self.logger.log(msg, short_msg=short_msg)

        # Allow the Scheduler's fetch queue to control the
        # number of concurrent fetchers.
        fetcher.addExitListener(self._fetcher_exit)
        self._task_queued(fetcher)
        self.scheduler.fetch.schedule(fetcher)
        return

    # Local/non-remote case: skip the fetch queue entirely.
    self._fetcher_exit(fetcher)
def _pre_clean_exit(self, pre_clean_phase):
    """After pre-clean, asynchronously check whether the distfiles are
    already fetched; _start_fetch decides whether to actually fetch."""
    if self._default_exit(pre_clean_phase) != os.EX_OK:
        self._async_unlock_builddir(returncode=self.returncode)
        return

    # for log handling
    portage.prepare_build_dirs(self.pkg.root, self.settings, 1)

    fetcher = EbuildFetcher(config_pool=self.config_pool,
        ebuild_path=self._ebuild_path,
        fetchall=self.opts.fetch_all_uri,
        fetchonly=self.opts.fetchonly,
        background=self.background,
        logfile=self.settings.get('PORTAGE_LOG_FILE'),
        pkg=self.pkg, scheduler=self.scheduler)

    # Wrap the async already-fetched check in a task; the fetcher itself
    # is forwarded to _start_fetch via functools.partial.
    self._start_task(
        AsyncTaskFuture(
            future=fetcher.async_already_fetched(self.settings)),
        functools.partial(self._start_fetch, fetcher))
def _clean_phase_exit(self, clean_phase):
    """Once the clean phase succeeds, schedule the pkg_setup phase."""
    if self._default_exit(clean_phase) != os.EX_OK:
        self.wait()
        return

    # cleanup=1; this call also initializes PORTAGE_LOG_FILE.
    portage.prepare_build_dirs(self.pkg.root, self.settings, 1)

    phase = EbuildPhase(
        background=self.background,
        pkg=self.pkg,
        phase="setup",
        scheduler=self.scheduler,
        settings=self.settings,
        tree=self._tree,
    )
    phase.addExitListener(self._setup_exit)
    self._current_task = phase
    self.scheduler.scheduleSetup(phase)
def _prefetch_exit(self, prefetcher):
    """Continue after the background prefetcher: fetch the binary package
    (via the scheduler's fetch queue when remote), then _fetcher_exit."""
    pkg = self.pkg
    pkg_count = self.pkg_count
    if not (self.opts.pretend or self.opts.fetchonly):
        self._build_dir.lock()
        # Initialize PORTAGE_LOG_FILE (clean_log won't work without it).
        portage.prepare_build_dirs(self.settings["ROOT"], self.settings, 1)
        # If necessary, discard old log so that we don't
        # append to it.
        self._build_dir.clean_log()
    fetcher = BinpkgFetcher(background=self.background,
        logfile=self.settings.get("PORTAGE_LOG_FILE"), pkg=self.pkg,
        pretend=self.opts.pretend, scheduler=self.scheduler)
    pkg_path = fetcher.pkg_path
    self._pkg_path = pkg_path
    # This gives bashrc users an opportunity to do various things
    # such as remove binary packages after they're installed.
    self.settings["PORTAGE_BINPKG_FILE"] = pkg_path

    if self.opts.getbinpkg and self._bintree.isremote(pkg.cpv):
        msg = " --- (%s of %s) Fetching Binary (%s::%s)" %\
            (pkg_count.curval, pkg_count.maxval, pkg.cpv, pkg_path)
        short_msg = "emerge: (%s of %s) %s Fetch" % \
            (pkg_count.curval, pkg_count.maxval, pkg.cpv)
        self.logger.log(msg, short_msg=short_msg)

        # Allow the Scheduler's fetch queue to control the
        # number of concurrent fetchers.
        fetcher.addExitListener(self._fetcher_exit)
        self._task_queued(fetcher)
        self.scheduler.fetch.schedule(fetcher)
        return

    # Local/non-remote case: skip the fetch queue entirely.
    self._fetcher_exit(fetcher)
def _clean_exit(self, clean_phase):
    """After the clean phase: recreate build/image/info dirs, unpack the
    binary package's xpak metadata, record its md5, then queue the
    pkg_setup phase."""
    if self._default_exit(clean_phase) != os.EX_OK:
        self._unlock_builddir()
        self.wait()
        return

    dir_path = self._build_dir.dir_path
    infloc = self._infloc
    pkg = self.pkg
    pkg_path = self._pkg_path

    dir_mode = 0o755
    for mydir in (dir_path, self._image_dir, infloc):
        portage.util.ensure_dirs(mydir, uid=portage.data.portage_uid,
            gid=portage.data.portage_gid, mode=dir_mode)

    # This initializes PORTAGE_LOG_FILE.
    portage.prepare_build_dirs(self.settings["ROOT"], self.settings, 1)
    self._writemsg_level(">>> Extracting info\n")

    pkg_xpak = portage.xpak.tbz2(self._pkg_path)
    # Record which essential keys the xpak archive failed to provide.
    check_missing_metadata = ("CATEGORY", "PF")
    missing_metadata = set()
    for k in check_missing_metadata:
        v = pkg_xpak.getfile(_unicode_encode(k,
            encoding=_encodings['repo.content']))
        if not v:
            missing_metadata.add(k)

    pkg_xpak.unpackinfo(infloc)
    # Fill in missing keys from the Package instance itself.
    for k in missing_metadata:
        if k == "CATEGORY":
            v = pkg.category
        elif k == "PF":
            v = pkg.pf
        else:
            continue

        f = codecs.open(_unicode_encode(os.path.join(infloc, k),
            encoding=_encodings['fs'], errors='strict'),
            mode='w', encoding=_encodings['content'],
            errors='replace')
        try:
            f.write(v + "\n")
        finally:
            f.close()

    # Store the md5sum in the vdb.
    f = codecs.open(_unicode_encode(os.path.join(infloc, 'BINPKGMD5'),
        encoding=_encodings['fs'], errors='strict'),
        mode='w', encoding=_encodings['content'], errors='strict')
    try:
        f.write(str(portage.checksum.perform_md5(pkg_path)) + "\n")
    finally:
        f.close()

    # This gives bashrc users an opportunity to do various things
    # such as remove binary packages after they're installed.
    settings = self.settings
    settings.setcpv(self.pkg)
    settings["PORTAGE_BINPKG_FILE"] = pkg_path
    settings.backup_changes("PORTAGE_BINPKG_FILE")

    phase = "setup"
    setup_phase = EbuildPhase(background=self.background,
        pkg=self.pkg, phase=phase, scheduler=self.scheduler,
        settings=settings, tree=self._tree)

    setup_phase.addExitListener(self._setup_exit)
    self._current_task = setup_phase
    self.scheduler.scheduleSetup(setup_phase)
def _clean_exit(self, clean_phase):
    """After the clean phase: recreate build/image/info dirs, unpack the
    binary package's xpak metadata, record its md5, then extract the
    saved build environment."""
    if self._default_exit(clean_phase) != os.EX_OK:
        self._unlock_builddir()
        self.wait()
        return

    dir_path = self.settings['PORTAGE_BUILDDIR']
    infloc = self._infloc
    pkg = self.pkg
    pkg_path = self._pkg_path

    dir_mode = 0o755
    for mydir in (dir_path, self._image_dir, infloc):
        portage.util.ensure_dirs(mydir, uid=portage.data.portage_uid,
            gid=portage.data.portage_gid, mode=dir_mode)

    # This initializes PORTAGE_LOG_FILE.
    portage.prepare_build_dirs(self.settings["ROOT"], self.settings, 1)
    self._writemsg_level(">>> Extracting info\n")

    pkg_xpak = portage.xpak.tbz2(self._pkg_path)
    # Record which essential keys the xpak archive failed to provide.
    check_missing_metadata = ("CATEGORY", "PF")
    missing_metadata = set()
    for k in check_missing_metadata:
        v = pkg_xpak.getfile(
            _unicode_encode(k, encoding=_encodings['repo.content']))
        if not v:
            missing_metadata.add(k)

    pkg_xpak.unpackinfo(infloc)
    # Fill in missing keys from the Package instance itself.
    for k in missing_metadata:
        if k == "CATEGORY":
            v = pkg.category
        elif k == "PF":
            v = pkg.pf
        else:
            continue

        f = io.open(_unicode_encode(os.path.join(infloc, k),
            encoding=_encodings['fs'], errors='strict'),
            mode='w', encoding=_encodings['content'],
            errors='backslashreplace')
        try:
            f.write(_unicode_decode(v + "\n"))
        finally:
            f.close()

    # Store the md5sum in the vdb.
    f = io.open(_unicode_encode(os.path.join(infloc, 'BINPKGMD5'),
        encoding=_encodings['fs'], errors='strict'),
        mode='w', encoding=_encodings['content'], errors='strict')
    try:
        f.write(
            _unicode_decode(
                str(portage.checksum.perform_md5(pkg_path)) + "\n"))
    finally:
        f.close()

    env_extractor = BinpkgEnvExtractor(background=self.background,
        scheduler=self.scheduler, settings=self.settings)

    self._start_task(env_extractor, self._env_extractor_exit)
def _start(self):
    """Spawn `ebuild <path> fetch|fetchall` to download distfiles.

    In prefetch mode logging goes to emerge-fetch.log and the builddir
    is left untouched; otherwise the builddir is locked and prepared so
    output lands in PORTAGE_LOG_FILE.
    """
    root_config = self.pkg.root_config
    portdb = root_config.trees["porttree"].dbapi
    ebuild_path = portdb.findname(self.pkg.cpv)
    if ebuild_path is None:
        raise AssertionError("ebuild not found for '%s'" % self.pkg.cpv)

    settings = self.config_pool.allocate()
    settings.setcpv(self.pkg)

    # Skip the spawn entirely when prefetching and the distfiles pass
    # the size check (presumably already complete — confirm against
    # _prefetch_size_ok).
    if self.prefetch and \
        self._prefetch_size_ok(portdb, settings, ebuild_path):
        self.config_pool.deallocate(settings)
        self.returncode = os.EX_OK
        self.wait()
        return

    # In prefetch mode, logging goes to emerge-fetch.log and the builddir
    # should not be touched since otherwise it could interfere with
    # another instance of the same cpv concurrently being built for a
    # different $ROOT (currently, builds only cooperate with prefetchers
    # that are spawned for the same $ROOT).
    if not self.prefetch:
        self._build_dir = EbuildBuildDir(pkg=self.pkg, settings=settings)
        self._build_dir.lock()
        self._build_dir.clean_log()
        portage.prepare_build_dirs(self.pkg.root,
            self._build_dir.settings, 0)
        if self.logfile is None:
            self.logfile = settings.get("PORTAGE_LOG_FILE")

    phase = "fetch"
    if self.fetchall:
        phase = "fetchall"

    # If any incremental variables have been overridden
    # via the environment, those values need to be passed
    # along here so that they are correctly considered by
    # the config instance in the subprocess.
    fetch_env = os.environ.copy()
    fetch_env['PORTAGE_CONFIGROOT'] = settings['PORTAGE_CONFIGROOT']

    nocolor = settings.get("NOCOLOR")
    if nocolor is not None:
        fetch_env["NOCOLOR"] = nocolor

    # NOTE(review): niceness is reset for the child — presumably to
    # avoid compounding with the parent's niceness; confirm.
    fetch_env["PORTAGE_NICENESS"] = "0"
    if self.prefetch:
        fetch_env["PORTAGE_PARALLEL_FETCHONLY"] = "1"

    ebuild_binary = os.path.join(
        settings["PORTAGE_BIN_PATH"], "ebuild")

    fetch_args = [ebuild_binary, ebuild_path, phase]
    debug = settings.get("PORTAGE_DEBUG") == "1"
    if debug:
        fetch_args.append("--debug")

    if not self.background and nocolor not in ('yes', 'true'):
        # Force consistent color output, in case we are capturing fetch
        # output through a normal pipe due to unavailability of ptys.
        fetch_args.append('--color=y')

    self.args = fetch_args
    self.env = fetch_env
    if self._build_dir is None:
        # Free settings now since we only have a local reference.
        self.config_pool.deallocate(settings)
    SpawnProcess._start(self)
def _start(self):
    """Spawn `ebuild <path> fetch|fetchall` to download distfiles.

    In prefetch mode logging goes to emerge-fetch.log and the builddir
    is left untouched; otherwise the builddir is locked and prepared
    (with cleanup) so output lands in PORTAGE_LOG_FILE.
    """
    root_config = self.pkg.root_config
    portdb = root_config.trees["porttree"].dbapi
    ebuild_path = portdb.findname(self.pkg.cpv)
    if ebuild_path is None:
        raise AssertionError("ebuild not found for '%s'" % self.pkg.cpv)

    settings = self.config_pool.allocate()
    settings.setcpv(self.pkg)

    # Skip the spawn entirely when prefetching and the distfiles pass
    # the size check (presumably already complete — confirm against
    # _prefetch_size_ok).
    if self.prefetch and \
        self._prefetch_size_ok(portdb, settings, ebuild_path):
        self.config_pool.deallocate(settings)
        self.returncode = os.EX_OK
        self.wait()
        return

    # In prefetch mode, logging goes to emerge-fetch.log and the builddir
    # should not be touched since otherwise it could interfere with
    # another instance of the same cpv concurrently being built for a
    # different $ROOT (currently, builds only cooperate with prefetchers
    # that are spawned for the same $ROOT).
    if not self.prefetch:
        self._build_dir = EbuildBuildDir(pkg=self.pkg, settings=settings)
        self._build_dir.lock()
        self._build_dir.clean_log()
        cleanup = 1
        # This initializes PORTAGE_LOG_FILE.
        portage.prepare_build_dirs(self.pkg.root,
            self._build_dir.settings, cleanup)
        if self.logfile is None:
            self.logfile = settings.get("PORTAGE_LOG_FILE")

    phase = "fetch"
    if self.fetchall:
        phase = "fetchall"

    # If any incremental variables have been overridden
    # via the environment, those values need to be passed
    # along here so that they are correctly considered by
    # the config instance in the subprocess.
    fetch_env = os.environ.copy()
    fetch_env['PORTAGE_CONFIGROOT'] = settings['PORTAGE_CONFIGROOT']

    nocolor = settings.get("NOCOLOR")
    if nocolor is not None:
        fetch_env["NOCOLOR"] = nocolor

    # NOTE(review): niceness is reset for the child — presumably to
    # avoid compounding with the parent's niceness; confirm.
    fetch_env["PORTAGE_NICENESS"] = "0"
    if self.prefetch:
        fetch_env["PORTAGE_PARALLEL_FETCHONLY"] = "1"

    ebuild_binary = os.path.join(
        settings["PORTAGE_BIN_PATH"], "ebuild")

    fetch_args = [ebuild_binary, ebuild_path, phase]
    debug = settings.get("PORTAGE_DEBUG") == "1"
    if debug:
        fetch_args.append("--debug")

    if not self.background and nocolor not in ('yes', 'true'):
        # Force consistent color output, in case we are capturing fetch
        # output through a normal pipe due to unavailability of ptys.
        fetch_args.append('--color=y')

    self.args = fetch_args
    self.env = fetch_env
    if self._build_dir is None:
        # Free settings now since we only have a local reference.
        self.config_pool.deallocate(settings)
    SpawnProcess._start(self)
def _start(self):
    """Prepare the environment and start unmerging self.pkg.

    Finishes early with EX_OK when the package's vdb entry is already
    gone.
    """
    vardb = self.pkg.root_config.trees["vartree"].dbapi
    dbdir = vardb.getpath(self.pkg.cpv)
    if not os.path.exists(dbdir):
        # Apparently the package got uninstalled
        # already, so we can safely return early.
        self.returncode = os.EX_OK
        self._async_wait()
        return

    self.settings.setcpv(self.pkg)
    cat, pf = portage.catsplit(self.pkg.cpv)
    myebuildpath = os.path.join(dbdir, pf + ".ebuild")

    try:
        portage.doebuild_environment(myebuildpath, "prerm",
            settings=self.settings, db=vardb)
    except UnsupportedAPIException:
        # This is safe to ignore since this function is
        # guaranteed to set PORTAGE_BUILDDIR even though
        # it raises UnsupportedAPIException. The error
        # will be logged when it prevents the pkg_prerm
        # and pkg_postrm phases from executing.
        pass

    self._builddir_lock = EbuildBuildDir(scheduler=self.scheduler,
        settings=self.settings)
    self._builddir_lock.lock()

    portage.prepare_build_dirs(settings=self.settings, cleanup=True)

    # Output only gets logged if it comes after prepare_build_dirs()
    # which initializes PORTAGE_LOG_FILE.
    retval, pkgmap = _unmerge_display(self.pkg.root_config,
        self.opts, "unmerge", [self.pkg.cpv], clean_delay=0,
        writemsg_level=self._writemsg_level)

    if retval != os.EX_OK:
        self._builddir_lock.unlock()
        self.returncode = retval
        self._async_wait()
        return

    self._writemsg_level(">>> Unmerging %s...\n" % (self.pkg.cpv, ),
        noiselevel=-1)
    self._emergelog("=== Unmerging... (%s)" % (self.pkg.cpv, ))

    unmerge_task = MergeProcess(
        mycat=cat, mypkg=pf, settings=self.settings,
        treetype="vartree", vartree=self.pkg.root_config.trees["vartree"],
        scheduler=self.scheduler, background=self.background,
        mydbapi=self.pkg.root_config.trees["vartree"].dbapi,
        prev_mtimes=self.ldpath_mtimes,
        logfile=self.settings.get("PORTAGE_LOG_FILE"), unmerge=True)

    self._start_task(unmerge_task, self._unmerge_exit)
def _unpack_metadata(self):
    """Coroutine: recreate build/image/info dirs, unpack the binary
    package's metadata into the info dir, fill in missing CATEGORY/PF,
    record BINPKGMD5, and extract the saved build environment.

    Raises PortageException if the environment extraction fails.
    NOTE(review): generator-style coroutine — each yield appears to
    await the yielded future/task; confirm against the driver.
    """
    dir_path = self.settings['PORTAGE_BUILDDIR']
    infloc = self._infloc
    pkg = self.pkg
    pkg_path = self._pkg_path

    dir_mode = 0o755
    for mydir in (dir_path, self._image_dir, infloc):
        portage.util.ensure_dirs(mydir, uid=portage.data.portage_uid,
            gid=portage.data.portage_gid, mode=dir_mode)

    # This initializes PORTAGE_LOG_FILE.
    portage.prepare_build_dirs(self.settings["ROOT"], self.settings, 1)
    self._writemsg_level(">>> Extracting info\n")

    yield self._bintree.dbapi.unpack_metadata(self.settings, infloc)
    # Fill in essential keys the metadata did not provide, falling back
    # to values from the Package instance.
    check_missing_metadata = ("CATEGORY", "PF")
    for k, v in zip(check_missing_metadata,
        self._bintree.dbapi.aux_get(self.pkg.cpv,
        check_missing_metadata)):
        if v:
            continue
        elif k == "CATEGORY":
            v = pkg.category
        elif k == "PF":
            v = pkg.pf
        else:
            continue

        f = io.open(_unicode_encode(os.path.join(infloc, k),
            encoding=_encodings['fs'], errors='strict'),
            mode='w', encoding=_encodings['content'],
            errors='backslashreplace')
        try:
            f.write(_unicode_decode(v + "\n"))
        finally:
            f.close()

    # Store the md5sum in the vdb.
    if pkg_path is not None:
        md5sum, = self._bintree.dbapi.aux_get(self.pkg.cpv, ['MD5'])
        if not md5sum:
            md5sum = portage.checksum.perform_md5(pkg_path)
        with io.open(_unicode_encode(os.path.join(infloc, 'BINPKGMD5'),
            encoding=_encodings['fs'], errors='strict'),
            mode='w', encoding=_encodings['content'],
            errors='strict') as f:
            f.write(_unicode_decode('{}\n'.format(md5sum)))

    env_extractor = BinpkgEnvExtractor(background=self.background,
        scheduler=self.scheduler, settings=self.settings)
    env_extractor.start()
    yield env_extractor.async_wait()
    if env_extractor.returncode != os.EX_OK:
        raise portage.exception.PortageException(
            'failed to extract environment for {}'.format(self.pkg.cpv))