def pulse(self, owner):
    """Callback to update progress information.

    Publishes the raw counters on the transaction, computes an overall
    percentage, emits per-worker acquire items and drains the pending
    main-loop events so the UI stays responsive.

    :param owner: the acquire owner whose workers are inspected
    :return: False to stop pulsing when the transaction was cancelled,
        True otherwise
    """
    if self.transaction.cancelled:
        return False
    self.transaction.progress_details = (self.current_items,
                                         self.total_items,
                                         self.current_bytes,
                                         self.total_bytes,
                                         self.current_cps,
                                         self.elapsed_time)
    # Guard against a division by zero before any totals are known
    # (e.g. on the very first pulse).
    total = self.total_bytes + self.total_items
    if total:
        percent = (((self.current_bytes + self.current_items) * 100.0) /
                   float(total))
    else:
        percent = 0.0
    progress = int(self.progress_begin +
                   percent / 100 * (self.progress_end - self.progress_begin))
    # If the progress runs backwards emit an illegal progress value
    # e.g. during cache updates.
    if self.progress > progress:
        self.transaction.progress = 101
    else:
        self.transaction.progress = progress
        self.progress = progress
    # Show all currently downloaded files
    items = []
    for worker in owner.workers:
        if not worker.current_item:
            continue
        self._emit_acquire_item(worker.current_item,
                                worker.total_size,
                                worker.current_size)
        items.append(worker.current_item)
    self._emit_status_details(items)
    # Process all pending main loop events.
    while GObject.main_context_default().pending():
        GObject.main_context_default().iteration()
    return True
def on_media_end_reached(self, event):
    """Handle end-of-stream by advancing to the next track, if any."""
    def _advance():
        if not self.can_go_next():
            return
        self.next()
    # we want the advance to be executed from the main thread
    GObject.main_context_default().invoke_full(0, _advance)
    return False
def on_media_end_reached(self, event):
    """End-of-media handler: schedule a jump to the following track."""
    def _go_to_next_track():
        if self.can_go_next():
            self.next()
    # the jump must happen on the main thread, not the media backend's
    GObject.main_context_default().invoke_full(0, _go_to_next_track)
    return False
def media_change(self, medium, drive):
    """Pause the transaction until the requested medium is provided.

    :return: False if the transaction got cancelled while waiting,
        True otherwise
    """
    #FIXME: make use of DeviceKit/hal
    trans = self.transaction
    trans.required_medium = medium, drive
    trans.paused = True
    trans.status = enums.STATUS_WAITING_MEDIUM
    # spin the main loop until the client unpauses us
    while trans.paused:
        GObject.main_context_default().iteration()
    trans.status = enums.STATUS_DOWNLOADING
    return not trans.cancelled
def update_from_app_install_data(db, cache, datadir=None):
    """Index every .desktop file found below datadir into the database."""
    if not datadir:
        datadir = softwarecenter.paths.APP_INSTALL_DESKTOP_PATH
    ctx = GObject.main_context_default()
    for desktop_file in glob(datadir + "/*.desktop"):
        LOG.debug("processing %s" % desktop_file)
        # keep the UI responsive while indexing
        while ctx.pending():
            ctx.iteration()
        try:
            parser = DesktopConfigParser()
            parser.read(desktop_file)
            index_app_info_from_parser(parser, db, cache)
        except Exception as exc:
            # Print a warning, no error (Debian Bug #568941)
            LOG.debug("error processing: %s %s" % (desktop_file, exc))
            warning_text = _(
                "The file: '%s' could not be read correctly. The application "
                "associated with this file will not be included in the "
                "software catalog. Please consider raising a bug report "
                "for this issue with the maintainer of that "
                "application") % desktop_file
            LOG.warning(warning_text)
    return True
def update_from_app_install_data(db, cache, datadir=None):
    """Feed the desktop entries in $datadir/*.desktop into the index."""
    datadir = datadir or softwarecenter.paths.APP_INSTALL_DESKTOP_PATH
    context = GObject.main_context_default()
    for path in glob(datadir + "/*.desktop"):
        LOG.debug("processing %s" % path)
        # drain pending main-loop events between files
        while context.pending():
            context.iteration()
        try:
            entry_parser = DesktopConfigParser()
            entry_parser.read(path)
            index_app_info_from_parser(entry_parser, db, cache)
        except Exception as e:
            # Print a warning, no error (Debian Bug #568941)
            LOG.debug("error processing: %s %s" % (path, e))
            LOG.warning(_(
                "The file: '%s' could not be read correctly. The application "
                "associated with this file will not be included in the "
                "software catalog. Please consider raising a bug report "
                "for this issue with the maintainer of that "
                "application") % path)
    return True
def setUp(self):
    """Create a main loop and point the recommender at the staging host."""
    self.loop = GObject.MainLoop(GObject.main_context_default())
    self.error = False
    # remember the original host so tearDown can restore it
    self.orig_host = os.environ.get("SOFTWARE_CENTER_RECOMMENDER_HOST")
    if "SOFTWARE_CENTER_RECOMMENDER_HOST" not in os.environ:
        server = "https://rec.staging.ubuntu.com"
        #server = "https://rec.ubuntu.com"
        os.environ["SOFTWARE_CENTER_RECOMMENDER_HOST"] = server
def setUp(self):
    """Create a main loop and point the recommender at the live host."""
    self.loop = GObject.MainLoop(GObject.main_context_default())
    self.error = False
    # remember the original host so tearDown can restore it
    self.orig_host = os.environ.get("SOFTWARE_CENTER_RECOMMENDER_HOST")
    if "SOFTWARE_CENTER_RECOMMENDER_HOST" not in os.environ:
        #server = "https://rec.staging.ubuntu.com"
        server = "https://rec.ubuntu.com"
        os.environ["SOFTWARE_CENTER_RECOMMENDER_HOST"] = server
def on_report(self, element, message):
    """Append one report row to the model and give the UI a chance
    to redraw."""
    log.info("%s: %s" % (type(element).__name__, message))
    row = self.model.append()
    # fill all three columns of the new row
    for column, value in ((PYELEMENT_COLUMN, element),
                          (ELEMENT_COLUMN, type(element).__name__),
                          (REASON_COLUMN, message)):
        self.model.set_value(row, column, value)
    GObject.main_context_default().iteration(False)
def _conffile(self, current, new):
    """Handle a dpkg configuration file conflict.

    Pauses the transaction until a resolution is provided, then feeds
    the corresponding answer ("y"/"n") to dpkg via the master pty.
    """
    log.warning("Config file prompt: '%s' (%s)" % (current, new))
    trans = self.transaction
    trans.config_file_conflict = (current, new)
    trans.paused = True
    trans.status = enums.STATUS_WAITING_CONFIG_FILE_PROMPT
    # block until the client answered the prompt
    while trans.paused:
        GObject.main_context_default().iteration()
    log.debug("Sending config file answer: %s",
              trans.config_file_conflict_resolution)
    answer = trans.config_file_conflict_resolution
    if answer == "replace":
        os.write(self.master_fd, "y\n")
    elif answer == "keep":
        os.write(self.master_fd, "n\n")
    trans.config_file_conflict_resolution = None
    trans.config_file_conflict = None
    trans.status = enums.STATUS_COMMITTING
    return True
def on_report(self, element, message):
    """Log the report and show it as a new row in the tree model."""
    log.info("%s: %s" % (type(element).__name__, message))
    tree_iter = self.model.append()
    self.model.set_value(tree_iter, PYELEMENT_COLUMN, element)
    self.model.set_value(tree_iter, ELEMENT_COLUMN, type(element).__name__)
    self.model.set_value(tree_iter, REASON_COLUMN, message)
    # run one non-blocking main-loop iteration so the view updates
    GObject.main_context_default().iteration(False)
def wait_for_lock(trans, alt_lock=None):
    """Acquire the system lock or the optionally given one.

    If the lock cannot be obtained pause the transaction in the meantime.

    :param trans: the transaction
    :param alt_lock: optional alternative lock to the system lock

    :raises TransactionCancelled: if the transaction was cancelled while
        waiting for the lock
    """
    def watch_lock():
        """Timeout helper: unpause the transaction once the lock can be
        obtained.

        Always returns True so the GLib timeout keeps firing; the outer
        function removes the source explicitly when the wait is over.
        """
        try:
            if alt_lock:
                alt_lock.acquire()
            else:
                acquire()
        except LockFailedError:
            return True
        trans.paused = False
        return True
    try:
        if alt_lock:
            alt_lock.acquire()
        else:
            acquire()
    except LockFailedError as error:
        # Could not get the lock right away: pause the transaction and
        # re-try every 3 seconds until the owning process releases it.
        trans.paused = True
        trans.status = enums.STATUS_WAITING_LOCK
        if error.process:
            #TRANSLATORS: %s is the name of a package manager
            msg = trans.gettext("Waiting for %s to exit")
            trans.status_details = msg % error.process
        lock_watch = GObject.timeout_add_seconds(3, watch_lock)
        # Spin the main loop until watch_lock unpauses us or the
        # transaction gets cancelled.
        while trans.paused and not trans.cancelled:
            GObject.main_context_default().iteration()
        GObject.source_remove(lock_watch)
        if trans.cancelled:
            raise TransactionCancelled()
def test_launchpad_login(self):
    """The login-successful signal must reach the connected callback."""
    lp = GLaunchpad()
    lp.connect("login-successful", self._cb_login_successful)
    # monkey patch so no real network login is attempted
    lp.login = lambda username, password: True
    lp.login("user", "password")
    lp.emit("login-successful", None)
    ctx = GObject.main_context_default()
    while ctx.pending():
        ctx.iteration()
    self.assertTrue(self._login_successful)
def wait(self, timeout=0):
    """Block until the coroutine finishes or *timeout* seconds elapsed.

    Implemented by iterating the GTK+ main loop (non-blocking).
    """
    started = time.clock()
    context = GObject.main_context_default()
    while self.is_alive():
        context.iteration(False)
        if timeout and time.clock() - started >= timeout:
            return
def test_download_unreachable(self):
    """An unreachable URL must be reported and create no local file."""
    self.downloader.download_file("http://www.ubuntu.com/really-not-there",
                                  self.DOWNLOAD_FILENAME)
    ctx = GObject.main_context_default()
    # spin until the reachability callback fired
    while self._image_is_reachable is None:
        while ctx.pending():
            ctx.iteration()
        time.sleep(0.1)
    self.assertNotEqual(self._image_is_reachable, None)
    self.assertFalse(self._image_is_reachable)
    self.assertFalse(os.path.exists(self.DOWNLOAD_FILENAME))
def test_download_unreachable(self):
    """Downloading a missing URL reports unreachable, writes nothing."""
    self.downloader.download_file("http://www.ubuntu.com/really-not-there",
                                  self.DOWNLOAD_FILENAME)
    context = GObject.main_context_default()
    while self._image_is_reachable is None:
        # pump all queued events, then back off briefly
        while context.pending():
            context.iteration()
        time.sleep(0.1)
    self.assertNotEqual(self._image_is_reachable, None)
    self.assertFalse(self._image_is_reachable)
    self.assertTrue(not os.path.exists(self.DOWNLOAD_FILENAME))
def test_download_reachable(self):
    """A reachable URL must be downloaded to DOWNLOAD_FILENAME."""
    self.downloader.download_file("http://www.ubuntu.com",
                                  self.DOWNLOAD_FILENAME)
    ctx = GObject.main_context_default()
    # wait for either a completed download or an error
    while self._image_downloaded_filename is None and not self._error:
        while ctx.pending():
            ctx.iteration()
        time.sleep(0.1)
    self.assertNotEqual(self._image_is_reachable, None)
    self.assertTrue(self._image_is_reachable)
    self.assertTrue(os.path.exists(self.DOWNLOAD_FILENAME))
def test_launchpad_login(self):
    """Emitting login-successful triggers our connected handler."""
    launchpad = GLaunchpad()
    launchpad.connect("login-successful", self._cb_login_successful)
    # monkey patch: avoid any real authentication round-trip
    launchpad.login = lambda u, p: True
    launchpad.login("user", "password")
    launchpad.emit("login-successful", None)
    context = GObject.main_context_default()
    while context.pending():
        context.iteration()
    self.assertTrue(self._login_successful)
def test_download_reachable(self):
    """Fetching a live URL sets the reachable flag and writes the file."""
    self.downloader.download_file("http://www.ubuntu.com",
                                  self.DOWNLOAD_FILENAME)
    context = GObject.main_context_default()
    while (self._image_downloaded_filename is None and not self._error):
        # drain the event queue, then sleep a little between polls
        while context.pending():
            context.iteration()
        time.sleep(0.1)
    self.assertNotEqual(self._image_is_reachable, None)
    self.assertTrue(self._image_is_reachable)
    self.assertTrue(os.path.exists(self.DOWNLOAD_FILENAME))
def __init__(self, useGtk=True):
    """Initialise the glib-based reactor.

    :param useGtk: kept for signature compatibility with the other gtk
        reactors; not inspected here (NOTE(review): confirm whether a
        subclass uses it).
    """
    self._simtag = None      # source id of the pending simulate() timeout
    self._reads = set()      # file descriptors watched for reading
    self._writes = set()     # file descriptors watched for writing
    self._sources = {}       # fd -> glib source id
    posixbase.PosixReactorBase.__init__(self)
    # cache bound methods of the default context/loop so the hot
    # iterate path avoids repeated attribute lookups
    self.context = gobject.main_context_default()
    self.__pending = self.context.pending
    self.__iteration = self.context.iteration
    self.loop = gobject.MainLoop()
    self.__crash = self.loop.quit
    self.__run = self.loop.run
def __init__(self, useGtk=True):
    """Initialise the glib-based reactor.

    :param useGtk: accepted for API compatibility; not used in this
        constructor (NOTE(review): verify against subclasses).
    """
    self._simtag = None      # source id of the pending simulate() timeout
    self._reads = set()      # file descriptors watched for reading
    self._writes = set()     # file descriptors watched for writing
    self._sources = {}       # fd -> glib source id
    posixbase.PosixReactorBase.__init__(self)
    # bind the default context's and loop's methods once; the private
    # (name-mangled) aliases are used by the reactor's run/iterate path
    self.context = gobject.main_context_default()
    self.__pending = self.context.pending
    self.__iteration = self.context.iteration
    self.loop = gobject.MainLoop()
    self.__crash = self.loop.quit
    self.__run = self.loop.run
def __init__(self, use_cache=True):
    """Set up apt history monitoring and schedule the initial scan.

    :param use_cache: forwarded to _rescan; controls whether a cached
        parse of the history log may be reused.
    """
    LOG.debug("AptHistory.__init__()")
    self.main_context = GObject.main_context_default()
    self.history_file = apt_pkg.config.find_file("Dir::Log::History")
    #Copy monitoring of history file changes from historypane.py
    self.logfile = Gio.File.new_for_path(self.history_file)
    self.monitor = self.logfile.monitor_file(0, None)
    self.monitor.connect("changed", self._on_apt_history_changed)
    self.update_callback = None    # set by clients wanting change events
    LOG.debug("init history")
    # this takes a long time, run it in the idle handler
    self._transactions = []
    self._history_ready = False
    GObject.idle_add(self._rescan, use_cache)
def __init__(self, use_cache=True):
    """Initialise the apt history reader.

    Watches the apt history log for changes and defers the expensive
    initial parse to an idle handler.

    :param use_cache: forwarded to _rescan; whether a cached parse of
        the history log may be reused.
    """
    LOG.debug("AptHistory.__init__()")
    self.main_context = GObject.main_context_default()
    self.history_file = apt_pkg.config.find_file("Dir::Log::History")
    #Copy monitoring of history file changes from historypane.py
    self.logfile = Gio.File.new_for_path(self.history_file)
    self.monitor = self.logfile.monitor_file(0, None)
    self.monitor.connect("changed", self._on_apt_history_changed)
    self.update_callback = None    # set by clients wanting change events
    LOG.debug("init history")
    # this takes a long time, run it in the idle handler
    self._transactions = []
    self._history_ready = False
    GObject.idle_add(self._rescan, use_cache)
def ensure_installation_date_and_lazy_history_loading(self, appdetails):
    """installation_date must be lazy: None first, set once history loads."""
    # before the history is loaded the value has to be None
    self.assertEqual(appdetails.installation_date, None)
    # drain the idle handler that loads the history in the background
    from gi.repository import GObject
    ctx = GObject.main_context_default()
    while ctx.pending():
        ctx.iteration()
    # ... and finally we test that its really there
    # FIXME: this will only work if software-center is installed
    self.assertNotEqual(appdetails.installation_date, None)
def update_from_software_center_agent(db, cache, ignore_cache=False,
                                      include_sca_qa=False):
    """ update index based on the software-center-agent data

    Runs a private main loop until the agent reports its list of
    available items (or an error), then indexes each entry.

    :return: True if the agent delivered data (possibly an empty list),
        False if the agent reported an error
    """
    # both callbacks close over "loop", which is created further down
    def _available_cb(sca, available):
        # print "available: ", available
        LOG.debug("available: '%s'" % available)
        sca.available = available
        sca.good_data = True
        loop.quit()

    def _error_cb(sca, error):
        LOG.warn("error: %s" % error)
        sca.available = []
        sca.good_data = False
        loop.quit()
    # use the anonymous interface to s-c-agent, scales much better and is
    # much cache friendlier
    from softwarecenter.backend.scagent import SoftwareCenterAgent
    # FIXME: honor ignore_etag here somehow with the new piston based API
    sca = SoftwareCenterAgent(ignore_cache)
    sca.connect("available", _available_cb)
    sca.connect("error", _error_cb)
    sca.available = None
    if include_sca_qa:
        sca.query_available_qa()
    else:
        sca.query_available()
    # create event loop and run it until data is available
    # (the _available_cb and _error_cb will quit it)
    context = GObject.main_context_default()
    loop = GObject.MainLoop(context)
    loop.run()
    # process data
    for entry in sca.available:
        # process events
        while context.pending():
            context.iteration()
        try:
            # now the normal parser
            parser = SCAApplicationParser(entry)
            index_app_info_from_parser(parser, db, cache)
        except Exception as e:
            LOG.warning("error processing: %s " % e)
    # return true if we have updated entries (this can also be an empty list)
    # but only if we did not got a error from the agent
    return sca.good_data
def test_auto_fill_in_email(self):
    """The webview must inject the configured email once the page loaded."""
    loop = GObject.MainLoop(GObject.main_context_default())

    def _load_status_changed(view, status):
        # stop the loop as soon as the page finished loading
        if view.get_property("load-status") == WebKit.LoadStatus.FINISHED:
            loop.quit()
    webview = SoftwareCenterWebView()
    email = "foo@bar"
    webview.set_auto_insert_email(email)
    with patch.object(webview, "execute_script") as mock_execute_js:
        webview.connect("notify::load-status", _load_status_changed)
        webview.load_uri("https://login.ubuntu.com")
        loop.run()
        mock_execute_js.assert_called()
        mock_execute_js.assert_called_with(
            SoftwareCenterWebView.AUTO_FILL_EMAIL_JS % email)
def get_all_origins(self):
    """Return the set of channel origin names found in the apt cache.

    :return: set of origin strings (empty entries are skipped)
    """
    # the default main context is loop-invariant: fetch it once instead
    # of re-fetching it for every origin of every package
    context = GObject.main_context_default()
    origins = set()
    for pkg in self._cache:
        if not pkg.candidate:
            continue
        for item in pkg.candidate.origins:
            # keep the UI responsive while scanning the whole cache
            while context.pending():
                context.iteration()
            if item.origin:
                origins.add(item.origin)
    return origins
def get_all_origins(self):
    """Return a set of the current channel origins from the apt.Cache.

    :return: set of non-empty origin strings
    """
    origins = set()
    # hoisted out of the loops: the default context never changes, so
    # creating it per-origin was pure overhead
    context = GObject.main_context_default()
    for pkg in self._cache:
        if not pkg.candidate:
            continue
        for item in pkg.candidate.origins:
            # process pending events so the UI stays responsive
            while context.pending():
                context.iteration()
            if item.origin:
                origins.add(item.origin)
    return origins
def update_from_software_center_agent(db, cache, ignore_cache=False,
                                      include_sca_qa=False):
    """ update index based on the software-center-agent data

    Blocks on a private main loop until the agent answers, then feeds
    every returned entry through SCAApplicationParser into the index.

    :return: True if the agent delivered (possibly empty) data,
        False if it reported an error
    """
    # note: both callbacks close over "loop", defined further below
    def _available_cb(sca, available):
        # print "available: ", available
        LOG.debug("available: '%s'" % available)
        sca.available = available
        sca.good_data = True
        loop.quit()

    def _error_cb(sca, error):
        LOG.warn("error: %s" % error)
        sca.available = []
        sca.good_data = False
        loop.quit()
    # use the anonymous interface to s-c-agent, scales much better and is
    # much cache friendlier
    from softwarecenter.backend.scagent import SoftwareCenterAgent
    # FIXME: honor ignore_etag here somehow with the new piston based API
    sca = SoftwareCenterAgent(ignore_cache)
    sca.connect("available", _available_cb)
    sca.connect("error", _error_cb)
    sca.available = None
    if include_sca_qa:
        sca.query_available_qa()
    else:
        sca.query_available()
    # create event loop and run it until data is available
    # (the _available_cb and _error_cb will quit it)
    context = GObject.main_context_default()
    loop = GObject.MainLoop(context)
    loop.run()
    # process data
    for entry in sca.available:
        # process events
        while context.pending():
            context.iteration()
        try:
            # now the normal parser
            parser = SCAApplicationParser(entry)
            index_app_info_from_parser(parser, db, cache)
        except Exception as e:
            LOG.warning("error processing: %s " % e)
    # return true if we have updated entries (this can also be an empty list)
    # but only if we did not got a error from the agent
    return sca.good_data
def update_from_appstream_xml(db, cache, xmldir=None):
    """Index appstream XML data from xmldir (a single file or directory)."""
    if not xmldir:
        xmldir = softwarecenter.paths.APPSTREAM_XML_PATH
    ctx = GObject.main_context_default()
    # a single file may be passed instead of a directory
    if os.path.isfile(xmldir):
        update_from_single_appstream_file(db, cache, xmldir)
        return True
    for xml_path in glob(os.path.join(xmldir, "*.xml")):
        LOG.debug("processing %s" % xml_path)
        # keep the UI alive while indexing
        while ctx.pending():
            ctx.iteration()
        update_from_single_appstream_file(db, cache, xml_path)
    return True
def update_from_appstream_xml(db, cache, xmldir=None):
    """Feed appstream XML (one file, or every *.xml in a dir) to the index."""
    xmldir = xmldir or softwarecenter.paths.APPSTREAM_XML_PATH
    context = GObject.main_context_default()
    if os.path.isfile(xmldir):
        # caller handed us one file directly
        update_from_single_appstream_file(db, cache, xmldir)
        return True
    for appstream_xml in glob(os.path.join(xmldir, "*.xml")):
        LOG.debug("processing %s" % appstream_xml)
        # drain queued main-loop events between files
        while context.pending():
            context.iteration()
        update_from_single_appstream_file(db, cache, appstream_xml)
    return True
def test_auto_fill_in_email(self):
    """Loading the login page must trigger the email auto-fill script."""
    loop = GObject.MainLoop(GObject.main_context_default())

    def _load_status_changed(view, status):
        # quit once WebKit reports the load as finished
        if view.get_property("load-status") == WebKit.LoadStatus.FINISHED:
            loop.quit()
    webview = SoftwareCenterWebView()
    email = "foo@bar"
    webview.set_auto_insert_email(email)
    with patch.object(webview, "execute_script") as mock_execute_js:
        webview.connect("notify::load-status", _load_status_changed)
        webview.load_uri("https://login.ubuntu.com")
        loop.run()
        mock_execute_js.assert_called()
        mock_execute_js.assert_called_with(
            SoftwareCenterWebView.AUTO_FILL_EMAIL_JS % email)
def test_open_aptcache(self):
    """Time opening the s-c apt cache against plain apt.Cache."""
    # mvo: for the performance, its critical to have a
    # /var/cache/apt/srcpkgcache.bin - otherwise stuff will get slow
    with ExecutionTime("s-c softwarecenter.apt.AptCache"):
        self.sccache = get_pkg_info()
    # cache is opened with a timeout_add() in get_pkg_info()
    time.sleep(0.2)
    ctx = GObject.main_context_default()
    while ctx.pending():
        ctx.iteration()
    # compare with plain apt
    with ExecutionTime("plain apt: apt.Cache()"):
        self.cache = apt.Cache()
    with ExecutionTime("plain apt: apt.Cache(memonly=True)"):
        self.cache = apt.Cache(memonly=True)
def test_open_aptcache(self):
    """Benchmark software-center's cache open path vs. plain apt."""
    # mvo: for the performance, its critical to have a
    # /var/cache/apt/srcpkgcache.bin - otherwise stuff will get slow
    with ExecutionTime("s-c softwarecenter.apt.AptCache"):
        self.sccache = get_pkg_info()
    # cache is opened with a timeout_add() in get_pkg_info()
    time.sleep(0.2)
    context = GObject.main_context_default()
    while context.pending():
        context.iteration()
    # compare with plain apt
    with ExecutionTime("plain apt: apt.Cache()"):
        self.cache = apt.Cache()
    with ExecutionTime("plain apt: apt.Cache(memonly=True)"):
        self.cache = apt.Cache(memonly=True)
def update_from_var_lib_apt_lists(db, cache, listsdir=None):
    """ index the files in /var/lib/apt/lists/*AppInfo

    :param db: xapian database to write to
    :param cache: package cache handed to the parser
    :param listsdir: alternative lists directory; defaults to apt's
        Dir::State::lists
    :return: False if apt_pkg is unavailable, True otherwise
    """
    try:
        import apt_pkg
    except ImportError:
        return False
    if not listsdir:
        listsdir = apt_pkg.config.find_dir("Dir::State::lists")
    context = GObject.main_context_default()
    for appinfo in glob("%s/*AppInfo" % listsdir):
        LOG.debug("processing %s" % appinfo)
        # process events
        while context.pending():
            context.iteration()
        # close the file when done instead of leaking the descriptor
        with open(appinfo) as appinfo_file:
            tagf = apt_pkg.TagFile(appinfo_file)
            for section in tagf:
                parser = DesktopTagSectionParser(section, appinfo)
                index_app_info_from_parser(parser, db, cache)
    return True
def test_compat_api(self):
    """GObject must still re-export GLib symbols, raising deprecation
    warnings for the moved ones."""
    with warnings.catch_warnings(record=True) as w:
        warnings.simplefilter('always')
        # GObject formerly exposed a lot of GLib's functions
        self.assertEqual(GObject.markup_escape_text('foo'), 'foo')
        ml = GObject.MainLoop()
        self.assertFalse(ml.is_running())
        context = GObject.main_context_default()
        self.assertTrue(context.pending() in [False, True])
        context = GObject.MainContext()
        # a freshly created context has no pending events
        self.assertFalse(context.pending())
        # the very first compat call above must have warned
        self.assertTrue(issubclass(w[0].category, PyGIDeprecationWarning))
        self.assertTrue('GLib.markup_escape_text' in str(w[0]), str(w[0]))
        self.assertLess(GObject.PRIORITY_HIGH, GObject.PRIORITY_DEFAULT)
def update_from_var_lib_apt_lists(db, cache, listsdir=None):
    """ index the files in /var/lib/apt/lists/*AppInfo

    :param listsdir: alternative lists directory; defaults to apt's
        Dir::State::lists
    :return: False if apt_pkg is unavailable, True otherwise
    """
    try:
        import apt_pkg
    except ImportError:
        return False
    if not listsdir:
        listsdir = apt_pkg.config.find_dir("Dir::State::lists")
    context = GObject.main_context_default()
    for appinfo in glob("%s/*AppInfo" % listsdir):
        LOG.debug("processing %s" % appinfo)
        # process events
        while context.pending():
            context.iteration()
        # use a context manager so the file handle is not leaked
        with open(appinfo) as fileobj:
            tagf = apt_pkg.TagFile(fileobj)
            for section in tagf:
                parser = DesktopTagSectionParser(section, appinfo)
                index_app_info_from_parser(parser, db, cache)
    return True
def update_from_appstream_xml(db, cache, xmldir=None):
    """ index the appstream *.xml files found in xmldir

    :param xmldir: directory with appstream XML; defaults to
        softwarecenter.paths.APPSTREAM_XML_PATH
    :return: True
    """
    if not xmldir:
        xmldir = softwarecenter.paths.APPSTREAM_XML_PATH
    from lxml import etree
    context = GObject.main_context_default()
    for appstream_xml in glob(os.path.join(xmldir, "*.xml")):
        LOG.debug("processing %s" % appstream_xml)
        # process events
        while context.pending():
            context.iteration()
        # let lxml open the file itself instead of leaking the file
        # object returned by open()
        tree = etree.parse(appstream_xml)
        root = tree.getroot()
        if root.tag != "applications":
            LOG.error("failed to read '%s' expected Applications root tag" %
                      appstream_xml)
            continue
        for appinfo in root.iter("application"):
            parser = AppStreamXMLParser(appinfo, appstream_xml)
            index_app_info_from_parser(parser, db, cache)
    return True
def test_compat_api(self):
    """The GLib symbols historically re-exported by GObject must still
    work and emit PyGIDeprecationWarning."""
    with warnings.catch_warnings(record=True) as w:
        warnings.simplefilter('always')
        # GObject formerly exposed a lot of GLib's functions
        self.assertEqual(GObject.markup_escape_text('foo'), 'foo')
        ml = GObject.MainLoop()
        self.assertFalse(ml.is_running())
        context = GObject.main_context_default()
        self.assertTrue(context.pending() in [False, True])
        context = GObject.MainContext()
        # a brand new context starts without pending events
        self.assertFalse(context.pending())
        # the first compat call recorded a deprecation warning
        self.assertTrue(issubclass(w[0].category, PyGIDeprecationWarning))
        self.assertTrue('GLib.markup_escape_text' in str(w[0]), str(w[0]))
        self.assertLess(GObject.PRIORITY_HIGH, GObject.PRIORITY_DEFAULT)
def update_from_appstream_xml(db, cache, xmldir=None):
    """ index the appstream *.xml files found in xmldir

    :param xmldir: directory with appstream XML; defaults to
        softwarecenter.paths.APPSTREAM_XML_PATH
    :return: True
    """
    if not xmldir:
        xmldir = softwarecenter.paths.APPSTREAM_XML_PATH
    from lxml import etree
    context = GObject.main_context_default()
    for appstream_xml in glob(os.path.join(xmldir, "*.xml")):
        LOG.debug("processing %s" % appstream_xml)
        # process events
        while context.pending():
            context.iteration()
        # pass the path to lxml directly; open() here leaked the file
        # object since nothing ever closed it
        tree = etree.parse(appstream_xml)
        root = tree.getroot()
        if root.tag != "applications":
            LOG.error("failed to read '%s' expected Applications root tag" %
                      appstream_xml)
            continue
        for appinfo in root.iter("application"):
            parser = AppStreamXMLParser(appinfo, appstream_xml)
            index_app_info_from_parser(parser, db, cache)
    return True
def _threaded_perform_search(self):
    """Run _blocking_perform_search in a worker thread while pumping
    the main loop, then emit query-complete."""
    self._perform_search_complete = False
    # pick a thread name that no currently running thread already uses
    active_names = [t.name for t in threading.enumerate()]
    for candidate in range(threading.active_count() + 1, 0, -1):
        thread_name = 'ThreadedQuery-%s' % candidate
        if thread_name not in active_names:
            break
    worker = threading.Thread(target=self._blocking_perform_search,
                              name=thread_name)
    worker.start()
    # don't block the UI while the thread is running
    context = GObject.main_context_default()
    while not self._perform_search_complete:
        time.sleep(0.02)  # 50 fps
        while context.pending():
            context.iteration()
    worker.join()
    # call the query-complete callback
    self.emit("query-complete")
def _threaded_perform_search(self):
    """Execute the blocking search in a uniquely-named thread and keep
    the UI responsive until it finishes."""
    self._perform_search_complete = False
    # find an unused ThreadedQuery-<n> name
    taken = [existing.name for existing in threading.enumerate()]
    for n in range(threading.active_count() + 1, 0, -1):
        thread_name = 'ThreadedQuery-%s' % n
        if thread_name not in taken:
            break
    query_thread = threading.Thread(target=self._blocking_perform_search,
                                    name=thread_name)
    query_thread.start()
    # don't block the UI while the thread is running
    ctx = GObject.main_context_default()
    while not self._perform_search_complete:
        time.sleep(0.02)  # 50 fps
        while ctx.pending():
            ctx.iteration()
    query_thread.join()
    # call the query-complete callback
    self.emit("query-complete")
# Ad-hoc demo script (Python 2): exercises two review loaders against a
# live package database, printing results via the given callbacks.
from softwarecenter.db.pkginfo import get_pkg_info
cache = get_pkg_info()
cache.open()

db = StoreDatabase(XAPIAN_BASE_PATH + "/xapian", cache)
db.open()

# rnrclient loader
app = Application("ACE", "unace")
#app = Application("", "2vcard")
from softwarecenter.backend.reviews.rnr import (
    ReviewLoaderSpawningRNRClient
)
loader = ReviewLoaderSpawningRNRClient(cache, db)
print loader.refresh_review_stats(stats_callback)
print loader.get_reviews(app, callback)

print "\n\n"
print "default loader, press ctrl-c for next loader"
# run a main loop so the async loader callbacks can fire
context = GObject.main_context_default()
main = GObject.MainLoop(context)
main.run()

# default loader
app = Application("", "2vcard")
loader = get_review_loader(cache, db)
loader.refresh_review_stats(stats_callback)
loader.get_reviews(app, callback)
main.run()
def setUp(self):
    """Reset the error flag and create a fresh main loop."""
    self.error = False
    self.loop = GObject.MainLoop(GObject.main_context_default())
def _p(self):
    """Drain every pending event from the default main context."""
    ctx = GObject.main_context_default()
    while ctx.pending():
        ctx.iteration()
def __init__(self):
    """Wrap the default GLib main context; no event source attached yet."""
    super(MainLoopGtk, self).__init__()
    self.source = None
    self.context = GObject.main_context_default()
def _p(self):
    """Process all currently queued main-loop events."""
    context = GObject.main_context_default()
    while context.pending():
        context.iteration()
def update(self, percent=0):
    """Pump pending main-loop events; *percent* is accepted but unused."""
    ctx = GObject.main_context_default()
    while ctx.pending():
        ctx.iteration()
def update(self, percent=0):
    """Keep the UI alive by draining queued events; ignores *percent*."""
    context = GObject.main_context_default()
    while context.pending():
        context.iteration()
def get_addons(self, pkgname, ignore_installed=True):
    """ get the list of addons for the given pkgname

        The optional parameter "ignore_installed" controls if the
        output should be filtered and pkgs already installed should be
        ignored in the output (e.g. useful for testing).

        :return: a tuple of pkgnames (recommends, suggests)
    """
    logging.debug("get_addons for '%s'" % pkgname)

    def _addons_filter(addon):
        """ helper for get_addons that filters out unneeded ones """
        # we don't know about this one (prefectly legal for suggests)
        if not addon in self._cache:
            LOG.debug("not in cache %s" % addon)
            return False
        # can happen via "lonley" depends
        if addon == pkg.name:
            LOG.debug("circular %s" % addon)
            return False
        # child pkg is addon of parent pkg, not the other way around.
        if addon == '-'.join(pkgname.split('-')[:-1]):
            LOG.debug("child > parent %s" % addon)
            return False
        # get the pkg
        addon_pkg = self._cache[addon]
        # we don't care for essential or important (or refrences
        # to ourself)
        if (addon_pkg.essential or addon_pkg._pkg.important):
            LOG.debug("essential or important %s" % addon)
            return False
        # we have it in our dependencies already
        if addon in deps:
            LOG.debug("already a dependency %s" % addon)
            return False
        # its a language-pack, language-selector should deal with it
        if self._is_language_pkg(addon):
            LOG.debug("part of language pkg rdepends %s" % addon)
            return False
        # something on the system depends on it
        rdeps = self.get_packages_removed_on_remove(addon_pkg)
        if rdeps and ignore_installed:
            LOG.debug("already has a installed rdepends %s" % addon)
            return False
        # looks good
        return True
    #----------------------------------------------------------------

    def _addons_filter_slow(addon):
        """ helper for get_addons that filters out addons that would be
        installed anyway (expensive check, run last) """
        # this addon would get installed anyway (e.g. via indirect
        # dependency) so it would be misleading to show it
        if addon in all_deps_if_installed:
            LOG.debug("would get installed automatically %s" % addon)
            return False
        return True
    #----------------------------------------------------------------
    # deb file, or pkg needing source, etc
    if (not pkgname in self._cache or
            not self._cache[pkgname].candidate):
        return ([], [])
    # initial setup
    pkg = self._cache[pkgname]
    # recommended addons
    addons_rec = self._get_recommends(pkg)
    LOG.debug("recommends: %s" % addons_rec)
    # suggested addons and renhances
    addons_sug = self._get_suggests(pkg)
    LOG.debug("suggests: %s" % addons_sug)
    renhances = self._get_renhances(pkg)
    LOG.debug("renhances: %s" % renhances)
    addons_sug += renhances
    provides = self._get_provides(pkg)
    LOG.debug("provides: %s" % provides)
    # reverse-enhances of the virtual packages this one provides also
    # count as suggested addons
    for provide in provides:
        virtual_aptpkg_pkg = self._cache._cache[provide]
        renhances = self._get_renhances_lowlevel_apt_pkg(
            virtual_aptpkg_pkg)
        LOG.debug("renhances of %s: %s" % (provide, renhances))
        addons_sug += renhances
        # keep the UI responsive during the scan
        context = GObject.main_context_default()
        while context.pending():
            context.iteration()
    # get more addons, the idea is that if a package foo-data
    # just depends on foo we want to get the info about
    # "recommends, suggests, enhances" for foo-data as well
    #
    # FIXME: find a good package where this is actually the case and
    #        replace the existing test
    #        (arduino-core -> avrdude -> avrdude-doc) with that
    # FIXME2: if it turns out we don't have good/better examples,
    #         kill it
    deps = self._get_depends(pkg)
    for dep in deps:
        if dep in self._cache:
            pkgdep = self._cache[dep]
            if len(self._get_rdepends(pkgdep)) == 1:
                # pkg is the only known package that depends on pkgdep
                pkgdep_rec = self._get_recommends(pkgdep)
                LOG.debug("recommends from lonley dependency %s: %s" % (
                    pkgdep, pkgdep_rec))
                addons_rec += pkgdep_rec
                pkgdep_sug = self._get_suggests(pkgdep)
                LOG.debug("suggests from lonley dependency %s: %s" % (
                    pkgdep, pkgdep_sug))
                addons_sug += pkgdep_sug
                pkgdep_enh = self._get_renhances(pkgdep)
                LOG.debug("renhances from lonley dependency %s: %s" % (
                    pkgdep, pkgdep_enh))
                addons_sug += pkgdep_enh
        # keep the UI responsive during the scan
        context = GObject.main_context_default()
        while context.pending():
            context.iteration()
    # remove duplicates from suggests (sets are great!)
    addons_sug = list(set(addons_sug) - set(addons_rec))
    # filter out stuff we don't want
    addons_rec = filter(_addons_filter, addons_rec)
    addons_sug = filter(_addons_filter, addons_sug)
    # this is not integrated into the filter above, as it is quite
    # expensive to run this call, so we only run it if we actually have
    # addons
    if addons_rec or addons_sug:
        # now get all_deps if the package would be installed
        try:
            all_deps_if_installed = \
                self._try_install_and_get_all_deps_installed(pkg)
        except:
            # if we have broken packages, then we return no addons
            LOG.warn(
                "broken packages encountered while getting deps for %s" %
                pkgname)
            return ([], [])
        # filter out stuff we don't want
        addons_rec = filter(_addons_filter_slow, addons_rec)
        addons_sug = filter(_addons_filter_slow, addons_sug)
    return (addons_rec, addons_sug)
def _get_apt_history(self):
    """Create an AptHistory (uncached) and drain pending idle events so
    its background scan can run."""
    history = AptHistory(use_cache=False)
    ctx = GObject.main_context_default()
    while ctx.pending():
        ctx.iteration()
    return history
def _p(self):
    """Pump the main loop for roughly one second (10 x 0.1s slices)."""
    ctx = GObject.main_context_default()
    for _ in range(10):
        while ctx.pending():
            ctx.iteration()
        time.sleep(0.1)
def do_events():
    """Process every pending event of the default GLib main context."""
    from gi.repository import GObject
    ctx = GObject.main_context_default()
    while ctx.pending():
        ctx.iteration()
def get_addons(self, pkgname, ignore_installed=True):
    """ get the list of addons for the given pkgname

        The optional parameter "ignore_installed" controls if the output
        should be filtered and pkgs already installed should be ignored
        in the output (e.g. useful for testing).

        :return: a tuple of pkgnames (recommends, suggests)
    """
    logging.debug("get_addons for '%s'" % pkgname)

    def _addons_filter(addon):
        """ helper for get_addons that filters out unneeded ones """
        # cheap checks only; the expensive dependency simulation lives in
        # _addons_filter_slow below
        # we don't know about this one (perfectly legal for suggests)
        if not addon in self._cache:
            LOG.debug("not in cache %s" % addon)
            return False
        # can happen via "lonely" depends
        if addon == pkg.name:
            LOG.debug("circular %s" % addon)
            return False
        # child pkg is addon of parent pkg, not the other way around
        # (e.g. "foo-data" must not list "foo" as its own addon)
        if addon == '-'.join(pkgname.split('-')[:-1]):
            LOG.debug("child > parent %s" % addon)
            return False
        # get the pkg
        addon_pkg = self._cache[addon]
        # we don't care for essential or important (or references
        # to ourself)
        if (addon_pkg.essential or addon_pkg._pkg.important):
            LOG.debug("essential or important %s" % addon)
            return False
        # we have it in our dependencies already
        if addon in deps:
            LOG.debug("already a dependency %s" % addon)
            return False
        # it's a language-pack, language-selector should deal with it
        if self._is_language_pkg(addon):
            LOG.debug("part of language pkg rdepends %s" % addon)
            return False
        # something on the system depends on it
        rdeps = self.get_packages_removed_on_remove(addon_pkg)
        if rdeps and ignore_installed:
            LOG.debug("already has a installed rdepends %s" % addon)
            return False
        # looks good
        return True
    #----------------------------------------------------------------

    def _addons_filter_slow(addon):
        """ helper for get_addons that filters out unneeded ones """
        # NOTE: closes over all_deps_if_installed, which is assigned
        # further down just before this filter is applied
        # this addon would get installed anyway (e.g. via indirect
        # dependency) so it would be misleading to show it
        if addon in all_deps_if_installed:
            LOG.debug("would get installed automatically %s" % addon)
            return False
        return True
    #----------------------------------------------------------------

    # deb file, or pkg needing source, etc
    if (not pkgname in self._cache or
            not self._cache[pkgname].candidate):
        return ([], [])

    # initial setup
    pkg = self._cache[pkgname]

    # recommended addons
    addons_rec = self._get_recommends(pkg)
    LOG.debug("recommends: %s" % addons_rec)

    # suggested addons and renhances (reverse-enhances)
    addons_sug = self._get_suggests(pkg)
    LOG.debug("suggests: %s" % addons_sug)
    renhances = self._get_renhances(pkg)
    LOG.debug("renhances: %s" % renhances)
    addons_sug += renhances
    provides = self._get_provides(pkg)
    LOG.debug("provides: %s" % provides)
    for provide in provides:
        # resolve the virtual package to the low-level apt_pkg object
        virtual_aptpkg_pkg = self._cache._cache[provide]
        renhances = self._get_renhances_lowlevel_apt_pkg(
            virtual_aptpkg_pkg)
        LOG.debug("renhances of %s: %s" % (provide, renhances))
        addons_sug += renhances
        # keep the UI responsive while iterating the provides list
        context = GObject.main_context_default()
        while context.pending():
            context.iteration()

    # get more addons, the idea is that if a package foo-data
    # just depends on foo we want to get the info about
    # "recommends, suggests, enhances" for foo-data as well
    #
    # FIXME: find a good package where this is actually the case and
    #        replace the existing test
    #        (arduino-core -> avrdude -> avrdude-doc) with that
    # FIXME2: if it turns out we don't have good/better examples,
    #         kill it
    deps = self._get_depends(pkg)
    for dep in deps:
        if dep in self._cache:
            pkgdep = self._cache[dep]
            if len(self._get_rdepends(pkgdep)) == 1:
                # pkg is the only known package that depends on pkgdep
                pkgdep_rec = self._get_recommends(pkgdep)
                LOG.debug("recommends from lonley dependency %s: %s" % (
                        pkgdep, pkgdep_rec))
                addons_rec += pkgdep_rec
                pkgdep_sug = self._get_suggests(pkgdep)
                LOG.debug("suggests from lonley dependency %s: %s" % (
                        pkgdep, pkgdep_sug))
                addons_sug += pkgdep_sug
                pkgdep_enh = self._get_renhances(pkgdep)
                LOG.debug("renhances from lonley dependency %s: %s" % (
                        pkgdep, pkgdep_enh))
                addons_sug += pkgdep_enh
        # keep the UI responsive while walking the dependency list
        context = GObject.main_context_default()
        while context.pending():
            context.iteration()

    # remove duplicates from suggests (sets are great!)
    addons_sug = list(set(addons_sug) - set(addons_rec))

    # filter out stuff we don't want
    addons_rec = filter(_addons_filter, addons_rec)
    addons_sug = filter(_addons_filter, addons_sug)

    # this is not integrated into the filter above, as it is quite
    # expensive to run this call, so we only run it if we actually have
    # addons
    if addons_rec or addons_sug:
        # now get all_deps if the package would be installed
        try:
            all_deps_if_installed = \
                self._try_install_and_get_all_deps_installed(pkg)
        # NOTE(review): bare except is very broad (catches e.g.
        # KeyboardInterrupt); consider narrowing to Exception
        except:
            # if we have broken packages, then we return no addons
            LOG.warn(
                "broken packages encountered while getting deps for %s" %
                pkgname)
            return ([], [])
        # filter out stuff we don't want
        addons_rec = filter(_addons_filter_slow, addons_rec)
        addons_sug = filter(_addons_filter_slow, addons_sug)

    return (addons_rec, addons_sug)
def do_events():
    """Flush all pending events on the default GLib main context."""
    default_ctx = GObject.main_context_default()
    while default_ctx.pending():
        default_ctx.iteration()
def __init__(self, xid=0):
    """Initialise helper state.

    :param xid: presumably a parent window id (0 = none) — TODO confirm
    """
    # OAuth credentials are acquired later; start without any.
    self.oauth = None
    self.xid = xid
    # Private main loop bound to the default GLib context.
    self.loop = GObject.MainLoop(GObject.main_context_default())
def setUp(self):
    """Per-test fixture: reset error flag, build a main loop, and point
    software-center at the in-tree data directory."""
    self.error = False
    self.loop = GObject.MainLoop(GObject.main_context_default())
    # Use the source tree's data dir instead of the installed one.
    softwarecenter.paths.datadir = "../data"
    # Make the package importable for subprocesses spawned by tests.
    os.environ["PYTHONPATH"] = os.path.abspath("..")
    # NOTE(review): this is the tail of a callback defined above this
    # chunk — it decrements the count of still-running transactions.
    global active
    active -= 1
    return True


def run(t):
    """Run transaction t, through AptProgressDialog when WITH_GUI is set."""
    if WITH_GUI:
        dia = AptProgressDialog(t)
        dia.run()
        dia.destroy()
    else:
        t.run()


if __name__ == "__main__":
    #logging.basicConfig(level=logging.DEBUG)
    context = GObject.main_context_default()
    c = AptClient()
    # Stress test: repeatedly install two packages and remove them again.
    for i in range(100):
        print "inst: 3dchess"
        t = c.install_packages(["3dchess"], exit_handler=exit_handler)
        run(t)
        active += 1
        print "inst: 2vcard"
        t = c.install_packages(["2vcard"], exit_handler=exit_handler)
        run(t)
        active += 1
        print "rm: 3dchess 2vcard"
        t = c.remove_packages(["3dchess","2vcard"], exit_handler=exit_handler)
def __init__(self, xid=0):
    """Set up initial state; no OAuth token is available yet.

    :param xid: presumably the parent X window id (0 = no parent) —
        verify against callers
    """
    self.xid = xid
    self.oauth = None
    self.loop = GObject.MainLoop(GObject.main_context_default())