def refresh(self, progress_callback=lambda n, extra: None, clear_cache=True): """Perform network operations to find available updates. The `progress_callback` is invoked with numbers between 0 and 1 or `None` as the network queries complete. The last callback will be `progress_callback(1)`. Passing `None` to `progress_callback` requests "pulse mode" from the progress bar. """ global _parse_cache if clear_cache: _parse_cache = {} # clear microformat parse cache urlrange.urlcleanup() # clean url cache self._cancel = False self._invalidate() # don't notify for the following; we'll notify at the end when we # know what the new values ought to be. self._saw_network_success = False self._network_failures = [] # bookkeeping progress_callback(None, None) self.clear() # find all activities already installed. progress_callback( None, _('Looking for local activities and content...')) # pulse activities = actinfo.get_activities() + actinfo.get_libraries() # enumerate all group urls progress_callback(None, _('Loading groups...')) group_urls = actinfo.get_activity_group_urls() # now we've got enough information to allow us to compute a # reasonable completion percentage. steps_total = [len(activities) + len(group_urls) + 3] steps_count = [0] # box this to allow update from mkprog. def mkprog(msg=None): """Helper function to do progress update.""" steps_count[0] += 1 progress_callback(steps_count[0] / steps_total[0], msg) mkprog(_('Loading groups...')) # okay, first load up any group definitions; these take precedence # if present. groups = [] def group_parser(f, url): name, desc, groups = microformat.parse_html(f.read(), url) if len(groups) > 0 or (name is not None and desc is not None): return name, desc, groups return None # hmm, not a successful parse. for gurl in group_urls: mkprog(_('Fetching %s...') % gurl) if self._cancel: break # bail! 
gdata = actinfo.retrieve_first_variant(gurl, group_parser, timeout=HTTP_TIMEOUT) if gdata is not None: gname, gdesc, gactmap = gdata groups.append((gname, gdesc, gurl, gactmap)) self._saw_network_success = True else: # headers even for failed groups. groups.append((None, gurl, gurl, {})) self._network_failures.append(gurl) # now start filling up the liststore, keeping a map from activity id # to liststore path row_map = {} group_num = 0 for gname, gdesc, gurl, gactmap in groups: # add group header. if gname is None: gname = _('Activity Group') self._append(IS_HEADER=True, UPDATE_URL=gurl, GROUP_NUM=group_num, DESCRIPTION_BIG=gname, DESCRIPTION_SMALL=gdesc) # now add entries for all activities in the group, whether # currently installed or not. for act_id, version_list in sorted(gactmap.items()): version, url = microformat.only_best_update(version_list) if act_id not in row_map: # temporary description in case user cancels the refresh tmp_desc = act_id.replace('sugar-is-lame', 'lame-is-the-new-cool') row_map[act_id] = self._append(ACTIVITY_ID=act_id, GROUP_NUM=group_num, UPDATE_EXISTS=True, UPDATE_URL=url, UPDATE_VERSION=str(version), DESCRIPTION_BIG=tmp_desc) steps_total[0] += 1 # new activity? else: # allow for a later version in a different group row = self[row_map[act_id]] if NormalizedVersion(version) > \ NormalizedVersion(row[UPDATE_VERSION]): row[UPDATE_URL] = url # XXX: deal with pinned updates. group_num += 1 # add in information from local activities. self._append(IS_HEADER=True, GROUP_NUM=group_num, DESCRIPTION_BIG=_('Local activities')) for act in activities: act_id = act.get_bundle_id() if act_id not in row_map: row_map[act_id] = self._append(ACTIVITY_ID=act_id, GROUP_NUM=group_num, UPDATE_EXISTS=False) else: steps_total[0] -= 1 # correct double-counting. 
# update icon, and bundle row = self[row_map[act_id]] row[ACTIVITY_BUNDLE] = act row[DESCRIPTION_BIG] = act.get_name() if not self._skip_icons: try: row[ACTIVITY_ICON] = _svg2pixbuf(act.get_icon_data()) except IOError: # dlo trac #8149: don't kill updater if existing icon # bundle is malformed. pass group_num += 1 # now do extra network traffic to look for actual updates. def refresh_existing(row): """Look for updates to an existing activity.""" act = row[ACTIVITY_BUNDLE] oldver = 0 if _DEBUG_MAKE_ALL_OLD else act.get_activity_version() size = 0 def net_good(url_): self._saw_network_success = True def net_bad(url): self._network_failures.append(url) # activity group entries have UPDATE_EXISTS=True # for any activities not present in the group, try their update_url # (if any) for new updates # note the behaviour here: if the XS (which hosts activity groups) # has an entry for the activity, then we trust that it is the # latest and we don't go online to check. # we only go online for activities which the XS does not know about # the purpose of this is to reduce the high latency of having # to check multiple update_urls on a slow connection. if row[UPDATE_EXISTS]: # trust what the XS told us newver, newurl = row[UPDATE_VERSION], row[UPDATE_URL] else: # hit the internet for updates oldver, newver, newurl, size = \ _retrieve_update_version(act, net_good, net_bad) # make sure that the version we found is actually newer... if newver is not None and NormalizedVersion(newver) <= \ NormalizedVersion(act.get_activity_version()): newver = None elif row[UPDATE_EXISTS]: # since we trusted the activity group page above, we don't # know the size of this bundle. but if we're about to offer it # as an update then we should look that up now, with an HTTP # request. 
# (by avoiding a load of HTTP requests on activity versions that # we already have, we greatly increase the speed and usability # of this updater on high-latency connections) size = urlrange.urlopen(row[UPDATE_URL], timeout=HTTP_TIMEOUT)\ .length() row[UPDATE_EXISTS] = (newver is not None) row[UPDATE_URL] = newurl row[UPDATE_SIZE] = size if newver is None: description = _('At version %s') % oldver else: description = \ _('From version %(old)s to %(new)s (Size: %(size)s)') % \ { 'old':oldver, 'new':newver, 'size':_humanize_size(size) } row[UPDATE_SELECTED] = True row[DESCRIPTION_SMALL] = description def refresh_new(row): """Look for updates to a new activity in the group.""" uo = urlrange.urlopen(row[UPDATE_URL], timeout=HTTP_TIMEOUT) row[UPDATE_SIZE] = uo.length() zf = zipfile.ZipFile(uo) # grab data from activity.info file activity_base = actutils.bundle_base_from_zipfile(zf) try: zf.getinfo('%s/activity/activity.info' % activity_base) is_activity = True except KeyError: try: zf.getinfo('%s/library/library.info' % activity_base) is_activity = False except: raise RuntimeError("not activity or library") if is_activity: cp = actutils.activity_info_from_zipfile(zf) SECTION = 'Activity' else: cp = actutils.library_info_from_zipfile(zf) SECTION = 'Library' act_id = None for fieldname in ('bundle_id', 'service_name', 'global_name'): if cp.has_option(SECTION, fieldname): act_id = cp.get(SECTION, fieldname) break if not act_id: raise RuntimeError("bundle_id not found for %s" % row[UPDATE_URL]) name = act_id if cp.has_option(SECTION, 'name'): name = cp.get(SECTION, 'name') # okay, try to get an appropriately translated name. 
if is_activity: lcp = actutils.locale_activity_info_from_zipfile(zf) if lcp is not None: name = lcp.get(SECTION, 'name') else: s = actutils.locale_section_for_content_bundle(cp) if s is not None and cp.has_option(s, 'name'): name = cp.get(s, 'name') version = None for fieldname in ('activity_version', 'library_version'): if cp.has_option(SECTION, fieldname): version = cp.get(SECTION, fieldname) break if version is None: raise RuntimeError("can't find version for %s" % row[UPDATE_URL]) row[DESCRIPTION_BIG] = name row[UPDATE_SELECTED] = False row[DESCRIPTION_SMALL] = \ _('New version %(version)s (Size: %(size)s)') % \ {'version':version, 'size':_humanize_size(row[UPDATE_SIZE])} # okay, let's try to update the icon! if not self._skip_icons: if is_activity: # XXX should failures here kill the upgrade? icon_file = cp.get(SECTION, 'icon') icon_filename = '%s/activity/%s.svg' % (activity_base, icon_file) row[ACTIVITY_ICON] = _svg2pixbuf(zf.read(icon_filename)) else: row[ACTIVITY_ICON] = _svg2pixbuf( actinfo.DEFAULT_LIBRARY_ICON) # go through activities and do network traffic for row in self: if self._cancel: break # bail! if row[IS_HEADER]: continue # skip # skip journal if row[ACTIVITY_ID] == "org.laptop.JournalActivity": continue mkprog(_('Checking %s...') % row[DESCRIPTION_BIG]) try: if row[ACTIVITY_BUNDLE] is None: refresh_new(row) self._saw_network_success = True else: refresh_existing(row) except: row[UPDATE_EXISTS] = False # something wrong, can't update if row[UPDATE_URL] is not None: self._network_failures.append(row[UPDATE_URL]) # log the problem for later debugging. 
print "Failure updating", row[DESCRIPTION_BIG], \ row[DESCRIPTION_SMALL], row[UPDATE_URL] traceback.print_exc() mkprog('Sorting...') # all done # hide headers if all children are hidden sawone, last_header = False, None for row in self: if row[IS_HEADER]: if last_header is not None: last_header[UPDATE_EXISTS] = sawone sawone, last_header = False, row elif row[UPDATE_EXISTS]: sawone = True if last_header is not None: last_header[UPDATE_EXISTS] = sawone # finally, sort all rows. self._sort() mkprog() # all done # XXX: check for base os update, and add an entry here? self._is_valid = True self.notify('is-valid') self.notify('saw-network-failure') self.notify('saw-network-success')
def refresh(self, progress_callback=lambda n, extra: None, clear_cache=True):
    """Perform network operations to find available updates.

    The `progress_callback` is invoked with numbers between 0 and 1 or
    `None` as the network queries complete.  The last callback will be
    `progress_callback(1)`.  Passing `None` to `progress_callback`
    requests "pulse mode" from the progress bar.
    """
    global _parse_cache
    if clear_cache:
        _parse_cache = {} # clear microformat parse cache
        urlrange.urlcleanup() # clean url cache
    self._cancel = False
    self._invalidate()
    # don't notify for the following; we'll notify at the end when we
    # know what the new values ought to be.
    self._saw_network_success = False
    self._network_failures = []
    # bookkeeping
    progress_callback(None, None)
    self.clear()
    # find all activities already installed.
    progress_callback(None,
                      _('Looking for local activities and content...')) # pulse
    activities = actinfo.get_activities() + actinfo.get_libraries()
    # enumerate all group urls
    progress_callback(None, _('Loading groups...'))
    group_urls = actinfo.get_activity_group_urls()
    # now we've got enough information to allow us to compute a
    # reasonable completion percentage.
    # (+3: the 'Loading groups' step, the 'Sorting' step, and the final
    # mkprog() — each group url and each activity adds one step.)
    steps_total = [ len(activities) + len(group_urls) + 3 ]
    steps_count = [ 0 ] # box this to allow update from mkprog.
    def mkprog(msg=None):
        """Helper function to do progress update."""
        steps_count[0] += 1
        # NOTE(review): int/int floor-divides to 0 on Python 2 unless the
        # module has "from __future__ import division" — confirm at file
        # top, otherwise progress never advances before completion.
        progress_callback(steps_count[0]/steps_total[0], msg)
    mkprog(_('Loading groups...'))
    # okay, first load up any group definitions; these take precedence
    # if present.
    groups = []
    def group_parser(f, url):
        # Parse one group page; returns (name, desc, actmap) or None.
        name, desc, groups = microformat.parse_html(f.read(), url)
        if len(groups) > 0 or (name is not None and desc is not None):
            return name, desc, groups
        return None # hmm, not a successful parse.
    for gurl in group_urls:
        mkprog(_('Fetching %s...') % gurl)
        if self._cancel:
            break # bail!
        gdata = actinfo.retrieve_first_variant(gurl, group_parser,
                                               timeout=HTTP_TIMEOUT)
        if gdata is not None:
            gname, gdesc, gactmap = gdata
            groups.append((gname, gdesc, gurl, gactmap))
            self._saw_network_success = True
        else:
            # headers even for failed groups.
            # (the url stands in for the description so the failure is
            # visible to the user.)
            groups.append((None, gurl, gurl, {}))
            self._network_failures.append(gurl)
    # now start filling up the liststore, keeping a map from activity id
    # to liststore path
    row_map = {}
    group_num = 0
    for gname, gdesc, gurl, gactmap in groups:
        # add group header.
        if gname is None:
            gname = _('Activity Group')
        self._append(IS_HEADER=True, UPDATE_URL=gurl, GROUP_NUM=group_num,
                     DESCRIPTION_BIG=gname, DESCRIPTION_SMALL=gdesc)
        # now add entries for all activities in the group, whether
        # currently installed or not.
        for act_id, version_list in sorted(gactmap.items()):
            version, url = microformat.only_best_update(version_list)
            if act_id not in row_map:
                # temporary description in case user cancels the refresh
                tmp_desc = act_id.replace('sugar-is-lame',
                                          'lame-is-the-new-cool')
                row_map[act_id] = self._append(ACTIVITY_ID=act_id,
                                               GROUP_NUM=group_num,
                                               UPDATE_EXISTS=True,
                                               UPDATE_URL=url,
                                               UPDATE_VERSION=str(version),
                                               DESCRIPTION_BIG=tmp_desc)
                steps_total[0] += 1 # new activity?
            else:
                # allow for a later version in a different group
                row = self[row_map[act_id]]
                if NormalizedVersion(version) > \
                   NormalizedVersion(row[UPDATE_VERSION]):
                    row[UPDATE_URL] = url
        # XXX: deal with pinned updates.
        group_num += 1
    # add in information from local activities.
    self._append(IS_HEADER=True, GROUP_NUM=group_num,
                 DESCRIPTION_BIG=_('Local activities'))
    for act in activities:
        act_id = act.get_bundle_id()
        if act_id not in row_map:
            row_map[act_id] = self._append(ACTIVITY_ID=act_id,
                                           GROUP_NUM=group_num,
                                           UPDATE_EXISTS=False)
        else:
            steps_total[0] -= 1 # correct double-counting.
        # update icon, and bundle
        # (runs for both branches: the row exists either way now.)
        row = self[row_map[act_id]]
        row[ACTIVITY_BUNDLE] = act
        row[DESCRIPTION_BIG] = act.get_name()
        if not self._skip_icons:
            try:
                row[ACTIVITY_ICON] = _svg2pixbuf(act.get_icon_data())
            except IOError:
                # dlo trac #8149: don't kill updater if existing icon
                # bundle is malformed.
                pass
    group_num += 1
    # now do extra network traffic to look for actual updates.
    def refresh_existing(row):
        """Look for updates to an existing activity."""
        act = row[ACTIVITY_BUNDLE]
        oldver = 0 if _DEBUG_MAKE_ALL_OLD else act.get_activity_version()
        size = 0
        def net_good(url_):
            self._saw_network_success = True
        def net_bad(url):
            self._network_failures.append(url)
        # activity group entries have UPDATE_EXISTS=True
        # for any activities not present in the group, try their update_url
        # (if any) for new updates
        # note the behaviour here: if the XS (which hosts activity groups)
        # has an entry for the activity, then we trust that it is the
        # latest and we don't go online to check.
        # we only go online for activities which the XS does not know about
        # the purpose of this is to reduce the high latency of having
        # to check multiple update_urls on a slow connection.
        if row[UPDATE_EXISTS]:
            # trust what the XS told us
            newver, newurl = row[UPDATE_VERSION], row[UPDATE_URL]
        else:
            # hit the internet for updates
            oldver, newver, newurl, size = \
                _retrieve_update_version(act, net_good, net_bad)
        # make sure that the version we found is actually newer...
        if newver is not None and NormalizedVersion(newver) <= \
           NormalizedVersion(act.get_activity_version()):
            newver = None
        elif row[UPDATE_EXISTS]:
            # since we trusted the activity group page above, we don't
            # know the size of this bundle. but if we're about to offer it
            # as an update then we should look that up now, with an HTTP
            # request.
            # (by avoiding a load of HTTP requests on activity versions that
            # we already have, we greatly increase the speed and usability
            # of this updater on high-latency connections)
            size = urlrange.urlopen(row[UPDATE_URL], timeout=HTTP_TIMEOUT)\
                .length()
        row[UPDATE_EXISTS] = (newver is not None)
        row[UPDATE_URL] = newurl
        row[UPDATE_SIZE] = size
        if newver is None:
            description = _('At version %s') % oldver
        else:
            description = \
                _('From version %(old)s to %(new)s (Size: %(size)s)') % \
                { 'old':oldver, 'new':newver, 'size':_humanize_size(size) }
            # pre-select rows that actually have an update available.
            row[UPDATE_SELECTED] = True
        row[DESCRIPTION_SMALL] = description
    def refresh_new(row):
        """Look for updates to a new activity in the group."""
        uo = urlrange.urlopen(row[UPDATE_URL], timeout=HTTP_TIMEOUT)
        row[UPDATE_SIZE] = uo.length()
        zf = zipfile.ZipFile(uo)
        # grab data from activity.info file
        activity_base = actutils.bundle_base_from_zipfile(zf)
        try:
            zf.getinfo('%s/activity/activity.info' % activity_base)
            is_activity = True
        except KeyError:
            # getinfo raises KeyError when the member is missing; treat
            # anything without activity.info as a content library.
            is_activity = False
        if is_activity:
            cp = actutils.activity_info_from_zipfile(zf)
            SECTION = 'Activity'
        else:
            cp = actutils.library_info_from_zipfile(zf)
            SECTION = 'Library'
        # identify the bundle, trying historical field names in order.
        act_id = None
        for fieldname in ('bundle_id', 'service_name', 'global_name'):
            if cp.has_option(SECTION, fieldname):
                act_id = cp.get(SECTION, fieldname)
                break
        if not act_id:
            raise RuntimeError("bundle_id not found for %s" %
                               row[UPDATE_URL])
        name = act_id
        if cp.has_option(SECTION, 'name'):
            name = cp.get(SECTION, 'name')
        # okay, try to get an appropriately translated name.
        if is_activity:
            lcp = actutils.locale_activity_info_from_zipfile(zf)
            if lcp is not None:
                name = lcp.get(SECTION, 'name')
        else:
            s = actutils.locale_section_for_content_bundle(cp)
            if s is not None and cp.has_option(s, 'name'):
                name = cp.get(s, 'name')
        version = None
        for fieldname in ('activity_version', 'library_version'):
            if cp.has_option(SECTION, fieldname):
                version = cp.get(SECTION, fieldname)
                break
        if version is None:
            raise RuntimeError("can't find version for %s" %
                               row[UPDATE_URL])
        row[DESCRIPTION_BIG] = name
        row[DESCRIPTION_SMALL] = \
            _('New version %(version)s (Size: %(size)s)') % \
            {'version':version, 'size':_humanize_size(row[UPDATE_SIZE])}
        # okay, let's try to update the icon!
        if not self._skip_icons:
            if is_activity:
                # XXX should failures here kill the upgrade?
                icon_file = cp.get(SECTION, 'icon')
                icon_filename = '%s/activity/%s.svg'%(activity_base,
                                                      icon_file)
                row[ACTIVITY_ICON] = _svg2pixbuf(zf.read(icon_filename))
            else:
                row[ACTIVITY_ICON] = _svg2pixbuf(actinfo.DEFAULT_LIBRARY_ICON)
    # go through activities and do network traffic
    for row in self:
        if self._cancel:
            break # bail!
        if row[IS_HEADER]:
            continue # skip
        # skip journal
        if row[ACTIVITY_ID] == "org.laptop.JournalActivity":
            continue
        mkprog(_('Checking %s...') % row[DESCRIPTION_BIG])
        try:
            if row[ACTIVITY_BUNDLE] is None:
                refresh_new(row)
                self._saw_network_success = True
            else:
                refresh_existing(row)
        except:
            # NOTE(review): deliberate best-effort catch-all so one bad
            # row can't abort the refresh, but bare `except:` also traps
            # KeyboardInterrupt/SystemExit — consider `except Exception:`.
            row[UPDATE_EXISTS] = False # something wrong, can't update
            if row[UPDATE_URL] is not None:
                self._network_failures.append(row[UPDATE_URL])
            # log the problem for later debugging.
            print "Failure updating", row[DESCRIPTION_BIG], \
                row[DESCRIPTION_SMALL], row[UPDATE_URL]
            traceback.print_exc()
    # NOTE(review): 'Sorting...' is not wrapped in _() unlike every other
    # user-visible progress message — probable missed i18n marker.
    mkprog('Sorting...') # all done
    # hide headers if all children are hidden
    sawone, last_header = False, None
    for row in self:
        if row[IS_HEADER]:
            if last_header is not None:
                last_header[UPDATE_EXISTS] = sawone
            sawone, last_header = False, row
        elif row[UPDATE_EXISTS]:
            sawone = True
    # flush the bookkeeping for the final header group.
    if last_header is not None:
        last_header[UPDATE_EXISTS] = sawone
    # finally, sort all rows.
    self._sort()
    mkprog() # all done
    # XXX: check for base os update, and add an entry here?
    self._is_valid = True
    self.notify('is-valid')
    self.notify('saw-network-failure')
    self.notify('saw-network-success')
def __del__(self):
    """Destructor: release the memory held by the urlrange download cache
    once this object is garbage-collected."""
    # Cache cleanup is owned by the urlrange module; just delegate.
    urlrange.urlcleanup()