def fetcher_init(d):
    """
    Called to initialize the fetchers once the configuration data is known.
    Calls before this must not hit the cache.

    Reads BB_SRCREV_POLICY from d to decide whether cached SCM head
    revisions are kept ("cache") or dropped ("clear", the default); any
    other value is a fatal configuration error.
    """
    pd = persist_data.PersistData(d)

    # When to drop SCM head revisions is controlled by user policy.
    srcrev_policy = bb.data.getVar('BB_SRCREV_POLICY', d, 1) or "clear"
    if srcrev_policy == "cache":
        bb.msg.debug(1, bb.msg.domain.Fetcher,
                     "Keeping SRCREV cache due to cache policy of: %s" % srcrev_policy)
    elif srcrev_policy == "clear":
        bb.msg.debug(1, bb.msg.domain.Fetcher,
                     "Clearing SRCREV cache due to cache policy of: %s" % srcrev_policy)
        # Snapshot the old head revisions so fetcher_compare_revisons()
        # can later report what changed.  Best-effort: the domain may not
        # exist yet on a fresh cache, so failures are deliberately ignored
        # (narrowed from a bare except so Ctrl-C still propagates).
        try:
            bb.fetch.saved_headrevs = pd.getKeyValues("BB_URI_HEADREVS")
        except Exception:
            pass
        pd.delDomain("BB_URI_HEADREVS")
    else:
        bb.msg.fatal(bb.msg.domain.Fetcher,
                     "Invalid SRCREV cache policy of: %s" % srcrev_policy)

    # Give each registered fetch method a chance to initialise itself.
    for m in methods:
        if hasattr(m, "init"):
            m.init(d)

    # Make sure our domains exist
    pd.addDomain("BB_URI_HEADREVS")
    pd.addDomain("BB_URI_LOCALCOUNT")
def sortable_revision(self, url, ud, d):
    """
    Return a sortable revision string of the form "<count>+<rev>".

    If the fetcher implements _sortable_revision (optionally gated by a
    _want_sortable_revision predicate), that implementation is used
    directly.  Otherwise a monotonically increasing local count is kept
    in the persistent BB_URI_LOCALCOUNT domain and combined with the
    SCM revision from _build_revision.
    """
    has_want_sortable = hasattr(self, "_want_sortable_revision")
    has_sortable = hasattr(self, "_sortable_revision")

    # Prefer the fetcher's own sortable revision, honouring the optional
    # _want_sortable_revision gate when it exists.
    if not has_want_sortable and has_sortable:
        return self._sortable_revision(url, ud, d)
    elif has_want_sortable and self._want_sortable_revision(url, ud, d) and has_sortable:
        return self._sortable_revision(url, ud, d)

    pd = persist_data.PersistData(d)
    key = self.generate_revision_key(url, ud, d)

    latest_rev = self._build_revision(url, ud, d)
    last_rev = pd.getValue("BB_URI_LOCALCOUNT", key + "_rev")
    count = pd.getValue("BB_URI_LOCALCOUNT", key + "_count")

    # Revision unchanged and a count is cached: reuse it.  The count
    # guard fixes a TypeError the original raised when the "_rev" entry
    # existed but the "_count" entry was missing (partial cache); in
    # that case we fall through and rebuild the count below.
    if last_rev == latest_rev and count is not None:
        return count + "+" + latest_rev

    if count is None:
        count = "0"
    else:
        count = str(int(count) + 1)

    pd.setValue("BB_URI_LOCALCOUNT", key + "_rev", latest_rev)
    pd.setValue("BB_URI_LOCALCOUNT", key + "_count", count)

    return count + "+" + latest_rev
def latest_revision(self, url, ud, d):
    """
    Look in the cache for the latest revision, if not present ask the SCM.

    Raises ParameterError when the fetcher does not implement
    _latest_revision (i.e. it has no concept of SCM head revisions).
    """
    if not hasattr(self, "_latest_revision"):
        raise ParameterError

    pd = persist_data.PersistData(d)
    key = self.generate_revision_key(url, ud, d)

    # Cache hit: return the stored head revision without touching the SCM.
    rev = pd.getValue("BB_URI_HEADREVS", key)
    if rev is not None:
        return str(rev)

    # Cache miss: ask the SCM and remember the answer for next time.
    rev = self._latest_revision(url, ud, d)
    pd.setValue("BB_URI_HEADREVS", key, rev)
    return rev
def fetcher_compare_revisons(d):
    """
    Compare the revisions in the persistent cache with current values and
    return True/False on whether they've changed.

    Compares the saved head revisions snapshotted at init time
    (bb.fetch.saved_headrevs) against the current BB_URI_HEADREVS domain.
    """
    pd = persist_data.PersistData(d)
    data = pd.getKeyValues("BB_URI_HEADREVS")
    data2 = bb.fetch.saved_headrevs

    changed = False
    for key in data:
        if key not in data2 or data2[key] != data[key]:
            bb.msg.debug(1, bb.msg.domain.Fetcher, "%s changed" % key)
            # Keep scanning so every changed key gets logged; the original
            # returned on the first hit, leaving its `changed` flag dead.
            # The return value (any key changed?) is unaffected.
            changed = True
        else:
            bb.msg.debug(2, bb.msg.domain.Fetcher, "%s did not change" % key)
    return changed
def sortable_revision(self, url, ud, d):
    """
    Return a sortable revision identifier of the form "<count>+<rev>",
    or delegate entirely to a fetcher-provided _sortable_revision.

    The count comes from one of three sources, in priority order:
    a BB_LOCALCOUNT_OVERRIDE-driven helper, a fetcher _sortable_buildindex
    hook, or a persisted per-URI counter in BB_URI_LOCALCOUNT.
    """
    # Fetcher provides its own sortable revision; use it directly.
    if hasattr(self, "_sortable_revision"):
        return self._sortable_revision(url, ud, d)

    pd = persist_data.PersistData(d)
    key = self.generate_revision_key(url, ud, d)

    latest_rev = self._build_revision(url, ud, d)
    last_rev = pd.getValue("BB_URI_LOCALCOUNT", key + "_rev")
    # BB_LOCALCOUNT_OVERRIDE lets the metadata supply the count itself.
    uselocalcount = bb.data.getVar("BB_LOCALCOUNT_OVERRIDE", d, True) or False
    count = None
    if uselocalcount:
        count = Fetch.localcount_internal_helper(ud, d)
    if count is None:
        # Fall back to the counter persisted on a previous run (if any).
        count = pd.getValue("BB_URI_LOCALCOUNT", key + "_count")

    # Revision unchanged since last run: reuse the stored count as-is.
    # NOTE(review): if count is None here (e.g. "_rev" persisted without
    # "_count") this concatenation raises TypeError -- confirm whether
    # that state can actually occur.
    if last_rev == latest_rev:
        return str(count + "+" + latest_rev)

    buildindex_provided = hasattr(self, "_sortable_buildindex")
    if buildindex_provided:
        # Fetcher can map the revision to a build index directly;
        # this overrides any count computed above.
        count = self._sortable_buildindex(url, ud, d, latest_rev)

    if count is None:
        count = "0"
    elif uselocalcount or buildindex_provided:
        # Externally supplied counts are used verbatim, not incremented.
        count = str(count)
    else:
        # Our own persisted counter: bump it for the new revision.
        count = str(int(count) + 1)

    pd.setValue("BB_URI_LOCALCOUNT", key + "_rev", latest_rev)
    pd.setValue("BB_URI_LOCALCOUNT", key + "_count", count)

    return str(count + "+" + latest_rev)