Example no. 1
def fetcher_init(d):
    """
    Called to initialize the fetchers once the configuration data is known.
    Calls before this must not hit the cache.
    """
    # When to drop SCM head revisions controlled by user policy
    srcrev_policy = bb.data.getVar('BB_SRCREV_POLICY', d, 1) or "clear"
    if srcrev_policy == "cache":
        logger.debug(1, "Keeping SRCREV cache due to cache policy of: %s",
                     srcrev_policy)
    elif srcrev_policy == "clear":
        logger.debug(1, "Clearing SRCREV cache due to cache policy of: %s",
                     srcrev_policy)
        revs = persist_data.persist('BB_URI_HEADREVS', d)
        try:
            bb.fetch.saved_headrevs = revs.items()
        except:
            pass
        revs.clear()
    else:
        raise FetchError("Invalid SRCREV cache policy of: %s" % srcrev_policy)

    for m in methods:
        if hasattr(m, "init"):
            m.init(d)
    def latest_revision(self, url, ud, d):
        """
        Look in the cache for the latest revision; if not present, ask the SCM.
        """
        if not hasattr(self, "_latest_revision"):
            raise ParameterError

        revs = persist_data.persist('BB_URI_HEADREVS', d)
        key = self.generate_revision_key(url, ud, d)
        try:
            return revs[key]
        except KeyError:
            revs[key] = rev = self._latest_revision(url, ud, d)
            return rev
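
The lookup above is a simple memoization pattern: try the persistent store first and fall back to the SCM only on a cache miss. Below is a minimal, self-contained sketch of that pattern; the plain dict stands in for the persist_data-backed BB_URI_HEADREVS store, and query_scm_head() is a hypothetical stand-in for the fetcher's _latest_revision() hook (neither is a BitBake API).

# Sketch of the cache-then-SCM lookup used by latest_revision(). The dict
# below stands in for the persistent BB_URI_HEADREVS store; query_scm_head()
# is a hypothetical placeholder for _latest_revision().

_headrev_cache = {}

def query_scm_head(url):
    # Placeholder for the expensive SCM query (e.g. asking git for HEAD).
    return "deadbeef"

def cached_latest_revision(url):
    key = url  # the real code derives the key via generate_revision_key()
    try:
        return _headrev_cache[key]
    except KeyError:
        _headrev_cache[key] = rev = query_scm_head(url)
        return rev

print(cached_latest_revision("git://example.com/repo.git"))  # asks the "SCM"
print(cached_latest_revision("git://example.com/repo.git"))  # served from cache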
Example no. 3
    def latest_revision(self, url, ud, d, name):
        """
        Look in the cache for the latest revision; if not present, ask the SCM.
        """
        if not hasattr(self, "_latest_revision"):
            raise ParameterError("The fetcher for this URL does not support _latest_revision", url)

        pd = persist_data.persist(d)
        revs = pd['BB_URI_HEADREVS']
        key = self.generate_revision_key(url, ud, d, name)
        rev = revs[key]
        if rev is not None:
            return str(rev)

        revs[key] = rev = self._latest_revision(url, ud, d, name)
        return rev
def fetcher_compare_revisions(d):
    """
    Compare the revisions in the persistent cache with the current values and
    return True/False on whether they've changed.
    """

    data = dict(persist_data.persist('BB_URI_HEADREVS', d).items())
    data2 = dict(bb.fetch.saved_headrevs)

    for key in data:
        if key not in data2 or data2[key] != data[key]:
            logger.debug(1, "%s changed", key)
            return True
        else:
            logger.debug(2, "%s did not change", key)
    return False
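
fetcher_compare_revisions() only needs to answer whether any head revision has moved since the snapshot taken in fetcher_init(). A hedged, self-contained sketch of that comparison follows, with plain dicts standing in for the persistent BB_URI_HEADREVS table and for bb.fetch.saved_headrevs; headrevs_changed() is an illustrative name, not a BitBake function.

# Sketch of the head-revision comparison, using plain dicts in place of the
# persistent table and the saved snapshot.

def headrevs_changed(current, saved):
    # True as soon as any URL's head revision differs from the snapshot
    # (or is missing from it), mirroring the early return above.
    for key, rev in current.items():
        if saved.get(key) != rev:
            return True
    return False

saved = {"git://example.com/repo.git": "deadbeef"}
current = {"git://example.com/repo.git": "cafebabe"}
print(headrevs_changed(current, saved))  # True: the head revision moved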
Example no. 5
    def sortable_revision(self, url, ud, d, name):
        """

        """
        if hasattr(self, "_sortable_revision"):
            return self._sortable_revision(url, ud, d)

        pd = persist_data.persist(d)
        localcounts = pd['BB_URI_LOCALCOUNT']
        key = self.generate_revision_key(url, ud, d, name)

        latest_rev = self._build_revision(url, ud, d, name)
        last_rev = localcounts[key + '_rev']
        uselocalcount = bb.data.getVar("BB_LOCALCOUNT_OVERRIDE", d, True) or False
        count = None
        if uselocalcount:
            count = FetchMethod.localcount_internal_helper(ud, d, name)
        if count is None:
            count = localcounts[key + '_count'] or "0"

        if last_rev == latest_rev:
            return str(count + "+" + latest_rev)

        buildindex_provided = hasattr(self, "_sortable_buildindex")
        if buildindex_provided:
            count = self._sortable_buildindex(url, ud, d, latest_rev)

        if count is None:
            count = "0"
        elif uselocalcount or buildindex_provided:
            count = str(count)
        else:
            count = str(int(count) + 1)

        localcounts[key + '_rev'] = latest_rev
        localcounts[key + '_count'] = count

        return str(count + "+" + latest_rev)
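
The value returned above always has the form "<count>+<rev>": a monotonically increasing local counter glued onto the (not necessarily sortable) SCM revision, so version comparisons still order builds correctly. Below is a minimal sketch of that counter bookkeeping under simplifying assumptions: a plain dict stands in for the persistent BB_URI_LOCALCOUNT table, and the BB_LOCALCOUNT_OVERRIDE and _sortable_buildindex branches are omitted; sortable() is an illustrative name.

# Sketch of the "<count>+<rev>" bookkeeping in sortable_revision(), with a
# plain dict standing in for the persistent BB_URI_LOCALCOUNT table.

_localcounts = {}

def sortable(key, latest_rev):
    last_rev = _localcounts.get(key + '_rev')
    count = _localcounts.get(key + '_count') or "0"

    if last_rev == latest_rev:
        # Same revision as last time: reuse the stored count unchanged.
        return count + "+" + latest_rev

    # The revision moved: bump the counter so new builds sort after old ones.
    count = str(int(count) + 1)
    _localcounts[key + '_rev'] = latest_rev
    _localcounts[key + '_count'] = count
    return count + "+" + latest_rev

print(sortable("repo", "deadbeef"))  # 1+deadbeef
print(sortable("repo", "deadbeef"))  # 1+deadbeef (revision unchanged)
print(sortable("repo", "cafebabe"))  # 2+cafebabe (revision moved)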
Example no. 6
    def sortable_revision(self, url, ud, d):
        """

        """
        if hasattr(self, "_sortable_revision"):
            return self._sortable_revision(url, ud, d)

        localcounts = persist_data.persist('BB_URI_LOCALCOUNT', d)
        key = self.generate_revision_key(url, ud, d)

        latest_rev = self._build_revision(url, ud, d)
        last_rev = localcounts[key + '_rev']
        uselocalcount = bb.data.getVar("BB_LOCALCOUNT_OVERRIDE", d,
                                       True) or False
        count = None
        if uselocalcount:
            count = Fetch.localcount_internal_helper(ud, d)
        if count is None:
            count = localcounts[key + '_count']

        if last_rev == latest_rev:
            return str(count + "+" + latest_rev)

        buildindex_provided = hasattr(self, "_sortable_buildindex")
        if buildindex_provided:
            count = self._sortable_buildindex(url, ud, d, latest_rev)

        if count is None:
            count = "0"
        elif uselocalcount or buildindex_provided:
            count = str(count)
        else:
            count = str(int(count) + 1)

        localcounts[key + '_rev'] = latest_rev
        localcounts[key + '_count'] = count

        return str(count + "+" + latest_rev)
Example no. 7
def fetcher_init(d):
    """
    Called to initialize the fetchers once the configuration data is known.
    Calls before this must not hit the cache.
    """
    pd = persist_data.persist(d)
    # When to drop SCM head revisions controlled by user policy
    srcrev_policy = bb.data.getVar('BB_SRCREV_POLICY', d, True) or "clear"
    if srcrev_policy == "cache":
        logger.debug(1, "Keeping SRCREV cache due to cache policy of: %s", srcrev_policy)
    elif srcrev_policy == "clear":
        logger.debug(1, "Clearing SRCREV cache due to cache policy of: %s", srcrev_policy)
        try:
            bb.fetch2.saved_headrevs = pd['BB_URI_HEADREVS'].items()
        except:
            pass
        del pd['BB_URI_HEADREVS']
    else:
        raise FetchError("Invalid SRCREV cache policy of: %s" % srcrev_policy)

    for m in methods:
        if hasattr(m, "init"):
            m.init(d)
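
Both fetcher_init() variants (Example no. 1 and this one) implement the same policy switch: with BB_SRCREV_POLICY = "cache" the previously recorded head revisions are kept, while with "clear" (the default) they are snapshotted into saved_headrevs and then dropped so the next lookup asks the SCM again. A hedged sketch of that decision follows, with a plain dict in place of the persistent BB_URI_HEADREVS table and a module-level variable in place of bb.fetch2.saved_headrevs; init_headrev_cache() is an illustrative name, not a BitBake function.

# Sketch of the BB_SRCREV_POLICY handling in fetcher_init(). A plain dict
# stands in for the persistent BB_URI_HEADREVS table and a module-level
# variable stands in for bb.fetch2.saved_headrevs.

saved_headrevs = {}

def init_headrev_cache(headrevs, srcrev_policy="clear"):
    global saved_headrevs
    if srcrev_policy == "cache":
        # Keep the previously recorded head revisions.
        return headrevs
    elif srcrev_policy == "clear":
        # Snapshot the old values for fetcher_compare_revisions(), then
        # drop them so the next lookup asks the SCM again.
        saved_headrevs = dict(headrevs)
        headrevs.clear()
        return headrevs
    raise ValueError("Invalid SRCREV cache policy of: %s" % srcrev_policy)

cache = {"git://example.com/repo.git": "deadbeef"}
init_headrev_cache(cache, "clear")
print(cache)            # {} - cleared
print(saved_headrevs)   # {'git://example.com/repo.git': 'deadbeef'}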