Ejemplo n.º 1
0
        def fetch_uri(uri, ud, d):
            """Fetch one URI with the configured wget-style command.

            Reads `checkonly` from the enclosing scope: when true, only the
            CHECKCOMMAND probe is run and no error is raised for a missing file.
            Raises FetchError when a real fetch reports success but the local
            file does not exist.
            """
            if checkonly:
                fetchcmd = data.getVar("CHECKCOMMAND", d, True)
            elif os.path.exists(ud.localpath):
                # file exists, but we didnt complete it.. trying again..
                fetchcmd = data.getVar("RESUMECOMMAND", d, True)
            else:
                fetchcmd = data.getVar("FETCHCOMMAND", d, True)

            # Strip URL parameters (";key=value") before substitution.
            uri = uri.split(";")[0]
            uri_decoded = list(decodeurl(uri))
            uri_type = uri_decoded[0]
            uri_host = uri_decoded[1]

            fetchcmd = fetchcmd.replace("${URI}", uri.split(";")[0])
            fetchcmd = fetchcmd.replace("${FILE}", ud.basename)
            if not checkonly:
                logger.info("fetch " + uri)
                logger.debug(2, "executing " + fetchcmd)
            bb.fetch2.check_network_access(d, fetchcmd)
            runfetchcmd(fetchcmd, d, quiet=checkonly)

            # Sanity check since wget can pretend it succeed when it didn't
            # Also, this used to happen if sourceforge sent us to the mirror page
            if not os.path.exists(ud.localpath) and not checkonly:
                raise FetchError("The fetch command returned success for url %s but %s doesn't exist?!" % (uri, ud.localpath), uri)
Ejemplo n.º 2
0
def stamp_is_current(task, d, checkdeps = 1):
    """Check status of a given task's stamp. returns 0 if it is not current and needs updating.

    When checkdeps is non-zero, every dependency reachable through the
    '_task_graph' digraph must also have a stamp no newer than this task's.
    """
    task_graph = data.getVar('_task_graph', d)
    if not task_graph:
        task_graph = bb.digraph()
        data.setVar('_task_graph', task_graph, d)
    stamp = data.getVar('STAMP', d)
    if not stamp:
        return 0
    stampfile = "%s.%s" % (data.expand(stamp, d), task)
    if not os.access(stampfile, os.F_OK):
        return 0

    # checkdeps == 0: only existence matters (also used by the recursive
    # call inside checkStamp below to avoid re-walking the graph).
    if checkdeps == 0:
        return 1

    import stat
    tasktime = os.stat(stampfile)[stat.ST_MTIME]

    _deps = []
    def checkStamp(graph, task):
        # check for existance
        if data.getVarFlag(task, 'nostamp', d):
            return 1

        if not stamp_is_current(task, d, 0):
            return 0

        # A dependency stamp newer than ours means this task is stale.
        depfile = "%s.%s" % (data.expand(stamp, d), task)
        deptime = os.stat(depfile)[stat.ST_MTIME]
        if deptime > tasktime:
            return 0
        return 1

    return task_graph.walkdown(task, checkStamp)
Ejemplo n.º 3
0
def exec_func(func, d, dirs = None):
    """Execute a BB 'function'.

    Looks up the function body in the datastore, creates its 'dirs',
    changes into the last one (or ${B}), then dispatches to the python or
    shell executor.

    func -- name of the metadata function
    d    -- the data store
    dirs -- optional list of directories to create; last one becomes cwd
    """
    body = data.getVar(func, d)
    if not body:
        return

    if not dirs:
        dirs = (data.getVarFlag(func, 'dirs', d) or "").split()
    for adir in dirs:
        adir = data.expand(adir, d)
        mkdirhier(adir)

    # Run in the last listed directory, or ${B} when none were given.
    if len(dirs) > 0:
        adir = dirs[-1]
    else:
        adir = data.getVar('B', d, 1)

    adir = data.expand(adir, d)

    try:
        prevdir = os.getcwd()
    except OSError:
        prevdir = data.expand('${TOPDIR}', d)
    if adir and os.access(adir, os.F_OK):
        os.chdir(adir)

    try:
        if data.getVarFlag(func, "python", d):
            exec_func_python(func, d)
        else:
            exec_func_shell(func, d)
    finally:
        # BUGFIX: restore the previous cwd even when the executed function
        # raises; the original skipped this chdir on exceptions, leaking the
        # directory change to the caller.
        os.chdir(prevdir)
Ejemplo n.º 4
0
        def fetch_uri(uri, ud, d):
            """Fetch a single URI; return True on success, False on failure."""
            # Resume a partial download when the target file already exists.
            if os.path.exists(ud.localpath):
                cmd = data.getVar("RESUMECOMMAND", d, 1)
            else:
                cmd = data.getVar("FETCHCOMMAND", d, 1)

            bb.msg.note(1, bb.msg.domain.Fetcher, "fetch " + uri)
            cmd = cmd.replace("${URI}", uri).replace("${FILE}", ud.basename)
            bb.msg.debug(2, bb.msg.domain.Fetcher, "executing " + cmd)
            if os.system(cmd) != 0:
                return False

            # Sanity check since wget can pretend it succeed when it didn't
            # Also, this used to happen if sourceforge sent us to the mirror page
            if os.path.exists(ud.localpath):
                return True

            bb.msg.debug(
                2,
                bb.msg.domain.Fetcher,
                "The fetch command for %s returned success but %s doesn't exist?..." % (uri, ud.localpath),
            )
            return False
Ejemplo n.º 5
0
    def try_mirror(d, tarfn):
        """
        Try to use a mirrored version of the sources. We do this
        to avoid massive loads on foreign cvs and svn servers.
        This method will be used by the different fetcher
        implementations.

        d Is a bb.data instance
        tarfn is the name of the tarball

        Returns True when the tarball is already present or was fetched
        from a stash location, False otherwise.
        """
        tarpath = os.path.join(data.getVar("DL_DIR", d, 1), tarfn)
        if os.access(tarpath, os.R_OK):
            bb.msg.debug(1, bb.msg.domain.Fetcher, "%s already exists, skipping checkout." % tarfn)
            return True

        pn = data.getVar('PN', d, True)
        # BUGFIX: default to an empty list. The original initialized this to
        # None, so when PN was unset the loop below raised TypeError instead
        # of simply returning False.
        src_tarball_stash = []
        if pn:
            src_tarball_stash = (data.getVar('SRC_TARBALL_STASH_%s' % pn, d, True) or data.getVar('CVS_TARBALL_STASH_%s' % pn, d, True) or data.getVar('SRC_TARBALL_STASH', d, True) or data.getVar('CVS_TARBALL_STASH', d, True) or "").split()

        # Try each stash location in order until one fetch succeeds.
        for stash in src_tarball_stash:
            fetchcmd = data.getVar("FETCHCOMMAND_mirror", d, True) or data.getVar("FETCHCOMMAND_wget", d, True)
            uri = stash + tarfn
            bb.msg.note(1, bb.msg.domain.Fetcher, "fetch " + uri)
            fetchcmd = fetchcmd.replace("${URI}", uri)
            ret = os.system(fetchcmd)
            if ret == 0:
                bb.msg.note(1, bb.msg.domain.Fetcher, "Fetched %s from tarball stash, skipping checkout" % tarfn)
                return True
        return False
Ejemplo n.º 6
0
    def download(self, loc, ud, d):
        """
        do fetch

        Syncs a gclient-managed depot under DEPOTDIR (default DL_DIR/depot)
        and packs the result into DL_DIR/<localfile> as a tarball.
        """
        # if the package has been downloaded, just return
        if os.access(os.path.join(data.getVar("DL_DIR", d, True), ud.localfile), os.R_OK):
            logger.debug(1, "%s already exists (or was stashed). Skipping gclient sync.", ud.localpath)
            return

        depot_dir = data.getVar("DEPOTDIR", d, True) or os.path.join(data.getVar("DL_DIR", d, True), "depot")
        sync_dir = os.path.join(depot_dir, ud.packname)

        bb.utils.mkdirhier(sync_dir)
        os.chdir(sync_dir)

        if not os.path.exists(os.path.join(sync_dir, ".gclient")):
            # BUGFIX: log message said "htttp://" while the command below uses
            # "http://"; make the message match the actual URL.
            logger.info('This is the first time to sync this depot, config it as http://%s%s'
                    % (ud.host, ud.path))
            runfetchcmd('gclient config http://%s%s' % (ud.host, ud.path), d)

        logger.info('Start to sync source code..')
        # NOTE(review): gclient's documented subcommand for this is "sync",
        # not "fetch" -- confirm "gclient fetch" is valid for the gclient
        # version in use before changing it.
        runfetchcmd('gclient fetch --jobs %s' % ud.njobs, d)

        logger.info('Creating tarball %s.' % ud.localfile)
        # Exclude SCM metadata so the cached tarball holds only sources.
        runfetchcmd('tar --exclude .svn --exclude .git -czf %s ./' %
                os.path.join(data.getVar("DL_DIR", d, True), ud.localfile), d)
Ejemplo n.º 7
0
    def download(self, ud, d):
        """Fetch url via `repo init` + `repo sync`, then cache the checkout
        as a tarball in DL_DIR."""

        if os.access(os.path.join(data.getVar("DL_DIR", d, True), ud.localfile), os.R_OK):
            logger.debug(1, "%s already exists (or was stashed). Skipping repo init / sync.", ud.localpath)
            return

        # Checkout directory is keyed on host+path plus the manifest name.
        gitsrcname = "%s%s" % (ud.host, ud.path.replace("/", "."))
        repodir = data.getVar("REPODIR", d, True) or os.path.join(data.getVar("DL_DIR", d, True), "repo")
        codir = os.path.join(repodir, gitsrcname, ud.manifest)

        if ud.user:
            username = ud.user + "@"
        else:
            username = ""

        bb.utils.mkdirhier(os.path.join(codir, "repo"))
        os.chdir(os.path.join(codir, "repo"))
        if not os.path.exists(os.path.join(codir, "repo", ".repo")):
            bb.fetch2.check_network_access(d, "repo init -m %s -b %s -u %s://%s%s%s" % (ud.manifest, ud.branch, ud.proto, username, ud.host, ud.path), ud.url)
            runfetchcmd("repo init -m %s -b %s -u %s://%s%s%s" % (ud.manifest, ud.branch, ud.proto, username, ud.host, ud.path), d)

        bb.fetch2.check_network_access(d, "repo sync %s" % ud.url, ud.url)
        runfetchcmd("repo sync", d)
        os.chdir(codir)

        # scmdata=keep retains the .repo/.git metadata in the cached tarball.
        scmdata = ud.parm.get("scmdata", "")
        if scmdata == "keep":
            tar_flags = ""
        else:
            tar_flags = "--exclude='.repo' --exclude='.git'"

        # Create a cache
        runfetchcmd("tar %s -czf %s %s" % (tar_flags, ud.localpath, os.path.join(".", "*") ), d)
Ejemplo n.º 8
0
 def localpaths(self, urldata, d):
     """
     Return the local filename of a given url assuming a successful fetch.

     Returns the list of every path that was searched; the last entry is
     the chosen candidate.
     """
     searched = []
     path = urldata.decodedurl
     newpath = path
     if path[0] == "/":
         # Absolute paths are used as-is, no search performed.
         return [path]
     filespath = data.getVar('FILESPATH', d, True)
     if filespath:
         logger.debug(2, "Searching for %s in paths:\n    %s" % (path, "\n    ".join(filespath.split(":"))))
         newpath, hist = bb.utils.which(filespath, path, history=True)
         searched.extend(hist)
     if not newpath:
         filesdir = data.getVar('FILESDIR', d, True)
         if filesdir:
             logger.debug(2, "Searching for %s in path: %s" % (path, filesdir))
             newpath = os.path.join(filesdir, path)
             searched.append(newpath)
     if (not newpath or not os.path.exists(newpath)) and path.find("*") != -1:
         # For expressions using '*', best we can do is take the first directory in FILESPATH that exists
         # NOTE(review): filespath may still be None here when FILESPATH is
         # unset -- confirm bb.utils.which tolerates that.
         newpath, hist = bb.utils.which(filespath, ".", history=True)
         searched.extend(hist)
         logger.debug(2, "Searching for %s in path: %s" % (path, newpath))
         return searched
     if not os.path.exists(newpath):
         # Fall back to DL_DIR; create the parent so a later fetch can write it.
         dldirfile = os.path.join(d.getVar("DL_DIR", True), path)
         logger.debug(2, "Defaulting to %s for %s" % (dldirfile, path))
         bb.utils.mkdirhier(os.path.dirname(dldirfile))
         searched.append(dldirfile)
         return searched
     return searched
Ejemplo n.º 9
0
    def __init__(self, data):
        """Initialise signature-generator state from the datastore `data`."""
        # Per-task hash bookkeeping.
        self.basehash = {}
        self.taskhash = {}
        self.unihash = {}
        self.taskdeps = {}
        self.runtaskdeps = {}
        self.file_checksum_values = {}
        self.taints = {}
        self.gendeps = {}
        self.lookupcache = {}
        self.setscenetasks = set()
        # Variables excluded from base hashes (space-separated config value).
        self.basehash_ignore_vars = set((data.getVar("BB_BASEHASH_IGNORE_VARS")
                                         or "").split())
        self.taskhash_ignore_tasks = None
        self.init_rundepcheck(data)
        # Optional persistent cache of file checksums.
        checksum_cache_file = data.getVar("BB_HASH_CHECKSUM_CACHE_FILE")
        if checksum_cache_file:
            self.checksum_cache = FileChecksumCache()
            self.checksum_cache.init_cache(data, checksum_cache_file)
        else:
            self.checksum_cache = None

        self.unihash_cache = bb.cache.SimpleCache("3")
        self.unitaskhashes = self.unihash_cache.init_cache(
            data, "bb_unihashes.dat", {})
        # SCM bookkeeping directories skipped when checksumming local dirs.
        self.localdirsexclude = (
            data.getVar("BB_SIGNATURE_LOCAL_DIRS_EXCLUDE")
            or "CVS .bzr .git .hg .osc .p4 .repo .svn").split()
        self.tidtopn = {}
Ejemplo n.º 10
0
 def localpath(self, url, urldata, d):
     """
     Return the local filename of a given url assuming a successful fetch.

     Relative paths are searched through FILESPATH, then FILESDIR, with a
     wildcard fallback and finally a DL_DIR default.
     """
     path = urldata.decodedurl
     newpath = path
     if path[0] != "/":
         filespath = data.getVar('FILESPATH', d, True)
         if filespath:
             logger.debug(
                 2, "Searching for %s in paths:    \n%s" %
                 (path, "\n    ".join(filespath.split(":"))))
             newpath = bb.utils.which(filespath, path)
         if not newpath:
             filesdir = data.getVar('FILESDIR', d, True)
             if filesdir:
                 logger.debug(
                     2, "Searching for %s in path: %s" % (path, filesdir))
                 newpath = os.path.join(filesdir, path)
         # BUGFIX: the original condition was
         #     not newpath or not os.path.exists(newpath) and path.find("*") != -1
         # where `and` binds tighter than `or`, so any unresolved path took
         # the wildcard branch even without a "*".  Parenthesized to match
         # the intended (and sibling implementation's) logic.
         if (not newpath or not os.path.exists(newpath)) and path.find("*") != -1:
             # For expressions using '*', best we can do is take the first directory in FILESPATH that exists
             newpath = bb.utils.which(filespath, ".")
             logger.debug(2,
                          "Searching for %s in path: %s" % (path, newpath))
             return newpath
         if not os.path.exists(newpath):
             # Fall back to DL_DIR; create the parent so a fetch can write it.
             dldirfile = os.path.join(d.getVar("DL_DIR", True), path)
             logger.debug(2, "Defaulting to %s for %s" % (dldirfile, path))
             bb.utils.mkdirhier(os.path.dirname(dldirfile))
             return dldirfile
     return newpath
Ejemplo n.º 11
0
    def getcset(d, depot, host, user, pswd, parm):
        """Return the newest Perforce changeset number for `depot`, or -1.

        Credentials and host are passed as p4 command-line options; an
        explicit cset= URL parameter short-circuits the lookup.
        """
        p4opt = ""
        if "cset" in parm:
            return parm["cset"]
        if user:
            p4opt += " -u %s" % (user)
        if pswd:
            p4opt += " -P %s" % (pswd)
        if host:
            p4opt += " -p %s" % (host)

        # Pin the depot spec to a revision, label, or P4DATE when given.
        p4date = data.getVar("P4DATE", d, 1)
        if "revision" in parm:
            depot += "#%s" % (parm["revision"])
        elif "label" in parm:
            depot += "@%s" % (parm["label"])
        elif p4date:
            depot += "@%s" % (p4date)

        p4cmd = data.getVar('FETCHCOMMAND_p4', d, 1)
        bb.msg.debug(1, bb.msg.domain.Fetcher,
                     "Running %s%s changes -m 1 %s" % (p4cmd, p4opt, depot))
        p4file = os.popen("%s%s changes -m 1 %s" % (p4cmd, p4opt, depot))
        cset = p4file.readline().strip()
        bb.msg.debug(1, bb.msg.domain.Fetcher, "READ %s" % (cset))
        if not cset:
            return -1

        # "p4 changes" output is "Change <num> on ..."; field 1 is the number.
        return cset.split(' ')[1]
Ejemplo n.º 12
0
    def getcset(d, depot,host,user,pswd,parm):
        """Return the newest Perforce changeset number for `depot`, or -1.

        Credentials/host are exported through the datastore (P4USER/P4PASSWD/
        P4PORT) for the p4 command; an explicit cset= parameter wins.
        """
        if "cset" in parm:
            return parm["cset"];
        if user:
            data.setVar('P4USER', user, d)
        if pswd:
            data.setVar('P4PASSWD', pswd, d)
        if host:
            data.setVar('P4PORT', host, d)

        # Pin the depot spec to a revision, label, or P4DATE when given.
        p4date = data.getVar("P4DATE", d, 1)
        if "revision" in parm:
            depot += "#%s" % (parm["revision"])
        elif "label" in parm:
            depot += "@%s" % (parm["label"])
        elif p4date:
            depot += "@%s" % (p4date)

        p4cmd = data.getVar('FETCHCOMMAND_p4', d, 1)
        bb.msg.debug(1, bb.msg.domain.Fetcher, "Running %s changes -m 1 %s" % (p4cmd, depot))
        p4file = os.popen("%s changes -m 1 %s" % (p4cmd,depot))
        cset = p4file.readline().strip()
        bb.msg.debug(1, bb.msg.domain.Fetcher, "READ %s" % (cset))
        if not cset:
            return -1

        # "p4 changes" output is "Change <num> on ..."; field 1 is the number.
        return cset.split(' ')[1]
Ejemplo n.º 13
0
Archivo: cache.py Proyecto: ssssam/poky
    def load_bbfile(bbfile, appends, config):
        """
        Load and parse one .bb build file
        Return the data and whether parsing resulted in the file being skipped

        bbfile  -- path of the recipe to parse
        appends -- list of .bbappend files to apply (stored in __BBAPPEND)
        config  -- configuration datastore used to seed the new one
        """
        chdir_back = False

        from bb import data, parse

        # expand tmpdir to include this topdir
        data.setVar('TMPDIR', data.getVar('TMPDIR', config, 1) or "", config)
        bbfile_loc = os.path.abspath(os.path.dirname(bbfile))
        oldpath = os.path.abspath(os.getcwd())
        parse.cached_mtime_noerror(bbfile_loc)
        bb_data = data.init_db(config)
        # The ConfHandler first looks if there is a TOPDIR and if not
        # then it would call getcwd().
        # Previously, we chdir()ed to bbfile_loc, called the handler
        # and finally chdir()ed back, a couple of thousand times. We now
        # just fill in TOPDIR to point to bbfile_loc if there is no TOPDIR yet.
        if not data.getVar('TOPDIR', bb_data):
            chdir_back = True
            data.setVar('TOPDIR', bbfile_loc, bb_data)
        try:
            if appends:
                data.setVar('__BBAPPEND', " ".join(appends), bb_data)
            return parse.handle(bbfile, bb_data)
        finally:
            # IMPROVEMENT: the original duplicated the chdir-restore in both
            # the success path and a bare `except: ... raise`; try/finally
            # covers both without the broad except clause.
            if chdir_back:
                os.chdir(oldpath)
Ejemplo n.º 14
0
    def srcrev_internal_helper(ud, d):
        """
        Return:
            a) a source revision if specified
            b) True if auto srcrev is in action
            c) False otherwise

        Raises InvalidSRCREV when SRCREV resolves to "INVALID".
        """

        # Explicit rev= / tag= URL parameters always win.
        if 'rev' in ud.parm:
            return ud.parm['rev']

        if 'tag' in ud.parm:
            return ud.parm['tag']

        rev = None
        if 'name' in ud.parm:
            # Try the name-specific SRCREV spellings, most specific first.
            pn = data.getVar("PN", d, 1)
            rev = data.getVar("SRCREV_%s_pn-%s" % (ud.parm['name'], pn), d, 1)
            if not rev:
                rev = data.getVar("SRCREV_pn-%s_%s" % (pn, ud.parm['name']), d,
                                  1)
            if not rev:
                rev = data.getVar("SRCREV_%s" % (ud.parm['name']), d, 1)
        if not rev:
            rev = data.getVar("SRCREV", d, 1)
        if rev == "INVALID":
            raise InvalidSRCREV("Please set SRCREV to a valid value")
        if not rev:
            return False
        if rev == "SRCREVINACTION":
            return True
        return rev
Ejemplo n.º 15
0
    def srcrev_internal_helper(ud, d):
        """
        Return:
            a) a source revision if specified
            b) True if auto srcrev is in action
            c) False otherwise
        """

        # Explicit rev= / tag= URL parameters always win.
        if 'rev' in ud.parm:
            return ud.parm['rev']

        if 'tag' in ud.parm:
            return ud.parm['tag']

        rev = None
        if 'name' in ud.parm:
            pn = data.getVar("PN", d, 1)
            rev = data.getVar("SRCREV_pn-" + pn + "_" + ud.parm['name'], d, 1)
        if not rev:
            rev = data.getVar("SRCREV", d, 1)
        if not rev:
            return False
        # BUGFIX: was `rev is "SRCREVINACTION"` -- an identity comparison with
        # a string literal, which is only true by accident of interning.
        # Compare by value instead.
        if rev == "SRCREVINACTION":
            return True
        return rev
Ejemplo n.º 16
0
    def localpath(self, url, ud, d):
        """Compute the local tarball path for an osc checkout.

        Requires a 'module' URL parameter; also populates ud.module,
        ud.pkgdir, ud.moddir, ud.revision and ud.localfile as side effects.
        """
        if not "module" in ud.parm:
            raise MissingParameterError("osc method needs a 'module' parameter.")

        ud.module = ud.parm["module"]

        # Create paths to osc checkouts
        relpath = ud.path
        if relpath.startswith('/'):
            # Remove leading slash as os.path.join can't cope
            relpath = relpath[1:]
        ud.pkgdir = os.path.join(data.expand('${OSCDIR}', d), ud.host)
        ud.moddir = os.path.join(ud.pkgdir, relpath, ud.module)

        # Revision: explicit rev= parameter, else the shared SRCREV machinery
        # (which may return True for "auto" -- treated as no fixed revision).
        if 'rev' in ud.parm:
            ud.revision = ud.parm['rev']
        else:
            pv = data.getVar("PV", d, 0)
            rev = Fetch.srcrev_internal_helper(ud, d)
            if rev and rev != True:
                ud.revision = rev
            else:
                ud.revision = ""

        ud.localfile = data.expand('%s_%s_%s.tar.gz' % (ud.module.replace('/', '.'), ud.path.replace('/', '.'), ud.revision), d)

        return os.path.join(data.getVar("DL_DIR", d, True), ud.localfile)
Ejemplo n.º 17
0
    def go(self, loc, ud, d):
        """Fetch url via `repo init` + `repo sync`, then cache the checkout
        as a tarball in DL_DIR."""

        if os.access(os.path.join(data.getVar("DL_DIR", d, True), ud.localfile), os.R_OK):
            logger.debug(1, "%s already exists (or was stashed). Skipping repo init / sync.", ud.localpath)
            return

        # Checkout directory is keyed on host+path plus the manifest name.
        gitsrcname = "%s%s" % (ud.host, ud.path.replace("/", "."))
        repodir = data.getVar("REPODIR", d, True) or os.path.join(data.getVar("DL_DIR", d, True), "repo")
        codir = os.path.join(repodir, gitsrcname, ud.manifest)

        if ud.user:
            username = ud.user + "@"
        else:
            username = ""

        bb.mkdirhier(os.path.join(codir, "repo"))
        os.chdir(os.path.join(codir, "repo"))
        if not os.path.exists(os.path.join(codir, "repo", ".repo")):
            runfetchcmd("repo init -m %s -b %s -u %s://%s%s%s" % (ud.manifest, ud.branch, ud.proto, username, ud.host, ud.path), d)

        runfetchcmd("repo sync", d)
        os.chdir(codir)

        # Create a cache
        runfetchcmd("tar --exclude=.repo --exclude=.git -czf %s %s" % (ud.localpath, os.path.join(".", "*") ), d)
Ejemplo n.º 18
0
    def download(self, uri, ud, d, checkonly = False):
        """Fetch urls with wget.

        checkonly -- when True, only run the spider/CHECKCOMMAND probe and
        skip the existence sanity check.  Raises FetchError when a real
        fetch reports success but the local file is missing.
        """

        if checkonly:
            fetchcmd = data.getVar("CHECKCOMMAND_wget", d, True) or d.expand("/usr/bin/env wget --spider -t 5 --passive-ftp --no-check-certificate -P ${DL_DIR} '${URI}'")
        elif os.path.exists(ud.localpath):
            # file exists, but we didnt complete it.. trying again..
            fetchcmd = data.getVar("RESUMECOMMAND_wget", d, True) or d.expand("/usr/bin/env wget -c -t 5 -nv --passive-ftp --no-check-certificate -P ${DL_DIR} '${URI}'")
        else:
            fetchcmd = data.getVar("FETCHCOMMAND_wget", d, True) or d.expand("/usr/bin/env wget -t 5 -nv --passive-ftp --no-check-certificate -P ${DL_DIR} '${URI}'")

        # Strip URL parameters (";key=value") before substitution.
        uri = uri.split(";")[0]
        uri_decoded = list(decodeurl(uri))
        uri_type = uri_decoded[0]
        uri_host = uri_decoded[1]

        fetchcmd = fetchcmd.replace("${URI}", uri.split(";")[0])
        fetchcmd = fetchcmd.replace("${FILE}", ud.basename)
        if not checkonly:
            logger.info("fetch " + uri)
            logger.debug(2, "executing " + fetchcmd)
        bb.fetch2.check_network_access(d, fetchcmd)
        runfetchcmd(fetchcmd, d, quiet=checkonly)

        # Sanity check since wget can pretend it succeed when it didn't
        # Also, this used to happen if sourceforge sent us to the mirror page
        if not os.path.exists(ud.localpath) and not checkonly:
            raise FetchError("The fetch command returned success for url %s but %s doesn't exist?!" % (uri, ud.localpath), uri)

        return True
Ejemplo n.º 19
0
    def getcset(d, depot,host,user,pswd,parm):
        """Return the newest Perforce changeset number for `depot`, or -1."""
        if "cset" in parm:
            return parm["cset"]

        # Export connection settings through the datastore for the p4 command.
        for var, value in (('P4USER', user), ('P4PASSWD', pswd), ('P4PORT', host)):
            if value:
                data.setVar(var, value, d)

        # Pin the depot spec to a revision, label, or P4DATE when given.
        p4date = data.getVar("P4DATE", d, 1)
        if "revision" in parm:
            depot += "#%s" % (parm["revision"])
        elif "label" in parm:
            depot += "@%s" % (parm["label"])
        elif p4date:
            depot += "@%s" % (p4date)

        p4cmd = data.getVar('FETCHCOMMAND_p4', d, 1)
        command = "%s changes -m 1 %s" % (p4cmd, depot)
        bb.msg.debug(1, bb.msg.domain.Fetcher, "Running " + command)
        cset = os.popen(command).readline().strip()
        bb.msg.debug(1, bb.msg.domain.Fetcher, "READ %s" % (cset))
        if not cset:
            return -1

        # Field 1 of "Change <num> on ..." is the changeset number.
        return cset.split(' ')[1]
Ejemplo n.º 20
0
    def download(self, loc, ud, d):
        """Fetch url via `repo init` + `repo sync`, then cache the checkout
        as a tarball in DL_DIR."""

        if os.access(os.path.join(data.getVar("DL_DIR", d, True), ud.localfile), os.R_OK):
            logger.debug(1, "%s already exists (or was stashed). Skipping repo init / sync.", ud.localpath)
            return

        # Checkout directory is keyed on host+path plus the manifest name.
        gitsrcname = "%s%s" % (ud.host, ud.path.replace("/", "."))
        repodir = data.getVar("REPODIR", d, True) or os.path.join(data.getVar("DL_DIR", d, True), "repo")
        codir = os.path.join(repodir, gitsrcname, ud.manifest)

        if ud.user:
            username = ud.user + "@"
        else:
            username = ""

        bb.mkdirhier(os.path.join(codir, "repo"))
        os.chdir(os.path.join(codir, "repo"))
        if not os.path.exists(os.path.join(codir, "repo", ".repo")):
            bb.fetch2.check_network_access(d, "repo init -m %s -b %s -u %s://%s%s%s" % (ud.manifest, ud.branch, ud.proto, username, ud.host, ud.path))
            runfetchcmd("repo init -m %s -b %s -u %s://%s%s%s" % (ud.manifest, ud.branch, ud.proto, username, ud.host, ud.path), d)

        bb.fetch2.check_network_access(d, "repo sync %s" % ud.url)
        runfetchcmd("repo sync", d)
        os.chdir(codir)

        # scmdata=keep retains the .repo/.git metadata in the cached tarball.
        scmdata = ud.parm.get("scmdata", "")
        if scmdata == "keep":
            tar_flags = ""
        else:
            tar_flags = "--exclude '.repo' --exclude '.git'"

        # Create a cache
        runfetchcmd("tar %s -czf %s %s" % (tar_flags, ud.localpath, os.path.join(".", "*") ), d)
Ejemplo n.º 21
0
def add_task(task, deps, d):
    """Register `task` and its dependencies in the metadata task graph.

    Updates both the '_task_graph' digraph and the '_task_deps' flag table
    stored in the datastore `d`.
    """
    task_graph = data.getVar('_task_graph', d)
    if not task_graph:
        task_graph = bb.digraph()
    data.setVarFlag(task, 'task', 1, d)
    task_graph.addnode(task, None)
    for dep in deps:
        if not task_graph.hasnode(dep):
            task_graph.addnode(dep, None)
        task_graph.addnode(task, dep)
    # don't assume holding a reference
    data.setVar('_task_graph', task_graph, d)

    task_deps = data.getVar('_task_deps', d)
    if not task_deps:
        task_deps = {}
    # Copy the named flag of this task (if set) into the task_deps table.
    def getTask(name):
        deptask = data.getVarFlag(task, name, d)
        if deptask:
            if not name in task_deps:
                task_deps[name] = {}
            task_deps[name][task] = deptask
    getTask('deptask')
    getTask('rdeptask')
    getTask('recrdeptask')
    getTask('nostamp')

    data.setVar('_task_deps', task_deps, d)
Ejemplo n.º 22
0
 def md5_sum(basename, d):
     """
     Fast and incomplete OVERRIDE implementation for MD5SUM handling:
     prefer MD5SUM_<basename>, falling back to the plain MD5SUM variable.
     """
     override = data.getVar("MD5SUM_%s" % basename, d)
     if override:
         return override
     return data.getVar("MD5SUM", d)
Ejemplo n.º 23
0
    def srcrev_internal_helper(ud, d):
        """
        Return:
            a) a source revision if specified
            b) True if auto srcrev is in action
            c) False otherwise

        Raises InvalidSRCREV when SRCREV resolves to "INVALID".
        """

        # Explicit rev= / tag= URL parameters always win.
        if 'rev' in ud.parm:
            return ud.parm['rev']

        if 'tag' in ud.parm:
            return ud.parm['tag']

        rev = None
        if 'name' in ud.parm:
            pn = data.getVar("PN", d, 1)
            rev = data.getVar("SRCREV_pn-" + pn + "_" + ud.parm['name'], d, 1)
        if not rev:
            rev = data.getVar("SRCREV", d, 1)
        if rev == "INVALID":
            raise InvalidSRCREV("Please set SRCREV to a valid value")
        if not rev:
            return False
        # BUGFIX: was `rev is "SRCREVINACTION"` -- an identity comparison with
        # a string literal, which is only true by accident of interning.
        # Compare by value instead.
        if rev == "SRCREVINACTION":
            return True
        return rev
Ejemplo n.º 24
0
    def load_bbfile(bbfile, appends, config):
        """
        Load and parse one .bb build file
        Return the data and whether parsing resulted in the file being skipped

        bbfile  -- path of the recipe to parse
        appends -- list of .bbappend files to apply (stored in __BBAPPEND)
        config  -- configuration datastore used to seed the new one
        """
        chdir_back = False

        from bb import data, parse

        # expand tmpdir to include this topdir
        data.setVar('TMPDIR', data.getVar('TMPDIR', config, 1) or "", config)
        bbfile_loc = os.path.abspath(os.path.dirname(bbfile))
        oldpath = os.path.abspath(os.getcwd())
        parse.cached_mtime_noerror(bbfile_loc)
        bb_data = data.init_db(config)
        # The ConfHandler first looks if there is a TOPDIR and if not
        # then it would call getcwd().
        # Previously, we chdir()ed to bbfile_loc, called the handler
        # and finally chdir()ed back, a couple of thousand times. We now
        # just fill in TOPDIR to point to bbfile_loc if there is no TOPDIR yet.
        if not data.getVar('TOPDIR', bb_data):
            chdir_back = True
            data.setVar('TOPDIR', bbfile_loc, bb_data)
        try:
            if appends:
                data.setVar('__BBAPPEND', " ".join(appends), bb_data)
            return parse.handle(bbfile, bb_data)
        finally:
            # IMPROVEMENT: the original duplicated the chdir-restore in both
            # the success path and a bare `except: ... raise`; try/finally
            # covers both without the broad except clause.
            if chdir_back:
                os.chdir(oldpath)
Ejemplo n.º 25
0
    def getcset(d, depot, host, user, pswd, parm):
        """Return the newest Perforce changeset number for `depot`, or -1."""
        if "cset" in parm:
            return parm["cset"]

        # Assemble per-call connection options for the p4 command line.
        p4opt = ""
        if user:
            p4opt += " -u %s" % (user)
        if pswd:
            p4opt += " -P %s" % (pswd)
        if host:
            p4opt += " -p %s" % (host)

        # Pin the depot spec to a revision, label, or P4DATE when given.
        p4date = data.getVar("P4DATE", d, True)
        if "revision" in parm:
            depot += "#%s" % (parm["revision"])
        elif "label" in parm:
            depot += "@%s" % (parm["label"])
        elif p4date:
            depot += "@%s" % (p4date)

        p4cmd = data.getVar('FETCHCOMMAND_p4', d, True)
        logger.debug(1, "Running %s%s changes -m 1 %s", p4cmd, p4opt, depot)
        output, errors = bb.process.run("%s%s changes -m 1 %s" % (p4cmd, p4opt, depot))
        cset = output.strip()
        logger.debug(1, "READ %s", cset)
        if not cset:
            return -1

        # Field 1 of "Change <num> on ..." is the changeset number.
        return cset.split(' ')[1]
Ejemplo n.º 26
0
    def getSRCDate(d):
        """
        Return the SRC Date for the component

        d the bb.data module
        """
        # Fall through SRCDATE -> CVSDATE -> DATE, returning the first truthy
        # value (or DATE's value when all are unset/empty).
        for var in ("SRCDATE", "CVSDATE", "DATE"):
            value = data.getVar(var, d, 1)
            if value:
                return value
        return value
Ejemplo n.º 27
0
def srcrev_internal_helper(ud, d, name):
    """
    Return:
        a) a source revision if specified
        b) latest revision if SRCREV="AUTOINC"
        c) None if not specified
    """

    # An explicit rev= or tag= URL parameter always wins.
    for key in ('rev', 'tag'):
        if key in ud.parm:
            return ud.parm[key]

    rev = None
    if name != '':
        # Name-qualified SRCREV spellings, most specific first.
        pn = data.getVar("PN", d, True)
        rev = (data.getVar("SRCREV_%s_pn-%s" % (name, pn), d, True)
               or data.getVar("SRCREV_%s" % name, d, True))
    if not rev:
        rev = data.getVar("SRCREV", d, True)

    if rev == "INVALID":
        raise FetchError("Please set SRCREV to a valid value", ud.url)
    if rev == "AUTOINC":
        rev = ud.method.latest_revision(ud.url, ud, d, name)

    return rev
Ejemplo n.º 28
0
    def getcset(d, depot, host, user, pswd, parm):
        """Return the newest Perforce changeset number for `depot`, or -1.

        Credentials and host are passed as p4 command-line options; an
        explicit cset= URL parameter short-circuits the lookup.
        """
        p4opt = ""
        if "cset" in parm:
            return parm["cset"]
        if user:
            p4opt += " -u %s" % (user)
        if pswd:
            p4opt += " -P %s" % (pswd)
        if host:
            p4opt += " -p %s" % (host)

        # Pin the depot spec to a revision, label, or P4DATE when given.
        p4date = data.getVar("P4DATE", d, True)
        if "revision" in parm:
            depot += "#%s" % (parm["revision"])
        elif "label" in parm:
            depot += "@%s" % (parm["label"])
        elif p4date:
            depot += "@%s" % (p4date)

        p4cmd = data.getVar('FETCHCOMMAND_p4', d, True)
        logger.debug(1, "Running %s%s changes -m 1 %s", p4cmd, p4opt, depot)
        p4file, errors = bb.process.run("%s%s changes -m 1 %s" %
                                        (p4cmd, p4opt, depot))
        cset = p4file.strip()
        logger.debug(1, "READ %s", cset)
        if not cset:
            return -1

        # "p4 changes" output is "Change <num> on ..."; field 1 is the number.
        return cset.split(' ')[1]
Ejemplo n.º 29
0
    def urldata_init(self, ud, d):
        """
        init ClearCase specific variable within url data

        Validates protocol and vob/module parameters, resolves the cleartool
        binary, and derives the view/config-spec/tarball paths from SRCREV.
        """
        ud.proto = "https"
        if 'protocol' in ud.parm:
            ud.proto = ud.parm['protocol']
        if not ud.proto in ('http', 'https'):
            raise fetch2.ParameterError("Invalid protocol type", ud.url)

        ud.vob = ''
        if 'vob' in ud.parm:
            ud.vob = ud.parm['vob']
        else:
            msg = ud.url+": vob must be defined so the fetcher knows what to get."
            raise MissingParameterError('vob', msg)

        if 'module' in ud.parm:
            ud.module = ud.parm['module']
        else:
            ud.module = ""

        # Prefer a configured command, else whichever cleartool flavour is on PATH.
        ud.basecmd = d.getVar("FETCHCMD_ccrc", True) or spawn.find_executable("cleartool") or spawn.find_executable("rcleartool")

        if data.getVar("SRCREV", d, True) == "INVALID":
          raise FetchError("Set a valid SRCREV for the clearcase fetcher in your recipe, e.g. SRCREV = \"/main/LATEST\" or any other label of your choice.")

        # NOTE(review): mixes data.getVar(name, d, True) with d.getVar(name);
        # this d.getVar call passes no expand flag -- confirm intended.
        ud.label = d.getVar("SRCREV")
        ud.customspec = d.getVar("CCASE_CUSTOM_CONFIG_SPEC", True)

        ud.server     = "%s://%s%s" % (ud.proto, ud.host, ud.path)

        ud.identifier = "clearcase-%s%s-%s" % ( ud.vob.replace("/", ""),
                                                ud.module.replace("/", "."),
                                                ud.label.replace("/", "."))

        # NOTE(review): d.getVar("DATETIME", d, True) passes the datastore as
        # the expand argument -- looks wrong for the instance API; verify.
        ud.viewname         = "%s-view%s" % (ud.identifier, d.getVar("DATETIME", d, True))
        ud.csname           = "%s-config-spec" % (ud.identifier)
        ud.ccasedir         = os.path.join(data.getVar("DL_DIR", d, True), ud.type)
        ud.viewdir          = os.path.join(ud.ccasedir, ud.viewname)
        ud.configspecfile   = os.path.join(ud.ccasedir, ud.csname)
        ud.localfile        = "%s.tar.gz" % (ud.identifier)

        self.debug("host            = %s" % ud.host)
        self.debug("path            = %s" % ud.path)
        self.debug("server          = %s" % ud.server)
        self.debug("proto           = %s" % ud.proto)
        self.debug("type            = %s" % ud.type)
        self.debug("vob             = %s" % ud.vob)
        self.debug("module          = %s" % ud.module)
        self.debug("basecmd         = %s" % ud.basecmd)
        self.debug("label           = %s" % ud.label)
        self.debug("ccasedir        = %s" % ud.ccasedir)
        self.debug("viewdir         = %s" % ud.viewdir)
        self.debug("viewname        = %s" % ud.viewname)
        self.debug("configspecfile  = %s" % ud.configspecfile)
        self.debug("localfile       = %s" % ud.localfile)

        ud.localfile = os.path.join(data.getVar("DL_DIR", d, True), ud.localfile)
Ejemplo n.º 30
0
def exec_func(func, d, dirs=None):
    """Execute an BB 'function'.

    Looks up the function body for *func* in the datastore *d*, prepares
    its directories/lockfiles, and dispatches to the python or shell
    executor with a per-task run file under ${T}.
    """

    code = data.getVar(func, d)
    if not code:
        if code is None:
            logger.warn("Function %s doesn't exist", func)
        return

    flags = data.getVarFlags(func, d)

    # Wipe anything listed in the function's 'cleandirs' flag first.
    cleandirs = flags.get('cleandirs')
    if cleandirs:
        for cdir in data.expand(cleandirs, d).split():
            bb.utils.remove(cdir, True)

    # An explicit dirs argument wins over the function's 'dirs' flag.
    if dirs is None:
        dirs = flags.get('dirs')
        if dirs:
            dirs = data.expand(dirs, d).split()

    # Create the requested directories; the function runs in the last one
    # (or in ${B} when none were requested).
    if dirs:
        for newdir in dirs:
            bb.utils.mkdirhier(newdir)
        workdir = dirs[-1]
    else:
        workdir = data.getVar('B', d, 1)
        bb.utils.mkdirhier(workdir)

    ispython = flags.get('python')

    lockflag = flags.get('lockfiles')
    lockfiles = [data.expand(f, d) for f in lockflag.split()] if lockflag else None

    tempdir = data.getVar('T', d, 1)

    # or func allows items to be executed outside of the normal
    # task set, such as buildhistory
    task = data.getVar('BB_RUNTASK', d, 1) or func
    taskfunc = task if task == func else "%s.%s" % (task, func)

    runfmt = data.getVar('BB_RUNFMT', d, 1) or "run.{func}.{pid}"
    runfn = runfmt.format(taskfunc=taskfunc,
                          task=task,
                          func=func,
                          pid=os.getpid())
    runfile = os.path.join(tempdir, runfn)
    bb.utils.mkdirhier(os.path.dirname(runfile))

    with bb.utils.fileslocked(lockfiles):
        if ispython:
            exec_func_python(func, d, runfile, cwd=workdir)
        else:
            exec_func_shell(func, d, runfile, cwd=workdir)
Ejemplo n.º 31
0
    def urldata_init(self, ud, d):
        """
        init git specific variable within url data
        so that the git method like latest_revision() can work

        Raises bb.fetch2.ParameterError for unsupported protocols or
        mismatched name/branch parameter counts.
        """
        if 'protocol' in ud.parm:
            ud.proto = ud.parm['protocol']
        elif not ud.host:
            # No host can only mean a local repository.
            ud.proto = 'file'
        else:
            ud.proto = "git"

        if ud.proto not in ('git', 'file', 'ssh', 'http', 'https', 'rsync'):
            raise bb.fetch2.ParameterError("Invalid protocol type", ud.url)

        ud.nocheckout = ud.parm.get("nocheckout", "0") == "1"

        ud.rebaseable = ud.parm.get("rebaseable", "0") == "1"

        # bareclone implies nocheckout
        ud.bareclone = ud.parm.get("bareclone", "0") == "1"
        if ud.bareclone:
            ud.nocheckout = 1

        branches = ud.parm.get("branch", "master").split(',')
        if len(branches) != len(ud.names):
            raise bb.fetch2.ParameterError("The number of name and branch parameters is not balanced", ud.url)
        ud.branches = {}
        for name in ud.names:
            branch = branches[ud.names.index(name)]
            ud.branches[name] = branch

        ud.basecmd = data.getVar("FETCHCMD_git", d, True) or "git"

        ud.write_tarballs = ((data.getVar("BB_GENERATE_MIRROR_TARBALLS", d, True) or "0") != "0") or ud.rebaseable

        ud.setup_revisons(d)

        for name in ud.names:
            # Anything that doesn't look like a full 40-character SHA-1
            # revision is treated as a ref name and resolved to one.
            # (Fix: comment previously claimed sha256; the 40/hex check is SHA-1.)
            revision = ud.revisions[name]
            if not revision or len(revision) != 40 or any(c not in "abcdef0123456789" for c in revision):
                if revision:
                    ud.branches[name] = revision
                ud.revisions[name] = self.latest_revision(ud.url, ud, d, name)

        gitsrcname = '%s%s' % (ud.host.replace(':', '.'), ud.path.replace('/', '.').replace('*', '.'))
        # for rebaseable git repo, it is necessary to keep mirror tar ball
        # per revision, so that even the revision disappears from the
        # upstream repo in the future, the mirror will remain intact and still
        # contains the revision
        if ud.rebaseable:
            for name in ud.names:
                gitsrcname = gitsrcname + '_' + ud.revisions[name]
        ud.mirrortarball = 'git2_%s.tar.gz' % (gitsrcname)
        ud.fullmirror = os.path.join(d.getVar("DL_DIR", True), ud.mirrortarball)
        gitdir = d.getVar("GITDIR", True) or (d.getVar("DL_DIR", True) + "/git2/")
        ud.clonedir = os.path.join(gitdir, gitsrcname)

        ud.localfile = ud.clonedir
Ejemplo n.º 32
0
    def urldata_init(self, ud, d):
        """
        init ClearCase specific variable within url data

        Requires a 'vob' URL parameter and a valid SRCREV.
        Raises ParameterError, MissingParameterError or FetchError.
        """
        ud.proto = "https"
        if 'protocol' in ud.parm:
            ud.proto = ud.parm['protocol']
        if ud.proto not in ('http', 'https'):
            raise fetch2.ParameterError("Invalid protocol type", ud.url)

        ud.vob = ''
        if 'vob' in ud.parm:
            ud.vob = ud.parm['vob']
        else:
            msg = ud.url+": vob must be defined so the fetcher knows what to get."
            raise MissingParameterError('vob', msg)

        if 'module' in ud.parm:
            ud.module = ud.parm['module']
        else:
            ud.module = ""

        # Prefer a configured command, then cleartool, then rcleartool.
        ud.basecmd = d.getVar("FETCHCMD_ccrc", True) or spawn.find_executable("cleartool") or spawn.find_executable("rcleartool")

        if data.getVar("SRCREV", d, True) == "INVALID":
            raise FetchError("Set a valid SRCREV for the clearcase fetcher in your recipe, e.g. SRCREV = \"/main/LATEST\" or any other label of your choice.")

        # The label is deliberately fetched unexpanded (expand=False).
        ud.label = d.getVar("SRCREV", False)
        ud.customspec = d.getVar("CCASE_CUSTOM_CONFIG_SPEC", True)

        ud.server     = "%s://%s%s" % (ud.proto, ud.host, ud.path)

        ud.identifier = "clearcase-%s%s-%s" % ( ud.vob.replace("/", ""),
                                                ud.module.replace("/", "."),
                                                ud.label.replace("/", "."))

        # Fix: d.getVar() takes (var, expand); the previous call passed the
        # datastore itself as the expand flag, a leftover of the
        # data.getVar(var, d, expand) calling convention.
        ud.viewname         = "%s-view%s" % (ud.identifier, d.getVar("DATETIME", True))
        ud.csname           = "%s-config-spec" % (ud.identifier)
        ud.ccasedir         = os.path.join(data.getVar("DL_DIR", d, True), ud.type)
        ud.viewdir          = os.path.join(ud.ccasedir, ud.viewname)
        ud.configspecfile   = os.path.join(ud.ccasedir, ud.csname)
        ud.localfile        = "%s.tar.gz" % (ud.identifier)

        self.debug("host            = %s" % ud.host)
        self.debug("path            = %s" % ud.path)
        self.debug("server          = %s" % ud.server)
        self.debug("proto           = %s" % ud.proto)
        self.debug("type            = %s" % ud.type)
        self.debug("vob             = %s" % ud.vob)
        self.debug("module          = %s" % ud.module)
        self.debug("basecmd         = %s" % ud.basecmd)
        self.debug("label           = %s" % ud.label)
        self.debug("ccasedir        = %s" % ud.ccasedir)
        self.debug("viewdir         = %s" % ud.viewdir)
        self.debug("viewname        = %s" % ud.viewname)
        self.debug("configspecfile  = %s" % ud.configspecfile)
        self.debug("localfile       = %s" % ud.localfile)

        ud.localfile = os.path.join(data.getVar("DL_DIR", d, True), ud.localfile)
Ejemplo n.º 33
0
def handle(fn, d, include):
    """Parse a BitBake file (.bb/.bbclass/.inc) into the datastore d.

    fn is the file to parse; include is non-zero when the file is being
    included/inherited rather than parsed as the top-level recipe.
    Returns d, or the result of ast.multi_finalize() for a top-level .bb.
    """
    global __func_start_regexp__, __inherit_regexp__, __export_func_regexp__, __addtask_regexp__, __addhandler_regexp__, __infunc__, __body__, __residue__
    # Reset module-level parser state before (re-)parsing.
    __body__ = []
    __infunc__ = ""
    __classname__ = ""
    __residue__ = []


    if include == 0:
        logger.debug(2, "BB %s: handle(data)", fn)
    else:
        logger.debug(2, "BB %s: handle(data, include)", fn)

    base_name = os.path.basename(fn)
    (root, ext) = os.path.splitext(base_name)
    init(d)

    if ext == ".bbclass":
        # Record the class in __inherit_cache so repeated inherits are no-ops.
        __classname__ = root
        classes.append(__classname__)
        __inherit_cache = data.getVar('__inherit_cache', d) or []
        if not fn in __inherit_cache:
            __inherit_cache.append(fn)
            data.setVar('__inherit_cache', __inherit_cache, d)

    if include != 0:
        # Remember the including file's FILE so it can be restored below.
        oldfile = data.getVar('FILE', d)
    else:
        oldfile = None

    abs_fn = resolve_file(fn, d)

    if include:
        bb.parse.mark_dependency(d, abs_fn)

    # actual loading
    statements = get_statements(fn, abs_fn, base_name)

    # DONE WITH PARSING... time to evaluate
    if ext != ".bbclass":
        data.setVar('FILE', abs_fn, d)

    statements.eval(d)

    if ext == ".bbclass":
        classes.remove(__classname__)
    else:
        if include == 0:
            # Top-level recipe: finalize (may expand into multiple variants).
            return ast.multi_finalize(fn, d)

    if oldfile:
        d.setVar("FILE", oldfile)

    # we have parsed the bb class now
    if ext == ".bbclass" or ext == ".inc":
        bb.methodpool.get_parsed_dict()[base_name] = 1

    return d
Ejemplo n.º 34
0
    def go(self, loc, ud, d):
        """Fetch url.

        Maintains a bare mirror of the git repo under ${GITDIR}, ensures
        ud.tag is present (fetching branch and tags if not), optionally
        writes a mirror tarball of the repo, then checks out ud.tag and
        packs the checkout into ud.localpath as a tar.gz.
        NOTE: relies on os.chdir() side effects throughout.
        """

        # Skip entirely if the output tarball already exists in DL_DIR.
        if os.access(os.path.join(data.getVar("DL_DIR", d, True), ud.localfile), os.R_OK):
            bb.msg.debug(1, bb.msg.domain.Fetcher, "%s already exists (or was stashed). Skipping git checkout." % ud.localpath)
            return

        if ud.user:
            username = ud.user + '@'
        else:
            username = ""

        gitsrcname = '%s%s' % (ud.host, ud.path.replace('/', '.'))

        repofilename = 'git_%s.tar.gz' % (gitsrcname)
        repofile = os.path.join(data.getVar("DL_DIR", d, 1), repofilename)
        repodir = os.path.join(data.expand('${GITDIR}', d), gitsrcname)

        coname = '%s' % (ud.tag)
        codir = os.path.join(repodir, coname)

        # Populate the repo mirror: unpack a stashed tarball if available,
        # otherwise clone (without checkout) from upstream.
        if not os.path.exists(repodir):
            if Fetch.try_mirror(d, repofilename):    
                bb.mkdirhier(repodir)
                os.chdir(repodir)
                runfetchcmd("tar -xzf %s" % (repofile), d)
            else:
                runfetchcmd("git clone -n %s://%s%s%s %s" % (ud.proto, username, ud.host, ud.path, repodir), d)

        os.chdir(repodir)
        # Remove all but the .git directory
        if not self._contains_ref(ud.tag, d):
            runfetchcmd("rm * -Rf", d)
            runfetchcmd("git fetch %s://%s%s%s %s" % (ud.proto, username, ud.host, ud.path, ud.branch), d)
            runfetchcmd("git fetch --tags %s://%s%s%s" % (ud.proto, username, ud.host, ud.path), d)
            runfetchcmd("git prune-packed", d)
            runfetchcmd("git pack-redundant --all | xargs -r rm", d)

        os.chdir(repodir)
        mirror_tarballs = data.getVar("BB_GENERATE_MIRROR_TARBALLS", d, True)
        if mirror_tarballs != "0": 
            bb.msg.note(1, bb.msg.domain.Fetcher, "Creating tarball of git repository")
            runfetchcmd("tar -czf %s %s" % (repofile, os.path.join(".", ".git", "*") ), d)

        # Fresh checkout area for this tag.
        if os.path.exists(codir):
            bb.utils.prunedir(codir)

        bb.mkdirhier(codir)
        os.chdir(repodir)
        runfetchcmd("git read-tree %s" % (ud.tag), d)
        runfetchcmd("git checkout-index -q -f --prefix=%s -a" % (os.path.join(codir, "git", "")), d)

        os.chdir(codir)
        bb.msg.note(1, bb.msg.domain.Fetcher, "Creating tarball of git checkout")
        runfetchcmd("tar -czf %s %s" % (ud.localpath, os.path.join(".", "*") ), d)

        os.chdir(repodir)
        bb.utils.prunedir(codir)
Ejemplo n.º 35
0
def handle(fn, d, include):
    """Parse a BitBake file (.bb/.bbclass/.inc) into the datastore d.

    fn is the file to parse; include is non-zero when the file is being
    included/inherited rather than parsed as the top-level recipe.
    Returns d, or the result of ast.multi_finalize() for a top-level .bb.
    """
    global __func_start_regexp__, __inherit_regexp__, __export_func_regexp__, __addtask_regexp__, __addhandler_regexp__, __infunc__, __body__, __residue__
    # Reset module-level parser state before (re-)parsing.
    __body__ = []
    __infunc__ = ""
    __classname__ = ""
    __residue__ = []


    if include == 0:
        logger.debug(2, "BB %s: handle(data)", fn)
    else:
        logger.debug(2, "BB %s: handle(data, include)", fn)

    base_name = os.path.basename(fn)
    (root, ext) = os.path.splitext(base_name)
    init(d)

    if ext == ".bbclass":
        # Record the class in __inherit_cache so repeated inherits are no-ops.
        __classname__ = root
        classes.append(__classname__)
        __inherit_cache = data.getVar('__inherit_cache', d) or []
        if not fn in __inherit_cache:
            __inherit_cache.append(fn)
            data.setVar('__inherit_cache', __inherit_cache, d)

    if include != 0:
        # Remember the including file's FILE so it can be restored below.
        oldfile = data.getVar('FILE', d)
    else:
        oldfile = None

    abs_fn = resolve_file(fn, d)

    if include:
        bb.parse.mark_dependency(d, abs_fn)

    # actual loading
    statements = get_statements(fn, abs_fn, base_name)

    # DONE WITH PARSING... time to evaluate
    # NOTE(review): this stores the possibly-relative fn, while other
    # versions of this function store abs_fn -- confirm which is intended.
    if ext != ".bbclass":
        data.setVar('FILE', fn, d)

    statements.eval(d)

    if ext == ".bbclass":
        classes.remove(__classname__)
    else:
        if include == 0:
            # Top-level recipe: finalize (may expand into multiple variants).
            return ast.multi_finalize(fn, d)

    if oldfile:
        bb.data.setVar("FILE", oldfile, d)

    # we have parsed the bb class now
    if ext == ".bbclass" or ext == ".inc":
        bb.methodpool.get_parsed_dict()[base_name] = 1

    return d
Ejemplo n.º 36
0
    def go(self, loc, ud, d):
        """Fetch url.

        Maintains a bare mirror of the git repo under ${GITDIR}, ensures
        ud.tag is present (fetching branch and tags if not), optionally
        writes a mirror tarball of the repo, then checks out ud.tag and
        packs the checkout into ud.localpath as a tar.gz.
        NOTE: relies on os.chdir() side effects throughout.
        """

        # Skip entirely if a stashed/mirrored result already exists.
        if Fetch.try_mirror(d, ud.localfile):
            bb.msg.debug(1, bb.msg.domain.Fetcher, "%s already exists (or was stashed). Skipping git checkout." % ud.localpath)
            return

        if ud.user:
            username = ud.user + '@'
        else:
            username = ""

        gitsrcname = '%s%s' % (ud.host, ud.path.replace('/', '.'))

        repofilename = 'git_%s.tar.gz' % (gitsrcname)
        repofile = os.path.join(data.getVar("DL_DIR", d, 1), repofilename)
        repodir = os.path.join(data.expand('${GITDIR}', d), gitsrcname)

        coname = '%s' % (ud.tag)
        codir = os.path.join(repodir, coname)

        # Populate the repo mirror: unpack a stashed tarball if available,
        # otherwise clone (without checkout) from upstream.
        if not os.path.exists(repodir):
            if Fetch.try_mirror(d, repofilename):    
                bb.mkdirhier(repodir)
                os.chdir(repodir)
                runfetchcmd("tar -xzf %s" % (repofile), d)
            else:
                runfetchcmd("git clone -n %s://%s%s%s %s" % (ud.proto, username, ud.host, ud.path, repodir), d)

        os.chdir(repodir)
        # Remove all but the .git directory
        if not self._contains_ref(ud.tag, d):
            runfetchcmd("rm * -Rf", d)
            runfetchcmd("git fetch %s://%s%s%s %s" % (ud.proto, username, ud.host, ud.path, ud.branch), d)
            runfetchcmd("git fetch --tags %s://%s%s%s" % (ud.proto, username, ud.host, ud.path), d)
            runfetchcmd("git prune-packed", d)
            runfetchcmd("git pack-redundant --all | xargs -r rm", d)

        os.chdir(repodir)
        mirror_tarballs = data.getVar("BB_GENERATE_MIRROR_TARBALLS", d, True)
        if mirror_tarballs != "0": 
            bb.msg.note(1, bb.msg.domain.Fetcher, "Creating tarball of git repository")
            runfetchcmd("tar -czf %s %s" % (repofile, os.path.join(".", ".git", "*") ), d)

        # Fresh checkout area for this tag.
        if os.path.exists(codir):
            bb.utils.prunedir(codir)

        bb.mkdirhier(codir)
        os.chdir(repodir)
        runfetchcmd("git read-tree %s" % (ud.tag), d)
        runfetchcmd("git checkout-index -q -f --prefix=%s -a" % (os.path.join(codir, "git", "")), d)

        os.chdir(codir)
        bb.msg.note(1, bb.msg.domain.Fetcher, "Creating tarball of git checkout")
        runfetchcmd("tar -czf %s %s" % (ud.localpath, os.path.join(".", "*") ), d)

        os.chdir(repodir)
        bb.utils.prunedir(codir)
Ejemplo n.º 37
0
    def urldata_init(self, ud, d):
        """
        init git specific variable within url data
        so that the git method like latest_revision() can work

        Raises bb.fetch2.ParameterError for unsupported protocols or
        mismatched name/branch parameter counts.
        """
        if "protocol" in ud.parm:
            ud.proto = ud.parm["protocol"]
        elif not ud.host:
            # No host can only mean a local repository.
            ud.proto = "file"
        else:
            ud.proto = "git"

        if ud.proto not in ("git", "file", "ssh", "http", "https", "rsync"):
            raise bb.fetch2.ParameterError("Invalid protocol type", ud.url)

        ud.nocheckout = ud.parm.get("nocheckout", "0") == "1"

        ud.rebaseable = ud.parm.get("rebaseable", "0") == "1"

        branches = ud.parm.get("branch", "master").split(",")
        if len(branches) != len(ud.names):
            raise bb.fetch2.ParameterError("The number of name and branch parameters is not balanced", ud.url)
        ud.branches = {}
        for name in ud.names:
            branch = branches[ud.names.index(name)]
            ud.branches[name] = branch

        ud.basecmd = data.getVar("FETCHCMD_git", d, True) or "git"

        ud.write_tarballs = ((data.getVar("BB_GENERATE_MIRROR_TARBALLS", d, True) or "0") != "0") or ud.rebaseable

        ud.setup_revisons(d)

        for name in ud.names:
            # Anything that doesn't look like a full 40-character SHA-1
            # revision is treated as a ref name and resolved to one.
            revision = ud.revisions[name]
            if (
                not revision
                or len(revision) != 40
                or any(c not in "abcdef0123456789" for c in revision)
            ):
                # Fix: only reinterpret the value as a branch when one was
                # actually supplied; previously an empty/None revision was
                # written into ud.branches, clobbering the configured branch.
                if revision:
                    ud.branches[name] = revision
                ud.revisions[name] = self.latest_revision(ud.url, ud, d, name)

        gitsrcname = "%s%s" % (ud.host, ud.path.replace("/", "."))
        # for rebaseable git repo, it is necessary to keep mirror tar ball
        # per revision, so that even the revision disappears from the
        # upstream repo in the future, the mirror will remain intact and still
        # contains the revision
        if ud.rebaseable:
            for name in ud.names:
                gitsrcname = gitsrcname + "_" + ud.revisions[name]
        ud.mirrortarball = "git2_%s.tar.gz" % (gitsrcname)
        ud.fullmirror = os.path.join(data.getVar("DL_DIR", d, True), ud.mirrortarball)
        ud.clonedir = os.path.join(data.expand("${GITDIR}", d), gitsrcname)

        ud.localfile = ud.clonedir
Ejemplo n.º 38
0
    def testGVar(self):
        """Check setVar/getVar/delVar round-trip through a DataSmart store."""
        # import the data module
        from bb import data
        from bb import data_smart

        d = data_smart.DataSmart()
        data.setVar('TEST', 'testcontents', d)
        # Fix: assertEquals is a deprecated alias; use assertEqual.
        self.assertEqual(data.getVar('TEST', d), 'testcontents', 'Setting Variable Failed')
        data.delVar('TEST', d)
        # After deletion the variable must be gone.
        self.assertEqual(data.getVar('TEST', d), None)
Ejemplo n.º 39
0
def test_assign():
    print "Testing assign"
    data = bb.data.init()
    a = ast.Assignment('KEY', 'VALUE')
    a.eval(data, None)

    print "\t", data.getVar('KEY', False) == 'VALUE'
    a = ast.Assignment('KEY', 'NEWVALUE')
    a.eval(data, None)
    print "\t", data.getVar('KEY', False) == 'NEWVALUE'
Ejemplo n.º 40
0
def test_immediate():
    print "Testing immediate assignment"
    data = bb.data.init()
    a = ast.ImmediateAssignment('KEY', 'VALUE')
    a.eval(data, None)

    print "\t", data.getVar('KEY', False) == 'VALUE'
    a = ast.ImmediateAssignment('KEY', '${@3*3}')
    a.eval(data, None)
    print "\t", data.getVar('KEY', False) == '9'
Ejemplo n.º 41
0
def exec_func(func, d, dirs=None):
    """Execute an BB 'function'.

    Cleans 'cleandirs', creates 'dirs' (running in the last one, or ${B}),
    takes any 'lockfiles' locks, then dispatches to the python or shell
    executor. Locks are released and the previous cwd restored even if the
    executed function raises.
    """

    body = data.getVar(func, d)
    if not body:
        return

    flags = data.getVarFlags(func, d)
    for item in [
            'deps', 'check', 'interactive', 'python', 'cleandirs', 'dirs',
            'lockfiles', 'fakeroot'
    ]:
        if not item in flags:
            flags[item] = None

    ispython = flags['python']

    cleandirs = (data.expand(flags['cleandirs'], d) or "").split()
    for cdir in cleandirs:
        os.system("rm -rf %s" % cdir)

    if dirs:
        dirs = data.expand(dirs, d)
    else:
        dirs = (data.expand(flags['dirs'], d) or "").split()
    for adir in dirs:
        mkdirhier(adir)

    if len(dirs) > 0:
        adir = dirs[-1]
    else:
        adir = data.getVar('B', d, 1)

    # Remember where we were so we can return there afterwards.
    try:
        prevdir = os.getcwd()
    except OSError:
        prevdir = data.getVar('TOPDIR', d, True)
    if adir and os.access(adir, os.F_OK):
        os.chdir(adir)

    locks = []
    lockfiles = (data.expand(flags['lockfiles'], d) or "").split()
    for lock in lockfiles:
        locks.append(bb.utils.lockfile(lock))

    try:
        if flags['python']:
            exec_func_python(func, d)
        else:
            exec_func_shell(func, d, flags)
    finally:
        # Fix: previously locks were leaked and the cwd left changed if
        # the executed function raised an exception.
        for lock in locks:
            bb.utils.unlockfile(lock)
        if os.path.exists(prevdir):
            os.chdir(prevdir)
Ejemplo n.º 42
0
def extract_stamp_data(d, fn):
    """
    Extract stamp data from d, which is either a data dictionary (fn unset)
    or a dataCache entry (fn set).
    Returns a (task graph/queue, stamp, task deps) tuple.
    """
    if fn:
        return (d.task_queues[fn], d.stamp[fn], d.task_deps[fn])

    graph = data.getVar('_task_graph', d)
    if not graph:
        # Lazily create and cache the task graph in the datastore.
        graph = bb.digraph()
        data.setVar('_task_graph', graph, d)
    return (graph, data.getVar('STAMP', d, 1), None)
Ejemplo n.º 43
0
    def parse( self, params ):
        """(Re-)parse .bb files and calculate the dependency graph"""
        cooker.status = cache.CacheData()
        cfgdata = cooker.configuration.data
        ignored = data.getVar("ASSUME_PROVIDED", cfgdata, 1) or ""
        cooker.status.ignored_dependencies = set(ignored.split())
        cooker.handleCollections(data.getVar("BBFILE_COLLECTIONS", cfgdata, 1))

        (filelist, masked) = cooker.collect_bbfiles()
        cooker.parse_bbfiles(filelist, masked, cooker.myProgressCallback)
        cooker.buildDepgraph()
        # Remember globally that a parse has completed.
        global parsed
        parsed = True
        print
Ejemplo n.º 44
0
def exec_func(func, d, dirs=None, logfile=NULL):
    """Execute an BB 'function'.

    Prepares clean/work directories and lockfiles for *func*, then runs it
    via the python or shell executor, logging to *logfile* and recording
    the run in ${T}/run.<func>.<pid>.
    """

    code = data.getVar(func, d)
    if not code:
        if code is None:
            logger.warn("Function %s doesn't exist", func)
        return

    flags = data.getVarFlags(func, d)

    # Wipe anything listed in the function's 'cleandirs' flag first.
    cleandirs = flags.get('cleandirs')
    if cleandirs:
        for cdir in data.expand(cleandirs, d).split():
            bb.utils.remove(cdir, True)

    # An explicit dirs argument wins over the function's 'dirs' flag.
    if dirs is None:
        dirs = flags.get('dirs')
        if dirs:
            dirs = data.expand(dirs, d).split()

    if dirs:
        for newdir in dirs:
            bb.utils.mkdirhier(newdir)
        workdir = dirs[-1]
    else:
        # Run in ${B} when it exists, otherwise keep the current cwd.
        workdir = data.getVar('B', d, 1)
        if not os.path.exists(workdir):
            workdir = None

    ispython = flags.get('python')
    fakeroot = flags.get('fakeroot')

    lockflag = flags.get('lockfiles')
    lockfiles = [data.expand(f, d) for f in lockflag.split()] if lockflag else None

    tempdir = data.getVar('T', d, 1)
    runfile = os.path.join(tempdir, 'run.{0}.{1}'.format(func, os.getpid()))

    with bb.utils.fileslocked(lockfiles):
        if ispython:
            exec_func_python(func, d, runfile, logfile, cwd=workdir)
        else:
            exec_func_shell(func,
                            d,
                            runfile,
                            logfile,
                            cwd=workdir,
                            fakeroot=fakeroot)
Ejemplo n.º 45
0
def inherit(files, fn, lineno, d):
    """Inherit the named classes into d, tracking them in __inherit_cache."""
    cache = data.getVar('__inherit_cache', d) or []
    for classfile in d.expand(files).split():
        # Bare class names are resolved to classes/<name>.bbclass.
        if not os.path.isabs(classfile) and not classfile.endswith(".bbclass"):
            classfile = os.path.join('classes', '%s.bbclass' % classfile)

        if classfile not in cache:
            logger.log(logging.DEBUG - 1, "BB %s:%d: inheriting %s", fn,
                       lineno, classfile)
            cache.append(classfile)
            data.setVar('__inherit_cache', cache, d)
            include(fn, classfile, lineno, d, "inherit")
            # The include may itself have inherited classes; re-read the cache.
            cache = data.getVar('__inherit_cache', d) or []
Ejemplo n.º 46
0
    def __init__(self, data):
        """Initialise empty hash/dependency caches and load the whitelists."""
        self.basehash = {}
        self.taskhash = {}
        self.taskdeps = {}
        self.runtaskdeps = {}
        self.gendeps = {}
        self.lookupcache = {}
        whitelist = data.getVar("BB_HASHBASE_WHITELIST", True) or ""
        self.basewhitelist = set(whitelist.split())
        self.taskwhitelist = data.getVar("BB_HASHTASK_WHITELIST", True) or None

        # Pre-compile the task whitelist pattern, if one is configured.
        self.twl = re.compile(self.taskwhitelist) if self.taskwhitelist else None
Ejemplo n.º 47
0
    def localcount_internal_helper(ud, d):
        """
        Return:
            a) a locked localcount if specified
            b) None otherwise
        """

        count = None
        if 'name' in ud.parm:
            pn = data.getVar("PN", d, 1)
            # Per-name override takes precedence over the global LOCALCOUNT.
            count = data.getVar("LOCALCOUNT_" + ud.parm['name'], d, 1)
        if not count:
            count = data.getVar("LOCALCOUNT", d, 1)
        return count
Ejemplo n.º 48
0
def inherit(files, d):
    """Inherit the given classes into d, recording them in __inherit_cache."""
    cache = data.getVar('__inherit_cache', d) or []
    fn = ""
    lineno = 0
    files = data.expand(files, d)
    for f in files:
        # Bare class names are resolved to classes/<name>.bbclass.
        if f[0] != "/" and f[-8:] != ".bbclass":
            f = os.path.join('classes', '%s.bbclass' % f)

        if f not in cache:
            bb.msg.debug(2, bb.msg.domain.Parsing, "BB %s:%d: inheriting %s" % (fn, lineno, f))
            cache.append(f)
            data.setVar('__inherit_cache', cache, d)
            include(fn, f, d, "inherit")
            # The include may itself have inherited classes; re-read the cache.
            cache = data.getVar('__inherit_cache', d) or []
Ejemplo n.º 49
0
    def doparse(url, d):
        """Split a p4 url into (host, path, user, pswd, parm), resolving the changeset."""
        parm = {}
        path = url.split("://")[1]

        # user:pswd:host:port@path form; otherwise fall back to P4PORT.
        if path.find("@") != -1:
            (user, pswd, host, port) = path.split('@')[0].split(":")
            path = path.split('@')[1]
        else:
            (host, port) = data.getVar('P4PORT', d).split(':')
            user = ""
            pswd = ""

        # Trailing ;key=value pairs become the parm dict.
        if path.find(";") != -1:
            keys = []
            values = []
            for item in path.split(';'):
                if item.count('='):
                    (key, value) = item.split('=')
                    keys.append(key)
                    values.append(value)
            parm = dict(zip(keys, values))
        path = "//" + path.split(';')[0]
        host += ":%s" % (port)
        parm["cset"] = Perforce.getcset(d, path, host, user, pswd, parm)

        return host, path, user, pswd, parm
Ejemplo n.º 50
0
 def init_rundepcheck(self, data):
     """Compile the BB_TASKHASH_IGNORE_TASKS pattern, if one is configured."""
     ignored = data.getVar("BB_TASKHASH_IGNORE_TASKS") or None
     self.taskhash_ignore_tasks = ignored
     self.twl = re.compile(ignored) if ignored else None
Ejemplo n.º 51
0
    def parseConfigurationFile(self, afile):
        """Parse a configuration file into self.configuration.data.

        Also inherits 'base' plus any INHERIT classes, registers any event
        handlers found so far, initialises the fetchers and fires
        ConfigParsed. Fatal on IOError.
        """
        try:
            self.configuration.data = bb.parse.handle(afile,
                                                      self.configuration.data)

            # Handle any INHERITs and inherit the base class
            inherits = ["base"] + (bb.data.getVar(
                'INHERIT', self.configuration.data, True) or "").split()
            for inherit in inherits:
                self.configuration.data = bb.parse.handle(
                    os.path.join('classes', '%s.bbclass' % inherit),
                    self.configuration.data, True)

            # Nomally we only register event handlers at the end of parsing .bb files
            # We register any handlers we've found so far here...
            for var in data.getVar('__BBHANDLERS',
                                   self.configuration.data) or []:
                bb.event.register(var,
                                  bb.data.getVar(var, self.configuration.data))

            bb.fetch.fetcher_init(self.configuration.data)

            bb.event.fire(bb.event.ConfigParsed(self.configuration.data))

        except IOError, e:
            bb.msg.fatal(bb.msg.domain.Parsing,
                         "Error when parsing %s: %s" % (afile, str(e)))
Ejemplo n.º 52
0
    def urldata_init(self, ud, d):
        """
        init NPM specific variable within url data
        """
        # Artifact name comes from an explicit parameter or from the URL path.
        ud.basename = ud.parm['downloadfilename'] if 'downloadfilename' in ud.parm else os.path.basename(ud.path)

        # can't call it ud.name otherwise fetcher base class will start doing sha1stuff
        # TODO: find a way to get an sha1/sha256 manifest of pkg & all deps
        ud.pkgname = ud.parm.get("name", None)
        if not ud.pkgname:
            raise ParameterError("NPM fetcher requires a name parameter", ud.url)
        ud.version = ud.parm.get("version", None)
        if not ud.version:
            raise ParameterError("NPM fetcher requires a version parameter", ud.url)

        ud.bbnpmmanifest = "%s-%s.deps.json" % (ud.pkgname, ud.version)
        registry_host = ud.url.replace('npm://', '', 1).split(';')[0]
        ud.registry = "http://%s" % registry_host

        prefixdir = "npm/%s" % ud.pkgname
        ud.pkgdatadir = d.expand("${DL_DIR}/%s" % prefixdir)
        if not os.path.exists(ud.pkgdatadir):
            bb.utils.mkdirhier(ud.pkgdatadir)
        ud.localpath = d.expand("${DL_DIR}/npm/%s" % ud.bbnpmmanifest)

        self.basecmd = d.getVar("FETCHCMD_wget", True) or "/usr/bin/env wget -O -t 2 -T 30 -nv --passive-ftp --no-check-certificate "
        ud.prefixdir = prefixdir

        ud.write_tarballs = ((data.getVar("BB_GENERATE_MIRROR_TARBALLS", d, True) or "0") != "0")
        ud.mirrortarball = 'npm_%s-%s.tar.xz' % (ud.pkgname, ud.version)
        ud.fullmirror = os.path.join(d.getVar("DL_DIR", True), ud.mirrortarball)
Ejemplo n.º 53
0
    def localpath(self, url, ud, d):
        """Set up hg checkout paths/revision on ud and return the local tarball path."""
        if "module" not in ud.parm:
            raise MissingParameterError("hg method needs a 'module' parameter")

        ud.module = ud.parm["module"]

        # Create paths to mercurial checkouts
        relpath = self._strip_leading_slashes(ud.path)
        ud.pkgdir = os.path.join(data.expand('${HGDIR}', d), ud.host, relpath)
        ud.moddir = os.path.join(ud.pkgdir, ud.module)

        if 'rev' in ud.parm:
            ud.revision = ud.parm['rev']
        else:
            # The helper returns True ("use latest"), a concrete tag, or a
            # falsy value (also "use latest").
            tag = Fetch.srcrev_internal_helper(ud, d)
            if tag and tag is not True:
                ud.revision = tag
            else:
                ud.revision = self.latest_revision(url, ud, d)

        ud.localfile = data.expand('%s_%s_%s_%s.tar.gz' % (ud.module.replace('/', '.'), ud.host, ud.path.replace('/', '.'), ud.revision), d)

        return os.path.join(data.getVar("DL_DIR", d, True), ud.localfile)
Ejemplo n.º 54
0
    def which(self, params):
        """Computes the providers for a given providee"""
        # Need to use taskData for this information
        item = params[0]

        self._checkParsed()

        preferred = data.getVar("PREFERRED_PROVIDER_%s" % item,
                                cooker.configuration.data, 1) or item

        try:
            lv, lf, pv, pf = Providers.findBestProvider(
                preferred, cooker.configuration.data, cooker.status)
        except KeyError:
            lv = lf = pv = pf = None

        try:
            providers = cooker.status.providers[item]
        except KeyError:
            print("SHELL: ERROR: Nothing provides", preferred)
            return

        # Mark the best provider with (***).
        for provider in providers:
            marker = " (***) " if provider == pf else "       "
            print("%s%s" % (marker, provider))
Ejemplo n.º 55
0
 def localpath(self, url, urldata, d):
     """
     Return the local filename of a given url assuming a successful fetch.
     Can also setup variables in urldata for use in go (saving code duplication
     and duplicate code execution)
     """
     dldir = data.getVar("DL_DIR", d, True)
     return os.path.join(dldir, urldata.localfile)
Ejemplo n.º 56
0
    def download(self, ud, d):
        """Fetch urls"""

        uri = URI(ud.url)
        basecmd = 'sftp -oBatchMode=yes'

        # sftp takes the port as a -P option, not as part of the URI.
        port = ''
        if uri.port:
            port = '-P %d' % uri.port
            uri.port = None

        destfile = os.path.join(data.getVar('DL_DIR', d, True), ud.localfile)

        user = uri.userinfo + '@' if uri.userinfo else ''

        path = uri.path
        # Supoprt URIs relative to the user's home directory, with
        # the tilde syntax. (E.g. <sftp://example.com/~/foo.diff>).
        if path.startswith('/~/'):
            path = path[3:]

        remote = '%s%s:%s' % (user, uri.hostname, path)
        cmd = '%s %s %s %s' % (basecmd, port, remote, destfile)

        bb.fetch2.check_network_access(d, cmd, ud.url)
        runfetchcmd(cmd, d)
        return True
Ejemplo n.º 57
0
    def load_bbfile( self, bbfile , config):
        """
        Load and parse one .bb build file
        Return the data and whether parsing resulted in the file being skipped
        """

        import bb
        from bb import utils, data, parse, debug, event, fatal

        # expand tmpdir to include this topdir
        data.setVar('TMPDIR', data.getVar('TMPDIR', config, 1) or "", config)
        bbfile_loc = os.path.abspath(os.path.dirname(bbfile))
        oldpath = os.path.abspath(os.getcwd())
        # Parse from the recipe's directory so relative includes resolve.
        if bb.parse.cached_mtime_noerror(bbfile_loc):
            os.chdir(bbfile_loc)
        bb_data = data.init_db(config)
        try:
            bb_data = parse.handle(bbfile, bb_data) # read .bb data
            return bb_data, False
        except bb.parse.SkipPackage:
            return bb_data, True
        finally:
            # Fix: restore the previous working directory on every exit
            # path (replaces three duplicated os.chdir() calls, including
            # a bare "except: chdir; raise" clause).
            os.chdir(oldpath)
Ejemplo n.º 58
-1
def exec_func(func, d, dirs = None):
    """Execute an BB 'function'

    func -- name of the metadata variable holding the function body.
    d    -- the datastore the body and its flags are read from.
    dirs -- optional override for the function's 'dirs' flag.

    Prepares clean/work directories, picks a working directory and log
    file names; the remainder of the function (not visible in this
    excerpt) presumably runs the body as python or shell code.
    """

    body = data.getVar(func, d)
    if not body:
        # Unset or empty function body: nothing to execute.
        return

    flags = data.getVarFlags(func, d)
    # Normalise: make sure every flag consulted below is present
    # (None meaning "not set") so lookups never KeyError.
    for item in ['deps', 'check', 'interactive', 'python', 'cleandirs', 'dirs', 'lockfiles', 'fakeroot']:
        if not item in flags:
            flags[item] = None

    ispython = flags['python']

    # Directories listed in 'cleandirs' are removed wholesale before
    # the function runs.
    cleandirs = (data.expand(flags['cleandirs'], d) or "").split()
    for cdir in cleandirs:
        os.system("rm -rf %s" % cdir)

    # An explicit dirs argument overrides the 'dirs' flag; every
    # directory is created if missing.
    if dirs:
        dirs = data.expand(dirs, d)
    else:
        dirs = (data.expand(flags['dirs'], d) or "").split()
    for adir in dirs:
        mkdirhier(adir)

    # The last directory becomes the working directory, falling back
    # to the B variable when no dirs were given.
    if len(dirs) > 0:
        adir = dirs[-1]
    else:
        adir = data.getVar('B', d, 1)

    # Save current directory
    try:
        prevdir = os.getcwd()
    except OSError:
        # cwd may no longer exist; fall back to TOPDIR.
        prevdir = data.getVar('TOPDIR', d, True)

    # Setup logfiles
    t = data.getVar('T', d, 1)
    if not t:
        bb.msg.fatal(bb.msg.domain.Build, "T not set")
    mkdirhier(t)
    # Gross hack, FIXME
    import random
    logfile = "%s/log.%s.%s.%s" % (t, func, str(os.getpid()),random.random())
    runfile = "%s/run.%s.%s" % (t, func, str(os.getpid()))

    # Change to correct directory (if specified)
    if adir and os.access(adir, os.F_OK):
        os.chdir(adir)

    # Handle logfiles
    si = file('/dev/null', 'r')
    try:
        # At higher debug levels (or for python functions) tee the
        # output to the console as well as the log file.
        if bb.msg.debug_level['default'] > 0 or ispython:
            so = os.popen("tee \"%s\"" % logfile, "w")
        else:
            so = file(logfile, 'w')
    except OSError, e:
        bb.msg.error(bb.msg.domain.Build, "opening log file: %s" % e)
        pass
    # NOTE(review): excerpt ends here -- si/so/runfile/prevdir are set up
    # but used only in the continuation that is not part of this chunk.
Ejemplo n.º 59
-1
def exec_func(func, d, dirs = None):
    """Execute an BB 'function'

    func -- name of the metadata variable holding the function body.
    d    -- the datastore the body and its flags are read from.
    dirs -- optional override for the function's 'dirs' flag.
    """

    body = data.getVar(func, d)
    if not body:
        if body is None:
            # Distinguish "variable missing" (warn) from "empty body"
            # (silently skip).
            logger.warn("Function %s doesn't exist", func)
        return

    flags = data.getVarFlags(func, d)
    # Directories listed in 'cleandirs' are removed recursively before
    # the function runs.
    cleandirs = flags.get('cleandirs')
    if cleandirs:
        for cdir in data.expand(cleandirs, d).split():
            bb.utils.remove(cdir, True)

    # An explicit dirs argument takes precedence over the 'dirs' flag.
    if dirs is None:
        dirs = flags.get('dirs')
        if dirs:
            dirs = data.expand(dirs, d).split()

    if dirs:
        for adir in dirs:
            bb.utils.mkdirhier(adir)
        # The last entry in 'dirs' becomes the working directory.
        adir = dirs[-1]
    else:
        # No dirs given: work in (and create) the B directory.
        adir = data.getVar('B', d, 1)
        bb.utils.mkdirhier(adir)

    ispython = flags.get('python')

    # 'lockfiles' flag: files to hold locked while the body executes.
    lockflag = flags.get('lockfiles')
    if lockflag:
        lockfiles = [data.expand(f, d) for f in lockflag.split()]
    else:
        lockfiles = None

    tempdir = data.getVar('T', d, 1)

    # BB_RUNTASK, when set, names the task this function runs under;
    # the "or func" fallback allows items to be executed outside of the
    # normal task set, such as buildhistory.
    task = data.getVar('BB_RUNTASK', d, 1) or func
    if task == func:
        taskfunc = task
    else:
        taskfunc = "%s.%s" % (task, func)

    # BB_RUNFMT controls the name of the generated run file under T.
    runfmt = data.getVar('BB_RUNFMT', d, 1) or "run.{func}.{pid}"
    runfn = runfmt.format(taskfunc=taskfunc, task=task, func=func, pid=os.getpid())
    runfile = os.path.join(tempdir, runfn)
    bb.utils.mkdirhier(os.path.dirname(runfile))

    # Hold any lockfiles for the duration of the actual execution.
    with bb.utils.fileslocked(lockfiles):
        if ispython:
            exec_func_python(func, d, runfile, cwd=adir)
        else:
            exec_func_shell(func, d, runfile, cwd=adir)