Example #1
def exec_task(task, d):
    """Execute an BB 'task'

       The primary difference between executing a task versus executing
       a function is that a task exists in the task digraph, and therefore
       has dependencies amongst other tasks."""

    # Check whether this is a valid task
    if not data.getVarFlag(task, 'task', d):
        raise EventException("No such task", InvalidTask(task, d))

    try:
        bb.msg.debug(1, bb.msg.domain.Build, "Executing task %s" % task)
        old_overrides = data.getVar('OVERRIDES', d, 0)
        localdata = data.createCopy(d)
        data.setVar('OVERRIDES', 'task-%s:%s' % (task[3:], old_overrides),
                    localdata)
        data.update_data(localdata)
        data.expandKeys(localdata)
        event.fire(TaskStarted(task, localdata), localdata)
        exec_func(task, localdata)
        event.fire(TaskSucceeded(task, localdata), localdata)
    except FuncFailed, message:
        # Try to extract the optional logfile
        try:
            (msg, logfile) = message
        except:
            logfile = None
            msg = message
        bb.msg.note(1, bb.msg.domain.Build, "Task failed: %s" % message)
        failedevent = TaskFailed(msg, logfile, task, d)
        event.fire(failedevent, d)
        raise EventException("Function failed in task: %s" % message,
                             failedevent)
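
The task-specific OVERRIDES value that exec_task builds ('task-<name>:...') only takes effect once data.update_data() is applied to the copied datastore. Below is a minimal sketch of that override mechanism using the same legacy bb.data / bb.data_smart API seen throughout these examples; the variable names are illustrative and not taken from any snippet above.

from bb import data
from bb import data_smart

d = data_smart.DataSmart()
data.setVar('CFLAGS', '-O2', d)
data.setVar('CFLAGS_task-compile', '-O0 -g', d)   # hypothetical per-task override

localdata = data.createCopy(d)
data.setVar('OVERRIDES', 'task-compile', localdata)
data.update_data(localdata)                       # apply *_task-compile overrides

print(data.getVar('CFLAGS', localdata))           # expected: -O0 -g
print(data.getVar('CFLAGS', d))                   # original store untouched: -O2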
Example #2
def finalise(fn, d):
    data.expandKeys(d)
    data.update_data(d)
    anonqueue = data.getVar("__anonqueue", d, 1) or []
    body = [x['content'] for x in anonqueue]
    flag = {'python': 1, 'func': 1}
    data.setVar("__anonfunc", "\n".join(body), d)
    data.setVarFlags("__anonfunc", flag, d)
    from bb import build
    try:
        t = data.getVar('T', d)
        data.setVar('T', '${TMPDIR}/anonfunc/', d)
        anonfuncs = data.getVar('__BBANONFUNCS', d) or []
        code = ""
        for f in anonfuncs:
            code = code + "    %s(d)\n" % f
        data.setVar("__anonfunc", code, d)
        build.exec_func("__anonfunc", d)
        data.delVar('T', d)
        if t:
            data.setVar('T', t, d)
    except Exception, e:
        bb.msg.debug(1, bb.msg.domain.Parsing,
                     "Exception when executing anonymous function: %s" % e)
        raise
Example #3
    def download(self, loc, ud, d):
        """Fetch urls"""

        svkroot = ud.host + ud.path

        svkcmd = "svk co -r {%s} %s/%s" % (ud.date, svkroot, ud.module)

        if ud.revision:
            svkcmd = "svk co -r %s %s/%s" % (ud.revision, svkroot, ud.module)

        # create temp directory
        localdata = data.createCopy(d)
        data.update_data(localdata)
        logger.debug(2, "Fetch: creating temporary directory")
        bb.utils.mkdirhier(data.expand('${WORKDIR}', localdata))
        data.setVar('TMPBASE', data.expand('${WORKDIR}/oesvk.XXXXXX', localdata), localdata)
        tmpfile, errors = bb.process.run(data.getVar('MKTEMPDIRCMD', localdata, True) or "false")
        tmpfile = tmpfile.strip()
        if not tmpfile:
            logger.error("Fetch: unable to create temporary directory. Make sure 'mktemp' is in the PATH.")
            raise FetchError("Fetch: unable to create temporary directory.. make sure 'mktemp' is in the PATH.", loc)

        # check out sources there
        os.chdir(tmpfile)
        logger.info("Fetch " + loc)
        logger.debug(1, "Running %s", svkcmd)
        runfetchcmd(svkcmd, d, cleanup = [tmpfile])

        os.chdir(os.path.join(tmpfile, os.path.dirname(ud.module)))
        # tar them up to a defined filename
        runfetchcmd("tar -czf %s %s" % (ud.localpath, os.path.basename(ud.module)), d, cleanup = [ud.localpath])

        # cleanup
        bb.utils.prunedir(tmpfile)
Example #4
def exec_task(task, d):
    """Execute an BB 'task'

       The primary difference between executing a task versus executing
       a function is that a task exists in the task digraph, and therefore
       has dependencies amongst other tasks."""

    # Check whether this is a valid task
    if not data.getVarFlag(task, 'task', d):
        raise EventException("No such task", InvalidTask(task, d))

    try:
        bb.msg.debug(1, bb.msg.domain.Build, "Executing task %s" % task)
        old_overrides = data.getVar('OVERRIDES', d, 0)
        localdata = data.createCopy(d)
        data.setVar('OVERRIDES', 'task-%s:%s' % (task[3:], old_overrides), localdata)
        data.update_data(localdata)
        data.expandKeys(localdata)
        event.fire(TaskStarted(task, localdata))
        exec_func(task, localdata)
        event.fire(TaskSucceeded(task, localdata))
    except FuncFailed, message:
        # Try to extract the optional logfile
        try:
            (msg, logfile) = message
        except:
            logfile = None
            msg = message
        bb.msg.note(1, bb.msg.domain.Build, "Task failed: %s" % message )
        failedevent = TaskFailed(msg, logfile, task, d)
        event.fire(failedevent)
        raise EventException("Function failed in task: %s" % message, failedevent)
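
When the task body fails, both variants above fire a TaskFailed event and then raise an EventException, so callers are expected to wrap the call. A hedged usage sketch in the same Python 2 style as these examples; it assumes it runs in the same module context (so exec_task, EventException and bb are available) and that 'd' is an already-populated datastore with a 'do_compile' task defined.

try:
    exec_task('do_compile', d)
except EventException, e:
    # exec_task has already fired TaskFailed; just report and carry on
    bb.msg.note(1, bb.msg.domain.Build, "do_compile failed: %s" % e)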
Example #5
    def execute(graph, item):
        if data.getVarFlag(item, 'task', d):
            if item in task_cache:
                return 1

            if task != item:
                # deeper than toplevel, exec w/ deps
                exec_task(item, d)
                return 1

            try:
                debug(1, "Executing task %s" % item)
                old_overrides = data.getVar('OVERRIDES', d, 0)
                from copy import deepcopy
                localdata = deepcopy(d)
                data.setVar('OVERRIDES', 'task_%s:%s' % (item, old_overrides), localdata)
                data.update_data(localdata)
                event.fire(TaskStarted(item, localdata))
                exec_func(item, localdata)
                event.fire(TaskSucceeded(item, localdata))
                task_cache.append(item)
            except FuncFailed, reason:
                note( "Task failed: %s" % reason )
                failedevent = TaskFailed(item, d)
                event.fire(failedevent)
                raise EventException(None, failedevent)
Example #6
    def execute(graph, item):
        if data.getVarFlag(item, 'task', d):
            if item in task_cache:
                return 1

            if task != item:
                # deeper than toplevel, exec w/ deps
                exec_task(item, d)
                return 1

            try:
                bb.msg.debug(1, bb.msg.domain.Build, "Executing task %s" % item)
                old_overrides = data.getVar('OVERRIDES', d, 0)
                localdata = data.createCopy(d)
                data.setVar('OVERRIDES', 'task_%s:%s' % (item, old_overrides), localdata)
                data.update_data(localdata)
                event.fire(TaskStarted(item, localdata))
                exec_func(item, localdata)
                event.fire(TaskSucceeded(item, localdata))
                task_cache.append(item)
                data.setVar('_task_cache', task_cache, d)
            except FuncFailed, reason:
                bb.msg.note(1, bb.msg.domain.Build, "Task failed: %s" % reason )
                failedevent = TaskFailed(item, d)
                event.fire(failedevent)
                raise EventException("Function failed in task: %s" % reason, failedevent)
Example #7
    def download(self, ud, d):
        """Fetch urls"""

        svkroot = ud.host + ud.path

        svkcmd = "svk co -r {%s} %s/%s" % (ud.date, svkroot, ud.module)

        if ud.revision:
            svkcmd = "svk co -r %s %s/%s" % (ud.revision, svkroot, ud.module)

        # create temp directory
        localdata = data.createCopy(d)
        data.update_data(localdata)
        logger.debug(2, "Fetch: creating temporary directory")
        bb.utils.mkdirhier(data.expand('${WORKDIR}', localdata))
        data.setVar('TMPBASE', data.expand('${WORKDIR}/oesvk.XXXXXX', localdata), localdata)
        tmpfile, errors = bb.process.run(data.getVar('MKTEMPDIRCMD', localdata, True) or "false")
        tmpfile = tmpfile.strip()
        if not tmpfile:
            logger.error("Fetch: unable to create temporary directory. Make sure 'mktemp' is in the PATH.")
            raise FetchError("Fetch: unable to create temporary directory.. make sure 'mktemp' is in the PATH.", ud.url)

        # check out sources there
        os.chdir(tmpfile)
        logger.info("Fetch " + ud.url)
        logger.debug(1, "Running %s", svkcmd)
        runfetchcmd(svkcmd, d, cleanup = [tmpfile])

        os.chdir(os.path.join(tmpfile, os.path.dirname(ud.module)))
        # tar them up to a defined filename
        runfetchcmd("tar -czf %s %s" % (ud.localpath, os.path.basename(ud.module)), d, cleanup = [ud.localpath])

        # cleanup
        bb.utils.prunedir(tmpfile)
Example #8
def exec_task(fn, task, d):
    """Execute a BB 'task'

    Execution of a task involves a bit more setup than executing a function,
    running it with its own local metadata, and with some useful variables set.
    """

    # Check whether this is a valid task
    if not data.getVarFlag(task, 'task', d):
        raise InvalidTask(task, d)

    try:
        logger.debug(1, "Executing task %s", task)
        old_overrides = data.getVar('OVERRIDES', d, 0)
        localdata = data.createCopy(d)
        data.setVar('OVERRIDES', 'task-%s:%s' % (task[3:], old_overrides), localdata)
        data.update_data(localdata)
        data.expandKeys(localdata)
        data.setVar('BB_FILENAME', fn, d)
        data.setVar('BB_CURRENTTASK', task[3:], d)
        event.fire(TaskStarted(task, localdata), localdata)
        exec_func(task, localdata)
        event.fire(TaskSucceeded(task, localdata), localdata)
    except FuncFailed as exc:
        event.fire(TaskFailed(exc.name, exc.logfile, localdata), localdata)
        raise

    # make stamp, or cause event and raise exception
    if not data.getVarFlag(task, 'nostamp', d) and not data.getVarFlag(task, 'selfstamp', d):
        make_stamp(task, d)
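
This later variant also records BB_FILENAME and BB_CURRENTTASK and writes a stamp unless the task carries the 'nostamp' (or 'selfstamp') flag. A small hedged sketch of preparing and running such a task by hand, assuming the exec_task above, a populated datastore 'd', and an illustrative recipe filename; in real use, addtask in the metadata sets these flags instead.

data.setVar('do_mytask', 'echo "hello from do_mytask"', d)
data.setVarFlag('do_mytask', 'func', 1, d)       # the variable holds an executable body
data.setVarFlag('do_mytask', 'task', 1, d)       # required, or exec_task raises InvalidTask
data.setVarFlag('do_mytask', 'nostamp', 1, d)    # tell exec_task not to write a stamp
exec_task('example-recipe.bb', 'do_mytask', d)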
Example #9
    def fileBuild( self, params, cmd = "build" ):
        """Parse and build a .bb file"""
        global last_exception
        name = params[0]
        bf = completeFilePath( name )
        print "SHELL: Calling '%s' on '%s'" % ( cmd, bf )

        oldcmd = cooker.configuration.cmd
        cooker.configuration.cmd = cmd

        thisdata = data.createCopy(cooker.configuration.data)
        data.update_data(thisdata)
        data.expandKeys(thisdata)

        try:
            bbfile_data = parse.handle( bf, thisdata )
        except parse.ParseError:
            print "ERROR: Unable to open or parse '%s'" % bf
        else:
            # Remove stamp for target if force mode active
            if cooker.configuration.force:
                bb.msg.note(2, bb.msg.domain.RunQueue, "Remove stamp %s, %s" % (cmd, bf))
                bb.build.del_stamp('do_%s' % cmd, bbfile_data)

            item = data.getVar('PN', bbfile_data, 1)
            data.setVar( "_task_cache", [], bbfile_data ) # force
            try:
                cooker.tryBuildPackage( os.path.abspath( bf ), item, cmd, bbfile_data, True )
            except build.EventException, e:
                print "ERROR: Couldn't build '%s'" % name
                last_exception = e
Example #10
    def go(self, loc, ud, d):
        """Fetch urls"""

        if not self.forcefetch(loc, ud, d) and Fetch.try_mirror(
                d, ud.localfile):
            return

        svkroot = ud.host + ud.path

        # use the date recorded on the url data; a bare 'date' is undefined here
        svkcmd = "svk co -r {%s} %s/%s" % (ud.date, svkroot, ud.module)

        if ud.revision:
            svkcmd = "svk co -r %s %s/%s" % (ud.revision, svkroot, ud.module)

        # create temp directory
        localdata = data.createCopy(d)
        data.update_data(localdata)
        bb.msg.debug(2, bb.msg.domain.Fetcher,
                     "Fetch: creating temporary directory")
        bb.mkdirhier(data.expand('${WORKDIR}', localdata))
        data.setVar('TMPBASE', data.expand('${WORKDIR}/oesvk.XXXXXX',
                                           localdata), localdata)
        tmppipe = os.popen(
            data.getVar('MKTEMPDIRCMD', localdata, 1) or "false")
        tmpfile = tmppipe.readline().strip()
        if not tmpfile:
            bb.msg.error(
                bb.msg.domain.Fetcher,
                "Fetch: unable to create temporary directory.. make sure 'mktemp' is in the PATH."
            )
            raise FetchError(ud.module)

        # check out sources there
        os.chdir(tmpfile)
        bb.msg.note(1, bb.msg.domain.Fetcher, "Fetch " + loc)
        bb.msg.debug(1, bb.msg.domain.Fetcher, "Running %s" % svkcmd)
        myret = os.system(svkcmd)
        if myret != 0:
            try:
                os.rmdir(tmpfile)
            except OSError:
                pass
            raise FetchError(ud.module)

        os.chdir(os.path.join(tmpfile, os.path.dirname(ud.module)))
        # tar them up to a defined filename
        myret = os.system("tar -czf %s %s" %
                          (ud.localpath, os.path.basename(ud.module)))
        if myret != 0:
            try:
                os.unlink(ud.localpath)
            except OSError:
                pass
            raise FetchError(ud.module)
        # cleanup
        os.system('rm -rf %s' % tmpfile)
Example #11
    def clean(self, ud, d):
        """ Clean CVS Files and tarballs """
        
        pkg = data.expand('${PN}', d)
        localdata = data.createCopy(d)
        data.setVar('OVERRIDES', "cvs:%s" % data.getVar('OVERRIDES', localdata), localdata)
        data.update_data(localdata)
        pkgdir = os.path.join(data.expand('${CVSDIR}', localdata), pkg)

        bb.utils.remove(pkgdir, True)
        bb.utils.remove(ud.localpath)
Example #12
    def go(self, uri, ud, d):
        """Fetch urls"""

        def fetch_uri(uri, ud, d):
            if os.path.exists(ud.localpath):
                # file exists, but we didn't complete it.. trying again..
                fetchcmd = data.getVar("RESUMECOMMAND", d, 1)
            else:
                fetchcmd = data.getVar("FETCHCOMMAND", d, 1)

            bb.msg.note(1, bb.msg.domain.Fetcher, "fetch " + uri)
            fetchcmd = fetchcmd.replace("${URI}", uri)
            fetchcmd = fetchcmd.replace("${FILE}", ud.basename)
            bb.msg.debug(2, bb.msg.domain.Fetcher, "executing " + fetchcmd)
            ret = os.system(fetchcmd)
            if ret != 0:
                return False

            # Sanity check since wget can pretend it succeeded when it didn't
            # Also, this used to happen if sourceforge sent us to the mirror page
            if not os.path.exists(ud.localpath):
                bb.msg.debug(
                    2,
                    bb.msg.domain.Fetcher,
                    "The fetch command for %s returned success but %s doesn't exist?..." % (uri, ud.localpath),
                )
                return False

            return True

        localdata = data.createCopy(d)
        data.setVar("OVERRIDES", "wget:" + data.getVar("OVERRIDES", localdata), localdata)
        data.update_data(localdata)

        premirrors = [i.split() for i in (data.getVar("PREMIRRORS", localdata, 1) or "").split("\n") if i]
        for (find, replace) in premirrors:
            newuri = uri_replace(uri, find, replace, d)
            if newuri != uri:
                if fetch_uri(newuri, ud, localdata):
                    return

        if fetch_uri(uri, ud, localdata):
            return

        # try mirrors
        mirrors = [i.split() for i in (data.getVar("MIRRORS", localdata, 1) or "").split("\n") if i]
        for (find, replace) in mirrors:
            newuri = uri_replace(uri, find, replace, d)
            if newuri != uri:
                if fetch_uri(newuri, ud, localdata):
                    return

        raise FetchError(uri)
Example #13
    def go(self, loc, ud, d):
        """Fetch urls"""

        svkroot = ud.host + ud.path

        svkcmd = "svk co -r {%s} %s/%s" % (ud.date, svkroot, ud.module)

        if ud.revision:
            svkcmd = "svk co -r %s %s/%s" % (ud.revision, svkroot, ud.module)

        # create temp directory
        localdata = data.createCopy(d)
        data.update_data(localdata)
        logger.debug(2, "Fetch: creating temporary directory")
        bb.mkdirhier(data.expand('${WORKDIR}', localdata))
        data.setVar('TMPBASE', data.expand('${WORKDIR}/oesvk.XXXXXX',
                                           localdata), localdata)
        tmppipe = os.popen(
            data.getVar('MKTEMPDIRCMD', localdata, 1) or "false")
        tmpfile = tmppipe.readline().strip()
        if not tmpfile:
            logger.error(
                "Fetch: unable to create temporary directory.. make sure 'mktemp' is in the PATH."
            )
            raise FetchError(ud.module)

        # check out sources there
        os.chdir(tmpfile)
        logger.info("Fetch " + loc)
        logger.debug(1, "Running %s", svkcmd)
        myret = os.system(svkcmd)
        if myret != 0:
            try:
                os.rmdir(tmpfile)
            except OSError:
                pass
            raise FetchError(ud.module)

        os.chdir(os.path.join(tmpfile, os.path.dirname(ud.module)))
        # tar them up to a defined filename
        myret = os.system("tar -czf %s %s" %
                          (ud.localpath, os.path.basename(ud.module)))
        if myret != 0:
            try:
                os.unlink(ud.localpath)
            except OSError:
                pass
            raise FetchError(ud.module)
        # cleanup
        bb.utils.prunedir(tmpfile)
Example #14
    def go(self, loc, ud, d):
        """Fetch urls"""

        if not self.forcefetch(loc, ud, d) and Fetch.try_mirror(d, ud.localfile):
            return

        svkroot = ud.host + ud.path

        # use the date recorded on the url data; a bare 'date' is undefined here
        svkcmd = "svk co -r {%s} %s/%s" % (ud.date, svkroot, ud.module)

        if ud.revision:
            svkcmd = "svk co -r %s %s/%s" % (ud.revision, svkroot, ud.module)

        # create temp directory
        localdata = data.createCopy(d)
        data.update_data(localdata)
        bb.msg.debug(2, bb.msg.domain.Fetcher, "Fetch: creating temporary directory")
        bb.mkdirhier(data.expand('${WORKDIR}', localdata))
        data.setVar('TMPBASE', data.expand('${WORKDIR}/oesvk.XXXXXX', localdata), localdata)
        tmppipe = os.popen(data.getVar('MKTEMPDIRCMD', localdata, 1) or "false")
        tmpfile = tmppipe.readline().strip()
        if not tmpfile:
            bb.msg.error(bb.msg.domain.Fetcher, "Fetch: unable to create temporary directory.. make sure 'mktemp' is in the PATH.")
            raise FetchError(ud.module)

        # check out sources there
        os.chdir(tmpfile)
        bb.msg.note(1, bb.msg.domain.Fetcher, "Fetch " + loc)
        bb.msg.debug(1, bb.msg.domain.Fetcher, "Running %s" % svkcmd)
        myret = os.system(svkcmd)
        if myret != 0:
            try:
                os.rmdir(tmpfile)
            except OSError:
                pass
            raise FetchError(ud.module)

        os.chdir(os.path.join(tmpfile, os.path.dirname(ud.module)))
        # tar them up to a defined filename
        myret = os.system("tar -czf %s %s" % (ud.localpath, os.path.basename(ud.module)))
        if myret != 0:
            try:
                os.unlink(ud.localpath)
            except OSError:
                pass
            raise FetchError(ud.module)
        # cleanup
        os.system('rm -rf %s' % tmpfile)
Example #15
    def go(self, uri, ud, d):
        """Fetch urls"""

        def fetch_uri(uri, ud, d):
            if os.path.exists(ud.localpath):
                # file exists, but we didn't complete it.. trying again..
                fetchcmd = data.getVar("RESUMECOMMAND", d, 1)
            else:
                fetchcmd = data.getVar("FETCHCOMMAND", d, 1)

            bb.msg.note(1, bb.msg.domain.Fetcher, "fetch " + uri)
            fetchcmd = fetchcmd.replace("${URI}", uri)
            fetchcmd = fetchcmd.replace("${FILE}", ud.basename)
            bb.msg.debug(2, bb.msg.domain.Fetcher, "executing " + fetchcmd)
            ret = os.system(fetchcmd)
            if ret != 0:
                return False

            # check if sourceforge did send us to the mirror page
            if not os.path.exists(ud.localpath):
                os.system("rm %s*" % ud.localpath) # FIXME shell quote it
                bb.msg.debug(2, bb.msg.domain.Fetcher, "sourceforge.net sent us to the mirror on %s" % ud.basename)
                return False

            return True

        localdata = data.createCopy(d)
        data.setVar('OVERRIDES', "wget:" + data.getVar('OVERRIDES', localdata), localdata)
        data.update_data(localdata)

        premirrors = [ i.split() for i in (data.getVar('PREMIRRORS', localdata, 1) or "").split('\n') if i ]
        for (find, replace) in premirrors:
            newuri = uri_replace(uri, find, replace, d)
            if newuri != uri:
                if fetch_uri(newuri, ud, localdata):
                    return

        if fetch_uri(uri, ud, localdata):
            return

        # try mirrors
        mirrors = [ i.split() for i in (data.getVar('MIRRORS', localdata, 1) or "").split('\n') if i ]
        for (find, replace) in mirrors:
            newuri = uri_replace(uri, find, replace, d)
            if newuri != uri:
                if fetch_uri(newuri, ud, localdata):
                    return

        raise FetchError(uri)
Example #16
File: svk.py  Project: ack3000/poky
    def go(self, loc, ud, d):
        """Fetch urls"""

        svkroot = ud.host + ud.path

        svkcmd = "svk co -r {%s} %s/%s" % (ud.date, svkroot, ud.module)

        if ud.revision:
            svkcmd = "svk co -r %s %s/%s" % (ud.revision, svkroot, ud.module)

        # create temp directory
        localdata = data.createCopy(d)
        data.update_data(localdata)
        logger.debug(2, "Fetch: creating temporary directory")
        bb.utils.mkdirhier(data.expand('${WORKDIR}', localdata))
        data.setVar('TMPBASE', data.expand('${WORKDIR}/oesvk.XXXXXX', localdata), localdata)
        tmppipe = os.popen(data.getVar('MKTEMPDIRCMD', localdata, 1) or "false")
        tmpfile = tmppipe.readline().strip()
        if not tmpfile:
            logger.error("Fetch: unable to create temporary directory.. make sure 'mktemp' is in the PATH.")
            raise FetchError(ud.module)

        # check out sources there
        os.chdir(tmpfile)
        logger.info("Fetch " + loc)
        logger.debug(1, "Running %s", svkcmd)
        myret = os.system(svkcmd)
        if myret != 0:
            try:
                os.rmdir(tmpfile)
            except OSError:
                pass
            raise FetchError(ud.module)

        os.chdir(os.path.join(tmpfile, os.path.dirname(ud.module)))
        # tar them up to a defined filename
        myret = os.system("tar -czf %s %s" % (ud.localpath, os.path.basename(ud.module)))
        if myret != 0:
            try:
                os.unlink(ud.localpath)
            except OSError:
                pass
            raise FetchError(ud.module)
        # cleanup
        bb.utils.prunedir(tmpfile)
Example #17
def finalise(fn, d):
    data.expandKeys(d)
    data.update_data(d)
    anonqueue = data.getVar("__anonqueue", d, 1) or []
    body = [x['content'] for x in anonqueue]
    flag = { 'python' : 1, 'func' : 1 }
    data.setVar("__anonfunc", "\n".join(body), d)
    data.setVarFlags("__anonfunc", flag, d)
    from bb import build
    try:
        t = data.getVar('T', d)
        data.setVar('T', '${TMPDIR}/anonfunc/', d)
        build.exec_func("__anonfunc", d)
        data.delVar('T', d)
        if t:
            data.setVar('T', t, d)
    except Exception, e:
        bb.msg.debug(1, bb.msg.domain.Parsing, "Exception when executing anonymous function: %s" % e)
        raise
Example #18
    def go(self, uri, ud, d, checkonly=False):
        """Fetch urls"""
        def fetch_uri(uri, ud, d):
            if checkonly:
                fetchcmd = data.getVar("CHECKCOMMAND", d, 1)
            elif os.path.exists(ud.localpath):
                # file exists, but we didn't complete it.. trying again..
                fetchcmd = data.getVar("RESUMECOMMAND", d, 1)
            else:
                fetchcmd = data.getVar("FETCHCOMMAND", d, 1)

            uri = uri.split(";")[0]
            uri_decoded = list(decodeurl(uri))
            uri_type = uri_decoded[0]
            uri_host = uri_decoded[1]

            fetchcmd = fetchcmd.replace("${URI}", uri.split(";")[0])
            fetchcmd = fetchcmd.replace("${FILE}", ud.basename)
            logger.info("fetch " + uri)
            logger.debug(2, "executing " + fetchcmd)
            runfetchcmd(fetchcmd, d)

            # Sanity check since wget can pretend it succeeded when it didn't
            # Also, this used to happen if sourceforge sent us to the mirror page
            if not os.path.exists(ud.localpath) and not checkonly:
                logger.debug(
                    2,
                    "The fetch command for %s returned success but %s doesn't exist?...",
                    uri, ud.localpath)
                return False

            return True

        localdata = data.createCopy(d)
        data.setVar('OVERRIDES', "wget:" + data.getVar('OVERRIDES', localdata),
                    localdata)
        data.update_data(localdata)

        if fetch_uri(uri, ud, localdata):
            return True

        raise FetchError(uri)
Example #19
    def build( self, params, cmd = "build" ):
        """Build a providee"""
        global last_exception
        globexpr = params[0]
        self._checkParsed()
        names = globfilter( cooker.status.pkg_pn.keys(), globexpr )
        if len( names ) == 0: names = [ globexpr ]
        print "SHELL: Building %s" % ' '.join( names )

        oldcmd = cooker.configuration.cmd
        cooker.configuration.cmd = cmd

        td = taskdata.TaskData(cooker.configuration.abort)
        localdata = data.createCopy(cooker.configuration.data)
        data.update_data(localdata)
        data.expandKeys(localdata)

        try:
            tasks = []
            for name in names:
                td.add_provider(localdata, cooker.status, name)
                providers = td.get_provider(name)

                if len(providers) == 0:
                    raise Providers.NoProvider

                tasks.append([name, "do_%s" % cooker.configuration.cmd])

            td.add_unresolved(localdata, cooker.status)
            
            rq = runqueue.RunQueue(cooker, localdata, cooker.status, td, tasks)
            rq.prepare_runqueue()
            rq.execute_runqueue()

        except Providers.NoProvider:
            print "ERROR: No Provider"
            last_exception = Providers.NoProvider

        except runqueue.TaskFailure, fnids:
            for fnid in fnids:
                print "ERROR: '%s' failed" % td.fn_index[fnid]
            last_exception = runqueue.TaskFailure
Example #20
    def build(self, params, cmd="build"):
        """Build a providee"""
        global last_exception
        globexpr = params[0]
        self._checkParsed()
        names = globfilter(cooker.status.pkg_pn.keys(), globexpr)
        if len(names) == 0: names = [globexpr]
        print "SHELL: Building %s" % ' '.join(names)

        oldcmd = cooker.configuration.cmd
        cooker.configuration.cmd = cmd

        td = taskdata.TaskData(cooker.configuration.abort)
        localdata = data.createCopy(cooker.configuration.data)
        data.update_data(localdata)
        data.expandKeys(localdata)

        try:
            tasks = []
            for name in names:
                td.add_provider(localdata, cooker.status, name)
                providers = td.get_provider(name)

                if len(providers) == 0:
                    raise Providers.NoProvider

                tasks.append([name, "do_%s" % cooker.configuration.cmd])

            td.add_unresolved(localdata, cooker.status)

            rq = runqueue.RunQueue(cooker, localdata, cooker.status, td, tasks)
            rq.prepare_runqueue()
            rq.execute_runqueue()

        except Providers.NoProvider:
            print "ERROR: No Provider"
            last_exception = Providers.NoProvider

        except runqueue.TaskFailure, fnids:
            for fnid in fnids:
                print "ERROR: '%s' failed" % td.fn_index[fnid]
            last_exception = runqueue.TaskFailure
Example #21
    def build(self, params, cmd="build"):
        """Build a providee"""
        global last_exception
        globexpr = params[0]
        self._checkParsed()
        names = globfilter(cooker.status.pkg_pn, globexpr)
        if len(names) == 0:
            names = [globexpr]
        print("SHELL: Building %s" % " ".join(names))

        td = taskdata.TaskData(cooker.configuration.abort)
        localdata = data.createCopy(cooker.configuration.data)
        data.update_data(localdata)
        data.expandKeys(localdata)

        try:
            tasks = []
            for name in names:
                td.add_provider(localdata, cooker.status, name)
                providers = td.get_provider(name)

                if len(providers) == 0:
                    raise Providers.NoProvider

                tasks.append([name, "do_%s" % cmd])

            td.add_unresolved(localdata, cooker.status)

            rq = runqueue.RunQueue(cooker, localdata, cooker.status, td, tasks)
            rq.prepare_runqueue()
            rq.execute_runqueue()

        except Providers.NoProvider:
            print("ERROR: No Provider")
            last_exception = Providers.NoProvider

        except runqueue.TaskFailure as fnids:
            last_exception = runqueue.TaskFailure

        except build.FuncFailed as e:
            print("ERROR: Couldn't build '%s'" % names)
            last_exception = e
Example #22
    def testUpdateDataLastBug(self):
        from bb import data
        from bb import data_smart

        slugos = data_smart.DataSmart()
        data.setVar('PN', 'busybox', slugos)
        data.setVar('INITSCRIPT_PARAMS_${PN}_slugos', 'start 20 .', slugos)

        data.expandKeys(slugos)
        self.assertTrue('INITSCRIPT_PARAMS_busybox_slugos' in data.keys(slugos))

        data.setVar('OVERRIDES', 'slugos', slugos) 
        data.update_data(slugos)

        self.assertTrue('INITSCRIPT_PARAMS_busybox' in data.keys(slugos))
        data.setVar('OVERRIDES', 'busybox:slugos', slugos)
        data.update_data(slugos)
 
        self.assertTrue('INITSCRIPT_PARAMS' in data.keys(slugos))
        self.assertEquals('start 20 .', data.getVar('INITSCRIPT_PARAMS', slugos))
Example #23
File: wget.py  Project: ack3000/poky
    def go(self, uri, ud, d, checkonly = False):
        """Fetch urls"""

        def fetch_uri(uri, ud, d):
            if checkonly:
                fetchcmd = data.getVar("CHECKCOMMAND", d, 1)
            elif os.path.exists(ud.localpath):
                # file exists, but we didn't complete it.. trying again..
                fetchcmd = data.getVar("RESUMECOMMAND", d, 1)
            else:
                fetchcmd = data.getVar("FETCHCOMMAND", d, 1)

            uri = uri.split(";")[0]
            uri_decoded = list(decodeurl(uri))
            uri_type = uri_decoded[0]
            uri_host = uri_decoded[1]

            fetchcmd = fetchcmd.replace("${URI}", uri.split(";")[0])
            fetchcmd = fetchcmd.replace("${FILE}", ud.basename)
            logger.info("fetch " + uri)
            logger.debug(2, "executing " + fetchcmd)
            runfetchcmd(fetchcmd, d)

            # Sanity check since wget can pretend it succeeded when it didn't
            # Also, this used to happen if sourceforge sent us to the mirror page
            if not os.path.exists(ud.localpath) and not checkonly:
                logger.debug(2, "The fetch command for %s returned success but %s doesn't exist?...", uri, ud.localpath)
                return False

            return True

        localdata = data.createCopy(d)
        data.setVar('OVERRIDES', "wget:" + data.getVar('OVERRIDES', localdata), localdata)
        data.update_data(localdata)

        if fetch_uri(uri, ud, localdata):
            return True

        raise FetchError(uri)
Example #24
    def go(self, uri, ud, d, checkonly = False):
        """Fetch urls"""

        def fetch_uri(uri, ud, d):
            if checkonly:
                fetchcmd = data.getVar("CHECKCOMMAND", d, 1)
            elif os.path.exists(ud.localpath):
                # file exists, but we didn't complete it.. trying again..
                fetchcmd = data.getVar("RESUMECOMMAND", d, 1)
            else:
                fetchcmd = data.getVar("FETCHCOMMAND", d, 1)

            uri = uri.split(";")[0]
            uri_decoded = list(bb.decodeurl(uri))
            uri_type = uri_decoded[0]
            uri_host = uri_decoded[1]

            bb.msg.note(1, bb.msg.domain.Fetcher, "fetch " + uri)
            fetchcmd = fetchcmd.replace("${URI}", uri.split(";")[0])
            fetchcmd = fetchcmd.replace("${FILE}", ud.basename)
            httpproxy = None
            ftpproxy = None
            if uri_type == 'http':
                httpproxy = data.getVar("HTTP_PROXY", d, True)
                httpproxy_ignore = (data.getVar("HTTP_PROXY_IGNORE", d, True) or "").split()
                for p in httpproxy_ignore:
                    if uri_host.endswith(p):
                        httpproxy = None
                        break
            if uri_type == 'ftp':
                ftpproxy = data.getVar("FTP_PROXY", d, True)
                ftpproxy_ignore = (data.getVar("HTTP_PROXY_IGNORE", d, True) or "").split()
                for p in ftpproxy_ignore:
                    if uri_host.endswith(p):
                        ftpproxy = None
                        break
            if httpproxy:
                fetchcmd = "http_proxy=" + httpproxy + " " + fetchcmd
            if ftpproxy:
                fetchcmd = "ftp_proxy=" + ftpproxy + " " + fetchcmd
            bb.msg.debug(2, bb.msg.domain.Fetcher, "executing " + fetchcmd)
            ret = os.system(fetchcmd)
            if ret != 0:
                return False

            # Sanity check since wget can pretend it succeeded when it didn't
            # Also, this used to happen if sourceforge sent us to the mirror page
            if not os.path.exists(ud.localpath) and not checkonly:
                bb.msg.debug(2, bb.msg.domain.Fetcher, "The fetch command for %s returned success but %s doesn't exist?..." % (uri, ud.localpath))
                return False

            return True

        localdata = data.createCopy(d)
        data.setVar('OVERRIDES', "wget:" + data.getVar('OVERRIDES', localdata), localdata)
        data.update_data(localdata)

        if fetch_uri(uri, ud, localdata):
            return True

        raise FetchError(uri)
Example #25
def handle(fn, d, include=0):
    global __func_start_regexp__, __inherit_regexp__, __export_func_regexp__, __addtask_regexp__, __addhandler_regexp__, __infunc__, __body__, __residue__
    __body__ = []
    __infunc__ = ""
    __classname__ = ""
    __residue__ = []

    if include == 0:
        bb.msg.debug(2, bb.msg.domain.Parsing, "BB " + fn + ": handle(data)")
    else:
        bb.msg.debug(2, bb.msg.domain.Parsing,
                     "BB " + fn + ": handle(data, include)")

    (root, ext) = os.path.splitext(os.path.basename(fn))
    base_name = "%s%s" % (root, ext)
    init(d)

    if ext == ".bbclass":
        __classname__ = root
        classes.append(__classname__)
        __inherit_cache = data.getVar('__inherit_cache', d) or []
        if not fn in __inherit_cache:
            __inherit_cache.append(fn)
            data.setVar('__inherit_cache', __inherit_cache, d)

    if include != 0:
        oldfile = data.getVar('FILE', d)
    else:
        oldfile = None

    fn = obtain(fn, d)
    bbpath = (data.getVar('BBPATH', d, 1) or '').split(':')
    if not os.path.isabs(fn):
        f = None
        for p in bbpath:
            j = os.path.join(p, fn)
            if os.access(j, os.R_OK):
                abs_fn = j
                f = open(j, 'r')
                break
        if f is None:
            raise IOError("file %s not found" % fn)
    else:
        f = open(fn, 'r')
        abs_fn = fn

    if ext != ".bbclass":
        dname = os.path.dirname(abs_fn)
        if bbpath[0] != dname:
            bbpath.insert(0, dname)
            data.setVar('BBPATH', ":".join(bbpath), d)

    if include:
        bb.parse.mark_dependency(d, abs_fn)

    if ext != ".bbclass":
        data.setVar('FILE', fn, d)

    lineno = 0
    while 1:
        lineno = lineno + 1
        s = f.readline()
        if not s: break
        s = s.rstrip()
        feeder(lineno, s, fn, base_name, d)
    if __inpython__:
        # add a blank line to close out any python definition
        feeder(IN_PYTHON_EOF, "", fn, base_name, d)
    if ext == ".bbclass":
        classes.remove(__classname__)
    else:
        if include == 0:
            data.expandKeys(d)
            data.update_data(d)
            anonqueue = data.getVar("__anonqueue", d, 1) or []
            body = [x['content'] for x in anonqueue]
            flag = {'python': 1, 'func': 1}
            data.setVar("__anonfunc", "\n".join(body), d)
            data.setVarFlags("__anonfunc", flag, d)
            from bb import build
            try:
                t = data.getVar('T', d)
                data.setVar('T', '${TMPDIR}/', d)
                build.exec_func("__anonfunc", d)
                data.delVar('T', d)
                if t:
                    data.setVar('T', t, d)
            except Exception, e:
                bb.msg.debug(
                    1, bb.msg.domain.Parsing,
                    "Exception when executing anonymous function: %s" % e)
                raise
            data.delVar("__anonqueue", d)
            data.delVar("__anonfunc", d)
            set_additional_vars(fn, d, include)
            data.update_data(d)

            all_handlers = {}
            for var in data.getVar('__BBHANDLERS', d) or []:
                # try to add the handler
                handler = data.getVar(var, d)
                bb.event.register(var, handler)

            tasklist = data.getVar('__BBTASKS', d) or []
            bb.build.add_tasks(tasklist, d)

        bbpath.pop(0)
Example #26
    def go(self, loc, ud, d):
        """
        Fetch urls
        """

        (host, depot, user, pswd, parm) = Perforce.doparse(loc, d)

        if depot.find('/...') != -1:
            path = depot[:depot.find('/...')]
        else:
            path = depot

        module = parm.get('module', os.path.basename(path))

        localdata = data.createCopy(d)
        data.setVar('OVERRIDES', "p4:%s" % data.getVar('OVERRIDES', localdata), localdata)
        data.update_data(localdata)

        # Get the p4 command
        p4opt = ""
        if user:
            p4opt += " -u %s" % (user)

        if pswd:
            p4opt += " -P %s" % (pswd)

        if host:
            p4opt += " -p %s" % (host)

        p4cmd = data.getVar('FETCHCOMMAND', localdata, 1)

        # create temp directory
        logger.debug(2, "Fetch: creating temporary directory")
        bb.utils.mkdirhier(data.expand('${WORKDIR}', localdata))
        data.setVar('TMPBASE', data.expand('${WORKDIR}/oep4.XXXXXX', localdata), localdata)
        tmppipe = os.popen(data.getVar('MKTEMPDIRCMD', localdata, 1) or "false")
        tmpfile = tmppipe.readline().strip()
        if not tmpfile:
            logger.error("Fetch: unable to create temporary directory.. make sure 'mktemp' is in the PATH.")
            raise FetchError(module)

        if "label" in parm:
            depot = "%s@%s" % (depot, parm["label"])
        else:
            cset = Perforce.getcset(d, depot, host, user, pswd, parm)
            depot = "%s@%s" % (depot, cset)

        os.chdir(tmpfile)
        logger.info("Fetch " + loc)
        logger.info("%s%s files %s", p4cmd, p4opt, depot)
        p4file = os.popen("%s%s files %s" % (p4cmd, p4opt, depot))

        if not p4file:
            logger.error("Fetch: unable to get the P4 files from %s", depot)
            raise FetchError(module)

        count = 0

        for file in p4file:
            list = file.split()

            if list[2] == "delete":
                continue

            dest = list[0][len(path)+1:]
            where = dest.find("#")

            os.system("%s%s print -o %s/%s %s" % (p4cmd, p4opt, module, dest[:where], list[0]))
            count = count + 1

        if count == 0:
            logger.error("Fetch:  No files gathered from the P4 fetch")
            raise FetchError(module)

        myret = os.system("tar -czf %s %s" % (ud.localpath, module))
        if myret != 0:
            try:
                os.unlink(ud.localpath)
            except OSError:
                pass
            raise FetchError(module)
        # cleanup
        bb.utils.prunedir(tmpfile)
Example #27
    def go(self, uri, ud, d, checkonly=False):
        """Fetch urls"""
        def fetch_uri(uri, ud, d):
            if checkonly:
                fetchcmd = data.getVar("CHECKCOMMAND", d, 1)
            elif os.path.exists(ud.localpath):
                # file exists, but we didn't complete it.. trying again..
                fetchcmd = data.getVar("RESUMECOMMAND", d, 1)
            else:
                fetchcmd = data.getVar("FETCHCOMMAND", d, 1)

            bb.msg.note(1, bb.msg.domain.Fetcher, "fetch " + uri)
            fetchcmd = fetchcmd.replace("${URI}", uri)
            fetchcmd = fetchcmd.replace("${FILE}", ud.basename)
            bb.msg.debug(2, bb.msg.domain.Fetcher, "executing " + fetchcmd)
            ret = os.system(fetchcmd)
            if ret != 0:
                return False

            # Sanity check since wget can pretend it succeeded when it didn't
            # Also, this used to happen if sourceforge sent us to the mirror page
            if not os.path.exists(ud.localpath):
                bb.msg.debug(
                    2, bb.msg.domain.Fetcher,
                    "The fetch command for %s returned success but %s doesn't exist?..."
                    % (uri, ud.localpath))
                return False

            return True

        localdata = data.createCopy(d)
        data.setVar('OVERRIDES', "wget:" + data.getVar('OVERRIDES', localdata),
                    localdata)
        data.update_data(localdata)

        premirrors = [
            i.split() for i in (
                data.getVar('PREMIRRORS', localdata, 1) or "").split('\n') if i
        ]
        for (find, replace) in premirrors:
            newuri = uri_replace(uri, find, replace, d)
            if newuri != uri:
                if fetch_uri(newuri, ud, localdata):
                    return True

        if fetch_uri(uri, ud, localdata):
            return True

        # try mirrors
        mirrors = [
            i.split()
            for i in (data.getVar('MIRRORS', localdata, 1) or "").split('\n')
            if i
        ]
        for (find, replace) in mirrors:
            newuri = uri_replace(uri, find, replace, d)
            if newuri != uri:
                if fetch_uri(newuri, ud, localdata):
                    return True

        raise FetchError(uri)
Example #28
    def go(self, d, urls = []):
        """Fetch urls"""
        if not urls:
            urls = self.urls

        localdata = data.createCopy(d)
        data.setVar('OVERRIDES', "svk:%s" % data.getVar('OVERRIDES', localdata), localdata)
        data.update_data(localdata)

        for loc in urls:
            (type, host, path, user, pswd, parm) = bb.decodeurl(data.expand(loc, localdata))
            if not "module" in parm:
                raise MissingParameterError("svk method needs a 'module' parameter")
            else:
                module = parm["module"]

            dlfile = self.localpath(loc, localdata)
            dldir = data.getVar('DL_DIR', localdata, 1)

#           setup svk options
            options = []
            if 'rev' in parm:
                revision = parm['rev']
            else:
                revision = ""

            date = Fetch.getSRCDate(d)
            tarfn = data.expand('%s_%s_%s_%s_%s.tar.gz' % (module.replace('/', '.'), host, path.replace('/', '.'), revision, date), localdata)
            data.setVar('TARFILES', dlfile, localdata)
            data.setVar('TARFN', tarfn, localdata)

            dl = os.path.join(dldir, tarfn)
            if os.access(dl, os.R_OK):
                bb.debug(1, "%s already exists, skipping svk checkout." % tarfn)
                continue

            olddir = os.path.abspath(os.getcwd())
            os.chdir(data.expand(dldir, localdata))

            svkroot = host + path

            data.setVar('SVKROOT', svkroot, localdata)
            data.setVar('SVKCOOPTS', " ".join(options), localdata)
            data.setVar('SVKMODULE', module, localdata)
            svkcmd = "svk co -r {%s} %s/%s" % (date, svkroot, module)

            if revision:
                svkcmd = "svk co -r %s %s/%s" % (revision, svkroot, module)

#           create temp directory
            bb.debug(2, "Fetch: creating temporary directory")
            bb.mkdirhier(data.expand('${WORKDIR}', localdata))
            data.setVar('TMPBASE', data.expand('${WORKDIR}/oesvk.XXXXXX', localdata), localdata)
            tmppipe = os.popen(data.getVar('MKTEMPDIRCMD', localdata, 1) or "false")
            tmpfile = tmppipe.readline().strip()
            if not tmpfile:
                bb.error("Fetch: unable to create temporary directory.. make sure 'mktemp' is in the PATH.")
                raise FetchError(module)

#           check out sources there
            os.chdir(tmpfile)
            bb.note("Fetch " + loc)
            bb.debug(1, "Running %s" % svkcmd)
            myret = os.system(svkcmd)
            if myret != 0:
                try:
                    os.rmdir(tmpfile)
                except OSError:
                    pass
                raise FetchError(module)

            os.chdir(os.path.join(tmpfile, os.path.dirname(module)))
#           tar them up to a defined filename
            myret = os.system("tar -czf %s %s" % (os.path.join(dldir,tarfn), os.path.basename(module)))
            if myret != 0:
                try:
                    os.unlink(tarfn)
                except OSError:
                    pass
#           cleanup
            os.system('rm -rf %s' % tmpfile)
            os.chdir(olddir)
        del localdata
Example #29
def handle(fn, d, include = 0):
    global __func_start_regexp__, __inherit_regexp__, __export_func_regexp__, __addtask_regexp__, __addhandler_regexp__, __infunc__, __body__, __residue__
    __body__ = []
    __infunc__ = ""
    __classname__ = ""
    __residue__ = []

    if include == 0:
        bb.msg.debug(2, bb.msg.domain.Parsing, "BB " + fn + ": handle(data)")
    else:
        bb.msg.debug(2, bb.msg.domain.Parsing, "BB " + fn + ": handle(data, include)")

    (root, ext) = os.path.splitext(os.path.basename(fn))
    base_name = "%s%s" % (root,ext)
    init(d)

    if ext == ".bbclass":
        __classname__ = root
        classes.append(__classname__)

    if include != 0:
        oldfile = data.getVar('FILE', d)
    else:
        oldfile = None

    fn = obtain(fn, d)
    bbpath = (data.getVar('BBPATH', d, 1) or '').split(':')
    if not os.path.isabs(fn):
        f = None
        for p in bbpath:
            j = os.path.join(p, fn)
            if os.access(j, os.R_OK):
                abs_fn = j
                f = open(j, 'r')
                break
        if f is None:
            raise IOError("file not found")
    else:
        f = open(fn,'r')
        abs_fn = fn

    if ext != ".bbclass":
        bbpath.insert(0, os.path.dirname(abs_fn))
        data.setVar('BBPATH', ":".join(bbpath), d)

    if include:
        bb.parse.mark_dependency(d, abs_fn)

    if ext != ".bbclass":
        data.setVar('FILE', fn, d)
        i = (data.getVar("INHERIT", d, 1) or "").split()
        if not "base" in i and __classname__ != "base":
            i[0:0] = ["base"]
        inherit(i, d)

    lineno = 0
    while 1:
        lineno = lineno + 1
        s = f.readline()
        if not s: break
        s = s.rstrip()
        feeder(lineno, s, fn, base_name, d)
    if __inpython__:
        # add a blank line to close out any python definition
        feeder(IN_PYTHON_EOF, "", fn, base_name, d)
    if ext == ".bbclass":
        classes.remove(__classname__)
    else:
        if include == 0:
            data.expandKeys(d)
            data.update_data(d)
            anonqueue = data.getVar("__anonqueue", d, 1) or []
            body = [x['content'] for x in anonqueue]
            flag = { 'python' : 1, 'func' : 1 }
            data.setVar("__anonfunc", "\n".join(body), d)
            data.setVarFlags("__anonfunc", flag, d)
            from bb import build
            try:
                t = data.getVar('T', d)
                data.setVar('T', '${TMPDIR}/', d)
                build.exec_func("__anonfunc", d)
                data.delVar('T', d)
                if t:
                    data.setVar('T', t, d)
            except Exception, e:
                bb.msg.debug(1, bb.msg.domain.Parsing, "executing anonymous function: %s" % e)
                raise
            data.delVar("__anonqueue", d)
            data.delVar("__anonfunc", d)
            set_additional_vars(fn, d, include)
            data.update_data(d)

            all_handlers = {} 
            for var in data.getVar('__BBHANDLERS', d) or []:
                # try to add the handler
                # if we added it remember the choice
                handler = data.getVar(var,d)
                if bb.event.register(var,handler) == bb.event.Registered:
                    all_handlers[var] = handler

            for var in data.getVar('__BBTASKS', d) or []:
                deps = data.getVarFlag(var, 'deps', d) or []
                postdeps = data.getVarFlag(var, 'postdeps', d) or []
                bb.build.add_task(var, deps, d)
                for p in postdeps:
                    pdeps = data.getVarFlag(p, 'deps', d) or []
                    pdeps.append(var)
                    data.setVarFlag(p, 'deps', pdeps, d)
                    bb.build.add_task(p, pdeps, d)

            # now add the handlers
            if not len(all_handlers) == 0:
                data.setVar('__all_handlers__', all_handlers, d)

        bbpath.pop(0)
Example #30
    def go(self, uri, ud, d, checkonly = False):
        """Fetch urls"""

        def fetch_uri(uri, ud, d):
            if checkonly:
                fetchcmd = data.getVar("CHECKCOMMAND", d, 1)
            elif os.path.exists(ud.localpath):
                # file exists, but we didn't complete it.. trying again..
                fetchcmd = data.getVar("RESUMECOMMAND", d, 1)
            else:
                fetchcmd = data.getVar("FETCHCOMMAND", d, 1)

            uri = uri.split(";")[0]
            uri_decoded = list(bb.decodeurl(uri))
            uri_type = uri_decoded[0]
            uri_host = uri_decoded[1]

            bb.msg.note(1, bb.msg.domain.Fetcher, "fetch " + uri)
            fetchcmd = fetchcmd.replace("${URI}", uri.split(";")[0])
            fetchcmd = fetchcmd.replace("${FILE}", ud.basename)
            httpproxy = None
            ftpproxy = None
            if uri_type == 'http':
                httpproxy = data.getVar("HTTP_PROXY", d, True)
                httpproxy_ignore = (data.getVar("HTTP_PROXY_IGNORE", d, True) or "").split()
                for p in httpproxy_ignore:
                    if uri_host.endswith(p):
                        httpproxy = None
                        break
            if uri_type == 'ftp':
                ftpproxy = data.getVar("FTP_PROXY", d, True)
                ftpproxy_ignore = (data.getVar("HTTP_PROXY_IGNORE", d, True) or "").split()
                for p in ftpproxy_ignore:
                    if uri_host.endswith(p):
                        ftpproxy = None
                        break
            if httpproxy:
                fetchcmd = "http_proxy=" + httpproxy + " " + fetchcmd
            if ftpproxy:
                fetchcmd = "ftp_proxy=" + ftpproxy + " " + fetchcmd
            bb.msg.debug(2, bb.msg.domain.Fetcher, "executing " + fetchcmd)
            ret = os.system(fetchcmd)
            if ret != 0:
                return False

            # Sanity check since wget can pretend it succeeded when it didn't
            # Also, this used to happen if sourceforge sent us to the mirror page
            if not os.path.exists(ud.localpath):
                bb.msg.debug(2, bb.msg.domain.Fetcher, "The fetch command for %s returned success but %s doesn't exist?..." % (uri, ud.localpath))
                return False

            return True

        localdata = data.createCopy(d)
        data.setVar('OVERRIDES', "wget:" + data.getVar('OVERRIDES', localdata), localdata)
        data.update_data(localdata)

        premirrors = [ i.split() for i in (data.getVar('PREMIRRORS', localdata, 1) or "").split('\n') if i ]
        for (find, replace) in premirrors:
            newuri = uri_replace(uri, find, replace, d)
            if newuri != uri:
                if fetch_uri(newuri, ud, localdata):
                    return True

        if fetch_uri(uri, ud, localdata):
            return True

        # try mirrors
        mirrors = [ i.split() for i in (data.getVar('MIRRORS', localdata, 1) or "").split('\n') if i ]
        for (find, replace) in mirrors:
            newuri = uri_replace(uri, find, replace, d)
            if newuri != uri:
                if fetch_uri(newuri, ud, localdata):
                    return True

        raise FetchError(uri)
Example #31
    def go(self, loc, ud, d):
        """
        Fetch urls
        """

        (host, depot, user, pswd, parm) = Perforce.doparse(loc, d)

        if depot.find('/...') != -1:
            path = depot[:depot.find('/...')]
        else:
            path = depot

        module = parm.get('module', os.path.basename(path))

        localdata = data.createCopy(d)
        data.setVar('OVERRIDES', "p4:%s" % data.getVar('OVERRIDES', localdata), localdata)
        data.update_data(localdata)

        # Get the p4 command
        p4opt = ""
        if user:
            p4opt += " -u %s" % (user)

        if pswd:
            p4opt += " -P %s" % (pswd)

        if host:
            p4opt += " -p %s" % (host)

        p4cmd = data.getVar('FETCHCOMMAND', localdata, 1)

        # create temp directory
        logger.debug(2, "Fetch: creating temporary directory")
        bb.mkdirhier(data.expand('${WORKDIR}', localdata))
        data.setVar('TMPBASE', data.expand('${WORKDIR}/oep4.XXXXXX', localdata), localdata)
        tmppipe = os.popen(data.getVar('MKTEMPDIRCMD', localdata, 1) or "false")
        tmpfile = tmppipe.readline().strip()
        if not tmpfile:
            logger.error("Fetch: unable to create temporary directory.. make sure 'mktemp' is in the PATH.")
            raise FetchError(module)

        if "label" in parm:
            depot = "%s@%s" % (depot, parm["label"])
        else:
            cset = Perforce.getcset(d, depot, host, user, pswd, parm)
            depot = "%s@%s" % (depot, cset)

        os.chdir(tmpfile)
        logger.info("Fetch " + loc)
        logger.info("%s%s files %s", p4cmd, p4opt, depot)
        p4file = os.popen("%s%s files %s" % (p4cmd, p4opt, depot))

        if not p4file:
            logger.error("Fetch: unable to get the P4 files from %s", depot)
            raise FetchError(module)

        count = 0

        for file in p4file:
            list = file.split()

            if list[2] == "delete":
                continue

            dest = list[0][len(path)+1:]
            where = dest.find("#")

            os.system("%s%s print -o %s/%s %s" % (p4cmd, p4opt, module, dest[:where], list[0]))
            count = count + 1

        if count == 0:
            logger.error("Fetch:  No files gathered from the P4 fetch")
            raise FetchError(module)

        myret = os.system("tar -czf %s %s" % (ud.localpath, module))
        if myret != 0:
            try:
                os.unlink(ud.localpath)
            except OSError:
                pass
            raise FetchError(module)
        # cleanup
        bb.utils.prunedir(tmpfile)
Example #32
    def download(self, ud, d):
        """
        Fetch urls
        """

        (host, depot, user, pswd, parm) = Perforce.doparse(ud.url, d)

        if depot.find('/...') != -1:
            path = depot[:depot.find('/...')]
        else:
            path = depot

        module = parm.get('module', os.path.basename(path))

        localdata = data.createCopy(d)
        data.setVar('OVERRIDES', "p4:%s" % data.getVar('OVERRIDES', localdata), localdata)
        data.update_data(localdata)

        # Get the p4 command
        p4opt = ""
        if user:
            p4opt += " -u %s" % (user)

        if pswd:
            p4opt += " -P %s" % (pswd)

        if host:
            p4opt += " -p %s" % (host)

        p4cmd = data.getVar('FETCHCOMMAND', localdata, True)

        # create temp directory
        logger.debug(2, "Fetch: creating temporary directory")
        bb.utils.mkdirhier(data.expand('${WORKDIR}', localdata))
        data.setVar('TMPBASE', data.expand('${WORKDIR}/oep4.XXXXXX', localdata), localdata)
        tmpfile, errors = bb.process.run(data.getVar('MKTEMPDIRCMD', localdata, True) or "false")
        tmpfile = tmpfile.strip()
        if not tmpfile:
            raise FetchError("Fetch: unable to create temporary directory.. make sure 'mktemp' is in the PATH.", ud.url)

        if "label" in parm:
            depot = "%s@%s" % (depot, parm["label"])
        else:
            cset = Perforce.getcset(d, depot, host, user, pswd, parm)
            depot = "%s@%s" % (depot, cset)

        os.chdir(tmpfile)
        logger.info("Fetch " + ud.url)
        logger.info("%s%s files %s", p4cmd, p4opt, depot)
        p4file, errors = bb.process.run("%s%s files %s" % (p4cmd, p4opt, depot))
        p4file = [f.rstrip() for f in p4file.splitlines()]

        if not p4file:
            raise FetchError("Fetch: unable to get the P4 files from %s" % depot, ud.url)

        count = 0

        for file in p4file:
            list = file.split()

            if list[2] == "delete":
                continue

            dest = list[0][len(path)+1:]
            where = dest.find("#")

            subprocess.call("%s%s print -o %s/%s %s" % (p4cmd, p4opt, module, dest[:where], list[0]), shell=True)
            count = count + 1

        if count == 0:
            raise FetchError("Fetch: No files gathered from the P4 fetch", ud.url)

        runfetchcmd("tar -czf %s %s" % (ud.localpath, module), d, cleanup = [ud.localpath])
        # cleanup
        bb.utils.prunedir(tmpfile)
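
As a rough illustration of the per-file loop above: each line of `p4 files` output looks something like `//depot/proj/src/main.c#4 - edit change 1234 (text)`, so the code splits the line, skips files whose head action is a delete, strips the depot prefix, and drops the `#<revision>` suffix to get the local destination path. A standalone sketch under that assumed output format (the sample lines are invented):

# Sketch of the destination-path computation in the loop above.
# The sample lines are invented; real 'p4 files' output may differ slightly.
def p4_destinations(p4_lines, path):
    dests = []
    for line in p4_lines:
        fields = line.split()
        if fields[2] == "delete":          # skip files whose head action is a delete
            continue
        dest = fields[0][len(path) + 1:]   # strip the depot prefix and leading '/'
        where = dest.find("#")             # cut off the '#<revision>' suffix
        dests.append(dest[:where])
    return dests

sample = [
    "//depot/proj/src/main.c#4 - edit change 1234 (text)",
    "//depot/proj/src/old.c#7 - delete change 1200 (text)",
]
print(p4_destinations(sample, "//depot/proj"))   # ['src/main.c']
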
Example #33
    def go(self, loc, ud, d):

        # try to use the tarball stash
        if not self.forcefetch(loc, ud, d) and Fetch.try_mirror(d, ud.localfile):
            bb.msg.debug(1, bb.msg.domain.Fetcher, "%s already exists or was mirrored, skipping cvs checkout." % ud.localpath)
            return

        method = "pserver"
        if "method" in ud.parm:
            method = ud.parm["method"]

        localdir = ud.module
        if "localdir" in ud.parm:
            localdir = ud.parm["localdir"]

        cvs_port = ""
        if "port" in ud.parm:
            cvs_port = ud.parm["port"]

        cvs_rsh = None
        if method == "ext":
            if "rsh" in ud.parm:
                cvs_rsh = ud.parm["rsh"]

        if method == "dir":
            cvsroot = ud.path
        else:
            cvsroot = ":" + method
            cvsproxyhost = data.getVar('CVS_PROXY_HOST', d, True)
            if cvsproxyhost:
                cvsroot += ";proxy=" + cvsproxyhost
            cvsproxyport = data.getVar('CVS_PROXY_PORT', d, True)
            if cvsproxyport:
                cvsroot += ";proxyport=" + cvsproxyport
            cvsroot += ":" + ud.user
            if ud.pswd:
                cvsroot += ":" + ud.pswd
            cvsroot += "@" + ud.host + ":" + cvs_port + ud.path

        options = []
        if 'norecurse' in ud.parm:
            options.append("-l")
        if ud.date:
            # treat YYYYMMDDHHMM specially for CVS
            if len(ud.date) == 12:
                options.append("-D \"%s %s:%s UTC\"" % (ud.date[0:8], ud.date[8:10], ud.date[10:12]))
            else:
                options.append("-D \"%s UTC\"" % ud.date)
        if ud.tag:
            options.append("-r %s" % ud.tag)

        localdata = data.createCopy(d)
        data.setVar('OVERRIDES', "cvs:%s" % data.getVar('OVERRIDES', localdata), localdata)
        data.update_data(localdata)

        data.setVar('CVSROOT', cvsroot, localdata)
        data.setVar('CVSCOOPTS', " ".join(options), localdata)
        data.setVar('CVSMODULE', ud.module, localdata)
        cvscmd = data.getVar('FETCHCOMMAND', localdata, 1)
        cvsupdatecmd = data.getVar('UPDATECOMMAND', localdata, 1)

        if cvs_rsh:
            cvscmd = "CVS_RSH=\"%s\" %s" % (cvs_rsh, cvscmd)
            cvsupdatecmd = "CVS_RSH=\"%s\" %s" % (cvs_rsh, cvsupdatecmd)

        # create module directory
        bb.msg.debug(2, bb.msg.domain.Fetcher, "Fetch: checking for module directory")
        pkg = data.expand('${PN}', d)
        pkgdir = os.path.join(data.expand('${CVSDIR}', localdata), pkg)
        moddir = os.path.join(pkgdir,localdir)
        if os.access(os.path.join(moddir,'CVS'), os.R_OK):
            bb.msg.note(1, bb.msg.domain.Fetcher, "Update " + loc)
            # update sources there
            os.chdir(moddir)
            myret = os.system(cvsupdatecmd)
        else:
            bb.msg.note(1, bb.msg.domain.Fetcher, "Fetch " + loc)
            # check out sources there
            bb.mkdirhier(pkgdir)
            os.chdir(pkgdir)
            bb.msg.debug(1, bb.msg.domain.Fetcher, "Running %s" % cvscmd)
            myret = os.system(cvscmd)

        if myret != 0 or not os.access(moddir, os.R_OK):
            try:
                os.rmdir(moddir)
            except OSError:
                pass
            raise FetchError(ud.module)

        # tar them up to a defined filename
        if 'fullpath' in ud.parm:
            os.chdir(pkgdir)
            myret = os.system("tar -czf %s %s" % (ud.localpath, localdir))
        else:
            os.chdir(moddir)
            os.chdir('..')
            myret = os.system("tar -czf %s %s" % (ud.localpath, os.path.basename(moddir)))

        if myret != 0:
            try:
                os.unlink(ud.localpath)
            except OSError:
                pass
            raise FetchError(ud.module)
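
The date handling above treats a 12-digit YYYYMMDDHHMM stamp specially, expanding it into a cvs '-D' argument with an explicit HH:MM UTC time; any other value is passed through with just a UTC suffix. A small standalone sketch of that formatting (the sample dates are arbitrary):

# Sketch of the '-D' option construction mirrored from the branch above.
def cvs_date_option(date):
    if len(date) == 12:   # YYYYMMDDHHMM: split out an explicit HH:MM UTC time
        return '-D "%s %s:%s UTC"' % (date[0:8], date[8:10], date[10:12])
    return '-D "%s UTC"' % date

print(cvs_date_option("202401151230"))   # -D "20240115 12:30 UTC"
print(cvs_date_option("20240115"))       # -D "20240115 UTC"
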
Example #34
def handle(fn, d, include = 0):
    global __func_start_regexp__, __inherit_regexp__, __export_func_regexp__, __addtask_regexp__, __addhandler_regexp__, __infunc__, __body__, __bbpath_found__, __residue__
    __body__ = []
    __bbpath_found__ = 0
    __infunc__ = ""
    __classname__ = ""
    __residue__ = []

    if include == 0:
        debug(2, "BB " + fn + ": handle(data)")
    else:
        debug(2, "BB " + fn + ": handle(data, include)")

    (root, ext) = os.path.splitext(os.path.basename(fn))
    init(d)

    if ext == ".bbclass":
        __classname__ = root
        classes.append(__classname__)

    if include != 0:
        oldfile = data.getVar('FILE', d)
    else:
        oldfile = None

    fn = obtain(fn, d)
    bbpath = (data.getVar('BBPATH', d, 1) or '').split(':')
    if not os.path.isabs(fn):
        f = None
        for p in bbpath:
            p = data.expand(p, d)
            j = os.path.join(p, fn)
            if os.access(j, os.R_OK):
                abs_fn = j
                f = open(j, 'r')
                break
        if f is None:
            raise IOError("file not found")
    else:
        f = open(fn,'r')
        abs_fn = fn

    if ext != ".bbclass":
        bbpath.insert(0, os.path.dirname(abs_fn))
        data.setVar('BBPATH', ":".join(bbpath), d)

    if include:
        bb.parse.mark_dependency(d, abs_fn)

    if ext != ".bbclass":
        data.setVar('FILE', fn, d)
        i = (data.getVar("INHERIT", d, 1) or "").split()
        if not "base" in i and __classname__ != "base":
            i[0:0] = ["base"]
        inherit(i, d)

    lineno = 0
    while 1:
        lineno = lineno + 1
        s = f.readline()
        if not s: break
        s = s.rstrip()
        feeder(lineno, s, fn, d)
    if __inpython__:
        # add a blank line to close out any python definition
        feeder(lineno + 1, "", fn, d)
    if ext == ".bbclass":
        classes.remove(__classname__)
    else:
        if include == 0:
            data.expandKeys(d)
            data.update_data(d)
            anonqueue = data.getVar("__anonqueue", d, 1) or []
            for anon in anonqueue:
                data.setVar("__anonfunc", anon["content"], d)
                data.setVarFlags("__anonfunc", anon["flags"], d)
                from bb import build
                try:
                    t = data.getVar('T', d)
                    data.setVar('T', '${TMPDIR}/', d)
                    build.exec_func("__anonfunc", d)
                    data.delVar('T', d)
                    if t:
                        data.setVar('T', t, d)
                except Exception, e:
                    bb.debug(1, "executing anonymous function: %s" % e)
                    raise
            data.delVar("__anonqueue", d)
            data.delVar("__anonfunc", d)
            set_additional_vars(fn, d, include)
            data.update_data(d)

            for var in data.keys(d):
                if data.getVarFlag(var, 'handler', d):
                    bb.event.register(data.getVar(var, d))
                    continue

                if not data.getVarFlag(var, 'task', d):
                    continue

                deps = data.getVarFlag(var, 'deps', d) or []
                postdeps = data.getVarFlag(var, 'postdeps', d) or []
                bb.build.add_task(var, deps, d)
                for p in postdeps:
                    pdeps = data.getVarFlag(p, 'deps', d) or []
                    pdeps.append(var)
                    data.setVarFlag(p, 'deps', pdeps, d)
                    bb.build.add_task(p, pdeps, d)
        bbpath.pop(0)
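
When the filename is not absolute, the handler above simply walks the colon-separated BBPATH and opens the first readable match. A minimal standalone sketch of that lookup, using os.access as the original does (the directories and filename are placeholders):

import os

# Sketch of the BBPATH lookup above: return the first readable match, else None.
def find_in_bbpath(fn, bbpath):
    for p in bbpath.split(':'):
        candidate = os.path.join(p, fn)
        if os.access(candidate, os.R_OK):
            return candidate
    return None

# Hypothetical usage; these paths are placeholders.
print(find_in_bbpath("classes/base.bbclass", "/build/meta:/build/meta-extra"))
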
Example #35
    def go(self, loc, ud, d):
        """
        Fetch urls
        """

        # try to use the tarball stash
        if Fetch.try_mirror(d, ud.localfile):
            bb.msg.debug(1, bb.msg.domain.Fetcher, "%s already exists or was mirrored, skipping perforce checkout." % ud.localpath)
            return

        (host,depot,user,pswd,parm) = Perforce.doparse(loc, d)

        if depot.find('/...') != -1:
            path = depot[:depot.find('/...')]
        else:
            path = depot

        if "module" in parm:
            module = parm["module"]
        else:
            module = os.path.basename(path)

        localdata = data.createCopy(d)
        data.setVar('OVERRIDES', "p4:%s" % data.getVar('OVERRIDES', localdata), localdata)
        data.update_data(localdata)

        # Get the p4 command
        if user:
            data.setVar('P4USER', user, localdata)

        if pswd:
            data.setVar('P4PASSWD', pswd, localdata)

        if host:
            data.setVar('P4PORT', host, localdata)

        p4cmd = data.getVar('FETCHCOMMAND', localdata, 1)

        # create temp directory
        bb.msg.debug(2, bb.msg.domain.Fetcher, "Fetch: creating temporary directory")
        bb.mkdirhier(data.expand('${WORKDIR}', localdata))
        data.setVar('TMPBASE', data.expand('${WORKDIR}/oep4.XXXXXX', localdata), localdata)
        tmppipe = os.popen(data.getVar('MKTEMPDIRCMD', localdata, 1) or "false")
        tmpfile = tmppipe.readline().strip()
        if not tmpfile:
            bb.error("Fetch: unable to create temporary directory.. make sure 'mktemp' is in the PATH.")
            raise FetchError(module)

        if "label" in parm:
            depot = "%s@%s" % (depot,parm["label"])
        else:
            cset = Perforce.getcset(d, depot, host, user, pswd, parm)
            depot = "%s@%s" % (depot,cset)

        os.chdir(tmpfile)
        bb.msg.note(1, bb.msg.domain.Fetcher, "Fetch " + loc)
        bb.msg.note(1, bb.msg.domain.Fetcher, "%s files %s" % (p4cmd, depot))
        p4file = os.popen("%s files %s" % (p4cmd, depot))

        if not p4file:
            bb.error("Fetch: unable to get the P4 files from %s" % (depot))
            raise FetchError(module)

        count = 0

        for file in p4file: 
            list = file.split()

            if list[2] == "delete":
                continue

            dest = list[0][len(path)+1:]
            where = dest.find("#")

            os.system("%s print -o %s/%s %s" % (p4cmd, module,dest[:where],list[0]))
            count = count + 1
            
        if count == 0:
            bb.error("Fetch:  No files gathered from the P4 fetch")
            raise FetchError(module)

        myret = os.system("tar -czf %s %s" % (ud.localpath, module))
        if myret != 0:
            try:
                os.unlink(ud.localpath)
            except OSError:
                pass
            raise FetchError(module)
        # cleanup
        os.system('rm -rf %s' % tmpfile)
Example #36
        data.setVar('T', '${TMPDIR}/anonfunc/', d)
        anonfuncs = data.getVar('__BBANONFUNCS', d) or []
        code = ""
        for f in anonfuncs:
            code = code + "    %s(d)\n" % f
        data.setVar("__anonfunc", code, d)        
        build.exec_func("__anonfunc", d)
        data.delVar('T', d)
        if t:
            data.setVar('T', t, d)
    except Exception, e:
        bb.msg.debug(1, bb.msg.domain.Parsing, "Exception when executing anonymous function: %s" % e)
        raise
    data.delVar("__anonqueue", d)
    data.delVar("__anonfunc", d)
    data.update_data(d)

    all_handlers = {} 
    for var in data.getVar('__BBHANDLERS', d) or []:
        # try to add the handler
        handler = data.getVar(var,d)
        bb.event.register(var, handler)

    tasklist = data.getVar('__BBTASKS', d) or []
    bb.build.add_tasks(tasklist, d)


def handle(fn, d, include = 0):
    global __func_start_regexp__, __inherit_regexp__, __export_func_regexp__, __addtask_regexp__, __addhandler_regexp__, __infunc__, __body__, __residue__
    __body__ = []
    __infunc__ = ""
Example #37
    def download(self, loc, ud, d):

        method = ud.parm.get('method', 'pserver')
        localdir = ud.parm.get('localdir', ud.module)
        cvs_port = ud.parm.get('port', '')

        cvs_rsh = None
        if method == "ext":
            if "rsh" in ud.parm:
                cvs_rsh = ud.parm["rsh"]

        if method == "dir":
            cvsroot = ud.path
        else:
            cvsroot = ":" + method
            cvsproxyhost = data.getVar('CVS_PROXY_HOST', d, True)
            if cvsproxyhost:
                cvsroot += ";proxy=" + cvsproxyhost
            cvsproxyport = data.getVar('CVS_PROXY_PORT', d, True)
            if cvsproxyport:
                cvsroot += ";proxyport=" + cvsproxyport
            cvsroot += ":" + ud.user
            if ud.pswd:
                cvsroot += ":" + ud.pswd
            cvsroot += "@" + ud.host + ":" + cvs_port + ud.path

        options = []
        if 'norecurse' in ud.parm:
            options.append("-l")
        if ud.date:
            # treat YYYYMMDDHHMM specially for CVS
            if len(ud.date) == 12:
                options.append("-D \"%s %s:%s UTC\"" % (ud.date[0:8], ud.date[8:10], ud.date[10:12]))
            else:
                options.append("-D \"%s UTC\"" % ud.date)
        if ud.tag:
            options.append("-r %s" % ud.tag)

        localdata = data.createCopy(d)
        data.setVar('OVERRIDES', "cvs:%s" % data.getVar('OVERRIDES', localdata), localdata)
        data.update_data(localdata)

        data.setVar('CVSROOT', cvsroot, localdata)
        data.setVar('CVSCOOPTS', " ".join(options), localdata)
        data.setVar('CVSMODULE', ud.module, localdata)
        cvscmd = data.getVar('FETCHCOMMAND', localdata, True)
        cvsupdatecmd = data.getVar('UPDATECOMMAND', localdata, True)

        if cvs_rsh:
            cvscmd = "CVS_RSH=\"%s\" %s" % (cvs_rsh, cvscmd)
            cvsupdatecmd = "CVS_RSH=\"%s\" %s" % (cvs_rsh, cvsupdatecmd)

        # create module directory
        logger.debug(2, "Fetch: checking for module directory")
        pkg = data.expand('${PN}', d)
        pkgdir = os.path.join(data.expand('${CVSDIR}', localdata), pkg)
        moddir = os.path.join(pkgdir, localdir)
        if os.access(os.path.join(moddir, 'CVS'), os.R_OK):
            logger.info("Update " + loc)
            bb.fetch2.check_network_access(d, cvsupdatecmd, ud.url)
            # update sources there
            os.chdir(moddir)
            cmd = cvsupdatecmd
        else:
            logger.info("Fetch " + loc)
            # check out sources there
            bb.utils.mkdirhier(pkgdir)
            os.chdir(pkgdir)
            logger.debug(1, "Running %s", cvscmd)
            bb.fetch2.check_network_access(d, cvscmd, ud.url)
            cmd = cvscmd

        runfetchcmd(cmd, d, cleanup = [moddir])

        if not os.access(moddir, os.R_OK):
            raise FetchError("Directory %s was not readable despite sucessful fetch?!" % moddir, ud.url)

        scmdata = ud.parm.get("scmdata", "")
        if scmdata == "keep":
            tar_flags = ""
        else:
            tar_flags = "--exclude 'CVS'"

        # tar them up to a defined filename
        if 'fullpath' in ud.parm:
            os.chdir(pkgdir)
            cmd = "tar %s -czf %s %s" % (tar_flags, ud.localpath, localdir)
        else:
            os.chdir(moddir)
            os.chdir('..')
            cmd = "tar %s -czf %s %s" % (tar_flags, ud.localpath, os.path.basename(moddir))

        runfetchcmd(cmd, d, cleanup = [ud.localpath])
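
The cvsroot assembled above follows the usual CVS connection-string syntax, ':method[;proxy=host;proxyport=port]:user[:password]@host:port/path'. A standalone sketch of that assembly, with the same ordering as the code above (the host, user and proxy values are made up):

# Sketch of the CVSROOT assembly above; all example values are invented.
def build_cvsroot(method, user, pswd, host, port, path, proxyhost=None, proxyport=None):
    root = ":" + method
    if proxyhost:
        root += ";proxy=" + proxyhost
    if proxyport:
        root += ";proxyport=" + proxyport
    root += ":" + user
    if pswd:
        root += ":" + pswd
    root += "@" + host + ":" + port + path
    return root

print(build_cvsroot("pserver", "anonymous", "", "cvs.example.org", "", "/cvsroot/project"))
# :pserver:anonymous@cvs.example.org:/cvsroot/project
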
Example #38
    def go(self, d, urls = []):
        """Fetch urls"""

        def md5_sum(parm, d):
            """
            Return the MD5SUM associated with the to be downloaded
            file.
            It can return None if no md5sum is associated
            """
            try:
                return parm['md5sum']
            except:
                return None

        def verify_md5sum(wanted_sum, got_sum):
            """
            Verify the md5sum we wanted with the one we got
            """
            if not wanted_sum:
                return True

            return wanted_sum == got_sum

        def fetch_uri(uri, basename, dl, md5, parm, d):
            # the MD5 sum we want to verify
            wanted_md5sum = md5_sum(parm, d)
            if os.path.exists(dl):
#               file exists, but we didn't complete it.. trying again..
                fetchcmd = data.getVar("RESUMECOMMAND", d, 1)
            else:
                fetchcmd = data.getVar("FETCHCOMMAND", d, 1)

            bb.note("fetch " + uri)
            fetchcmd = fetchcmd.replace("${URI}", uri)
            fetchcmd = fetchcmd.replace("${FILE}", basename)
            bb.debug(2, "executing " + fetchcmd)
            ret = os.system(fetchcmd)
            if ret != 0:
                return False

            # check if sourceforge did send us to the mirror page
            dl_dir = data.getVar("DL_DIR", d, True)
            if not os.path.exists(dl):
                os.system("rm %s*" % dl) # FIXME shell quote it
                bb.debug(2,"sourceforge.net send us to the mirror on %s" % basename)
                return False

#           supposedly complete.. write out md5sum
            md5data = ""
            if bb.which(data.getVar('PATH', d), 'md5sum'):
                try:
                    md5pipe = os.popen('md5sum ' + dl)
                    md5data = (md5pipe.readline().split() or [ "" ])[0]
                    md5pipe.close()
                except OSError:
                    md5data = ""

            # verify the md5sum
            if not verify_md5sum(wanted_md5sum, md5data):
                raise MD5SumError(uri)

            md5out = file(md5, 'w')
            md5out.write(md5data)
            md5out.close()
            return True

        if not urls:
            urls = self.urls

        localdata = data.createCopy(d)
        data.setVar('OVERRIDES', "wget:" + data.getVar('OVERRIDES', localdata), localdata)
        data.update_data(localdata)

        for uri in urls:
            completed = 0
            (type, host, path, user, pswd, parm) = bb.decodeurl(data.expand(uri, localdata))
            basename = os.path.basename(path)
            dl = self.localpath(uri, d)
            dl = data.expand(dl, localdata)
            md5 = dl + '.md5'

            if os.path.exists(md5):
#               complete, nothing to see here..
                continue

            premirrors = [ i.split() for i in (data.getVar('PREMIRRORS', localdata, 1) or "").split('\n') if i ]
            for (find, replace) in premirrors:
                newuri = uri_replace(uri, find, replace, d)
                if newuri != uri:
                    if fetch_uri(newuri, basename, dl, md5, parm, localdata):
                        completed = 1
                        break

            if completed:
                continue

            if fetch_uri(uri, basename, dl, md5, parm, localdata):
                continue

#           try mirrors
            mirrors = [ i.split() for i in (data.getVar('MIRRORS', localdata, 1) or "").split('\n') if i ]
            for (find, replace) in mirrors:
                newuri = uri_replace(uri, find, replace, d)
                if newuri != uri:
                    if fetch_uri(newuri, basename, dl, md5, parm, localdata):
                        completed = 1
                        break

            if not completed:
                raise FetchError(uri)

        del localdata
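
The md5 handling above pipes the downloaded file through the external md5sum tool and compares the digest with an optional md5sum parameter from the URL. A rough modern equivalent of just the verification step, using hashlib instead of a pipe (the file path and digest below are placeholders):

import hashlib

# Sketch of the md5 verification step above, using hashlib instead of 'md5sum'.
def verify_md5(filename, wanted_sum):
    if not wanted_sum:          # no md5sum parameter was given: accept the download
        return True
    md5 = hashlib.md5()
    with open(filename, "rb") as f:
        for chunk in iter(lambda: f.read(8192), b""):
            md5.update(chunk)
    return md5.hexdigest() == wanted_sum

# Hypothetical usage:
# verify_md5("/downloads/foo-1.0.tar.gz", "d41d8cd98f00b204e9800998ecf8427e")
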
Example #39
    def go(self, d, urls = []):
        """Fetch urls"""
        if not urls:
            urls = self.urls

        localdata = data.createCopy(d)
        data.setVar('OVERRIDES', "cvs:%s" % data.getVar('OVERRIDES', localdata), localdata)
        data.update_data(localdata)

        for loc in urls:
            (type, host, path, user, pswd, parm) = bb.decodeurl(data.expand(loc, localdata))
            if not "module" in parm:
                raise MissingParameterError("cvs method needs a 'module' parameter")
            else:
                module = parm["module"]

            dlfile = self.localpath(loc, localdata)
            dldir = data.getVar('DL_DIR', localdata, 1)
#           if local path contains the cvs
#           module, consider the dir above it to be the
#           download directory
#           pos = dlfile.find(module)
#           if pos:
#               dldir = dlfile[:pos]
#           else:
#               dldir = os.path.dirname(dlfile)

#           setup cvs options
            options = []
            if 'tag' in parm:
                tag = parm['tag']
            else:
                tag = ""

            if 'date' in parm:
                date = parm['date']
            else:
                if not tag:
                    date = Fetch.getSRCDate(d)
                else:
                    date = ""

            if "method" in parm:
                method = parm["method"]
            else:
                method = "pserver"

            if "localdir" in parm:
                localdir = parm["localdir"]
            else:
                localdir = module

            cvs_rsh = None
            if method == "ext":
                if "rsh" in parm:
                    cvs_rsh = parm["rsh"]

            tarfn = data.expand('%s_%s_%s_%s.tar.gz' % (module.replace('/', '.'), host, tag, date), localdata)
            data.setVar('TARFILES', dlfile, localdata)
            data.setVar('TARFN', tarfn, localdata)

            dl = os.path.join(dldir, tarfn)
            if os.access(dl, os.R_OK):
                bb.debug(1, "%s already exists, skipping cvs checkout." % tarfn)
                continue

            # try to use the tarball stash
            if Fetch.try_mirror(d, tarfn):
                continue

            if date:
                options.append("-D %s" % date)
            if tag:
                options.append("-r %s" % tag)

            olddir = os.path.abspath(os.getcwd())
            os.chdir(data.expand(dldir, localdata))

#           setup cvsroot
            if method == "dir":
                cvsroot = path
            else:
                cvsroot = ":" + method + ":" + user
                if pswd:
                    cvsroot += ":" + pswd
                cvsroot += "@" + host + ":" + path

            data.setVar('CVSROOT', cvsroot, localdata)
            data.setVar('CVSCOOPTS', " ".join(options), localdata)
            data.setVar('CVSMODULE', module, localdata)
            cvscmd = data.getVar('FETCHCOMMAND', localdata, 1)
            cvsupdatecmd = data.getVar('UPDATECOMMAND', localdata, 1)

            if cvs_rsh:
                cvscmd = "CVS_RSH=\"%s\" %s" % (cvs_rsh, cvscmd)
                cvsupdatecmd = "CVS_RSH=\"%s\" %s" % (cvs_rsh, cvsupdatecmd)

#           create module directory
            bb.debug(2, "Fetch: checking for module directory")
            pkg=data.expand('${PN}', d)
            pkgdir=os.path.join(data.expand('${CVSDIR}', localdata), pkg)
            moddir=os.path.join(pkgdir,localdir)
            if os.access(os.path.join(moddir,'CVS'), os.R_OK):
                bb.note("Update " + loc)
#               update sources there
                os.chdir(moddir)
                myret = os.system(cvsupdatecmd)
            else:
                bb.note("Fetch " + loc)
#               check out sources there
                bb.mkdirhier(pkgdir)
                os.chdir(pkgdir)
                bb.debug(1, "Running %s" % cvscmd)
                myret = os.system(cvscmd)

            if myret != 0 or not os.access(moddir, os.R_OK):
                try:
                    os.rmdir(moddir)
                except OSError:
                    pass
                raise FetchError(module)

            os.chdir(moddir)
            os.chdir('..')
#           tar them up to a defined filename
            myret = os.system("tar -czf %s %s" % (os.path.join(dldir,tarfn), os.path.basename(moddir)))
            if myret != 0:
                try:
                    os.unlink(tarfn)
                except OSError:
                    pass
            os.chdir(olddir)
        del localdata
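
The stash tarball name built above encodes the module, host, tag and date so that different checkouts of the same module land in distinct files under DL_DIR. A standalone sketch of that naming scheme (the inputs are example values only):

# Sketch of the stash tarball naming used above; the arguments are example values.
def cvs_tarball_name(module, host, tag, date):
    return '%s_%s_%s_%s.tar.gz' % (module.replace('/', '.'), host, tag, date)

print(cvs_tarball_name("apps/hello", "cvs.example.org", "RELEASE_1_0", "20240115"))
# apps.hello_cvs.example.org_RELEASE_1_0_20240115.tar.gz
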
Example #40
    def go(self, d, urls = []):
        """Fetch urls"""
        if not urls:
            urls = self.urls

        localdata = data.createCopy(d)
        data.setVar('OVERRIDES', "svn:%s" % data.getVar('OVERRIDES', localdata), localdata)
        data.update_data(localdata)

        for loc in urls:
            (type, host, path, user, pswd, parm) = bb.decodeurl(data.expand(loc, localdata))
            if not "module" in parm:
                raise MissingParameterError("svn method needs a 'module' parameter")
            else:
                module = parm["module"]

            dlfile = self.localpath(loc, localdata)
            dldir = data.getVar('DL_DIR', localdata, 1)
#           if local path contains the svn
#           module, consider the dir above it to be the
#           download directory
#           pos = dlfile.find(module)
#           if pos:
#               dldir = dlfile[:pos]
#           else:
#               dldir = os.path.dirname(dlfile)

#           setup svn options
            options = []
            if 'rev' in parm:
                revision = parm['rev']
            else:
                revision = ""

            date = Fetch.getSRCDate(d)

            if "proto" in parm:
                proto = parm["proto"]
            else:
                proto = "svn"

            svn_rsh = None
            if proto == "svn+ssh" and "rsh" in parm:
                svn_rsh = parm["rsh"]

            tarfn = data.expand('%s_%s_%s_%s_%s.tar.gz' % (module.replace('/', '.'), host, path.replace('/', '.'), revision, date), localdata)
            data.setVar('TARFILES', dlfile, localdata)
            data.setVar('TARFN', tarfn, localdata)

            # try to use the tarball stash
            if Fetch.check_for_tarball(d, tarfn, dldir, date):
                bb.debug(1, "%s already exists or was mirrored, skipping svn checkout." % tarfn)
                continue

            olddir = os.path.abspath(os.getcwd())
            os.chdir(data.expand(dldir, localdata))

            svnroot = host + path

            data.setVar('SVNROOT', svnroot, localdata)
            data.setVar('SVNCOOPTS', " ".join(options), localdata)
            data.setVar('SVNMODULE', module, localdata)
            svncmd = data.getVar('FETCHCOMMAND', localdata, 1)
            svncmd = "svn co -r {%s} %s://%s/%s" % (date, proto, svnroot, module)

            # use the revision if one was given; if SRCDATE is "now", check out HEAD with no date braces
            if revision:
                svncmd = "svn co -r %s %s://%s/%s" % (revision, proto, svnroot, module)
            elif date == "now":
                svncmd = "svn co %s://%s/%s" % (proto, svnroot, module)

            if svn_rsh:
                svncmd = "svn_RSH=\"%s\" %s" % (svn_rsh, svncmd)

#           create temp directory
            bb.debug(2, "Fetch: creating temporary directory")
            bb.mkdirhier(data.expand('${WORKDIR}', localdata))
            data.setVar('TMPBASE', data.expand('${WORKDIR}/oesvn.XXXXXX', localdata), localdata)
            tmppipe = os.popen(data.getVar('MKTEMPDIRCMD', localdata, 1) or "false")
            tmpfile = tmppipe.readline().strip()
            if not tmpfile:
                bb.error("Fetch: unable to create temporary directory.. make sure 'mktemp' is in the PATH.")
                raise FetchError(module)

#           check out sources there
            os.chdir(tmpfile)
            bb.note("Fetch " + loc)
            bb.debug(1, "Running %s" % svncmd)
            myret = os.system(svncmd)
            if myret != 0:
                try:
                    os.rmdir(tmpfile)
                except OSError:
                    pass
                raise FetchError(module)

            os.chdir(os.path.join(tmpfile, os.path.dirname(module)))
#           tar them up to a defined filename
            myret = os.system("tar -czf %s %s" % (os.path.join(dldir,tarfn), os.path.basename(module)))
            if myret != 0:
                try:
                    os.unlink(tarfn)
                except OSError:
                    pass
#           cleanup
            os.system('rm -rf %s' % tmpfile)
            os.chdir(olddir)
        del localdata
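
The svn command chosen above depends on what pins the checkout: an explicit revision takes precedence, a SRCDATE of "now" means a plain HEAD checkout, and any other date becomes an '-r {date}' peg revision. A small standalone sketch of that selection (the proto, root and module are placeholders):

# Sketch of the svn checkout command selection above; all values are placeholders.
def svn_checkout_cmd(proto, svnroot, module, revision="", date="now"):
    if revision:
        return "svn co -r %s %s://%s/%s" % (revision, proto, svnroot, module)
    if date == "now":
        return "svn co %s://%s/%s" % (proto, svnroot, module)
    return "svn co -r {%s} %s://%s/%s" % (date, proto, svnroot, module)

print(svn_checkout_cmd("svn", "svn.example.org/repo", "trunk", revision="1234"))
# svn co -r 1234 svn://svn.example.org/repo/trunk
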
Example #41
    def go(self, loc, ud, d):
        """
        Fetch urls
        """

        # try to use the tarball stash
        if not self.forcefetch(loc, ud, d) and Fetch.try_mirror(d, ud.localfile):
            bb.msg.debug(1, bb.msg.domain.Fetcher, "%s already exists or was mirrored, skipping perforce checkout." % ud.localpath)
            return

        (host,depot,user,pswd,parm) = Perforce.doparse(loc, d)

        if depot.find('/...') != -1:
            path = depot[:depot.find('/...')]
        else:
            path = depot

        if "module" in parm:
            module = parm["module"]
        else:
            module = os.path.basename(path)

        localdata = data.createCopy(d)
        data.setVar('OVERRIDES', "p4:%s" % data.getVar('OVERRIDES', localdata), localdata)
        data.update_data(localdata)

        # Get the p4 command
        if user:
            data.setVar('P4USER', user, localdata)

        if pswd:
            data.setVar('P4PASSWD', pswd, localdata)

        if host:
            data.setVar('P4PORT', host, localdata)

        p4cmd = data.getVar('FETCHCOMMAND', localdata, 1)

        # create temp directory
        bb.msg.debug(2, bb.msg.domain.Fetcher, "Fetch: creating temporary directory")
        bb.mkdirhier(data.expand('${WORKDIR}', localdata))
        data.setVar('TMPBASE', data.expand('${WORKDIR}/oep4.XXXXXX', localdata), localdata)
        tmppipe = os.popen(data.getVar('MKTEMPDIRCMD', localdata, 1) or "false")
        tmpfile = tmppipe.readline().strip()
        if not tmpfile:
            bb.error("Fetch: unable to create temporary directory.. make sure 'mktemp' is in the PATH.")
            raise FetchError(module)

        if "label" in parm:
            depot = "%s@%s" % (depot,parm["label"])
        else:
            cset = Perforce.getcset(d, depot, host, user, pswd, parm)
            depot = "%s@%s" % (depot,cset)

        os.chdir(tmpfile)
        bb.msg.note(1, bb.msg.domain.Fetcher, "Fetch " + loc)
        bb.msg.note(1, bb.msg.domain.Fetcher, "%s files %s" % (p4cmd, depot))
        p4file = os.popen("%s files %s" % (p4cmd, depot))

        if not p4file:
            bb.error("Fetch: unable to get the P4 files from %s" % (depot))
            raise FetchError(module)

        count = 0

        for file in p4file: 
            list = file.split()

            if list[2] == "delete":
                continue

            dest = list[0][len(path)+1:]
            where = dest.find("#")

            os.system("%s print -o %s/%s %s" % (p4cmd, module,dest[:where],list[0]))
            count = count + 1
            
        if count == 0:
            bb.error("Fetch:  No files gathered from the P4 fetch")
            raise FetchError(module)

        myret = os.system("tar -czf %s %s" % (ud.localpath, module))
        if myret != 0:
            try:
                os.unlink(ud.localpath)
            except OSError:
                pass
            raise FetchError(module)
        # cleanup
        os.system('rm -rf %s' % tmpfile)
Example #42
            fnid = taskdata.build_targets[targetid][0]
            fn = taskdata.fn_index[fnid]
        else:
            envdata = self.configuration.data

        if fn:
            try:
                envdata = self.bb_cache.loadDataFull(fn, self.configuration.data)
            except IOError, e:
                bb.msg.fatal(bb.msg.domain.Parsing, "Unable to read %s: %s" % (fn, e))
            except Exception, e:
                bb.msg.fatal(bb.msg.domain.Parsing, "%s" % e)

        # emit variables and shell functions
        try:
            data.update_data( envdata )
            data.emit_env(sys.__stdout__, envdata, True)
        except Exception, e:
            bb.msg.fatal(bb.msg.domain.Parsing, "%s" % e)
        # emit the metadata which isn't valid shell
        data.expandKeys( envdata )
        for e in envdata.keys():
            if data.getVarFlag( e, 'python', envdata ):
                sys.__stdout__.write("\npython %s () {\n%s}\n" % (e, data.getVar(e, envdata, 1)))

    def generateDotGraph( self, pkgs_to_build, ignore_deps ):
        """
        Generate a task dependency graph. 

        pkgs_to_build A list of packages that needs to be built
        ignore_deps   A list of names where processing of dependencies
Example #43
                             "Unable to read %s: %s" % (fn, e))
                raise
            except Exception, e:
                bb.msg.error(bb.msg.domain.Parsing, "%s" % e)
                raise

        class dummywrite:
            def __init__(self):
                self.writebuf = ""

            def write(self, output):
                self.writebuf = self.writebuf + output

        # emit variables and shell functions
        try:
            data.update_data(envdata)
            wb = dummywrite()
            data.emit_env(wb, envdata, True)
            bb.msg.plain(wb.writebuf)
        except Exception, e:
            bb.msg.fatal(bb.msg.domain.Parsing, "%s" % e)
        # emit the metadata which isn't valid shell
        data.expandKeys(envdata)
        for e in envdata.keys():
            if data.getVarFlag(e, 'python', envdata):
                bb.msg.plain("\npython %s () {\n%s}\n" %
                             (e, data.getVar(e, envdata, 1)))

    def generateDepTreeData(self, pkgs_to_build, task):
        """
        Create a dependency tree of pkgs_to_build, returning the data.
Example #44
    data.setVarFlags("__anonfunc", flag, d)
    from bb import build
    try:
        t = data.getVar('T', d)
        data.setVar('T', '${TMPDIR}/', d)
        build.exec_func("__anonfunc", d)
        data.delVar('T', d)
        if t:
            data.setVar('T', t, d)
    except Exception, e:
        bb.msg.debug(1, bb.msg.domain.Parsing,
                     "Exception when executing anonymous function: %s" % e)
        raise
    data.delVar("__anonqueue", d)
    data.delVar("__anonfunc", d)
    data.update_data(d)

    all_handlers = {}
    for var in data.getVar('__BBHANDLERS', d) or []:
        # try to add the handler
        handler = data.getVar(var, d)
        bb.event.register(var, handler)

    tasklist = data.getVar('__BBTASKS', d) or []
    bb.build.add_tasks(tasklist, d)


def handle(fn, d, include=0):
    global __func_start_regexp__, __inherit_regexp__, __export_func_regexp__, __addtask_regexp__, __addhandler_regexp__, __infunc__, __body__, __residue__
    __body__ = []
    __infunc__ = ""