def exec_task(task, d): """Execute an BB 'task' The primary difference between executing a task versus executing a function is that a task exists in the task digraph, and therefore has dependencies amongst other tasks.""" # Check whther this is a valid task if not data.getVarFlag(task, 'task', d): raise EventException("No such task", InvalidTask(task, d)) try: bb.msg.debug(1, bb.msg.domain.Build, "Executing task %s" % task) old_overrides = data.getVar('OVERRIDES', d, 0) localdata = data.createCopy(d) data.setVar('OVERRIDES', 'task-%s:%s' % (task[3:], old_overrides), localdata) data.update_data(localdata) data.expandKeys(localdata) event.fire(TaskStarted(task, localdata), localdata) exec_func(task, localdata) event.fire(TaskSucceeded(task, localdata), localdata) except FuncFailed, message: # Try to extract the optional logfile try: (msg, logfile) = message except: logfile = None msg = message bb.msg.note(1, bb.msg.domain.Build, "Task failed: %s" % message) failedevent = TaskFailed(msg, logfile, task, d) event.fire(failedevent, d) raise EventException("Function failed in task: %s" % message, failedevent)
def download(self, ud, d): """Fetch urls""" svkroot = ud.host + ud.path svkcmd = "svk co -r {%s} %s/%s" % (ud.date, svkroot, ud.module) if ud.revision: svkcmd = "svk co -r %s %s/%s" % (ud.revision, svkroot, ud.module) # create temp directory localdata = data.createCopy(d) data.update_data(localdata) logger.debug(2, "Fetch: creating temporary directory") bb.utils.mkdirhier(data.expand('${WORKDIR}', localdata)) data.setVar('TMPBASE', data.expand('${WORKDIR}/oesvk.XXXXXX', localdata), localdata) tmpfile, errors = bb.process.run(data.getVar('MKTEMPDIRCMD', localdata, True) or "false") tmpfile = tmpfile.strip() if not tmpfile: logger.error(errors) raise FetchError("Fetch: unable to create temporary directory.. make sure 'mktemp' is in the PATH.", ud.url) # check out sources there os.chdir(tmpfile) logger.info("Fetch " + ud.url) logger.debug(1, "Running %s", svkcmd) runfetchcmd(svkcmd, d, cleanup = [tmpfile]) os.chdir(os.path.join(tmpfile, os.path.dirname(ud.module))) # tar them up to a defined filename runfetchcmd("tar -czf %s %s" % (ud.localpath, os.path.basename(ud.module)), d, cleanup = [ud.localpath]) # cleanup bb.utils.prunedir(tmpfile)
def add_task(task, deps, d): task_graph = data.getVar('_task_graph', d) if not task_graph: task_graph = bb.digraph() data.setVarFlag(task, 'task', 1, d) task_graph.addnode(task, None) for dep in deps: if not task_graph.hasnode(dep): task_graph.addnode(dep, None) task_graph.addnode(task, dep) # don't assume holding a reference data.setVar('_task_graph', task_graph, d) task_deps = data.getVar('_task_deps', d) if not task_deps: task_deps = {} def getTask(name): deptask = data.getVarFlag(task, name, d) if deptask: if not name in task_deps: task_deps[name] = {} task_deps[name][task] = deptask getTask('deptask') getTask('rdeptask') getTask('recrdeptask') getTask('nostamp') data.setVar('_task_deps', task_deps, d)
def fileBuild( self, params, cmd = "build" ): """Parse and build a .bb file""" global last_exception name = params[0] bf = completeFilePath( name ) print "SHELL: Calling '%s' on '%s'" % ( cmd, bf ) oldcmd = cooker.configuration.cmd cooker.configuration.cmd = cmd thisdata = copy.deepcopy( initdata ) # Caution: parse.handle modifies thisdata, hence it would # lead to polluting cooker.configuration.data, which is # why we use it on a safe copy we obtained from cooker right after # parsing the initial *.conf files try: bbfile_data = parse.handle( bf, thisdata ) except parse.ParseError: print "ERROR: Unable to open or parse '%s'" % bf else: # Remove stamp for target if force mode active if cooker.configuration.force: bb.msg.note(2, bb.msg.domain.RunQueue, "Remove stamp %s, %s" % (cmd, bf)) bb.build.del_stamp('do_%s' % cmd, bbfile_data) item = data.getVar('PN', bbfile_data, 1) data.setVar( "_task_cache", [], bbfile_data ) # force try: cooker.tryBuildPackage( os.path.abspath( bf ), item, cmd, bbfile_data, True ) except build.EventException, e: print "ERROR: Couldn't build '%s'" % name last_exception = e
def execute(graph, item): if data.getVarFlag(item, 'task', d): if item in task_cache: return 1 if task != item: # deeper than toplevel, exec w/ deps exec_task(item, d) return 1 try: debug(1, "Executing task %s" % item) old_overrides = data.getVar('OVERRIDES', d, 0) from copy import deepcopy localdata = deepcopy(d) data.setVar('OVERRIDES', 'task_%s:%s' % (item, old_overrides), localdata) data.update_data(localdata) event.fire(TaskStarted(item, localdata)) exec_func(item, localdata) event.fire(TaskSucceeded(item, localdata)) task_cache.append(item) except FuncFailed, reason: note( "Task failed: %s" % reason ) failedevent = TaskFailed(item, d) event.fire(failedevent) raise EventException(None, failedevent)
def download(self, loc, ud, d): """Fetch urls""" svkroot = ud.host + ud.path svkcmd = "svk co -r {%s} %s/%s" % (ud.date, svkroot, ud.module) if ud.revision: svkcmd = "svk co -r %s %s/%s" % (ud.revision, svkroot, ud.module) # create temp directory localdata = data.createCopy(d) data.update_data(localdata) logger.debug(2, "Fetch: creating temporary directory") bb.utils.mkdirhier(data.expand('${WORKDIR}', localdata)) data.setVar('TMPBASE', data.expand('${WORKDIR}/oesvk.XXXXXX', localdata), localdata) tmpfile, errors = bb.process.run(data.getVar('MKTEMPDIRCMD', localdata, True) or "false") tmpfile = tmpfile.strip() if not tmpfile: logger.error(errors) raise FetchError("Fetch: unable to create temporary directory.. make sure 'mktemp' is in the PATH.", loc) # check out sources there os.chdir(tmpfile) logger.info("Fetch " + loc) logger.debug(1, "Running %s", svkcmd) runfetchcmd(svkcmd, d, cleanup = [tmpfile]) os.chdir(os.path.join(tmpfile, os.path.dirname(ud.module))) # tar them up to a defined filename runfetchcmd("tar -czf %s %s" % (ud.localpath, os.path.basename(ud.module)), d, cleanup = [ud.localpath]) # cleanup bb.utils.prunedir(tmpfile)
def load_bbfile( self, bbfile , config): """ Load and parse one .bb build file Return the data and whether parsing resulted in the file being skipped """ import bb from bb import utils, data, parse, debug, event, fatal # expand tmpdir to include this topdir data.setVar('TMPDIR', data.getVar('TMPDIR', config, 1) or "", config) bbfile_loc = os.path.abspath(os.path.dirname(bbfile)) oldpath = os.path.abspath(os.getcwd()) if bb.parse.cached_mtime_noerror(bbfile_loc): os.chdir(bbfile_loc) bb_data = data.init_db(config) try: bb_data = parse.handle(bbfile, bb_data) # read .bb data os.chdir(oldpath) return bb_data, False except bb.parse.SkipPackage: os.chdir(oldpath) return bb_data, True except: os.chdir(oldpath) raise
def exec_task(task, d): """Execute an BB 'task' The primary difference between executing a task versus executing a function is that a task exists in the task digraph, and therefore has dependencies amongst other tasks.""" # Check whther this is a valid task if not data.getVarFlag(task, 'task', d): raise EventException("No such task", InvalidTask(task, d)) try: bb.msg.debug(1, bb.msg.domain.Build, "Executing task %s" % task) old_overrides = data.getVar('OVERRIDES', d, 0) localdata = data.createCopy(d) data.setVar('OVERRIDES', 'task-%s:%s' % (task[3:], old_overrides), localdata) data.update_data(localdata) data.expandKeys(localdata) event.fire(TaskStarted(task, localdata)) exec_func(task, localdata) event.fire(TaskSucceeded(task, localdata)) except FuncFailed, message: # Try to extract the optional logfile try: (msg, logfile) = message except: logfile = None msg = message bb.msg.note(1, bb.msg.domain.Build, "Task failed: %s" % message ) failedevent = TaskFailed(msg, logfile, task, d) event.fire(failedevent) raise EventException("Function failed in task: %s" % message, failedevent)
def load_bbfile( self, bbfile , config): """ Load and parse one .bb build file Return the data and whether parsing resulted in the file being skipped """ import bb from bb import utils, data, parse, debug, event, fatal # expand tmpdir to include this topdir data.setVar('TMPDIR', data.getVar('TMPDIR', config, 1) or "", config) bbfile_loc = os.path.abspath(os.path.dirname(bbfile)) oldpath = os.path.abspath(os.getcwd()) if self.mtime(bbfile_loc): os.chdir(bbfile_loc) bb_data = data.init_db(config) try: bb_data = parse.handle(bbfile, bb_data) # read .bb data os.chdir(oldpath) return bb_data, False except bb.parse.SkipPackage: os.chdir(oldpath) return bb_data, True except: os.chdir(oldpath) raise
def execute(graph, item): if data.getVarFlag(item, 'task', d): if item in task_cache: return 1 if task != item: # deeper than toplevel, exec w/ deps exec_task(item, d) return 1 try: bb.msg.debug(1, bb.msg.domain.Build, "Executing task %s" % item) old_overrides = data.getVar('OVERRIDES', d, 0) localdata = data.createCopy(d) data.setVar('OVERRIDES', 'task_%s:%s' % (item, old_overrides), localdata) data.update_data(localdata) event.fire(TaskStarted(item, localdata)) exec_func(item, localdata) event.fire(TaskSucceeded(item, localdata)) task_cache.append(item) data.setVar('_task_cache', task_cache, d) except FuncFailed, reason: bb.msg.note(1, bb.msg.domain.Build, "Task failed: %s" % reason ) failedevent = TaskFailed(item, d) event.fire(failedevent) raise EventException("Function failed in task: %s" % reason, failedevent)
def stamp_is_current(task, d, checkdeps = 1): """Check status of a given task's stamp. Returns 0 if it is not current and needs updating.""" task_graph = data.getVar('_task_graph', d) if not task_graph: task_graph = bb.digraph() data.setVar('_task_graph', task_graph, d) stamp = data.getVar('STAMP', d) if not stamp: return 0 stampfile = "%s.%s" % (data.expand(stamp, d), task) if not os.access(stampfile, os.F_OK): return 0 if checkdeps == 0: return 1 import stat tasktime = os.stat(stampfile)[stat.ST_MTIME] _deps = [] def checkStamp(graph, task): # check for existence if data.getVarFlag(task, 'nostamp', d): return 1 if not stamp_is_current(task, d, 0): return 0 depfile = "%s.%s" % (data.expand(stamp, d), task) deptime = os.stat(depfile)[stat.ST_MTIME] if deptime > tasktime: return 0 return 1 return task_graph.walkdown(task, checkStamp)
def fileBuild( self, params, cmd = "build" ): """Parse and build a .bb file""" name = params[0] bf = completeFilePath( name ) print "SHELL: Calling '%s' on '%s'" % ( cmd, bf ) oldcmd = cooker.configuration.cmd cooker.configuration.cmd = cmd cooker.build_cache = [] cooker.build_cache_fail = [] thisdata = copy.deepcopy( initdata ) # Caution: parse.handle modifies thisdata, hence it would # lead to polluting cooker.configuration.data, which is # why we use it on a safe copy we obtained from cooker right after # parsing the initial *.conf files try: bbfile_data = parse.handle( bf, thisdata ) except parse.ParseError: print "ERROR: Unable to open or parse '%s'" % bf else: item = data.getVar('PN', bbfile_data, 1) data.setVar( "_task_cache", [], bbfile_data ) # force try: cooker.tryBuildPackage( os.path.abspath( bf ), item, cmd, bbfile_data, True ) except build.EventException, e: print "ERROR: Couldn't build '%s'" % name global last_exception last_exception = e
def fileBuild( self, params, cmd = "build" ): """Parse and build a .bb file""" global last_exception name = params[0] bf = completeFilePath( name ) print "SHELL: Calling '%s' on '%s'" % ( cmd, bf ) oldcmd = cooker.configuration.cmd cooker.configuration.cmd = cmd thisdata = data.createCopy(cooker.configuration.data) data.update_data(thisdata) data.expandKeys(thisdata) try: bbfile_data = parse.handle( bf, thisdata ) except parse.ParseError: print "ERROR: Unable to open or parse '%s'" % bf else: # Remove stamp for target if force mode active if cooker.configuration.force: bb.msg.note(2, bb.msg.domain.RunQueue, "Remove stamp %s, %s" % (cmd, bf)) bb.build.del_stamp('do_%s' % cmd, bbfile_data) item = data.getVar('PN', bbfile_data, 1) data.setVar( "_task_cache", [], bbfile_data ) # force try: cooker.tryBuildPackage( os.path.abspath( bf ), item, cmd, bbfile_data, True ) except build.EventException, e: print "ERROR: Couldn't build '%s'" % name last_exception = e
def handle(fn, d, include): global __func_start_regexp__, __inherit_regexp__, __export_func_regexp__, __addtask_regexp__, __addhandler_regexp__, __infunc__, __body__, __residue__, __classname__ __body__ = [] __infunc__ = "" __classname__ = "" __residue__ = [] if include == 0: logger.debug(2, "BB %s: handle(data)", fn) else: logger.debug(2, "BB %s: handle(data, include)", fn) base_name = os.path.basename(fn) (root, ext) = os.path.splitext(base_name) init(d) if ext == ".bbclass": __classname__ = root __inherit_cache = d.getVar('__inherit_cache') or [] if not fn in __inherit_cache: __inherit_cache.append(fn) data.setVar('__inherit_cache', __inherit_cache, d) if include != 0: oldfile = d.getVar('FILE') else: oldfile = None abs_fn = resolve_file(fn, d) if include: bb.parse.mark_dependency(d, abs_fn) # actual loading statements = get_statements(fn, abs_fn, base_name) # DONE WITH PARSING... time to evaluate if ext != ".bbclass": data.setVar('FILE', abs_fn, d) try: statements.eval(d) except bb.parse.SkipPackage: bb.data.setVar("__SKIPPED", True, d) if include == 0: return { "" : d } if ext != ".bbclass" and include == 0: return ast.multi_finalize(fn, d) if oldfile: d.setVar("FILE", oldfile) # we have parsed the bb class now if ext == ".bbclass" or ext == ".inc": bb.methodpool.set_parsed_module(base_name) return d
def handle(fn, d, include): global __func_start_regexp__, __inherit_regexp__, __export_func_regexp__, __addtask_regexp__, __addhandler_regexp__, __infunc__, __body__, __residue__ __body__ = [] __infunc__ = "" __classname__ = "" __residue__ = [] if include == 0: bb.msg.debug(2, bb.msg.domain.Parsing, "BB " + fn + ": handle(data)") else: bb.msg.debug(2, bb.msg.domain.Parsing, "BB " + fn + ": handle(data, include)") (root, ext) = os.path.splitext(os.path.basename(fn)) base_name = "%s%s" % (root,ext) init(d) if ext == ".bbclass": __classname__ = root classes.append(__classname__) __inherit_cache = data.getVar('__inherit_cache', d) or [] if not fn in __inherit_cache: __inherit_cache.append(fn) data.setVar('__inherit_cache', __inherit_cache, d) if include != 0: oldfile = data.getVar('FILE', d) else: oldfile = None abs_fn = resolve_file(fn, d) if include: bb.parse.mark_dependency(d, abs_fn) # actual loading statements = get_statements(fn, abs_fn, base_name) # DONE WITH PARSING... time to evaluate if ext != ".bbclass": data.setVar('FILE', fn, d) statements.eval(d) if ext == ".bbclass": classes.remove(__classname__) else: if include == 0: return ast.multi_finalize(fn, d) if oldfile: bb.data.setVar("FILE", oldfile, d) # we have parsed the bb class now if ext == ".bbclass" or ext == ".inc": bb.methodpool.get_parsed_dict()[base_name] = 1 return d
def _parse_layer_conf(layerdir, data): data.setVar("LAYERDIR", str(layerdir)) if hasattr(bb, "cookerdata"): # Newer BitBake data = bb.cookerdata.parse_config_file(os.path.join(layerdir, "conf", "layer.conf"), data) else: # Older BitBake (1.18 and below) data = bb.cooker._parse(os.path.join(layerdir, "conf", "layer.conf"), data) data.expandVarref("LAYERDIR")
def go(self, loc, ud, d): """Fetch urls""" if not self.forcefetch(loc, ud, d) and Fetch.try_mirror( d, ud.localfile): return svkroot = ud.host + ud.path # pyflakes claims date is not known... it looks right svkcmd = "svk co -r {%s} %s/%s" % (date, svkroot, ud.module) if ud.revision: svkcmd = "svk co -r %s/%s" % (ud.revision, svkroot, ud.module) # create temp directory localdata = data.createCopy(d) data.update_data(localdata) bb.msg.debug(2, bb.msg.domain.Fetcher, "Fetch: creating temporary directory") bb.mkdirhier(data.expand('${WORKDIR}', localdata)) data.setVar('TMPBASE', data.expand('${WORKDIR}/oesvk.XXXXXX', localdata), localdata) tmppipe = os.popen( data.getVar('MKTEMPDIRCMD', localdata, 1) or "false") tmpfile = tmppipe.readline().strip() if not tmpfile: bb.msg.error( bb.msg.domain.Fetcher, "Fetch: unable to create temporary directory.. make sure 'mktemp' is in the PATH." ) raise FetchError(ud.module) # check out sources there os.chdir(tmpfile) bb.msg.note(1, bb.msg.domain.Fetcher, "Fetch " + loc) bb.msg.debug(1, bb.msg.domain.Fetcher, "Running %s" % svkcmd) myret = os.system(svkcmd) if myret != 0: try: os.rmdir(tmpfile) except OSError: pass raise FetchError(ud.module) os.chdir(os.path.join(tmpfile, os.path.dirname(ud.module))) # tar them up to a defined filename myret = os.system("tar -czf %s %s" % (ud.localpath, os.path.basename(ud.module))) if myret != 0: try: os.unlink(ud.localpath) except OSError: pass raise FetchError(ud.module) # cleanup os.system('rm -rf %s' % tmpfile)
def testGVar(self): # import the data module from bb import data from bb import data_smart d = data_smart.DataSmart() data.setVar('TEST', 'testcontents', d ) self.assertEquals( data.getVar('TEST',d), 'testcontents', 'Setting Variable Failed') data.delVar('TEST', d) self.assertEquals(data.getVar('TEST', d), None)
def clean(self, ud, d): """ Clean CVS Files and tarballs """ pkg = data.expand('${PN}', d) localdata = data.createCopy(d) data.setVar('OVERRIDES', "cvs:%s" % data.getVar('OVERRIDES', localdata), localdata) data.update_data(localdata) pkgdir = os.path.join(data.expand('${CVSDIR}', localdata), pkg) bb.utils.remove(pkgdir, True) bb.utils.remove(ud.localpath)
def parse_layer_conf(layerdir, data, logger=None): conf_file = os.path.join(layerdir, "conf", "layer.conf") if not is_layer_valid(layerdir): if logger: logger.error("Cannot find layer.conf: %s" % conf_file) return data.setVar('LAYERDIR', str(layerdir)) data = parse_conf(conf_file, data) data.expandVarref('LAYERDIR')
def add_task(task, deps, d): task_graph = data.getVar('_task_graph', d) if not task_graph: task_graph = bb.digraph() data.setVar('_task_graph', task_graph, d) data.setVarFlag(task, 'task', 1, d) task_graph.addnode(task, None) for dep in deps: if not task_graph.hasnode(dep): task_graph.addnode(dep, None) task_graph.addnode(task, dep)
def go(self, uri, ud, d): """Fetch urls""" def fetch_uri(uri, ud, d): if os.path.exists(ud.localpath): # file exists, but we didnt complete it.. trying again.. fetchcmd = data.getVar("RESUMECOMMAND", d, 1) else: fetchcmd = data.getVar("FETCHCOMMAND", d, 1) bb.msg.note(1, bb.msg.domain.Fetcher, "fetch " + uri) fetchcmd = fetchcmd.replace("${URI}", uri) fetchcmd = fetchcmd.replace("${FILE}", ud.basename) bb.msg.debug(2, bb.msg.domain.Fetcher, "executing " + fetchcmd) ret = os.system(fetchcmd) if ret != 0: return False # Sanity check since wget can pretend it succeed when it didn't # Also, this used to happen if sourceforge sent us to the mirror page if not os.path.exists(ud.localpath): bb.msg.debug( 2, bb.msg.domain.Fetcher, "The fetch command for %s returned success but %s doesn't exist?..." % (uri, ud.localpath), ) return False return True localdata = data.createCopy(d) data.setVar("OVERRIDES", "wget:" + data.getVar("OVERRIDES", localdata), localdata) data.update_data(localdata) premirrors = [i.split() for i in (data.getVar("PREMIRRORS", localdata, 1) or "").split("\n") if i] for (find, replace) in premirrors: newuri = uri_replace(uri, find, replace, d) if newuri != uri: if fetch_uri(newuri, ud, localdata): return if fetch_uri(uri, ud, localdata): return # try mirrors mirrors = [i.split() for i in (data.getVar("MIRRORS", localdata, 1) or "").split("\n") if i] for (find, replace) in mirrors: newuri = uri_replace(uri, find, replace, d) if newuri != uri: if fetch_uri(newuri, ud, localdata): return raise FetchError(uri)
def extract_stamp_data(d, fn): """ Extracts stamp data from d which is either a data dictionary (fn unset) or a dataCache entry (fn set). """ if fn: return (d.task_queues[fn], d.stamp[fn], d.task_deps[fn]) task_graph = data.getVar('_task_graph', d) if not task_graph: task_graph = bb.digraph() data.setVar('_task_graph', task_graph, d) return (task_graph, data.getVar('STAMP', d, 1), None)
def poke( self, params ): """Set contents of variable defined in providee's metadata""" name, var, value = params bbfile = self._findProvider( name ) if bbfile is not None: d = make.pkgdata[bbfile] data.setVar( var, value, d ) # mark the change semi-persistent make.pkgdata.setDirty(bbfile, d) print "OK" else: print "ERROR: Nothing provides '%s'" % name
def inherit(files, fn, lineno, d): __inherit_cache = data.getVar('__inherit_cache', d) or [] for file in files: file = data.expand(file, d) if not os.path.isabs(file) and not file.endswith(".bbclass"): file = os.path.join('classes', '%s.bbclass' % file) if not file in __inherit_cache: logger.log(logging.DEBUG -1, "BB %s:%d: inheriting %s", fn, lineno, file) __inherit_cache.append( file ) data.setVar('__inherit_cache', __inherit_cache, d) include(fn, file, lineno, d, "inherit") __inherit_cache = data.getVar('__inherit_cache', d) or []
def exec_task(task, d): """Execute an BB 'task' The primary difference between executing a task versus executing a function is that a task exists in the task digraph, and therefore has dependencies amongst other tasks.""" # check if the task is in the graph.. task_graph = data.getVar('_task_graph', d) if not task_graph: task_graph = bb.digraph() data.setVar('_task_graph', task_graph, d) task_cache = data.getVar('_task_cache', d) if not task_cache: task_cache = [] data.setVar('_task_cache', task_cache, d) if not task_graph.hasnode(task): raise EventException("Missing node in task graph", InvalidTask(task, d)) # check whether this task needs executing.. if not data.getVarFlag(task, 'force', d): if stamp_is_current(task, d): return 1 # follow digraph path up, then execute our way back down def execute(graph, item): if data.getVarFlag(item, 'task', d): if item in task_cache: return 1 if task != item: # deeper than toplevel, exec w/ deps exec_task(item, d) return 1 try: bb.msg.debug(1, bb.msg.domain.Build, "Executing task %s" % item) old_overrides = data.getVar('OVERRIDES', d, 0) localdata = data.createCopy(d) data.setVar('OVERRIDES', 'task_%s:%s' % (item, old_overrides), localdata) data.update_data(localdata) event.fire(TaskStarted(item, localdata)) exec_func(item, localdata) event.fire(TaskSucceeded(item, localdata)) task_cache.append(item) data.setVar('_task_cache', task_cache, d) except FuncFailed, reason: note( "Task failed: %s" % reason ) failedevent = TaskFailed(item, d) event.fire(failedevent) raise EventException("Function failed in task: %s" % reason, failedevent)
def go(self, loc, ud, d): """Fetch urls""" svkroot = ud.host + ud.path svkcmd = "svk co -r {%s} %s/%s" % (ud.date, svkroot, ud.module) if ud.revision: svkcmd = "svk co -r %s %s/%s" % (ud.revision, svkroot, ud.module) # create temp directory localdata = data.createCopy(d) data.update_data(localdata) logger.debug(2, "Fetch: creating temporary directory") bb.mkdirhier(data.expand('${WORKDIR}', localdata)) data.setVar('TMPBASE', data.expand('${WORKDIR}/oesvk.XXXXXX', localdata), localdata) tmppipe = os.popen( data.getVar('MKTEMPDIRCMD', localdata, 1) or "false") tmpfile = tmppipe.readline().strip() if not tmpfile: logger.error( "Fetch: unable to create temporary directory.. make sure 'mktemp' is in the PATH." ) raise FetchError(ud.module) # check out sources there os.chdir(tmpfile) logger.info("Fetch " + loc) logger.debug(1, "Running %s", svkcmd) myret = os.system(svkcmd) if myret != 0: try: os.rmdir(tmpfile) except OSError: pass raise FetchError(ud.module) os.chdir(os.path.join(tmpfile, os.path.dirname(ud.module))) # tar them up to a defined filename myret = os.system("tar -czf %s %s" % (ud.localpath, os.path.basename(ud.module))) if myret != 0: try: os.unlink(ud.localpath) except OSError: pass raise FetchError(ud.module) # cleanup bb.utils.prunedir(tmpfile)
def inherit(files, d): __inherit_cache = data.getVar('__inherit_cache', d) or [] fn = "" lineno = 0 files = data.expand(files, d) for file in files: if file[0] != "/" and file[-8:] != ".bbclass": file = os.path.join('classes', '%s.bbclass' % file) if not file in __inherit_cache: bb.msg.debug(2, bb.msg.domain.Parsing, "BB %s:%d: inheriting %s" % (fn, lineno, file)) __inherit_cache.append( file ) include(fn, file, d, "inherit") data.setVar('__inherit_cache', __inherit_cache, d)
def inherit(files, d): __inherit_cache = data.getVar('__inherit_cache', d) or "" fn = "" lineno = 0 for f in files: file = data.expand(f, d) if file[0] != "/" and file[-8:] != ".bbclass": file = os.path.join('classes', '%s.bbclass' % file) if not file in __inherit_cache.split(): debug(2, "BB %s:%d: inheriting %s" % (fn, lineno, file)) __inherit_cache += " %s" % file include(fn, file, d) data.setVar('__inherit_cache', __inherit_cache, d)
def finalise(fn, d): data.expandKeys(d) data.update_data(d) anonqueue = data.getVar("__anonqueue", d, 1) or [] body = [x['content'] for x in anonqueue] flag = {'python': 1, 'func': 1} data.setVar("__anonfunc", "\n".join(body), d) data.setVarFlags("__anonfunc", flag, d) from bb import build try: t = data.getVar('T', d) data.setVar('T', '${TMPDIR}/anonfunc/', d) anonfuncs = data.getVar('__BBANONFUNCS', d) or [] code = "" for f in anonfuncs: code = code + " %s(d)\n" % f data.setVar("__anonfunc", code, d) build.exec_func("__anonfunc", d) data.delVar('T', d) if t: data.setVar('T', t, d) except Exception, e: bb.msg.debug(1, bb.msg.domain.Parsing, "Exception when executing anonymous function: %s" % e) raise
def inherit(files, fn, lineno, d): __inherit_cache = d.getVar('__inherit_cache') or [] files = d.expand(files).split() for file in files: if not os.path.isabs(file) and not file.endswith(".bbclass"): file = os.path.join('classes', '%s.bbclass' % file) if not file in __inherit_cache: logger.log(logging.DEBUG - 1, "BB %s:%d: inheriting %s", fn, lineno, file) __inherit_cache.append(file) data.setVar('__inherit_cache', __inherit_cache, d) include(fn, file, lineno, d, "inherit") __inherit_cache = d.getVar('__inherit_cache') or []
def go(self, loc, ud, d): """Fetch urls""" if not self.forcefetch(loc, ud, d) and Fetch.try_mirror(d, ud.localfile): return svkroot = ud.host + ud.path # pyflakes claims date is not known... it looks right svkcmd = "svk co -r {%s} %s/%s" % (date, svkroot, ud.module) if ud.revision: svkcmd = "svk co -r %s/%s" % (ud.revision, svkroot, ud.module) # create temp directory localdata = data.createCopy(d) data.update_data(localdata) bb.msg.debug(2, bb.msg.domain.Fetcher, "Fetch: creating temporary directory") bb.mkdirhier(data.expand('${WORKDIR}', localdata)) data.setVar('TMPBASE', data.expand('${WORKDIR}/oesvk.XXXXXX', localdata), localdata) tmppipe = os.popen(data.getVar('MKTEMPDIRCMD', localdata, 1) or "false") tmpfile = tmppipe.readline().strip() if not tmpfile: bb.msg.error(bb.msg.domain.Fetcher, "Fetch: unable to create temporary directory.. make sure 'mktemp' is in the PATH.") raise FetchError(ud.module) # check out sources there os.chdir(tmpfile) bb.msg.note(1, bb.msg.domain.Fetcher, "Fetch " + loc) bb.msg.debug(1, bb.msg.domain.Fetcher, "Running %s" % svkcmd) myret = os.system(svkcmd) if myret != 0: try: os.rmdir(tmpfile) except OSError: pass raise FetchError(ud.module) os.chdir(os.path.join(tmpfile, os.path.dirname(ud.module))) # tar them up to a defined filename myret = os.system("tar -czf %s %s" % (ud.localpath, os.path.basename(ud.module))) if myret != 0: try: os.unlink(ud.localpath) except OSError: pass raise FetchError(ud.module) # cleanup os.system('rm -rf %s' % tmpfile)
def go(self, uri, ud, d): """Fetch urls""" def fetch_uri(uri, ud, d): if os.path.exists(ud.localpath): # file exists, but we didnt complete it.. trying again.. fetchcmd = data.getVar("RESUMECOMMAND", d, 1) else: fetchcmd = data.getVar("FETCHCOMMAND", d, 1) bb.msg.note(1, bb.msg.domain.Fetcher, "fetch " + uri) fetchcmd = fetchcmd.replace("${URI}", uri) fetchcmd = fetchcmd.replace("${FILE}", ud.basename) bb.msg.debug(2, bb.msg.domain.Fetcher, "executing " + fetchcmd) ret = os.system(fetchcmd) if ret != 0: return False # check if sourceforge did send us to the mirror page if not os.path.exists(ud.localpath): os.system("rm %s*" % ud.localpath) # FIXME shell quote it bb.msg.debug(2, bb.msg.domain.Fetcher, "sourceforge.net send us to the mirror on %s" % ud.basename) return False return True localdata = data.createCopy(d) data.setVar('OVERRIDES', "wget:" + data.getVar('OVERRIDES', localdata), localdata) data.update_data(localdata) premirrors = [ i.split() for i in (data.getVar('PREMIRRORS', localdata, 1) or "").split('\n') if i ] for (find, replace) in premirrors: newuri = uri_replace(uri, find, replace, d) if newuri != uri: if fetch_uri(newuri, ud, localdata): return if fetch_uri(uri, ud, localdata): return # try mirrors mirrors = [ i.split() for i in (data.getVar('MIRRORS', localdata, 1) or "").split('\n') if i ] for (find, replace) in mirrors: newuri = uri_replace(uri, find, replace, d) if newuri != uri: if fetch_uri(newuri, ud, localdata): return raise FetchError(uri)
def collect_bbfiles(self): """Collect all available .bb build files""" parsed, cached, skipped, masked = 0, 0, 0, 0 self.bb_cache = bb.cache.init(self) files = (data.getVar("BBFILES", self.configuration.data, 1) or "").split() data.setVar("BBFILES", " ".join(files), self.configuration.data) if not len(files): files = self.get_bbfiles() if not len(files): bb.msg.error(bb.msg.domain.Collection, "no files to build.") newfiles = [] for f in files: if os.path.isdir(f): dirfiles = self.find_bbfiles(f) if dirfiles: newfiles += dirfiles continue else: globbed = glob.glob(f) if not globbed and os.path.exists(f): globbed = [f] newfiles += globbed bbmask = bb.data.getVar('BBMASK', self.configuration.data, 1) if not bbmask: return (newfiles, 0) try: bbmask_compiled = re.compile(bbmask) except sre_constants.error: bb.msg.fatal(bb.msg.domain.Collection, "BBMASK is not a valid regular expression.") finalfiles = [] for f in newfiles: if bbmask_compiled.search(f): bb.msg.debug(1, bb.msg.domain.Collection, "skipping masked file %s" % f) masked += 1 continue finalfiles.append(f) return (finalfiles, masked)
def parse(cls, filename, appends, configdata): """Parse the specified filename, returning the recipe information""" infos = [] datastores = cls.load_bbfile(filename, appends, configdata) depends = set() for variant, data in sorted(datastores.iteritems(), key=lambda i: i[0], reverse=True): virtualfn = cls.realfn2virtual(filename, variant) depends |= (data.getVar("__depends", False) or set()) if depends and not variant: data.setVar("__depends", depends) info = RecipeInfo.from_metadata(filename, data) infos.append((virtualfn, info)) return infos
def inherit(files, d): __inherit_cache = data.getVar('__inherit_cache', d) or [] fn = "" lineno = 0 files = data.expand(files, d) for file in files: if file[0] != "/" and file[-8:] != ".bbclass": file = os.path.join('classes', '%s.bbclass' % file) if not file in __inherit_cache: bb.msg.debug(2, bb.msg.domain.Parsing, "BB %s:%d: inheriting %s" % (fn, lineno, file)) __inherit_cache.append( file ) data.setVar('__inherit_cache', __inherit_cache, d) include(fn, file, d, "inherit") __inherit_cache = data.getVar('__inherit_cache', d) or []
def remove_task(task, kill, d): """Remove a BB 'task'. If kill is 1, also remove tasks that depend on this task.""" task_graph = data.getVar('_task_graph', d) if not task_graph: task_graph = bb.digraph() data.setVar('_task_graph', task_graph, d) if not task_graph.hasnode(task): return data.delVarFlag(task, 'task', d) ref = 1 if kill == 1: ref = 2 task_graph.delnode(task, ref)
def exec_task(task, d): """Execute an BB 'task' The primary difference between executing a task versus executing a function is that a task exists in the task digraph, and therefore has dependencies amongst other tasks.""" # check if the task is in the graph.. task_graph = data.getVar('_task_graph', d) if not task_graph: task_graph = bb.digraph() data.setVar('_task_graph', task_graph, d) task_cache = data.getVar('_task_cache', d) if not task_cache: task_cache = [] data.setVar('_task_cache', task_cache, d) if not task_graph.hasnode(task): raise EventException("", InvalidTask(task, d)) # check whether this task needs executing.. if not data.getVarFlag(task, 'force', d): if stamp_is_current(task, d): return 1 # follow digraph path up, then execute our way back down def execute(graph, item): if data.getVarFlag(item, 'task', d): if item in task_cache: return 1 if task != item: # deeper than toplevel, exec w/ deps exec_task(item, d) return 1 try: debug(1, "Executing task %s" % item) event.fire(TaskStarted(item, d)) exec_func(item, d) event.fire(TaskSucceeded(item, d)) task_cache.append(item) except FuncFailed, reason: note( "Task failed: %s" % reason ) failedevent = TaskFailed(item, d) event.fire(failedevent) raise EventException(None, failedevent)
def collect_bbfiles( self ): """Collect all available .bb build files""" parsed, cached, skipped, masked = 0, 0, 0, 0 self.bb_cache = bb.cache.init(self) files = (data.getVar( "BBFILES", self.configuration.data, 1 ) or "").split() data.setVar("BBFILES", " ".join(files), self.configuration.data) if not len(files): files = self.get_bbfiles() if not len(files): bb.msg.error(bb.msg.domain.Collection, "no files to build.") newfiles = [] for f in files: if os.path.isdir(f): dirfiles = self.find_bbfiles(f) if dirfiles: newfiles += dirfiles continue else: globbed = glob.glob(f) if not globbed and os.path.exists(f): globbed = [f] newfiles += globbed bbmask = bb.data.getVar('BBMASK', self.configuration.data, 1) if not bbmask: return (newfiles, 0) try: bbmask_compiled = re.compile(bbmask) except sre_constants.error: bb.msg.fatal(bb.msg.domain.Collection, "BBMASK is not a valid regular expression.") finalfiles = [] for f in newfiles: if bbmask_compiled.search(f): bb.msg.debug(1, bb.msg.domain.Collection, "skipping masked file %s" % f) masked += 1 continue finalfiles.append(f) return (finalfiles, masked)
def go(self, uri, ud, d, checkonly=False): """Fetch urls""" def fetch_uri(uri, ud, d): if checkonly: fetchcmd = data.getVar("CHECKCOMMAND", d, 1) elif os.path.exists(ud.localpath): # file exists, but we didn't complete it.. trying again.. fetchcmd = data.getVar("RESUMECOMMAND", d, 1) else: fetchcmd = data.getVar("FETCHCOMMAND", d, 1) uri = uri.split(";")[0] uri_decoded = list(decodeurl(uri)) uri_type = uri_decoded[0] uri_host = uri_decoded[1] fetchcmd = fetchcmd.replace("${URI}", uri.split(";")[0]) fetchcmd = fetchcmd.replace("${FILE}", ud.basename) logger.info("fetch " + uri) logger.debug(2, "executing " + fetchcmd) runfetchcmd(fetchcmd, d) # Sanity check since wget can pretend it succeeded when it didn't # Also, this used to happen if sourceforge sent us to the mirror page if not os.path.exists(ud.localpath) and not checkonly: logger.debug( 2, "The fetch command for %s returned success but %s doesn't exist?...", uri, ud.localpath) return False return True localdata = data.createCopy(d) data.setVar('OVERRIDES', "wget:" + data.getVar('OVERRIDES', localdata), localdata) data.update_data(localdata) if fetch_uri(uri, ud, localdata): return True raise FetchError(uri)
def inherit(files, fn, lineno, d): __inherit_cache = d.getVar('__inherit_cache') or [] files = d.expand(files).split() for file in files: if not os.path.isabs(file) and not file.endswith(".bbclass"): file = os.path.join('classes', '%s.bbclass' % file) if not os.path.isabs(file): dname = os.path.dirname(fn) bbpath = "%s:%s" % (dname, d.getVar("BBPATH", True)) abs_fn = bb.utils.which(bbpath, file) if abs_fn: file = abs_fn if not file in __inherit_cache: logger.log(logging.DEBUG -1, "BB %s:%d: inheriting %s", fn, lineno, file) __inherit_cache.append( file ) data.setVar('__inherit_cache', __inherit_cache, d) include(fn, file, lineno, d, "inherit") __inherit_cache = d.getVar('__inherit_cache') or []
def add_tasks(tasklist, d): task_deps = data.getVar('_task_deps', d) if not task_deps: task_deps = {} if not 'tasks' in task_deps: task_deps['tasks'] = [] if not 'parents' in task_deps: task_deps['parents'] = {} for task in tasklist: task = data.expand(task, d) data.setVarFlag(task, 'task', 1, d) if not task in task_deps['tasks']: task_deps['tasks'].append(task) flags = data.getVarFlags(task, d) def getTask(name): if not name in task_deps: task_deps[name] = {} if name in flags: deptask = data.expand(flags[name], d) task_deps[name][task] = deptask getTask('depends') getTask('rdepends') getTask('deptask') getTask('rdeptask') getTask('recrdeptask') getTask('nostamp') getTask('fakeroot') getTask('noexec') getTask('umask') task_deps['parents'][task] = [] for dep in flags['deps']: dep = data.expand(dep, d) task_deps['parents'][task].append(dep) # don't assume holding a reference data.setVar('_task_deps', task_deps, d)
def load_bbfile(bbfile, appends, config): """ Load and parse one .bb build file Return the data and whether parsing resulted in the file being skipped """ chdir_back = False from bb import data, parse # expand tmpdir to include this topdir data.setVar('TMPDIR', data.getVar('TMPDIR', config, 1) or "", config) bbfile_loc = os.path.abspath(os.path.dirname(bbfile)) oldpath = os.path.abspath(os.getcwd()) parse.cached_mtime_noerror(bbfile_loc) bb_data = data.init_db(config) # The ConfHandler first looks if there is a TOPDIR and if not # then it would call getcwd(). # Previously, we chdir()ed to bbfile_loc, called the handler # and finally chdir()ed back, a couple of thousand times. We now # just fill in TOPDIR to point to bbfile_loc if there is no TOPDIR yet. if not data.getVar('TOPDIR', bb_data): chdir_back = True data.setVar('TOPDIR', bbfile_loc, bb_data) try: if appends: data.setVar('__BBAPPEND', " ".join(appends), bb_data) bb_data = parse.handle(bbfile, bb_data) if chdir_back: os.chdir(oldpath) return bb_data except: if chdir_back: os.chdir(oldpath) raise
def getcset(d, depot,host,user,pswd,parm): if "cset" in parm: return parm["cset"]; if user: data.setVar('P4USER', user, d) if pswd: data.setVar('P4PASSWD', pswd, d) if host: data.setVar('P4PORT', host, d) p4date = data.getVar("P4DATE", d, 1) if "revision" in parm: depot += "#%s" % (parm["revision"]) elif "label" in parm: depot += "@%s" % (parm["label"]) elif p4date: depot += "@%s" % (p4date) p4cmd = data.getVar('FETCHCOMMAND_p4', d, 1) bb.msg.debug(1, bb.msg.domain.Fetcher, "Running %s changes -m 1 %s" % (p4cmd, depot)) p4file = os.popen("%s changes -m 1 %s" % (p4cmd,depot)) cset = p4file.readline().strip() bb.msg.debug(1, bb.msg.domain.Fetcher, "READ %s" % (cset)) if not cset: return -1 return cset.split(' ')[1]
def parse(cls, filename, appends, configdata, caches_array): """Parse the specified filename, returning the recipe information""" infos = [] datastores = cls.load_bbfile(filename, appends, configdata) depends = set() for variant, data in sorted(datastores.iteritems(), key=lambda i: i[0], reverse=True): virtualfn = cls.realfn2virtual(filename, variant) depends |= (data.getVar("__depends", False) or set()) if depends and not variant: data.setVar("__depends", depends) info_array = [] for cache_class in caches_array: if type(cache_class) is type and issubclass( cache_class, RecipeInfoCommon): info = cache_class(filename, data) info_array.append(info) infos.append((virtualfn, info_array)) return infos
def parseConfigurationFiles(self, prefiles, postfiles, mc="default"): data = bb.data.createCopy(self.basedata) data.setVar("BB_CURRENT_MC", mc) # Parse files for loading *before* bitbake.conf and any includes for f in prefiles: data = parse_config_file(f, data) layerconf = self._findLayerConf(data) if layerconf: parselog.debug(2, "Found bblayers.conf (%s)", layerconf) # By definition bblayers.conf is in conf/ of TOPDIR. # We may have been called with cwd somewhere else so reset TOPDIR data.setVar("TOPDIR", os.path.dirname(os.path.dirname(layerconf))) data = parse_config_file(layerconf, data) layers = (data.getVar('BBLAYERS') or "").split() broken_layers = [] data = bb.data.createCopy(data) approved = bb.utils.approved_variables() # Check whether present layer directories exist for layer in layers: if not os.path.isdir(layer): broken_layers.append(layer) if broken_layers: parselog.critical( "The following layer directories do not exist:") for layer in broken_layers: parselog.critical(" %s", layer) parselog.critical("Please check BBLAYERS in %s" % (layerconf)) raise bb.BBHandledException() for layer in layers: parselog.debug(2, "Adding layer %s", layer) if 'HOME' in approved and '~' in layer: layer = os.path.expanduser(layer) if layer.endswith('/'): layer = layer.rstrip('/') data.setVar('LAYERDIR', layer) data.setVar('LAYERDIR_RE', re.escape(layer)) data = parse_config_file( os.path.join(layer, "conf", "layer.conf"), data) data.expandVarref('LAYERDIR') data.expandVarref('LAYERDIR_RE') data.delVar('LAYERDIR_RE') data.delVar('LAYERDIR') bbfiles_dynamic = (data.getVar('BBFILES_DYNAMIC') or "").split() collections = (data.getVar('BBFILE_COLLECTIONS') or "").split() invalid = [] for entry in bbfiles_dynamic: parts = entry.split(":", 1) if len(parts) != 2: invalid.append(entry) continue l, f = parts invert = l[0] == "!" if invert: l = l[1:] if (l in collections and not invert) or (l not in collections and invert): data.appendVar("BBFILES", " " + f) if invalid: bb.fatal( "BBFILES_DYNAMIC entries must be of the form {!}<collection name>:<filename pattern>, not:\n %s" % "\n ".join(invalid)) layerseries = set((data.getVar("LAYERSERIES_CORENAMES") or "").split()) collections_tmp = collections[:] for c in collections: collections_tmp.remove(c) if c in collections_tmp: bb.fatal( "Found duplicated BBFILE_COLLECTIONS '%s', check bblayers.conf or layer.conf to fix it." % c) compat = set((data.getVar("LAYERSERIES_COMPAT_%s" % c) or "").split()) if compat and not (compat & layerseries): bb.fatal( "Layer %s is not compatible with the core layer which only supports these series: %s (layer is compatible with %s)" % (c, " ".join(layerseries), " ".join(compat))) elif not compat and not data.getVar("BB_WORKERCONTEXT"): bb.warn( "Layer %s should set LAYERSERIES_COMPAT_%s in its conf/layer.conf file to list the core layer names it is compatible with." % (c, c)) if not data.getVar("BBPATH"): msg = "The BBPATH variable is not set" if not layerconf: msg += ( " and bitbake did not find a conf/bblayers.conf file in" " the expected location.\nMaybe you accidentally" " invoked bitbake from the wrong directory?") raise SystemExit(msg) data = parse_config_file(os.path.join("conf", "bitbake.conf"), data) # Parse files for loading *after* bitbake.conf and any includes for p in postfiles: data = parse_config_file(p, data) # Handle any INHERITs and inherit the base class bbclasses = ["base"] + (data.getVar('INHERIT') or "").split() for bbclass in bbclasses: data = _inherit(bbclass, data) # Normally we only register event handlers at the end of parsing .bb files # We register any handlers we've found so far here... for var in data.getVar('__BBHANDLERS', False) or []: handlerfn = data.getVarFlag(var, "filename", False) if not handlerfn: parselog.critical("Undefined event handler function '%s'" % var) raise bb.BBHandledException() handlerln = int(data.getVarFlag(var, "lineno", False)) bb.event.register(var, data.getVar(var, False), (data.getVarFlag(var, "eventmask") or "").split(), handlerfn, handlerln) data.setVar('BBINCLUDED', bb.parse.get_file_depends(data)) return data
def parseConfigurationFiles(self, prefiles, postfiles): data = self.data bb.parse.init_parser(data) # Parse files for loading *before* bitbake.conf and any includes for f in prefiles: data = parse_config_file(f, data) layerconf = self._findLayerConf(data) if layerconf: parselog.debug(2, "Found bblayers.conf (%s)", layerconf) # By definition bblayers.conf is in conf/ of TOPDIR. # We may have been called with cwd somewhere else so reset TOPDIR data.setVar("TOPDIR", os.path.dirname(os.path.dirname(layerconf))) data = parse_config_file(layerconf, data) layers = (data.getVar('BBLAYERS', True) or "").split() data = bb.data.createCopy(data) for layer in layers: parselog.debug(2, "Adding layer %s", layer) data.setVar('LAYERDIR', layer) data = parse_config_file( os.path.join(layer, "conf", "layer.conf"), data) data.expandVarref('LAYERDIR') data.delVar('LAYERDIR') if not data.getVar("BBPATH", True): msg = "The BBPATH variable is not set" if not layerconf: msg += ( " and bitbake did not find a conf/bblayers.conf file in" " the expected location.\nMaybe you accidentally" " invoked bitbake from the wrong directory?") raise SystemExit(msg) data = parse_config_file(os.path.join("conf", "bitbake.conf"), data) # Parse files for loading *after* bitbake.conf and any includes for p in postfiles: data = parse_config_file(p, data) # Handle any INHERITs and inherit the base class bbclasses = ["base"] + (data.getVar('INHERIT', True) or "").split() for bbclass in bbclasses: data = _inherit(bbclass, data) # Nomally we only register event handlers at the end of parsing .bb files # We register any handlers we've found so far here... for var in data.getVar('__BBHANDLERS') or []: bb.event.register(var, data.getVar(var), (data.getVarFlag(var, "eventmask", True) or "").split()) if data.getVar("BB_WORKERCONTEXT", False) is None: bb.fetch.fetcher_init(data) bb.codeparser.parser_cache_init(data) bb.event.fire(bb.event.ConfigParsed(), data) if data.getVar("BB_INVALIDCONF") is True: data.setVar("BB_INVALIDCONF", False) self.parseConfigurationFiles(self.prefiles, self.postfiles) return bb.parse.init_parser(data) data.setVar('BBINCLUDED', bb.parse.get_file_depends(data)) self.data = data self.data_hash = data.get_hash()
def go(self, uri, ud, d, checkonly = False): """Fetch urls""" def fetch_uri(uri, ud, d): if checkonly: fetchcmd = data.getVar("CHECKCOMMAND", d, 1) elif os.path.exists(ud.localpath): # file exists, but we didnt complete it.. trying again.. fetchcmd = data.getVar("RESUMECOMMAND", d, 1) else: fetchcmd = data.getVar("FETCHCOMMAND", d, 1) uri = uri.split(";")[0] uri_decoded = list(bb.decodeurl(uri)) uri_type = uri_decoded[0] uri_host = uri_decoded[1] bb.msg.note(1, bb.msg.domain.Fetcher, "fetch " + uri) fetchcmd = fetchcmd.replace("${URI}", uri.split(";")[0]) fetchcmd = fetchcmd.replace("${FILE}", ud.basename) httpproxy = None ftpproxy = None if uri_type == 'http': httpproxy = data.getVar("HTTP_PROXY", d, True) httpproxy_ignore = (data.getVar("HTTP_PROXY_IGNORE", d, True) or "").split() for p in httpproxy_ignore: if uri_host.endswith(p): httpproxy = None break if uri_type == 'ftp': ftpproxy = data.getVar("FTP_PROXY", d, True) ftpproxy_ignore = (data.getVar("HTTP_PROXY_IGNORE", d, True) or "").split() for p in ftpproxy_ignore: if uri_host.endswith(p): ftpproxy = None break if httpproxy: fetchcmd = "http_proxy=" + httpproxy + " " + fetchcmd if ftpproxy: fetchcmd = "ftp_proxy=" + ftpproxy + " " + fetchcmd bb.msg.debug(2, bb.msg.domain.Fetcher, "executing " + fetchcmd) ret = os.system(fetchcmd) if ret != 0: return False # Sanity check since wget can pretend it succeed when it didn't # Also, this used to happen if sourceforge sent us to the mirror page if not os.path.exists(ud.localpath): bb.msg.debug(2, bb.msg.domain.Fetcher, "The fetch command for %s returned success but %s doesn't exist?..." % (uri, ud.localpath)) return False return True localdata = data.createCopy(d) data.setVar('OVERRIDES', "wget:" + data.getVar('OVERRIDES', localdata), localdata) data.update_data(localdata) premirrors = [ i.split() for i in (data.getVar('PREMIRRORS', localdata, 1) or "").split('\n') if i ] for (find, replace) in premirrors: newuri = uri_replace(uri, find, replace, d) if newuri != uri: if fetch_uri(newuri, ud, localdata): return True if fetch_uri(uri, ud, localdata): return True # try mirrors mirrors = [ i.split() for i in (data.getVar('MIRRORS', localdata, 1) or "").split('\n') if i ] for (find, replace) in mirrors: newuri = uri_replace(uri, find, replace, d) if newuri != uri: if fetch_uri(newuri, ud, localdata): return True raise FetchError(uri)
def setVar(self, params): """Set an outer BitBake environment variable""" var, value = params data.setVar(var, value, cooker.configuration.data) print("OK")
def parseConfigurationFiles(self, prefiles, postfiles): data = bb.data.createCopy(self.basedata) # Parse files for loading *before* bitbake.conf and any includes for f in prefiles: data = parse_config_file(f, data) layerconf = self._findLayerConf(data) if layerconf: parselog.debug(2, "Found bblayers.conf (%s)", layerconf) # By definition bblayers.conf is in conf/ of TOPDIR. # We may have been called with cwd somewhere else so reset TOPDIR data.setVar("TOPDIR", os.path.dirname(os.path.dirname(layerconf))) data = parse_config_file(layerconf, data) layers = (data.getVar('BBLAYERS', True) or "").split() data = bb.data.createCopy(data) approved = bb.utils.approved_variables() for layer in layers: if not os.path.isdir(layer): parselog.critical("Layer directory '%s' does not exist! " "Please check BBLAYERS in %s" % (layer, layerconf)) sys.exit(1) parselog.debug(2, "Adding layer %s", layer) if 'HOME' in approved and '~' in layer: layer = os.path.expanduser(layer) if layer.endswith('/'): layer = layer.rstrip('/') data.setVar('LAYERDIR', layer) data.setVar('LAYERDIR_RE', re.escape(layer)) data = parse_config_file( os.path.join(layer, "conf", "layer.conf"), data) data.expandVarref('LAYERDIR') data.expandVarref('LAYERDIR_RE') data.delVar('LAYERDIR_RE') data.delVar('LAYERDIR') if not data.getVar("BBPATH", True): msg = "The BBPATH variable is not set" if not layerconf: msg += ( " and bitbake did not find a conf/bblayers.conf file in" " the expected location.\nMaybe you accidentally" " invoked bitbake from the wrong directory?") raise SystemExit(msg) data = parse_config_file(os.path.join("conf", "bitbake.conf"), data) # Parse files for loading *after* bitbake.conf and any includes for p in postfiles: data = parse_config_file(p, data) # Handle any INHERITs and inherit the base class bbclasses = ["base"] + (data.getVar('INHERIT', True) or "").split() for bbclass in bbclasses: data = _inherit(bbclass, data) # Nomally we only register event handlers at the end of parsing .bb files # We register any handlers we've found so far here... for var in data.getVar('__BBHANDLERS', False) or []: handlerfn = data.getVarFlag(var, "filename", False) if not handlerfn: parselog.critical("Undefined event handler function '%s'" % var) sys.exit(1) handlerln = int(data.getVarFlag(var, "lineno", False)) bb.event.register(var, data.getVar(var, False), (data.getVarFlag(var, "eventmask", True) or "").split(), handlerfn, handlerln) data.setVar('BBINCLUDED', bb.parse.get_file_depends(data)) return data
def go(self, loc, ud, d): # try to use the tarball stash if not self.forcefetch(loc, ud, d) and Fetch.try_mirror(d, ud.localfile): bb.msg.debug(1, bb.msg.domain.Fetcher, "%s already exists or was mirrored, skipping cvs checkout." % ud.localpath) return method = "pserver" if "method" in ud.parm: method = ud.parm["method"] localdir = ud.module if "localdir" in ud.parm: localdir = ud.parm["localdir"] cvs_port = "" if "port" in ud.parm: cvs_port = ud.parm["port"] cvs_rsh = None if method == "ext": if "rsh" in ud.parm: cvs_rsh = ud.parm["rsh"] if method == "dir": cvsroot = ud.path else: cvsroot = ":" + method cvsproxyhost = data.getVar('CVS_PROXY_HOST', d, True) if cvsproxyhost: cvsroot += ";proxy=" + cvsproxyhost cvsproxyport = data.getVar('CVS_PROXY_PORT', d, True) if cvsproxyport: cvsroot += ";proxyport=" + cvsproxyport cvsroot += ":" + ud.user if ud.pswd: cvsroot += ":" + ud.pswd cvsroot += "@" + ud.host + ":" + cvs_port + ud.path options = [] if 'norecurse' in ud.parm: options.append("-l") if ud.date: # treat YYYYMMDDHHMM specially for CVS if len(ud.date) == 12: options.append("-D \"%s %s:%s UTC\"" % (ud.date[0:8], ud.date[8:10], ud.date[10:12])) else: options.append("-D \"%s UTC\"" % ud.date) if ud.tag: options.append("-r %s" % ud.tag) localdata = data.createCopy(d) data.setVar('OVERRIDES', "cvs:%s" % data.getVar('OVERRIDES', localdata), localdata) data.update_data(localdata) data.setVar('CVSROOT', cvsroot, localdata) data.setVar('CVSCOOPTS', " ".join(options), localdata) data.setVar('CVSMODULE', ud.module, localdata) cvscmd = data.getVar('FETCHCOMMAND', localdata, 1) cvsupdatecmd = data.getVar('UPDATECOMMAND', localdata, 1) if cvs_rsh: cvscmd = "CVS_RSH=\"%s\" %s" % (cvs_rsh, cvscmd) cvsupdatecmd = "CVS_RSH=\"%s\" %s" % (cvs_rsh, cvsupdatecmd) # create module directory bb.msg.debug(2, bb.msg.domain.Fetcher, "Fetch: checking for module directory") pkg = data.expand('${PN}', d) pkgdir = os.path.join(data.expand('${CVSDIR}', localdata), pkg) moddir = os.path.join(pkgdir,localdir) if os.access(os.path.join(moddir,'CVS'), os.R_OK): bb.msg.note(1, bb.msg.domain.Fetcher, "Update " + loc) # update sources there os.chdir(moddir) myret = os.system(cvsupdatecmd) else: bb.msg.note(1, bb.msg.domain.Fetcher, "Fetch " + loc) # check out sources there bb.mkdirhier(pkgdir) os.chdir(pkgdir) bb.msg.debug(1, bb.msg.domain.Fetcher, "Running %s" % cvscmd) myret = os.system(cvscmd) if myret != 0 or not os.access(moddir, os.R_OK): try: os.rmdir(moddir) except OSError: pass raise FetchError(ud.module) # tar them up to a defined filename if 'fullpath' in ud.parm: os.chdir(pkgdir) myret = os.system("tar -czf %s %s" % (ud.localpath, localdir)) else: os.chdir(moddir) os.chdir('..') myret = os.system("tar -czf %s %s" % (ud.localpath, os.path.basename(moddir))) if myret != 0: try: os.unlink(ud.localpath) except OSError: pass raise FetchError(ud.module)
def download(self, ud, d): """ Fetch urls """ (host, depot, user, pswd, parm) = Perforce.doparse(ud.url, d) if depot.find('/...') != -1: path = depot[:depot.find('/...')] else: path = depot module = parm.get('module', os.path.basename(path)) localdata = data.createCopy(d) data.setVar('OVERRIDES', "p4:%s" % data.getVar('OVERRIDES', localdata), localdata) data.update_data(localdata) # Get the p4 command p4opt = "" if user: p4opt += " -u %s" % (user) if pswd: p4opt += " -P %s" % (pswd) if host: p4opt += " -p %s" % (host) p4cmd = data.getVar('FETCHCOMMAND', localdata, True) # create temp directory logger.debug(2, "Fetch: creating temporary directory") bb.utils.mkdirhier(data.expand('${WORKDIR}', localdata)) data.setVar('TMPBASE', data.expand('${WORKDIR}/oep4.XXXXXX', localdata), localdata) tmpfile, errors = bb.process.run(data.getVar('MKTEMPDIRCMD', localdata, True) or "false") tmpfile = tmpfile.strip() if not tmpfile: raise FetchError("Fetch: unable to create temporary directory.. make sure 'mktemp' is in the PATH.", ud.url) if "label" in parm: depot = "%s@%s" % (depot, parm["label"]) else: cset = Perforce.getcset(d, depot, host, user, pswd, parm) depot = "%s@%s" % (depot, cset) os.chdir(tmpfile) logger.info("Fetch " + ud.url) logger.info("%s%s files %s", p4cmd, p4opt, depot) p4file, errors = bb.process.run("%s%s files %s" % (p4cmd, p4opt, depot)) p4file = [f.rstrip() for f in p4file.splitlines()] if not p4file: raise FetchError("Fetch: unable to get the P4 files from %s" % depot, ud.url) count = 0 for file in p4file: list = file.split() if list[2] == "delete": continue dest = list[0][len(path)+1:] where = dest.find("#") subprocess.call("%s%s print -o %s/%s %s" % (p4cmd, p4opt, module, dest[:where], list[0]), shell=True) count = count + 1 if count == 0: logger.error() raise FetchError("Fetch: No files gathered from the P4 fetch", ud.url) runfetchcmd("tar -czf %s %s" % (ud.localpath, module), d, cleanup = [ud.localpath]) # cleanup bb.utils.prunedir(tmpfile)
def download(self, loc, ud, d): method = ud.parm.get('method', 'pserver') localdir = ud.parm.get('localdir', ud.module) cvs_port = ud.parm.get('port', '') cvs_rsh = None if method == "ext": if "rsh" in ud.parm: cvs_rsh = ud.parm["rsh"] if method == "dir": cvsroot = ud.path else: cvsroot = ":" + method cvsproxyhost = data.getVar('CVS_PROXY_HOST', d, True) if cvsproxyhost: cvsroot += ";proxy=" + cvsproxyhost cvsproxyport = data.getVar('CVS_PROXY_PORT', d, True) if cvsproxyport: cvsroot += ";proxyport=" + cvsproxyport cvsroot += ":" + ud.user if ud.pswd: cvsroot += ":" + ud.pswd cvsroot += "@" + ud.host + ":" + cvs_port + ud.path options = [] if 'norecurse' in ud.parm: options.append("-l") if ud.date: # treat YYYYMMDDHHMM specially for CVS if len(ud.date) == 12: options.append("-D \"%s %s:%s UTC\"" % (ud.date[0:8], ud.date[8:10], ud.date[10:12])) else: options.append("-D \"%s UTC\"" % ud.date) if ud.tag: options.append("-r %s" % ud.tag) localdata = data.createCopy(d) data.setVar('OVERRIDES', "cvs:%s" % data.getVar('OVERRIDES', localdata), localdata) data.update_data(localdata) data.setVar('CVSROOT', cvsroot, localdata) data.setVar('CVSCOOPTS', " ".join(options), localdata) data.setVar('CVSMODULE', ud.module, localdata) cvscmd = data.getVar('FETCHCOMMAND', localdata, True) cvsupdatecmd = data.getVar('UPDATECOMMAND', localdata, True) if cvs_rsh: cvscmd = "CVS_RSH=\"%s\" %s" % (cvs_rsh, cvscmd) cvsupdatecmd = "CVS_RSH=\"%s\" %s" % (cvs_rsh, cvsupdatecmd) # create module directory logger.debug(2, "Fetch: checking for module directory") pkg = data.expand('${PN}', d) pkgdir = os.path.join(data.expand('${CVSDIR}', localdata), pkg) moddir = os.path.join(pkgdir, localdir) if os.access(os.path.join(moddir, 'CVS'), os.R_OK): logger.info("Update " + loc) bb.fetch2.check_network_access(d, cvsupdatecmd, ud.url) # update sources there os.chdir(moddir) cmd = cvsupdatecmd else: logger.info("Fetch " + loc) # check out sources there bb.utils.mkdirhier(pkgdir) os.chdir(pkgdir) logger.debug(1, "Running %s", cvscmd) bb.fetch2.check_network_access(d, cvscmd, ud.url) cmd = cvscmd runfetchcmd(cmd, d, cleanup = [moddir]) if not os.access(moddir, os.R_OK): raise FetchError("Directory %s was not readable despite sucessful fetch?!" % moddir, ud.url) scmdata = ud.parm.get("scmdata", "") if scmdata == "keep": tar_flags = "" else: tar_flags = "--exclude 'CVS'" # tar them up to a defined filename if 'fullpath' in ud.parm: os.chdir(pkgdir) cmd = "tar %s -czf %s %s" % (tar_flags, ud.localpath, localdir) else: os.chdir(moddir) os.chdir('..') cmd = "tar %s -czf %s %s" % (tar_flags, ud.localpath, os.path.basename(moddir)) runfetchcmd(cmd, d, cleanup = [ud.localpath])
def go(self, uri, ud, d, checkonly=False):
    """Fetch urls"""

    def fetch_uri(uri, ud, d):
        if checkonly:
            fetchcmd = data.getVar("CHECKCOMMAND", d, 1)
        elif os.path.exists(ud.localpath):
            # file exists, but we didn't complete it.. trying again..
            fetchcmd = data.getVar("RESUMECOMMAND", d, 1)
        else:
            fetchcmd = data.getVar("FETCHCOMMAND", d, 1)

        bb.msg.note(1, bb.msg.domain.Fetcher, "fetch " + uri)
        fetchcmd = fetchcmd.replace("${URI}", uri)
        fetchcmd = fetchcmd.replace("${FILE}", ud.basename)
        bb.msg.debug(2, bb.msg.domain.Fetcher, "executing " + fetchcmd)
        ret = os.system(fetchcmd)
        if ret != 0:
            return False

        # Sanity check since wget can pretend it succeeded when it didn't
        # Also, this used to happen if sourceforge sent us to the mirror page
        if not os.path.exists(ud.localpath):
            bb.msg.debug(2, bb.msg.domain.Fetcher, "The fetch command for %s returned success but %s doesn't exist?..." % (uri, ud.localpath))
            return False

        return True

    localdata = data.createCopy(d)
    data.setVar('OVERRIDES', "wget:" + data.getVar('OVERRIDES', localdata), localdata)
    data.update_data(localdata)

    premirrors = [ i.split() for i in (data.getVar('PREMIRRORS', localdata, 1) or "").split('\n') if i ]
    for (find, replace) in premirrors:
        newuri = uri_replace(uri, find, replace, d)
        if newuri != uri:
            if fetch_uri(newuri, ud, localdata):
                return True

    if fetch_uri(uri, ud, localdata):
        return True

    # try mirrors
    mirrors = [ i.split() for i in (data.getVar('MIRRORS', localdata, 1) or "").split('\n') if i ]
    for (find, replace) in mirrors:
        newuri = uri_replace(uri, find, replace, d)
        if newuri != uri:
            if fetch_uri(newuri, ud, localdata):
                return True

    raise FetchError(uri)
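# Illustration only: PREMIRRORS and MIRRORS are parsed above as newline-separated
# "<find-regex> <replacement>" pairs. The value below is a hypothetical example,
# showing how the list comprehension turns it into (find, replace) pairs that are
# handed to uri_replace() in order.
example_premirrors = """\
ftp://.*/.*      http://downloads.example.org/sources/
http://.*/.*     file:///local/source-mirror/"""

pairs = [ i.split() for i in example_premirrors.split('\n') if i ]
# pairs == [['ftp://.*/.*', 'http://downloads.example.org/sources/'],
#           ['http://.*/.*', 'file:///local/source-mirror/']]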
def handle(fn, d, include=0):
    global __func_start_regexp__, __inherit_regexp__, __export_func_regexp__, __addtask_regexp__, __addhandler_regexp__, __infunc__, __body__, __residue__
    __body__ = []
    __infunc__ = ""
    __classname__ = ""
    __residue__ = []

    if include == 0:
        bb.msg.debug(2, bb.msg.domain.Parsing, "BB " + fn + ": handle(data)")
    else:
        bb.msg.debug(2, bb.msg.domain.Parsing, "BB " + fn + ": handle(data, include)")

    (root, ext) = os.path.splitext(os.path.basename(fn))
    base_name = "%s%s" % (root, ext)
    init(d)

    if ext == ".bbclass":
        __classname__ = root
        classes.append(__classname__)
        __inherit_cache = data.getVar('__inherit_cache', d) or []
        if not fn in __inherit_cache:
            __inherit_cache.append(fn)
            data.setVar('__inherit_cache', __inherit_cache, d)

    if include != 0:
        oldfile = data.getVar('FILE', d)
    else:
        oldfile = None

    fn = obtain(fn, d)
    bbpath = (data.getVar('BBPATH', d, 1) or '').split(':')
    if not os.path.isabs(fn):
        f = None
        for p in bbpath:
            j = os.path.join(p, fn)
            if os.access(j, os.R_OK):
                abs_fn = j
                f = open(j, 'r')
                break
        if f is None:
            raise IOError("file %s not found" % fn)
    else:
        f = open(fn, 'r')
        abs_fn = fn

    if ext != ".bbclass":
        dname = os.path.dirname(abs_fn)
        if bbpath[0] != dname:
            bbpath.insert(0, dname)
            data.setVar('BBPATH', ":".join(bbpath), d)

    if include:
        bb.parse.mark_dependency(d, abs_fn)

    if ext != ".bbclass":
        data.setVar('FILE', fn, d)

    lineno = 0
    while 1:
        lineno = lineno + 1
        s = f.readline()
        if not s:
            break
        s = s.rstrip()
        feeder(lineno, s, fn, base_name, d)
    if __inpython__:
        # add a blank line to close out any python definition
        feeder(IN_PYTHON_EOF, "", fn, base_name, d)

    if ext == ".bbclass":
        classes.remove(__classname__)
    else:
        if include == 0:
            data.expandKeys(d)
            data.update_data(d)
            anonqueue = data.getVar("__anonqueue", d, 1) or []
            body = [x['content'] for x in anonqueue]
            flag = {'python': 1, 'func': 1}
            data.setVar("__anonfunc", "\n".join(body), d)
            data.setVarFlags("__anonfunc", flag, d)
            from bb import build
            try:
                t = data.getVar('T', d)
                data.setVar('T', '${TMPDIR}/', d)
                build.exec_func("__anonfunc", d)
                data.delVar('T', d)
                if t:
                    data.setVar('T', t, d)
            except Exception, e:
                bb.msg.debug(1, bb.msg.domain.Parsing, "Exception when executing anonymous function: %s" % e)
                raise
            data.delVar("__anonqueue", d)
            data.delVar("__anonfunc", d)
            set_additional_vars(fn, d, include)
            data.update_data(d)

            all_handlers = {}
            for var in data.getVar('__BBHANDLERS', d) or []:
                # try to add the handler
                handler = data.getVar(var, d)
                bb.event.register(var, handler)

            tasklist = data.getVar('__BBTASKS', d) or []
            bb.build.add_tasks(tasklist, d)

    bbpath.pop(0)
def feeder(lineno, s, fn, root, d):
    global __func_start_regexp__, __inherit_regexp__, __export_func_regexp__, __addtask_regexp__, __addhandler_regexp__, __def_regexp__, __python_func_regexp__, __inpython__, __infunc__, __body__, classes, bb, __residue__

    if __infunc__:
        if s == '}':
            __body__.append('')
            data.setVar(__infunc__, '\n'.join(__body__), d)
            data.setVarFlag(__infunc__, "func", 1, d)
            if __infunc__ == "__anonymous":
                anonqueue = bb.data.getVar("__anonqueue", d) or []
                anonitem = {}
                anonitem["content"] = bb.data.getVar("__anonymous", d)
                anonitem["flags"] = bb.data.getVarFlags("__anonymous", d)
                anonqueue.append(anonitem)
                bb.data.setVar("__anonqueue", anonqueue, d)
                bb.data.delVarFlags("__anonymous", d)
                bb.data.delVar("__anonymous", d)
            __infunc__ = ""
            __body__ = []
        else:
            __body__.append(s)
        return

    if __inpython__:
        m = __python_func_regexp__.match(s)
        if m and lineno != IN_PYTHON_EOF:
            __body__.append(s)
            return
        else:
            # Note we will add root to parsedmethods after having parsed
            # 'this' file. This means we will not parse methods from
            # bb classes twice
            if not root in __parsed_methods__:
                text = '\n'.join(__body__)
                methodpool.insert_method(root, text, fn)
                funcs = data.getVar('__functions__', d) or {}
                if root not in funcs:
                    funcs[root] = text
                else:
                    funcs[root] = "%s\n%s" % (funcs[root], text)
                data.setVar('__functions__', funcs, d)
            __body__ = []
            __inpython__ = False
            if lineno == IN_PYTHON_EOF:
                return

    # fall through

    if s == '' or s[0] == '#':
        return          # skip comments and empty lines

    if s[-1] == '\\':
        __residue__.append(s[:-1])
        return

    s = "".join(__residue__) + s
    __residue__ = []

    m = __func_start_regexp__.match(s)
    if m:
        __infunc__ = m.group("func") or "__anonymous"
        key = __infunc__
        if data.getVar(key, d):
            # clean up old version of this piece of metadata, as its
            # flags could cause problems
            data.setVarFlag(key, 'python', None, d)
            data.setVarFlag(key, 'fakeroot', None, d)
        if m.group("py") is not None:
            data.setVarFlag(key, "python", "1", d)
        else:
            data.delVarFlag(key, "python", d)
        if m.group("fr") is not None:
            data.setVarFlag(key, "fakeroot", "1", d)
        else:
            data.delVarFlag(key, "fakeroot", d)
        return

    m = __def_regexp__.match(s)
    if m:
        __body__.append(s)
        __inpython__ = True
        return

    m = __export_func_regexp__.match(s)
    if m:
        fns = m.group(1)
        n = __word__.findall(fns)
        for f in n:
            allvars = []
            allvars.append(f)
            allvars.append(classes[-1] + "_" + f)

            vars = [[ allvars[0], allvars[1] ]]
            if len(classes) > 1 and classes[-2] is not None:
                allvars.append(classes[-2] + "_" + f)
                vars = []
                vars.append([allvars[2], allvars[1]])
                vars.append([allvars[0], allvars[2]])

            for (var, calledvar) in vars:
                if data.getVar(var, d) and not data.getVarFlag(var, 'export_func', d):
                    continue

                if data.getVar(var, d):
                    data.setVarFlag(var, 'python', None, d)
                    data.setVarFlag(var, 'func', None, d)

                for flag in ["func", "python"]:
                    if data.getVarFlag(calledvar, flag, d):
                        data.setVarFlag(var, flag, data.getVarFlag(calledvar, flag, d), d)
                for flag in ["dirs"]:
                    if data.getVarFlag(var, flag, d):
                        data.setVarFlag(calledvar, flag, data.getVarFlag(var, flag, d), d)

                if data.getVarFlag(calledvar, "python", d):
                    data.setVar(var, "\tbb.build.exec_func('" + calledvar + "', d)\n", d)
                else:
                    data.setVar(var, "\t" + calledvar + "\n", d)
                data.setVarFlag(var, 'export_func', '1', d)

        return

    m = __addtask_regexp__.match(s)
    if m:
        func = m.group("func")
        before = m.group("before")
        after = m.group("after")
        if func is None:
            return
        var = "do_" + func

        data.setVarFlag(var, "task", 1, d)

        bbtasks = data.getVar('__BBTASKS', d) or []
        if not var in bbtasks:
            bbtasks.append(var)
        data.setVar('__BBTASKS', bbtasks, d)

        existing = data.getVarFlag(var, "deps", d) or []
        if after is not None:
            # set up deps for function
            for entry in after.split():
                if entry not in existing:
                    existing.append(entry)
        data.setVarFlag(var, "deps", existing, d)
        if before is not None:
            # set up things that depend on this func
            for entry in before.split():
                existing = data.getVarFlag(entry, "deps", d) or []
                if var not in existing:
                    data.setVarFlag(entry, "deps", [var] + existing, d)
        return

    m = __addhandler_regexp__.match(s)
    if m:
        fns = m.group(1)
        hs = __word__.findall(fns)
        bbhands = data.getVar('__BBHANDLERS', d) or []
        for h in hs:
            bbhands.append(h)
            data.setVarFlag(h, "handler", 1, d)
        data.setVar('__BBHANDLERS', bbhands, d)
        return

    m = __inherit_regexp__.match(s)
    if m:
        files = m.group(1)
        n = __word__.findall(files)
        inherit(n, d)
        return

    from bb.parse import ConfHandler
    return ConfHandler.feeder(lineno, s, fn, d)
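# A standalone sketch of the dependency bookkeeping in the addtask branch above,
# using a plain dict instead of the datastore; the input line
# 'addtask compile after do_configure before do_install' and the helper name are
# hypothetical and only illustrate the resulting "deps" relationships.
def _record_addtask(func, after, before, deps):
    var = "do_" + func
    existing = deps.get(var, [])
    if after:
        # the new task depends on everything listed after 'after'
        for entry in after.split():
            if entry not in existing:
                existing.append(entry)
    deps[var] = existing
    if before:
        # everything listed after 'before' gains a dependency on the new task
        for entry in before.split():
            deps[entry] = [var] + deps.get(entry, [])
    return deps

# _record_addtask("compile", "do_configure", "do_install", {})
# -> {'do_compile': ['do_configure'], 'do_install': ['do_compile']}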
def add_tasks(tasklist, d):
    task_deps = data.getVar('_task_deps', d)
    if not task_deps:
        task_deps = {}
    if not 'tasks' in task_deps:
        task_deps['tasks'] = []
    if not 'parents' in task_deps:
        task_deps['parents'] = {}

    for task in tasklist:
        task = data.expand(task, d)
        data.setVarFlag(task, 'task', 1, d)

        if not task in task_deps['tasks']:
            task_deps['tasks'].append(task)

        flags = data.getVarFlags(task, d)
        def getTask(name):
            if not name in task_deps:
                task_deps[name] = {}
            if name in flags:
                deptask = data.expand(flags[name], d)
                task_deps[name][task] = deptask
        getTask('depends')
        getTask('deptask')
        getTask('rdeptask')
        getTask('recrdeptask')
        getTask('nostamp')
        task_deps['parents'][task] = []
        for dep in flags['deps']:
            dep = data.expand(dep, d)
            task_deps['parents'][task].append(dep)

    # don't assume holding a reference
    data.setVar('_task_deps', task_deps, d)
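# Illustration only: after running add_tasks() above on two hypothetical tasks,
# '_task_deps' holds a structure shaped roughly like this. A sub-dict is created
# for every recognised flag name, and only populated for tasks that actually set
# that flag; the 'do_populate_staging' value is a made-up deptask example.
example_task_deps = {
    'tasks':       ['do_configure', 'do_compile'],
    'parents':     {'do_configure': [], 'do_compile': ['do_configure']},
    'depends':     {},
    'deptask':     {'do_compile': 'do_populate_staging'},
    'rdeptask':    {},
    'recrdeptask': {},
    'nostamp':     {},
}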