def exec_task(fn, task, d):
    """Execute a BB 'task'

    Execution of a task involves a bit more setup than executing a function,
    running it with its own local metadata, and with some useful variables set.
    """
    # Check whether this is a valid task
    if not data.getVarFlag(task, 'task', d):
        raise InvalidTask(task, d)

    try:
        logger.debug(1, "Executing task %s", task)
        old_overrides = data.getVar('OVERRIDES', d, 0)
        localdata = data.createCopy(d)
        data.setVar('OVERRIDES', 'task-%s:%s' % (task[3:], old_overrides), localdata)
        data.update_data(localdata)
        data.expandKeys(localdata)
        data.setVar('BB_FILENAME', fn, d)
        data.setVar('BB_CURRENTTASK', task[3:], d)
        event.fire(TaskStarted(task, localdata), localdata)
        exec_func(task, localdata)
        event.fire(TaskSucceeded(task, localdata), localdata)
    except FuncFailed as exc:
        event.fire(TaskFailed(exc.name, exc.logfile, localdata), localdata)
        raise

    # make stamp, or cause event and raise exception
    if not data.getVarFlag(task, 'nostamp', d) and not data.getVarFlag(task, 'selfstamp', d):
        make_stamp(task, d)
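# Standalone sketch (not from the BitBake sources): exec_task above adds a
# "task-<name>" entry to OVERRIDES before re-expanding the metadata, which is what
# lets VAR_task-<name> style conditional assignments take effect for that task.
# Reduced to plain dictionaries (and ignoring the precedence rules between
# multiple overrides), the idea is roughly:

def apply_overrides(variables, active_overrides):
    """Return a copy of variables with VAR_<override> variants folded into VAR."""
    result = dict(variables)
    for name, value in variables.items():
        base, sep, override = name.rpartition("_")
        if sep and override in active_overrides:
            result[base] = value
    return result

base = {"CFLAGS": "-O2", "CFLAGS_task-compile": "-O0 -g"}
print(apply_overrides(base, ["task-compile"]))   # {'CFLAGS': '-O0 -g', ...}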
def exec_func(func, d, dirs = None):
    """Execute a BB 'function'"""
    body = data.getVar(func, d)
    if not body:
        return

    if not dirs:
        dirs = (data.getVarFlag(func, 'dirs', d) or "").split()
    for adir in dirs:
        adir = data.expand(adir, d)
        mkdirhier(adir)

    if len(dirs) > 0:
        adir = dirs[-1]
    else:
        adir = data.getVar('B', d, 1)

    adir = data.expand(adir, d)

    try:
        prevdir = os.getcwd()
    except OSError:
        prevdir = data.expand('${TOPDIR}', d)
    if adir and os.access(adir, os.F_OK):
        os.chdir(adir)

    if data.getVarFlag(func, "python", d):
        exec_func_python(func, d)
    else:
        exec_func_shell(func, d)

    os.chdir(prevdir)
def exec_func_shell(func, d):
    """Execute a shell BB 'function'

    Returns true if execution was successful. For this, it creates a bash shell
    script in the tmp directory, writes the local data into it and finally
    executes it. The output of the shell will end up in a log file and on stdout.

    Note on directory behavior: the 'dirs' varflag should contain a list of the
    directories you need created prior to execution. The last item in the list
    is where we will chdir/cd to.
    """
    import sys

    deps = data.getVarFlag(func, 'deps', d)
    check = data.getVarFlag(func, 'check', d)
    interact = data.getVarFlag(func, 'interactive', d)
    if check in globals():
        if globals()[check](func, deps):
            return

    global logfile
    t = data.getVar('T', d, 1)
    if not t:
        return 0
    mkdirhier(t)
    logfile = "%s/log.%s.%s" % (t, func, str(os.getpid()))
    runfile = "%s/run.%s.%s" % (t, func, str(os.getpid()))

    f = open(runfile, "w")
    f.write("#!/bin/sh -e\n")
    if bb.msg.debug_level['default'] > 0:
        f.write("set -x\n")
    data.emit_env(f, d)

    f.write("cd %s\n" % os.getcwd())
    if func:
        f.write("%s\n" % func)
    f.close()
    os.chmod(runfile, 0775)
    if not func:
        bb.msg.error(bb.msg.domain.Build, "Function not specified")
        raise FuncFailed()

    # open logs
    si = file('/dev/null', 'r')
    try:
        if bb.msg.debug_level['default'] > 0:
            so = os.popen("tee \"%s\"" % logfile, "w")
        else:
            so = file(logfile, 'w')
    except OSError, e:
        bb.msg.error(bb.msg.domain.Build, "opening log file: %s" % e)
        pass
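# Illustrative sketch only: the 'dirs' varflag described in the docstring above is
# just a whitespace-separated list attached to the function; every entry is
# created and the last one becomes the working directory. In isolation that
# behaviour is roughly:

import os

def prepare_dirs(dirs_flag):
    """Create each directory named in the flag and chdir into the last one."""
    dirs = (dirs_flag or "").split()
    for adir in dirs:
        if not os.path.isdir(adir):
            os.makedirs(adir)          # stands in for bb's mkdirhier()
    if dirs:
        os.chdir(dirs[-1])

# e.g. with a hypothetical flag value: prepare_dirs("build/tmp build/tmp/work")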
def exec_task(task, d):
    """Execute a BB 'task'

    The primary difference between executing a task versus executing a function
    is that a task exists in the task digraph, and therefore has dependencies
    amongst other tasks.
    """
    # Check whether this is a valid task
    if not data.getVarFlag(task, 'task', d):
        raise EventException("No such task", InvalidTask(task, d))

    try:
        bb.msg.debug(1, bb.msg.domain.Build, "Executing task %s" % task)
        old_overrides = data.getVar('OVERRIDES', d, 0)
        localdata = data.createCopy(d)
        data.setVar('OVERRIDES', 'task-%s:%s' % (task[3:], old_overrides), localdata)
        data.update_data(localdata)
        data.expandKeys(localdata)
        event.fire(TaskStarted(task, localdata), localdata)
        exec_func(task, localdata)
        event.fire(TaskSucceeded(task, localdata), localdata)
    except FuncFailed, message:
        # Try to extract the optional logfile
        try:
            (msg, logfile) = message
        except:
            logfile = None
            msg = message
        bb.msg.note(1, bb.msg.domain.Build, "Task failed: %s" % message)
        failedevent = TaskFailed(msg, logfile, task, d)
        event.fire(failedevent, d)
        raise EventException("Function failed in task: %s" % message, failedevent)
def getTask(name):
    deptask = data.getVarFlag(task, name, d)
    if deptask:
        deptask = data.expand(deptask, d)
        if not name in task_deps:
            task_deps[name] = {}
        task_deps[name][task] = deptask
def execute(graph, item):
    if data.getVarFlag(item, 'task', d):
        if item in task_cache:
            return 1

        if task != item:
            # deeper than toplevel, exec w/ deps
            exec_task(item, d)
            return 1

        try:
            bb.msg.debug(1, bb.msg.domain.Build, "Executing task %s" % item)
            old_overrides = data.getVar('OVERRIDES', d, 0)
            localdata = data.createCopy(d)
            data.setVar('OVERRIDES', 'task_%s:%s' % (item, old_overrides), localdata)
            data.update_data(localdata)
            event.fire(TaskStarted(item, localdata))
            exec_func(item, localdata)
            event.fire(TaskSucceeded(item, localdata))
            task_cache.append(item)
            data.setVar('_task_cache', task_cache, d)
        except FuncFailed, reason:
            bb.msg.note(1, bb.msg.domain.Build, "Task failed: %s" % reason)
            failedevent = TaskFailed(item, d)
            event.fire(failedevent)
            raise EventException("Function failed in task: %s" % reason, failedevent)
def execute(graph, item):
    if data.getVarFlag(item, 'task', d):
        if item in task_cache:
            return 1

        if task != item:
            # deeper than toplevel, exec w/ deps
            exec_task(item, d)
            return 1

        try:
            debug(1, "Executing task %s" % item)
            old_overrides = data.getVar('OVERRIDES', d, 0)
            from copy import deepcopy
            localdata = deepcopy(d)
            data.setVar('OVERRIDES', 'task_%s:%s' % (item, old_overrides), localdata)
            data.update_data(localdata)
            event.fire(TaskStarted(item, localdata))
            exec_func(item, localdata)
            event.fire(TaskSucceeded(item, localdata))
            task_cache.append(item)
        except FuncFailed, reason:
            note("Task failed: %s" % reason)
            failedevent = TaskFailed(item, d)
            event.fire(failedevent)
            raise EventException(None, failedevent)
def testVarFlag(self):
    # import the data modules
    from bb import data
    from bb import data_smart

    d = data_smart.DataSmart()
    data.setVarFlag('TEST', 'testflag', 1, d)
    self.assertEquals(data.getVarFlag('TEST', 'testflag', d), 1)
    data.delVarFlag('TEST', 'testflag', d)
    self.assertEquals(data.getVarFlag('TEST', 'testflag', d), None)

    # test that changing a variable is working as well
    data.setVarFlag('TEST', 'testflag', 1, d)
    self.assertEquals(data.getVarFlag('TEST', 'testflag', d), 1)
    data.setVarFlag('TEST', 'testflag', 2, d)
    self.assertEquals(data.getVarFlag('TEST', 'testflag', d), 2)
def exec_task(task, d):
    """Execute a BB 'task'

    The primary difference between executing a task versus executing a function
    is that a task exists in the task digraph, and therefore has dependencies
    amongst other tasks.
    """
    # Check whether this is a valid task
    if not data.getVarFlag(task, 'task', d):
        raise EventException("No such task", InvalidTask(task, d))

    try:
        bb.msg.debug(1, bb.msg.domain.Build, "Executing task %s" % task)
        old_overrides = data.getVar('OVERRIDES', d, 0)
        localdata = data.createCopy(d)
        data.setVar('OVERRIDES', 'task-%s:%s' % (task[3:], old_overrides), localdata)
        data.update_data(localdata)
        data.expandKeys(localdata)
        event.fire(TaskStarted(task, localdata))
        exec_func(task, localdata)
        event.fire(TaskSucceeded(task, localdata))
    except FuncFailed, message:
        # Try to extract the optional logfile
        try:
            (msg, logfile) = message
        except:
            logfile = None
            msg = message
        bb.msg.note(1, bb.msg.domain.Build, "Task failed: %s" % message)
        failedevent = TaskFailed(msg, logfile, task, d)
        event.fire(failedevent)
        raise EventException("Function failed in task: %s" % message, failedevent)
def exec_func(func, d, dirs = None):
    """Execute a BB 'function'"""
    body = data.getVar(func, d)
    if not body:
        return

    cleandirs = (data.expand(data.getVarFlag(func, 'cleandirs', d), d) or "").split()
    for cdir in cleandirs:
        os.system("rm -rf %s" % cdir)

    if not dirs:
        dirs = (data.expand(data.getVarFlag(func, 'dirs', d), d) or "").split()
    for adir in dirs:
        mkdirhier(adir)

    if len(dirs) > 0:
        adir = dirs[-1]
    else:
        adir = data.getVar('B', d, 1)

    adir = data.expand(adir, d)

    try:
        prevdir = os.getcwd()
    except OSError:
        prevdir = data.expand('${TOPDIR}', d)
    if adir and os.access(adir, os.F_OK):
        os.chdir(adir)

    locks = []
    lockfiles = (data.expand(data.getVarFlag(func, 'lockfiles', d), d) or "").split()
    for lock in lockfiles:
        locks.append(bb.utils.lockfile(lock))

    if data.getVarFlag(func, "python", d):
        exec_func_python(func, d)
    else:
        exec_func_shell(func, d)

    for lock in locks:
        bb.utils.unlockfile(lock)

    if os.path.exists(prevdir):
        os.chdir(prevdir)
def exec_task(fn, task, d):
    """Execute a BB 'task'

    Execution of a task involves a bit more setup than executing a function,
    running it with its own local metadata, and with some useful variables set.
    """
    if not data.getVarFlag(task, 'task', d):
        raise InvalidTask(task, d)

    logger.debug(1, "Executing task %s", task)

    localdata = _task_data(fn, task, d)
    tempdir = localdata.getVar('T', True)
    if not tempdir:
        bb.fatal("T variable not set, unable to build")

    bb.utils.mkdirhier(tempdir)
    loglink = os.path.join(tempdir, 'log.{0}'.format(task))
    logfn = os.path.join(tempdir, 'log.{0}.{1}'.format(task, os.getpid()))
    if loglink:
        bb.utils.remove(loglink)

        try:
            os.symlink(logfn, loglink)
        except OSError:
            pass

    prefuncs = localdata.getVarFlag(task, 'prefuncs', expand=True)
    postfuncs = localdata.getVarFlag(task, 'postfuncs', expand=True)

    logfile = open(logfn, 'w')

    event.fire(TaskStarted(task, localdata), localdata)
    try:
        for func in (prefuncs or '').split():
            exec_func(func, localdata, logfile=logfile)
        exec_func(task, localdata, logfile=logfile)
        for func in (postfuncs or '').split():
            exec_func(func, localdata, logfile=logfile)
    except FuncFailed as exc:
        event.fire(TaskFailed(exc.name, exc.logfile, localdata), localdata)
        raise
    finally:
        logfile.close()
        if os.path.exists(logfn) and os.path.getsize(logfn) == 0:
            logger.debug(2, "Zero size logfn %s, removing", logfn)
            bb.utils.remove(logfn)
            bb.utils.remove(loglink)

    event.fire(TaskSucceeded(task, localdata), localdata)

    if not localdata.getVarFlag(task, 'nostamp') and not localdata.getVarFlag(task, 'selfstamp'):
        make_stamp(task, localdata)
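# Sketch, not from the sources: exec_task above runs whatever is listed in the
# 'prefuncs' and 'postfuncs' varflags around the task itself.  The ordering
# logic, stripped of the metadata object and logging, is essentially:

def run_with_hooks(task, functions, prefuncs="", postfuncs=""):
    """Run prefuncs, then the task, then postfuncs; names are whitespace-separated."""
    for name in (prefuncs or "").split():
        functions[name]()
    functions[task]()
    for name in (postfuncs or "").split():
        functions[name]()

# Example with hypothetical function names:
funcs = {"do_compile": lambda: print("compile"),
         "fetch_extra": lambda: print("pre"),
         "report": lambda: print("post")}
run_with_hooks("do_compile", funcs, prefuncs="fetch_extra", postfuncs="report")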
def checkStamp(graph, task):
    # check for existence
    if data.getVarFlag(task, 'nostamp', d):
        return 1

    if not stamp_is_current(task, d, 0):
        return 0

    depfile = "%s.%s" % (data.expand(stamp, d), task)
    deptime = os.stat(depfile)[stat.ST_MTIME]
    if deptime > tasktime:
        return 0
    return 1
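# Standalone sketch of the timestamp comparison in checkStamp above: a
# dependency's stamp must not be newer than the task's own stamp time, otherwise
# the task is treated as out of date.  File names are illustrative only.

import os, stat

def dep_is_older(dep_stamp, task_stamp):
    """True if the dependency stamp is not newer than the task stamp."""
    deptime = os.stat(dep_stamp)[stat.ST_MTIME]
    tasktime = os.stat(task_stamp)[stat.ST_MTIME]
    return deptime <= tasktime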
def exec_task(task, d):
    """Execute a BB 'task'

    The primary difference between executing a task versus executing a function
    is that a task exists in the task digraph, and therefore has dependencies
    amongst other tasks.
    """
    # check if the task is in the graph..
    task_graph = data.getVar('_task_graph', d)
    if not task_graph:
        task_graph = bb.digraph()
        data.setVar('_task_graph', task_graph, d)
    task_cache = data.getVar('_task_cache', d)
    if not task_cache:
        task_cache = []
        data.setVar('_task_cache', task_cache, d)
    if not task_graph.hasnode(task):
        raise EventException("Missing node in task graph", InvalidTask(task, d))

    # check whether this task needs executing..
    if not data.getVarFlag(task, 'force', d):
        if stamp_is_current(task, d):
            return 1

    # follow digraph path up, then execute our way back down
    def execute(graph, item):
        if data.getVarFlag(item, 'task', d):
            if item in task_cache:
                return 1

            if task != item:
                # deeper than toplevel, exec w/ deps
                exec_task(item, d)
                return 1

            try:
                bb.msg.debug(1, bb.msg.domain.Build, "Executing task %s" % item)
                old_overrides = data.getVar('OVERRIDES', d, 0)
                localdata = data.createCopy(d)
                data.setVar('OVERRIDES', 'task_%s:%s' % (item, old_overrides), localdata)
                data.update_data(localdata)
                event.fire(TaskStarted(item, localdata))
                exec_func(item, localdata)
                event.fire(TaskSucceeded(item, localdata))
                task_cache.append(item)
                data.setVar('_task_cache', task_cache, d)
            except FuncFailed, reason:
                note("Task failed: %s" % reason)
                failedevent = TaskFailed(item, d)
                event.fire(failedevent)
                raise EventException("Function failed in task: %s" % reason, failedevent)
def exec_task(task, d):
    """Execute a BB 'task'

    The primary difference between executing a task versus executing a function
    is that a task exists in the task digraph, and therefore has dependencies
    amongst other tasks.
    """
    # check if the task is in the graph..
    task_graph = data.getVar('_task_graph', d)
    if not task_graph:
        task_graph = bb.digraph()
        data.setVar('_task_graph', task_graph, d)
    task_cache = data.getVar('_task_cache', d)
    if not task_cache:
        task_cache = []
        data.setVar('_task_cache', task_cache, d)
    if not task_graph.hasnode(task):
        raise EventException("", InvalidTask(task, d))

    # check whether this task needs executing..
    if not data.getVarFlag(task, 'force', d):
        if stamp_is_current(task, d):
            return 1

    # follow digraph path up, then execute our way back down
    def execute(graph, item):
        if data.getVarFlag(item, 'task', d):
            if item in task_cache:
                return 1

            if task != item:
                # deeper than toplevel, exec w/ deps
                exec_task(item, d)
                return 1

            try:
                debug(1, "Executing task %s" % item)
                event.fire(TaskStarted(item, d))
                exec_func(item, d)
                event.fire(TaskSucceeded(item, d))
                task_cache.append(item)
            except FuncFailed, reason:
                note("Task failed: %s" % reason)
                failedevent = TaskFailed(item, d)
                event.fire(failedevent)
                raise EventException(None, failedevent)
def checkStamp(graph, task):
    # check for existence
    if file_name:
        if 'nostamp' in taskdep and task in taskdep['nostamp']:
            return 1
    else:
        if data.getVarFlag(task, 'nostamp', d):
            return 1

    if not stamp_is_current(task, d, file_name, 0):
        return 0

    depfile = "%s.%s" % (stampfn, task)
    deptime = os.stat(depfile)[stat.ST_MTIME]
    if deptime > tasktime:
        return 0
    return 1
def execute(graph, item):
    if data.getVarFlag(item, 'task', d):
        if item in task_cache:
            return 1

        if task != item:
            # deeper than toplevel, exec w/ deps
            exec_task(item, d)
            return 1

        try:
            debug(1, "Executing task %s" % item)
            event.fire(TaskStarted(item, d))
            exec_func(item, d)
            event.fire(TaskSucceeded(item, d))
            task_cache.append(item)
        except FuncFailed, reason:
            note("Task failed: %s" % reason)
            failedevent = TaskFailed(item, d)
            event.fire(failedevent)
            raise EventException(None, failedevent)
            envdata = self.bb_cache.loadDataFull(fn, self.configuration.data)
        except IOError, e:
            bb.msg.fatal(bb.msg.domain.Parsing, "Unable to read %s: %s" % (fn, e))
        except Exception, e:
            bb.msg.fatal(bb.msg.domain.Parsing, "%s" % e)

        # emit variables and shell functions
        try:
            data.update_data(envdata)
            data.emit_env(sys.__stdout__, envdata, True)
        except Exception, e:
            bb.msg.fatal(bb.msg.domain.Parsing, "%s" % e)
        # emit the metadata which isn't valid shell
        data.expandKeys(envdata)
        for e in envdata.keys():
            if data.getVarFlag(e, "python", envdata):
                sys.__stdout__.write("\npython %s () {\n%s}\n" % (e, data.getVar(e, envdata, 1)))

    def generateDotGraph(self, pkgs_to_build, ignore_deps):
        """
        Generate a task dependency graph.

        pkgs_to_build A list of packages that needs to be built
        ignore_deps   A list of names where processing of dependencies
                      should be stopped. e.g. dependencies that get
        """

        for dep in ignore_deps:
            self.status.ignored_dependencies.add(dep)

        localdata = data.createCopy(self.configuration.data)
        try:
            self.configuration.data = self.bb_cache.loadDataFull(self.configuration.buildfile, self.configuration.data)
        except IOError, e:
            bb.msg.fatal(bb.msg.domain.Parsing, "Unable to read %s: %s" % (self.configuration.buildfile, e))
        except Exception, e:
            bb.msg.fatal(bb.msg.domain.Parsing, "%s" % e)

        # emit variables and shell functions
        try:
            data.update_data(self.configuration.data)
            data.emit_env(sys.__stdout__, self.configuration.data, True)
        except Exception, e:
            bb.msg.fatal(bb.msg.domain.Parsing, "%s" % e)
        # emit the metadata which isn't valid shell
        data.expandKeys(self.configuration.data)
        for e in self.configuration.data.keys():
            if data.getVarFlag(e, 'python', self.configuration.data):
                sys.__stdout__.write("\npython %s () {\n%s}\n" % (e, data.getVar(e, self.configuration.data, 1)))

    def generateDotGraph(self, pkgs_to_build, ignore_deps):
        """
        Generate a task dependency graph.

        pkgs_to_build A list of packages that needs to be built
        ignore_deps   A list of names where processing of dependencies
                      should be stopped. e.g. dependencies that get
        """

        for dep in ignore_deps:
            self.status.ignored_dependencies.add(dep)

        localdata = data.createCopy(self.configuration.data)
def handle(fn, d, include = 0):
    global __func_start_regexp__, __inherit_regexp__, __export_func_regexp__, __addtask_regexp__, __addhandler_regexp__, __infunc__, __body__, __bbpath_found__, __residue__
    __body__ = []
    __bbpath_found__ = 0
    __infunc__ = ""
    __classname__ = ""
    __residue__ = []

    if include == 0:
        debug(2, "BB " + fn + ": handle(data)")
    else:
        debug(2, "BB " + fn + ": handle(data, include)")

    (root, ext) = os.path.splitext(os.path.basename(fn))
    init(d)

    if ext == ".bbclass":
        __classname__ = root
        classes.append(__classname__)

    if include != 0:
        oldfile = data.getVar('FILE', d)
    else:
        oldfile = None

    fn = obtain(fn, d)
    bbpath = (data.getVar('BBPATH', d, 1) or '').split(':')
    if not os.path.isabs(fn):
        f = None
        for p in bbpath:
            p = data.expand(p, d)
            j = os.path.join(p, fn)
            if os.access(j, os.R_OK):
                abs_fn = j
                f = open(j, 'r')
                break
        if f is None:
            raise IOError("file not found")
    else:
        f = open(fn, 'r')
        abs_fn = fn

    if ext != ".bbclass":
        bbpath.insert(0, os.path.dirname(abs_fn))
        data.setVar('BBPATH', ":".join(bbpath), d)

    if include:
        bb.parse.mark_dependency(d, abs_fn)

    if ext != ".bbclass":
        data.setVar('FILE', fn, d)
        i = (data.getVar("INHERIT", d, 1) or "").split()
        if not "base" in i and __classname__ != "base":
            i[0:0] = ["base"]
        inherit(i, d)

    lineno = 0
    while 1:
        lineno = lineno + 1
        s = f.readline()
        if not s: break
        s = s.rstrip()
        feeder(lineno, s, fn, d)
    if __inpython__:
        # add a blank line to close out any python definition
        feeder(lineno + 1, "", fn, d)
    if ext == ".bbclass":
        classes.remove(__classname__)
    else:
        if include == 0:
            data.expandKeys(d)
            data.update_data(d)
            anonqueue = data.getVar("__anonqueue", d, 1) or []
            for anon in anonqueue:
                data.setVar("__anonfunc", anon["content"], d)
                data.setVarFlags("__anonfunc", anon["flags"], d)
                from bb import build
                try:
                    t = data.getVar('T', d)
                    data.setVar('T', '${TMPDIR}/', d)
                    build.exec_func("__anonfunc", d)
                    data.delVar('T', d)
                    if t:
                        data.setVar('T', t, d)
                except Exception, e:
                    bb.debug(1, "executing anonymous function: %s" % e)
                    raise
            data.delVar("__anonqueue", d)
            data.delVar("__anonfunc", d)
            set_additional_vars(fn, d, include)
            data.update_data(d)

            for var in data.keys(d):
                if data.getVarFlag(var, 'handler', d):
                    bb.event.register(data.getVar(var, d))
                    continue

                if not data.getVarFlag(var, 'task', d):
                    continue

                deps = data.getVarFlag(var, 'deps', d) or []
                postdeps = data.getVarFlag(var, 'postdeps', d) or []
                bb.build.add_task(var, deps, d)
                for p in postdeps:
                    pdeps = data.getVarFlag(p, 'deps', d) or []
                    pdeps.append(var)
                    data.setVarFlag(p, 'deps', pdeps, d)
                    bb.build.add_task(p, pdeps, d)

        bbpath.pop(0)
def feeder(lineno, s, fn, root, d):
    global __func_start_regexp__, __inherit_regexp__, __export_func_regexp__, __addtask_regexp__, __addhandler_regexp__, __def_regexp__, __python_func_regexp__, __inpython__, __infunc__, __body__, classes, bb, __residue__
    if __infunc__:
        if s == '}':
            __body__.append('')
            data.setVar(__infunc__, '\n'.join(__body__), d)
            data.setVarFlag(__infunc__, "func", 1, d)
            if __infunc__ == "__anonymous":
                anonqueue = bb.data.getVar("__anonqueue", d) or []
                anonitem = {}
                anonitem["content"] = bb.data.getVar("__anonymous", d)
                anonitem["flags"] = bb.data.getVarFlags("__anonymous", d)
                anonqueue.append(anonitem)
                bb.data.setVar("__anonqueue", anonqueue, d)
                bb.data.delVarFlags("__anonymous", d)
                bb.data.delVar("__anonymous", d)
            __infunc__ = ""
            __body__ = []
        else:
            __body__.append(s)
        return

    if __inpython__:
        m = __python_func_regexp__.match(s)
        if m and lineno != IN_PYTHON_EOF:
            __body__.append(s)
            return
        else:
            # Note we will add root to parsedmethods after having parsed
            # 'this' file. This means we will not parse methods from
            # bb classes twice
            if not root in __parsed_methods__:
                text = '\n'.join(__body__)
                methodpool.insert_method(root, text, fn)
                funcs = data.getVar('__functions__', d) or {}
                if not funcs.has_key(root):
                    funcs[root] = text
                else:
                    funcs[root] = "%s\n%s" % (funcs[root], text)

                data.setVar('__functions__', funcs, d)
            __body__ = []
            __inpython__ = False

            if lineno == IN_PYTHON_EOF:
                return

            # fall through

    if s == '' or s[0] == '#': return          # skip comments and empty lines

    if s[-1] == '\\':
        __residue__.append(s[:-1])
        return

    s = "".join(__residue__) + s
    __residue__ = []

    m = __func_start_regexp__.match(s)
    if m:
        __infunc__ = m.group("func") or "__anonymous"
        key = __infunc__
        if data.getVar(key, d):
            # clean up old version of this piece of metadata, as its
            # flags could cause problems
            data.setVarFlag(key, 'python', None, d)
            data.setVarFlag(key, 'fakeroot', None, d)
        if m.group("py") is not None:
            data.setVarFlag(key, "python", "1", d)
        else:
            data.delVarFlag(key, "python", d)
        if m.group("fr") is not None:
            data.setVarFlag(key, "fakeroot", "1", d)
        else:
            data.delVarFlag(key, "fakeroot", d)
        return

    m = __def_regexp__.match(s)
    if m:
        __body__.append(s)
        __inpython__ = True
        return

    m = __export_func_regexp__.match(s)
    if m:
        fns = m.group(1)
        n = __word__.findall(fns)
        for f in n:
            allvars = []
            allvars.append(f)
            allvars.append(classes[-1] + "_" + f)

            vars = [[allvars[0], allvars[1]]]
            if len(classes) > 1 and classes[-2] is not None:
                allvars.append(classes[-2] + "_" + f)
                vars = []
                vars.append([allvars[2], allvars[1]])
                vars.append([allvars[0], allvars[2]])

            for (var, calledvar) in vars:
                if data.getVar(var, d) and not data.getVarFlag(var, 'export_func', d):
                    continue

                if data.getVar(var, d):
                    data.setVarFlag(var, 'python', None, d)
                    data.setVarFlag(var, 'func', None, d)

                for flag in ["func", "python"]:
                    if data.getVarFlag(calledvar, flag, d):
                        data.setVarFlag(var, flag, data.getVarFlag(calledvar, flag, d), d)
                for flag in ["dirs"]:
                    if data.getVarFlag(var, flag, d):
                        data.setVarFlag(calledvar, flag, data.getVarFlag(var, flag, d), d)

                if data.getVarFlag(calledvar, "python", d):
                    data.setVar(var, "\tbb.build.exec_func('" + calledvar + "', d)\n", d)
                else:
                    data.setVar(var, "\t" + calledvar + "\n", d)
                data.setVarFlag(var, 'export_func', '1', d)

        return

    m = __addtask_regexp__.match(s)
    if m:
        func = m.group("func")
        before = m.group("before")
        after = m.group("after")
        if func is None:
            return
        var = "do_" + func

        data.setVarFlag(var, "task", 1, d)

        bbtasks = data.getVar('__BBTASKS', d) or []
        if not var in bbtasks:
            bbtasks.append(var)
        data.setVar('__BBTASKS', bbtasks, d)

        existing = data.getVarFlag(var, "deps", d) or []
        if after is not None:
            # set up deps for function
            for entry in after.split():
                if entry not in existing:
                    existing.append(entry)
        data.setVarFlag(var, "deps", existing, d)
        if before is not None:
            # set up things that depend on this func
            for entry in before.split():
                existing = data.getVarFlag(entry, "deps", d) or []
                if var not in existing:
                    data.setVarFlag(entry, "deps", [var] + existing, d)
        return

    m = __addhandler_regexp__.match(s)
    if m:
        fns = m.group(1)
        hs = __word__.findall(fns)
        bbhands = data.getVar('__BBHANDLERS', d) or []
        for h in hs:
            bbhands.append(h)
            data.setVarFlag(h, "handler", 1, d)
        data.setVar('__BBHANDLERS', bbhands, d)
        return

    m = __inherit_regexp__.match(s)
    if m:
        files = m.group(1)
        n = __word__.findall(files)
        inherit(n, d)
        return

    from bb.parse import ConfHandler
    return ConfHandler.feeder(lineno, s, fn, d)
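# Sketch (not the parser itself): the __addtask_regexp__ branch above turns a
# recipe line such as "addtask compile after do_configure before do_install"
# into 'deps' entries on do_compile and on do_install.  With plain dictionaries
# in place of the datastore, the bookkeeping amounts to:

def add_task(deps, func, after=None, before=None):
    """Record dependency edges for do_<func> in the 'deps' mapping."""
    var = "do_" + func
    existing = deps.get(var, [])
    for entry in (after or "").split():
        if entry not in existing:
            existing.append(entry)
    deps[var] = existing
    for entry in (before or "").split():
        if var not in deps.get(entry, []):
            deps[entry] = [var] + deps.get(entry, [])

deps = {}
add_task(deps, "compile", after="do_configure", before="do_install")
print(deps)   # {'do_compile': ['do_configure'], 'do_install': ['do_compile']}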
    def parseConfigurationFiles(self, prefiles, postfiles, mc = "default"):
        data = bb.data.createCopy(self.basedata)
        data.setVar("BB_CURRENT_MC", mc)

        # Parse files for loading *before* bitbake.conf and any includes
        for f in prefiles:
            data = parse_config_file(f, data)

        layerconf = self._findLayerConf(data)
        if layerconf:
            parselog.debug(2, "Found bblayers.conf (%s)", layerconf)
            # By definition bblayers.conf is in conf/ of TOPDIR.
            # We may have been called with cwd somewhere else so reset TOPDIR
            data.setVar("TOPDIR", os.path.dirname(os.path.dirname(layerconf)))
            data = parse_config_file(layerconf, data)

            layers = (data.getVar('BBLAYERS') or "").split()

            data = bb.data.createCopy(data)
            approved = bb.utils.approved_variables()
            for layer in layers:
                if not os.path.isdir(layer):
                    parselog.critical("Layer directory '%s' does not exist! "
                                      "Please check BBLAYERS in %s" % (layer, layerconf))
                    sys.exit(1)
                parselog.debug(2, "Adding layer %s", layer)
                if 'HOME' in approved and '~' in layer:
                    layer = os.path.expanduser(layer)
                if layer.endswith('/'):
                    layer = layer.rstrip('/')
                data.setVar('LAYERDIR', layer)
                data.setVar('LAYERDIR_RE', re.escape(layer))
                data = parse_config_file(os.path.join(layer, "conf", "layer.conf"), data)
                data.expandVarref('LAYERDIR')
                data.expandVarref('LAYERDIR_RE')

            data.delVar('LAYERDIR_RE')
            data.delVar('LAYERDIR')

            bbfiles_dynamic = (data.getVar('BBFILES_DYNAMIC') or "").split()
            collections = (data.getVar('BBFILE_COLLECTIONS') or "").split()
            invalid = []
            for entry in bbfiles_dynamic:
                parts = entry.split(":", 1)
                if len(parts) != 2:
                    invalid.append(entry)
                    continue
                l, f = parts
                if l in collections:
                    data.appendVar("BBFILES", " " + f)
            if invalid:
                bb.fatal("BBFILES_DYNAMIC entries must be of the form <collection name>:<filename pattern>, not:\n %s" % "\n ".join(invalid))

            layerseries = set((data.getVar("LAYERSERIES_CORENAMES") or "").split())
            collections_tmp = collections[:]
            for c in collections:
                collections_tmp.remove(c)
                if c in collections_tmp:
                    bb.fatal("Found duplicated BBFILE_COLLECTIONS '%s', check bblayers.conf or layer.conf to fix it." % c)
                compat = set((data.getVar("LAYERSERIES_COMPAT_%s" % c) or "").split())
                if compat and not (compat & layerseries):
                    bb.fatal("Layer %s is not compatible with the core layer which only supports these series: %s (layer is compatible with %s)"
                             % (c, " ".join(layerseries), " ".join(compat)))
                elif not compat and not data.getVar("BB_WORKERCONTEXT"):
                    bb.warn("Layer %s should set LAYERSERIES_COMPAT_%s in its conf/layer.conf file to list the core layer names it is compatible with." % (c, c))

        if not data.getVar("BBPATH"):
            msg = "The BBPATH variable is not set"
            if not layerconf:
                msg += (" and bitbake did not find a conf/bblayers.conf file in"
                        " the expected location.\nMaybe you accidentally"
                        " invoked bitbake from the wrong directory?")
            raise SystemExit(msg)

        data = parse_config_file(os.path.join("conf", "bitbake.conf"), data)

        # Parse files for loading *after* bitbake.conf and any includes
        for p in postfiles:
            data = parse_config_file(p, data)

        # Handle any INHERITs and inherit the base class
        bbclasses = ["base"] + (data.getVar('INHERIT') or "").split()
        for bbclass in bbclasses:
            data = _inherit(bbclass, data)

        # Normally we only register event handlers at the end of parsing .bb files
        # We register any handlers we've found so far here...
        for var in data.getVar('__BBHANDLERS', False) or []:
            handlerfn = data.getVarFlag(var, "filename", False)
            if not handlerfn:
                parselog.critical("Undefined event handler function '%s'" % var)
                sys.exit(1)
            handlerln = int(data.getVarFlag(var, "lineno", False))
            bb.event.register(var, data.getVar(var, False), (data.getVarFlag(var, "eventmask") or "").split(), handlerfn, handlerln)

        data.setVar('BBINCLUDED', bb.parse.get_file_depends(data))

        return data
    def parseConfigurationFiles(self, prefiles, postfiles, mc="default"):
        data = bb.data.createCopy(self.basedata)
        data.setVar("BB_CURRENT_MC", mc)

        # Parse files for loading *before* bitbake.conf and any includes
        for f in prefiles:
            data = parse_config_file(f, data)

        layerconf = self._findLayerConf(data)
        if layerconf:
            parselog.debug(2, "Found bblayers.conf (%s)", layerconf)
            # By definition bblayers.conf is in conf/ of TOPDIR.
            # We may have been called with cwd somewhere else so reset TOPDIR
            data.setVar("TOPDIR", os.path.dirname(os.path.dirname(layerconf)))
            data = parse_config_file(layerconf, data)

            layers = (data.getVar('BBLAYERS') or "").split()
            broken_layers = []

            data = bb.data.createCopy(data)
            approved = bb.utils.approved_variables()

            # Check whether present layer directories exist
            for layer in layers:
                if not os.path.isdir(layer):
                    broken_layers.append(layer)

            if broken_layers:
                parselog.critical("The following layer directories do not exist:")
                for layer in broken_layers:
                    parselog.critical(" %s", layer)
                parselog.critical("Please check BBLAYERS in %s" % (layerconf))
                raise bb.BBHandledException()

            for layer in layers:
                parselog.debug(2, "Adding layer %s", layer)
                if 'HOME' in approved and '~' in layer:
                    layer = os.path.expanduser(layer)
                if layer.endswith('/'):
                    layer = layer.rstrip('/')
                data.setVar('LAYERDIR', layer)
                data.setVar('LAYERDIR_RE', re.escape(layer))
                data = parse_config_file(os.path.join(layer, "conf", "layer.conf"), data)
                data.expandVarref('LAYERDIR')
                data.expandVarref('LAYERDIR_RE')

            data.delVar('LAYERDIR_RE')
            data.delVar('LAYERDIR')

            bbfiles_dynamic = (data.getVar('BBFILES_DYNAMIC') or "").split()
            collections = (data.getVar('BBFILE_COLLECTIONS') or "").split()
            invalid = []
            for entry in bbfiles_dynamic:
                parts = entry.split(":", 1)
                if len(parts) != 2:
                    invalid.append(entry)
                    continue
                l, f = parts
                invert = l[0] == "!"
                if invert:
                    l = l[1:]
                if (l in collections and not invert) or (l not in collections and invert):
                    data.appendVar("BBFILES", " " + f)
            if invalid:
                bb.fatal("BBFILES_DYNAMIC entries must be of the form {!}<collection name>:<filename pattern>, not:\n %s" % "\n ".join(invalid))

            layerseries = set((data.getVar("LAYERSERIES_CORENAMES") or "").split())
            collections_tmp = collections[:]
            for c in collections:
                collections_tmp.remove(c)
                if c in collections_tmp:
                    bb.fatal("Found duplicated BBFILE_COLLECTIONS '%s', check bblayers.conf or layer.conf to fix it." % c)
                compat = set((data.getVar("LAYERSERIES_COMPAT_%s" % c) or "").split())
                if compat and not (compat & layerseries):
                    bb.fatal("Layer %s is not compatible with the core layer which only supports these series: %s (layer is compatible with %s)"
                             % (c, " ".join(layerseries), " ".join(compat)))
                elif not compat and not data.getVar("BB_WORKERCONTEXT"):
                    bb.warn("Layer %s should set LAYERSERIES_COMPAT_%s in its conf/layer.conf file to list the core layer names it is compatible with."
                            % (c, c))

        if not data.getVar("BBPATH"):
            msg = "The BBPATH variable is not set"
            if not layerconf:
                msg += (" and bitbake did not find a conf/bblayers.conf file in"
                        " the expected location.\nMaybe you accidentally"
                        " invoked bitbake from the wrong directory?")
            raise SystemExit(msg)

        data = parse_config_file(os.path.join("conf", "bitbake.conf"), data)

        # Parse files for loading *after* bitbake.conf and any includes
        for p in postfiles:
            data = parse_config_file(p, data)

        # Handle any INHERITs and inherit the base class
        bbclasses = ["base"] + (data.getVar('INHERIT') or "").split()
        for bbclass in bbclasses:
            data = _inherit(bbclass, data)

        # Normally we only register event handlers at the end of parsing .bb files
        # We register any handlers we've found so far here...
        for var in data.getVar('__BBHANDLERS', False) or []:
            handlerfn = data.getVarFlag(var, "filename", False)
            if not handlerfn:
                parselog.critical("Undefined event handler function '%s'" % var)
                raise bb.BBHandledException()
            handlerln = int(data.getVarFlag(var, "lineno", False))
            bb.event.register(var, data.getVar(var, False), (data.getVarFlag(var, "eventmask") or "").split(), handlerfn, handlerln)

        data.setVar('BBINCLUDED', bb.parse.get_file_depends(data))

        return data
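# Sketch only: the BBFILES_DYNAMIC handling above accepts entries of the form
# "<collection>:<pattern>" and, in this newer variant, "!<collection>:<pattern>"
# to add the pattern only when that collection (layer) is absent.  The matching
# rule in isolation:

def bbfiles_dynamic_matches(entry, collections):
    """Return (matches, pattern) for one well-formed BBFILES_DYNAMIC entry."""
    name, _, pattern = entry.partition(":")
    invert = name.startswith("!")
    if invert:
        name = name[1:]
    present = name in collections
    return (present != invert), pattern

# e.g. with hypothetical entries:
print(bbfiles_dynamic_matches("meta-foo:recipes-*/*.bb", {"meta-foo"}))   # (True, ...)
print(bbfiles_dynamic_matches("!meta-bar:fallback/*.bb", {"meta-foo"}))   # (True, ...)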
    def parseConfigurationFiles(self, prefiles, postfiles):
        data = self.data
        bb.parse.init_parser(data)

        # Parse files for loading *before* bitbake.conf and any includes
        for f in prefiles:
            data = parse_config_file(f, data)

        layerconf = self._findLayerConf(data)
        if layerconf:
            parselog.debug(2, "Found bblayers.conf (%s)", layerconf)
            # By definition bblayers.conf is in conf/ of TOPDIR.
            # We may have been called with cwd somewhere else so reset TOPDIR
            data.setVar("TOPDIR", os.path.dirname(os.path.dirname(layerconf)))
            data = parse_config_file(layerconf, data)

            layers = (data.getVar('BBLAYERS', True) or "").split()

            data = bb.data.createCopy(data)
            for layer in layers:
                parselog.debug(2, "Adding layer %s", layer)
                data.setVar('LAYERDIR', layer)
                data = parse_config_file(os.path.join(layer, "conf", "layer.conf"), data)
                data.expandVarref('LAYERDIR')

            data.delVar('LAYERDIR')

        if not data.getVar("BBPATH", True):
            msg = "The BBPATH variable is not set"
            if not layerconf:
                msg += (" and bitbake did not find a conf/bblayers.conf file in"
                        " the expected location.\nMaybe you accidentally"
                        " invoked bitbake from the wrong directory?")
            raise SystemExit(msg)

        data = parse_config_file(os.path.join("conf", "bitbake.conf"), data)

        # Parse files for loading *after* bitbake.conf and any includes
        for p in postfiles:
            data = parse_config_file(p, data)

        # Handle any INHERITs and inherit the base class
        bbclasses = ["base"] + (data.getVar('INHERIT', True) or "").split()
        for bbclass in bbclasses:
            data = _inherit(bbclass, data)

        # Normally we only register event handlers at the end of parsing .bb files
        # We register any handlers we've found so far here...
        for var in data.getVar('__BBHANDLERS') or []:
            bb.event.register(var, data.getVar(var), (data.getVarFlag(var, "eventmask", True) or "").split())

        if data.getVar("BB_WORKERCONTEXT", False) is None:
            bb.fetch.fetcher_init(data)
            bb.codeparser.parser_cache_init(data)
        bb.event.fire(bb.event.ConfigParsed(), data)

        if data.getVar("BB_INVALIDCONF") is True:
            data.setVar("BB_INVALIDCONF", False)
            self.parseConfigurationFiles(self.prefiles, self.postfiles)
            return

        bb.parse.init_parser(data)
        data.setVar('BBINCLUDED', bb.parse.get_file_depends(data))
        self.data = data
        self.data_hash = data.get_hash()
def collect_bbfiles( progressCallback ):
    """Collect all available .bb build files"""
    parsed, cached, skipped, masked = 0, 0, 0, 0
    global cache, pkgdata
    cache = bb.data.getVar( "CACHE", cfg, 1 )
    pkgdata = data.pkgdata( not cache in [None, ''], cache )

    if not cache in [None, '']:
        print "NOTE: Using cache in '%s'" % cache
        try:
            os.stat( cache )
        except OSError:
            bb.mkdirhier( cache )
    else:
        print "NOTE: Not using a cache. Set CACHE = <directory> to enable."

    files = (data.getVar( "BBFILES", cfg, 1 ) or "").split()
    data.setVar("BBFILES", " ".join(files), cfg)

    if not len(files):
        files = get_bbfiles()

    if not len(files):
        bb.error("no files to build.")

    newfiles = []
    for f in files:
        if os.path.isdir(f):
            dirfiles = find_bbfiles(f)
            if dirfiles:
                newfiles += dirfiles
                continue
        newfiles += glob.glob(f) or [ f ]

    bbmask = bb.data.getVar('BBMASK', cfg, 1) or ""
    try:
        bbmask_compiled = re.compile(bbmask)
    except sre_constants.error:
        bb.fatal("BBMASK is not a valid regular expression.")

    for i in xrange( len( newfiles ) ):
        f = newfiles[i]
        if bbmask and bbmask_compiled.search(f):
            bb.debug(1, "bbmake: skipping %s" % f)
            masked += 1
            continue
        debug(1, "bbmake: parsing %s" % f)

        # read a file's metadata
        try:
            bb_data, fromCache = load_bbfile(f)
            if fromCache:
                cached += 1
            else:
                parsed += 1
            deps = None
            if bb_data is not None:
                # allow metadata files to add items to BBFILES
                #data.update_data(pkgdata[f])
                addbbfiles = data.getVar('BBFILES', bb_data) or None
                if addbbfiles:
                    for aof in addbbfiles.split():
                        if not files.count(aof):
                            if not os.path.isabs(aof):
                                aof = os.path.join(os.path.dirname(f), aof)
                            files.append(aof)
                for var in bb_data.keys():
                    if data.getVarFlag(var, "handler", bb_data) and data.getVar(var, bb_data):
                        event.register(data.getVar(var, bb_data))
                pkgdata[f] = bb_data

            # now inform the caller
            progressCallback( i + 1, len( newfiles ), f, bb_data, fromCache )
        except IOError, e:
            bb.error("opening %s: %s" % (f, e))
            pass
        except bb.parse.SkipPackage:
            skipped += 1
            pass
def _exec_task(fn, task, d, quieterr):
    """Execute a BB 'task'

    Execution of a task involves a bit more setup than executing a function,
    running it with its own local metadata, and with some useful variables set.
    """
    if not data.getVarFlag(task, 'task', d):
        event.fire(TaskInvalid(task, d), d)
        logger.error("No such task: %s" % task)
        return 1

    logger.debug(1, "Executing task %s", task)

    localdata = _task_data(fn, task, d)
    tempdir = localdata.getVar('T', True)
    if not tempdir:
        bb.fatal("T variable not set, unable to build")

    bb.utils.mkdirhier(tempdir)

    # Determine the logfile to generate
    logfmt = localdata.getVar('BB_LOGFMT', True) or 'log.{task}.{pid}'
    logbase = logfmt.format(task=task, pid=os.getpid())

    # Document the order of the tasks...
    logorder = os.path.join(tempdir, 'log.task_order')
    try:
        logorderfile = file(logorder, 'a')
    except OSError:
        logger.exception("Opening log file '%s'", logorder)
        pass
    logorderfile.write('{0} ({1}): {2}\n'.format(task, os.getpid(), logbase))
    logorderfile.close()

    # Setup the courtesy link to the logfn
    loglink = os.path.join(tempdir, 'log.{0}'.format(task))
    logfn = os.path.join(tempdir, logbase)
    if loglink:
        bb.utils.remove(loglink)

        try:
            os.symlink(logbase, loglink)
        except OSError:
            pass

    prefuncs = localdata.getVarFlag(task, 'prefuncs', expand=True)
    postfuncs = localdata.getVarFlag(task, 'postfuncs', expand=True)

    class ErrorCheckHandler(logging.Handler):
        def __init__(self):
            self.triggered = False
            logging.Handler.__init__(self, logging.ERROR)
        def emit(self, record):
            self.triggered = True

    # Handle logfiles
    si = file('/dev/null', 'r')
    try:
        bb.utils.mkdirhier(os.path.dirname(logfn))
        logfile = file(logfn, 'w')
    except OSError:
        logger.exception("Opening log file '%s'", logfn)
        pass

    # Dup the existing fds so we don't lose them
    osi = [os.dup(sys.stdin.fileno()), sys.stdin.fileno()]
    oso = [os.dup(sys.stdout.fileno()), sys.stdout.fileno()]
    ose = [os.dup(sys.stderr.fileno()), sys.stderr.fileno()]

    # Replace those fds with our own
    os.dup2(si.fileno(), osi[1])
    os.dup2(logfile.fileno(), oso[1])
    os.dup2(logfile.fileno(), ose[1])

    # Ensure python logging goes to the logfile
    handler = logging.StreamHandler(logfile)
    handler.setFormatter(logformatter)
    # Always enable full debug output into task logfiles
    handler.setLevel(logging.DEBUG - 2)
    bblogger.addHandler(handler)

    errchk = ErrorCheckHandler()
    bblogger.addHandler(errchk)

    localdata.setVar('BB_LOGFILE', logfn)
    localdata.setVar('BB_RUNTASK', task)

    event.fire(TaskStarted(task, localdata), localdata)
    try:
        for func in (prefuncs or '').split():
            exec_func(func, localdata)
        exec_func(task, localdata)
        for func in (postfuncs or '').split():
            exec_func(func, localdata)
    except FuncFailed as exc:
        if quieterr:
            event.fire(TaskFailedSilent(task, logfn, localdata), localdata)
        else:
            errprinted = errchk.triggered
            logger.error(str(exc))
            event.fire(TaskFailed(task, logfn, localdata, errprinted), localdata)
        return 1
    finally:
        sys.stdout.flush()
        sys.stderr.flush()

        bblogger.removeHandler(handler)

        # Restore the backup fds
        os.dup2(osi[0], osi[1])
        os.dup2(oso[0], oso[1])
        os.dup2(ose[0], ose[1])

        # Close the backup fds
        os.close(osi[0])
        os.close(oso[0])
        os.close(ose[0])
        si.close()

        logfile.close()
        if os.path.exists(logfn) and os.path.getsize(logfn) == 0:
            logger.debug(2, "Zero size logfn %s, removing", logfn)
            bb.utils.remove(logfn)
            bb.utils.remove(loglink)
    event.fire(TaskSucceeded(task, localdata), localdata)

    if not localdata.getVarFlag(task, 'nostamp') and not localdata.getVarFlag(task, 'selfstamp'):
        make_stamp(task, localdata)

    return 0
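# Standalone sketch of the fd juggling used in _exec_task above: stdout/stderr
# are dup()'d so they can be restored, then dup2()'d onto the task log file so
# that both Python output and any child processes write into the same log.
# Paths are illustrative only.

import os, sys

def redirected_to_log(logpath, command):
    logfile = open(logpath, 'w')
    saved_out, saved_err = os.dup(sys.stdout.fileno()), os.dup(sys.stderr.fileno())
    os.dup2(logfile.fileno(), sys.stdout.fileno())
    os.dup2(logfile.fileno(), sys.stderr.fileno())
    try:
        os.system(command)                 # child inherits the redirected fds
        print("done")                      # Python-level output also lands in the log
    finally:
        sys.stdout.flush(); sys.stderr.flush()
        os.dup2(saved_out, sys.stdout.fileno())
        os.dup2(saved_err, sys.stderr.fileno())
        os.close(saved_out); os.close(saved_err)
        logfile.close()

# redirected_to_log("/tmp/log.example", "echo hello from a child process")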
    def parseConfigurationFiles(self, prefiles, postfiles):
        data = bb.data.createCopy(self.basedata)

        # Parse files for loading *before* bitbake.conf and any includes
        for f in prefiles:
            data = parse_config_file(f, data)

        layerconf = self._findLayerConf(data)
        if layerconf:
            parselog.debug(2, "Found bblayers.conf (%s)", layerconf)
            # By definition bblayers.conf is in conf/ of TOPDIR.
            # We may have been called with cwd somewhere else so reset TOPDIR
            data.setVar("TOPDIR", os.path.dirname(os.path.dirname(layerconf)))
            data = parse_config_file(layerconf, data)

            layers = (data.getVar('BBLAYERS') or "").split()

            data = bb.data.createCopy(data)
            approved = bb.utils.approved_variables()
            for layer in layers:
                if not os.path.isdir(layer):
                    parselog.critical("Layer directory '%s' does not exist! "
                                      "Please check BBLAYERS in %s" % (layer, layerconf))
                    sys.exit(1)
                parselog.debug(2, "Adding layer %s", layer)
                if 'HOME' in approved and '~' in layer:
                    layer = os.path.expanduser(layer)
                if layer.endswith('/'):
                    layer = layer.rstrip('/')
                data.setVar('LAYERDIR', layer)
                data.setVar('LAYERDIR_RE', re.escape(layer))
                data = parse_config_file(os.path.join(layer, "conf", "layer.conf"), data)
                data.expandVarref('LAYERDIR')
                data.expandVarref('LAYERDIR_RE')

            data.delVar('LAYERDIR_RE')
            data.delVar('LAYERDIR')

        if not data.getVar("BBPATH"):
            msg = "The BBPATH variable is not set"
            if not layerconf:
                msg += (" and bitbake did not find a conf/bblayers.conf file in"
                        " the expected location.\nMaybe you accidentally"
                        " invoked bitbake from the wrong directory?")
            raise SystemExit(msg)

        data = parse_config_file(os.path.join("conf", "bitbake.conf"), data)

        # Parse files for loading *after* bitbake.conf and any includes
        for p in postfiles:
            data = parse_config_file(p, data)

        # Handle any INHERITs and inherit the base class
        bbclasses = ["base"] + (data.getVar('INHERIT') or "").split()
        for bbclass in bbclasses:
            data = _inherit(bbclass, data)

        # Normally we only register event handlers at the end of parsing .bb files
        # We register any handlers we've found so far here...
        for var in data.getVar('__BBHANDLERS', False) or []:
            handlerfn = data.getVarFlag(var, "filename", False)
            if not handlerfn:
                parselog.critical("Undefined event handler function '%s'" % var)
                sys.exit(1)
            handlerln = int(data.getVarFlag(var, "lineno", False))
            bb.event.register(var, data.getVar(var, False), (data.getVarFlag(var, "eventmask") or "").split(), handlerfn, handlerln)

        data.setVar('BBINCLUDED', bb.parse.get_file_depends(data))

        return data
        event.fire(TaskSucceeded(task, localdata), localdata)
    except FuncFailed, message:
        # Try to extract the optional logfile
        try:
            (msg, logfile) = message
        except:
            logfile = None
            msg = message

        bb.msg.note(1, bb.msg.domain.Build, "Task failed: %s" % message)
        failedevent = TaskFailed(msg, logfile, task, d)
        event.fire(failedevent, d)
        raise EventException("Function failed in task: %s" % message, failedevent)

    # make stamp, or cause event and raise exception
    if not data.getVarFlag(task, 'nostamp', d) and not data.getVarFlag(task, 'selfstamp', d):
        make_stamp(task, d)

def extract_stamp(d, fn):
    """
    Extracts stamp format which is either a data dictionary (fn unset)
    or a dataCache entry (fn set).
    """
    if fn:
        return d.stamp[fn]
    return data.getVar('STAMP', d, 1)

def stamp_internal(task, d, file_name):
        exec_func(task, localdata)
        event.fire(TaskSucceeded(task, localdata))
    except FuncFailed, message:
        # Try to extract the optional logfile
        try:
            (msg, logfile) = message
        except:
            logfile = None
            msg = message

        bb.msg.note(1, bb.msg.domain.Build, "Task failed: %s" % message)
        failedevent = TaskFailed(msg, logfile, task, d)
        event.fire(failedevent)
        raise EventException("Function failed in task: %s" % message, failedevent)

    # make stamp, or cause event and raise exception
    if not data.getVarFlag(task, 'nostamp', d) and not data.getVarFlag(task, 'selfstamp', d):
        make_stamp(task, d)

def extract_stamp(d, fn):
    """
    Extracts stamp format which is either a data dictionary (fn unset)
    or a dataCache entry (fn set).
    """
    if fn:
        return d.stamp[fn]
    return data.getVar('STAMP', d, 1)

def stamp_internal(task, d, file_name):
    """
    Internal stamp helper function
    Removes any stamp for the given task
    def parseConfigurationFiles(self, prefiles, postfiles):
        data = bb.data.createCopy(self.basedata)

        # Parse files for loading *before* bitbake.conf and any includes
        for f in prefiles:
            data = parse_config_file(f, data)

        layerconf = self._findLayerConf(data)
        if layerconf:
            parselog.debug(2, "Found bblayers.conf (%s)", layerconf)
            # By definition bblayers.conf is in conf/ of TOPDIR.
            # We may have been called with cwd somewhere else so reset TOPDIR
            data.setVar("TOPDIR", os.path.dirname(os.path.dirname(layerconf)))
            data = parse_config_file(layerconf, data)

            layers = (data.getVar('BBLAYERS', True) or "").split()

            data = bb.data.createCopy(data)
            approved = bb.utils.approved_variables()
            for layer in layers:
                if not os.path.isdir(layer):
                    parselog.critical("Layer directory '%s' does not exist! "
                                      "Please check BBLAYERS in %s" % (layer, layerconf))
                    sys.exit(1)
                parselog.debug(2, "Adding layer %s", layer)
                if 'HOME' in approved and '~' in layer:
                    layer = os.path.expanduser(layer)
                if layer.endswith('/'):
                    layer = layer.rstrip('/')
                data.setVar('LAYERDIR', layer)
                data.setVar('LAYERDIR_RE', re.escape(layer))
                data = parse_config_file(os.path.join(layer, "conf", "layer.conf"), data)
                data.expandVarref('LAYERDIR')
                data.expandVarref('LAYERDIR_RE')

            data.delVar('LAYERDIR_RE')
            data.delVar('LAYERDIR')

        if not data.getVar("BBPATH", True):
            msg = "The BBPATH variable is not set"
            if not layerconf:
                msg += (" and bitbake did not find a conf/bblayers.conf file in"
                        " the expected location.\nMaybe you accidentally"
                        " invoked bitbake from the wrong directory?")
            raise SystemExit(msg)

        data = parse_config_file(os.path.join("conf", "bitbake.conf"), data)

        # Parse files for loading *after* bitbake.conf and any includes
        for p in postfiles:
            data = parse_config_file(p, data)

        # Handle any INHERITs and inherit the base class
        bbclasses = ["base"] + (data.getVar('INHERIT', True) or "").split()
        for bbclass in bbclasses:
            data = _inherit(bbclass, data)

        # Normally we only register event handlers at the end of parsing .bb files
        # We register any handlers we've found so far here...
        for var in data.getVar('__BBHANDLERS', False) or []:
            handlerfn = data.getVarFlag(var, "filename", False)
            if not handlerfn:
                parselog.critical("Undefined event handler function '%s'" % var)
                sys.exit(1)
            handlerln = int(data.getVarFlag(var, "lineno", False))
            bb.event.register(var, data.getVar(var, False), (data.getVarFlag(var, "eventmask", True) or "").split(), handlerfn, handlerln)

        data.setVar('BBINCLUDED', bb.parse.get_file_depends(data))

        return data
            def write(self, output):
                self.writebuf = self.writebuf + output

        # emit variables and shell functions
        try:
            data.update_data(envdata)
            wb = dummywrite()
            data.emit_env(wb, envdata, True)
            bb.msg.plain(wb.writebuf)
        except Exception, e:
            bb.msg.fatal(bb.msg.domain.Parsing, "%s" % e)
        # emit the metadata which isn't valid shell
        data.expandKeys(envdata)
        for e in envdata.keys():
            if data.getVarFlag(e, 'python', envdata):
                bb.msg.plain("\npython %s () {\n%s}\n" % (e, data.getVar(e, envdata, 1)))

    def generateDepTreeData(self, pkgs_to_build, task):
        """
        Create a dependency tree of pkgs_to_build, returning the data.
        """

        # Need files parsed
        self.updateCache()

        # If we are told to do the None task then query the default task
        if (task == None):
            task = self.configuration.cmd
def handle(fn, d, include = 0):
    global __func_start_regexp__, __inherit_regexp__, __export_func_regexp__, __addtask_regexp__, __addhandler_regexp__, __infunc__, __body__, __residue__
    __body__ = []
    __infunc__ = ""
    __classname__ = ""
    __residue__ = []

    if include == 0:
        bb.msg.debug(2, bb.msg.domain.Parsing, "BB " + fn + ": handle(data)")
    else:
        bb.msg.debug(2, bb.msg.domain.Parsing, "BB " + fn + ": handle(data, include)")

    (root, ext) = os.path.splitext(os.path.basename(fn))
    base_name = "%s%s" % (root, ext)
    init(d)

    if ext == ".bbclass":
        __classname__ = root
        classes.append(__classname__)

    if include != 0:
        oldfile = data.getVar('FILE', d)
    else:
        oldfile = None

    fn = obtain(fn, d)
    bbpath = (data.getVar('BBPATH', d, 1) or '').split(':')
    if not os.path.isabs(fn):
        f = None
        for p in bbpath:
            j = os.path.join(p, fn)
            if os.access(j, os.R_OK):
                abs_fn = j
                f = open(j, 'r')
                break
        if f is None:
            raise IOError("file not found")
    else:
        f = open(fn, 'r')
        abs_fn = fn

    if ext != ".bbclass":
        bbpath.insert(0, os.path.dirname(abs_fn))
        data.setVar('BBPATH', ":".join(bbpath), d)

    if include:
        bb.parse.mark_dependency(d, abs_fn)

    if ext != ".bbclass":
        data.setVar('FILE', fn, d)
        i = (data.getVar("INHERIT", d, 1) or "").split()
        if not "base" in i and __classname__ != "base":
            i[0:0] = ["base"]
        inherit(i, d)

    lineno = 0
    while 1:
        lineno = lineno + 1
        s = f.readline()
        if not s: break
        s = s.rstrip()
        feeder(lineno, s, fn, base_name, d)
    if __inpython__:
        # add a blank line to close out any python definition
        feeder(IN_PYTHON_EOF, "", fn, base_name, d)
    if ext == ".bbclass":
        classes.remove(__classname__)
    else:
        if include == 0:
            data.expandKeys(d)
            data.update_data(d)
            anonqueue = data.getVar("__anonqueue", d, 1) or []
            body = [x['content'] for x in anonqueue]
            flag = { 'python' : 1, 'func' : 1 }
            data.setVar("__anonfunc", "\n".join(body), d)
            data.setVarFlags("__anonfunc", flag, d)
            from bb import build
            try:
                t = data.getVar('T', d)
                data.setVar('T', '${TMPDIR}/', d)
                build.exec_func("__anonfunc", d)
                data.delVar('T', d)
                if t:
                    data.setVar('T', t, d)
            except Exception, e:
                bb.msg.debug(1, bb.msg.domain.Parsing, "executing anonymous function: %s" % e)
                raise
            data.delVar("__anonqueue", d)
            data.delVar("__anonfunc", d)
            set_additional_vars(fn, d, include)
            data.update_data(d)

            all_handlers = {}
            for var in data.getVar('__BBHANDLERS', d) or []:
                # try to add the handler
                # if we added it remember the choice
                handler = data.getVar(var, d)
                if bb.event.register(var, handler) == bb.event.Registered:
                    all_handlers[var] = handler

            for var in data.getVar('__BBTASKS', d) or []:
                deps = data.getVarFlag(var, 'deps', d) or []
                postdeps = data.getVarFlag(var, 'postdeps', d) or []
                bb.build.add_task(var, deps, d)
                for p in postdeps:
                    pdeps = data.getVarFlag(p, 'deps', d) or []
                    pdeps.append(var)
                    data.setVarFlag(p, 'deps', pdeps, d)
                    bb.build.add_task(p, pdeps, d)

            # now add the handlers
            if not len(all_handlers) == 0:
                data.setVar('__all_handlers__', all_handlers, d)

        bbpath.pop(0)
    se = so

    # dup the existing fds so we don't lose them
    osi = [os.dup(sys.stdin.fileno()), sys.stdin.fileno()]
    oso = [os.dup(sys.stdout.fileno()), sys.stdout.fileno()]
    ose = [os.dup(sys.stderr.fileno()), sys.stderr.fileno()]

    # replace those fds with our own
    os.dup2(si.fileno(), osi[1])
    os.dup2(so.fileno(), oso[1])
    os.dup2(se.fileno(), ose[1])

    # execute function
    prevdir = os.getcwd()
    if data.getVarFlag(func, "fakeroot", d):
        maybe_fakeroot = "PATH=\"%s\" fakeroot " % bb.data.getVar("PATH", d, 1)
    else:
        maybe_fakeroot = ''
    ret = os.system('%ssh -e %s' % (maybe_fakeroot, runfile))
    os.chdir(prevdir)

    # restore the backups
    os.dup2(osi[0], osi[1])
    os.dup2(oso[0], oso[1])
    os.dup2(ose[0], ose[1])

    # close our logs
    si.close()
    so.close()
    se.close()
def feeder(lineno, s, fn, d):
    global __func_start_regexp__, __inherit_regexp__, __export_func_regexp__, __addtask_regexp__, __addhandler_regexp__, __def_regexp__, __python_func_regexp__, __inpython__, __infunc__, __body__, __bbpath_found__, classes, bb, __residue__
    if __infunc__:
        if s == '}':
            __body__.append('')
            data.setVar(__infunc__, '\n'.join(__body__), d)
            data.setVarFlag(__infunc__, "func", 1, d)
            if __infunc__ == "__anonymous":
                anonqueue = bb.data.getVar("__anonqueue", d) or []
                anonitem = {}
                anonitem["content"] = bb.data.getVar("__anonymous", d)
                anonitem["flags"] = bb.data.getVarFlags("__anonymous", d)
                anonqueue.append(anonitem)
                bb.data.setVar("__anonqueue", anonqueue, d)
                bb.data.delVarFlags("__anonymous", d)
                bb.data.delVar("__anonymous", d)
            __infunc__ = ""
            __body__ = []
        else:
            __body__.append(s)
        return

    if __inpython__:
        m = __python_func_regexp__.match(s)
        if m:
            __body__.append(s)
            return
        else:
            text = '\n'.join(__body__)
            comp = compile(text, "<bb>", "exec")
            exec comp in __builtins__
            __body__ = []
            __inpython__ = False
            funcs = data.getVar('__functions__', d) or ""
            data.setVar('__functions__', "%s\n%s" % (funcs, text), d)

    # fall through

    if s == '' or s[0] == '#': return          # skip comments and empty lines

    if s[-1] == '\\':
        __residue__.append(s[:-1])
        return

    s = "".join(__residue__) + s
    __residue__ = []

    m = __func_start_regexp__.match(s)
    if m:
        __infunc__ = m.group("func") or "__anonymous"
        key = __infunc__
        if data.getVar(key, d):
            # clean up old version of this piece of metadata, as its
            # flags could cause problems
            data.setVarFlag(key, 'python', None, d)
            data.setVarFlag(key, 'fakeroot', None, d)
        if m.group("py") is not None:
            data.setVarFlag(key, "python", "1", d)
        else:
            data.delVarFlag(key, "python", d)
        if m.group("fr") is not None:
            data.setVarFlag(key, "fakeroot", "1", d)
        else:
            data.delVarFlag(key, "fakeroot", d)
        return

    m = __def_regexp__.match(s)
    if m:
        __body__.append(s)
        __inpython__ = True
        return

    m = __export_func_regexp__.match(s)
    if m:
        fns = m.group(1)
        n = __word__.findall(fns)
        for f in n:
            allvars = []
            allvars.append(f)
            allvars.append(classes[-1] + "_" + f)

            vars = [[ allvars[0], allvars[1] ]]
            if len(classes) > 1 and classes[-2] is not None:
                allvars.append(classes[-2] + "_" + f)
                vars = []
                vars.append([allvars[2], allvars[1]])
                vars.append([allvars[0], allvars[2]])

            for (var, calledvar) in vars:
                if data.getVar(var, d) and not data.getVarFlag(var, 'export_func', d):
                    continue

                if data.getVar(var, d):
                    data.setVarFlag(var, 'python', None, d)
                    data.setVarFlag(var, 'func', None, d)

                for flag in [ "func", "python" ]:
                    if data.getVarFlag(calledvar, flag, d):
                        data.setVarFlag(var, flag, data.getVarFlag(calledvar, flag, d), d)
                for flag in [ "dirs" ]:
                    if data.getVarFlag(var, flag, d):
                        data.setVarFlag(calledvar, flag, data.getVarFlag(var, flag, d), d)

                if data.getVarFlag(calledvar, "python", d):
                    data.setVar(var, "\tbb.build.exec_func('" + calledvar + "', d)\n", d)
                else:
                    data.setVar(var, "\t" + calledvar + "\n", d)
                data.setVarFlag(var, 'export_func', '1', d)

        return

    m = __addtask_regexp__.match(s)
    if m:
        func = m.group("func")
        before = m.group("before")
        after = m.group("after")
        if func is None:
            return
        var = "do_" + func
        data.setVarFlag(var, "task", 1, d)

        if after is not None:
            # set up deps for function
            data.setVarFlag(var, "deps", after.split(), d)
        if before is not None:
            # set up things that depend on this func
            data.setVarFlag(var, "postdeps", before.split(), d)
        return

    m = __addhandler_regexp__.match(s)
    if m:
        fns = m.group(1)
        hs = __word__.findall(fns)
        for h in hs:
            data.setVarFlag(h, "handler", 1, d)
        return

    m = __inherit_regexp__.match(s)
    if m:
        files = m.group(1)
        n = __word__.findall(files)
        inherit(n, d)
        return

    from bb.parse import ConfHandler
    return ConfHandler.feeder(lineno, s, fn, d)
def _exec_task(fn, task, d, quieterr):
    """Execute a BB 'task'

    Execution of a task involves a bit more setup than executing a function,
    running it with its own local metadata, and with some useful variables set.
    """
    if not data.getVarFlag(task, 'task', d):
        event.fire(TaskInvalid(task, d), d)
        logger.error("No such task: %s" % task)
        return 1

    logger.debug(1, "Executing task %s", task)

    localdata = _task_data(fn, task, d)
    tempdir = localdata.getVar('T', True)
    if not tempdir:
        bb.fatal("T variable not set, unable to build")

    bb.utils.mkdirhier(tempdir)
    loglink = os.path.join(tempdir, 'log.{0}'.format(task))
    logfn = os.path.join(tempdir, 'log.{0}.{1}'.format(task, os.getpid()))
    if loglink:
        bb.utils.remove(loglink)

        try:
            os.symlink(logfn, loglink)
        except OSError:
            pass

    prefuncs = localdata.getVarFlag(task, 'prefuncs', expand=True)
    postfuncs = localdata.getVarFlag(task, 'postfuncs', expand=True)

    # Handle logfiles
    si = file('/dev/null', 'r')
    try:
        logfile = file(logfn, 'w')
    except OSError:
        logger.exception("Opening log file '%s'", logfn)
        pass

    # Dup the existing fds so we don't lose them
    osi = [os.dup(sys.stdin.fileno()), sys.stdin.fileno()]
    oso = [os.dup(sys.stdout.fileno()), sys.stdout.fileno()]
    ose = [os.dup(sys.stderr.fileno()), sys.stderr.fileno()]

    # Replace those fds with our own
    os.dup2(si.fileno(), osi[1])
    os.dup2(logfile.fileno(), oso[1])
    os.dup2(logfile.fileno(), ose[1])

    # Ensure python logging goes to the logfile
    handler = logging.StreamHandler(logfile)
    handler.setFormatter(logformatter)
    bblogger.addHandler(handler)

    localdata.setVar('BB_LOGFILE', logfn)

    event.fire(TaskStarted(task, localdata), localdata)
    try:
        for func in (prefuncs or '').split():
            exec_func(func, localdata)
        exec_func(task, localdata)
        for func in (postfuncs or '').split():
            exec_func(func, localdata)
    except FuncFailed as exc:
        if not quieterr:
            logger.error(str(exc))
            event.fire(TaskFailed(task, logfn, localdata), localdata)
        return 1
    finally:
        sys.stdout.flush()
        sys.stderr.flush()

        bblogger.removeHandler(handler)

        # Restore the backup fds
        os.dup2(osi[0], osi[1])
        os.dup2(oso[0], oso[1])
        os.dup2(ose[0], ose[1])

        # Close the backup fds
        os.close(osi[0])
        os.close(oso[0])
        os.close(ose[0])
        si.close()

        logfile.close()
        if os.path.exists(logfn) and os.path.getsize(logfn) == 0:
            logger.debug(2, "Zero size logfn %s, removing", logfn)
            bb.utils.remove(logfn)
            bb.utils.remove(loglink)

    event.fire(TaskSucceeded(task, localdata), localdata)

    if not localdata.getVarFlag(task, 'nostamp') and not localdata.getVarFlag(task, 'selfstamp'):
        make_stamp(task, localdata)

    return 0
            envdata = self.bb_cache.loadDataFull(fn, self.configuration.data)
        except IOError, e:
            bb.msg.fatal(bb.msg.domain.Parsing, "Unable to read %s: %s" % (fn, e))
        except Exception, e:
            bb.msg.fatal(bb.msg.domain.Parsing, "%s" % e)

        # emit variables and shell functions
        try:
            data.update_data( envdata )
            data.emit_env(sys.__stdout__, envdata, True)
        except Exception, e:
            bb.msg.fatal(bb.msg.domain.Parsing, "%s" % e)

        # emit the metadata which isn't valid shell
        data.expandKeys( envdata )
        for e in envdata.keys():
            if data.getVarFlag( e, 'python', envdata ):
                sys.__stdout__.write("\npython %s () {\n%s}\n" % (e, data.getVar(e, envdata, 1)))

    def generateDotGraph( self, pkgs_to_build, ignore_deps ):
        """
        Generate a task dependency graph.

        pkgs_to_build A list of packages that need to be built
        ignore_deps   A list of names where processing of dependencies
                      should be stopped. e.g. dependencies that get
        """

        for dep in ignore_deps:
            self.status.ignored_dependencies.add(dep)

        localdata = data.createCopy(self.configuration.data)
def _exec_task(fn, task, d):
    """Execute a BB 'task'

    Execution of a task involves a bit more setup than executing a function,
    running it with its own local metadata, and with some useful variables set.
    """
    if not data.getVarFlag(task, "task", d):
        event.fire(TaskInvalid(task, d), d)
        logger.error("No such task: %s" % task)
        return 1

    logger.debug(1, "Executing task %s", task)

    localdata = _task_data(fn, task, d)
    tempdir = localdata.getVar("T", True)
    if not tempdir:
        bb.fatal("T variable not set, unable to build")

    bb.utils.mkdirhier(tempdir)
    loglink = os.path.join(tempdir, "log.{0}".format(task))
    logfn = os.path.join(tempdir, "log.{0}.{1}".format(task, os.getpid()))
    if loglink:
        bb.utils.remove(loglink)

        try:
            os.symlink(logfn, loglink)
        except OSError:
            pass

    prefuncs = localdata.getVarFlag(task, "prefuncs", expand=True)
    postfuncs = localdata.getVarFlag(task, "postfuncs", expand=True)

    # Handle logfiles
    si = file("/dev/null", "r")
    try:
        logfile = file(logfn, "w")
    except OSError:
        logger.exception("Opening log file '%s'", logfn)
        pass

    # Dup the existing fds so we don't lose them
    osi = [os.dup(sys.stdin.fileno()), sys.stdin.fileno()]
    oso = [os.dup(sys.stdout.fileno()), sys.stdout.fileno()]
    ose = [os.dup(sys.stderr.fileno()), sys.stderr.fileno()]

    # Replace those fds with our own
    os.dup2(si.fileno(), osi[1])
    os.dup2(logfile.fileno(), oso[1])
    os.dup2(logfile.fileno(), ose[1])

    # Ensure python logging goes to the logfile
    handler = logging.StreamHandler(logfile)
    handler.setFormatter(logformatter)
    bblogger.addHandler(handler)

    localdata.setVar("BB_LOGFILE", logfn)

    event.fire(TaskStarted(task, localdata), localdata)
    try:
        for func in (prefuncs or "").split():
            exec_func(func, localdata)
        exec_func(task, localdata)
        for func in (postfuncs or "").split():
            exec_func(func, localdata)
    except FuncFailed as exc:
        logger.error(str(exc))
        event.fire(TaskFailed(task, logfn, localdata), localdata)
        return 1
    finally:
        sys.stdout.flush()
        sys.stderr.flush()

        bblogger.removeHandler(handler)

        # Restore the backup fds
        os.dup2(osi[0], osi[1])
        os.dup2(oso[0], oso[1])
        os.dup2(ose[0], ose[1])

        # Close the backup fds
        os.close(osi[0])
        os.close(oso[0])
        os.close(ose[0])
        si.close()

        logfile.close()
        if os.path.exists(logfn) and os.path.getsize(logfn) == 0:
            logger.debug(2, "Zero size logfn %s, removing", logfn)
            bb.utils.remove(logfn)
            bb.utils.remove(loglink)

    event.fire(TaskSucceeded(task, localdata), localdata)

    if not localdata.getVarFlag(task, "nostamp") and not localdata.getVarFlag(task, "selfstamp"):
        make_stamp(task, localdata)

    return 0
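# --- Illustrative sketch (not part of BitBake) ---------------------------
# Both _exec_task variants above write the task output to a pid-qualified
# log file and refresh a stable 'log.<task>' symlink so it always points at
# the most recent run. A minimal sketch of that naming convention in modern
# Python; the temp directory and task name are made up for the example.

import os

def open_task_log(tempdir, task):
    """Open 'log.<task>.<pid>' and repoint the 'log.<task>' symlink at it."""
    os.makedirs(tempdir, exist_ok=True)
    logfn = os.path.join(tempdir, "log.{0}.{1}".format(task, os.getpid()))
    loglink = os.path.join(tempdir, "log.{0}".format(task))
    # drop any stale link, then point it at this run's log file
    try:
        os.remove(loglink)
    except OSError:
        pass
    try:
        os.symlink(logfn, loglink)
    except OSError:
        pass
    return open(logfn, "w")

if __name__ == "__main__":
    log = open_task_log("/tmp/demo-tmp", "do_compile")
    log.write("task output goes here\n")
    log.close()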
def feeder(lineno, s, fn, root, d):
    global __func_start_regexp__, __inherit_regexp__, __export_func_regexp__, __addtask_regexp__, __addhandler_regexp__, __def_regexp__, __python_func_regexp__, __inpython__, __infunc__, __body__, classes, bb, __residue__
    if __infunc__:
        if s == '}':
            __body__.append('')
            data.setVar(__infunc__, '\n'.join(__body__), d)
            data.setVarFlag(__infunc__, "func", 1, d)
            if __infunc__ == "__anonymous":
                anonqueue = bb.data.getVar("__anonqueue", d) or []
                anonitem = {}
                anonitem["content"] = bb.data.getVar("__anonymous", d)
                anonitem["flags"] = bb.data.getVarFlags("__anonymous", d)
                anonqueue.append(anonitem)
                bb.data.setVar("__anonqueue", anonqueue, d)
                bb.data.delVarFlags("__anonymous", d)
                bb.data.delVar("__anonymous", d)
            __infunc__ = ""
            __body__ = []
        else:
            __body__.append(s)
        return

    if __inpython__:
        m = __python_func_regexp__.match(s)
        if m and lineno != IN_PYTHON_EOF:
            __body__.append(s)
            return
        else:
            # Note we will add root to parsedmethods after having parsed
            # 'this' file. This means we will not parse methods from
            # bb classes twice
            if not root in __parsed_methods__:
                text = '\n'.join(__body__)
                methodpool.insert_method( root, text, fn )
                funcs = data.getVar('__functions__', d) or {}
                if not funcs.has_key( root ):
                    funcs[root] = text
                else:
                    funcs[root] = "%s\n%s" % (funcs[root], text)

                data.setVar('__functions__', funcs, d)
            __body__ = []
            __inpython__ = False

            if lineno == IN_PYTHON_EOF:
                return

        # fall through

    if s == '' or s[0] == '#':
        return          # skip comments and empty lines

    if s[-1] == '\\':
        __residue__.append(s[:-1])
        return

    s = "".join(__residue__) + s
    __residue__ = []

    m = __func_start_regexp__.match(s)
    if m:
        __infunc__ = m.group("func") or "__anonymous"
        key = __infunc__
        if data.getVar(key, d):
            # clean up old version of this piece of metadata, as its
            # flags could cause problems
            data.setVarFlag(key, 'python', None, d)
            data.setVarFlag(key, 'fakeroot', None, d)
        if m.group("py") is not None:
            data.setVarFlag(key, "python", "1", d)
        else:
            data.delVarFlag(key, "python", d)
        if m.group("fr") is not None:
            data.setVarFlag(key, "fakeroot", "1", d)
        else:
            data.delVarFlag(key, "fakeroot", d)
        return

    m = __def_regexp__.match(s)
    if m:
        __body__.append(s)
        __inpython__ = True
        return

    m = __export_func_regexp__.match(s)
    if m:
        fns = m.group(1)
        n = __word__.findall(fns)
        for f in n:
            allvars = []
            allvars.append(f)
            allvars.append(classes[-1] + "_" + f)

            vars = [[ allvars[0], allvars[1] ]]
            if len(classes) > 1 and classes[-2] is not None:
                allvars.append(classes[-2] + "_" + f)
                vars = []
                vars.append([allvars[2], allvars[1]])
                vars.append([allvars[0], allvars[2]])

            for (var, calledvar) in vars:
                if data.getVar(var, d) and not data.getVarFlag(var, 'export_func', d):
                    continue

                if data.getVar(var, d):
                    data.setVarFlag(var, 'python', None, d)
                    data.setVarFlag(var, 'func', None, d)

                for flag in [ "func", "python" ]:
                    if data.getVarFlag(calledvar, flag, d):
                        data.setVarFlag(var, flag, data.getVarFlag(calledvar, flag, d), d)
                for flag in [ "dirs" ]:
                    if data.getVarFlag(var, flag, d):
                        data.setVarFlag(calledvar, flag, data.getVarFlag(var, flag, d), d)

                if data.getVarFlag(calledvar, "python", d):
                    data.setVar(var, "\tbb.build.exec_func('" + calledvar + "', d)\n", d)
                else:
                    data.setVar(var, "\t" + calledvar + "\n", d)
                data.setVarFlag(var, 'export_func', '1', d)

        return

    m = __addtask_regexp__.match(s)
    if m:
        func = m.group("func")
        before = m.group("before")
        after = m.group("after")
        if func is None:
            return
        var = "do_" + func

        data.setVarFlag(var, "task", 1, d)

        bbtasks = data.getVar('__BBTASKS', d) or []
        bbtasks.append(var)
        data.setVar('__BBTASKS', bbtasks, d)

        if after is not None:
            # set up deps for function
            data.setVarFlag(var, "deps", after.split(), d)
        if before is not None:
            # set up things that depend on this func
            data.setVarFlag(var, "postdeps", before.split(), d)
        return

    m = __addhandler_regexp__.match(s)
    if m:
        fns = m.group(1)
        hs = __word__.findall(fns)
        bbhands = data.getVar('__BBHANDLERS', d) or []
        for h in hs:
            bbhands.append(h)
            data.setVarFlag(h, "handler", 1, d)
        data.setVar('__BBHANDLERS', bbhands, d)
        return

    m = __inherit_regexp__.match(s)
    if m:
        files = m.group(1)
        n = __word__.findall(files)
        inherit(n, d)
        return

    from bb.parse import ConfHandler
    return ConfHandler.feeder(lineno, s, fn, d)
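# --- Illustrative sketch (not part of BitBake) ---------------------------
# The addtask branch of feeder() above is what turns a recipe's 'addtask'
# line into the 'task', 'deps' and 'postdeps' varflags that exec_task later
# relies on. The regular expression below is an illustration only (the real
# __addtask_regexp__ is defined elsewhere in BBHandler and is more
# permissive); this sketch also handles only the 'before ... after ...'
# ordering of the clauses.

import re

addtask_re = re.compile(
    r"addtask\s+(?P<func>\w+)"
    r"(?:\s+before\s+(?P<before>[^#\n]*?))?"
    r"(?:\s+after\s+(?P<after>[^#\n]*?))?\s*$")

def parse_addtask(line):
    """Map an 'addtask' line onto the flags feeder() would set."""
    m = addtask_re.match(line)
    if not m:
        return None
    var = "do_" + m.group("func")
    flags = {"task": 1}
    if m.group("after"):
        flags["deps"] = m.group("after").split()       # tasks this one runs after
    if m.group("before"):
        flags["postdeps"] = m.group("before").split()  # tasks that must run after this one
    return var, flags

if __name__ == "__main__":
    print(parse_addtask("addtask compile before do_install after do_configure"))
    # ('do_compile', {'task': 1, 'deps': ['do_configure'], 'postdeps': ['do_install']})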