def better_exec(code, context, text, realfile):
    """
    Similar to better_compile: execute *code* in *context* and, on failure,
    print the source lines responsible for the error before re-raising.
    """
    import bb,sys
    try:
        # Python 2 exec statement: run the compiled code object in 'context'
        exec code in context
    except:
        (t,value,tb) = sys.exc_info()
        # These two exceptions are control flow, not real errors - re-raise
        # without reporting
        if t in [bb.parse.SkipPackage, bb.build.FuncFailed]:
            raise
        # print the Header of the Error Message
        bb.error("Error in executing: ", realfile)
        bb.error("Exception:%s Message:%s" % (t,value) )
        # let us find the line number now - walk to the innermost frame
        while tb.tb_next:
            tb = tb.tb_next
        import traceback
        line = traceback.tb_lineno(tb)
        # show the offending lines of the original function text
        _print_trace( text.split('\n'), line )
        raise
def _ensure_npm():
    """Make sure an 'npm' binary is staged, building nodejs-native if needed.

    Returns the native staging bindir that contains npm; exits with code 14
    when nodejs-native cannot be provided or staged.
    """
    if not TINFOIL.recipes_parsed:
        TINFOIL.parse_recipes()
    try:
        recipe_data = TINFOIL.parse_recipe("nodejs-native")
    except bb.providers.NoProvider:
        bb.error("Nothing provides 'nodejs-native' which is required for the build")
        bb.note("You will likely need to add a layer that provides nodejs")
        sys.exit(14)
    bindir = recipe_data.getVar("STAGING_BINDIR_NATIVE")
    npmpath = os.path.join(bindir, "npm")
    if os.path.exists(npmpath):
        return bindir
    # npm missing from the sysroot - stage it, then re-check
    TINFOIL.build_targets("nodejs-native", "addto_recipe_sysroot")
    if not os.path.exists(npmpath):
        bb.error("Failed to add 'npm' to sysroot")
        sys.exit(14)
    return bindir
def __init__(self, cookercfg, worker=False):
    """Build the base datastore from the cooker configuration.

    cookercfg supplies pre/post conf files, variable-tracking flag and the
    launch-time environment; worker=True marks a worker-process context.
    """
    self.prefiles = cookercfg.prefile
    self.postfiles = cookercfg.postfile
    self.tracking = cookercfg.tracking

    bb.utils.set_context(bb.utils.clean_context())
    bb.event.set_class_handlers(bb.event.clean_class_handlers())
    self.basedata = bb.data.init()
    if self.tracking:
        self.basedata.enableTracking()

    # Keep a datastore of the initial environment variables and their
    # values from when BitBake was launched to enable child processes
    # to use environment variables which have been cleaned from the
    # BitBake processes env
    self.savedenv = bb.data.init()
    for k in cookercfg.env:
        self.savedenv.setVar(k, cookercfg.env[k])
        if k in bb.data_smart.bitbake_renamed_vars:
            bb.error('Shell environment variable %s has been renamed to %s' % (k, bb.data_smart.bitbake_renamed_vars[k]))
            # Typo fix in the user-facing message: "enviroment" -> "environment"
            bb.fatal("Exiting to allow environment variables to be corrected")

    filtered_keys = bb.utils.approved_variables()
    bb.data.inheritFromOS(self.basedata, self.savedenv, filtered_keys)
    self.basedata.setVar("BB_ORIGENV", self.savedenv)

    if worker:
        self.basedata.setVar("BB_WORKERCONTEXT", "1")

    self.data = self.basedata
    self.mcdata = {}
def parseCommandLine(self):
    """Translate the parsed command-line options into a commandlineAction.

    Sets self.commandlineAction to [command, args...] on success, or to
    None (after reporting an error) when the options are inconsistent or
    there is nothing to do.
    """
    cfg = self.configuration
    if cfg.show_environment:
        # --environment takes at most one explicit target and cannot be
        # combined with 'world' or with --buildfile plus targets
        self.commandlineAction = None
        if 'world' in cfg.pkgs_to_build:
            bb.error("'world' is not a valid target for --environment.")
        elif len(cfg.pkgs_to_build) > 1:
            bb.error("Only one target can be used with the --environment option.")
        elif cfg.buildfile and len(cfg.pkgs_to_build) > 0:
            bb.error("No target should be used with the --environment and --buildfile options.")
        else:
            self.commandlineAction = ["showEnvironment", cfg.buildfile, cfg.pkgs_to_build]
    elif cfg.buildfile is not None:
        self.commandlineAction = ["buildFile", cfg.buildfile, cfg.cmd]
    elif cfg.show_versions:
        self.commandlineAction = ["showVersions"]
    elif cfg.parse_only:
        self.commandlineAction = ["parseFiles"]
    # FIXME - implement
    #elif cfg.interactive:
    #    self.interactiveMode()
    elif cfg.dot_graph:
        if cfg.pkgs_to_build:
            self.commandlineAction = ["generateDotGraph", cfg.pkgs_to_build, cfg.cmd]
        else:
            self.commandlineAction = None
            bb.error("Please specify a package name for dependency graph generation.")
    elif cfg.pkgs_to_build:
        self.commandlineAction = ["buildTargets", cfg.pkgs_to_build, cfg.cmd]
    else:
        self.commandlineAction = None
        bb.error("Nothing to do. Use 'bitbake world' to build everything, or run 'bitbake --help' for usage information.")
def arch_split(arch):
    """Split an arch triplet 'cpu-vendor-os' into its three components.

    Returns a 3-element list, or None (after reporting an error) when the
    string does not contain at least three dash-separated fields.
    """
    parts = arch.split('-', 2)
    if len(parts) != 3:
        bb.error('invalid arch string: '+arch)
        return None
    return parts
def sign_rpms(self, files, keyid, passphrase_file):
    """Sign RPM files"""
    import pexpect

    # Build the rpm --addsign command line, overriding key name, gpg binary
    # and gpg homedir as configured
    cmd = self.rpm_bin + " --addsign --define '_gpg_name %s' " % keyid
    if self.gpg_bin:
        # '%%' so the literal '%__gpg' macro name survives the formatting
        cmd += "--define '%%__gpg %s' " % self.gpg_bin
    if self.gpg_path:
        cmd += "--define '_gpg_path %s' " % self.gpg_path
    cmd += ' '.join(files)

    # Need to use pexpect for feeding the passphrase
    proc = pexpect.spawn(cmd)
    try:
        proc.expect_exact('Enter pass phrase:', timeout=15)
        with open(passphrase_file) as fobj:
            proc.sendline(fobj.readline().rstrip('\n'))
        proc.expect(pexpect.EOF, timeout=900)
        proc.close()
    except pexpect.TIMEOUT as err:
        bb.error('rpmsign timeout: %s' % err)
        proc.terminate()
    # NOTE(review): WEXITSTATUS is evaluated before the WIFEXITED guard and
    # also runs after a timeout/terminate - relies on pexpect's status
    # encoding; verify on a non-exited process.
    if os.WEXITSTATUS(proc.status) or not os.WIFEXITED(proc.status):
        bb.error('rpmsign failed: %s' % proc.before.strip())
        raise bb.build.FuncFailed("Failed to sign RPM packages")
def sanity(d):
    """Validate that SDK and MACHINE configuration variables are set.

    Reports every missing variable via bb.error, then aborts with bb.fatal
    if any check failed.
    """
    import bb
    fail = False

    # Both SDK variables are mandatory; report each one independently
    for var in ("SDK_CPU", "SDK_OS"):
        if not d.get(var):
            bb.error("%s not set" % var)
            fail = True

    machine = d.get("MACHINE")
    machine_cpu = d.get("MACHINE_CPU")
    machine_os = d.get("MACHINE_OS")
    if not machine:
        # Without MACHINE, both MACHINE_CPU and MACHINE_OS must be given
        if machine_cpu and machine_os:
            pass
        elif machine_cpu:
            bb.error("MACHINE_CPU set, but not MACHINE_OS")
            fail = True
        elif machine_os:
            bb.error("MACHINE_OS set, but not MACHINE_CPU")
            fail = True
        else:
            bb.error("MACHINE or MACHINE_CPU and MACHINE_OS must be set")
            fail = True

    if fail:
        bb.fatal("Invalid MACHINE and/or SDK specification\n"
                 "Check your conf/local.conf file and/or machine and distro config files.")
    return
def lockBitbake():
    """Locate the build topdir and take the global bitbake.lock.

    Returns (topdir, lockfile handle); raises BBMainFatal when no build
    directory can be found.
    """
    topdir = bb.cookerdata.findTopdir()
    if not topdir:
        # Typo fix in the user-facing message: "BBAPTH" -> "BBPATH"
        bb.error("Unable to find conf/bblayers.conf or conf/bitbake.conf. BBPATH is unset and/or not in a build directory?")
        raise BBMainFatal
    lockfile = topdir + "/bitbake.lock"
    return topdir, bb.utils.lockfile(lockfile, False, False)
def sign_rpms(self, files):
    """Sign RPM files"""
    import pexpect

    # rpm --addsign command using the key/binary/homedir configured on self
    cmd = self.rpm_bin + " --addsign --define '_gpg_name %s' " % self.keyid
    if self.gpg_bin:
        # '%%' keeps the literal '%__gpg' macro name after formatting
        cmd += "--define '%%__gpg %s' " % self.gpg_bin
    if self.gpg_path:
        cmd += "--define '_gpg_path %s' " % self.gpg_path
    cmd += ' '.join(files)

    # Need to use pexpect for feeding the passphrase
    proc = pexpect.spawn(cmd)
    try:
        proc.expect_exact('Enter pass phrase:', timeout=15)
        with open(self.passphrase_file) as fobj:
            proc.sendline(fobj.readline().rstrip('\n'))
        proc.expect(pexpect.EOF, timeout=900)
        proc.close()
    except pexpect.TIMEOUT as err:
        bb.error('rpmsign timeout: %s' % err)
        proc.terminate()
    # NOTE(review): exit status is checked even after a timeout/terminate,
    # and WEXITSTATUS runs before the WIFEXITED guard - verify status
    # encoding for non-exited processes.
    if os.WEXITSTATUS(proc.status) or not os.WIFEXITED(proc.status):
        bb.error('rpmsign failed: %s' % proc.before.strip())
        raise bb.build.FuncFailed("Failed to sign RPM packages")
def start(self, params=None, ssh=True, extra_bootparams='', runqemuparams='', launch_cmd='', discard_writes=True):
    """Boot the target via the runner and optionally open an SSH connection.

    Raises bb.build.FuncFailed (after dumping the qemu log, if present)
    when the target fails to come up.
    """
    if launch_cmd:
        started = self.runner.launch(get_ip=ssh, launch_cmd=launch_cmd, qemuparams=params)
    else:
        started = self.runner.start(params, get_ip=ssh, extra_bootparams=extra_bootparams,
                                    runqemuparams=runqemuparams, discard_writes=discard_writes)
    if not started:
        self.stop()
        if os.path.exists(self.qemulog):
            with open(self.qemulog, 'r') as f:
                bb.error("Qemu log output from %s:\n%s" % (self.qemulog, f.read()))
        raise bb.build.FuncFailed("%s - FAILED to start qemu - check the task log and the boot log" % self.pn)
    if ssh:
        self.ip = self.runner.ip
        self.server_ip = self.runner.server_ip
        self.connection = SSHControl(ip=self.ip, logfile=self.sshlog)
def filterProviders(providers, item, cfgData, dataCache, build_cache_fail = {}):
    """
    Take a list of providers and filter/reorder according to the
    environment variables and previous build results
    """
    eligible = []
    preferred_versions = {}

    # Collate providers by PN
    pkg_pn = {}
    for p in providers:
        pn = dataCache.pkg_fn[p]
        if pn not in pkg_pn:
            pkg_pn[pn] = []
        pkg_pn[pn].append(p)

    bb.msg.debug(1, bb.msg.domain.Provider, "providers for %s are: %s" % (item, pkg_pn.keys()))

    for pn in pkg_pn.keys():
        preferred_versions[pn] = bb.providers.findBestProvider(pn, cfgData, dataCache, pkg_pn)[2:4]
        eligible.append(preferred_versions[pn][1])

    # Bug fix: the original removed entries from 'eligible' while iterating
    # over it, which silently skips the element following each removal.
    # Build a filtered list instead.
    filtered = []
    for p in eligible:
        if p in build_cache_fail:
            bb.msg.debug(1, bb.msg.domain.Provider, "rejecting already-failed %s" % p)
        else:
            filtered.append(p)
    eligible = filtered

    if len(eligible) == 0:
        bb.error("no eligible providers for %s" % item)
        # Kept as 0 (falsy) for backward compatibility with existing callers
        return 0

    # look to see if one of them is already staged, or marked as preferred.
    # if so, bump it to the head of the queue
    for p in providers:
        pn = dataCache.pkg_fn[p]
        pv, pr = dataCache.pkg_pvpr[p]
        stamp = '%s.do_populate_staging' % dataCache.stamp[p]
        if os.path.exists(stamp):
            (newvers, fn) = preferred_versions[pn]
            if not fn in eligible:
                # package was made ineligible by already-failed check
                continue
            oldver = "%s-%s" % (pv, pr)
            newver = '-'.join(newvers)
            if (newver != oldver):
                extra_chat = "%s (%s) already staged but upgrading to %s to satisfy %s" % (pn, oldver, newver, item)
            else:
                extra_chat = "Selecting already-staged %s (%s) to satisfy %s" % (pn, oldver, item)
            bb.msg.note(2, bb.msg.domain.Provider, "%s" % extra_chat)
            # Move the staged provider to the head of the queue
            eligible.remove(fn)
            eligible = [fn] + eligible
            break

    return eligible
def detach_sign(self, input_file, keyid, passphrase_file, passphrase=None, armor=True):
    """Create a detached signature of a file.

    Exactly one of passphrase_file/passphrase may be supplied. Raises on
    any gpg failure.
    """
    import subprocess

    if passphrase_file and passphrase:
        # Typo fix in the message: "of" -> "or"
        raise Exception("You should use either passphrase_file or passphrase, not both")

    cmd = [self.gpg_bin, '--detach-sign', '--batch', '--no-tty', '--yes',
           '--passphrase-fd', '0', '-u', keyid]
    if self.gpg_path:
        cmd += ['--homedir', self.gpg_path]
    if armor:
        cmd += ['--armor']

    #gpg > 2.1 supports password pipes only through the loopback interface
    #gpg < 2.1 errors out if given unknown parameters
    if self.gpg_version > (2, 1,):
        cmd += ['--pinentry-mode', 'loopback']

    cmd += [input_file]

    try:
        if passphrase_file:
            with open(passphrase_file) as fobj:
                passphrase = fobj.readline()

        # Feed the passphrase on stdin (fd 0, as told to gpg above)
        job = subprocess.Popen(cmd, stdin=subprocess.PIPE, stderr=subprocess.PIPE)
        (_, stderr) = job.communicate(passphrase.encode("utf-8"))

        if job.returncode:
            raise bb.build.FuncFailed("GPG exited with code %d: %s" % (job.returncode, stderr.decode("utf-8")))
    except IOError as e:
        bb.error("IO error (%s): %s" % (e.errno, e.strerror))
        raise Exception("Failed to sign '%s'" % input_file)
    except OSError as e:
        bb.error("OS error (%s): %s" % (e.errno, e.strerror))
        # Typo fix: add the missing closing quote around the filename
        raise Exception("Failed to sign '%s'" % input_file)
def power_cycle(self, conn):
    """Reboot the target: soft shutdown plus hardware power cycle when a
    power control command is configured, otherwise an in-band reboot."""
    if not self.powercontrol_cmd:
        # No power control - ask the running system to reboot itself
        status, output = conn.run("sync; { sleep 1; reboot; } > /dev/null &")
        if status != 0:
            bb.error("Failed rebooting target and no power control command defined. You need to manually reset the device.\n%s" % output)
        return
    # be nice, don't just cut power
    conn.run("shutdown -h now")
    time.sleep(10)
    self.power_ctl("cycle")
def power_cycle(self, conn):
    """Reboot the target, preferring a clean shutdown followed by a hardware
    power cycle when power control is available."""
    if not self.powercontrol_cmd:
        # Fall back to an in-band reboot over the existing connection
        status, output = conn.run("reboot")
        if status != 0:
            bb.error("Failed rebooting target and no power control command defined. You need to manually reset the device.\n%s" % output)
        return
    # be nice, don't just cut power
    conn.run("shutdown -h now")
    time.sleep(10)
    self.power_ctl("cycle")
def parseCommandLine(self):
    """Map the parsed command-line options onto a commandlineAction list.

    On conflicting or missing options, self.commandlineAction is set to
    None and an error is reported via bb.error.
    """
    c = self.configuration
    if c.show_environment:
        self.commandlineAction = None
        # --environment: reject 'world', multiple targets, or a target
        # combined with --buildfile
        if 'world' in c.pkgs_to_build:
            bb.error("'world' is not a valid target for --environment.")
        elif len(c.pkgs_to_build) > 1:
            bb.error("Only one target can be used with the --environment option.")
        elif c.buildfile and len(c.pkgs_to_build) > 0:
            bb.error("No target should be used with the --environment and --buildfile options.")
        else:
            self.commandlineAction = ["showEnvironment", c.buildfile, c.pkgs_to_build]
        return
    if c.buildfile is not None:
        self.commandlineAction = ["buildFile", c.buildfile, c.cmd]
        return
    if c.show_versions:
        self.commandlineAction = ["showVersions"]
        return
    if c.parse_only:
        self.commandlineAction = ["parseFiles"]
        return
    # FIXME - implement
    #if c.interactive:
    #    self.interactiveMode()
    if c.dot_graph:
        if c.pkgs_to_build:
            self.commandlineAction = ["generateDotGraph", c.pkgs_to_build, c.cmd]
        else:
            self.commandlineAction = None
            bb.error("Please specify a package name for dependency graph generation.")
        return
    if c.pkgs_to_build:
        self.commandlineAction = ["buildTargets", c.pkgs_to_build, c.cmd]
    else:
        self.commandlineAction = None
        bb.error("Nothing to do. Use 'bitbake world' to build everything, or run 'bitbake --help' for usage information.")
def start(self, params=None):
    """Start the target via the runner and open an SSH connection; raise
    bb.build.FuncFailed (after dumping any qemu log) on failure."""
    if not self.runner.start(params):
        self.stop()
        if os.path.exists(self.qemulog):
            with open(self.qemulog, 'r') as f:
                bb.error("Qemu log output from %s:\n%s" % (self.qemulog, f.read()))
        raise bb.build.FuncFailed("%s - FAILED to start qemu - check the task log and the boot log" % self.pn)
    self.ip = self.runner.ip
    self.server_ip = self.runner.server_ip
    self.connection = SSHControl(ip=self.ip, logfile=self.sshlog)
def _print_trace(body, line):
    """Print a window of up to four lines of *body* (a list of source
    lines) either side of the 1-based line number *line*, via bb.error."""
    import bb

    # print the environment of the method
    bb.error("Printing the environment of the function")
    min_line = max(1, line - 4)
    # Bug fix: 'i' is 1-based (we index body[i-1]), so the last printable
    # value is len(body), not len(body)-1 - the old cap always dropped the
    # final line of the body.
    max_line = min(line + 4, len(body))
    for i in range(min_line, max_line + 1):
        bb.error("\t%.4d:%s" % (i, body[i - 1]))
def exec_func_shell(func, d):
    """Execute a shell BB 'function'.

    Returns true if execution was successful. For this, it creates a bash
    shell script in the tmp directory, writes the local data into it and
    finally executes. The output of the shell will end in a log file and
    stdout.

    Note on directory behavior: the 'dirs' varflag should contain a list
    of the directories you need created prior to execution. The last item
    in the list is where we will chdir/cd to.
    """
    import sys

    deps = data.getVarFlag(func, 'deps', d)
    check = data.getVarFlag(func, 'check', d)
    interact = data.getVarFlag(func, 'interactive', d)
    # A 'check' varflag names a global predicate; if it reports the function
    # is already satisfied, skip execution entirely
    if check in globals():
        if globals()[check](func, deps):
            return

    global logfile
    t = data.getVar('T', d, 1)
    if not t:
        return 0
    mkdirhier(t)
    # Per-invocation log and run scripts, keyed by function name and pid
    logfile = "%s/log.%s.%s" % (t, func, str(os.getpid()))
    runfile = "%s/run.%s.%s" % (t, func, str(os.getpid()))

    f = open(runfile, "w")
    f.write("#!/bin/sh -e\n")
    if bb.msg.debug_level > 0: f.write("set -x\n")
    # Emit the datastore as shell variable assignments into the script
    data.emit_env(f, d)

    f.write("cd %s\n" % os.getcwd())
    if func: f.write("%s\n" % func)
    f.close()
    # NOTE: Python 2 octal literal (0775) - this block is Python 2 code
    os.chmod(runfile, 0775)
    if not func:
        error("Function not specified")
        raise FuncFailed()

    # open logs
    si = file('/dev/null', 'r')
    try:
        if bb.msg.debug_level > 0:
            # tee so output goes to both the log file and stdout
            so = os.popen("tee \"%s\"" % logfile, "w")
        else:
            so = file(logfile, 'w')
    except OSError, e:
        bb.error("opening log file: %s" % e)
        # NOTE(review): the function appears to continue beyond this point
        # in the original source (redirection/execution of the run file);
        # only this prefix is visible here.
        pass
def start(self, qemuparams=None, ssh=True, extra_bootparams=None, runqemuparams='', discard_writes=True):
    """Launch a qemu-system-i386 instance for the configured kernel/rootfs.

    Returns True when the qemu process comes up within self.runqemutime
    seconds, False otherwise. Requires a usable X DISPLAY.

    NOTE(review): qemuparams/ssh/extra_bootparams/runqemuparams/
    discard_writes are accepted but unused here - presumably kept for
    interface compatibility with other runner implementations; confirm.
    """
    if self.display:
        os.environ["DISPLAY"] = self.display
    else:
        bb.error("To start qemu I need a X desktop, please set DISPLAY correctly (e.g. DISPLAY=:1)")
        return False
    if not os.path.exists(self.rootfs):
        bb.error("Invalid rootfs %s" % self.rootfs)
        return False
    if not os.path.exists(self.tmpdir):
        bb.error("Invalid TMPDIR path %s" % self.tmpdir)
        return False
    else:
        os.environ["OE_TMPDIR"] = self.tmpdir
    if not os.path.exists(self.deploy_dir_image):
        bb.error("Invalid DEPLOY_DIR_IMAGE path %s" % self.deploy_dir_image)
        return False
    else:
        os.environ["DEPLOY_DIR_IMAGE"] = self.deploy_dir_image

    # Set this flag so that Qemu doesn't do any grabs as SDL grabs interact
    # badly with screensavers.
    os.environ["QEMU_DONT_GRAB"] = "1"

    # Route the serial console to a unix socket so we can attach to it later
    self.qemuparams = '--append "root=/dev/ram0 console=ttyS0" -nographic -serial unix:%s,server,nowait' % self.socketfile

    launch_cmd = 'qemu-system-i386 -kernel %s -initrd %s %s' % (self.kernel, self.rootfs, self.qemuparams)
    # setpgrp: run qemu in its own process group so it can be killed cleanly
    self.runqemu = subprocess.Popen(launch_cmd, shell=True, stdout=subprocess.PIPE, stderr=subprocess.STDOUT, preexec_fn=os.setpgrp)

    bb.note("runqemu started, pid is %s" % self.runqemu.pid)
    bb.note("waiting at most %s seconds for qemu pid" % self.runqemutime)
    # Poll once per second until the qemu pid appears or we time out
    endtime = time.time() + self.runqemutime
    while not self.is_alive() and time.time() < endtime:
        time.sleep(1)

    if self.is_alive():
        bb.note("qemu started - qemu procces pid is %s" % self.qemupid)
        self.create_socket()
    else:
        bb.note("Qemu pid didn't appeared in %s seconds" % self.runqemutime)
        output = self.runqemu.stdout
        self.stop()
        bb.note("Output from runqemu:\n%s" % output.read().decode("utf-8"))
        return False

    return self.is_alive()
def expand(s, d = _data, varname = None):
    """Variable expansion using the data store.

    Example:
        Standard expansion:
        >>> setVar('A', 'sshd')
        >>> print expand('/usr/bin/${A}')
        /usr/bin/sshd

        Python expansion:
        >>> print expand('result: ${@37 * 72}')
        result: 2664
    """
    def var_sub(match):
        # ${KEY} -> value of KEY; a variable may not reference itself
        key = match.group()[2:-1]
        if varname and key:
            if varname == key:
                raise Exception("variable %s references itself!" % varname)
        var = getVar(key, d, 1)
        if var is not None:
            return var
        else:
            # Unset variable: leave the literal reference in place
            return match.group()

    def python_sub(match):
        # ${@expr} -> eval(expr), intended to expose the datastore as 'd'
        import bb
        code = match.group()[3:-1]
        # NOTE(review): assigning into locals() does not reliably inject
        # 'd' into eval()'s scope - verify expressions using 'd' work.
        locals()['d'] = d
        s = eval(code)
        if type(s) == types.IntType: s = str(s)
        return s

    if type(s) is not types.StringType: # sanity check
        return s

    # Repeat until no '$' remains or expansion reaches a fixed point
    while s.find('$') != -1:
        olds = s
        try:
            s = __expand_var_regexp__.sub(var_sub, s)
            s = __expand_python_regexp__.sub(python_sub, s)
            if s == olds: break
            if type(s) is not types.StringType: # sanity check
                import bb
                bb.error('expansion of %s returned non-string %s' % (olds, s))
        except KeyboardInterrupt:
            raise
        except:
            note("%s:%s while evaluating:\n%s" % (sys.exc_info()[0], sys.exc_info()[1], s))
            raise
    return s
def arch_find_script(d, filename):
    """Locate *filename* under the 'scripts' subdirectory of each OEPATH
    entry, caching results in the module-global 'arch_scripts' dict."""
    try:
        scripts = globals()['arch_scripts']
    except KeyError:
        scripts = {}
        globals()['arch_scripts'] = scripts
    if filename not in scripts:
        # First lookup for this name: walk OEPATH entries until found
        for oepath in d.get('OEPATH', 1).split(':'):
            candidate = os.path.join(oepath, 'scripts', filename)
            if os.path.isfile(candidate):
                #bb.debug("found %s: %s"%(filename, candidate))
                scripts[filename] = candidate
                break
        if filename not in scripts:
            bb.error('could not find script: %s'%filename)
    return scripts[filename]
def start(self, params=None, ssh=True, extra_bootparams='', runqemuparams='', launch_cmd='', discard_writes=True):
    """Bring up the target through the runner, then connect over SSH when
    requested. Raises bb.build.FuncFailed on boot failure, dumping the
    qemu log first when one exists."""
    if launch_cmd:
        booted = self.runner.launch(get_ip=ssh, launch_cmd=launch_cmd)
    else:
        booted = self.runner.start(params, get_ip=ssh, extra_bootparams=extra_bootparams,
                                   runqemuparams=runqemuparams, discard_writes=discard_writes)
    if not booted:
        self.stop()
        if os.path.exists(self.qemulog):
            with open(self.qemulog, 'r') as f:
                bb.error("Qemu log output from %s:\n%s" % (self.qemulog, f.read()))
        raise bb.build.FuncFailed("%s - FAILED to start qemu - check the task log and the boot log" % self.pn)
    if ssh:
        self.ip = self.runner.ip
        self.server_ip = self.runner.server_ip
        self.connection = SSHControl(ip=self.ip, logfile=self.sshlog)
def insert_method(modulename, code, fn):
    """
    Compile and execute *code* as module *modulename*, then record which
    global function names it defines. No further checking is done.
    """
    comp = better_compile(code, modulename, fn)
    better_exec(comp, None, code, fn)

    # now some instrumentation
    code = comp.co_names
    for name in code:
        if name in ['None', 'False']:
            continue
        elif name in _parsed_fns and not _parsed_fns[name] == modulename:
            # Consistency fix: use the same clear diagnostic as the other
            # insert_method variants instead of the garbled
            # "Error Method already seen: ... in' ...'" message.
            error("The function %s defined in %s was already declared in %s. BitBake has a global python function namespace so shared functions should be declared in a common include file rather than being duplicated, or if the functions are different, please use different function names." % (name, modulename, _parsed_fns[name]))
        else:
            _parsed_fns[name] = modulename
def insert_method(modulename, code, fn):
    """
    Compile and execute *code* as module *modulename*, then register the
    global names it defines, warning about cross-module duplicates. No
    further checking is done.
    """
    comp = better_compile(code, modulename, fn)
    better_exec(comp, None, code, fn)

    # now some instrumentation
    for name in comp.co_names:
        if name in ('None', 'False'):
            continue
        previous = _parsed_fns.get(name)
        if previous is not None and previous != modulename:
            error("The function %s defined in %s was already declared in %s. BitBake has a global python function namespace so shared functions should be declared in a common include file rather than being duplicated, or if the functions are different, please use different function names." % (name, modulename, previous))
        else:
            _parsed_fns[name] = modulename
def insert_method(modulename, code, fn):
    """
    Compile *code* (under the '<bb>' filename) and execute it against the
    builtin namespace, then record the function names defined per module.
    No further checking is done.
    """
    comp = better_compile(code, "<bb>", fn)
    better_exec(comp, __builtins__, code, fn)

    # now some instrumentation
    for name in comp.co_names:
        if name in ('None', 'False'):
            continue
        if name in _parsed_fns and not _parsed_fns[name] == modulename:
            error("Error Method already seen: %s in' %s' now in '%s'" % (name, _parsed_fns[name], modulename))
        else:
            _parsed_fns[name] = modulename
def generate_locale_archive(d, rootfs, target_arch, localedir):
    """Build a glibc locale archive inside *localedir* using
    cross-localedef, with uint32 alignment/endianness options selected for
    *target_arch*. Aborts via bb.fatal for unknown architectures."""
    # Pretty sure we don't need this for locale archive generation but
    # keeping it to be safe...
    locale_arch_options = { \
        "arc": ["--uint32-align=4", "--little-endian"],
        "arceb": ["--uint32-align=4", "--big-endian"],
        "arm": ["--uint32-align=4", "--little-endian"],
        "armeb": ["--uint32-align=4", "--big-endian"],
        "aarch64": ["--uint32-align=4", "--little-endian"],
        "aarch64_be": ["--uint32-align=4", "--big-endian"],
        "sh4": ["--uint32-align=4", "--big-endian"],
        "powerpc": ["--uint32-align=4", "--big-endian"],
        "powerpc64": ["--uint32-align=4", "--big-endian"],
        "powerpc64le": ["--uint32-align=4", "--little-endian"],
        "mips": ["--uint32-align=4", "--big-endian"],
        "mipsisa32r6": ["--uint32-align=4", "--big-endian"],
        "mips64": ["--uint32-align=4", "--big-endian"],
        "mipsisa64r6": ["--uint32-align=4", "--big-endian"],
        "mipsel": ["--uint32-align=4", "--little-endian"],
        "mipsisa32r6el": ["--uint32-align=4", "--little-endian"],
        "mips64el": ["--uint32-align=4", "--little-endian"],
        "mipsisa64r6el": ["--uint32-align=4", "--little-endian"],
        "riscv64": ["--uint32-align=4", "--little-endian"],
        "riscv32": ["--uint32-align=4", "--little-endian"],
        "i586": ["--uint32-align=4", "--little-endian"],
        "i686": ["--uint32-align=4", "--little-endian"],
        "x86_64": ["--uint32-align=4", "--little-endian"]
    }
    if target_arch in locale_arch_options:
        arch_options = locale_arch_options[target_arch]
    else:
        bb.error("locale_arch_options not found for target_arch=" + target_arch)
        bb.fatal("unknown arch:" + target_arch + " for locale_arch_options")

    # Need to set this so cross-localedef knows where the archive is
    env = dict(os.environ)
    env["LOCALEARCHIVE"] = oe.path.join(localedir, "locale-archive")

    # Add every locale directory found under localedir into the archive,
    # sorted for deterministic archive contents
    for name in sorted(os.listdir(localedir)):
        path = os.path.join(localedir, name)
        if os.path.isdir(path):
            cmd = ["cross-localedef", "--verbose"]
            cmd += arch_options
            cmd += ["--add-to-archive", path]
            subprocess.check_output(cmd, env=env,
                                    stderr=subprocess.STDOUT)
def prserv_dump_db(d):
    """Export the remote PR service database, honouring the
    PRSERV_DUMPOPT_* filter variables. Returns the dump, or None when the
    service is disabled or unreachable."""
    if d.getVar('USE_PR_SERV', True) != "1":
        bb.error("Not using network based PR service")
        return None

    conn = d.getVar("__PRSERV_CONN", True)
    if conn is None:
        conn = prserv_make_conn(d)
        if conn is None:
            bb.error("Making connection failed to remote PR service")
            return None

    #dump db
    opt_version = d.getVar('PRSERV_DUMPOPT_VERSION', True)
    opt_pkgarch = d.getVar('PRSERV_DUMPOPT_PKGARCH', True)
    opt_checksum = d.getVar('PRSERV_DUMPOPT_CHECKSUM', True)
    opt_col = d.getVar('PRSERV_DUMPOPT_COL', True) == "1"
    return conn.export(opt_version, opt_pkgarch, opt_checksum, opt_col)
def detach_sign(self, input_file, keyid, passphrase_file, passphrase=None, armor=True):
    """Create a detached signature of a file.

    Exactly one of passphrase_file/passphrase may be supplied. Raises on
    any gpg failure.
    """
    import subprocess

    if passphrase_file and passphrase:
        # Typo fix in the message: "of" -> "or"
        raise Exception("You should use either passphrase_file or passphrase, not both")

    cmd = [self.gpg_bin, '--detach-sign', '--batch', '--no-tty', '--yes',
           '--passphrase-fd', '0', '-u', keyid]
    if self.gpg_path:
        cmd += ['--homedir', self.gpg_path]
    if armor:
        cmd += ['--armor']

    #gpg > 2.1 supports password pipes only through the loopback interface
    #gpg < 2.1 errors out if given unknown parameters
    dots = self.get_gpg_version().split('.')
    assert len(dots) >= 2
    # Bug fix: comparing major and minor independently misclassified e.g.
    # gpg 3.0 (minor 0 fails ">= 1"). Compare as a version tuple instead.
    if (int(dots[0]), int(dots[1])) >= (2, 1):
        cmd += ['--pinentry-mode', 'loopback']

    cmd += [input_file]

    try:
        if passphrase_file:
            with open(passphrase_file) as fobj:
                passphrase = fobj.readline()

        # Feed the passphrase on stdin (fd 0, as told to gpg above)
        job = subprocess.Popen(cmd, stdin=subprocess.PIPE, stderr=subprocess.PIPE)
        (_, stderr) = job.communicate(passphrase.encode("utf-8"))

        if job.returncode:
            raise bb.build.FuncFailed("GPG exited with code %d: %s" % (job.returncode, stderr.decode("utf-8")))
    except IOError as e:
        bb.error("IO error (%s): %s" % (e.errno, e.strerror))
        raise Exception("Failed to sign '%s'" % input_file)
    except OSError as e:
        bb.error("OS error (%s): %s" % (e.errno, e.strerror))
        # Typo fix: add the missing closing quote around the filename
        raise Exception("Failed to sign '%s'" % input_file)
def create_socket(self):
    """Connect to the qemu serial-console unix socket, retrying once per
    second for up to self.runqemutime seconds. Returns True on success,
    False (after reporting an error) on timeout."""
    bb.note("waiting at most %s seconds for qemu pid" % self.runqemutime)
    for attempt in range(self.runqemutime):
        time.sleep(1)
        try:
            self.server_socket = socket.socket(socket.AF_UNIX, socket.SOCK_STREAM)
            self.server_socket.connect(self.socketname)
            bb.note("Created listening socket for qemu serial console.")
            return True
        except socket.error:
            # qemu may not have created the socket yet - clean up and retry
            self.server_socket.close()
    bb.error("Failed to create listening socket %s: " % (self.socketname))
    return False
def arch_config_sub(d, arch):
    """Canonicalize *arch* by running the config.sub script, caching results
    in the module-global 'config_sub_cache' dict."""
    try:
        config_sub_cache = globals()['config_sub_cache']
    except KeyError:
        # First call: create the cache
        config_sub_cache = {}
        globals()['config_sub_cache'] = config_sub_cache
    try:
        canonical_arch = config_sub_cache[arch]
    except KeyError:
        # Cache miss: run config.sub and remember its output
        script = arch_find_script(d, 'config.sub')
        try:
            bb.debug("%s %s"%(script, arch))
            canonical_arch = os.popen("%s %s"%(script, arch)).readline().strip()
            config_sub_cache[arch] = canonical_arch
        except OSError, e:
            bb.error("config.sub(%s) failed: %s"%(arch, e))
            # Fall back to the uncanonicalized input on failure
            return arch
    # NOTE(review): as shown here the success path falls off the end and
    # returns None; presumably a trailing 'return canonical_arch' exists in
    # the full source - confirm.
def insert_method(modulename, code, fn):
    """
    Compile and run *code* for *modulename*, then register its global
    names, reporting names already declared by a different module. No
    further checking is done.
    """
    comp = better_compile(code, modulename, fn)
    better_exec(comp, None, code, fn)

    # now some instrumentation
    for name in comp.co_names:
        if name in ['None', 'False']:
            continue
        if name in _parsed_fns and _parsed_fns[name] != modulename:
            error("The function %s defined in %s was already declared in %s. BitBake has a global python function namespace so shared functions should be declared in a common include file rather than being duplicated, or if the functions are different, please use different function names." % (name, modulename, _parsed_fns[name]))
            continue
        _parsed_fns[name] = modulename
def expand(self, s, varname):
    """Expand ${VAR} and ${@python} references in *s* against this
    datastore. Non-string input is returned unchanged; *varname* guards
    against a variable referencing itself."""
    def var_sub(match):
        # ${KEY} -> value of KEY; self-reference is an error
        key = match.group()[2:-1]
        if varname and key:
            if varname == key:
                raise Exception("variable %s references itself!" % varname)
        var = self.getVar(key, 1)
        if var is not None:
            return var
        else:
            # Unset variable: leave the literal reference in place
            return match.group()

    def python_sub(match):
        # ${@expr} -> eval(expr), intended to expose this datastore as 'd'
        import bb
        code = match.group()[3:-1]
        # NOTE(review): assigning into locals() does not reliably inject
        # 'd' into eval()'s scope - verify expressions using 'd' work.
        locals()["d"] = self
        s = eval(code)
        if type(s) == types.IntType: s = str(s)
        return s

    if type(s) is not types.StringType: # sanity check
        return s

    # Repeat until no '$' remains or expansion reaches a fixed point
    while s.find("$") != -1:
        olds = s
        try:
            s = __expand_var_regexp__.sub(var_sub, s)
            s = __expand_python_regexp__.sub(python_sub, s)
            if s == olds: break
            if type(s) is not types.StringType: # sanity check
                import bb
                bb.error("expansion of %s returned non-string %s" % (olds, s))
        except KeyboardInterrupt:
            raise
        except:
            note("%s:%s while evaluating:\n%s" % (sys.exc_info()[0], sys.exc_info()[1], s))
            raise
    return s
def __init__(self, lock, sockname, configuration, featureset):
    """Fork a daemonized cooker server listening on the unix socket
    *sockname*, then wait up to 30s for it to report readiness; on failure
    dump the tail of its log and exit."""
    self.configuration = configuration
    self.featureset = featureset
    self.sockname = sockname
    self.bitbake_lock = lock
    # Pipe used by the daemonized child to signal it is ready
    self.readypipe, self.readypipein = os.pipe()

    # Create server control socket
    if os.path.exists(sockname):
        os.unlink(sockname)

    self.sock = socket.socket(socket.AF_UNIX, socket.SOCK_STREAM)
    # AF_UNIX has path length issues so chdir here to workaround
    cwd = os.getcwd()
    logfile = os.path.join(cwd, "bitbake-cookerdaemon.log")

    try:
        os.chdir(os.path.dirname(sockname))
        self.sock.bind(os.path.basename(sockname))
    finally:
        os.chdir(cwd)
    self.sock.listen(1)

    # The daemonized child inherits the listening socket
    os.set_inheritable(self.sock.fileno(), True)
    bb.daemonize.createDaemon(self._startServer, logfile)
    self.sock.close()
    self.bitbake_lock.close()

    ready = ConnectionReader(self.readypipe)
    r = ready.wait(30)
    if not r:
        ready.close()
        bb.error("Unable to start bitbake server")
        if os.path.exists(logfile):
            with open(logfile, "r") as f:
                logs=f.readlines()
            bb.error("Last 10 lines of server log %s:\n%s" % (logfile, "".join(logs[-10:])))
        raise SystemExit(1)
    ready.close()
    os.close(self.readypipein)
def prserv_import_db(d, filter_version=None, filter_pkgarch=None, filter_checksum=None):
    """Import PRAUTO$version$pkgarch$checksum entries from the datastore
    into the remote PR service, optionally filtered by version, pkgarch
    and/or checksum.

    Returns the list of (version, pkgarch, checksum, value) tuples that
    were imported, or None when the PR service is unavailable.
    """
    if d.getVar('USE_PR_SERV', True) != "1":
        bb.error("Not using network based PR service")
        return None

    conn = d.getVar("__PRSERV_CONN", True)
    if conn is None:
        conn = prserv_make_conn(d)
        if conn is None:
            bb.error("Making connection failed to remote PR service")
            return None

    #get the entry values
    imported = []
    prefix = "PRAUTO$"
    for v in d.keys():
        if v.startswith(prefix):
            # Parse 'PRAUTO$version$pkgarch$checksum' from the right so
            # '$' inside the version survives
            (remain, sep, checksum) = v.rpartition('$')
            (remain, sep, pkgarch) = remain.rpartition('$')
            (remain, sep, version) = remain.rpartition('$')
            if (remain + '$' != prefix) or \
               (filter_version and filter_version != version) or \
               (filter_pkgarch and filter_pkgarch != pkgarch) or \
               (filter_checksum and filter_checksum != checksum):
                continue
            try:
                value = int(d.getVar(remain + '$' + version + '$' + pkgarch + '$' + checksum, True))
            except BaseException as exc:
                # Bug fix: bb.debug() takes a numeric debug level as its
                # first argument (cf. bb.debug(level, value) elsewhere in
                # this file); it was missing here.
                bb.debug(1, "Not valid value of %s:%s" % (v,str(exc)))
                continue
            ret = conn.importone(version,pkgarch,checksum,value)
            if ret != value:
                bb.error("importing(%s,%s,%s,%d) failed. DB may have larger value %d" % (version,pkgarch,checksum,value,ret))
            else:
                imported.append((version,pkgarch,checksum,value))
    return imported
def readfifo(data):
    """Dispatch NUL-separated b'<cmd> <value>' records read from the task
    fifo to the matching bb logging call; unknown commands are warned."""
    for line in data.split(b'\0'):
        # Just skip empty commands
        if not line:
            continue
        parts = line.split(b' ', 1)
        cmd = parts[0].decode("utf-8")
        value = parts[1].decode("utf-8") if len(parts) > 1 else ''
        if cmd == 'bbplain':
            bb.plain(value)
        elif cmd == 'bbnote':
            bb.note(value)
        elif cmd == 'bbwarn':
            bb.warn(value)
        elif cmd == 'bberror':
            bb.error(value)
        elif cmd == 'bbfatal':
            # The caller will call exit themselves, so bb.error() is
            # what we want here rather than bb.fatal()
            bb.error(value)
        elif cmd == 'bbfatal_log':
            bb.error(value, forcelog=True)
        elif cmd == 'bbdebug':
            rest = value.split(' ', 1)
            bb.debug(int(rest[0]), rest[1])
        else:
            bb.warn("Unrecognised command '%s' on FIFO" % cmd)
def __init__(self, lock, sockname, configuration, featureset):
    """Fork a daemonized cooker server on unix socket *sockname* and wait
    for a 'ready' handshake; on failure report only the log lines written
    by this session's server before exiting."""
    self.configuration = configuration
    self.featureset = featureset
    self.sockname = sockname
    self.bitbake_lock = lock
    # Pipe the daemonized child uses to report readiness
    self.readypipe, self.readypipein = os.pipe()

    # Create server control socket
    if os.path.exists(sockname):
        os.unlink(sockname)

    self.sock = socket.socket(socket.AF_UNIX, socket.SOCK_STREAM)
    # AF_UNIX has path length issues so chdir here to workaround
    cwd = os.getcwd()
    logfile = os.path.join(cwd, "bitbake-cookerdaemon.log")

    try:
        os.chdir(os.path.dirname(sockname))
        self.sock.bind(os.path.basename(sockname))
    finally:
        os.chdir(cwd)
    self.sock.listen(1)

    os.set_inheritable(self.sock.fileno(), True)
    # Remember the fork time so only this session's log lines are shown
    startdatetime = datetime.datetime.now()
    bb.daemonize.createDaemon(self._startServer, logfile)
    self.sock.close()
    self.bitbake_lock.close()

    ready = ConnectionReader(self.readypipe)
    r = ready.poll(30)
    if r:
        r = ready.get()
    if not r or r != "ready":
        ready.close()
        bb.error("Unable to start bitbake server")
        if os.path.exists(logfile):
            # Find the start marker written by this session's server, then
            # collect everything after it
            logstart_re = re.compile(self.start_log_format % ('([0-9]+)', '([0-9-]+ [0-9:.]+)'))
            started = False
            lines = []
            with open(logfile, "r") as f:
                for line in f:
                    if started:
                        lines.append(line)
                    else:
                        res = logstart_re.match(line.rstrip())
                        if res:
                            ldatetime = datetime.datetime.strptime(res.group(2), self.start_log_datetime_format)
                            if ldatetime >= startdatetime:
                                started = True
                                lines.append(line)
            if lines:
                if len(lines) > 10:
                    bb.error("Last 10 lines of server log for this session (%s):\n%s" % (logfile, "".join(lines[-10:])))
                else:
                    bb.error("Server log for this session (%s):\n%s" % (logfile, "".join(lines)))
        raise SystemExit(1)
    ready.close()
    os.close(self.readypipein)
def readfifo(data):
    """Dispatch NUL-separated "<command> <payload>" records from the task
    FIFO to the matching bb logging function.

    Unrecognised commands (and empty records) are silently ignored.
    """
    # Command name -> logging callable for the single-argument commands.
    dispatch = {
        'bbplain': bb.plain,
        'bbnote': bb.note,
        'bbwarn': bb.warn,
        'bberror': bb.error,
        # The caller will call exit themselves, so bb.error() is
        # what we want here rather than bb.fatal()
        'bbfatal': bb.error,
    }
    for record in data.split('\0'):
        fields = record.split(' ', 1)
        cmd = fields[0]
        value = fields[1] if len(fields) > 1 else ''
        if cmd in dispatch:
            dispatch[cmd](value)
        elif cmd == 'bbfatal_log':
            bb.error(value, forcelog=True)
        elif cmd == 'bbdebug':
            parts = value.split(' ', 1)
            bb.debug(int(parts[0]), parts[1])
def readfifo(data):
    """Dispatch NUL-separated "<command> <payload>" records from the task
    FIFO to the matching bb logging function.
    """
    lines = data.split(b"\0")
    for line in lines:
        splitval = line.split(b" ", 1)
        # Fix: the command must be decoded to str. It was previously left
        # as bytes, and bytes never compare equal to the str literals
        # below in Python 3, so no command was ever dispatched.
        cmd = splitval[0].decode("utf-8")
        if len(splitval) > 1:
            value = splitval[1].decode("utf-8")
        else:
            value = ""
        if cmd == "bbplain":
            bb.plain(value)
        elif cmd == "bbnote":
            bb.note(value)
        elif cmd == "bbwarn":
            bb.warn(value)
        elif cmd == "bberror":
            bb.error(value)
        elif cmd == "bbfatal":
            # The caller will call exit themselves, so bb.error() is
            # what we want here rather than bb.fatal()
            bb.error(value)
        elif cmd == "bbfatal_log":
            bb.error(value, forcelog=True)
        elif cmd == "bbdebug":
            # Payload is "<level> <message>".
            splitval = value.split(" ", 1)
            level = int(splitval[0])
            value = splitval[1]
            bb.debug(level, value)
def start(self, qemuparams=None, ssh=True, extra_bootparams=None):
    """Launch qemu-system-i386 with the configured kernel/rootfs and wait
    for it to come up.

    Returns True if qemu is alive after startup, False on any validation
    or startup failure. Exports OE_TMPDIR/DEPLOY_DIR_IMAGE for runqemu.
    NOTE(review): qemuparams/ssh/extra_bootparams are accepted but unused
    in this body — presumably consumed by other variants of this class.
    """
    # qemu's SDL display needs a running X server.
    if self.display:
        os.environ["DISPLAY"] = self.display
    else:
        bb.error("To start qemu I need a X desktop, please set DISPLAY correctly (e.g. DISPLAY=:1)")
        return False
    if not os.path.exists(self.rootfs):
        bb.error("Invalid rootfs %s" % self.rootfs)
        return False
    if not os.path.exists(self.tmpdir):
        bb.error("Invalid TMPDIR path %s" % self.tmpdir)
        return False
    else:
        os.environ["OE_TMPDIR"] = self.tmpdir
    if not os.path.exists(self.deploy_dir_image):
        bb.error("Invalid DEPLOY_DIR_IMAGE path %s" % self.deploy_dir_image)
        return False
    else:
        os.environ["DEPLOY_DIR_IMAGE"] = self.deploy_dir_image
    # Set this flag so that Qemu doesn't do any grabs as SDL grabs interact
    # badly with screensavers.
    os.environ["QEMU_DONT_GRAB"] = "1"
    # Serial console is bridged to a unix socket so the test harness can
    # talk to the guest.
    self.qemuparams = '--append "root=/dev/ram0 console=ttyS0" -nographic -serial unix:%s,server,nowait' % self.socketfile
    launch_cmd = "qemu-system-i386 -kernel %s -initrd %s %s" % (self.kernel, self.rootfs, self.qemuparams)
    # setpgrp puts qemu in its own process group so stop() can kill the
    # whole group.
    self.runqemu = subprocess.Popen(launch_cmd, shell=True, stdout=subprocess.PIPE, stderr=subprocess.STDOUT, preexec_fn=os.setpgrp)
    bb.note("runqemu started, pid is %s" % self.runqemu.pid)
    bb.note("waiting at most %s seconds for qemu pid" % self.runqemutime)
    # Poll until the qemu process reports alive or the timeout expires.
    endtime = time.time() + self.runqemutime
    while not self.is_alive() and time.time() < endtime:
        time.sleep(1)
    if self.is_alive():
        bb.note("qemu started - qemu procces pid is %s" % self.qemupid)
        self.create_socket()
    else:
        bb.note("Qemu pid didn't appeared in %s seconds" % self.runqemutime)
        output = self.runqemu.stdout
        self.stop()
        bb.note("Output from runqemu:\n%s" % output.read().decode("utf-8"))
        return False
    return self.is_alive()
def call_plugins(self, methodname, *parameters, **keywords):
    """Invoke *methodname* on every registered ISA plugin.

    Plugins missing an optional method are skipped silently; a missing
    init() is reported. Whitelist/blacklist filtering is applied before
    the call, and any exception raised by a plugin is logged rather than
    propagated so one broken plugin cannot abort the run.
    """
    for name in isaplugins.__all__:
        plugin = getattr(isaplugins, name)
        method = getattr(plugin, methodname, None)
        if not method:
            # Not having init() is an error, everything else is optional.
            if methodname == "init":
                # Fix: the format string had a single %s but was handed two
                # arguments, so this error path itself raised TypeError.
                error("No %s() defined for plugin %s.\n"
                      "Skipping this plugin." % (methodname, plugin.getPluginName()))
            continue
        if self.ISA_config.plugin_whitelist and plugin.getPluginName() not in self.ISA_config.plugin_whitelist:
            continue
        if self.ISA_config.plugin_blacklist and plugin.getPluginName() in self.ISA_config.plugin_blacklist:
            continue
        try:
            method(*parameters, **keywords)
        except Exception:
            # Narrowed from a bare except: so KeyboardInterrupt/SystemExit
            # still propagate; plugin failures are logged and swallowed.
            error("Exception in plugin %s %s():\n%s" % (plugin.getPluginName(), methodname, traceback.format_exc()))
def generate_dependency_hash(tasklist, gendeps, lookupcache, ignored_vars, fn):
    """Compute, for each task, its transitive variable dependencies and a
    sha256 base hash over the task body plus those dependencies' values.

    Returns (taskdeps, basehash): taskdeps maps task -> sorted dependency
    list; basehash maps "<fn>:<task>" -> hex digest. gendeps sets are
    pruned of ignored_vars in place.
    """
    taskdeps = {}
    basehash = {}
    for task in tasklist:
        chunks = lookupcache[task]
        if chunks is None:
            bb.error("Task %s from %s seems to be empty?!" % (task, fn))
            chunks = []
        else:
            chunks = [chunks]
        gendeps[task] -= ignored_vars
        # Breadth-first expansion of the dependency closure.
        frontier = gendeps[task]
        seen = set()
        while frontier:
            current = frontier
            seen |= current
            frontier = set()
            for dep in current:
                if dep in ignored_vars:
                    continue
                gendeps[dep] -= ignored_vars
                frontier |= gendeps[dep]
            frontier -= seen
        alldeps = sorted(seen)
        # Sorted order makes the hash deterministic across runs.
        for dep in alldeps:
            chunks.append(dep)
            depvalue = lookupcache[dep]
            if depvalue is not None:
                chunks.append(str(depvalue))
        basehash[fn + ":" + task] = hashlib.sha256("".join(chunks).encode("utf-8")).hexdigest()
        taskdeps[task] = alldeps
    return taskdeps, basehash
def handle(fn, d, include): print "" print "fn: %s" % fn print "data: %s" % d print dir(d) print d.getVar.__doc__ print "include: %s" % include # check if we include or are the beginning if include: oldfile = d.getVar('FILE') else: #d.inheritFromOS() oldfile = None # find the file if not os.path.isabs(fn): bb.error("No Absolute FILE name") abs_fn = bb.which(d.getVar('BBPATH'), fn) else: abs_fn = fn # check if the file exists if not os.path.exists(abs_fn): raise IOError("file '%(fn)' not found" % locals() ) # now we know the file is around mark it as dep if include: parse.mark_dependency(d, abs_fn) # now parse this file - by defering it to C++ parsefile(fn, d) # restore the original FILE if oldfile: d.setVar('FILE', oldfile) return d
def generate_dependency_hash(tasklist, gendeps, lookupcache, whitelist, fn):
    """Compute, for each task, its transitive variable dependencies and a
    sha256 base hash over the task body plus those dependencies' values.

    Returns (taskdeps, basehash): taskdeps maps task -> sorted dependency
    list; basehash maps "<fn>:<task>" -> hex digest. gendeps sets are
    pruned of whitelisted variables in place.
    """
    taskdeps = {}
    basehash = {}
    for task in tasklist:
        data = lookupcache[task]
        if data is None:
            bb.error("Task %s from %s seems to be empty?!" % (task, fn))
            parts = []
        else:
            parts = [data]
        gendeps[task] -= whitelist
        newdeps = gendeps[task]
        seen = set()
        # Expand the dependency closure breadth-first.
        while newdeps:
            nextdeps = newdeps
            seen |= nextdeps
            newdeps = set()
            for dep in nextdeps:
                if dep in whitelist:
                    continue
                gendeps[dep] -= whitelist
                newdeps |= gendeps[dep]
            newdeps -= seen
        alldeps = sorted(seen)
        # Perf: collect the hash input in a list and join once instead of
        # repeated string concatenation (quadratic for many dependencies).
        for dep in alldeps:
            parts.append(dep)
            var = lookupcache[dep]
            if var is not None:
                parts.append(str(var))
        k = fn + ":" + task
        basehash[k] = hashlib.sha256("".join(parts).encode("utf-8")).hexdigest()
        taskdeps[task] = alldeps
    return taskdeps, basehash
def generate_dependency_hash(tasklist, gendeps, lookupcache, whitelist, fn):
    """Compute, for each task, its transitive variable dependencies and a
    sha256 base hash over the task body plus those dependencies' values.

    Returns (taskdeps, basehash): taskdeps maps task -> sorted dependency
    list; basehash maps "<fn>.<task>" -> hex digest. gendeps sets are
    pruned of whitelisted variables in place.
    """
    taskdeps = {}
    basehash = {}
    for task in tasklist:
        cached = lookupcache[task]
        if cached is None:
            bb.error("Task %s from %s seems to be empty?!" % (task, fn))
            cached = ''
        gendeps[task] -= whitelist
        # Breadth-first expansion of the dependency closure.
        pending = gendeps[task]
        closure = set()
        while pending:
            batch = pending
            closure |= batch
            pending = set()
            for name in batch:
                if name in whitelist:
                    continue
                gendeps[name] -= whitelist
                pending |= gendeps[name]
            pending -= closure
        ordered = sorted(closure)
        # Sorted order keeps the hash deterministic across runs.
        hashdata = cached
        for name in ordered:
            hashdata += name
            val = lookupcache[name]
            if val is not None:
                hashdata += str(val)
        basehash[fn + "." + task] = hashlib.sha256(hashdata.encode("utf-8")).hexdigest()
        taskdeps[task] = ordered
    return taskdeps, basehash
def prserv_import_db(d, filter_version=None, filter_pkgarch=None, filter_checksum=None):
    """Push PRAUTO$<version>$<pkgarch>$<checksum> entries from the
    datastore into the remote PR service.

    The optional filter_* arguments restrict which entries are imported.
    Returns the list of (version, pkgarch, checksum, value) tuples that
    were imported, or None if no PR service connection is available.
    """
    if d.getVar('USE_PR_SERV', True) != "1":
        bb.error("Not using network based PR service")
        return None
    conn = d.getVar("__PRSERV_CONN", True)
    if conn is None:
        conn = prserv_make_conn(d)
        if conn is None:
            bb.error("Making connection failed to remote PR service")
            return None
    #get the entry values
    imported = []
    prefix = "PRAUTO$"
    for v in d.keys():
        if v.startswith(prefix):
            # Variable name layout: PRAUTO$<version>$<pkgarch>$<checksum>;
            # peel the fields off from the right.
            (remain, sep, checksum) = v.rpartition('$')
            (remain, sep, pkgarch) = remain.rpartition('$')
            (remain, sep, version) = remain.rpartition('$')
            if (remain + '$' != prefix) or \
               (filter_version and filter_version != version) or \
               (filter_pkgarch and filter_pkgarch != pkgarch) or \
               (filter_checksum and filter_checksum != checksum):
                continue
            try:
                value = int(d.getVar(remain + '$' + version + '$' + pkgarch + '$' + checksum, True))
            except BaseException as exc:
                # Fix: bb.debug() takes a debug level as its first argument
                # (cf. bb.debug(level, value) elsewhere in this file); it
                # was previously called with the message in the level slot.
                bb.debug(1, "Not valid value of %s:%s" % (v, str(exc)))
                continue
            ret = conn.importone(version, pkgarch, checksum, value)
            if ret != value:
                bb.error("importing(%s,%s,%s,%d) failed. DB may have larger value %d" % (version, pkgarch, checksum, value, ret))
            else:
                imported.append((version, pkgarch, checksum, value))
    return imported
def better_compile(text, file, realfile):
    """
    A better compile method. This method
    will print the offending lines on failure.

    text: the source to compile; file: the filename passed to compile();
    realfile: the user-facing filename used in error messages.
    Exits the process (sys.exit(1)) on a compile error. Python 2 code.
    """
    try:
        return compile(text, file, "exec")
    except Exception, e:
        import bb,sys
        # split the text into lines again
        body = text.split('\n')
        bb.error("Error in compiling: ", realfile)
        bb.error("The lines resulting into this error were:")
        # NOTE(review): assumes the exception carries a lineno attribute
        # (true for SyntaxError; other exception types would fail here).
        bb.error("\t%d:%s:'%s'" % (e.lineno, e.__class__.__name__, body[e.lineno-1]))
        _print_trace(body, e.lineno)
        # exit now
        sys.exit(1)
def readfifo(data):
    """Append raw FIFO bytes to the enclosing scope's buffer and dispatch
    every complete NUL-terminated "<command> <payload>" record to the
    matching bb logging function. A trailing partial record is kept in
    the buffer for the next call.
    """
    nonlocal fifobuffer
    fifobuffer.extend(data)
    while fifobuffer:
        message, token, nextmsg = fifobuffer.partition(b"\00")
        # token is empty when no NUL was found, i.e. the record is still
        # incomplete — keep buffering.
        if token:
            splitval = message.split(b' ', 1)
            cmd = splitval[0].decode("utf-8")
            if len(splitval) > 1:
                value = splitval[1].decode("utf-8")
            else:
                value = ''
            if cmd == 'bbplain':
                bb.plain(value)
            elif cmd == 'bbnote':
                bb.note(value)
            elif cmd == 'bbverbnote':
                bb.verbnote(value)
            elif cmd == 'bbwarn':
                bb.warn(value)
            elif cmd == 'bberror':
                bb.error(value)
            elif cmd == 'bbfatal':
                # The caller will call exit themselves, so bb.error() is
                # what we want here rather than bb.fatal()
                bb.error(value)
            elif cmd == 'bbfatal_log':
                bb.error(value, forcelog=True)
            elif cmd == 'bbdebug':
                # Payload is "<level> <message>".
                splitval = value.split(' ', 1)
                level = int(splitval[0])
                value = splitval[1]
                bb.debug(level, value)
            else:
                bb.warn("Unrecognised command '%s' on FIFO" % cmd)
            # Drop the consumed record; keep whatever followed the NUL.
            fifobuffer = nextmsg
        else:
            break
def __init__(self, lock, sockname, configuration, featureset):
    """Fork the cooker server as a daemon and wait for it to report ready.

    lock: held bitbake lock file object; closed in this (parent) process.
    sockname: filesystem path for the AF_UNIX control socket.
    Raises SystemExit(1) if the child never reports readiness on the pipe.
    """
    self.configuration = configuration
    self.featureset = featureset
    self.sockname = sockname
    self.bitbake_lock = lock
    # Pipe used by the daemonized child to signal startup success.
    self.readypipe, self.readypipein = os.pipe()
    # Create server control socket
    if os.path.exists(sockname):
        os.unlink(sockname)
    # Place the log in the builddirectory alongside the lock file
    logfile = os.path.join(os.path.dirname(self.bitbake_lock.name), "bitbake-cookerdaemon.log")
    self.sock = socket.socket(socket.AF_UNIX, socket.SOCK_STREAM)
    # AF_UNIX has path length issues so chdir here to workaround
    cwd = os.getcwd()
    try:
        os.chdir(os.path.dirname(sockname))
        self.sock.bind(os.path.basename(sockname))
    finally:
        os.chdir(cwd)
    self.sock.listen(1)
    # Child must inherit the listening socket across the daemon fork.
    os.set_inheritable(self.sock.fileno(), True)
    startdatetime = datetime.datetime.now()
    bb.daemonize.createDaemon(self._startServer, logfile)
    # Parent's copies are no longer needed once the child owns them.
    self.sock.close()
    self.bitbake_lock.close()
    os.close(self.readypipein)
    ready = ConnectionReader(self.readypipe)
    r = ready.poll(5)
    if not r:
        bb.note("Bitbake server didn't start within 5 seconds, waiting for 90")
        r = ready.poll(90)
    if r:
        try:
            r = ready.get()
        except EOFError:
            # Trap the child exitting/closing the pipe and error out
            r = None
    if not r or r[0] != "r":
        ready.close()
        bb.error("Unable to start bitbake server (%s)" % str(r))
        if os.path.exists(logfile):
            # Only report log lines written after our own start timestamp,
            # found by matching the server's start-banner line; keep the
            # last 60 pre-start lines as fallback context.
            logstart_re = re.compile(self.start_log_format % ('([0-9]+)', '([0-9-]+ [0-9:.]+)'))
            started = False
            lines = []
            lastlines = []
            with open(logfile, "r") as f:
                for line in f:
                    if started:
                        lines.append(line)
                    else:
                        lastlines.append(line)
                        res = logstart_re.match(line.rstrip())
                        if res:
                            ldatetime = datetime.datetime.strptime(res.group(2), self.start_log_datetime_format)
                            if ldatetime >= startdatetime:
                                started = True
                                lines.append(line)
                        if len(lastlines) > 60:
                            lastlines = lastlines[-60:]
            if lines:
                if len(lines) > 60:
                    bb.error("Last 60 lines of server log for this session (%s):\n%s" % (logfile, "".join(lines[-60:])))
                else:
                    bb.error("Server log for this session (%s):\n%s" % (logfile, "".join(lines)))
            elif lastlines:
                bb.error("Server didn't start, last 60 loglines (%s):\n%s" % (logfile, "".join(lastlines)))
        else:
            bb.error("%s doesn't exist" % logfile)
        raise SystemExit(1)
    ready.close()
def __init__(self, lock, sockname, featureset, server_timeout, xmlrpcinterface):
    """Fork the cooker server as a daemon and wait for it to report ready.

    lock: held bitbake lock file object; closed in this (parent) process.
    sockname: filesystem path for the server control socket.
    server_timeout / xmlrpcinterface: stored for the child server process.
    Raises SystemExit(1) if the child never reports readiness on the pipe.
    """
    self.server_timeout = server_timeout
    self.xmlrpcinterface = xmlrpcinterface
    self.featureset = featureset
    self.sockname = sockname
    self.bitbake_lock = lock
    # Pipe used by the daemonized child to signal startup success.
    self.readypipe, self.readypipein = os.pipe()
    # Place the log in the builddirectory alongside the lock file
    logfile = os.path.join(os.path.dirname(self.bitbake_lock.name), "bitbake-cookerdaemon.log")
    self.logfile = logfile
    startdatetime = datetime.datetime.now()
    bb.daemonize.createDaemon(self._startServer, logfile)
    self.bitbake_lock.close()
    os.close(self.readypipein)
    ready = ConnectionReader(self.readypipe)
    r = ready.poll(5)
    if not r:
        bb.note("Bitbake server didn't start within 5 seconds, waiting for 90")
        r = ready.poll(90)
    if r:
        try:
            r = ready.get()
        except EOFError:
            # Trap the child exiting/closing the pipe and error out
            r = None
    if not r or r[0] != "r":
        ready.close()
        bb.error("Unable to start bitbake server (%s)" % str(r))
        if os.path.exists(logfile):
            # Only report log lines written after our own start timestamp,
            # found by matching the server's start-banner line; keep the
            # last 60 pre-start lines as fallback context.
            logstart_re = re.compile(start_log_format % ('([0-9]+)', '([0-9-]+ [0-9:.]+)'))
            started = False
            lines = []
            lastlines = []
            with open(logfile, "r") as f:
                for line in f:
                    if started:
                        lines.append(line)
                    else:
                        lastlines.append(line)
                        res = logstart_re.search(line.rstrip())
                        if res:
                            ldatetime = datetime.datetime.strptime(res.group(2), start_log_datetime_format)
                            if ldatetime >= startdatetime:
                                started = True
                                lines.append(line)
                        if len(lastlines) > 60:
                            lastlines = lastlines[-60:]
            if lines:
                if len(lines) > 60:
                    bb.error("Last 60 lines of server log for this session (%s):\n%s" % (logfile, "".join(lines[-60:])))
                else:
                    bb.error("Server log for this session (%s):\n%s" % (logfile, "".join(lines)))
            elif lastlines:
                bb.error("Server didn't start, last 60 loglines (%s):\n%s" % (logfile, "".join(lastlines)))
        else:
            bb.error("%s doesn't exist" % logfile)
        raise SystemExit(1)
    ready.close()
def feeder(lineno, s, fn, root, statements):
    """Feed one line of a .bb/.bbclass file into the parser state machine.

    Accumulates shell function bodies (__infunc__), python def bodies
    (__inpython__) and backslash continuations (__residue__) in module
    globals, emitting AST statements as each construct completes; lines
    matching no bitbake construct are delegated to ConfHandler.feeder.
    """
    global __func_start_regexp__, __inherit_regexp__, __export_func_regexp__, __addtask_regexp__, __addhandler_regexp__, __def_regexp__, __python_func_regexp__, __inpython__, __infunc__, __body__, classes, bb, __residue__
    # Inside a shell-style function body: collect lines until the closing brace.
    if __infunc__:
        if s == '}':
            __body__.append('')
            ast.handleMethod(statements, fn, lineno, __infunc__, __body__)
            __infunc__ = ""
            __body__ = []
        else:
            __body__.append(s)
        return
    # Inside a python "def" body: collect indented lines until the block ends
    # (IN_PYTHON_EOF marks end-of-file flushing).
    if __inpython__:
        m = __python_func_regexp__.match(s)
        if m and lineno != IN_PYTHON_EOF:
            __body__.append(s)
            return
        else:
            ast.handlePythonMethod(statements, fn, lineno, __inpython__, root, __body__)
            __body__ = []
            __inpython__ = False
            if lineno == IN_PYTHON_EOF:
                return
    # A comment appearing mid-continuation used to be silently dropped;
    # now it is reported as an error.
    if s and s[0] == '#':
        if len(__residue__) != 0 and __residue__[0][0] != "#":
            bb.error("There is a comment on line %s of file %s (%s) which is in the middle of a multiline expression.\nBitbake used to ignore these but no longer does so, please fix your metadata as errors are likely as a result of this change." % (lineno, fn, s))
    # Backslash continuation: stash the partial line and wait for more.
    if s and s[-1] == '\\':
        __residue__.append(s[:-1])
        return
    s = "".join(__residue__) + s
    __residue__ = []
    # Skip empty lines
    if s == '':
        return
    # Skip comments
    if s[0] == '#':
        return
    m = __func_start_regexp__.match(s)
    if m:
        __infunc__ = m.group("func") or "__anonymous"
        ast.handleMethodFlags(statements, fn, lineno, __infunc__, m)
        return
    m = __def_regexp__.match(s)
    if m:
        __body__.append(s)
        __inpython__ = m.group(1)
        return
    m = __export_func_regexp__.match(s)
    if m:
        ast.handleExportFuncs(statements, fn, lineno, m, classes)
        return
    m = __addtask_regexp__.match(s)
    if m:
        ast.handleAddTask(statements, fn, lineno, m)
        return
    m = __addhandler_regexp__.match(s)
    if m:
        ast.handleBBHandlers(statements, fn, lineno, m)
        return
    m = __inherit_regexp__.match(s)
    if m:
        ast.handleInherit(statements, fn, lineno, m)
        return
    # Not a bitbake-specific construct: treat as a plain configuration line.
    return ConfHandler.feeder(lineno, s, fn, statements)
def __init__(self, lock, sockname, configuration, featureset):
    """Fork the cooker server as a daemon and wait for it to report ready.

    lock: held bitbake lock file object; closed in this (parent) process.
    sockname: filesystem path for the AF_UNIX control socket.
    Raises SystemExit(1) if the child never sends "ready" on the pipe.
    """
    self.configuration = configuration
    self.featureset = featureset
    self.sockname = sockname
    self.bitbake_lock = lock
    # Pipe used by the daemonized child to signal startup success.
    self.readypipe, self.readypipein = os.pipe()
    # Create server control socket
    if os.path.exists(sockname):
        os.unlink(sockname)
    self.sock = socket.socket(socket.AF_UNIX, socket.SOCK_STREAM)
    # AF_UNIX has path length issues so chdir here to workaround
    cwd = os.getcwd()
    # NOTE(review): log lands in the cwd at construction time — later
    # variants place it next to the lock file instead.
    logfile = os.path.join(cwd, "bitbake-cookerdaemon.log")
    try:
        os.chdir(os.path.dirname(sockname))
        self.sock.bind(os.path.basename(sockname))
    finally:
        os.chdir(cwd)
    self.sock.listen(1)
    # Child must inherit the listening socket across the daemon fork.
    os.set_inheritable(self.sock.fileno(), True)
    startdatetime = datetime.datetime.now()
    bb.daemonize.createDaemon(self._startServer, logfile)
    # Parent's copies are no longer needed once the child owns them.
    self.sock.close()
    self.bitbake_lock.close()
    ready = ConnectionReader(self.readypipe)
    r = ready.poll(30)
    if r:
        r = ready.get()
    if not r or r != "ready":
        ready.close()
        bb.error("Unable to start bitbake server")
        if os.path.exists(logfile):
            # Only report log lines written after our own start timestamp,
            # found by matching the server's start-banner line.
            logstart_re = re.compile(self.start_log_format % ('([0-9]+)', '([0-9-]+ [0-9:.]+)'))
            started = False
            lines = []
            with open(logfile, "r") as f:
                for line in f:
                    if started:
                        lines.append(line)
                    else:
                        res = logstart_re.match(line.rstrip())
                        if res:
                            ldatetime = datetime.datetime.strptime(res.group(2), self.start_log_datetime_format)
                            if ldatetime >= startdatetime:
                                started = True
                                lines.append(line)
            if lines:
                if len(lines) > 10:
                    bb.error("Last 10 lines of server log for this session (%s):\n%s" % (logfile, "".join(lines[-10:])))
                else:
                    bb.error("Server log for this session (%s):\n%s" % (logfile, "".join(lines)))
        raise SystemExit(1)
    ready.close()
    os.close(self.readypipein)