def readfifo(data):
    """Parse NUL-delimited logging commands from FIFO data and dispatch them."""
    for line in data.split(b'\0'):
        if not line:
            # Nothing to do for an empty command
            continue
        head, _, tail = line.partition(b' ')
        cmd = head.decode("utf-8")
        value = tail.decode("utf-8") if tail else ''
        if cmd == 'bbplain':
            bb.plain(value)
        elif cmd == 'bbnote':
            bb.note(value)
        elif cmd == 'bbwarn':
            bb.warn(value)
        elif cmd == 'bberror':
            bb.error(value)
        elif cmd == 'bbfatal':
            # The caller will call exit themselves, so bb.error() is
            # what we want here rather than bb.fatal()
            bb.error(value)
        elif cmd == 'bbfatal_log':
            bb.error(value, forcelog=True)
        elif cmd == 'bbdebug':
            parts = value.split(' ', 1)
            bb.debug(int(parts[0]), parts[1])
        else:
            bb.warn("Unrecognised command '%s' on FIFO" % cmd)
def run(self, cmd, args=None, configs=None, workdir=None):
    """Run npm command in a controlled environment.

    cmd     -- the npm command line to run
    args    -- iterable of (key, value) pairs appended as --key=value options
    configs -- deprecated alias for args (a warning is emitted when used)
    workdir -- directory to run in; defaults to a throwaway temp directory
    """
    with tempfile.TemporaryDirectory() as tmpdir:
        d = bb.data.createCopy(self.d)
        # Isolate npm from the user's real $HOME (e.g. ~/.npmrc)
        d.setVar("HOME", tmpdir)
        if not workdir:
            workdir = tmpdir
        def _run(cmd):
            # Point npm at the controlled user/global configuration.
            # NOTE(review): user config uses self.user_config.name while the
            # global config uses self.global_config_name -- presumably one is
            # a file object and the other a plain path; confirm upstream.
            cmd = "NPM_CONFIG_USERCONFIG=%s " % (self.user_config.name) + cmd
            cmd = "NPM_CONFIG_GLOBALCONFIG=%s " % (self.global_config_name) + cmd
            return runfetchcmd(cmd, d, workdir=workdir)
        if configs:
            bb.warn("Use of configs argument of NpmEnvironment.run() function"
                    " is deprecated. Please use args argument instead.")
            for key, value in configs:
                cmd += " --%s=%s" % (key, shlex.quote(value))
        if args:
            for key, value in args:
                cmd += " --%s=%s" % (key, shlex.quote(value))
        return _run(cmd)
def gettask_bashhash(taskname, d):
    """Compute the base hash for *taskname* against a synthetic "somefile" recipe."""
    task_list, gen_deps, lookup_cache = bb.data.generate_dependencies(d, set())
    _, base_hashes = bb.data.generate_dependency_hash(
        task_list, gen_deps, lookup_cache, set(), "somefile")
    # Surface the lookup cache contents for debugging
    bb.warn(str(lookup_cache))
    return base_hashes["somefile:" + taskname]
def readfifo(data):
    """Parse NUL-delimited logging commands from FIFO data and dispatch them.

    Each command is of the form b"<cmd> <value>"; unknown commands are
    silently ignored (this variant has no fallback warning).
    """
    lines = data.split(b"\0")
    for line in lines:
        splitval = line.split(b" ", 1)
        # Fix: decode the command as well -- the original left 'cmd' as bytes,
        # so every comparison against the str literals below was always False
        # in Python 3 and no command was ever dispatched.
        cmd = splitval[0].decode("utf-8")
        if len(splitval) > 1:
            value = splitval[1].decode("utf-8")
        else:
            value = ""
        if cmd == "bbplain":
            bb.plain(value)
        elif cmd == "bbnote":
            bb.note(value)
        elif cmd == "bbwarn":
            bb.warn(value)
        elif cmd == "bberror":
            bb.error(value)
        elif cmd == "bbfatal":
            # The caller will call exit themselves, so bb.error() is
            # what we want here rather than bb.fatal()
            bb.error(value)
        elif cmd == "bbfatal_log":
            bb.error(value, forcelog=True)
        elif cmd == "bbdebug":
            splitval = value.split(" ", 1)
            level = int(splitval[0])
            value = splitval[1]
            bb.debug(level, value)
def create_progress_handler(func, progress, logfile, d):
    """Instantiate a progress handler for a task based on its 'progress' varflag.

    Supported forms: 'percent', 'percent:<regex>', 'outof:<regex>' and
    'custom:<dotted.class.name>[:<otherargs>]'.  When nothing matches (or a
    custom class cannot be resolved), the raw logfile is returned unchanged.
    """
    if progress == 'percent':
        # Use default regex
        return bb.progress.BasicProgressHandler(d, outfile=logfile)
    elif progress.startswith('percent:'):
        # Use specified regex
        return bb.progress.BasicProgressHandler(d, regex=progress.split(':', 1)[1], outfile=logfile)
    elif progress.startswith('outof:'):
        # Use specified regex
        return bb.progress.OutOfProgressHandler(d, regex=progress.split(':', 1)[1], outfile=logfile)
    elif progress.startswith("custom:"):
        # Use a custom progress handler that was injected via OE_EXTRA_IMPORTS or __builtins__
        import functools
        from types import ModuleType
        parts = progress.split(":", 2)
        _, cls, otherargs = parts[0], parts[1], (parts[2] or None) if parts[2:] else None
        if cls:
            def resolve(x, y):
                # Walk one attribute step of the dotted path; modules use
                # getattr, dict-like contexts use .get()
                if not x:
                    return None
                if isinstance(x, ModuleType):
                    return getattr(x, y, None)
                return x.get(y)
            cls_obj = functools.reduce(resolve, cls.split("."), bb.utils._context)
            if not cls_obj:
                # Fall-back on __builtins__
                cls_obj = functools.reduce(resolve, cls.split("."), __builtins__)
            if cls_obj:
                return cls_obj(d, outfile=logfile, otherargs=otherargs)
            bb.warn('%s: unknown custom progress handler in task progress varflag value "%s", ignoring' % (func, cls))
    else:
        bb.warn('%s: invalid task progress varflag value "%s", ignoring' % (func, progress))
    # No handler could be created: hand back the raw logfile
    return logfile
def verify(self, sig_file, valid_sigs = ''):
    """Verify signature.

    sig_file   -- path passed to 'gpg --verify'
    valid_sigs -- whitespace-separated key IDs that are acceptable; when
                  empty, any successful verification is accepted.
    Returns True when verification succeeds (and, if valid_sigs is given,
    one of its keys produced a GOODSIG line), otherwise False.
    """
    cmd = self.gpg_cmd + ["--verify", "--no-permission-warning", "--status-fd", "1"]
    if self.gpg_path:
        cmd += ["--homedir", self.gpg_path]
    cmd += [sig_file]
    status = subprocess.run(cmd, stdout=subprocess.PIPE, stderr=subprocess.PIPE)
    # Valid if any key matches if unspecified
    if not valid_sigs:
        ret = False if status.returncode else True
        return ret
    import re
    goodsigs = []
    # GOODSIG status lines look like: [GNUPG:] GOODSIG <keyid> <user id>
    sigre = re.compile(r'^\[GNUPG:\] GOODSIG (\S+)\s(.*)$')
    for l in status.stdout.decode("utf-8").splitlines():
        s = sigre.match(l)
        if s:
            goodsigs += [s.group(1)]
    for sig in valid_sigs.split():
        if sig in goodsigs:
            return True
    # Signatures were found but none matched the accepted list
    if len(goodsigs):
        bb.warn('No accepted signatures found. Good signatures found: %s.' % ' '.join(goodsigs))
    return False
def readfifo(data):
    """Dispatch NUL-separated logging commands received over the FIFO (str variant)."""
    for entry in data.split('\0'):
        fields = entry.split(' ', 1)
        cmd = fields[0]
        value = fields[1] if len(fields) > 1 else ''
        if cmd == 'bbplain':
            bb.plain(value)
        elif cmd == 'bbnote':
            bb.note(value)
        elif cmd == 'bbwarn':
            bb.warn(value)
        elif cmd == 'bberror':
            bb.error(value)
        elif cmd == 'bbfatal':
            # The caller will call exit themselves, so bb.error() is
            # what we want here rather than bb.fatal()
            bb.error(value)
        elif cmd == 'bbfatal_log':
            bb.error(value, forcelog=True)
        elif cmd == 'bbdebug':
            parts = value.split(' ', 1)
            bb.debug(int(parts[0]), parts[1])
def send_rpm( remote_path ): # if it is not required to have an extracted binary, but to send an .rpm file
    """Find the package's .rpm in arch_rpms_dir and copy it to *remote_path*.

    Relies on enclosing-scope names: param_list, arch_rpms_dir, rpms_file_dir,
    oeRuntimeTest, d and cleanup_list.  Returns cleanup_list on success, None
    when no rpm was found or the copy failed.
    """
    rpm_to_send = ""
    for item in os.listdir(arch_rpms_dir):
        # Prefer an exact name-version match when a version was requested
        if param_list[1] and re.match( "%s-%s-.*rpm" % (param_list[0], param_list[1]), item):
            rpm_to_send = item
            break
        elif re.match("%s-[0-9]+\.[0-9]+-.*rpm" % param_list[0], item):
            rpm_to_send = item
            break
    else:
        # for/else: only runs when the loop found nothing (no break).
        # NOTE(review): the message cites rpms_file_dir but the listing above
        # used arch_rpms_dir -- confirm which directory is meant.
        bb.warn( "No rpm package found for %s %s in .rpm files dir %s. Skipping deployment." %
                 (param_list[0], "ver. " + param_list[1] if param_list[1] else "", rpms_file_dir))
        return
    (status, output) = oeRuntimeTest.tc.target.copy_to( os.path.join(arch_rpms_dir, rpm_to_send), remote_path)
    if status != 0:
        bb.warn("Failed to copy %s on the remote target: %s" % (param_list[0], d.getVar("MACHINE")))
        return
    # Schedule the copied rpm for removal when requested
    if param_list[4] == "rm":
        cleanup_list.append(os.path.join(remote_path, rpm_to_send))
    return cleanup_list
def _vercmp(self, old, new):
    """
    Check whether 'new' is newer than 'old' version. We use existing vercmp() for the
    purpose. PE is cleared in comparison as it's not for build, and PR is cleared too
    for simplicity as it's somehow difficult to get from various upstream format

    old/new are (pn, pv, suffix) tuples; returns a vercmp-style int, or False
    when the package names differ or the suffix is unrecognised.
    """
    (oldpn, oldpv, oldsuffix) = old
    (newpn, newpv, newsuffix) = new

    # Check for a new suffix type that we have never heard of before
    # (converted from bare string literals that executed as no-op statements,
    # matching the other _vercmp implementation in this file)
    if (newsuffix):
        m = self.suffix_regex_comp.search(newsuffix)
        if not m:
            bb.warn("%s has a possible unknown suffix: %s" % (newpn, newsuffix))
            return False

    # Not our package so ignore it
    if oldpn != newpn:
        return False

    oldpv = self._modelate_version(oldpv)
    newpv = self._modelate_version(newpv)

    return bb.utils.vercmp(("0", oldpv, ""), ("0", newpv, ""))
def dump_target_logs(self):
    """Collect diagnostic command output from the target after a test failure.

    Runs a fixed set of commands over the serial connection and saves each
    command's output under /tmp/oe-saved-tests/<timestamp>_<testname>/.
    """
    commands = ["top -bn1", "ps", "free", "df", "_ping", "dmesg", "netstat -a", "ifconfig -a", "_logs"]
    dump_dir = "/tmp/oe-saved-tests"
    dump_sub_dir = ("%s_%s" % (datetime.datetime.now().strftime('%Y%m%d%H%M'), self._testMethodName))
    dump_dir = os.path.join(dump_dir, dump_sub_dir)
    os.makedirs(dump_dir)
    bb.warn("%s failed: getting data from target and "
            "saving into %s" % (self._testMethodName, dump_dir))
    for command in commands:
        # This will ping the host from target
        if command == "_ping":
            comm = "ping -c3 %s" % self.target.server_ip
        # This will get all the logs from /var/log/
        elif command == "_logs":
            # Build a find(1) invocation that prints a banner, the file name,
            # another banner and then the file contents for each log file
            comm = 'find /var/log/ -type f 2>/dev/null '
            comm = '%s-exec echo "%s" \\; ' % (comm, '='*20)
            comm = '%s-exec echo {} \\; ' % comm
            comm = '%s-exec echo "%s" \\; ' % (comm, '='*20)
            comm = '%s-exec cat {} \\; -exec echo "" \\;' % comm
        else:
            comm = command
        (status, output) = self.target.run_serial(comm)
        # Save under the first word of the command (e.g. "netstat")
        filename = command.split()[0]
        with open(os.path.join(dump_dir, filename), 'w') as f:
            f.write(output)
def include(oldfn, fn, lineno, data, error_out):
    """
    Include/inherit file *fn* from *oldfn*.

    error_out: A string indicating the verb (e.g. "include", "inherit") to be
    used in a ParseError that will be raised if the file to be included could
    not be included. Specify False to avoid raising an error in this case.
    """
    if oldfn == fn: # prevent infinite recursion
        return None
    import bb
    fn = data.expand(fn)
    oldfn = data.expand(oldfn)
    if not os.path.isabs(fn):
        # Resolve relative includes against the including file's directory
        # first, then the rest of BBPATH
        dname = os.path.dirname(oldfn)
        bbpath = "%s:%s" % (dname, data.getVar("BBPATH", True))
        abs_fn, attempts = bb.utils.which(bbpath, fn, history=True)
        if abs_fn and bb.parse.check_dependency(data, abs_fn):
            bb.warn("Duplicate inclusion for %s in %s" % (abs_fn, data.getVar('FILE', True)))
        # Record every candidate path as a parse dependency so that changes
        # (including files appearing earlier in BBPATH) trigger a reparse
        for af in attempts:
            bb.parse.mark_dependency(data, af)
        if abs_fn:
            fn = abs_fn
    elif bb.parse.check_dependency(data, fn):
        bb.warn("Duplicate inclusion for %s in %s" % (fn, data.getVar('FILE', True)))
    from bb.parse import handle
    try:
        ret = handle(fn, data, True)
    except (IOError, OSError):
        if error_out:
            raise ParseError("Could not %(error_out)s file %(fn)s" % vars(), oldfn, lineno)
        # Missing optional include: log and still record the dependency so
        # the file is picked up if it appears later
        logger.debug(2, "CONF file '%s' not found", fn)
        bb.parse.mark_dependency(data, fn)
def _vercmp(self, old, new):
    """
    Decide whether *new* is a newer version than *old* using bb.utils.vercmp().
    PE is cleared in the comparison as it's not for build, and PR is cleared
    too for simplicity as it's difficult to obtain from upstream formats.
    """
    old_name, old_ver, old_suffix = old
    new_name, new_ver, new_suffix = new

    # Flag suffix types we have never seen before and bail out
    if new_suffix and not self.suffix_regex_comp.search(new_suffix):
        bb.warn("%s has a possible unknown suffix: %s" % (new_name, new_suffix))
        return False

    # Different package name: not ours, ignore
    if old_name != new_name:
        return False

    return bb.utils.vercmp(("0", self._modelate_version(old_ver), ""),
                           ("0", self._modelate_version(new_ver), ""))
def __exit__(self, a, b, c):
    """Close the current include level, stepping back to its parent.

    Warns when asked to finish at the top level.  Always returns False so
    that any in-flight exception propagates.
    """
    if self.current.parent:
        self.current = self.current.parent
    else:
        bb.warn("Include log: Tried to finish '%s' at top level." % self.filename)
    return False
def expandKeys(alterdata, readdata = None):
    """Expand variable *names* containing ${...} references in *alterdata*,
    renaming each variable to its expanded key (expansion is evaluated
    against *readdata*, which defaults to *alterdata*)."""
    if readdata == None:
        readdata = alterdata

    # First pass: collect the renames.  Split from the second pass on purpose,
    # to maximise the usefulness of the expand cache.
    pending = {}
    for key in alterdata:
        if '${' not in key:
            continue
        ekey = expand(key, readdata)
        if ekey != key:
            pending[key] = ekey

    # Second pass: perform the renames, warning when an expanded key clobbers
    # an existing value.
    for key, ekey in pending.items():
        newval = alterdata.getVar(ekey, 0)
        if newval:
            val = alterdata.getVar(key, 0)
            if val is not None and newval is not None:
                bb.warn("Variable key %s (%s) replaces original key %s (%s)." % (key, val, ekey, newval))
        alterdata.renameVar(key, ekey)
def _npm_view():
    """Run 'npm view' for the package/version in *ud* and return the parsed
    JSON metadata, raising FetchError when the package is unavailable or the
    output cannot be validated.  Uses ud/d from the enclosing scope."""
    args = []
    args.append(("json", "true"))
    args.append(("registry", ud.registry))
    pkgver = shlex.quote(ud.package + "@" + ud.version)
    cmd = ud.basecmd + " view %s" % pkgver
    env = NpmEnvironment(d)
    check_network_access(d, cmd, ud.registry)
    view_string = env.run(cmd, args=args)
    if not view_string:
        raise FetchError("Unavailable package %s" % pkgver, ud.url)
    try:
        view = json.loads(view_string)
        error = view.get("error")
        if error is not None:
            raise FetchError(error.get("summary"), ud.url)
        if ud.version == "latest":
            bb.warn("The npm package %s is using the latest " \
                    "version available. This could lead to " \
                    "non-reproducible builds." % pkgver)
        elif ud.version != view.get("version"):
            raise ParameterError("Invalid 'version' parameter", ud.url)
        return view
    except Exception as e:
        # NOTE(review): this broad handler also catches the FetchError /
        # ParameterError raised above and re-wraps them as "Invalid view from
        # npm" -- confirm whether that is intentional.
        raise FetchError("Invalid view from npm: %s" % str(e), ud.url)
def rm_bin(removal_list):
    """Remove each of the given paths on the target device, warning on failure."""
    # need to know both if the binary is sent archived and the path where it is sent if archived
    from oeqa.oetest import oeRuntimeTest
    for path in removal_list:
        status, _ = oeRuntimeTest.tc.target.run("rm " + path)
        if status != 0:
            bb.warn("Failed to remove: %s. Please ensure connection with the target device is up and running and "
                    "you have the needed rights." % path)
def add_controller_list(path):
    """Register every controller module found under *path*, skipping duplicates."""
    if not os.path.exists(os.path.join(path, '__init__.py')):
        bb.fatal('Controllers directory %s exists but is missing __init__.py' % path)
    candidates = sorted(f for f in os.listdir(path)
                        if f.endswith('.py') and not f.startswith('_'))
    for filename in candidates:
        module = 'oeqa.controllers.' + filename[:-3]
        if module in controllerslist:
            bb.warn("Duplicate controller module found for %s, only one added. Layers should create unique controller module names" % module)
        else:
            controllerslist.append(module)
def rm_bin( removal_list ):
    """Delete each entry of *removal_list* from the remote target, warning on failure."""
    # need to know both if the binary is sent archived and the path where it is sent if archived
    from oeqa.oetest import oeRuntimeTest
    for target_file in removal_list:
        result = oeRuntimeTest.tc.target.run("rm " + target_file)
        if result[0] != 0:
            bb.warn("Failed to remove: %s. Please ensure connection with the target device is up and running and "
                    "you have the needed rights." % target_file)
def upload(self, Filename, Bucket, Key):
    """Upload *Filename* to s3://<Bucket>/<Key>.

    Returns True on success, False when S3 reports a client error (the error
    is logged via bb.warn).
    """
    if self.s3client is None:
        self.makeclient()
    try:
        self.s3client.upload_file(Bucket=Bucket, Key=Key, Filename=Filename)
    except botocore.exceptions.ClientError as e:
        # Fix: the error payload lives in e.response -- the original spelled
        # it 'repsonse', which raised AttributeError instead of warning.
        err = e.response['Error']
        bb.warn("{}/{}: {} {}".format(Bucket, Key, err['Code'], err['Message']))
        return False
    return True
def explode_dep_versions(s):
    """Collapse explode_dep_versions2() output to a single version (or None)
    per dependency, warning when a dependency carried several versions."""
    r = explode_dep_versions2(s)
    for dep in r:
        versions = r[dep]
        if not versions:
            r[dep] = None
            continue
        if len(versions) > 1:
            bb.warn("explode_dep_versions(): Item %s appeared in dependency string '%s' multiple times with different values. explode_dep_versions cannot cope with this." % (dep, s))
        # Keep only the first recorded version
        r[dep] = versions[0]
    return r
def _start(self):
    """Emit a bash run script for this task and execute it.

    Writes <tmpdir>/<name>.<DATETIME>.run containing the metadata's shell
    variables and bash functions, symlinks <name>.run to it, then launches it
    (optionally under fakeroot).  NOTE: Python 2 code ('basestring', octal
    literal 0755).
    """
    runfn = "%s/%s.%s.run" % (self.tmpdir, self.name, self.meta.get("DATETIME"))
    runsymlink = "%s/%s.run" % (self.tmpdir, self.name)
    body = self.meta.get(self.name)
    if not body:
        # No script body for this task: nothing to do
        return True
    runfile = open(runfn, "w")
    runfile.write("#!/bin/bash -e\n\n")
    if os.path.exists(runsymlink) or os.path.islink(runsymlink):
        os.remove(runsymlink)
    os.symlink(os.path.basename(runfn), runsymlink)
    vars = self.meta.keys()
    vars.sort()
    bashfuncs = []
    for var in vars:
        # Python functions cannot be represented in a shell script
        if self.meta.get_flag(var, "python"):
            continue
        if "-" in var:
            bb.warn("cannot emit var with '-' to bash:", var)
            continue
        if self.meta.get_flag(var, "unexport"):
            continue
        val = self.meta.get(var)
        if self.meta.get_flag(var, "bash"):
            # Shell functions are collected and emitted after the variables
            bashfuncs.append((var, val))
            continue
        if self.meta.get_flag(var, "export"):
            runfile.write("export ")
        if val is None:
            val = ""
        if not isinstance(val, basestring):
            #print "ignoring var %s type=%s"%(var, type(val))
            continue
        quotedval = re.sub('"', '\\"', val or "")
        if var == "LD_LIBRARY_PATH":
            # Allow the emitted variable name to be overridden
            var = (self.meta.get("LD_LIBRARY_PATH_VAR") or "LD_LIBRARY_PATH")
        runfile.write('%s="%s"\n'%(var, quotedval))
    for (var, val) in bashfuncs:
        # Empty functions get a ':' no-op body to stay valid bash
        runfile.write("\n%s() {\n%s\n}\n"%(var, (val or "\t:").rstrip()))
    runfile.write("set -x\n")
    runfile.write("cd %s\n"%(os.getcwd()))
    runfile.write("%s\n"%(self.name))
    runfile.close()
    os.chmod(runfn, 0755)
    cmd = "%s"%(runfn)
    if self.meta.get_flag(self.name, "fakeroot"):
        cmd = "%s "%(self.meta.get("FAKEROOT") or "fakeroot") + cmd
    cmd = "LC_ALL=C " + cmd
    return self.startscript(cmd)
def _start(self):
    """Generate and launch the bash run script for this task.

    Same flow as the sibling _start(): write the script, refresh the
    <name>.run symlink, emit variables then bash functions, make it
    executable and start it (under fakeroot when flagged).  NOTE: Python 2
    code ('basestring', octal literal 0755).
    """
    runfn = "%s/%s.%s.run" % (self.tmpdir, self.name, self.meta.get("DATETIME"))
    runsymlink = "%s/%s.run" % (self.tmpdir, self.name)
    body = self.meta.get(self.name)
    if not body:
        # No script body for this task: nothing to do
        return True
    runfile = open(runfn, "w")
    runfile.write("#!/bin/bash -e\n\n")
    if os.path.exists(runsymlink) or os.path.islink(runsymlink):
        os.remove(runsymlink)
    os.symlink(os.path.basename(runfn), runsymlink)
    vars = self.meta.keys()
    vars.sort()
    bashfuncs = []
    for var in vars:
        # Python functions cannot be represented in a shell script
        if self.meta.get_flag(var, "python"):
            continue
        if "-" in var:
            bb.warn("cannot emit var with '-' to bash:", var)
            continue
        if self.meta.get_flag(var, "unexport"):
            continue
        val = self.meta.get(var)
        if self.meta.get_flag(var, "bash"):
            # Shell functions are collected and emitted after the variables
            bashfuncs.append((var, val))
            continue
        if self.meta.get_flag(var, "export"):
            runfile.write("export ")
        if val is None:
            val = ""
        if not isinstance(val, basestring):
            #print "ignoring var %s type=%s"%(var, type(val))
            continue
        quotedval = re.sub('"', '\\"', val or "")
        if var == "LD_LIBRARY_PATH":
            # Allow the emitted variable name to be overridden
            var = (self.meta.get("LD_LIBRARY_PATH_VAR") or "LD_LIBRARY_PATH")
        runfile.write('%s="%s"\n' % (var, quotedval))
    for (var, val) in bashfuncs:
        # Empty functions get a ':' no-op body to stay valid bash
        runfile.write("\n%s() {\n%s\n}\n" % (var, (val or "\t:").rstrip()))
    runfile.write("set -x\n")
    runfile.write("cd %s\n" % (os.getcwd()))
    runfile.write("%s\n" % (self.name))
    runfile.close()
    os.chmod(runfn, 0755)
    cmd = "%s" % (runfn)
    if self.meta.get_flag(self.name, "fakeroot"):
        cmd = "%s " % (self.meta.get("FAKEROOT") or "fakeroot") + cmd
    cmd = "LC_ALL=C " + cmd
    return self.startscript(cmd)
def exported_vars(d):
    """Yield (name, value) string pairs for every exportable variable in *d*,
    warning (and skipping) variables whose expansion fails."""
    for key in list(exported_keys(d)):
        try:
            value = d.getVar(key)
        except Exception as err:
            bb.warn("%s: Unable to export ${%s}: %s" % (d.getVar("FILE"), key, err))
            continue
        if value is not None:
            yield key, str(value)
def getTask(name):
    """Record the expanded value of task flag *name* into
    task_deps[name][task].  Uses task_deps, flags, d and task from the
    enclosing scope."""
    if not name in task_deps:
        task_deps[name] = {}
    if name in flags:
        deptask = d.expand(flags[name])
        if name in ['noexec', 'fakeroot', 'nostamp']:
            # These flags are boolean-ish; historically any value enabled
            # them, but only '1' will be honoured in future BitBake versions.
            if deptask != '1':
                bb.warn("In a future version of BitBake, setting the '{}' flag to something other than '1' "
                        "will result in the flag not being set. See YP bug #13808.".format(name))
        task_deps[name][task] = deptask
def __handle_complete(e, prefix, name, fn):
    """Record end-of-event resource usage, sanity-check the PID against the
    start sample, then hand both samples to *fn*."""
    __record_resources(prefix + '_end', e)
    info = e.data.getVarFlags('_event_info', False)
    start_sample = info['resources'][prefix + '_start']
    end_sample = info['resources'][prefix + '_end']
    if start_sample['pid'] != end_sample['pid']:
        bb.warn("PID mismatch in %s (%u vs %u)" % (name, start_sample['pid'], end_sample['pid']))
    fn(start_sample, end_sample)
def eval(self, data):
    """Apply this parsed assignment to the datastore.

    Determines the operator (=, ?=, ??=, :=, +=, =+, .=, =., export, flag
    assignment) from the regex match groups and performs the corresponding
    set on *data*, recording variable-history information as it goes.
    """
    groupd = self.groupd
    key = groupd["var"]
    loginfo = {
        'variable': key,
        'file': self.filename,
        'line': self.lineno,
    }
    if "exp" in groupd and groupd["exp"] is not None:
        data.setVarFlag(key, "export", 1, op = 'exported', **loginfo)
    op = "set"
    if "ques" in groupd and groupd["ques"] is not None:
        # ?= only assigns when the variable is currently unset
        val = self.getFunc(key, data)
        op = "set?"
        if val is None:
            val = groupd["value"]
    elif "colon" in groupd and groupd["colon"] is not None:
        # := expands immediately, against a copy so expansion has no side effects
        e = data.createCopy()
        op = "immediate"
        val = e.expand(groupd["value"], key + "[:=]")
    elif "append" in groupd and groupd["append"] is not None:
        op = "append"
        val = "%s %s" % ((self.getFunc(key, data) or ""), groupd["value"])
    elif "prepend" in groupd and groupd["prepend"] is not None:
        op = "prepend"
        val = "%s %s" % (groupd["value"], (self.getFunc(key, data) or ""))
    elif "postdot" in groupd and groupd["postdot"] is not None:
        # .= appends with no separating space
        op = "postdot"
        val = "%s%s" % ((self.getFunc(key, data) or ""), groupd["value"])
    elif "predot" in groupd and groupd["predot"] is not None:
        # =. prepends with no separating space
        op = "predot"
        val = "%s%s" % (groupd["value"], (self.getFunc(key, data) or ""))
    else:
        val = groupd["value"]
    if ":append" in key or ":remove" in key or ":prepend" in key:
        # Combining override-style :append/:remove/:prepend with the
        # +=/=+/.=/=./?= operators is almost always a mistake
        if op in ["append", "prepend", "postdot", "predot", "ques"]:
            bb.warn(key + " " + groupd[op] + " is not a recommended operator combination, please replace it.")
    flag = None
    if 'flag' in groupd and groupd['flag'] is not None:
        flag = groupd['flag']
    elif groupd["lazyques"]:
        # ??= stores a default value, applied only if nothing else sets the variable
        flag = "_defaultval"
    loginfo['op'] = op
    loginfo['detail'] = groupd["value"]
    if flag:
        data.setVarFlag(key, flag, val, **loginfo)
    else:
        data.setVar(key, val, parsing=True, **loginfo)
def get_object_info(self, Bucket, Key, quiet=True):
    """Return head_object() metadata for s3://<Bucket>/<Key>, or None when the
    request fails.  With quiet=True a 404 is logged at debug level only."""
    if self.s3client is None:
        self.makeclient()
    try:
        return self.s3client.head_object(Bucket=Bucket, Key=Key)
    except botocore.exceptions.ClientError as e:
        err = e.response['Error']
        if quiet and err['Code'] == "404":
            # A missing object is expected in quiet mode; keep it at debug level
            bb.debug(2, "not found: {}/{}".format(Bucket, Key))
        else:
            bb.warn("{}/{}: {} {}".format(Bucket, Key, err['Code'], err['Message']))
        return None
def send_extracted_binary():
    """Copy the extracted binary tree onto the target, mirroring its layout.

    Uses extracted_bin_dir, param_list, oeRuntimeTest, d and cleanup_list
    from the enclosing scope.  Returns cleanup_list on success, None when a
    copy fails.
    """
    bin_local_dir = os.path.join(extracted_bin_dir, param_list[1] if param_list[1] else os.listdir(extracted_bin_dir)[0])
    for item in files_to_copy(bin_local_dir):
        split_path = item.split(bin_local_dir)[1]
        # create the path as on DUT; eg. /usr/bin/bin_file
        # Fix: compare with '==' rather than 'is' -- identity comparison with
        # a string literal is implementation-dependent and unreliable.
        path_on_DUT = split_path if split_path[0] == "/" else "/" + split_path
        (status, output) = oeRuntimeTest.tc.target.copy_to(item, path_on_DUT)
        if status != 0:
            bb.warn("Failed to copy %s binary file %s on the remote target: %s" %
                    (param_list[0], "ver. " + param_list[1] if param_list[1] else "", d.getVar("MACHINE")))
            return
        if param_list[4] == "rm":
            cleanup_list.append(path_on_DUT)
    return cleanup_list
def run(self):
    """Main entry for the server process.

    Optionally starts the XMLRPC interface, reads heartbeat/timeout settings
    from the datastore, records the PID (and XMLRPC address) in the lock
    file, then runs main() -- under cProfile when profiling is enabled.
    """
    if self.xmlrpcinterface[0]:
        self.xmlrpc = bb.server.xmlrpcserver.BitBakeXMLRPCServer(self.xmlrpcinterface, self.cooker, self)
        print("Bitbake XMLRPC server address: %s, server port: %s" % (self.xmlrpc.host, self.xmlrpc.port))
    heartbeat_event = self.cooker.data.getVar('BB_HEARTBEAT_EVENT')
    if heartbeat_event:
        try:
            self.heartbeat_seconds = float(heartbeat_event)
        except:
            # Invalid values are ignored rather than fatal
            bb.warn('Ignoring invalid BB_HEARTBEAT_EVENT=%s, must be a float specifying seconds.' % heartbeat_event)
    self.timeout = self.server_timeout or self.cooker.data.getVar('BB_SERVER_TIMEOUT')
    try:
        if self.timeout:
            self.timeout = float(self.timeout)
    except:
        bb.warn('Ignoring invalid BB_SERVER_TIMEOUT=%s, must be a float specifying seconds.' % self.timeout)
    try:
        # Publish our PID (and XMLRPC endpoint, when enabled) via the lock file
        self.bitbake_lock.seek(0)
        self.bitbake_lock.truncate()
        if self.xmlrpc:
            self.bitbake_lock.write("%s %s:%s\n" % (os.getpid(), self.xmlrpc.host, self.xmlrpc.port))
        else:
            self.bitbake_lock.write("%s\n" % (os.getpid()))
        self.bitbake_lock.flush()
    except Exception as e:
        print("Error writing to lock file: %s" % str(e))
        pass
    if self.cooker.configuration.profile:
        try:
            import cProfile as profile
        except:
            import profile
        prof = profile.Profile()
        ret = profile.Profile.runcall(prof, self.main)
        prof.dump_stats("profile.log")
        bb.utils.process_profilelog("profile.log")
        print("Raw profiling information saved to profile.log and processed statistics to profile.log.processed")
    else:
        ret = self.main()
    return ret
def serve_forever(self):
    """Run the XMLRPC server, honouring BB_HEARTBEAT_EVENT when valid."""
    configured = self.cooker.data.getVar('BB_HEARTBEAT_EVENT')
    if configured:
        try:
            seconds = float(configured)
        except:
            # Throwing an exception here causes bitbake to hang.
            # Just warn about the invalid setting and continue
            bb.warn('Ignoring invalid BB_HEARTBEAT_EVENT=%s, must be a float specifying seconds.' % configured)
        else:
            self.heartbeat_seconds = seconds
    # Start the actual XMLRPC server
    bb.cooker.server_main(self.cooker, self._serve_forever)
def ioprio_set(who, cls, value):
    """Set the Linux I/O priority of *who* via the ioprio_set syscall.

    Only x86_64 and i?86 have their syscall numbers wired up here; on any
    other architecture a warning is emitted and nothing is changed.
    Raises ValueError when the syscall itself fails.
    """
    NR_ioprio_set = None
    if _unamearch == "x86_64":
        NR_ioprio_set = 251
    elif _unamearch[0] == "i" and _unamearch[2:4] == "86":
        # Fix: the slice must cover two characters ([2:4]) to match the "86"
        # in i386/i486/i586/i686; the original [2:3] compared a single
        # character against "86" and could never be true.
        NR_ioprio_set = 289
    if NR_ioprio_set:
        ioprio = value | (cls << IOPRIO_CLASS_SHIFT)
        rc = cdll['libc.so.6'].syscall(NR_ioprio_set, IOPRIO_WHO_PROCESS, who, ioprio)
        if rc != 0:
            raise ValueError("Unable to set ioprio, syscall returned %s" % rc)
    else:
        bb.warn("Unable to set IO Prio for arch %s" % _unamearch)
def run(self):
    """Drain queued UI events, register the UI handler and enter the server loop."""
    for queued in bb.event.ui_queue:
        self.event_queue.put(queued)
    self.event_handle.value = bb.event.register_UIHhandler(self, True)
    configured = self.cooker.data.getVar('BB_HEARTBEAT_EVENT', True)
    if configured:
        try:
            self.heartbeat_seconds = float(configured)
        except:
            # Throwing an exception here causes bitbake to hang.
            # Just warn about the invalid setting and continue
            bb.warn('Ignoring invalid BB_HEARTBEAT_EVENT=%s, must be a float specifying seconds.' % configured)
    bb.cooker.server_main(self.cooker, self.main)
def __init__(self, BBServer, clientinfo=("localhost", 0)):
    """Create the UI-side XMLRPC event server and start its callback thread.

    Fix: the default clientinfo was the single string "localhost, 0" rather
    than a (host, port) tuple.
    """
    self.eventQueue = []
    self.eventQueueLock = threading.Lock()
    self.eventQueueNotify = threading.Event()
    self.BBServer = BBServer
    self.clientinfo = clientinfo
    server = UIXMLRPCServer(self.clientinfo)
    self.host, self.port = server.socket.getsockname()
    server.register_function(self.system_quit, "event.quit")
    server.register_function(self.send_event, "event.sendpickle")
    server.socket.settimeout(1)
    self.EventHandle = None
    # the event handler registration may fail here due to cooker being in invalid state
    # this is a transient situation, and we should retry a couple of times before
    # giving up
    import time
    for count_tries in range(5):
        ret = self.BBServer.registerEventHandler(self.host, self.port)
        # Older servers return just the handle; newer ones (handle, error)
        if isinstance(ret, collections.abc.Iterable):
            self.EventHandle, error = ret
        else:
            self.EventHandle = ret
            error = ""
        if self.EventHandle is not None:
            break
        errmsg = "Could not register UI event handler. Error: %s, host %s, "\
                 "port %d" % (error, self.host, self.port)
        bb.warn("%s, retry" % errmsg)
        time.sleep(1)
    else:
        # for/else: every retry failed
        raise Exception(errmsg)
    self.server = server
    self.t = threading.Thread()
    self.t.setDaemon(True)
    self.t.run = self.startCallbackHandler
    self.t.start()
def latest_versionstring(self, ud, d):
    """
    Manipulate the URL and try to obtain the latest package version

    sanity check to ensure same name and type.
    Returns a (version, revision) tuple; empty strings when nothing matched.
    """
    package = ud.path.split("/")[-1]
    current_version = ['', d.getVar('PV'), '']

    # Possible to have no version in pkg name, such as spectrum-fw
    # (converted from a bare string literal that executed as a no-op statement)
    if not re.search(r"\d+", package):
        current_version[1] = re.sub('_', '.', current_version[1])
        current_version[1] = re.sub('-', '.', current_version[1])
        return (current_version[1], '')

    package_regex = self._init_regexes(package, ud, d)
    if package_regex is None:
        bb.warn("latest_versionstring: package %s don't match pattern" % (package))
        return ('', '')
    bb.debug(3, "latest_versionstring, regex: %s" % (package_regex.pattern))

    uri = ""
    regex_uri = d.getVar("UPSTREAM_CHECK_URI")
    if not regex_uri:
        path = ud.path.split(package)[0]

        # search for version matches on folders inside the path, like:
        # "5.7" in http://download.gnome.org/sources/${PN}/5.7/${PN}-${PV}.tar.gz
        dirver_regex = re.compile(r"(?P<dirver>[^/]*(\d+\.)*\d+([-_]r\d+)*)/")
        m = dirver_regex.search(path)
        if m:
            pn = d.getVar('PN')
            dirver = m.group('dirver')
            # Only treat the directory component as a version when it is not
            # simply the package name itself (e.g. "libfoo2/")
            dirver_pn_regex = re.compile(r"%s\d?" % (re.escape(pn)))
            if not dirver_pn_regex.search(dirver):
                return (self._check_latest_version_by_dir(dirver, package, package_regex, current_version, ud, d), '')

        uri = bb.fetch.encodeurl([ud.type, ud.host, path, ud.user, ud.pswd, {}])
    else:
        uri = regex_uri

    return (self._check_latest_version(uri, package, package_regex, current_version, ud, d), '')
def load_controller_from_module(self, target, modulename):
    """Try to load class *target* from *modulename*.

    Returns the class when present (warning when it is not a BaseTarget
    subclass), or None when the module does not provide it.  Import errors
    for *modulename* itself propagate to the caller.
    """
    obj = None
    # import module, allowing it to raise import exception
    module = __import__(modulename, globals(), locals(), [target])
    # look for target class in the module, catching any exceptions as it
    # is valid that a module may not have the target class.
    try:
        obj = getattr(module, target)
        if obj:
            from oeqa.targetcontrol import BaseTarget
            if not issubclass(obj, BaseTarget):
                bb.warn("Target {0} found, but subclass is not BaseTarget".format(target))
    except Exception:
        # Fix: a bare 'except:' also swallowed SystemExit/KeyboardInterrupt;
        # Exception still covers "attribute not present" and related errors.
        obj = None
    return obj
def upload(self, Filename, Bucket, Key):
    """Upload *Filename* to s3://<Bucket>/<Key>, retrying transient failures.

    Credential/endpoint errors are retried up to self.maxtries times with a
    backoff via s3tretry_wait(); a ClientError aborts immediately.  Returns
    True on success, False otherwise.
    """
    if self.s3client is None:
        self.makeclient()
    for attempt in range(self.maxtries):
        try:
            self.s3client.upload_file(Bucket=Bucket, Key=Key, Filename=Filename)
        except (botocore.exceptions.NoCredentialsError, botocore.exceptions.EndpointConnectionError):
            # Transient failure: back off and retry
            s3tretry_wait(attempt)
            continue
        except botocore.exceptions.ClientError as e:
            # Fix: the error payload lives in e.response -- the original
            # spelled it 'repsonse', which raised AttributeError instead of
            # warning.
            err = e.response['Error']
            bb.warn("{}/{}: {} {}".format(Bucket, Key, err['Code'], err['Message']))
            return False
        return True
    # NOTE(review): message hardcodes "10 attempts" while the loop runs
    # self.maxtries times -- confirm the intended count.
    bb.warn("{}/{}: credentials error on upload for 10 attempts".format(Bucket, Key))
    return False
def check_connection(remote, timeout):
    """Return True when a bitbake server at "host:port" answers a trivial
    command within *timeout*; False on command failure or connection error.

    Raises whatever prevented "host:port" from being parsed (after warning).
    """
    try:
        host, port = remote.split(":")
        port = int(port)
    except Exception as e:
        bb.warn("Failed to read remote definition (%s)" % str(e))
        # Fix: bare 'raise' re-raises with the original traceback intact,
        # whereas 'raise e' restarted the traceback at this line.
        raise
    server, _transport = _create_server(host, port, timeout)
    try:
        ret, err = server.runCommand(['getVariable', 'TOPDIR'])
        if err or not ret:
            return False
    except ConnectionError:
        return False
    return True
def run(self):
    """Forward queued UI events, register this handler and run the server loop."""
    for pending_event in bb.event.ui_queue:
        self.event_queue.put(pending_event)
    self.event_handle.value = bb.event.register_UIHhandler(self, True)
    heartbeat = self.cooker.data.getVar('BB_HEARTBEAT_EVENT')
    if heartbeat:
        try:
            self.heartbeat_seconds = float(heartbeat)
        except:
            # Throwing an exception here causes bitbake to hang.
            # Just warn about the invalid setting and continue
            bb.warn('Ignoring invalid BB_HEARTBEAT_EVENT=%s, must be a float specifying seconds.' % heartbeat)
    bb.cooker.server_main(self.cooker, self.main)
def renameVar(self, key, newkey, **loginfo):
    """
    Rename the variable key to newkey

    Carries over the value, the _append/_prepend/_remove style flags and any
    override shadow variables, records the rename in variable history, then
    deletes the old key.
    """
    if key == newkey:
        bb.warn("Calling renameVar with equivalent keys (%s) is invalid" % key)
        return
    if '_remote_data' in self.dict:
        # Delegate to the remote datastore first; abort if it refuses
        connector = self.dict["_remote_data"]["_content"]
        res = connector.renameVar(key, newkey)
        if not res:
            return
    val = self.getVar(key, 0, parsing=True)
    if val is not None:
        self.varhistory.rename_variable_hist(key, newkey)
        loginfo['variable'] = newkey
        loginfo['op'] = 'rename from %s' % key
        loginfo['detail'] = val
        self.varhistory.record(**loginfo)
        self.setVar(newkey, val, ignore=True, parsing=True)
    # Move append/prepend/remove keyword flags over to the new name
    for i in (__setvar_keyword__):
        src = self.getVarFlag(key, i, False)
        if src is None:
            continue
        dest = self.getVarFlag(newkey, i, False) or []
        dest.extend(src)
        self.setVarFlag(newkey, i, dest, ignore=True)
    if key in self.overridedata:
        # Recursively rename any override-specific shadow variables too
        self.overridedata[newkey] = []
        for (v, o) in self.overridedata[key]:
            self.overridedata[newkey].append([v.replace(key, newkey), o])
            self.renameVar(v, v.replace(key, newkey))
    if '_' in newkey and val is None:
        self._setvar_update_overrides(newkey, **loginfo)
    loginfo['variable'] = key
    loginfo['op'] = 'rename (to)'
    loginfo['detail'] = newkey
    self.varhistory.record(**loginfo)
    self.delVar(key, ignore=True)
def latest_versionstring(self, ud, d):
    """
    Manipulate the URL and try to obtain the latest package version

    sanity check to ensure same name and type.
    Returns a (version, revision) tuple; empty strings when nothing matched.
    """
    package = ud.path.split("/")[-1]
    current_version = ["", d.getVar("PV", True), ""]

    """possible to have no version in pkg name, such as spectrum-fw"""
    if not re.search("\d+", package):
        current_version[1] = re.sub("_", ".", current_version[1])
        current_version[1] = re.sub("-", ".", current_version[1])
        return (current_version[1], "")

    package_regex = self._init_regexes(package, ud, d)
    if package_regex is None:
        bb.warn("latest_versionstring: package %s don't match pattern" % (package))
        return ("", "")
    bb.debug(3, "latest_versionstring, regex: %s" % (package_regex.pattern))

    uri = ""
    regex_uri = d.getVar("UPSTREAM_CHECK_URI", True)
    if not regex_uri:
        path = ud.path.split(package)[0]
        # search for version matches on folders inside the path, like:
        # "5.7" in http://download.gnome.org/sources/${PN}/5.7/${PN}-${PV}.tar.gz
        dirver_regex = re.compile("(?P<dirver>[^/]*(\d+\.)*\d+([-_]r\d+)*)/")
        m = dirver_regex.search(path)
        if m:
            pn = d.getVar("PN", True)
            dirver = m.group("dirver")
            # Only treat the directory component as a version when it is not
            # simply the package name itself (e.g. "libfoo2/")
            dirver_pn_regex = re.compile("%s\d?" % (re.escape(pn)))
            if not dirver_pn_regex.search(dirver):
                return (
                    self._check_latest_version_by_dir(dirver, package, package_regex, current_version, ud, d),
                    "",
                )
        uri = bb.fetch.encodeurl([ud.type, ud.host, path, ud.user, ud.pswd, {}])
    else:
        uri = regex_uri

    return (self._check_latest_version(uri, package, package_regex, current_version, ud, d), "")
def get_file_search_metadata(d):
    '''Given the metadata, return the mirrors and sysroots to operate against.

    Returns (source_paths, mirrors, premirrors); invalid mirror entries are
    reported via bb.warn and dropped by parse_mirrors().
    '''
    premirrors, invalid = parse_mirrors(d.getVar('FILES_PREMIRRORS'))
    for invalid_entry in invalid:
        # Fix: these entries come from FILES_PREMIRRORS, but the original
        # message blamed FILES_MIRRORS.
        bb.warn('Invalid FILES_PREMIRRORS entry: {0}'.format(invalid_entry))

    mirrors, invalid = parse_mirrors(d.getVar('FILES_MIRRORS'))
    for invalid_entry in invalid:
        bb.warn('Invalid FILES_MIRRORS entry: {0}'.format(invalid_entry))

    source_paths = [os.path.realpath(p)
                    for p in d.getVar('EXTERNAL_INSTALL_SOURCE_PATHS').split()]
    return source_paths, mirrors, premirrors
def exec_func_python(func, d, runfile, cwd=None, pythonexception=False):
    """Execute a python BB 'function'.

    Emits the function to *runfile* for logging, optionally chdirs to *cwd*
    (restoring afterwards), registers the function in the method pool and
    executes it with the datastore bound as 'd'.  Failures are re-raised
    directly when pythonexception is set, otherwise wrapped in FuncFailed.
    """
    code = _functionfmt.format(function=func)
    bb.utils.mkdirhier(os.path.dirname(runfile))
    with open(runfile, 'w') as script:
        bb.data.emit_func_python(func, script, d)
    if cwd:
        try:
            olddir = os.getcwd()
        except OSError as e:
            bb.warn("%s: Cannot get cwd: %s" % (func, e))
            olddir = None
        os.chdir(cwd)
    bb.debug(2, "Executing python function %s" % func)
    try:
        # Register the function source so tracebacks point at the recipe
        text = "def %s(d):\n%s" % (func, d.getVar(func, False))
        fn = d.getVarFlag(func, "filename", False)
        lineno = int(d.getVarFlag(func, "lineno", False))
        bb.methodpool.insert_method(func, text, fn, lineno - 1)
        comp = utils.better_compile(code, func, "exec_python_func() autogenerated")
        utils.better_exec(comp, {"d": d}, code, "exec_python_func() autogenerated", pythonexception=pythonexception)
    except (bb.parse.SkipRecipe, bb.build.FuncFailed):
        # Control-flow exceptions pass through untouched
        raise
    except Exception as e:
        if pythonexception:
            raise
        logger.error(str(e))
        raise FuncFailed(func, None)
    finally:
        bb.debug(2, "Python function %s finished" % func)
        if cwd and olddir:
            try:
                os.chdir(olddir)
            except OSError as e:
                bb.warn("%s: Cannot restore cwd %s: %s" % (func, olddir, e))
def expand_paths(pathnames, mirrors):
    '''Apply search/replace to paths to get alternate search paths.

    Returns a generator with tuples of (pathname, expanded_paths).
    Each expanded_paths list always starts with the original pathname;
    a mirror entry contributes an extra path only when its substitution
    actually changed the string.
    '''
    import re

    for pathname in pathnames:
        expanded_paths = [pathname]
        for search, replace in mirrors:
            try:
                new_pathname = re.sub(search, replace, pathname, count=1)
            except re.error as exc:
                # Fix: the original warning was truncated ("Invalid
                # pattern for") — report which pattern failed and why.
                bb.warn("Invalid pattern for '{0}': {1}".format(search, exc))
                continue

            if new_pathname != pathname:
                expanded_paths.append(new_pathname)

        yield pathname, expanded_paths
def runCommand(self, command):
    """Send a command to the bitbake server and wait for its reply."""
    # @todo try/except
    self.connection.send(command)
    if not self.server.is_alive():
        raise SystemExit

    while True:
        # Swallow ctrl-c while a response is pending so the wait on the
        # server cannot be interrupted mid-exchange.
        try:
            for _attempt in range(4):
                if not self.connection.poll(5):
                    bb.warn("Timeout while attempting to communicate with bitbake server")
                    continue
                return self.connection.recv()
            bb.fatal("Gave up; Too many tries: timeout while attempting to communicate with bitbake server")
        except KeyboardInterrupt:
            pass
def _write_configspec(self, ud, d):
    """
    Create config spec file (ud.configspecfile) for ccase view
    """
    config_spec = ""
    # Fix: the second argument to getVar is the boolean expand flag;
    # the original passed the datastore object itself, which only
    # worked because any datastore is truthy.
    custom_config_spec = d.getVar("CCASE_CUSTOM_CONFIG_SPEC", True)
    if custom_config_spec is not None:
        # The variable holds literal "\n" sequences as line separators.
        for line in custom_config_spec.split("\\n"):
            config_spec += line + "\n"
        bb.warn("A custom config spec has been set, SRCREV is only relevant for the tarball name.")
    else:
        config_spec += "element * CHECKEDOUT\n"
        config_spec += "element * %s\n" % ud.label
        config_spec += "load %s%s\n" % (ud.vob, ud.module)

    logger.info("Using config spec: \n%s" % config_spec)

    with open(ud.configspecfile, 'w') as f:
        f.write(config_spec)
def send_rpm(remote_path): # if it is not required to have an extracted binary, but to send an .rpm file
    """Copy a matching .rpm for the current package to the target.

    Searches arch_rpms_dir for an rpm matching param_list[0] (package
    name) and, when set, param_list[1] (version).  NOTE(review): relies
    on names from the enclosing scope (arch_rpms_dir, param_list,
    cleanup_list, oeRuntimeTest, d, rpms_file_dir) not visible here.
    """
    rpm_to_send = ""
    for item in os.listdir(arch_rpms_dir):
        # Exact name-version match first, otherwise any versioned rpm
        # for this package name.
        if param_list[1] and re.match("%s-%s-.*rpm"%(param_list[0], param_list[1]), item):
            rpm_to_send = item
            break
        elif re.match("%s-[0-9]+\.[0-9]+-.*rpm" % param_list[0], item):
            rpm_to_send = item
            break
    else:
        # for/else: no break happened, i.e. no rpm matched.
        # NOTE(review): message cites rpms_file_dir while the scan above
        # used arch_rpms_dir — confirm which dir should be reported.
        bb.warn("No rpm package found for %s %s in .rpm files dir %s. Skipping deployment." %
                (param_list[0], "ver. " + param_list[1] if param_list[1] else "", rpms_file_dir) )
        return
    (status, output) = oeRuntimeTest.tc.target.copy_to(os.path.join(arch_rpms_dir, rpm_to_send), remote_path)
    if status != 0:
        bb.warn("Failed to copy %s on the remote target: %s" %(param_list[0], d.getVar("MACHINE")))
        return
    # "rm" in param_list[4] marks the copied rpm for later cleanup.
    if param_list[4] == "rm":
        cleanup_list.append(os.path.join(remote_path, rpm_to_send))
    return cleanup_list
def establishConnection(self, featureset):
    """Connect to the XMLRPC server named by self.remote.

    Parses "server:port", discovers our own IP by opening a UDP socket
    towards the server, then builds the proxy connection.
    """
    # The format of "remote" must be "server:port"
    try:
        host, port = self.remote.split(":")
        port = int(port)
    except Exception as e:
        bb.warn("Failed to read remote definition (%s)" % str(e))
        raise e

    # We need our IP for the server connection. We get the IP
    # by trying to connect with the server
    try:
        probe = socket.socket(socket.AF_INET, socket.SOCK_DGRAM)
        probe.connect((host, port))
        ip = probe.getsockname()[0]
        probe.close()
    except Exception as e:
        bb.warn("Could not create socket for %s:%s (%s)" % (host, port, str(e)))
        raise e

    try:
        self.serverImpl = XMLRPCProxyServer(host, port)
        self.connection = BitBakeXMLRPCServerConnection(
            self.serverImpl, (ip, 0), self.observer_only, featureset)
        return self.connection.connect(self.token)
    except Exception as e:
        bb.warn("Could not connect to server at %s:%s (%s)" % (host, port, str(e)))
        raise e
def connectXMLRPC(remote, featureset, observer_only = False, token = None):
    """Open a connection to the XMLRPC server described by remote.

    remote is "server:port"; our local IP is discovered by connecting a
    UDP socket towards the server before building the real connection.
    """
    # The format of "remote" must be "server:port"
    try:
        host, port = remote.split(":")
        port = int(port)
    except Exception as e:
        bb.warn("Failed to parse remote definition %s (%s)" % (remote, str(e)))
        raise e

    # We need our IP for the server connection. We get the IP
    # by trying to connect with the server
    try:
        sock = socket.socket(socket.AF_INET, socket.SOCK_DGRAM)
        sock.connect((host, port))
        ip = sock.getsockname()[0]
        sock.close()
    except Exception as e:
        bb.warn("Could not create socket for %s:%s (%s)" % (host, port, str(e)))
        raise e

    try:
        connection = BitBakeXMLRPCServerConnection(
            host, port, (ip, 0), observer_only, featureset)
        return connection.connect(token)
    except Exception as e:
        bb.warn("Could not connect to server at %s:%s (%s)" % (host, port, str(e)))
        raise e
def download(self, Bucket, Key, Filename, quiet=True):
    """Download an object from S3 into Filename.

    Sets the local file's mtime from the object's LastModified when
    available.  Returns True on success, False on any client or
    filesystem error.  With quiet=True, a 404 is only logged at debug
    level instead of warned.
    """
    if self.s3client is None:
        self.makeclient()
    try:
        info = self.s3client.head_object(Bucket=Bucket, Key=Key)
        self.s3client.download_file(Bucket=Bucket, Key=Key, Filename=Filename)
        if 'LastModified' in info:
            # Preserve the remote timestamp on the local copy.
            mtime = int(time.mktime(info['LastModified'].timetuple()))
            os.utime(Filename, (mtime, mtime))
    except botocore.exceptions.ClientError as e:
        err = e.response['Error']
        if quiet and err['Code'] == "404":
            bb.debug(2, "not found: {}/{}".format(Bucket, Key))
        else:
            bb.warn("{}/{}: {} {}".format(Bucket, Key, err['Code'], err['Message']))
        return False
    except OSError as e:
        # Fix: removed a no-op "if quiet: pass" that preceded this warn —
        # the warning fired regardless of quiet, so the branch was dead.
        # NOTE(review): if the intent was to silence this in quiet mode,
        # gate the warn on "not quiet" instead — confirm with callers.
        bb.warn("os.utime({}): {} (errno {})".format(Filename, e.strerror, e.errno))
        return False
    return True