def arch_is_compatible(meta, arch_type):
    compatible_archs = meta.get("COMPATIBLE_%s_ARCHS"%arch_type)
    if compatible_archs is None:
        return True
    arch = meta.get(arch_type + "_ARCH")
    for compatible_arch in compatible_archs.split():
        if re.match(compatible_arch, arch):
            return True
    debug("skipping %s_ARCH incompatible recipe %s:%s"%(
        arch_type, recipe_type, meta.get("PN")))
    return False
def compatible_use_flags(meta):
    flags = meta.get("COMPATIBLE_IF_FLAGS")
    if not flags:
        return True
    for name in flags.split():
        val = meta.get("USE_"+name)
        if not val:
            debug("skipping %s:%s_%s (required %s USE flag not set)"%(
                recipe_type, meta.get("PN"), meta.get("PV"), name))
            return False
    return True
def compatible_use_flags(meta):
    flags = meta.get("COMPATIBLE_IF_FLAGS")
    if not flags:
        return True
    for name in flags.split():
        val = meta.get("USE_"+name)
        if not val or val == "0":
            debug("skipping %s:%s_%s (required %s USE flag not set)"%(
                recipe_type, meta.get("PN"), meta.get("PV"), name))
            return False
    return True
def cpu_families_is_compatible(meta, arch_type):
    compatible_cpu_fams = meta.get("COMPATIBLE_%s_CPU_FAMILIES"%arch_type)
    if compatible_cpu_fams is None:
        return True
    cpu_fams = meta.get(arch_type + "_CPU_FAMILIES")
    if not cpu_fams:
        return False
    for compatible_cpu_fam in compatible_cpu_fams.split():
        for cpu_fam in cpu_fams.split():
            if re.match(compatible_cpu_fam, cpu_fam):
                return True
    debug("skipping %s_CPU_FAMILIES incompatible recipe %s:%s"%(
        arch_type, recipe_type, meta.get("PN")))
    return False
def machine_is_compatible(meta):
    compatible_machines = meta.get("COMPATIBLE_MACHINES")
    if compatible_machines is None:
        return True
    machine = meta.get("MACHINE")
    if machine is None:
        debug("skipping MACHINE incompatible recipe %s:%s"%(
            recipe_type, meta.get("PN")))
        return False
    for compatible_machine in compatible_machines.split():
        if re.match(compatible_machine, machine):
            return True
    debug("skipping MACHINE incompatible recipe %s:%s"%(
        recipe_type, meta.get("PN")))
    return False
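# A minimal stand-alone sketch (not part of the original source) of the
# matching rule shared by arch_is_compatible(), cpu_families_is_compatible()
# and machine_is_compatible() above: re.match() only anchors at the start of
# the string, so an unanchored COMPATIBLE_* entry behaves like a prefix match
# unless it is terminated with "$".  The machine names below are invented
# examples.
import re

def _matches_any(patterns, value):
    # First entry that re.match()es wins, as in the functions above.
    return any(re.match(p, value) for p in patterns.split())

assert _matches_any("qemu", "qemux86")        # unanchored entry acts as a prefix
assert not _matches_any("qemu$", "qemux86")   # trailing "$" makes it exact
assert _matches_any("qemu$ beagleboard", "beagleboard")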
def get_weight(self, meta):
    if not self.name == "do_compile":
        return 1
    pmake = meta.get("PARALLEL_MAKE")
    if pmake is None or pmake == "":
        return 1
    return int(pmake.replace("-j", ""))
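# Hedged illustration (not from the original source) of the weight computed
# above: PARALLEL_MAKE is expected to look like "-j4" or "-j 4", and stripping
# the "-j" before int() handles both forms, while unset or empty values fall
# back to a weight of 1.
def _parallel_make_weight(pmake):
    if pmake is None or pmake == "":
        return 1
    return int(pmake.replace("-j", ""))

assert _parallel_make_weight(None) == 1
assert _parallel_make_weight("-j4") == 4
assert _parallel_make_weight("-j 8") == 8   # int() tolerates the leading space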
def prepare_context(self):
    meta = self.meta()
    self.function = meta.get_function(self.name)
    self.do_cleandirs()
    self.cwd = self.do_dirs() or meta.get("B")
    self.stdin = open_cloexec("/dev/null", os.O_RDONLY)
    self.logfn = "%s/%s.%s.log"%(self.function.tmpdir, self.name,
                                 meta.get("DATETIME"))
    self.logsymlink = "%s/%s.log"%(self.function.tmpdir, self.name)
    oelite.util.makedirs(os.path.dirname(self.logfn))
    try:
        self.logfilefd = open_cloexec(self.logfn,
                                      os.O_WRONLY|os.O_CREAT|os.O_TRUNC,
                                      0666)
    except OSError:
        print "Opening log file failed: %s"%(self.logfn)
        raise
    if os.path.exists(self.logsymlink) or os.path.islink(self.logsymlink):
        os.remove(self.logsymlink)
    os.symlink(os.path.basename(self.logfn), self.logsymlink)
def prepare_context(self):
    meta = self.meta()
    self.function = meta.get_function(self.name)
    self.do_cleandirs()
    self.cwd = self.do_dirs() or meta.get("B")
    self.stdin = open_cloexec("/dev/null", os.O_RDONLY)
    self.logfn = "%s/%s.%s.log" % (self.function.tmpdir, self.name,
                                   meta.get("DATETIME"))
    self.logsymlink = "%s/%s.log" % (self.function.tmpdir, self.name)
    oelite.util.makedirs(os.path.dirname(self.logfn))
    try:
        self.logfilefd = open_cloexec(
            self.logfn, os.O_WRONLY | os.O_CREAT | os.O_TRUNC, 0666)
    except OSError:
        print "Opening log file failed: %s" % (self.logfn)
        raise
    if os.path.exists(self.logsymlink) or os.path.islink(self.logsymlink):
        os.remove(self.logsymlink)
    os.symlink(os.path.basename(self.logfn), self.logsymlink)
def recipe_is_compatible(meta):
    incompatible_recipes = meta.get("INCOMPATIBLE_RECIPES")
    if incompatible_recipes is None:
        return True
    pn = meta.get("PN")
    pv = meta.get("PV")
    for incompatible_recipe in incompatible_recipes.split():
        if "_" in incompatible_recipe:
            incompatible_recipe = incompatible_recipe.rsplit("_", 1)
        else:
            incompatible_recipe = (incompatible_recipe, None)
        if not re.match("%s$"%(incompatible_recipe[0]), pn):
            continue
        if incompatible_recipe[1] is None:
            return False
        if re.match("%s$"%(incompatible_recipe[1]), pv):
            debug("skipping incompatible recipe %s:%s_%s"%(
                recipe_type, pn, pv))
            return False
    return True
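# Illustrative sketch of the INCOMPATIBLE_RECIPES entry format handled by
# recipe_is_compatible() above: each entry is "name" or "name_version", split
# on the last "_", and both parts are matched as regexes anchored with a
# trailing "$".  The recipe names and versions below are invented examples,
# and this helper returns True when a recipe would be skipped.
import re

def _recipe_blocked(incompatible_recipes, pn, pv):
    for entry in incompatible_recipes.split():
        if "_" in entry:
            name, version = entry.rsplit("_", 1)
        else:
            name, version = entry, None
        if not re.match("%s$" % name, pn):
            continue
        if version is None or re.match("%s$" % version, pv):
            return True
    return False

assert _recipe_blocked("gcc_4.5.*", "gcc", "4.5.2")
assert not _recipe_blocked("gcc_4.5.*", "gcc", "4.6.1")
assert _recipe_blocked("gcc", "gcc", "4.6.1")   # no version part blocks all versions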
def meta(self):
    if self._meta is not None:
        return self._meta
    meta = self.recipe.meta.copy()
    # Filter meta-data, enforcing restrictions on which tasks to
    # emit vars to and not including other task functions.
    emit_prefixes = (meta.get("META_EMIT_PREFIX") or "").split()
    def colon_split(s):
        import string
        return string.split(s, ":", 1)
    emit_prefixes = map(colon_split, emit_prefixes)
    for var in meta.keys():
        emit_flag = meta.get_flag(var, "emit")
        emit = (emit_flag or "").split()
        taskfunc_match = self.TASKFUNC_RE.match(var)
        if taskfunc_match:
            if taskfunc_match.group(0) not in emit:
                emit.append(taskfunc_match.group(0))
        for emit_task, emit_prefix in emit_prefixes:
            if not var.startswith(emit_prefix):
                continue
            if emit_task == "":
                if emit_flag is None:
                    emit_flag = ""
                continue
            if not emit_task.startswith("do_"):
                emit_task = "do_" + emit_task
            if not emit_task in emit:
                emit.append(emit_task)
        if (emit or emit_flag == "") and not self.name in emit:
            del meta[var]
            continue
        omit = meta.get_flag(var, "omit")
        if omit is not None and self.name in omit.split():
            del meta[var]
            continue
    self._meta = meta
    return meta
def filter_meta(self):
    meta = self._meta
    assert (meta is not None)
    # Filter meta-data, enforcing restrictions on which tasks to
    # emit vars to and not including other task functions.
    emit_prefixes = (meta.get("META_EMIT_PREFIX") or "").split()
    def emit_prefix_pair(s):
        task, prefix = s.split(":", 1)
        if task:
            task = "do_" + task
        return (task, prefix)
    # To avoid looping over the entire ~20 element list of pairs
    # for every variable, split that list according to the first
    # character of the prefix, and fetch the appropriate list
    # based on var[0].
    emit_prefix_table = {}
    for s in emit_prefixes:
        p = emit_prefix_pair(s)
        c = p[1][0]
        if c in emit_prefix_table:
            emit_prefix_table[c].append(p)
        else:
            emit_prefix_table[c] = [p]
    for var in meta.keys():
        emit_flag = meta.get_flag(var, "emit")
        emit = (emit_flag or "").split()
        taskfunc_match = self.TASKFUNC_RE.match(var)
        if taskfunc_match:
            emit.append(taskfunc_match.group(0))
        for emit_task, emit_prefix in emit_prefix_table.get(var[0], []):
            if not var.startswith(emit_prefix):
                continue
            if emit_task == "":
                if emit_flag is None:
                    emit_flag = ""
                continue
            emit.append(emit_task)
        if (emit or emit_flag == "") and not self.name in emit:
            del meta[var]
            continue
def filter_meta(self):
    meta = self._meta
    assert(meta is not None)
    # Filter meta-data, enforcing restrictions on which tasks to
    # emit vars to and not including other task functions.
    emit_prefixes = (meta.get("META_EMIT_PREFIX") or "").split()
    def emit_prefix_pair(s):
        task, prefix = s.split(":", 1)
        if task:
            task = "do_" + task
        return (task, prefix)
    # To avoid looping over the entire ~20 element list of pairs
    # for every variable, split that list according to the first
    # character of the prefix, and fetch the appropriate list
    # based on var[0].
    emit_prefix_table = {}
    for s in emit_prefixes:
        p = emit_prefix_pair(s)
        c = p[1][0]
        if c in emit_prefix_table:
            emit_prefix_table[c].append(p)
        else:
            emit_prefix_table[c] = [p]
    for var in meta.keys():
        emit_flag = meta.get_flag(var, "emit")
        emit = (emit_flag or "").split()
        taskfunc_match = self.TASKFUNC_RE.match(var)
        if taskfunc_match:
            emit.append(taskfunc_match.group(0))
        for emit_task, emit_prefix in emit_prefix_table.get(var[0], []):
            if not var.startswith(emit_prefix):
                continue
            if emit_task == "":
                if emit_flag is None:
                    emit_flag = ""
                continue
            emit.append(emit_task)
        if (emit or emit_flag == "") and not self.name in emit:
            del meta[var]
            continue
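# Sketch (with invented variable names) of the META_EMIT_PREFIX format used by
# meta()/filter_meta() above: each entry is "task:prefix", an empty task part
# meaning "emit for every task".  filter_meta() buckets the (task, prefix)
# pairs by the first character of the prefix so each variable is only checked
# against the prefixes that could possibly match var[0].
def _build_prefix_table(meta_emit_prefix):
    table = {}
    for entry in meta_emit_prefix.split():
        task, prefix = entry.split(":", 1)
        if task:
            task = "do_" + task
        table.setdefault(prefix[0], []).append((task, prefix))
    return table

table = _build_prefix_table("fetch:SRC_ :FILESPATH compile:EXTRA_OEMAKE")
assert table["S"] == [("do_fetch", "SRC_")]
assert table["F"] == [("", "FILESPATH")]
assert table["E"] == [("do_compile", "EXTRA_OEMAKE")]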
def run(self):
    meta = self.meta()
    function = meta.get_function(self.name)
    self.do_cleandirs()
    cwd = self.do_dirs() or meta.get("B")
    # Setup stdin, stdout and stderr redirection
    stdin = open("/dev/null", "r")
    self.logfn = "%s/%s.%s.log"%(function.tmpdir, self.name,
                                 str(os.getpid()))
    self.logsymlink = "%s/%s.log"%(function.tmpdir, self.name)
    oelite.util.makedirs(os.path.dirname(self.logfn))
    try:
        if self.debug:
            logfile = os.popen("tee %s"%self.logfn, "w")
        else:
            logfile = open(self.logfn, "w")
    except OSError:
        print "Opening log file failed: %s"%(self.logfn)
        raise
    if os.path.exists(self.logsymlink) or os.path.islink(self.logsymlink):
        os.remove(self.logsymlink)
    os.symlink(self.logfn, self.logsymlink)
    real_stdin = os.dup(sys.stdin.fileno())
    real_stdout = os.dup(sys.stdout.fileno())
    real_stderr = os.dup(sys.stderr.fileno())
    os.dup2(stdin.fileno(), sys.stdin.fileno())
    os.dup2(logfile.fileno(), sys.stdout.fileno())
    os.dup2(logfile.fileno(), sys.stderr.fileno())
    try:
        for prefunc in self.get_prefuncs():
            print "running prefunc", prefunc
            self.do_cleandirs(prefunc)
            wd = self.do_dirs(prefunc)
            if not prefunc.run(wd or cwd):
                return False
        try:
            if not function.run(cwd):
                return False
        except oebakery.FatalError:
            return False
        for postfunc in self.get_postfuncs():
            print "running postfunc", postfunc
            self.do_cleandirs(postfunc)
            wd = self.do_dirs(postfunc)
            if not postfunc.run(wd or cwd):
                return False
        return True
    finally:
        # Cleanup stdin, stdout and stderr redirection
        os.dup2(real_stdin, sys.stdin.fileno())
        os.dup2(real_stdout, sys.stdout.fileno())
        os.dup2(real_stderr, sys.stderr.fileno())
        stdin.close()
        logfile.close()
        os.close(real_stdin)
        os.close(real_stdout)
        os.close(real_stderr)
        if os.path.exists(self.logfn) and os.path.getsize(self.logfn) == 0:
            os.remove(self.logsymlink)
            os.remove(self.logfn) # prune empty logfiles
def run(self):
    meta = self.meta()
    function = meta.get_function(self.name)
    self.do_cleandirs()
    cwd = self.do_dirs() or meta.get("B")
    # Setup stdin, stdout and stderr redirection
    stdin = open("/dev/null", "r")
    self.logfn = "%s/%s.%s.log" % (function.tmpdir, self.name,
                                   meta.get("DATETIME"))
    self.logsymlink = "%s/%s.log" % (function.tmpdir, self.name)
    oelite.util.makedirs(os.path.dirname(self.logfn))
    try:
        if self.debug:
            logfile = os.popen("tee %s" % self.logfn, "w")
        else:
            logfile = open(self.logfn, "w")
    except OSError:
        print "Opening log file failed: %s" % (self.logfn)
        raise
    if os.path.exists(self.logsymlink) or os.path.islink(self.logsymlink):
        os.remove(self.logsymlink)
    os.symlink(os.path.basename(self.logfn), self.logsymlink)
    real_stdin = os.dup(sys.stdin.fileno())
    real_stdout = os.dup(sys.stdout.fileno())
    real_stderr = os.dup(sys.stderr.fileno())
    os.dup2(stdin.fileno(), sys.stdin.fileno())
    os.dup2(logfile.fileno(), sys.stdout.fileno())
    os.dup2(logfile.fileno(), sys.stderr.fileno())
    try:
        for prefunc in self.get_prefuncs():
            print "running prefunc", prefunc
            self.do_cleandirs(prefunc)
            wd = self.do_dirs(prefunc)
            if not prefunc.run(wd or cwd):
                return False
        try:
            if not function.run(cwd):
                return False
        except oebakery.FatalError:
            return False
        for postfunc in self.get_postfuncs():
            print "running postfunc", postfunc
            self.do_cleandirs(postfunc)
            wd = self.do_dirs(postfunc)
            if not postfunc.run(wd or cwd):
                return False
        return True
    finally:
        # Cleanup stdin, stdout and stderr redirection
        os.dup2(real_stdin, sys.stdin.fileno())
        os.dup2(real_stdout, sys.stdout.fileno())
        os.dup2(real_stderr, sys.stderr.fileno())
        stdin.close()
        logfile.close()
        os.close(real_stdin)
        os.close(real_stdout)
        os.close(real_stderr)
        if os.path.exists(self.logfn) and os.path.getsize(self.logfn) == 0:
            os.remove(self.logsymlink)
            os.remove(self.logfn) # prune empty logfiles
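# Stand-alone sketch (not from the original source) of the file-descriptor
# redirection pattern used by run() above: duplicate the real stdout/stderr
# with os.dup(), point fds 1 and 2 at a log file with os.dup2() while the work
# runs, and restore the originals in a finally block.  Paths are examples only.
import os
import sys

def _with_output_logged(logpath, body):
    logfile = open(logpath, "w")
    real_stdout = os.dup(sys.stdout.fileno())
    real_stderr = os.dup(sys.stderr.fileno())
    os.dup2(logfile.fileno(), sys.stdout.fileno())
    os.dup2(logfile.fileno(), sys.stderr.fileno())
    try:
        return body()
    finally:
        os.dup2(real_stdout, sys.stdout.fileno())
        os.dup2(real_stderr, sys.stderr.fileno())
        os.close(real_stdout)
        os.close(real_stderr)
        logfile.close()

# Example use: _with_output_logged("/tmp/do_compile.log",
#                                  lambda: os.system("echo hello"))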