def wait_task(self, poll, task):
    """Wait for a specific task to finish baking.

    Returns triple (task, result, delta), or None in case poll=True
    and the task is not yet done.
    """
    assert task in self
    result = task.wait(poll)
    if result is None:
        return None
    delta = self.remove(task)
    task.task_time = delta
    task.recipe.remaining_tasks -= 1
    if result:
        info("%s finished - %.3f s" % (task, delta))
        task.build_done(self.baker.runq.get_task_buildhash(task))
        self.baker.runq.mark_done(task)
        self.completed_tasks.append(task)
    else:
        err("%s failed - %.3f s" % (task, delta))
        self.failed_tasks.append(task)
        task.build_failed()
        # If any task for a recipe fails, ensure that we don't do rmwork.
        task.recipe.rmwork = False
    if task.recipe.remaining_tasks == 0:
        task.recipe.do_rmwork()
    return (task, result, delta)
def start(self, task):
    self.count += 1
    debug("")
    debug("Preparing %s" % (task))
    task.prepare()
    info("%s started - %d / %d" % (task, self.count, self.total))
    task.build_started()
    self.add(task)
    task.start()
def update_task(self, task):
    # Pseudocode sketch: the helper calls below outline the intended
    # flow rather than a finished implementation.
    datahash = get_recipe_datahash(task.recipe)
    srchash = None
    if task is fetch:
        srchash = get_recipe_srchash(task.recipe)
    dephash = get_dependencies_hash(task)
    taskhash = hashit(datahash, srchash, dephash)
    run = 0
    if has_build(task):
        if datahash != build_datahash(task):
            info("recipe changes trigger run")
            run = 1
        if srchash != build_srchash(task):
            info("src changes trigger run")
            run = 1
        if dephash != build_dephash(task):
            info("dep changes trigger run")
            run = 1
    else:
        info("no existing build")
        run = 1
    if run:
        # this marks the task for run, and saves the combined taskhash
        # for following iterations (into runq_taskdepend.hash) and all
        # hashes for saving with the build result
        set_runable(task, datahash, srchash, dephash)
    set_runq_taskdepend_checked(task)
    return
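# The sketch above folds the three component hashes with a hashit()
# helper. A minimal illustration of what such a helper could look like,
# assuming the component hashes are hex strings; the hashlib-based body
# is an assumption, not the actual implementation.
import hashlib

def hashit(*component_hashes):
    # Fold the component hashes into one combined task hash; a missing
    # component (e.g. no srchash for non-fetch tasks) is treated as empty.
    m = hashlib.sha1()
    for h in component_hashes:
        m.update(h or "")
    return m.hexdigest()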
def timing_info(msg, start):
    msg += " time "
    delta = datetime.datetime.now() - start
    # Include delta.days so runs longer than 24 hours are not
    # truncated (timedelta.seconds alone wraps at one day).
    hours = delta.days * 24 + delta.seconds // 3600
    minutes = delta.seconds // 60 % 60
    seconds = delta.seconds % 60
    milliseconds = delta.microseconds // 1000
    if hours:
        msg += "%dh%02dm%02ds" % (hours, minutes, seconds)
    elif minutes:
        msg += "%dm%02ds" % (minutes, seconds)
    else:
        msg += "%d.%03d seconds" % (seconds, milliseconds)
    info(msg)
    return
def wait_any(self, poll):
    """Wait for any task currently in the oven to finish.

    Returns triple (task, result, time), or None.
    """
    if not poll and len(self) == 0:
        raise Exception("nothing in the oven, so you'd wait forever...")
    tasks = self.currently_baking()
    if not poll and len(tasks) == 1:
        t = tasks[0]
        if self.stdout_isatty:
            now = oelite.util.now()
            info("waiting for %s (started %.3f seconds ago) to finish"
                 % (t, now - self.starttime[t]))
        return self.wait_task(False, t)
    tasks.sort(key=lambda t: self.starttime[t])
    i = 0
    while True:
        for t in tasks:
            result = self.wait_task(True, t)
            if result is not None:
                return result
        if poll:
            break
        i += 1
        if i == 4 and self.stdout_isatty:
            info("waiting for any of these to finish:")
            now = oelite.util.now()
            for t in tasks:
                info("  %-40s started %.3f seconds ago"
                     % (t, now - self.starttime[t]))
        time.sleep(0.1)
    return None
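# For illustration, a driver loop over this oven interface might look
# roughly like the following; the runq accessors and the parallelism
# variable are hypothetical, only start()/wait_any() come from the
# code above.
while runq.has_runnable_tasks() or len(oven) > 0:
    task = runq.get_runnable_task()  # hypothetical accessor
    if task is not None and len(oven) < parallelism:
        oven.start(task)
        continue
    finished = oven.wait_any(poll=False)
    if finished is not None:
        task, result, delta = finished
        if not result:
            break  # a task failed: stop scheduling further work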
def note(*args):
    oebakery.info(" ".join(args))
def get_recipe(self, id=None, task=None, package=None, filename=None,
               type=None, name=None, version=None, strict=True,
               default_type="machine"):
    """Get recipe from cookbook.

    Returns recipe object if arguments match a single recipe.
    Returns None if recipe is not found.  If more than one recipe
    matches and the ambiguity cannot be resolved, a warning is
    printed and None is returned.
    """
    recipes = self.get_recipes(
        id=id, task=task, package=package, filename=filename,
        type=type, name=name, version=version)
    if len(recipes) == 0:
        recipe = None
    elif len(recipes) == 1:
        recipe = recipes[0]
    elif strict:
        warn("multiple recipes found in %s.%s: returning None!" % (
            self.__class__.__name__, inspect.stack()[0][3]))
        for recipe in recipes:
            info("%s:%s_%s" % (recipe.type, recipe.name, recipe.version))
        recipe = None
    else:
        chosen = [recipes[0]]
        for other in recipes[1:]:
            if chosen[0].priority > other.priority:
                continue
            if chosen[0].priority < other.priority:
                chosen = [other]
                continue
            vercmp = bb.utils.vercmp_part(chosen[0].version, other.version)
            if vercmp < 0:
                chosen = [other]
            if vercmp == 0:
                #debug("chosen=%s\nother=%s" % (chosen, other))
                #die("you have to be more precise")
                chosen.append(other)
        if len(chosen) == 1:
            recipe = chosen[0]
        elif not default_type:
            warn("multiple recipes found in %s.%s: returning None!" % (
                self.__class__.__name__, inspect.stack()[0][3]))
            for recipe in chosen:
                info("%s:%s_%s" % (recipe.type, recipe.name, recipe.version))
            recipe = None
        else:
            # there are multiple recipes with the same priority and
            # version, so let's try to pick the default type
            defaults_chosen = []
            for choice in chosen:
                if choice.type == default_type:
                    defaults_chosen.append(choice)
            if len(defaults_chosen) == 1:
                recipe = defaults_chosen[0]
            elif not defaults_chosen:
                debug("multiple recipes, but none with default_type (%s)" % (
                    default_type))
                recipe = None
            else:
                warn("multiple recipes found in %s.%s: returning None!" % (
                    self.__class__.__name__, inspect.stack()[0][3]))
                for recipe in defaults_chosen:
                    info("%s:%s_%s" % (recipe.type, recipe.name,
                                       recipe.version))
                recipe = None
    return recipe
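# Hypothetical usage of the resolution logic above; the cookbook
# instance and recipe name are made up for illustration.
recipe = cookbook.get_recipe(name="busybox", strict=False)
if recipe is None:
    die("no unambiguous busybox recipe found")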
def grab(url, filename, timeout=120, retry=5, proxies=None, passive_ftp=True):
    print "Grabbing", url
    if proxies:
        env = os.environ.copy()
        env.update(proxies)
    else:
        # this is the default, uses a copy of the current environment
        env = None
    if passive_ftp:
        psvftp = '--passive-ftp'
    else:
        psvftp = '--no-passive-ftp'
    d = os.path.dirname(filename)
    f = os.path.basename(filename)
    if not os.path.exists(d):
        os.makedirs(d)
    # Use mkstemp to create and open a guaranteed unique file.  We use
    # the file descriptor as wget's stdout.  We must download to the
    # actual ingredient dir rather than e.g. /tmp to ensure that we
    # can do a link(2) call without encountering EXDEV.
    (fd, dl_tgt) = tempfile.mkstemp(prefix=f + ".", dir=d)
    # Unfortunately, mkstemp() uses mode 0o600 when opening the file,
    # but we'd rather have used 0o644.  So we get to do a little
    # syscall dance, yay.
    mask = os.umask(0o022)
    os.fchmod(fd, 0o644 & ~mask)
    os.umask(mask)
    cmd = ['wget', '-t', str(retry), '-T', str(timeout), psvftp,
           '--no-check-certificate', '--progress=dot:mega', '-v',
           url, '-O', '-']
    try:
        returncode = subprocess.call(cmd, env=env, stdout=fd)
        if returncode != 0:
            err("Error %s %d" % (cmd, returncode))
            return False
        if os.fstat(fd).st_size == 0:
            err("The fetch of %s resulted in a zero size file?! "
                "Failing since this isn't right." % (url))
            return False
        # We use link(2) rather than rename(2), since the latter would
        # replace an existing target.  Although that's still done
        # atomically and the new file should be identical to the old,
        # it's better that once created, the target dentry is
        # "immutable".  For example, there might be some code that,
        # when opening a file, first does a stat(2), then actually
        # opens the file, and then does an fstat() and compares the
        # inode numbers.  We don't want such code to fail.  It's also
        # slightly simpler that we need to do an unlink(2) on all exit
        # paths.
        try:
            os.link(dl_tgt, filename)
        except OSError as e:
            if e.errno == errno.EEXIST:
                # Some other fetcher beat us to it, signature checking
                # should ensure we don't end up using a wrong file.
                # But do make a note of this in the log file so that
                # we can see that the races do occur, and that this
                # works as intended.
                info("Fetching %s raced with another process "
                     "- this is harmless" % url)
            else:
                err("os.link(%s, %s) failed: %s" % (dl_tgt, filename, str(e)))
                return False
    finally:
        # Regardless of how all of the above went, we have to delete
        # the temporary dentry and close the file descriptor.  We do
        # not wrap these in ignore-all try-except, since something is
        # really broken if either fails (in particular, subprocess is
        # not supposed to close the fd we give it; it should only dup2
        # it to 1, and then close the original _in the child_).
        os.unlink(dl_tgt)
        os.close(fd)
    return True
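# Hypothetical usage: try a list of mirror URLs until one succeeds.
# The URLs and the ingredient path are made up for illustration.
mirrors = ["http://example.com/sources/foo-1.0.tar.gz",
           "http://mirror.example.org/foo-1.0.tar.gz"]
for url in mirrors:
    if grab(url, "ingredients/foo-1.0.tar.gz", timeout=60, retry=2):
        break
else:
    die("could not fetch foo-1.0.tar.gz from any mirror")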
    self.runq.print_metahashable_tasks()
    err("Circular task dependencies detected. Remaining tasks:")
    for task in self.runq.get_unhashed_tasks():
        print "  %s" % (task)
    die("Unable to handle circular task dependencies")

self.runq.set_task_build_on_nostamp_tasks()
self.runq.set_task_build_on_retired_tasks()
self.runq.set_task_build_on_hashdiff()

# check for availability of prebaked packages, and set package
# filename for all packages.
depend_packages = self.runq.get_depend_packages()
url_prefix = self.config.get("PREBAKE_URL")
if url_prefix is not None:
    info("Trying to use prebakes from url: %s" % (url_prefix))
    for package in depend_packages:
        recipe = self.cookbook.get_recipe(package=package)
        if recipe.get("REBUILD") == "1":
            # skip packages that are to be rebuilt anyway
            continue
        prebake = self.find_prebaked_package(package)
        if prebake:
            self.runq.set_package_filename(package, prebake,
                                           prebake=True)
    # clear parent_task for all runq_depends where all runq_depend
    # rows with the same parent_task have the prebake flag set
    self.runq.prune_prebaked_runq_depends()
    # FIXME: this might prune too much.  If e.g. A depends on B and
    # C, and B depends on C, and all A->B dependencies are prebaked,
    # but not all A->C dependencies, B will be used prebaked, and A
    # will build with a freshly built C, which might be different
    # from the C used in B.  This is especially
def timing_info(msg, start):
    msg += " time "
    delta = now() - start
    msg += pretty_time(delta)
    info(msg)
    return
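# This refactored variant delegates to now() and pretty_time(), which
# are not shown in this section.  Judging from the float-seconds use
# of now() in wait_any() and the output format of the original
# timing_info() above, pretty_time() could look roughly like this
# (a sketch, not the actual oelite.util code):
def pretty_time(seconds):
    hours = int(seconds) // 3600
    minutes = int(seconds) // 60 % 60
    secs = int(seconds) % 60
    milliseconds = int(seconds * 1000) % 1000
    if hours:
        return "%dh%02dm%02ds" % (hours, minutes, secs)
    if minutes:
        return "%dm%02ds" % (minutes, secs)
    return "%d.%03d seconds" % (secs, milliseconds)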
def plain(*args):
    oebakery.info(" ".join(args))