def commit_changes(self, pkg_ctx):
    """Commit the upgrade for one package and export it as a patch file.

    The patch is written into the package work directory. When the package
    already carries a build error, the commit is immediately reverted so a
    broken upgrade does not cascade into later ones. Git errors are logged
    and re-raised (a "nothing to commit" message gets a friendlier note).
    """
    try:
        pkg_ctx['patch_file'] = None
        I(" %s: Auto commit changes ..." % pkg_ctx['PN'])
        self.git.add(pkg_ctx['recipe_dir'])
        self.git.commit(pkg_ctx['commit_msg'], self.opts['author'])
        patch_out = self.git.create_patch(pkg_ctx['workdir'])
        pkg_ctx['patch_file'] = patch_out.strip()
        if pkg_ctx['patch_file']:
            I(" %s: Save patch in directory: %s." %
              (pkg_ctx['PN'], pkg_ctx['workdir']))
        else:
            msg = "Patch file not generated."
            E(" %s: %s\n %s" % (pkg_ctx['PN'], msg, patch_out))
            raise Error(msg, patch_out)
        if pkg_ctx['error'] is not None:
            # A failed build must not leave its commit on the branch.
            I("Due to build errors, the commit will also be reverted to avoid cascading upgrade failures.")
            self.git.revert("HEAD")
    except Error as e:
        msg = ''
        for out_line in e.stdout.split("\n"):
            if out_line.startswith("nothing to commit"):
                msg = "Nothing to commit!"
                I(" %s: %s" % (pkg_ctx['PN'], msg))
        I(" %s: %s" % (pkg_ctx['PN'], e.stdout))
        raise e
def _handle_error(self, e, machine):
    """React to an exception raised while testing images on *machine*.

    Only IntegrationError is handled: the offending package's stdout is
    appended to integration_error.log in its workdir, the shared
    sstate-cache/tmp build dirs are wiped (to avoid QA errors against
    lower versions) and the package is removed from the set under test.
    A second IntegrationError for the same (already removed) package is
    treated as unhandled to avoid an infinite retry loop.

    Returns True when handled (caller may retry), False otherwise.
    """
    handled = True
    if isinstance(e, IntegrationError):
        pkg_ctx = e.pkg_ctx
        E(" %s on machine %s failed in integration, removing..." %
          (pkg_ctx['PN'], machine))
        with open(os.path.join(pkg_ctx['workdir'],
                               'integration_error.log'), 'a+') as f:
            f.write(e.stdout)
        if pkg_ctx not in self.pkgs_ctx:
            # Fixed: the package name was passed as a stray second
            # positional argument to E() instead of being %-formatted
            # into the message like every other E() call in this file.
            E(" Infinite loop IntegrationError trying to "
              "remove %s twice, see logs." % pkg_ctx['PN'])
            handled = False
        else:
            pkg_ctx['integration_error'] = e
            # remove previous build tmp, sstate to avoid QA errors
            # on lower versions
            I(" removing sstate directory ...")
            shutil.rmtree(os.path.join(get_build_dir(), "sstate-cache"))
            I(" removing tmp directory ...")
            shutil.rmtree(os.path.join(get_build_dir(), "tmp"))
            self.pkgs_ctx.remove(pkg_ctx)
    else:
        handled = False
    return handled
def _get_packages_to_upgrade(self, packages=None):
    """Collect recipes that the upgrade-status check reports as upgradable.

    Returns a list of (pn, current_version, next_version, maintainer,
    revision) tuples. Recipes that are not upgraded are logged together
    with the reason they were skipped.
    """
    upgrades = []
    for entry in oe.recipeutils.get_recipe_upgrade_status(self.recipes):
        pn, status, cur_ver, next_ver, maintainer, revision, no_upgrade_reason = entry
        if self.args.to_version:
            next_ver = self.args.to_version
        if status == 'UPDATE' and not no_upgrade_reason:
            # Always do the upgrade if recipes are specified
            explicit = self.recipes and pn in self.recipes
            if explicit or self._pkg_upgradable(pn, next_ver, maintainer):
                upgrades.append((pn, cur_ver, next_ver, maintainer, revision))
        elif no_upgrade_reason:
            I(" Skip package %s (status = %s, current version = %s,"
              " next version = %s, no upgrade reason = %s)" %
              (pn, status, cur_ver, next_ver, no_upgrade_reason))
        else:
            I(" Skip package %s (status = %s, current version = %s,"
              " next version = %s)" % (pn, status, cur_ver, next_ver))
    return upgrades
def testimage(self, pkgs_ctx, machine, image):
    """Build *image* for *machine* with the upgraded packages installed and
    run its testimage task, appending the per-machine test log to each
    package's own testimage log.

    Side effects: mutates os.environ (CORE_IMAGE_EXTRA_INSTALL set,
    TEST_SUITES removed) and copies the testimage log into self.uh_work_dir.
    """
    # Install all upgraded packages (plus their -ptest variants where
    # enabled) into the image under test.
    os.environ['CORE_IMAGE_EXTRA_INSTALL'] = \
        self._get_pkgs_to_install(pkgs_ctx)
    # Ensure the default test suites run (a previous ptest() call sets
    # TEST_SUITES to a restricted list).
    if 'TEST_SUITES' in os.environ:
        del os.environ['TEST_SUITES']
    I(" building %s for %s ..." % (image, machine))
    try:
        self.bb.complete(image, machine)
    except Error as e:
        # Delegates blaming/removal of the offending package; may raise.
        self._handle_image_build_error(image, pkgs_ctx, e)
    I(" running %s/testimage for %s ..." % (image, machine))
    self.bb.complete("%s -c testimage" % image, machine)
    log_file = self._find_log("log.do_testimage", machine)
    # Keep a per-machine copy of the full log in the work dir.
    shutil.copyfile(
        log_file,
        os.path.join(self.uh_work_dir, "log_%s.do_testimage" % machine))
    for pkg_ctx in pkgs_ctx:
        # Lazily create the per-package bookkeeping on first machine.
        if not 'testimage' in pkg_ctx:
            pkg_ctx['testimage'] = {}
        if not 'testimage_log' in pkg_ctx:
            pkg_ctx['testimage_log'] = os.path.join(
                pkg_ctx['workdir'], "log.do_testimage")
        pkg_ctx['testimage'][machine] = True
        # Append this machine's full testimage log, bracketed by markers,
        # to the package's cumulative log.
        with open(log_file, "r") as lf:
            with open(pkg_ctx['testimage_log'], "a+") as of:
                of.write("BEGIN: TESTIMAGE for %s\n" % machine)
                for line in lf:
                    of.write(line)
                of.write("END: TESTIMAGE for %s\n" % machine)
def run(self):
    """Run ptest then testimage for every configured machine.

    Each step is retried as long as _handle_error() reports the failure
    as handled (e.g. the offending package was removed after an
    integration error); an unhandled failure is logged and the step is
    abandoned for that machine.
    """
    if len(self.pkgs_ctx) <= 0:
        I(" Testimage was enabled but any upgrade was successful.")
        return
    I(" Images will test for %s." % ', '.join(self.opts['machines']))
    for machine in self.opts['machines']:
        I(" Testing images for %s ..." % machine)
        # The two steps used to be two byte-identical retry loops;
        # factored out into _retry_step (behavior unchanged).
        self._retry_step(lambda: self.ptest(self.pkgs_ctx, machine),
                         machine)
        self._retry_step(
            lambda: self.testimage(self.pkgs_ctx, machine, self.image),
            machine)

def _retry_step(self, step, machine):
    """Run *step* repeatedly while _handle_error() recovers its failures."""
    while True:
        try:
            step()
            break
        except Exception as e:
            if not self._handle_error(e, machine):
                E(" %s/testimage on machine %s failed" % (self.image,
                                                          machine))
                self._log_error(e)
                break
def _prepare(self):
    """Optionally wipe the sstate-cache and tmp build directories before a
    run, governed by the 'clean_sstate'/'clean_tmp' settings."""
    build_dir = get_build_dir()
    sstate_dir = os.path.join(build_dir, "sstate-cache")
    if settings.get("clean_sstate", "no") == "yes" and \
            os.path.exists(sstate_dir):
        I(" Removing sstate directory ...")
        shutil.rmtree(sstate_dir)
    tmp_dir = os.path.join(build_dir, "tmp")
    if settings.get("clean_tmp", "no") == "yes" and os.path.exists(tmp_dir):
        I(" Removing tmp directory ...")
        shutil.rmtree(tmp_dir)
def _get_pkgs_to_install(self, pkgs):
    """Return a space-separated install list for the given package contexts,
    adding a '<PN>-ptest' entry for every recipe that enables ptests."""
    names = []
    for ctx in pkgs:
        pn = ctx['PN']
        names.append(pn)
        I(" Checking if package {} has ptests...".format(pn))
        if 'PTEST_ENABLED' in self.bb.env(pn):
            I(" ...yes")
            names.append(pn + '-ptest')
        else:
            I(" ...no")
    return ' '.join(names)
def buildhistory_init(devtool, bb, git, opts, pkg_ctx):
    """Create the package's BuildHistory and take the baseline snapshot for
    the first configured machine (no-op when buildhistory is disabled)."""
    if not opts['buildhistory']:
        return
    history = BuildHistory(bb, pkg_ctx['PN'], pkg_ctx['workdir'])
    pkg_ctx['buildhistory'] = history
    I(" %s: Initial buildhistory for %s ..." % (pkg_ctx['PN'],
                                                opts['machines'][:1]))
    history.init(opts['machines'][:1])
def compile(bb, git, opts, pkg_ctx):
    """Compile the package for every configured machine, recording a
    buildhistory snapshot after each build when enabled.

    Honors the 'skip_compilation' option (logs a warning and returns).
    """
    if opts['skip_compilation']:
        # Fixed: the %s placeholder was previously left unformatted, so the
        # warning printed a literal "%s" instead of the package name.
        W(" %s: Compilation was skipped by user choice!" % pkg_ctx['PN'])
        return
    for machine in opts['machines']:
        I(" %s: compiling for %s ..." % (pkg_ctx['PN'], machine))
        pkg_ctx['recipe'].compile(machine)
        if opts['buildhistory']:
            pkg_ctx['buildhistory'].add()
def compile(devtool, bb, git, opts, pkg_ctx):
    """Compile the upgraded package for every configured machine, recording
    a buildhistory snapshot after each build when enabled.

    Honors the 'skip_compilation' option (logs a warning and returns).
    """
    if opts['skip_compilation']:
        # Fixed: the %s placeholder was previously left unformatted, so the
        # warning printed a literal "%s" instead of the package name.
        W(" %s: Compilation was skipped by user choice!" % pkg_ctx['PN'])
        return
    for machine in opts['machines']:
        I(" %s: compiling upgraded version for %s ..." % (pkg_ctx['PN'],
                                                          machine))
        _compile(bb, pkg_ctx['PN'], machine, pkg_ctx['workdir'])
        if opts['buildhistory']:
            pkg_ctx['buildhistory'].add()
def testimage(self, pkgs_ctx, machine, image):
    """Build *image* for *machine* with the upgraded packages installed,
    run its testimage task, and save both bitbake outputs into self.logdir.

    Unlike the other testimage variant, failures are not re-raised: the
    failing bitbake stdout is captured and written to the log files instead.
    Side effects: mutates os.environ (CORE_IMAGE_EXTRA_INSTALL,
    TEST_LOG_DIR, TESTIMAGE_UPDATE_VARS).
    """
    os.environ['CORE_IMAGE_EXTRA_INSTALL'] = \
        self._get_pkgs_to_install(pkgs_ctx)
    # Route qemu/testimage logs into our own log directory; the
    # TESTIMAGE_UPDATE_VARS entry lets the running build pick up the change.
    os.environ['TEST_LOG_DIR'] = self.logdir
    os.environ['TESTIMAGE_UPDATE_VARS'] = 'TEST_LOG_DIR'
    I(" Installing additional packages to the image: {}".format(
        os.environ['CORE_IMAGE_EXTRA_INSTALL']))
    I(" building %s for %s ..." % (image, machine))
    bitbake_create_output = ""
    bitbake_run_output = ""
    try:
        bitbake_create_output = self.bb.complete(image, machine)
    except Error as e:
        # Keep the failing output; skip the test run (else-branch).
        I(" building the testimage failed! Collecting logs...")
        bitbake_create_output = e.stdout
    else:
        I(" running %s/testimage for %s ..." % (image, machine))
        try:
            bitbake_run_output = self.bb.complete(
                "%s -c testimage" % image, machine)
        except Error as e:
            I(" running the testimage failed! Collecting logs...")
            bitbake_run_output = e.stdout
    # Persist whatever output we collected, one file per phase.
    if bitbake_create_output:
        with open(os.path.join(self.logdir,
                               "bitbake-create-testimage.log"), 'w') as f:
            f.write(bitbake_create_output)
    if bitbake_run_output:
        with open(os.path.join(self.logdir,
                               "bitbake-run-testimage.log"), 'w') as f:
            f.write(bitbake_run_output)
    I(" All done! Testimage/ptest/qemu logs are collected to {}".format(
        self.logdir))
def ptest(self, pkgs_ctx, machine): image = 'core-image-minimal' # should use bitbake API here to trim down the list to only the recipes that inherit ptest ptest_pkgs = pkgs_ctx os.environ['CORE_IMAGE_EXTRA_INSTALL'] = \ self._get_pkgs_to_install(ptest_pkgs, ptest=True) I(" building %s for %s ..." % (image, machine)) try: self.bb.complete(image, machine) except Error as e: self._handle_image_build_error(image, pkgs_ctx, e) os.environ['TEST_SUITES'] = "ping ssh _ptest" I(" running %s/ptest for %s ..." % (image, machine)) self.bb.complete("%s -c testimage" % image, machine) ptest_log_file = self._find_log("ptest.log", machine) shutil.copyfile( ptest_log_file, os.path.join(self.uh_work_dir, "ptest_%s.log" % machine)) ptest_result = self._parse_ptest_log(ptest_log_file) for pn in ptest_result: for pkg_ctx in pkgs_ctx: if not pn == pkg_ctx['PN']: continue if not 'ptest' in pkg_ctx: pkg_ctx['ptest'] = {} if not 'ptest_log' in pkg_ctx: pkg_ctx['ptest_log'] = os.path.join( pkg_ctx['workdir'], "ptest.log") pkg_ctx['ptest'][machine] = True with open(pkg_ctx['ptest_log'], "a+") as f: f.write("BEGIN: PTEST for %s\n" % machine) for line in ptest_result[pn]: f.write(line) f.write("END: PTEST for %s\n" % machine)
def load_env(bb, git, opts, pkg_ctx):
    """Prepare the git tree and bitbake environment for upgrading a package.

    Drops any uncommitted work (after confirmation in interactive mode),
    loads the recipe's bitbake environment into pkg_ctx, creates the
    package workdir, and raises UpgradeNotNeededError when the recipe is
    already at the target version.

    Raises: UpgradeNotNeededError; exits the process on user abort.
    """
    stdout = git.status()
    if stdout != "":
        if opts['interactive']:
            W(" %s: git repository has uncommited work which will be dropped!" \
              " Proceed? (y/N)" % pkg_ctx['PN'])
            answer = sys.stdin.readline().strip().upper()
            # Anything other than an explicit 'Y' aborts. (The previous
            # "answer == '' or" test was redundant: '' != 'Y' already.)
            if answer != 'Y':
                I(" %s: User abort!" % pkg_ctx['PN'])
                exit(1)
        I(" %s: Dropping uncommited work!" % pkg_ctx['PN'])
        git.reset_hard()
        git.clean_untracked()
    pkg_ctx['env'] = bb.env(pkg_ctx['PN'])
    pkg_ctx['workdir'] = os.path.join(pkg_ctx['base_dir'], pkg_ctx['PN'])
    os.mkdir(pkg_ctx['workdir'])
    pkg_ctx['recipe_dir'] = os.path.dirname(pkg_ctx['env']['FILE'])
    if pkg_ctx['env']['PV'] == pkg_ctx['NPV']:
        raise UpgradeNotNeededError
def _comment_faulty_patch(self, patch_log):
    """Find the patch that failed to apply in *patch_log* and comment its
    SRC_URI entry out of the recipe, recording why in comment_patches_msg.

    Returns True when a faulty patch was found, located on disk and its
    SRC_URI line commented; False otherwise.
    """
    patch_file = None
    is_reverse_applied = False
    # Scan the do_patch log for the first failing patch; a
    # "can be reverse-applied" match takes precedence and is flagged.
    with open(patch_log) as log:
        for line in log:
            m1 = re.match("^Patch ([^ ]*) does not apply.*", line)
            m2 = re.match("Patch ([^ ]*) can be reverse-applied", line)
            if m2:
                m1 = m2
                is_reverse_applied = True
            if m1:
                patch_file = m1.group(1)
                break
    if not patch_file:
        return False
    I(" %s: Commenting patch %s ..." % (self.env['PN'], patch_file))
    reason = None
    found = False
    # Conventional locations for recipe-local patch files.
    dirs = [
        self.env['PN'] + "-" + self.env['PKGV'], self.env['PN'], "files"
    ]
    for dir in dirs:
        patch_file_path = os.path.join(self.recipe_dir, dir, patch_file)
        if not os.path.exists(patch_file_path):
            continue
        else:
            found = True
        # Find out upstream status of the patch
        with open(patch_file_path) as patch:
            for line in patch:
                m = re.match(".*Upstream-Status:(.*)\n", line)
                if m:
                    # First word of the status, e.g. "accepted"/"pending".
                    reason = m.group(1).strip().split()[0].lower()
    if not self._comment_patch_uri("file://" + patch_file):
        return False
    if not found:
        return False
    # Record the commented patch (and why) for the final commit message.
    self.comment_patches_msg += " * " + patch_file
    if reason:
        self.comment_patches_msg += " (" + reason + ") "
    if is_reverse_applied:
        self.comment_patches_msg += "+ reverse-applied"
    self.comment_patches_msg += "\n"
    return True
def _update_master(self):
    """Reset the working tree(s) to a fresh, up-to-date master and recreate
    the 'upgrades' working branch.

    In layer mode the poky clone is synced first, then the layer itself.
    """
    layer_mode = self.opts['layer_mode'] == 'yes'
    if layer_mode:
        I(" Sync poky master ...")
        self.poky_git.reset_hard()
        self.poky_git.clean_untracked()
        self.poky_git.checkout_branch("master")
        self.poky_git.pull()
    I(" Drop all uncommited changes (including untracked) ...")
    self.git.reset_hard()
    self.git.clean_untracked()
    self.git.checkout_branch("master")
    try:
        self.git.delete_branch("upgrades")
    except Error:
        # The branch may not exist yet on a first run.
        pass
    if layer_mode:
        I(" Sync %s master ..." % self.opts['layer_name'])
    else:
        I(" Sync poky master ...")
    self.git.pull()
    self.git.create_branch("upgrades")
def send_email(self, to_addr, subject, text, files=None, cc_addr=None):
    """Send an email with optional file attachments via the configured SMTP
    host.

    to_addr/cc_addr may be a single address string or a list of addresses.
    Returns 0 without sending when no SMTP host or From address is
    configured; send failures are logged, not raised.

    Fixes: the mutable default 'files=[]' became None (same semantics for
    callers); attachment file handles are now closed; the SMTP connection
    is closed even when sendmail() raises.
    """
    if self.smtp_host is None or self.from_addr is None:
        return 0
    if files is None:
        files = []
    I(" Sending email to: %s" % to_addr)
    msg = MIMEMultipart()
    msg['From'] = self.from_addr
    msg['To'] = ', '.join(to_addr) if isinstance(to_addr, list) else to_addr
    if cc_addr is not None:
        msg['Cc'] = ', '.join(cc_addr) if isinstance(cc_addr, list) \
            else cc_addr
    msg['Subject'] = subject
    msg.attach(MIMEText(text))
    for file in files:
        ctype, encoding = mimetypes.guess_type(file)
        if ctype is None or encoding is not None:
            # Unknown or compressed content: ship raw bytes.
            ctype = 'application/octet-stream'
        maintype, subtype = ctype.split('/', 1)
        if maintype == "text":
            with open(file) as fh:
                attachment = MIMEText(fh.read(), _subtype=subtype)
        else:
            attachment = MIMEBase(maintype, _subtype=subtype)
            with open(file, 'rb') as fh:
                attachment.set_payload(fh.read())
        attachment.add_header(
            'Content-Disposition',
            'attachment; filename="%s"' % os.path.basename(file))
        msg.attach(attachment)
    # Flatten without mangling "From " lines in the body.
    out = StringIO()
    Generator(out, mangle_from_=False).flatten(msg)
    msg_text = out.getvalue()
    try:
        smtp = SMTP(self.smtp_host, self.smtp_port)
        try:
            smtp.sendmail(self.from_addr, to_addr, msg_text)
            if cc_addr is not None:
                smtp.sendmail(self.from_addr, cc_addr, msg_text)
        finally:
            smtp.close()
    except Exception as e:
        E("Could not send email: %s" % str(e))
def commit_changes(self, pkg_ctx):
    """Commit a devtool-driven upgrade and export it as a patch file.

    Records any failure in pkg_ctx['error'] and re-raises it at the end,
    so callers see one consistent exception regardless of where the
    commit/patch generation failed.
    """
    fail = False
    try:
        pkg_ctx['patch_file'] = None
        if 'recipe' in pkg_ctx:
            I(" %s: Auto commit changes ..." % pkg_ctx['PN'])
            self.git.commit(pkg_ctx['recipe'].commit_msg,
                            self.opts['author'])
            patch_out = self.git.create_patch(pkg_ctx['workdir'])
            pkg_ctx['patch_file'] = patch_out.strip()
            if pkg_ctx['patch_file']:
                I(" %s: Save patch in directory: %s." %
                  (pkg_ctx['PN'], pkg_ctx['workdir']))
            else:
                msg = "Patch file not generated."
                E(" %s: %s\n %s" % (pkg_ctx['PN'], msg, patch_out))
                pkg_ctx['error'] = Error(msg, patch_out)
                fail = True
    except Error as e:
        msg = ''
        for out_line in e.stdout.split("\n"):
            if out_line.startswith("nothing to commit"):
                msg = "Nothing to commit!"
                I(" %s: %s" % (pkg_ctx['PN'], msg))
        I(" %s: %s" % (pkg_ctx['PN'], e.stdout))
        pkg_ctx['error'] = Error(msg, e.stdout)
        fail = True
    if fail:
        raise pkg_ctx['error']
def _check_upstream_versions(self):
    """Run bitbake's checkpkg task (for the selected recipes or 'universe')
    to refresh upstream version information; exits with guidance when
    distrodata.bbclass is not inherited."""
    I(" Fetching upstream version(s) ...")
    target = " ".join(self.recipes) if self.recipes else 'universe'
    try:
        self.bb.checkpkg(target)
    except Error as e:
        for line in e.stdout.split('\n'):
            if line.startswith("ERROR: Task do_checkpkg does not exist"):
                C(" \"distrodata.bbclass\" not inherited. Consider adding "
                  "the following to your local.conf:\n\n"
                  "INHERIT =+ \"distrodata\"\n")
                exit(1)
def _change_recipe_checksums(self, fetch_log):
    """Harvest corrected SRC_URI checksum lines from a failed-fetch log and
    rewrite them into the recipe (or its .inc files).

    Raises FetchError when the log contains no checksum lines.
    """
    # sums maps the SRC_URI entry name (the "(.*)" prefix before
    # md5sum/sha256sum) to its replacement md5sum/sha256sum lines.
    sums = {}
    with open(os.path.realpath(fetch_log)) as log:
        for line in log:
            m = None
            key = None
            m1 = re.match("^SRC_URI\[(.*)md5sum\].*", line)
            m2 = re.match("^SRC_URI\[(.*)sha256sum\].*", line)
            if m1:
                m = m1
                key = "md5sum"
            elif m2:
                m = m2
                key = "sha256sum"
            if m:
                name = m.group(1)
                # group(0) is the whole suggested SRC_URI line.
                sum_line = m.group(0) + '\n'
                if name not in sums:
                    sums[name] = {}
                sums[name][key] = sum_line
    if len(sums) == 0:
        raise FetchError()

    # checksums are usually in the main recipe but they can also be in inc
    # files... Go through the recipes/inc files until we find them
    @modify_recipe_files
    def _update_recipe_checksums(line, temp_recipe, *args, **kwargs):
        # Called once per recipe line; writes either the replacement
        # checksum line or the original line.
        # NOTE(review): with more than one entry in sums, a non-matching
        # line hits the 'else' write once per name — looks like it could
        # duplicate lines; confirm sums is effectively single-entry here.
        sums = args[0]
        for name in sums:
            m1 = re.match("^SRC_URI\[" + name + "md5sum\].*", line)
            m2 = re.match("^SRC_URI\[" + name + "sha256sum\].*", line)
            if m1:
                temp_recipe.write(sums[name]["md5sum"])
            elif m2:
                temp_recipe.write(sums[name]["sha256sum"])
            else:
                temp_recipe.write(line)

    I(" %s: Update recipe checksums ..." % self.env['PN'])
    _update_recipe_checksums(self.env, self.recipe_dir, sums)
    self.checksums_changed = True
def _clean_failed_recipes(self, failed_recipes):
    """Run 'cleansstate' for the failed recipes once and request a retry.

    Every failed recipe is remembered in self.retried_recipes; if any of
    them already failed before, no cleanup is attempted and False is
    returned so a human can take over.
    """
    already_retried = any(r in self.retried_recipes
                          for r in failed_recipes)
    # Remember everything so a second failure is never retried again.
    self.retried_recipes.update(failed_recipes)
    if already_retried:
        return False
    I(" %s: The following recipe(s): %s, failed. "
      "Doing a 'cleansstate' and then retry ..." %
      (self.env['PN'], ' '.join(failed_recipes.keys())))
    self.bb.cleansstate(' '.join(failed_recipes.keys()))
    return True
def _get_packages_to_upgrade(self, packages=None):
    """Return upgradable packages, reusing a cached checkpkg.csv when the
    master commit and check date are unchanged since the last run.

    The cache file 'upgrade-helper/last_checkpkg_run' holds one line:
    "<date>,<master-commit>,<checkpkg.csv path>"; it is rewritten only
    after a successful check so a crash forces a fresh checkpkg next time.
    """
    last_date_checked = None
    last_master_commit = None
    last_checkpkg_file = None
    current_date = date.isoformat(date.today())
    try:
        stdout = self.git.last_commit("master")
        cur_master_commit = stdout
    except Error:
        # Not fatal: "unknown" simply never matches the cached commit,
        # forcing a fresh upstream check.
        cur_master_commit = "unknown"
    if os.path.exists(get_build_dir() +
                      "/upgrade-helper/last_checkpkg_run"):
        with open(get_build_dir() +
                  "/upgrade-helper/last_checkpkg_run") as last_check:
            line = last_check.read()
            # NOTE(review): assumes a well-formed 3-field line; a
            # truncated cache file would raise IndexError — confirm.
            last_date_checked = line.split(',')[0]
            last_master_commit = line.split(',')[1]
            last_checkpkg_file = line.split(',')[2]
            if not os.path.exists(last_checkpkg_file):
                last_checkpkg_file = None
    if last_master_commit != cur_master_commit or \
            last_date_checked != current_date or \
            last_checkpkg_file is None:
        self._check_upstream_versions()
        last_checkpkg_file = os.path.realpath(get_build_dir() +
                                              "/tmp/log/checkpkg.csv")
    else:
        I(" Using last checkpkg.csv file since last master commit and last"
          " check date are the same ...")
    pkgs_list = []
    # pkg tuples come from checkpkg.csv: (pn, next_ver, maintainer, ...).
    for pkg in self._parse_checkpkg_file(last_checkpkg_file):
        if self._pkg_upgradable(pkg[0], pkg[1], pkg[2]):
            pkgs_list.append(pkg)
    # Update last_checkpkg_run only after the version check has been completed
    with open(get_build_dir() + "/upgrade-helper/last_checkpkg_run",
              "w+") as last_check:
        last_check.write(current_date + "," + cur_master_commit + "," +
                         last_checkpkg_file)
    return pkgs_list
def fetch(self):
    """Fetch the upgraded sources, adapting the recipe as needed.

    A plain fetch is tried first; on a checksum mismatch the recipe
    checksums are rewritten from the fetch log and the attempt counts as
    succeeded. For non-git recipes, a broken URI is retried with each
    alternative SRC_URI suffix.

    Raises Error when no valid SRC_URI could be built, or when a renamed
    recipe fetched successfully without its checksums changing.
    """
    from recipe.git import GitRecipe

    def _try_fetch():
        # True = fetch succeeded (including a successful checksum
        # rewrite); False = the URI itself is broken.
        try:
            self.bb.fetch(self.env['PN'])
            # Fixed: a bare 'return' (None, falsy) made a *successful*
            # fetch look like a failure, sending the caller into the
            # SRC_URI-suffix loop and ultimately raising
            # "Can't built a valid SRC_URI".
            return True
        except Error as e:
            machine, failed_recipes = self._get_failed_recipes(e.stdout)
            if not self.env['PN'] in failed_recipes:
                raise Error("Unknown error occured during fetch",
                            stdout=e.stdout, stderr=e.stderr)
            fetch_log = failed_recipes[self.env['PN']][1]
            if not self._is_uri_failure(fetch_log) and not \
                    self.checksums_changed:
                self._change_recipe_checksums(fetch_log)
                self.checksums_changed = True
                return True
            return False

    succeed = _try_fetch()
    if not succeed and not isinstance(self, GitRecipe):
        for sfx in self.suffixes:
            I(" Trying new SRC_URI suffix: %s ..." % sfx)
            self._change_source_suffix(sfx)
            succeed = _try_fetch()
            if succeed:
                break
    if not succeed:
        raise Error("Can't built a valid SRC_URI")
    elif self.recipes_renamed and not self.checksums_changed:
        raise Error("Fetch succeeded without changing checksums")
def run(self):
    """Build and test the image on the first configured machine only."""
    first_machine = self.opts['machines'][0]
    I(" Testing image for %s ..." % first_machine)
    self.testimage(self.pkgs_ctx, first_machine, self.image)
def buildhistory_diff(devtool, bb, git, opts, pkg_ctx):
    """Produce the package's buildhistory diff (no-op when buildhistory is
    disabled)."""
    if not opts['buildhistory']:
        return
    I(" %s: Checking buildhistory ..." % pkg_ctx['PN'])
    pkg_ctx['buildhistory'].diff()
def _license_issue_handled(self, config_log):
    """Detect a license-checksum mismatch in *config_log*, update the
    recipe's LIC_FILES_CHKSUM and generate a diff of the license file.

    Returns True when the issue was handled and compilation should be
    retried (in interactive mode only after the user agreed), False when
    no license issue was found.
    """

    @modify_recipe_files
    def _update_license_checksum(line, temp_recipe, *args, **kwargs):
        # Substitute the old md5 with the new one wherever it appears.
        d = args[0]
        m = re.match("(.*)" + d['old_md5'] + "(.*)", line)
        if m is not None:
            temp_recipe.write(m.group(1) + d['new_md5'] + m.group(2) + "\n")
        else:
            temp_recipe.write(line)

    license_file = None
    with open(config_log) as log:
        for line in log:
            if not line.startswith("ERROR:"):
                continue
            # The error line format varies with the optional
            # beginline/endline parameters; try each shape in turn.
            m_old = re.match("ERROR: " + self.env['PN'] +
                             "[^:]*: md5 data is not matching for "
                             "file://([^;]*);md5=(.*)$", line)
            if not m_old:
                m_old = re.match("ERROR: " + self.env['PN'] +
                                 "[^:]*: md5 data is not matching for "
                                 "file://([^;]*);beginline=[0-9]*;"
                                 "endline=[0-9]*;md5=(.*)$", line)
            if not m_old:
                m_old = re.match("ERROR: " + self.env['PN'] +
                                 "[^:]*: md5 data is not matching for "
                                 "file://([^;]*);endline=[0-9]*;md5=(.*)$",
                                 line)
            if not m_old:
                m_old = re.match("ERROR: " + self.env['PN'] +
                                 "[^:]*: md5 data is not matching for "
                                 "file://([^;]*);beginline=[0-9]*;md5=(.*)$",
                                 line)
            m_new = re.match("ERROR: " + self.env['PN'] +
                             "[^:]*: The new md5 checksum is (.*)", line)
            if m_old:
                license_file = m_old.group(1)
                old_md5 = m_old.group(2)
            elif m_new:
                new_md5 = m_new.group(1)
    if license_file is not None:
        d = {}
        d['old_md5'] = old_md5
        d['new_md5'] = new_md5
        _update_license_checksum(self.env, self.recipe_dir, d)
        self.create_diff_file(license_file, old_md5, new_md5)
        self.license_diff_file = os.path.join(
            self.workdir, os.path.basename(license_file + ".diff"))
        if self.interactive:
            # Fixed: the concatenated literals read "The recipe
            # hasbeen updated!" — a space was missing.
            W(" %s: license checksum failed for file %s. The recipe has "
              "been updated! View diff? (Y/n)" % (self.env['PN'],
                                                  license_file))
            answer = sys.stdin.readline().strip().upper()
            if answer == '' or answer == 'Y':
                I(" ################ Licence file diff #################")
                with open(self.license_diff_file) as diff:
                    I("%s" % diff.read())
                I(" ####################################################")
            I(" Retry compilation? \n(Y/n)")
            answer = sys.stdin.readline().strip().upper()
            if answer == '' or answer == 'Y':
                return True
        else:
            W(" %s: license checksum failed for file %s."
              " The recipe has been updated! Diff file located at %s" %
              (self.env['PN'], license_file, self.license_diff_file))
            I(" Recompiling ...")
            self.commit_msg += "License checksum changed for file " + \
                license_file
            return True
    return False
def _add_not_shipped(self, package_log):
    """Parse a do_package log for 'installed but not shipped' files and
    append them to the recipe's FILES_${PN} clause, substituting standard
    prefixes with their bitbake variables.

    Returns True when unshipped files were detected (the recipe was
    rewritten), False otherwise.
    """
    files_not_shipped = False
    files = []        # unshipped paths, later rewritten for the recipe
    occurences = []   # per-path hit count; >1 collapses to a /* glob
    # Path prefix -> bitbake variable used in the FILES entry.
    prefixes = {
        # NOTE(review): "******" is not a valid variable name — looks like
        # a placeholder that deliberately disables plain "/usr"; confirm.
        "/usr": "******",
        "/bin": "base_bindir",
        "/sbin": "base_sbindir",
        "/lib": "base_libdir",
        "/usr/share": "datadir",
        "/etc": "sysconfdir",
        "/var": "localstatedir",
        "/usr/share/info": "infodir",
        "/usr/share/man": "mandir",
        "/usr/share/doc": "docdir",
        "/srv": "servicedir",
        "/usr/bin": "bindir",
        "/usr/sbin": "sbindir",
        "/usr/libexec": "libexecdir",
        "/usr/lib": "libdir",
        "/usr/include": "includedir",
        # NOTE(review): duplicate key — the palmtopdir entry below is
        # silently overwritten by palmqtdir; one of the two is dead.
        "/usr/lib/opie": "palmtopdir",
        "/usr/lib/opie": "palmqtdir",
    }
    with open(package_log) as log:
        for line in log:
            if re.match(
                    ".*Files/directories were installed but not shipped.*",
                    line):
                I(" %s: Add new files in recipe ..." % self.env['PN'])
                files_not_shipped = True
            # Extract path
            line = line.strip()
            if line:
                line = line.split()[0]
            # Only absolute paths after the marker line are file entries.
            if files_not_shipped and os.path.isabs(line):
                # Count occurences for globbing
                path_exists = False
                for i in range(0, len(files)):
                    if line.find(files[i]) == 0:
                        path_exists = True
                        occurences[i] += 1
                        break
                if not path_exists:
                    files.append(line)
                    occurences.append(1)
    for i in range(0, len(files)):
        # Change paths to globbing expressions where is the case
        if occurences[i] > 1:
            files[i] += "/*"
        largest_prefix = ""
        # Substitute prefix (longest match wins)
        for prefix in prefixes:
            if files[i].find(prefix) == 0 and \
                    len(prefix) > len(largest_prefix):
                largest_prefix = prefix
        if largest_prefix:
            replacement = "${" + prefixes[largest_prefix] + "}"
            files[i] = files[i].replace(largest_prefix, replacement)

    @modify_recipe_files
    def _append_new_files(line, temp_recipe, *args, **kwargs):
        # Rewrites the recipe line by line; once inside the FILES_${PN}
        # assignment, the collected files are appended before the
        # closing quote.
        d = args[0]
        if re.match("^FILES_\${PN}[ +=].*", line):
            d['files_clause'] = True
            temp_recipe.write(line)
            return
        # Get front spacing
        if d['files_clause']:
            front_spacing = re.sub("[^ \t]", "", line)
        # Append once the last line has of FILES has been reached
        if re.match(".*\".*", line) and d['files_clause']:
            d['files_clause'] = False
            line = line.replace("\"", "")
            line = line.rstrip()
            front_spacing = re.sub("[^ \t]", "", line)
            # Do not write an empty line
            if line.strip():
                temp_recipe.write(line + " \\\n")
            # Add spacing in case there was none
            if len(front_spacing) == 0:
                front_spacing = " " * 8
            # Write to file
            for i in range(len(files) - 1):
                line = front_spacing + files[i] + " \\\n"
                temp_recipe.write(line)
            line = front_spacing + files[len(files) - 1] + "\"\n"
            temp_recipe.write(line)
            return
        temp_recipe.write(line)

    d = {}
    d['files_clause'] = False
    _append_new_files(self.env, self.recipe_dir, d)
    return files_not_shipped
def run(self, package_list=None):
    """Drive the whole upgrade campaign: select packages, run the upgrade
    steps per package, commit results, optionally test images, then emit
    statistics and (optionally) a status email.
    """
    #pkgs_to_upgrade = self._order_pkgs_to_upgrade(
    #    self._get_packages_to_upgrade(package_list))
    pkgs_to_upgrade = self._get_packages_to_upgrade(package_list)
    total_pkgs = len(pkgs_to_upgrade)
    pkgs_ctx = {}
    I(" ########### The list of recipes to be upgraded #############")
    for p, v, m in pkgs_to_upgrade:
        I(" %s, %s, %s" % (p, v, m))
        pkgs_ctx[p] = {}
        pkgs_ctx[p]['PN'] = p
        pkgs_ctx[p]['NPV'] = v
        pkgs_ctx[p]['MAINTAINER'] = m
        pkgs_ctx[p]['base_dir'] = self.uh_recipes_all_dir
    I(" ############################################################")
    if pkgs_to_upgrade:
        # Pre-build gcc runtimes per machine; failures are logged but do
        # not abort the campaign.
        I(" Building gcc runtimes ...")
        for machine in self.opts['machines']:
            I(" building gcc runtime for %s" % machine)
            try:
                self.bb.complete("gcc-runtime", machine)
            except Exception as e:
                E(" Can't build gcc-runtime for %s." % machine)
                if isinstance(e, Error):
                    E(e.stdout)
                else:
                    import traceback
                    traceback.print_exc(file=sys.stdout)
    succeeded_pkgs_ctx = []
    failed_pkgs_ctx = []
    attempted_pkgs = 0
    for pn, _, _ in pkgs_to_upgrade:
        pkg_ctx = pkgs_ctx[pn]
        pkg_ctx['error'] = None
        attempted_pkgs += 1
        I(" ATTEMPT PACKAGE %d/%d" % (attempted_pkgs, total_pkgs))
        try:
            I(" %s: Upgrading to %s" % (pkg_ctx['PN'], pkg_ctx['NPV']))
            # upgrade_steps is a module-level list of (callable, message).
            for step, msg in upgrade_steps:
                if msg is not None:
                    I(" %s: %s" % (pkg_ctx['PN'], msg))
                step(self.bb, self.git, self.opts, pkg_ctx)
            succeeded_pkgs_ctx.append(pkg_ctx)
            I(" %s: Upgrade SUCCESSFUL! Please test!" % pkg_ctx['PN'])
        except Exception as e:
            if isinstance(e, UpgradeNotNeededError):
                I(" %s: %s" % (pkg_ctx['PN'], e.message))
            elif isinstance(e, UnsupportedProtocolError):
                I(" %s: %s" % (pkg_ctx['PN'], e.message))
            else:
                if not isinstance(e, Error):
                    # Wrap unexpected exceptions so downstream reporting
                    # can rely on the Error interface.
                    import traceback
                    msg = "Failed(unknown error)\n" + \
                        traceback.format_exc()
                    e = Error(message=msg)
                    # NOTE(review): 'error' is assigned but never read —
                    # looks vestigial; confirm before removing.
                    error = e
                E(" %s: %s" % (pkg_ctx['PN'], e.message))
                if os.listdir(pkg_ctx['workdir']):
                    E(" %s: Upgrade FAILED! \nLogs and/or file diffs are available in %s" % (pkg_ctx['PN'], pkg_ctx['workdir']))
            pkg_ctx['error'] = e
            failed_pkgs_ctx.append(pkg_ctx)
        try:
            self.commit_changes(pkg_ctx)
        except:
            # NOTE(review): bare except — commit_changes records its error
            # in pkg_ctx before raising; here a failed commit demotes a
            # previously-successful package to the failed list.
            if pkg_ctx in succeeded_pkgs_ctx:
                succeeded_pkgs_ctx.remove(pkg_ctx)
                failed_pkgs_ctx.append(pkg_ctx)
    if self.opts['testimage']:
        ctxs = {}
        ctxs['succeeded'] = succeeded_pkgs_ctx
        ctxs['failed'] = failed_pkgs_ctx
        image = settings.get('testimage_name', DEFAULT_TESTIMAGE)
        tim = TestImage(self.bb, self.git, self.uh_work_dir, self.opts,
                        ctxs, image)
        tim.run()
    for pn in pkgs_ctx.keys():
        pkg_ctx = pkgs_ctx[pn]
        # Sort each package's workdir into succeed/failed via symlinks
        # and feed the outcome to statistics and the per-package handler.
        if pkg_ctx in succeeded_pkgs_ctx:
            os.symlink(pkg_ctx['workdir'], os.path.join( \
                self.uh_recipes_succeed_dir, pkg_ctx['PN']))
        else:
            os.symlink(pkg_ctx['workdir'], os.path.join( \
                self.uh_recipes_failed_dir, pkg_ctx['PN']))
        self.statistics.update(pkg_ctx['PN'], pkg_ctx['NPV'],
                               pkg_ctx['MAINTAINER'], pkg_ctx['error'])
        self.pkg_upgrade_handler(pkg_ctx)
    if attempted_pkgs > 0:
        publish_work_url = settings.get('publish_work_url', '')
        work_tarball = os.path.join(
            self.uh_base_work_dir,
            os.path.basename(self.uh_work_dir) + '.tar.gz')
        if publish_work_url:
            I(" Generating work tarball in %s ..." % work_tarball)
            import subprocess
            if subprocess.call(
                    ["tar", "-chzf", work_tarball, self.uh_work_dir]):
                E(" Work tarball (%s) generation failed..." %
                  (work_tarball))
                # Disable publishing when the tarball could not be made.
                publish_work_url = ''
        statistics_summary = self.statistics.get_summary(
            publish_work_url, os.path.basename(self.uh_work_dir))
        statistics_file = os.path.join(self.uh_work_dir,
                                       "statistics_summary")
        with open(statistics_file, "w+") as f:
            f.write(statistics_summary)
        I(" %s" % statistics_summary)
        if self.opts['send_email']:
            self.send_status_mail(statistics_summary)
def _get_packages_to_upgrade(self, packages=None):
    """Return the explicitly requested package list; exit the process when
    nothing was requested."""
    if packages is None:
        I("Nothing to upgrade")
        exit(0)
    return packages
def compile(self, machine):
    """Build the recipe for *machine*, recursively retrying after each
    automatic fix (cleansstate, commenting faulty patches, license
    checksum updates).

    Raises CompilationError / PatchError / ConfigureError / LicenseError /
    FetchError / PackageError depending on which task failed and whether
    it could be auto-repaired.
    """
    try:
        self.bb.complete(self.env['PN'], machine)
        if self.removed_patches:
            # move temporary changes into upgrades branch
            self.git.checkout_branch("upgrades")
            self.git.delete_branch("comment_patches")
            self.git.reset_soft(1)
            self.commit_msg += self.comment_patches_msg + "\n"
            self.removed_patches = False
    except Error as e:
        if self._is_incompatible_host(e.stdout):
            W(" %s: compilation failed: incompatible host" %
              self.env['PN'])
            return
        # NOTE(review): this rebinds the 'machine' parameter with the
        # machine parsed from bitbake output — confirm that is intended.
        machine, failed_recipes = self._get_failed_recipes(e.stdout)
        if not self.env['PN'] in failed_recipes:
            # A dependency failed, not our recipe: cleansstate it once.
            if not self._clean_failed_recipes(failed_recipes):
                self._undo_temporary()
                raise CompilationError()
            # retry
            self.compile(machine)
        else:
            # failed_recipes maps PN -> (failed task, log file path).
            failed_task = failed_recipes[self.env['PN']][0]
            log_file = failed_recipes[self.env['PN']][1]
            if failed_task == "do_patch":
                # Comment one patch after the other until
                # compilation works.
                if not self.removed_patches:
                    # Park current work on a scratch branch so the patch
                    # edits can be squashed or discarded later.
                    self.git.commit("temporary")
                    self.git.create_branch("comment_patches")
                    self.git.checkout_branch("comment_patches")
                    self.removed_patches = True
                if not self._comment_faulty_patch(log_file):
                    self._undo_temporary()
                    raise PatchError()
                # retry
                I(" %s: Recompiling for %s ..." % (self.env['PN'],
                                                   machine))
                self.compile(machine)
            elif failed_task == "do_configure":
                self._undo_temporary()
                if not self._is_license_issue(log_file):
                    raise ConfigureError()
                if not self._license_issue_handled(log_file):
                    raise LicenseError()
                #retry
                self.compile(machine)
            elif failed_task == "do_fetch":
                raise FetchError()
            elif failed_task == "do_package":
                raise PackageError()
                #if self._add_not_shipped(log_file):
                #    self.compile(machine)
                #else:
            else:
                self._undo_temporary()
                # throw a compilation exception for everything else. It
                # doesn't really matter
                raise CompilationError()