def run(self):
    """Top-level driver of the rebase process.

    Validates option combinations, prepares and patches sources, builds the
    old and new packages, runs package comparisons and prints a summary.

    :return: 0 on success, 1 on failure
    """
    # --fedpkg-build-tasks is a deprecated alias of --build-tasks.
    if self.conf.fedpkg_build_tasks:
        logger.warning("Option --fedpkg-build-tasks is deprecated, use --build-tasks instead.")
        if not self.conf.build_tasks:
            self.conf.build_tasks = self.conf.fedpkg_build_tasks
    # --build-tasks only makes sense together with --builds-nowait,
    # and only for the remote build tools (fedpkg/copr).
    if self.conf.build_tasks and not self.conf.builds_nowait:
        if self.conf.buildtool in ['fedpkg', 'copr']:
            logger.error("--builds-nowait has to be specified with --build-tasks.")
            return 1
        else:
            logger.warning("Options are allowed only for fedpkg or copr build tools. Suppress them.")
            self.conf.build_tasks = self.conf.builds_nowait = False
    sources = None
    # Sources are only prepared when we are not resuming from existing build tasks.
    if self.conf.build_tasks is None:
        sources = self.prepare_sources()
        if not self.conf.build_only and not self.conf.comparepkgs:
            self.patch_sources(sources)
    build = False
    if not self.conf.patch_only:
        if not self.conf.comparepkgs:
            # check build dependencies for rpmbuild
            if self.conf.buildtool == 'rpmbuild':
                Application.check_build_requires(self.spec_file)
            # Build packages
            try:
                build = self.build_packages()
                # In nowait mode only report where the remote logs are and stop.
                if self.conf.builds_nowait and not self.conf.build_tasks:
                    if self.conf.buildtool == 'fedpkg':
                        self.print_koji_logs()
                    elif self.conf.buildtool == 'copr':
                        self.print_copr_logs()
                    return 0
            except RuntimeError:
                logger.error('Unknown error caused by build log analysis')
                return 1
            # Perform checks
        else:
            # Compare pre-built packages supplied by the user instead of building.
            build = self.get_rpm_packages(self.conf.comparepkgs)
            # We don't care dirname doesn't contain any RPM packages
            # Therefore return 1
        if build:
            self.pkgdiff_packages(self.results_dir)
        else:
            if not self.upstream_monitoring:
                logger.info('Rebase package to %s FAILED. See for more details', self.conf.sources)
            return 1
    self.print_summary()
    if not self.conf.keep_workspace:
        self._delete_workspace_dir()
    if self.debug_log_file:
        logger.info("Detailed debug log is located in '%s'", self.debug_log_file)
    if not self.upstream_monitoring and not self.conf.patch_only:
        logger.info('Rebase package to %s was SUCCESSFUL.\n', self.conf.sources)
    return 0
def run_check(cls, result_dir):
    """Compares old and new RPMs using pkgdiff

    :param result_dir: subdirectory of cls.results_dir to store the logs in
    :return: dict mapping log path to a human readable result message,
             or None when debuginfo packages are missing
    """
    debug_old, rest_pkgs_old = cls._get_packages_for_abipkgdiff(OutputLogger.get_build('old'))
    debug_new, rest_pkgs_new = cls._get_packages_for_abipkgdiff(OutputLogger.get_build('new'))
    cmd = [cls.CMD]
    # Fix: the original only checked debug_old; a missing debug_new caused a
    # TypeError on debug_new[0] instead of a graceful skip.
    if debug_old is None or debug_new is None:
        logger.warning("Package doesn't contain any debug package")
        return None
    try:
        cmd.append('--d1')
        cmd.append(debug_old[0])
    except IndexError:
        logger.error('Debuginfo package not found for old package.')
        return None
    try:
        cmd.append('--d2')
        cmd.append(debug_new[0])
    except IndexError:
        logger.error('Debuginfo package not found for new package.')
        return None
    reports = {}
    for pkg in rest_pkgs_old:
        command = list(cmd)
        # Package can be <letters><numbers>-<letters>-<and_whatever>
        regexp = r'^(\w*)(-\D+)?.*$'
        reg = re.compile(regexp)
        matched = reg.search(os.path.basename(pkg))
        if matched:
            file_name = matched.group(1)
            command.append(pkg)
            find = [x for x in rest_pkgs_new if os.path.basename(x).startswith(file_name)]
            # Fix: guard against an empty match list; the original crashed with
            # IndexError on find[0] when the new build had no counterpart.
            if not find:
                logger.warning('New version of package %s was not found!', file_name)
                continue
            command.append(find[0])
            # Fix: basename() is idempotent, one call is enough.
            package_name = os.path.basename(pkg)
            logger.debug('Package name for ABI comparision %s', package_name)
            regexp_name = r'(\w-)*(\D+)*'
            reg_name = re.compile(regexp_name)
            matched = reg_name.search(os.path.basename(pkg))
            logger.debug('Found matches %s', matched.groups())
            if matched:
                package_name = matched.group(0) + cls.log_name
            else:
                package_name = package_name + '-' + cls.log_name
            output = os.path.join(cls.results_dir, result_dir, package_name)
            try:
                ret_code = ProcessHelper.run_subprocess(command, output=output)
            except OSError:
                raise CheckerNotFoundError("Checker '%s' was not found or installed." % cls.CMD)
            if int(ret_code) & settings.ABIDIFF_ERROR and int(ret_code) & settings.ABIDIFF_USAGE_ERROR:
                raise RebaseHelperError('Execution of %s failed.\nCommand line is: %s' % (cls.CMD, cmd))
            if int(ret_code) == 0:
                text = 'ABI of the compared binaries in package %s are equal.' % package_name
            else:
                text = 'ABI of the compared binaries in package %s are not equal.' % package_name
            reports[output] = text
        else:
            # Fix: package_name could be unbound here on the first iteration;
            # log the actual file name instead.
            logger.debug("Rebase-helper did not find a package name in '%s'", os.path.basename(pkg))
    return reports
def run_check(cls, results_dir):
    """Compares old and new RPMs using rpmdiff"""
    results_dict = {tag: [] for tag in settings.CHECKER_TAGS}
    cls.results_dir = results_dir
    # Only S (size), M(mode) and 5 (checksum) are now important
    not_catched_flags = ['T', 'F', 'G', 'U', 'V', 'L', 'D', 'N']
    old_pkgs = cls._get_rpms(results_store.get_old_build().get('rpm', None))
    new_pkgs = cls._get_rpms(results_store.get_new_build().get('rpm', None))
    for name, old_rpm in six.iteritems(old_pkgs):
        # skip debug{info,source} packages
        if 'debuginfo' in name or 'debugsource' in name:
            continue
        cmd = [cls.CMD]  # TODO modify to online command
        for flag in not_catched_flags:
            cmd.extend(['-i', flag])
        cmd.append(old_rpm)
        # We would like to build correct old package against correct new packages
        try:
            cmd.append(new_pkgs[name])
        except KeyError:
            logger.warning('New version of package %s was not found!', name)
            continue
        output = StringIO()
        try:
            ProcessHelper.run_subprocess(cmd, output=output)
        except OSError:
            raise CheckerNotFoundError("Checker '%s' was not found or installed." % cls.CMD)
        results_dict = cls._analyze_logs(output, results_dict)
    results_dict = cls.update_added_removed(results_dict)
    # drop empty categories
    results_dict = dict((k, v) for k, v in six.iteritems(results_dict) if v)
    report_lines = []
    for change, file_names in six.iteritems(results_dict):
        if not file_names:
            continue
        if report_lines:
            report_lines.append('')
        report_lines.append('Following files were %s:' % change)
        report_lines.extend(file_names)
    rpmdiff_report = os.path.join(cls.results_dir, 'report-' + cls.CMD + '.log')
    try:
        with open(rpmdiff_report, "w") as f:
            f.write('\n'.join(report_lines))
    except IOError:
        raise RebaseHelperError("Unable to write result from %s to '%s'" % (cls.CMD, rpmdiff_report))
    return {rpmdiff_report: None}
def run_check(cls, result_dir):
    """Compares old and new RPMs using pkgdiff

    :param result_dir: subdirectory of cls.results_dir to store the logs in
    :return: dict mapping log path to a human readable result message,
             or None when debuginfo packages are missing
    """
    debug_old, rest_pkgs_old = cls._get_packages_for_abipkgdiff(results_store.get_build('old'))
    debug_new, rest_pkgs_new = cls._get_packages_for_abipkgdiff(results_store.get_build('new'))
    cmd = [cls.CMD]
    # Fix: the original only checked debug_old; a missing debug_new caused a
    # TypeError on debug_new[0] instead of a graceful skip.
    if debug_old is None or debug_new is None:
        logger.warning("Package doesn't contain any debug package")
        return None
    try:
        cmd.append('--d1')
        cmd.append(debug_old[0])
    except IndexError:
        logger.error('Debuginfo package not found for old package.')
        return None
    try:
        cmd.append('--d2')
        cmd.append(debug_new[0])
    except IndexError:
        logger.error('Debuginfo package not found for new package.')
        return None
    reports = {}
    for pkg in rest_pkgs_old:
        command = list(cmd)
        # Package can be <letters><numbers>-<letters>-<and_whatever>
        regexp = r'^(\w*)(-\D+)?.*$'
        reg = re.compile(regexp)
        matched = reg.search(os.path.basename(pkg))
        if matched:
            file_name = matched.group(1)
            command.append(pkg)
            find = [x for x in rest_pkgs_new if os.path.basename(x).startswith(file_name)]
            # Fix: guard against an empty match list; the original crashed with
            # IndexError on find[0] when the new build had no counterpart.
            if not find:
                logger.warning('New version of package %s was not found!', file_name)
                continue
            command.append(find[0])
            # Fix: basename() is idempotent, one call is enough.
            package_name = os.path.basename(pkg)
            logger.debug('Package name for ABI comparision %s', package_name)
            regexp_name = r'(\w-)*(\D+)*'
            reg_name = re.compile(regexp_name)
            matched = reg_name.search(os.path.basename(pkg))
            logger.debug('Found matches %s', matched.groups())
            if matched:
                package_name = matched.group(0) + cls.log_name
            else:
                package_name = package_name + '-' + cls.log_name
            output = os.path.join(cls.results_dir, result_dir, package_name)
            try:
                ret_code = ProcessHelper.run_subprocess(command, output=output)
            except OSError:
                raise CheckerNotFoundError("Checker '%s' was not found or installed." % cls.CMD)
            if int(ret_code) & settings.ABIDIFF_ERROR and int(ret_code) & settings.ABIDIFF_USAGE_ERROR:
                raise RebaseHelperError('Execution of %s failed.\nCommand line is: %s' % (cls.CMD, cmd))
            if int(ret_code) == 0:
                text = 'ABI of the compared binaries in package %s are equal.' % package_name
            else:
                text = 'ABI of the compared binaries in package %s are not equal.' % package_name
            reports[output] = text
        else:
            # Fix: package_name could be unbound here on the first iteration;
            # log the actual file name instead.
            logger.debug("Rebase-helper did not find a package name in '%s'", os.path.basename(pkg))
    return reports
def _check_workspace_dir(self):
    """Ensure a clean workspace directory.

    Any pre-existing workspace directory is removed first, then a fresh
    empty one is created.
    """
    if os.path.exists(self.workspace_dir):
        logger.warning("Workspace directory '%s' exists, removing it",
                       os.path.basename(self.workspace_dir))
        self._delete_workspace_dir()
    os.makedirs(self.workspace_dir)
def run_check(cls, results_dir, **kwargs):
    """Compares old and new RPMs using rpmdiff"""
    results_dict = {tag: [] for tag in cls.CHECKER_TAGS}
    cls.results_dir = os.path.join(results_dir, cls.NAME)
    os.makedirs(cls.results_dir)
    # Only S (size), M(mode) and 5 (checksum) are now important
    not_catched_flags = ['T', 'F', 'G', 'U', 'V', 'L', 'D', 'N']
    old_pkgs = cls._get_rpms(results_store.get_old_build().get('rpm', None))
    new_pkgs = cls._get_rpms(results_store.get_new_build().get('rpm', None))
    for name, old_rpm in six.iteritems(old_pkgs):
        # skip debug{info,source} packages
        if 'debuginfo' in name or 'debugsource' in name:
            continue
        cmd = [cls.NAME]  # TODO modify to online command
        for flag in not_catched_flags:
            cmd.extend(['-i', flag])
        cmd.append(old_rpm)
        # We would like to build correct old package against correct new packages
        try:
            cmd.append(new_pkgs[name])
        except KeyError:
            logger.warning('New version of package %s was not found!', name)
            continue
        output = StringIO()
        try:
            ProcessHelper.run_subprocess(cmd, output_file=output)
        except OSError:
            raise CheckerNotFoundError("Checker '{}' was not found or installed.".format(cls.NAME))
        results_dict = cls._analyze_logs(output, results_dict)
    results_dict = cls.update_added_removed(results_dict)
    cls.results_dict = {k: v for k, v in six.iteritems(results_dict) if v}
    report_lines = []
    for change, file_names in six.iteritems(results_dict):
        if file_names:
            if report_lines:
                report_lines.append('')
            report_lines.append('Following files were {}:'.format(change))
            report_lines.extend(file_names)
    rpmdiff_report = os.path.join(cls.results_dir, 'report.txt')
    counts = {k: len(v) for k, v in six.iteritems(results_dict)}
    try:
        with open(rpmdiff_report, "w") as f:
            f.write('\n'.join(report_lines))
    except IOError:
        raise RebaseHelperError("Unable to write result from {} to '{}'".format(cls.NAME, rpmdiff_report))
    return {'path': cls.get_checker_output_dir_short(), 'files_changes': counts}
def run_check(cls, results_dir, **kwargs):
    """Compares old and new RPMs using rpmdiff"""
    results_dict = {tag: [] for tag in cls.CHECKER_TAGS}
    cls.results_dir = os.path.join(results_dir, cls.name)
    os.makedirs(cls.results_dir)
    # Only S (size), M(mode) and 5 (checksum) are now important
    not_catched_flags = ['T', 'F', 'G', 'U', 'V', 'L', 'D', 'N']
    old_pkgs = cls._get_rpms(results_store.get_old_build().get('rpm', None))
    new_pkgs = cls._get_rpms(results_store.get_new_build().get('rpm', None))
    for name, old_rpm in old_pkgs.items():
        # skip debug{info,source} packages
        if 'debuginfo' in name or 'debugsource' in name:
            continue
        cmd = [cls.CMD]  # TODO modify to online command
        for flag in not_catched_flags:
            cmd.extend(['-i', flag])
        cmd.append(old_rpm)
        # We would like to build correct old package against correct new packages
        try:
            cmd.append(new_pkgs[name])
        except KeyError:
            logger.warning('New version of package %s was not found!', name)
            continue
        output = io.StringIO()
        try:
            ProcessHelper.run_subprocess(cmd, output_file=output)
        except OSError:
            raise CheckerNotFoundError("Checker '{}' was not found or installed.".format(cls.name))
        results_dict = cls._analyze_logs(output, results_dict)
    results_dict = cls.update_added_removed(results_dict)
    cls.results_dict = {k: v for k, v in results_dict.items() if v}
    report_lines = []
    for change, file_names in results_dict.items():
        if file_names:
            if report_lines:
                report_lines.append('')
            report_lines.append('Following files were {}:'.format(change))
            report_lines.extend(file_names)
    rpmdiff_report = os.path.join(cls.results_dir, 'report.txt')
    counts = {k: len(v) for k, v in results_dict.items()}
    try:
        with open(rpmdiff_report, "w") as f:
            f.write('\n'.join(report_lines))
    except IOError:
        raise RebaseHelperError("Unable to write result from {} to '{}'".format(cls.name, rpmdiff_report))
    return {'path': cls.get_checker_output_dir_short(), 'files_changes': counts}
def _get_commit_hash_from_github(cls, spec_file):
    """
    Tries to find a commit using Github API

    :param spec_file: SPEC file to base the search on
    :return: SHA of a commit, or None
    """
    m = re.match(r'^https?://github\.com/(?P<owner>[\w-]+)/(?P<project>[\w-]+)/.*$',
                 spec_file.sources[0])
    if not m:
        return None
    baseurl = 'https://api.github.com/repos/{owner}/{project}'.format(**m.groupdict())
    # try to get tag name from a release matching version
    r = DownloadHelper.request('{}/releases'.format(baseurl))
    if r is None:
        return None
    if not r.ok:
        if r.status_code == 403 and r.headers.get('X-RateLimit-Remaining') == '0':
            logger.warning("Rate limit exceeded on Github API! Try again later.")
        return None
    data = r.json()
    version = spec_file.get_version()
    tag_name = None
    for release in data:
        # Fix: the 'name' field can be null in the API response; the original
        # crashed with TypeError on 'version in None'.
        if version in (release.get('name') or ''):
            tag_name = release.get('tag_name')
            break
    r = DownloadHelper.request('{}/tags'.format(baseurl))
    if r is None:
        return None
    if not r.ok:
        if r.status_code == 403 and r.headers.get('X-RateLimit-Remaining') == '0':
            logger.warning("Rate limit exceeded on Github API! Try again later.")
        return None
    data = r.json()
    for tag in data:
        name = tag.get('name')
        if tag_name:
            if name != tag_name:
                continue
        else:
            # no specific tag name, try common tag names
            if name not in [version, 'v{}'.format(version)]:
                continue
        commit = tag.get('commit')
        if commit:
            return commit.get('sha')
    return None
def _check_results_dir(self):
    """Ensure a clean results directory, removing any existing one."""
    # TODO: We may not want to delete the directory in the future
    if os.path.exists(self.results_dir):
        logger.warning("Results directory '%s' exists, removing it",
                       os.path.basename(self.results_dir))
        shutil.rmtree(self.results_dir)
    os.makedirs(self.results_dir)
def run_check(cls, results_dir, **kwargs): """Compares old and new RPMs using abipkgdiff""" # Check if ABI changes occured cls.abi_changes = None cls.results_dir = os.path.join(results_dir, cls.NAME) os.makedirs(cls.results_dir) debug_old, rest_pkgs_old = cls._get_packages_for_abipkgdiff( results_store.get_build('old')) debug_new, rest_pkgs_new = cls._get_packages_for_abipkgdiff( results_store.get_build('new')) cmd = [cls.NAME] reports = {} for pkg in rest_pkgs_old: command = list(cmd) debug = cls._find_debuginfo(debug_old, pkg) if debug: command.append('--d1') command.append(debug) old_name = RpmHelper.split_nevra(os.path.basename(pkg))['name'] find = [ x for x in rest_pkgs_new if RpmHelper.split_nevra( os.path.basename(x))['name'] == old_name ] if not find: logger.warning('New version of package %s was not found!', old_name) continue new_pkg = find[0] debug = cls._find_debuginfo(debug_new, new_pkg) if debug: command.append('--d2') command.append(debug) command.append(pkg) command.append(new_pkg) logger.debug('Package name for ABI comparison %s', old_name) output = os.path.join(cls.results_dir, old_name + '.txt') try: ret_code = ProcessHelper.run_subprocess(command, output_file=output) except OSError: raise CheckerNotFoundError( "Checker '{}' was not found or installed.".format( cls.NAME)) if int(ret_code) & cls.ABIDIFF_ERROR and int( ret_code) & cls.ABIDIFF_USAGE_ERROR: raise RebaseHelperError( 'Execution of {} failed.\nCommand line is: {}'.format( cls.NAME, cmd)) reports[old_name] = int(ret_code) return dict(packages=cls.parse_abi_logs(reports), abi_changes=cls.abi_changes, path=cls.get_checker_output_dir_short())
def apply_changes(self):
    """Applies the generated changes.patch to the git repository.

    Initializes a git repository in the execution directory if there is
    none, then applies changes.patch with 'git am'.
    """
    try:
        repo = git.Repo(self.execution_dir)
    except git.InvalidGitRepositoryError:
        repo = git.Repo.init(self.execution_dir)
    patch = results_store.get_changes_patch()
    if not patch:
        logger.warning('Cannot apply changes.patch. No patch file was created')
        # Fix: bail out early -- the original fell through and crashed with a
        # TypeError on patch['changes_patch'] when no patch was created.
        return
    try:
        repo.git.am(patch['changes_patch'])
    except git.GitCommandError as e:
        logger.warning('changes.patch was not applied properly. Please review changes manually.'
                       '\nThe error message is: %s', six.text_type(e))
def apply_changes(self):
    """Applies the generated changes.patch to the git repository.

    Initializes a git repository in the execution directory if there is
    none, then applies changes.patch with 'git am'.
    """
    try:
        repo = git.Repo(self.execution_dir)
    except git.InvalidGitRepositoryError:
        repo = git.Repo.init(self.execution_dir)
    patch = results_store.get_changes_patch()
    if not patch:
        logger.warning('Cannot apply changes.patch. No patch file was created')
        # Fix: bail out early -- the original fell through and crashed with a
        # TypeError on patch['changes_patch'] when no patch was created.
        return
    try:
        repo.git.am(patch['changes_patch'])
    except git.GitCommandError as e:
        logger.warning('changes.patch was not applied properly. Please review changes manually.'
                       '\nThe error message is: %s', str(e))
def run_check(cls, result_dir):
    """Compares old and new RPMs using abipkgdiff"""
    # Split each build into debuginfo packages and regular packages;
    # debuginfo is fed to abipkgdiff via --d1/--d2 for richer comparison.
    debug_old, rest_pkgs_old = cls._get_packages_for_abipkgdiff(
        results_store.get_build('old'))
    debug_new, rest_pkgs_new = cls._get_packages_for_abipkgdiff(
        results_store.get_build('new'))
    cmd = [cls.CMD]
    reports = {}
    for pkg in rest_pkgs_old:
        command = list(cmd)
        # attach debuginfo of the old package, if available
        debug = cls._find_debuginfo(debug_old, pkg)
        if debug:
            command.append('--d1')
            command.append(debug)
        old_name = RpmHelper.split_nevra(os.path.basename(pkg))['name']
        # find the counterpart in the new build by NEVRA name
        find = [
            x for x in rest_pkgs_new if RpmHelper.split_nevra(
                os.path.basename(x))['name'] == old_name
        ]
        if not find:
            logger.warning('New version of package %s was not found!', old_name)
            continue
        new_pkg = find[0]
        # attach debuginfo of the new package, if available
        debug = cls._find_debuginfo(debug_new, new_pkg)
        if debug:
            command.append('--d2')
            command.append(debug)
        command.append(pkg)
        command.append(new_pkg)
        logger.debug('Package name for ABI comparison %s', old_name)
        output = os.path.join(cls.results_dir, result_dir, old_name + '-' + cls.log_name)
        try:
            ret_code = ProcessHelper.run_subprocess(command, output=output)
        except OSError:
            raise CheckerNotFoundError(
                "Checker '%s' was not found or installed." % cls.CMD)
        # abipkgdiff encodes its result as bit flags in the exit code.
        if int(ret_code) & settings.ABIDIFF_ERROR and int(
                ret_code) & settings.ABIDIFF_USAGE_ERROR:
            raise RebaseHelperError(
                'Execution of %s failed.\nCommand line is: %s' % (cls.CMD, cmd))
        if int(ret_code) == 0:
            text = 'ABI of the compared binaries in package %s are equal.' % old_name
        else:
            text = 'ABI of the compared binaries in package %s are not equal.' % old_name
        reports[output] = text
    return reports
def _check_results_dir(results_dir):
    """Recreate the results directory with its expected sub-directories."""
    # TODO: We may not want to delete the directory in the future
    if os.path.exists(results_dir):
        logger.warning("Results directory '%s' exists, removing it",
                       os.path.basename(results_dir))
        shutil.rmtree(results_dir)
    os.makedirs(results_dir)
    for subdir in (settings.REBASE_HELPER_LOGS, 'old', 'new'):
        os.makedirs(os.path.join(results_dir, subdir))
def _get_commit_hash_from_github(cls, spec_file):
    """
    Tries to find a commit using Github API

    :param spec_file: SPEC file to base the search on
    :return: SHA of a commit, or None
    """
    m = re.match(r'^https?://github\.com/(?P<owner>[\w-]+)/(?P<project>[\w-]+)/.*$',
                 spec_file.sources[0])
    if not m:
        return None
    baseurl = 'https://api.github.com/repos/{owner}/{project}'.format(**m.groupdict())
    # try to get tag name from a release matching version
    r = DownloadHelper.request('{}/releases'.format(baseurl))
    if r is None:
        return None
    if not r.ok:
        if r.status_code == 403 and r.headers.get('X-RateLimit-Remaining') == '0':
            logger.warning("Rate limit exceeded on Github API! Try again later.")
        return None
    data = r.json()
    version = spec_file.get_version()
    tag_name = None
    for release in data:
        # Fix: the 'name' field can be null in the API response; the original
        # crashed with TypeError on 'version in None'.
        if version in (release.get('name') or ''):
            tag_name = release.get('tag_name')
            break
    r = DownloadHelper.request('{}/tags'.format(baseurl))
    if r is None:
        return None
    if not r.ok:
        if r.status_code == 403 and r.headers.get('X-RateLimit-Remaining') == '0':
            logger.warning("Rate limit exceeded on Github API! Try again later.")
        return None
    data = r.json()
    for tag in data:
        name = tag.get('name')
        if tag_name:
            if name != tag_name:
                continue
        else:
            # no specific tag name, try common tag names
            if name not in [version, 'v{}'.format(version)]:
                continue
        commit = tag.get('commit')
        if commit:
            return commit.get('sha')
    return None
def _add_report_log_file(results_dir):
    """Attach the application report log file handler.

    :param results_dir: directory the report log should be placed in
    :return: log file path, or None when the handler could not be added
    """
    report_log_file = os.path.join(results_dir, settings.REBASE_HELPER_REPORT_LOG)
    try:
        LoggerHelper.add_file_handler(logger_report, report_log_file, None, logging.INFO)
    except (IOError, OSError):
        logger.warning("Can not create report log '%s'", report_log_file)
        return None
    return report_log_file
def _add_report_log_file(self):
    """Attach the application report log file handler and remember its path."""
    path = os.path.join(self.results_dir, settings.REBASE_HELPER_REPORT_LOG)
    try:
        LoggerHelper.add_file_handler(logger_report, path, None, logging.INFO)
    except (IOError, OSError):
        logger.warning("Can not create report log '%s'", path)
    else:
        # only remember the path when the handler was actually attached
        self.report_log_file = path
def _check_results_dir(results_dir):
    """Recreate the results directory with its expected sub-directories."""
    # TODO: We may not want to delete the directory in the future
    if os.path.exists(results_dir):
        logger.warning("Results directory '%s' exists, removing it",
                       os.path.basename(results_dir))
        shutil.rmtree(results_dir)
    os.makedirs(results_dir)
    for subdir in (constants.LOGS_DIR,
                   constants.OLD_BUILD_DIR,
                   constants.NEW_BUILD_DIR,
                   constants.CHECKERS_DIR,
                   constants.REBASED_SOURCES_DIR):
        os.makedirs(os.path.join(results_dir, subdir))
def install_build_dependencies(spec_path=None, assume_yes=False):
    """
    Install all build requires for a package using PolicyKits

    :param spec_path: absolute path to SPEC file
    :return:
    """
    cmd = ['dnf', 'builddep', spec_path]
    # Non-root users have to authenticate through pkexec first.
    if os.geteuid() != 0:
        logger.warning("Authentication required to install build dependencies using '%s'",
                       ' '.join(cmd))
        cmd.insert(0, 'pkexec')
    if assume_yes:
        cmd.append('-y')
    return ProcessHelper.run_subprocess(cmd)
def _add_debug_log_file(self):
    """Attach the application wide debug log file handler and remember its path."""
    path = os.path.join(self.results_dir, settings.REBASE_HELPER_DEBUG_LOG)
    formatter = logging.Formatter("%(asctime)s %(levelname)s\t%(filename)s"
                                  ":%(lineno)s %(funcName)s: %(message)s")
    try:
        LoggerHelper.add_file_handler(logger, path, formatter, logging.DEBUG)
    except (IOError, OSError):
        logger.warning("Can not create debug log '%s'", path)
    else:
        # only remember the path when the handler was actually attached
        self.debug_log_file = path
def _add_debug_log_file(results_dir):
    """Attach the application wide debug log file handler.

    :param results_dir: directory the debug log should be placed in
    :return: log file path, or None when the handler could not be added
    """
    debug_log_file = os.path.join(results_dir, constants.DEBUG_LOG)
    formatter = logging.Formatter("%(asctime)s %(levelname)s\t%(filename)s"
                                  ":%(lineno)s %(funcName)s: %(message)s")
    try:
        LoggerHelper.add_file_handler(logger, debug_log_file, formatter, logging.DEBUG)
    except (IOError, OSError):
        logger.warning("Can not create debug log '%s'", debug_log_file)
        return None
    return debug_log_file
def install_build_dependencies(spec_path=None, assume_yes=False):
    """Installs build dependencies of a package using dnf.

    Args:
        spec_path (str): Absolute path to the SPEC file.
        assume_yes (bool): Whether to automatically answer 'yes' to all questions.

    Returns:
        int: Exit code of the subprocess run.
    """
    cmd = ['dnf', 'builddep', spec_path]
    # Non-root users have to authenticate through pkexec first.
    if os.geteuid() != 0:
        logger.warning("Authentication required to install build dependencies using '%s'",
                       ' '.join(cmd))
        cmd.insert(0, 'pkexec')
    if assume_yes:
        cmd.append('-y')
    return ProcessHelper.run_subprocess(cmd)
def run_check(cls, results_dir, **kwargs): """Compares old and new RPMs using abipkgdiff""" # Check if ABI changes occured cls.abi_changes = False cls.results_dir = os.path.join(results_dir, cls.name) os.makedirs(cls.results_dir) debug_old, rest_pkgs_old = cls._get_packages_for_abipkgdiff(results_store.get_build('old')) debug_new, rest_pkgs_new = cls._get_packages_for_abipkgdiff(results_store.get_build('new')) cmd = [cls.CMD] reports = {} for pkg in rest_pkgs_old: command = list(cmd) debug = cls._find_debuginfo(debug_old, pkg) if debug: command.append('--d1') command.append(debug) old_name = RpmHelper.split_nevra(os.path.basename(pkg))['name'] find = [x for x in rest_pkgs_new if RpmHelper.split_nevra(os.path.basename(x))['name'] == old_name] if not find: logger.warning('New version of package %s was not found!', old_name) continue new_pkg = find[0] debug = cls._find_debuginfo(debug_new, new_pkg) if debug: command.append('--d2') command.append(debug) command.append(pkg) command.append(new_pkg) logger.verbose('Package name for ABI comparison %s', old_name) output = os.path.join(cls.results_dir, old_name + '.txt') try: ret_code = ProcessHelper.run_subprocess(command, output_file=output) except OSError: raise CheckerNotFoundError("Checker '{}' was not found or installed.".format(cls.name)) if int(ret_code) & cls.ABIDIFF_ERROR and int(ret_code) & cls.ABIDIFF_USAGE_ERROR: raise RebaseHelperError('Execution of {} failed.\nCommand line is: {}'.format(cls.CMD, cmd)) reports[old_name] = int(ret_code) return dict(packages=cls.parse_abi_logs(reports), abi_changes=cls.abi_changes, path=cls.get_checker_output_dir_short())
def get_old_build_info(cls, package_name, package_version):
    """Gets old build info from Koji.

    Args:
        package_name (str): Package name from specfile.
        package_version (str): Package version from specfile.

    Returns:
        tuple: Koji build id, package version, package full version.
    """
    # cls.functional is False when the koji module could not be set up;
    # in that case no lookup is attempted.
    if cls.functional:
        session = KojiHelper.create_session()
        koji_version, koji_build_id = KojiHelper.get_build(session, package_name, package_version)
        if not koji_version:
            # fallback to the latest Koji build
            koji_version, koji_build_id = KojiHelper.get_latest_build(session, package_name)
        if koji_version:
            if koji_version != package_version:
                logger.warning('Version of the latest Koji build (%s) with id (%s) '
                               'differs from version in SPEC file (%s)!',
                               koji_version, koji_build_id, package_version)
            # the Koji build version wins over the SPEC file version
            package_version = package_full_version = koji_version
            return koji_build_id, package_version, package_full_version
        else:
            logger.warning('Unable to find old version Koji build!')
            return None, None, None
    else:
        logger.warning('Unable to get old version Koji build!')
        return None, None, None
def get_old_build_info(cls, package_name, package_version):
    """Gets old build info from Koji.

    Args:
        package_name (str): Package name from specfile.
        package_version (str): Package version from specfile.

    Returns:
        tuple: Koji build id, package version, package full version.
    """
    # cls.functional is False when the koji module could not be set up;
    # in that case no lookup is attempted.
    if cls.functional:
        session = KojiHelper.create_session()
        koji_version, koji_build_id = KojiHelper.get_build(
            session, package_name, package_version)
        if not koji_version:
            # fallback to the latest Koji build
            koji_version, koji_build_id = KojiHelper.get_latest_build(
                session, package_name)
        if koji_version:
            if koji_version != package_version:
                logger.warning(
                    'Version of the latest Koji build (%s) with id (%s) '
                    'differs from version in SPEC file (%s)!',
                    koji_version, koji_build_id, package_version)
            # the Koji build version wins over the SPEC file version
            package_version = package_full_version = koji_version
            return koji_build_id, package_version, package_full_version
        else:
            logger.warning('Unable to find old version Koji build!')
            return None, None, None
    else:
        logger.warning('Unable to get old version Koji build!')
        return None, None, None
def build_packages(self):
    """Builds the old and new packages using the configured build tool.

    :return: True when both versions built successfully, False when the
             build failed because of unapplied patches (non-interactive)
    :raises RebaseHelperError: when the old version fails to build or the
             build log cannot be analyzed
    """
    try:
        builder = Builder(self.conf.buildtool)
    except NotImplementedError as ni_e:
        # Fix: message was never %-formatted and 'ni_e.message' does not
        # exist on Python 3 exceptions.
        raise RebaseHelperError('%s. Supported build tools are %s' %
                                (str(ni_e), Builder.get_supported_tools()))
    for version in ['old', 'new']:
        spec_object = self.spec_file if version == 'old' else self.rebase_spec_file
        build_dict = {}
        build_dict['name'] = spec_object.get_package_name()
        build_dict['version'] = spec_object.get_version()
        logger.debug(build_dict)
        patches = [x.get_path() for x in spec_object.get_patches()]
        results_dir = os.path.join(self.results_dir, version)
        spec = spec_object.get_path()
        sources = spec_object.get_sources()
        failed_before = False
        while True:
            try:
                build_dict.update(builder.build(spec, sources, patches, results_dir, **build_dict))
                OutputLogger.set_build_data(version, build_dict)
                break
            except SourcePackageBuildError:
                # always fail for original version
                if version == 'old':
                    raise RebaseHelperError('Creating old SRPM package failed.')
                logger.error('Building source package failed.')
                # TODO: implement log analyzer for SRPMs and add the checks here!!!
                raise
            except BinaryPackageBuildError:
                # always fail for original version
                rpm_dir = os.path.join(results_dir, 'RPM')
                build_log = 'build.log'
                build_log_path = os.path.join(rpm_dir, build_log)
                if version == 'old':
                    raise RebaseHelperError('Building old RPM package failed. Check log %s' % build_log_path)
                logger.error('Building binary packages failed.')
                try:
                    files = BuildLogAnalyzer.parse_log(rpm_dir, build_log)
                except BuildLogAnalyzerMissingError:
                    raise RebaseHelperError('Build log %s does not exist' % build_log_path)
                except BuildLogAnalyzerMakeError:
                    raise RebaseHelperError('Building package failed during build. Check log %s' % build_log_path)
                except BuildLogAnalyzerPatchError:
                    raise RebaseHelperError('Building package failed during patching. Check log %s' % build_log_path)
                if files['missing']:
                    # Fix: the original joined files['added'] here, reporting
                    # the wrong list of files for the 'missing' case.
                    missing_files = '\n'.join(files['missing'])
                    logger.info('Files not packaged in the SPEC file:\n%s', missing_files)
                elif files['deleted']:
                    deleted_files = '\n'.join(files['deleted'])
                    logger.warning('Removed files packaged in SPEC file:\n%s', deleted_files)
                else:
                    raise RebaseHelperError("Build failed, but no issues were found in the build log %s" % build_log)
                self.rebase_spec_file.modify_spec_files_section(files)
                if not self.conf.non_interactive:
                    if failed_before:
                        if not ConsoleHelper.get_message('Do you want rebase-helper to try build the packages one more time'):
                            raise KeyboardInterrupt
                else:
                    # non-interactive mode: give up after cleaning up
                    logger.warning('Some patches were not successfully applied')
                    shutil.rmtree(os.path.join(results_dir, 'RPM'))
                    shutil.rmtree(os.path.join(results_dir, 'SRPM'))
                    return False
                # build just failed, otherwise we would break out of the while loop
                failed_before = True
                shutil.rmtree(os.path.join(results_dir, 'RPM'))
                shutil.rmtree(os.path.join(results_dir, 'SRPM'))
    return True
def build_packages(self):
    """Builds the old and new packages using the configured build tool.

    :return: True when both versions built successfully, False when a
             remote build is not finished yet or failed
    :raises RebaseHelperError: when the old version fails to build, the
             build log cannot be analyzed or all retries are exhausted
    """
    # Fall back to mock when the koji module could not be imported.
    if self.conf.buildtool == KojiBuildTool.CMD and not koji_builder:
        logger.info('Importing module koji failed. Switching to mock builder.')
        self.conf.buildtool = MockBuildTool.CMD
    try:
        builder = Builder(self.conf.buildtool)
    except NotImplementedError as ni_e:
        # Fix: the original applied '%' to a single argument
        # ('%s...%s' % six.text_type(ni_e)), raising TypeError instead of
        # producing the intended message.
        raise RebaseHelperError('{}. Supported build tools are {}'.format(
            six.text_type(ni_e), Builder.get_supported_tools()))
    for version in ['old', 'new']:
        spec_object = self.spec_file if version == 'old' else self.rebase_spec_file
        build_dict = {}
        task_id = None
        if self.conf.build_tasks is None:
            build_dict['name'] = spec_object.get_package_name()
            build_dict['version'] = spec_object.get_version()
            patches = [x.get_path() for x in spec_object.get_patches()]
            spec = spec_object.get_path()
            sources = spec_object.get_sources()
            logger.info('Building packages for %s version %s',
                        spec_object.get_package_name(),
                        spec_object.get_full_version())
        else:
            # resuming from previously submitted remote build tasks
            if version == 'old':
                task_id = self.conf.build_tasks[0]
            else:
                task_id = self.conf.build_tasks[1]
        results_dir = os.path.join(self.results_dir, version)
        build_dict['builds_nowait'] = self.conf.builds_nowait
        build_dict['build_tasks'] = self.conf.build_tasks
        build_dict['builder_options'] = self.conf.builder_options
        files = {}
        number_retries = 0
        while self.conf.build_retries != number_retries:
            try:
                if self.conf.build_tasks is None:
                    build_dict.update(builder.build(spec, sources, patches, results_dir, **build_dict))
                if not self.conf.builds_nowait:
                    if self.conf.buildtool == KojiBuildTool.CMD:
                        # poll Koji until the task produces RPMs
                        while not build_dict['rpm']:
                            kh = KojiHelper()
                            build_dict['rpm'], build_dict['logs'] = kh.get_koji_tasks(
                                build_dict['koji_task_id'], results_dir)
                else:
                    if self.conf.build_tasks:
                        if self.conf.buildtool == KojiBuildTool.CMD:
                            kh = KojiHelper()
                            try:
                                build_dict['rpm'], build_dict['logs'] = kh.get_koji_tasks(
                                    task_id, results_dir)
                                results_store.set_build_data(version, build_dict)
                                if not build_dict['rpm']:
                                    return False
                            except TypeError:
                                logger.info('Koji tasks are not finished yet. Try again later')
                                return False
                        elif self.conf.buildtool == CoprBuildTool.CMD:
                            copr_helper = CoprHelper()
                            client = copr_helper.get_client()
                            build_id = int(task_id)
                            status = copr_helper.get_build_status(client, build_id)
                            if status in ['importing', 'pending', 'starting', 'running']:
                                logger.info('Copr build is not finished yet. Try again later')
                                return False
                            else:
                                build_dict['rpm'], build_dict['logs'] = copr_helper.download_build(
                                    client, build_id, results_dir)
                                if status not in ['succeeded', 'skipped']:
                                    logger.info('Copr build {} did not complete successfully'.format(build_id))
                                    return False
                # Build finishes properly. Go out from while cycle
                results_store.set_build_data(version, build_dict)
                break
            except SourcePackageBuildError:
                build_dict.update(builder.get_logs())
                results_store.set_build_data(version, build_dict)
                # always fail for original version
                if version == 'old':
                    raise RebaseHelperError('Creating old SRPM package failed.')
                logger.error('Building source package failed.')
                # TODO: implement log analyzer for SRPMs and add the checks here!!!
                raise
            except BinaryPackageBuildError:
                # always fail for original version
                rpm_dir = os.path.join(results_dir, 'RPM')
                build_dict.update(builder.get_logs())
                results_store.set_build_data(version, build_dict)
                build_log = 'build.log'
                build_log_path = os.path.join(rpm_dir, build_log)
                if version == 'old':
                    error_message = 'Building old RPM package failed. Check logs: {} '.format(
                        builder.get_logs().get('logs', 'N/A'))
                    raise RebaseHelperError(error_message)
                logger.error('Building binary packages failed.')
                msg = 'Building package failed'
                try:
                    files = BuildLogAnalyzer.parse_log(rpm_dir, build_log)
                except BuildLogAnalyzerMissingError:
                    raise RebaseHelperError('Build log %s does not exist', build_log_path)
                except BuildLogAnalyzerMakeError:
                    raise RebaseHelperError('%s during build. Check log %s', msg, build_log_path)
                except BuildLogAnalyzerPatchError:
                    raise RebaseHelperError('%s during patching. Check log %s', msg, build_log_path)
                except RuntimeError:
                    if self.conf.build_retries == number_retries:
                        raise RebaseHelperError('%s with unknown reason. Check log %s', msg, build_log_path)
                if 'missing' in files:
                    missing_files = '\n'.join(files['missing'])
                    logger.info('Files not packaged in the SPEC file:\n%s', missing_files)
                elif 'deleted' in files:
                    deleted_files = '\n'.join(files['deleted'])
                    logger.warning('Removed files packaged in SPEC file:\n%s', deleted_files)
                else:
                    if self.conf.build_retries == number_retries:
                        raise RebaseHelperError("Build failed, but no issues were found in the build log %s", build_log)
                self.rebase_spec_file.modify_spec_files_section(files)
                if not self.conf.non_interactive:
                    msg = 'Do you want rebase-helper to try build the packages one more time'
                    if not ConsoleHelper.get_message(msg):
                        raise KeyboardInterrupt
                else:
                    logger.warning('Some patches were not successfully applied')
                # build just failed, otherwise we would break out of the while loop
                logger.debug('Number of retries is %s', self.conf.build_retries)
                if os.path.exists(os.path.join(results_dir, 'RPM')):
                    shutil.rmtree(os.path.join(results_dir, 'RPM'))
                if os.path.exists(os.path.join(results_dir, 'SRPM')):
                    shutil.rmtree(os.path.join(results_dir, 'SRPM'))
                number_retries += 1
        if self.conf.build_retries == number_retries:
            raise RebaseHelperError(
                'Building package failed with unknown reason. Check all available log files.')
    return True
def run(self):
    """Entry point of a rebase: validate options, build, check and summarize.

    Returns:
        int: 0 on success (or when detached builds were submitted),
        1 on failure (NOTE: callers currently never check this — see TODOs).

    Raises:
        RebaseHelperError: when mutually incompatible CLI options are used.
    """
    # TODO: Move this check to CliHelper OR possibly to a private method validating the configuration.
    if self.conf.fedpkg_build_tasks:
        logger.warning("Option --fedpkg-build-tasks is deprecated, use --build-tasks instead.")
        if not self.conf.build_tasks:
            self.conf.build_tasks = self.conf.fedpkg_build_tasks
    # Certain options can be used only with specific build tools
    # here are checks for remote build tools
    if self.conf.buildtool not in [KojiBuildTool.CMD, CoprBuildTool.CMD]:
        options_used = []
        if self.conf.build_tasks is not None:
            options_used.append('--build-tasks')
        if self.conf.builds_nowait is True:
            options_used.append('--builds-nowait')
        if options_used:
            raise RebaseHelperError("%s can be used only with the following build tools: %s",
                                    ' and '.join(options_used),
                                    ', '.join([KojiBuildTool.CMD, CoprBuildTool.CMD]))
    # here are checks for local builders
    elif self.conf.buildtool not in [RpmbuildBuildTool.CMD, MockBuildTool.CMD]:
        options_used = []
        if self.conf.builder_options is not None:
            options_used.append('--builder-options')
        if options_used:
            raise RebaseHelperError("%s can be used only with the following build tools: %s",
                                    ' and '.join(options_used),
                                    ', '.join([RpmbuildBuildTool.CMD, MockBuildTool.CMD]))
    sources = None
    if self.conf.build_tasks is None:
        # Normal run: download/extract upstream sources and rebase downstream patches
        sources = self.prepare_sources()
        if not self.conf.build_only and not self.conf.comparepkgs:
            self.patch_sources(sources)
    build = False
    if not self.conf.patch_only:
        if not self.conf.comparepkgs:
            # check build dependencies for rpmbuild
            if self.conf.buildtool == RpmbuildBuildTool.CMD:
                Application.check_build_requires(self.spec_file)
            # Build packages
            try:
                build = self.build_packages()
                if self.conf.builds_nowait and not self.conf.build_tasks:
                    # Detached remote builds were only submitted; print task info and stop here.
                    if self.conf.buildtool == KojiBuildTool.CMD:
                        self.print_koji_logs()
                    elif self.conf.buildtool == CoprBuildTool.CMD:
                        self.print_copr_logs()
                    # FIX: was a bare `return` (None); return an explicit success
                    # exit code consistent with the other exits of this method.
                    return 0
            except RuntimeError:
                logger.error('Unknown error caused by build log analysis')
                # TODO: exception should be raised instead of returning a value - it is never checked!
                return 1
        # Perform checks
        else:
            build = self.get_rpm_packages(self.conf.comparepkgs)
            # We don't care dirname doesn't contain any RPM packages
            # Therefore return 1
        if build:
            self.run_package_checkers(self.results_dir)
        else:
            if not self.upstream_monitoring:
                # TODO: This should be an ERROR
                logger.info('Rebase package to %s FAILED. See for more details', self.conf.sources)
            # TODO: exception should be raised instead of returning a value - it is never checked!
            return 1
    self.print_summary()
    if not self.conf.keep_workspace:
        self._delete_workspace_dir()
    if self.debug_log_file:
        logger.info("Detailed debug log is located in '%s'", self.debug_log_file)
    if not self.upstream_monitoring and not self.conf.patch_only:
        logger.info('Rebase package to %s was SUCCESSFUL.\n', self.conf.sources)
    return 0
def run(self):
    """Entry point of a rebase: validate options, build, check and summarize.

    Returns 0 on success, 1 on failure, or None after submitting detached
    builds (NOTE: the return value is currently never checked — see TODOs).
    Raises RebaseHelperError when incompatible CLI options are combined.
    """
    # TODO: Move this check to CliHelper OR possibly to a private method validating the configuration.
    if self.conf.fedpkg_build_tasks:
        logger.warning("Option --fedpkg-build-tasks is deprecated, use --build-tasks instead.")
        if not self.conf.build_tasks:
            self.conf.build_tasks = self.conf.fedpkg_build_tasks
    # Certain options can be used only with specific build tools
    # here are checks for remote build tools
    if self.conf.buildtool not in [KojiBuildTool.CMD, CoprBuildTool.CMD]:
        options_used = []
        if self.conf.build_tasks is not None:
            options_used.append('--build-tasks')
        if self.conf.builds_nowait is True:
            options_used.append('--builds-nowait')
        if options_used:
            raise RebaseHelperError("%s can be used only with the following build tools: %s",
                                    ' and '.join(options_used),
                                    ', '.join([KojiBuildTool.CMD, CoprBuildTool.CMD]))
    # here are checks for local builders
    elif self.conf.buildtool not in [RpmbuildBuildTool.CMD, MockBuildTool.CMD]:
        options_used = []
        if self.conf.builder_options is not None:
            options_used.append('--builder-options')
        if options_used:
            raise RebaseHelperError("%s can be used only with the following build tools: %s",
                                    ' and '.join(options_used),
                                    ', '.join([RpmbuildBuildTool.CMD, MockBuildTool.CMD]))
    sources = None
    if self.conf.build_tasks is None:
        # Normal run: download/extract upstream sources and rebase downstream patches
        sources = self.prepare_sources()
        if not self.conf.build_only and not self.conf.comparepkgs:
            self.patch_sources(sources)
    build = False
    if not self.conf.patch_only:
        if not self.conf.comparepkgs:
            # check build dependencies for rpmbuild
            if self.conf.buildtool == RpmbuildBuildTool.CMD:
                Application.check_build_requires(self.spec_file)
            # Build packages
            try:
                build = self.build_packages()
                if self.conf.builds_nowait and not self.conf.build_tasks:
                    # Detached remote builds were only submitted; print task info and stop here.
                    if self.conf.buildtool == KojiBuildTool.CMD:
                        self.print_koji_logs()
                    elif self.conf.buildtool == CoprBuildTool.CMD:
                        self.print_copr_logs()
                    return
            except RuntimeError:
                logger.error('Unknown error caused by build log analysis')
                # TODO: exception should be raised instead of returning a value - it is never checked!
                return 1
        # Perform checks
        else:
            build = self.get_rpm_packages(self.conf.comparepkgs)
            # We don't care dirname doesn't contain any RPM packages
            # Therefore return 1
        if build:
            self.run_package_checkers(self.results_dir)
        else:
            if not self.upstream_monitoring:
                # TODO: This should be an ERROR
                logger.info('Rebase package to %s FAILED. See for more details', self.conf.sources)
            # TODO: exception should be raised instead of returning a value - it is never checked!
            return 1
    self.print_summary()
    if not self.conf.keep_workspace:
        self._delete_workspace_dir()
    if self.debug_log_file:
        logger.info("Detailed debug log is located in '%s'", self.debug_log_file)
    if not self.upstream_monitoring and not self.conf.patch_only:
        logger.info('Rebase package to %s was SUCCESSFUL.\n', self.conf.sources)
    return 0
def get_user(cls): try: return git.cmd.Git().config('user.name', get=True, stdout_as_string=True) except git.GitCommandError: logger.warning("Failed to get configured git user name, using '%s'", cls.GIT_USER_NAME) return cls.GIT_USER_NAME
def build_packages(self):
    """Build old and new SRPM/RPM packages using the configured build tool.

    For each of the 'old' and 'new' versions, either performs a fresh build
    (optionally downloading the old build from Koji) or attaches to already
    submitted remote build tasks. Failed binary builds are analyzed and, in
    interactive mode, may be retried up to ``self.conf.build_retries`` times.

    Returns:
        bool: True when both versions were processed; False when a detached
        remote build is not finished yet.

    Raises:
        RebaseHelperError: unsupported build tool, or build failed irrecoverably.
        KeyboardInterrupt: user declined the retry prompt.
    """
    try:
        builder = Builder(self.conf.buildtool)
    except NotImplementedError as ni_e:
        # FIX: the format arguments must be a tuple; the original applied `%`
        # to a single value, raising "not enough arguments for format string".
        raise RebaseHelperError('%s. Supported build tools are %s' % (six.text_type(ni_e),
                                                                      Builder.get_supported_tools()))
    for version in ['old', 'new']:
        spec_object = self.spec_file if version == 'old' else self.rebase_spec_file
        build_dict = {}
        task_id = None
        koji_build_id = None
        if self.conf.build_tasks is None:
            pkg_name = spec_object.get_package_name()
            pkg_version = spec_object.get_version()
            pkg_full_version = spec_object.get_full_version()
            if version == 'old' and self.conf.get_old_build_from_koji:
                # Try to reuse the latest Koji build instead of rebuilding the old version
                if KojiHelper.functional:
                    koji_version, koji_build_id = KojiHelper.get_latest_build(pkg_name)
                    if koji_version:
                        if koji_version != pkg_version:
                            logger.warning('Version of the latest Koji build (%s) with id (%s) '
                                           'differs from version in SPEC file (%s)!',
                                           koji_version, koji_build_id, pkg_version)
                            pkg_version = pkg_full_version = koji_version
                    else:
                        logger.warning('Unable to find the latest Koji build!')
                else:
                    logger.warning('Unable to get the latest Koji build!')
            # prepare for building
            builder.prepare(spec_object, self.conf)
            build_dict['name'] = pkg_name
            build_dict['version'] = pkg_version
            patches = [x.get_path() for x in spec_object.get_patches()]
            spec = spec_object.get_path()
            sources = spec_object.get_sources()
            logger.info('Building packages for %s version %s', pkg_name, pkg_full_version)
        else:
            # Attach to previously submitted remote build tasks (old, new)
            if version == 'old':
                task_id = self.conf.build_tasks[0]
            else:
                task_id = self.conf.build_tasks[1]
        results_dir = os.path.join(self.results_dir, version) + '-build'
        files = {}
        number_retries = 0
        while self.conf.build_retries != number_retries:
            try:
                if self.conf.build_tasks is None:
                    if koji_build_id:
                        build_dict['rpm'], build_dict['logs'] = KojiHelper.download_build(koji_build_id,
                                                                                         results_dir)
                    else:
                        build_dict.update(builder.build(spec, sources, patches, results_dir, **build_dict))
                if builder.creates_tasks() and not koji_build_id:
                    if not self.conf.builds_nowait:
                        build_dict['rpm'], build_dict['logs'] = builder.wait_for_task(build_dict,
                                                                                     results_dir)
                        if build_dict['rpm'] is None:
                            return False
                    elif self.conf.build_tasks:
                        build_dict['rpm'], build_dict['logs'] = builder.get_detached_task(task_id,
                                                                                         results_dir)
                        if build_dict['rpm'] is None:
                            return False
                # Build finishes properly. Go out from while cycle
                results_store.set_build_data(version, build_dict)
                break
            except SourcePackageBuildError as e:
                build_dict.update(builder.get_logs())
                build_dict['source_package_build_error'] = six.text_type(e)
                results_store.set_build_data(version, build_dict)
                # always fail for original version
                if version == 'old':
                    raise RebaseHelperError('Creating old SRPM package failed.')
                logger.error('Building source package failed.')
                # TODO: implement log analyzer for SRPMs and add the checks here!!!
                raise
            except BinaryPackageBuildError as e:
                # always fail for original version
                rpm_dir = os.path.join(results_dir, 'RPM')
                build_dict.update(builder.get_logs())
                build_dict['binary_package_build_error'] = six.text_type(e)
                results_store.set_build_data(version, build_dict)
                build_log = 'build.log'
                build_log_path = os.path.join(rpm_dir, build_log)
                if version == 'old':
                    error_message = 'Building old RPM package failed. Check logs: {} '.format(
                        builder.get_logs().get('logs', 'N/A'))
                    raise RebaseHelperError(error_message, logfiles=builder.get_logs().get('logs'))
                logger.error('Building binary packages failed.')
                msg = 'Building package failed'
                try:
                    # Classify the failure from the build log
                    files = BuildLogAnalyzer.parse_log(rpm_dir, build_log)
                except BuildLogAnalyzerMissingError:
                    raise RebaseHelperError('Build log %s does not exist', build_log_path)
                except BuildLogAnalyzerMakeError:
                    raise RebaseHelperError('%s during build. Check log %s', msg, build_log_path,
                                            logfiles=[build_log_path])
                except BuildLogAnalyzerPatchError:
                    raise RebaseHelperError('%s during patching. Check log %s', msg, build_log_path,
                                            logfiles=[build_log_path])
                except RuntimeError:
                    if self.conf.build_retries == number_retries:
                        raise RebaseHelperError('%s with unknown reason. Check log %s', msg,
                                                build_log_path, logfiles=[build_log_path])
                if 'missing' in files:
                    missing_files = '\n'.join(files['missing'])
                    logger.info('Files not packaged in the SPEC file:\n%s', missing_files)
                elif 'deleted' in files:
                    deleted_files = '\n'.join(files['deleted'])
                    logger.warning('Removed files packaged in SPEC file:\n%s', deleted_files)
                else:
                    if self.conf.build_retries == number_retries:
                        raise RebaseHelperError("Build failed, but no issues were found in the build log %s",
                                                build_log, logfiles=[build_log])
                # Try to fix the %files section based on the analysis before retrying
                self.rebase_spec_file.modify_spec_files_section(files)
                if not self.conf.non_interactive:
                    msg = 'Do you want rebase-helper to try to build the packages one more time'
                    if not ConsoleHelper.get_message(msg):
                        raise KeyboardInterrupt
                else:
                    logger.warning('Some patches were not successfully applied')
            # build just failed, otherwise we would break out of the while loop
            logger.debug('Number of retries is %s', self.conf.build_retries)
            number_retries += 1
            if self.conf.build_retries > number_retries:
                # only remove builds if this retry is not the last one
                if os.path.exists(os.path.join(results_dir, 'RPM')):
                    shutil.rmtree(os.path.join(results_dir, 'RPM'))
                if os.path.exists(os.path.join(results_dir, 'SRPM')):
                    shutil.rmtree(os.path.join(results_dir, 'SRPM'))
        if self.conf.build_retries == number_retries:
            raise RebaseHelperError('Building package failed with unknown reason. '
                                    'Check all available log files.')
    if self.conf.builds_nowait and not self.conf.build_tasks:
        if builder.creates_tasks():
            self.print_task_info(builder)
    return True
def get_email(cls): try: return git.cmd.Git().config('user.email', get=True, stdout_as_string=True) except git.GitCommandError: logger.warning("Failed to get configured git user email, using '%s'", cls.GIT_USER_EMAIL) return cls.GIT_USER_EMAIL
def get_email(cls): try: return git.cmd.Git().config('user.email', get=True, stdout_as_string=six.PY3) except git.GitCommandError: logger.warning("Failed to get configured git user email, using '%s'", cls.GIT_USER_EMAIL) return cls.GIT_USER_EMAIL
def build_binary_packages(self):
    """Function calls build class for building packages

    Builds binary RPMs for the 'old' and 'new' versions with the configured
    build tool, or attaches to already submitted remote build tasks. On a
    binary build failure in interactive mode the user may edit the rebased
    SPEC file and retry; otherwise a RebaseHelperError is raised.
    """
    try:
        builder = Builder(self.conf.buildtool)
    except NotImplementedError as e:
        raise RebaseHelperError('{}. Supported build tools are {}'.format(
            six.text_type(e), Builder.get_supported_tools()))
    for version in ['old', 'new']:
        successful_builds = 0
        try_build_again = False
        # Loop until one successful build of this version (retries reset the counter)
        while successful_builds < 1:
            results_dir = '{}-build'.format(os.path.join(self.results_dir, version))
            spec = None
            task_id = None
            koji_build_id = None
            build_dict = {}
            if self.conf.build_tasks is None:
                spec = self.spec_file if version == 'old' else self.rebase_spec_file
                package_name = spec.get_package_name()
                package_version = spec.get_version()
                package_full_version = spec.get_full_version()
                if version == 'old' and self.conf.get_old_build_from_koji:
                    # Try to reuse the latest Koji build instead of rebuilding the old version
                    if KojiHelper.functional:
                        session = KojiHelper.create_session()
                        koji_version, koji_build_id = KojiHelper.get_latest_build(session, package_name)
                        if koji_version:
                            if koji_version != package_version:
                                logger.warning('Version of the latest Koji build (%s) with id (%s) '
                                               'differs from version in SPEC file (%s)!',
                                               koji_version, koji_build_id, package_version)
                                package_version = package_full_version = koji_version
                        else:
                            logger.warning('Unable to find the latest Koji build!')
                    else:
                        logger.warning('Unable to get the latest Koji build!')
                # Seed the builder with package metadata and the SRPM produced earlier
                build_dict = dict(
                    name=package_name,
                    version=package_version,
                    builds_nowait=self.conf.builds_nowait,
                    build_tasks=self.conf.build_tasks,
                    builder_options=self.conf.builder_options,
                    srpm=results_store.get_build(version).get('srpm'),
                    srpm_logs=results_store.get_build(version).get('logs'))
                # prepare for building
                builder.prepare(spec, self.conf)
                logger.info('Building binary packages for %s version %s',
                            package_name, package_full_version)
            else:
                # Attach to previously submitted remote build tasks (old, new)
                task_id = self.conf.build_tasks[0] if version == 'old' else self.conf.build_tasks[1]
            try:
                if self.conf.build_tasks is None:
                    if koji_build_id:
                        session = KojiHelper.create_session()
                        build_dict['rpm'], build_dict['logs'] = KojiHelper.download_build(session,
                                                                                         koji_build_id,
                                                                                         results_dir)
                    else:
                        build_dict.update(builder.build(spec, results_dir, **build_dict))
                if builder.creates_tasks() and task_id and not koji_build_id:
                    if not self.conf.builds_nowait:
                        build_dict['rpm'], build_dict['logs'] = builder.wait_for_task(build_dict,
                                                                                     task_id,
                                                                                     results_dir)
                    elif self.conf.build_tasks:
                        build_dict['rpm'], build_dict['logs'] = builder.get_detached_task(task_id,
                                                                                         results_dir)
                build_dict = self._sanitize_build_dict(build_dict)
                results_store.set_build_data(version, build_dict)
                successful_builds += 1
            except RebaseHelperError:
                # Proper RebaseHelperError instance was created already. Re-raise it.
                raise
            except BinaryPackageBuildError as e:
                # Record logs and the error, then either retry (interactive) or fail
                build_dict.update(builder.get_logs())
                build_dict['binary_package_build_error'] = six.text_type(e)
                build_dict = self._sanitize_build_dict(build_dict)
                results_store.set_build_data(version, build_dict)
                if e.logfile is None:
                    msg = 'Building {} RPM packages failed; see logs in {} for more information'.format(
                        version, os.path.join(results_dir, 'RPM')
                    )
                else:
                    msg = 'Building {} RPM packages failed; see {} for more information'.format(
                        version, e.logfile)
                logger.info(msg)
                if self.rebase_spec_file:
                    # Save current rebase spec file content
                    self.rebase_spec_file.save()
                if not self.conf.non_interactive and \
                        ConsoleHelper.get_message('Do you want to try it one more time'):
                    try_build_again = True
                else:
                    raise RebaseHelperError(msg, logfiles=builder.get_logs().get('logs'))
            except Exception:
                raise RebaseHelperError('Building package failed with unknown reason. '
                                        'Check all available log files.')
            if try_build_again:
                successful_builds = 0
                try_build_again = False
                logger.info('Now it is time to make changes to %s if necessary.',
                            self.rebase_spec_file.path)
                if not ConsoleHelper.get_message('Do you want to continue with the rebuild now'):
                    raise KeyboardInterrupt
                # Update rebase spec file content after potential manual modifications
                self.rebase_spec_file._read_spec_content()  # pylint: disable=protected-access
                self.rebase_spec_file._update_data()  # pylint: disable=protected-access
                # clear current version output directories
                if os.path.exists(os.path.join(results_dir, 'RPM')):
                    shutil.rmtree(os.path.join(results_dir, 'RPM'))
    if self.conf.builds_nowait and not self.conf.build_tasks:
        if builder.creates_tasks():
            self.print_task_info(builder)
def get_user(cls): try: return git.cmd.Git().config('user.name', get=True, stdout_as_string=six.PY3) except git.GitCommandError: logger.warning("Failed to get configured git user name, using '%s'", cls.GIT_USER_NAME) return cls.GIT_USER_NAME
def build_packages(self):
    """Build old and new packages with the configured build tool (legacy path).

    Falls back from fedpkg to mock when the koji module is unavailable. For
    each of the 'old' and 'new' versions either builds locally or polls/
    downloads results of remote Koji/Copr tasks. Failed binary builds are
    analyzed and may be retried up to ``self.conf.build_retries`` times.

    Returns:
        bool: True when both versions were processed; False when a remote
        build is not finished yet or produced no RPMs.

    Raises:
        RebaseHelperError: unsupported build tool, or build failed irrecoverably.
        KeyboardInterrupt: user declined the retry prompt.
    """
    if self.conf.buildtool == 'fedpkg' and not koji_builder:
        print('Importing module koji failed. Switching to mockbuild.')
        self.conf.buildtool = 'mock'
    try:
        builder = Builder(self.conf.buildtool)
    except NotImplementedError as ni_e:
        # FIX: the format arguments must be a tuple; the original applied `%`
        # to a single value, raising "not enough arguments for format string".
        raise RebaseHelperError('%s. Supported build tools are %s' % (six.text_type(ni_e),
                                                                      Builder.get_supported_tools()))
    for version in ['old', 'new']:
        spec_object = self.spec_file if version == 'old' else self.rebase_spec_file
        build_dict = {}
        task_id = None
        if self.conf.build_tasks is None:
            build_dict['name'] = spec_object.get_package_name()
            build_dict['version'] = spec_object.get_version()
            patches = [x.get_path() for x in spec_object.get_patches()]
            spec = spec_object.get_path()
            sources = spec_object.get_sources()
            logger.info('Building packages for %s version %s',
                        spec_object.get_package_name(), spec_object.get_version())
        else:
            # Attach to previously submitted remote build tasks ("old,new" string)
            if version == 'old':
                task_id = self.conf.build_tasks.split(',')[0]
            else:
                task_id = self.conf.build_tasks.split(',')[1]
        results_dir = os.path.join(self.results_dir, version)
        build_dict['builds_nowait'] = self.conf.builds_nowait
        build_dict['build_tasks'] = self.conf.build_tasks
        files = {}
        number_retries = 0
        while self.conf.build_retries != number_retries:
            try:
                if self.conf.build_tasks is None:
                    build_dict.update(builder.build(spec, sources, patches, results_dir, **build_dict))
                    if not self.conf.builds_nowait:
                        if self.conf.buildtool == 'fedpkg':
                            # Poll Koji until the task produces RPMs
                            while True:
                                kh = KojiHelper()
                                build_dict['rpm'], build_dict['logs'] = kh.get_koji_tasks(
                                    build_dict['koji_task_id'], results_dir)
                                if build_dict['rpm']:
                                    break
                else:
                    if self.conf.build_tasks:
                        if self.conf.buildtool == 'fedpkg':
                            kh = KojiHelper()
                            try:
                                build_dict['rpm'], build_dict['logs'] = kh.get_koji_tasks(task_id,
                                                                                          results_dir)
                                OutputLogger.set_build_data(version, build_dict)
                                if not build_dict['rpm']:
                                    return False
                            except TypeError:
                                logger.info('Koji tasks are not finished yet. Try again later')
                                return False
                        elif self.conf.buildtool == 'copr':
                            copr_helper = CoprHelper()
                            client = copr_helper.get_client()
                            build_id = int(task_id)
                            status = copr_helper.get_build_status(client, build_id)
                            if status in ['importing', 'pending', 'starting', 'running']:
                                logger.info('Copr build is not finished yet. Try again later')
                                return False
                            else:
                                build_dict['rpm'], build_dict['logs'] = copr_helper.download_build(
                                    client, build_id, results_dir)
                                if status not in ['succeeded', 'skipped']:
                                    logger.info('Copr build {} did not complete successfully'.format(
                                        build_id))
                                    return False
                # Build finishes properly. Go out from while cycle
                OutputLogger.set_build_data(version, build_dict)
                break
            except SourcePackageBuildError:
                # always fail for original version
                if version == 'old':
                    raise RebaseHelperError('Creating old SRPM package failed.')
                logger.error('Building source package failed.')
                # TODO: implement log analyzer for SRPMs and add the checks here!!!
                raise
            except BinaryPackageBuildError:
                # always fail for original version
                rpm_dir = os.path.join(results_dir, 'RPM')
                build_dict.update(builder.get_logs())
                OutputLogger.set_build_data(version, build_dict)
                build_log = 'build.log'
                build_log_path = os.path.join(rpm_dir, build_log)
                if version == 'old':
                    raise RebaseHelperError('Building old RPM package failed. Check log %s',
                                            build_log_path)
                logger.error('Building binary packages failed.')
                msg = 'Building package failed'
                try:
                    # Classify the failure from the build log
                    files = BuildLogAnalyzer.parse_log(rpm_dir, build_log)
                except BuildLogAnalyzerMissingError:
                    raise RebaseHelperError('Build log %s does not exist', build_log_path)
                except BuildLogAnalyzerMakeError:
                    raise RebaseHelperError('%s during build. Check log %s', msg, build_log_path)
                except BuildLogAnalyzerPatchError:
                    raise RebaseHelperError('%s during patching. Check log %s', msg, build_log_path)
                except RuntimeError:
                    if self.conf.build_retries == number_retries:
                        raise RebaseHelperError('%s with unknown reason. Check log %s', msg,
                                                build_log_path)
                if 'missing' in files:
                    missing_files = '\n'.join(files['missing'])
                    logger.info('Files not packaged in the SPEC file:\n%s', missing_files)
                elif 'deleted' in files:
                    deleted_files = '\n'.join(files['deleted'])
                    logger.warning('Removed files packaged in SPEC file:\n%s', deleted_files)
                else:
                    if self.conf.build_retries == number_retries:
                        raise RebaseHelperError("Build failed, but no issues were found in the build log %s",
                                                build_log)
                # Try to fix the %files section based on the analysis before retrying
                self.rebase_spec_file.modify_spec_files_section(files)
                if not self.conf.non_interactive:
                    msg = 'Do you want rebase-helper to try build the packages one more time'
                    if not ConsoleHelper.get_message(msg):
                        raise KeyboardInterrupt
                else:
                    logger.warning('Some patches were not successfully applied')
            # build just failed, otherwise we would break out of the while loop
            logger.debug('Number of retries is %s', self.conf.build_retries)
            # Clean output directories so the retry starts from a clean slate
            if os.path.exists(os.path.join(results_dir, 'RPM')):
                shutil.rmtree(os.path.join(results_dir, 'RPM'))
            if os.path.exists(os.path.join(results_dir, 'SRPM')):
                shutil.rmtree(os.path.join(results_dir, 'SRPM'))
            number_retries += 1
        if self.conf.build_retries == number_retries:
            # FIX: typo in the user-facing message ("unknow" -> "unknown")
            raise RebaseHelperError('Building package failed with unknown reason. '
                                    'Check all available log files.')
    return True