def print_patches_section_cli(cls, color, patch_type):
    """
    Print info about one of the patches key sections

    :param color: color used for printing the message
    :param patch_type: string containing a key of the patch dictionary
    """
    patches = results_store.get_patches()
    if not patches:
        return

    if patch_type in patches:
        print('\n%s patches:' % patch_type)
        for patch in patches[patch_type]:
            ConsoleHelper.cprint(patch, color=color)

def test_get_message(self, monkeypatch, capsys, suffix, answer, kwargs, expected_input):
    question = 'bla bla'
    monkeypatch.setattr('sys.stdin', StringIO(answer))
    inp = ConsoleHelper.get_message(question, **(kwargs or {}))
    assert capsys.readouterr()[0] == question + suffix
    assert inp is expected_input

def test_get_message_no(self):
    question = "bla bla"
    answer = "no"
    self._setup_fake_IO(answer)
    inp = ConsoleHelper.get_message(question)
    sys.stdout.seek(0)
    assert sys.stdout.readline() == question + " [Y/n]? "
    assert inp is False

def test_get_message_yes_default_no(self):
    question = "bla bla"
    answer = "yes"
    self._setup_fake_IO(answer)
    inp = ConsoleHelper.get_message(question, default_yes=False)
    sys.stdout.seek(0)
    assert sys.stdout.readline() == question + " [y/N]? "
    assert inp is True

def test_get_message_no_input_default_yes(self):
    question = "bla bla"
    answer = "\n"
    self._setup_fake_IO(answer)
    inp = ConsoleHelper.get_message(question)
    sys.stdout.seek(0)
    assert sys.stdout.readline() == question + " [Y/n]? "
    assert inp is True

def test_get_message_any_input_default_yes(self):
    question = 'bla bla'
    answer = 'random input\ndsfdf'
    self._setup_fake_IO(answer)
    inp = ConsoleHelper.get_message(question, any_input=True)
    sys.stdout.seek(0)
    assert sys.stdout.readline() == question + ' '
    assert inp is True

def test_get_message_no(self):
    question = 'bla bla'
    answer = 'no'
    self._setup_fake_IO(answer)
    inp = ConsoleHelper.get_message(question)
    sys.stdout.seek(0)
    assert sys.stdout.readline() == question + ' [Y/n]? '
    assert inp is False

def test_get_message_no_input_default_no(self):
    question = 'bla bla'
    answer = '\n'
    self._setup_fake_IO(answer)
    inp = ConsoleHelper.get_message(question, default_yes=False)
    sys.stdout.seek(0)
    assert sys.stdout.readline() == question + ' (y/[n])? '
    assert inp is False

def test_get_message_no(self):
    question = 'bla bla'
    answer = 'no'
    self._setup_fake_IO(answer)
    inp = ConsoleHelper.get_message(question)
    sys.stdout.seek(0)
    assert sys.stdout.readline() == question + ' ([y]/n)? '
    assert inp is False

def test_get_message_yes(self):
    question = 'bla bla'
    answer = 'yes'
    self._setup_fake_IO(answer)
    inp = ConsoleHelper.get_message(question)
    sys.stdout.seek(0)
    assert sys.stdout.readline() == question + ' [Y/n]? '
    assert inp is True

def test_get_message_yes_default_no(self):
    question = 'bla bla'
    answer = 'yes'
    self._setup_fake_IO(answer)
    inp = ConsoleHelper.get_message(question, default_yes=False)
    sys.stdout.seek(0)
    assert sys.stdout.readline() == question + ' [y/N]? '
    assert inp is True

def test_get_message_no_input_default_yes(self):
    question = 'bla bla'
    answer = '\n'
    self._setup_fake_IO(answer)
    inp = ConsoleHelper.get_message(question)
    sys.stdout.seek(0)
    assert sys.stdout.readline() == question + ' [Y/n]? '
    assert inp is True

def test_get_message_any_input_default_no(self):
    question = "bla bla"
    answer = "random input\n"
    self._setup_fake_IO(answer)
    inp = ConsoleHelper.get_message(question, default_yes=False, any_input=True)
    sys.stdout.seek(0)
    assert sys.stdout.readline() == question + " "
    assert inp is False

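The tests above rely on a _setup_fake_IO helper that is not shown in this section. A minimal sketch of what such a helper could look like, assuming it only swaps sys.stdin and sys.stdout for in-memory buffers so that ConsoleHelper.get_message() reads the canned answer and the printed prompt can be seeked and read back (the helper body is an assumption based on how the tests use it, not the project's actual code):

import sys
from io import StringIO

def _setup_fake_IO(self, answer):
    # feed the canned answer to anything reading from stdin
    sys.stdin = StringIO(answer)
    # collect the printed prompt so the test can seek(0) and read it back
    sys.stdout = StringIO()
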
def test_capture_output(self):
    def write():
        with os.fdopen(sys.__stdout__.fileno(), "w") as f:  # pylint:disable=no-member
            f.write("test stdout")
        with os.fdopen(sys.__stderr__.fileno(), "w") as f:  # pylint:disable=no-member
            f.write("test stderr")
    stdout, stderr = ConsoleHelper.capture_output(write, capture_stdout=True, capture_stderr=True)
    assert stdout == "test stdout"
    assert stderr == "test stderr"

def test_capture_output(self):
    def write():
        with os.fdopen(sys.__stdout__.fileno(), 'w') as f:  # pylint: disable=no-member
            f.write('test stdout')
        with os.fdopen(sys.__stderr__.fileno(), 'w') as f:  # pylint: disable=no-member
            f.write('test stderr')
    with ConsoleHelper.Capturer(stdout=True, stderr=True) as capturer:
        write()
    assert capturer.stdout == 'test stdout'
    assert capturer.stderr == 'test stderr'

def test_capture_output(self):
    def write():
        with os.fdopen(sys.__stdout__.fileno(), 'w') as f:
            f.write('test stdout')
        with os.fdopen(sys.__stderr__.fileno(), 'w') as f:
            f.write('test stderr')
    stdout, stderr = ConsoleHelper.capture_output(write, capture_stdout=True, capture_stderr=True)
    assert stdout == 'test stdout'
    assert stderr == 'test stderr'

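The write() helpers above bypass sys.stdout and sys.stderr and write straight to the underlying file descriptors, so capturing has to happen at the fd level. The following is a minimal sketch of that technique, an illustration only and not the actual ConsoleHelper.capture_output/Capturer implementation: duplicate the real descriptor, point it at a temporary file, and restore it on exit.

import os
import sys
import tempfile

class FdCapturer(object):
    """Capture everything written to the real stdout file descriptor."""

    def __enter__(self):
        self._tmp = tempfile.TemporaryFile(mode='w+')
        # remember the real fd, then redirect it to the temporary file
        self._saved = os.dup(sys.__stdout__.fileno())
        os.dup2(self._tmp.fileno(), sys.__stdout__.fileno())
        return self

    def __exit__(self, *exc_info):
        # restore the original fd and collect what was written
        os.dup2(self._saved, sys.__stdout__.fileno())
        os.close(self._saved)
        self._tmp.seek(0)
        self.stdout = self._tmp.read()
        self._tmp.close()
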
def check_build_requires(spec):
    """
    Check if all build dependencies are installed. If not, ask the user
    whether they should be installed. If the user agrees, install the build
    dependencies using PolicyKit.

    :param spec: SpecFile object
    :return:
    """
    req_pkgs = spec.get_requires()
    if not RpmHelper.all_packages_installed(req_pkgs):
        if ConsoleHelper.get_message('\nSome build dependencies are missing. Do you want to install them now'):
            if RpmHelper.install_build_dependencies(spec.get_path()) != 0:
                raise RebaseHelperError('Failed to install build dependencies')

def _git_rebase(cls):
    """Function performs git rebase between old and new sources"""
    # in old_sources do:
    # 1) git remote add new_sources <path_to_new_sources>
    # 2) git fetch new_sources
    # 3) git rebase -i --onto new_sources/master <oldest_commit_old_sources> <latest_commit_old_sources>
    if not cls.cont:
        logger.info('Git-rebase operation to %s is ongoing...', os.path.basename(cls.new_sources))
        upstream = 'new_upstream'
        init_hash, last_hash = cls._prepare_git(upstream)
        ret_code = cls.git_helper.command_rebase(parameters='--onto', upstream_name=upstream,
                                                 first_hash=init_hash, last_hash=last_hash)
    else:
        logger.info('Git-rebase operation continues...')
        ret_code = cls.git_helper.command_rebase(parameters='--skip')
    cls._get_git_helper_data()
    logger.debug(cls.output_data)
    modified_patches = []
    deleted_patches = []
    unapplied_patches = []
    while True:
        if int(ret_code) != 0:
            patch_name = cls.git_helper.get_unapplied_patch(cls.output_data)
            logger.info("Git has problems with rebasing patch %s", patch_name)
            if not cls.non_interactive:
                cls.git_helper.command_mergetool()
            else:
                unapplied_patches.append(patch_name)
            modified_files = cls.git_helper.command_diff_status()
            cls.git_helper.command_add_files(parameters=modified_files)
            base_name = os.path.join(cls.kwargs['results_dir'], patch_name)
            cls.git_helper.command_diff('HEAD', output_file=base_name)
            with open(base_name, "r") as f:
                del_patches = f.readlines()
            if not del_patches:
                deleted_patches.append(base_name)
            else:
                logger.info('Following files were modified: %s', ','.join(modified_files).decode(defenc))
                cls.git_helper.command_commit(message=patch_name)
                cls.git_helper.command_diff('HEAD~1', output_file=base_name)
                modified_patches.append(base_name)
            if not cls.non_interactive:
                if not ConsoleHelper.get_message('Do you want to continue with another patch'):
                    raise KeyboardInterrupt
            ret_code = cls.git_helper.command_rebase('--skip')
            cls._get_git_helper_data()
        else:
            break
    deleted_patches = cls._update_deleted_patches(deleted_patches)
    # TODO: correct settings for merge tool in ~/.gitconfig
    # currently meld is not started
    return {'modified': modified_patches, 'deleted': deleted_patches, 'unapplied': unapplied_patches}

def prepare(cls, spec, conf):
    """
    Check if all build dependencies are installed. If not, ask the user
    whether they should be installed. If the user agrees, install the build
    dependencies using PolicyKit.

    :param spec: SpecFile object
    """
    req_pkgs = spec.get_requires()
    if not RpmHelper.all_packages_installed(req_pkgs):
        question = '\nSome build dependencies are missing. Do you want to install them now'
        if conf.non_interactive or ConsoleHelper.get_message(question):
            if RpmHelper.install_build_dependencies(spec.get_path(), assume_yes=conf.non_interactive) != 0:
                raise RebaseHelperError('Failed to install build dependencies')

def run():
    debug_log_file = None
    try:
        # be verbose until debug_log_file is created
        cli = CLI()
        if hasattr(cli, 'version'):
            logger.info(VERSION)
            sys.exit(0)

        config = Config(getattr(cli, 'config-file', None))
        config.merge(cli)
        for handler in [main_handler, output_tool_handler]:
            handler.set_terminal_background(config.background)

        ConsoleHelper.use_colors = ConsoleHelper.should_use_colors(config)
        execution_dir, results_dir, debug_log_file = Application.setup(config)
        if not config.verbose:
            main_handler.setLevel(logging.INFO)
        app = Application(config, execution_dir, results_dir, debug_log_file)
        app.run()
    except KeyboardInterrupt:
        logger.info('Interrupted by user')
    except RebaseHelperError as e:
        if e.msg:
            logger.error('%s', e.msg)
        else:
            logger.error('%s', six.text_type(e))
        sys.exit(1)
    except SystemExit as e:
        sys.exit(e.code)
    except BaseException:
        if debug_log_file:
            logger.error('rebase-helper failed due to an unexpected error. Please report this problem'
                         '\nusing the following link: %s'
                         '\nand include the content of'
                         '\n\'%s\''
                         '\nfile in the report.'
                         '\nThank you!',
                         NEW_ISSUE_LINK, debug_log_file)
        else:
            logger.error('rebase-helper failed due to an unexpected error. Please report this problem'
                         '\nusing the following link: %s'
                         '\nand include the traceback following this message in the report.'
                         '\nThank you!',
                         NEW_ISSUE_LINK)
        logger.trace('', exc_info=1)
        sys.exit(1)
    sys.exit(0)

def run():
    debug_log_file = None
    try:
        # be verbose until debug_log_file is created
        handler = LoggerHelper.add_stream_handler(logger, logging.DEBUG)
        if "--builder-options" in sys.argv[1:]:
            raise RebaseHelperError('Wrong format of --builder-options. It must be in the following form:'
                                    ' --builder-options="--desired-builder-option".')
        cli = CLI()
        if cli.version:
            logger.info(VERSION)
            sys.exit(0)

        ConsoleHelper.use_colors = ConsoleHelper.should_use_colors(cli)
        execution_dir, results_dir, debug_log_file = Application.setup(cli)
        if not cli.verbose:
            handler.setLevel(logging.INFO)
        app = Application(cli, execution_dir, results_dir, debug_log_file)
        app.run()
    except KeyboardInterrupt:
        logger.info('\nInterrupted by user')
    except RebaseHelperError as e:
        if e.msg:
            logger.error('\n%s', e.msg)
        else:
            logger.error('\n%s', six.text_type(e))
        sys.exit(1)
    except SystemExit as e:
        sys.exit(e.code)
    except BaseException:
        if debug_log_file:
            logger.error('\nrebase-helper failed due to an unexpected error. Please report this problem'
                         '\nusing the following link: %s'
                         '\nand include the content of'
                         '\n\'%s\''
                         '\nfile in the report.'
                         '\nThank you!',
                         NEW_ISSUE_LINK, debug_log_file)
        else:
            logger.error('\nrebase-helper failed due to an unexpected error. Please report this problem'
                         '\nusing the following link: %s'
                         '\nand include the traceback following this message in the report.'
                         '\nThank you!',
                         NEW_ISSUE_LINK)
        logger.debug('\n', exc_info=1)
        sys.exit(1)
    sys.exit(0)

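Both run() variants decide up front whether colored output is enabled via ConsoleHelper.should_use_colors(). As a purely illustrative sketch of the kind of check such a method typically performs (an assumption, not the project's actual logic): honour an explicit 'always'/'never' setting and otherwise fall back to whether stdout is a terminal.

import sys

def should_use_colors(conf):
    # 'color' being one of 'always', 'never' or 'auto' is a hypothetical convention
    color = getattr(conf, 'color', 'auto')
    if color == 'always':
        return True
    if color == 'never':
        return False
    return sys.stdout.isatty()
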
def _git_rebase(cls):
    """Function performs git rebase between old and new sources"""
    # in old_sources do:
    # 1) git remote add new_sources <path_to_new_sources>
    # 2) git fetch new_sources
    # 3) git rebase -i --onto new_sources/master <oldest_commit_old_sources> <latest_commit_old_sources>
    if not cls.cont:
        logger.info('Git-rebase operation to %s is ongoing...', os.path.basename(cls.new_sources))
        upstream = 'new_upstream'
        init_hash, last_hash = cls._prepare_git(upstream)
        ret_code = cls.git_helper.command_rebase(parameters='--onto', upstream_name=upstream,
                                                 first_hash=init_hash, last_hash=last_hash)
    else:
        logger.info('Git-rebase operation continues...')
        ret_code = cls.git_helper.command_rebase(parameters='--skip')
    cls._get_git_helper_data()
    logger.debug(cls.output_data)
    patch_dictionary = {}
    modified_patches = []
    deleted_patches = []
    unapplied_patches = []
    while True:
        log = cls.git_helper.command_log(parameters='--pretty=oneline')
        for patch_name in cls.git_helper.get_automerged_patches(cls.output_data):
            index = [i for i, l in enumerate(log) if l.endswith(patch_name)]
            if index:
                commit = GitHelper.get_commit_hash_log(log, number=index[0])
                base_name = os.path.join(cls.kwargs['results_dir'], patch_name)
                cls.git_helper.command_diff('{}~1'.format(commit), commit, output_file=base_name)
                modified_patches.append(base_name)
        if int(ret_code) != 0:
            if not cls.non_interactive:
                patch_name = cls.git_helper.get_unapplied_patch(cls.output_data)
                logger.info("Git has problems with rebasing patch %s", patch_name)
                cls.git_helper.command_mergetool()
            else:
                # Take the patch which failed from the .git/rebase-apply/next file
                try:
                    with open(os.path.join(cls.old_sources, '.git', 'rebase-apply', 'next')) as f:
                        number = '\n'.join(f.readlines())
                except IOError:
                    raise RuntimeError("Git rebase failed with unknown reason. Please check log file")
                # Get the patch which failed
                unapplied_patches.append(cls.patches[int(number) - 1].get_patch_name())
                ret_code = cls.git_helper.command_rebase('--skip')
                cls._get_git_helper_data()
                continue
            modified_files = cls.git_helper.command_diff_status()
            cls.git_helper.command_add_files(parameters=modified_files)
            base_name = os.path.join(cls.kwargs['results_dir'], patch_name)
            cls.git_helper.command_diff('HEAD', output_file=base_name)
            with open(base_name, "r") as f:
                del_patches = f.readlines()
            if not del_patches:
                deleted_patches.append(base_name)
            else:
                logger.info('Following files were modified: %s', ','.join(modified_files))
                cls.git_helper.command_commit(message=patch_name)
                cls.git_helper.command_diff('HEAD~1', output_file=base_name)
                modified_patches.append(base_name)
            if not cls.non_interactive:
                if not ConsoleHelper.get_message('Do you want to continue with another patch'):
                    raise KeyboardInterrupt
            ret_code = cls.git_helper.command_rebase('--skip')
            cls._get_git_helper_data()
        else:
            break
    deleted_patches = cls._update_deleted_patches(deleted_patches, unapplied_patches)
    if deleted_patches:
        patch_dictionary['deleted'] = deleted_patches
    if modified_patches:
        patch_dictionary['modified'] = modified_patches
    if unapplied_patches:
        patch_dictionary['unapplied'] = unapplied_patches
    # TODO: correct settings for merge tool in ~/.gitconfig
    # currently meld is not started
    return patch_dictionary

def build_binary_packages(self):
    """Function calls build class for building packages"""
    try:
        builder = Builder(self.conf.buildtool)
    except NotImplementedError as e:
        raise RebaseHelperError('{}. Supported build tools are {}'.format(
            six.text_type(e), Builder.get_supported_tools()))

    for version in ['old', 'new']:
        successful_builds = 0
        try_build_again = False
        while successful_builds < 1:
            results_dir = '{}-build'.format(os.path.join(self.results_dir, version))
            spec = None
            task_id = None
            koji_build_id = None
            build_dict = {}

            if self.conf.build_tasks is None:
                spec = self.spec_file if version == 'old' else self.rebase_spec_file
                package_name = spec.get_package_name()
                package_version = spec.get_version()
                package_full_version = spec.get_full_version()

                if version == 'old' and self.conf.get_old_build_from_koji:
                    if KojiHelper.functional:
                        session = KojiHelper.create_session()
                        koji_version, koji_build_id = KojiHelper.get_latest_build(session, package_name)
                        if koji_version:
                            if koji_version != package_version:
                                logger.warning('Version of the latest Koji build (%s) with id (%s) '
                                               'differs from version in SPEC file (%s)!',
                                               koji_version, koji_build_id, package_version)
                            package_version = package_full_version = koji_version
                        else:
                            logger.warning('Unable to find the latest Koji build!')
                    else:
                        logger.warning('Unable to get the latest Koji build!')

                build_dict = dict(
                    name=package_name,
                    version=package_version,
                    builds_nowait=self.conf.builds_nowait,
                    build_tasks=self.conf.build_tasks,
                    builder_options=self.conf.builder_options,
                    srpm=results_store.get_build(version).get('srpm'),
                    srpm_logs=results_store.get_build(version).get('logs'))

                # prepare for building
                builder.prepare(spec, self.conf)

                logger.info('Building binary packages for %s version %s', package_name, package_full_version)
            else:
                task_id = self.conf.build_tasks[0] if version == 'old' else self.conf.build_tasks[1]

            try:
                if self.conf.build_tasks is None:
                    if koji_build_id:
                        session = KojiHelper.create_session()
                        build_dict['rpm'], build_dict['logs'] = KojiHelper.download_build(
                            session, koji_build_id, results_dir)
                    else:
                        build_dict.update(builder.build(spec, results_dir, **build_dict))
                if builder.creates_tasks() and task_id and not koji_build_id:
                    if not self.conf.builds_nowait:
                        build_dict['rpm'], build_dict['logs'] = builder.wait_for_task(
                            build_dict, task_id, results_dir)
                    elif self.conf.build_tasks:
                        build_dict['rpm'], build_dict['logs'] = builder.get_detached_task(task_id, results_dir)
                build_dict = self._sanitize_build_dict(build_dict)
                results_store.set_build_data(version, build_dict)
                successful_builds += 1
            except RebaseHelperError:
                # Proper RebaseHelperError instance was created already. Re-raise it.
                raise
            except BinaryPackageBuildError as e:
                build_dict.update(builder.get_logs())
                build_dict['binary_package_build_error'] = six.text_type(e)
                build_dict = self._sanitize_build_dict(build_dict)
                results_store.set_build_data(version, build_dict)
                if e.logfile is None:
                    msg = 'Building {} RPM packages failed; see logs in {} for more information'.format(
                        version, os.path.join(results_dir, 'RPM'))
                else:
                    msg = 'Building {} RPM packages failed; see {} for more information'.format(version, e.logfile)
                logger.info(msg)
                if self.rebase_spec_file:
                    # Save current rebase spec file content
                    self.rebase_spec_file.save()
                if not self.conf.non_interactive and \
                        ConsoleHelper.get_message('Do you want to try it one more time'):
                    try_build_again = True
                else:
                    raise RebaseHelperError(msg, logfiles=builder.get_logs().get('logs'))
            except Exception:
                raise RebaseHelperError('Building package failed with unknown reason. '
                                        'Check all available log files.')

            if try_build_again:
                successful_builds = 0
                try_build_again = False
                logger.info('Now it is time to make changes to %s if necessary.', self.rebase_spec_file.path)
                if not ConsoleHelper.get_message('Do you want to continue with the rebuild now'):
                    raise KeyboardInterrupt
                # Update rebase spec file content after potential manual modifications
                self.rebase_spec_file._read_spec_content()  # pylint: disable=protected-access
                self.rebase_spec_file._update_data()  # pylint: disable=protected-access
                # clear current version output directories
                if os.path.exists(os.path.join(results_dir, 'RPM')):
                    shutil.rmtree(os.path.join(results_dir, 'RPM'))

    if self.conf.builds_nowait and not self.conf.build_tasks:
        if builder.creates_tasks():
            self.print_task_info(builder)

def print_cli_summary(cls, app):
    """
    Print a report of the rebase

    :param app: Application instance
    """
    cls.app = app
    cls.print_patches_cli()
    ConsoleHelper.cprint('\nAvailable logs:', color='yellow')
    print('{0}:\n{1}'.format('Debug log', app.debug_log_file))
    if results_store.get_old_build() is not None:
        print('{0}:\n{1}'.format('Old build logs and (S)RPMs', os.path.join(app.results_dir, 'old-build')))
    if results_store.get_new_build() is not None:
        print('{0}:\n{1}'.format('New build logs and (S)RPMs', os.path.join(app.results_dir, 'new-build')))
    print('')

    ConsoleHelper.cprint('%s:' % 'Rebased sources', color='yellow')
    print("%s" % app.rebased_sources_dir)

    ConsoleHelper.cprint('%s:' % 'Generated patch', color='yellow')
    print("%s\n" % os.path.join(app.results_dir, 'changes.patch'))

    cls.print_report_file_path()

    result = results_store.get_result_message()

    if not app.conf.patch_only:
        if 'success' in result:
            ConsoleHelper.cprint('\n%s' % result['success'], color='green')
        # Error is printed out through exception caught in CliHelper.run()
    else:
        if results_store.get_patches()['success']:
            ConsoleHelper.cprint("\nPatching successful", color='green')
        else:
            ConsoleHelper.cprint("\nPatching failed", color='red')

def print_report_file_path(cls):
    """Print path to the report file"""
    ConsoleHelper.cprint('%s report:' % cls.NAME, color='yellow')
    print('%s' % os.path.join(cls.app.results_dir, 'report.' + cls.get_extension()))

def test_get_message(self, suffix, kwargs, expected_input):
    question = 'bla bla'
    inp = ConsoleHelper.get_message(question, **(kwargs or {}))
    sys.stdout.seek(0)
    assert sys.stdout.readline().decode(sys.stdout.encoding) == question + suffix
    assert inp is expected_input

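This variant of test_get_message takes suffix, kwargs and expected_input as arguments, so it is presumably driven by a pytest parametrization. A hypothetical decorator that would match the prompts asserted in the surrounding tests (the parameter values are inferred from those assertions, not copied from the project's test data; ConsoleHelper is assumed to be importable):

import pytest
from io import StringIO

@pytest.mark.parametrize('suffix, answer, kwargs, expected_input', [
    (' [Y/n]? ', 'yes\n', None, True),
    (' [Y/n]? ', 'no\n', None, False),
    (' [Y/n]? ', '\n', None, True),                        # empty answer, default is yes
    (' [y/N]? ', 'yes\n', dict(default_yes=False), True),
    (' ', 'anything\n', dict(any_input=True), True),       # any input accepted
])
def test_get_message(monkeypatch, capsys, suffix, answer, kwargs, expected_input):
    question = 'bla bla'
    monkeypatch.setattr('sys.stdin', StringIO(answer))
    inp = ConsoleHelper.get_message(question, **(kwargs or {}))
    assert capsys.readouterr()[0] == question + suffix
    assert inp is expected_input
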
def _git_rebase(cls):
    """Function performs git rebase between old and new sources"""
    # in old_sources do:
    # 1) git remote add new_sources <path_to_new_sources>
    # 2) git fetch new_sources
    # 3) git rebase --onto new_sources/master <root_commit_old_sources> <last_commit_old_sources>
    if not cls.cont:
        logger.info('git-rebase operation to %s is ongoing...', os.path.basename(cls.new_sources))
        upstream = 'new_upstream'
        cls.old_repo.create_remote(upstream, url=cls.new_sources).fetch()
        root_commit = cls.old_repo.git.rev_list('HEAD', max_parents=0)
        last_commit = cls.old_repo.commit('HEAD')
        try:
            cls.output_data = cls.old_repo.git.rebase(root_commit, last_commit,
                                                      onto='{}/master'.format(upstream),
                                                      stdout_as_string=six.PY3)
        except git.GitCommandError as e:
            ret_code = e.status
            cls.output_data = e.stdout
        else:
            ret_code = 0
    else:
        logger.info('git-rebase operation continues...')
        try:
            cls.output_data = cls.old_repo.git.rebase(skip=True, stdout_as_string=six.PY3)
        except git.GitCommandError as e:
            ret_code = e.status
            cls.output_data = e.stdout
        else:
            ret_code = 0
    logger.debug(cls.output_data)
    patch_dictionary = {}
    modified_patches = []
    inapplicable_patches = []
    while True:
        automerged_patches = cls._get_automerged_patches(cls.output_data)
        for patch_name in automerged_patches:
            commits = [c for c in cls.old_repo.iter_commits() if c.summary.endswith(patch_name)]
            if commits:
                base_name = os.path.join(cls.kwargs['rebased_sources_dir'], patch_name)
                diff = cls.old_repo.git.diff(commits[0].parents[0], commits[0], stdout_as_string=False)
                with open(base_name, 'wb') as f:
                    f.write(diff)
                    f.write(b'\n')
                modified_patches.append(patch_name)
        if ret_code != 0:
            # get name of the current patch using .git/rebase-apply/next
            try:
                with open(os.path.join(cls.old_sources, '.git', 'rebase-apply', 'next')) as f:
                    patch_name = cls.patches[int(f.readline()) - 1].get_patch_name()
            except IOError:
                raise RuntimeError('Git rebase failed with unknown reason. Please check log file')
            if not cls.non_interactive:
                logger.info("Git has problems with rebasing patch %s", patch_name)
                GitHelper.run_mergetool(cls.old_repo)
            else:
                inapplicable_patches.append(patch_name)
                try:
                    cls.output_data = cls.old_repo.git.rebase(skip=True, stdout_as_string=six.PY3)
                except git.GitCommandError as e:
                    ret_code = e.status
                    cls.output_data = e.stdout
                else:
                    ret_code = 0
                continue
            base_name = os.path.join(cls.kwargs['rebased_sources_dir'], patch_name)
            # unstaged changes
            diff = cls.old_repo.commit().diff(None)
            if diff:
                # staged changes
                diff = cls.old_repo.index.diff(cls.old_repo.commit())
                modified_files = [d.a_path for d in diff]
                logger.info('Following files were modified: %s', ', '.join(modified_files))
                try:
                    commit = cls.old_repo.index.commit(patch_name, skip_hooks=True)
                except git.UnmergedEntriesError:
                    inapplicable_patches.append(patch_name)
                else:
                    diff = cls.old_repo.git.diff(commit.parents[0], commit, stdout_as_string=False)
                    with open(base_name, 'wb') as f:
                        f.write(diff)
                        f.write(b'\n')
                    modified_patches.append(patch_name)
            if not cls.non_interactive:
                if not ConsoleHelper.get_message('Do you want to continue with another patch'):
                    raise KeyboardInterrupt
            try:
                cls.output_data = cls.old_repo.git.rebase(skip=True, stdout_as_string=six.PY3)
            except git.GitCommandError as e:
                ret_code = e.status
                cls.output_data = e.stdout
            else:
                ret_code = 0
        else:
            break
    deleted_patches = cls._update_deleted_patches(inapplicable_patches)
    if deleted_patches:
        patch_dictionary['deleted'] = deleted_patches
    if modified_patches:
        patch_dictionary['modified'] = modified_patches
    if inapplicable_patches:
        patch_dictionary['inapplicable'] = inapplicable_patches
    patches = [os.path.basename(p.path) for p in cls.patches]
    untouched_patches = [p for p in patches
                         if p not in deleted_patches + modified_patches + inapplicable_patches]
    if untouched_patches:
        patch_dictionary['untouched'] = untouched_patches
    # TODO: correct settings for merge tool in ~/.gitconfig
    # currently meld is not started
    return patch_dictionary

def build_packages(self):
    """Function calls build class for building packages"""
    try:
        builder = Builder(self.conf.buildtool)
    except NotImplementedError as ni_e:
        raise RebaseHelperError('%s. Supported build tools are %s' % (six.text_type(ni_e),
                                                                      Builder.get_supported_tools()))
    for version in ['old', 'new']:
        spec_object = self.spec_file if version == 'old' else self.rebase_spec_file
        build_dict = {}
        task_id = None
        koji_build_id = None
        if self.conf.build_tasks is None:
            pkg_name = spec_object.get_package_name()
            pkg_version = spec_object.get_version()
            pkg_full_version = spec_object.get_full_version()
            if version == 'old' and self.conf.get_old_build_from_koji:
                if KojiHelper.functional:
                    koji_version, koji_build_id = KojiHelper.get_latest_build(pkg_name)
                    if koji_version:
                        if koji_version != pkg_version:
                            logger.warning('Version of the latest Koji build (%s) with id (%s) '
                                           'differs from version in SPEC file (%s)!',
                                           koji_version, koji_build_id, pkg_version)
                        pkg_version = pkg_full_version = koji_version
                    else:
                        logger.warning('Unable to find the latest Koji build!')
                else:
                    logger.warning('Unable to get the latest Koji build!')
            # prepare for building
            builder.prepare(spec_object, self.conf)
            build_dict['name'] = pkg_name
            build_dict['version'] = pkg_version
            patches = [x.get_path() for x in spec_object.get_patches()]
            spec = spec_object.get_path()
            sources = spec_object.get_sources()
            logger.info('Building packages for %s version %s', pkg_name, pkg_full_version)
        else:
            if version == 'old':
                task_id = self.conf.build_tasks[0]
            else:
                task_id = self.conf.build_tasks[1]
        results_dir = os.path.join(self.results_dir, version) + '-build'
        files = {}
        number_retries = 0
        while self.conf.build_retries != number_retries:
            try:
                if self.conf.build_tasks is None:
                    if koji_build_id:
                        build_dict['rpm'], build_dict['logs'] = KojiHelper.download_build(koji_build_id,
                                                                                          results_dir)
                    else:
                        build_dict.update(builder.build(spec, sources, patches, results_dir, **build_dict))
                if builder.creates_tasks() and not koji_build_id:
                    if not self.conf.builds_nowait:
                        build_dict['rpm'], build_dict['logs'] = builder.wait_for_task(build_dict, results_dir)
                        if build_dict['rpm'] is None:
                            return False
                    elif self.conf.build_tasks:
                        build_dict['rpm'], build_dict['logs'] = builder.get_detached_task(task_id, results_dir)
                        if build_dict['rpm'] is None:
                            return False
                # Build finished properly. Break out of the while loop.
                results_store.set_build_data(version, build_dict)
                break
            except SourcePackageBuildError as e:
                build_dict.update(builder.get_logs())
                build_dict['source_package_build_error'] = six.text_type(e)
                results_store.set_build_data(version, build_dict)
                # always fail for original version
                if version == 'old':
                    raise RebaseHelperError('Creating old SRPM package failed.')
                logger.error('Building source package failed.')
                # TODO: implement log analyzer for SRPMs and add the checks here!!!
                raise
            except BinaryPackageBuildError as e:
                # always fail for original version
                rpm_dir = os.path.join(results_dir, 'RPM')
                build_dict.update(builder.get_logs())
                build_dict['binary_package_build_error'] = six.text_type(e)
                results_store.set_build_data(version, build_dict)
                build_log = 'build.log'
                build_log_path = os.path.join(rpm_dir, build_log)
                if version == 'old':
                    error_message = 'Building old RPM package failed. Check logs: {} '.format(
                        builder.get_logs().get('logs', 'N/A'))
                    raise RebaseHelperError(error_message, logfiles=builder.get_logs().get('logs'))
                logger.error('Building binary packages failed.')
                msg = 'Building package failed'
                try:
                    files = BuildLogAnalyzer.parse_log(rpm_dir, build_log)
                except BuildLogAnalyzerMissingError:
                    raise RebaseHelperError('Build log %s does not exist', build_log_path)
                except BuildLogAnalyzerMakeError:
                    raise RebaseHelperError('%s during build. Check log %s', msg, build_log_path,
                                            logfiles=[build_log_path])
                except BuildLogAnalyzerPatchError:
                    raise RebaseHelperError('%s during patching. Check log %s', msg, build_log_path,
                                            logfiles=[build_log_path])
                except RuntimeError:
                    if self.conf.build_retries == number_retries:
                        raise RebaseHelperError('%s with unknown reason. Check log %s', msg, build_log_path,
                                                logfiles=[build_log_path])
                if 'missing' in files:
                    missing_files = '\n'.join(files['missing'])
                    logger.info('Files not packaged in the SPEC file:\n%s', missing_files)
                elif 'deleted' in files:
                    deleted_files = '\n'.join(files['deleted'])
                    logger.warning('Removed files packaged in SPEC file:\n%s', deleted_files)
                else:
                    if self.conf.build_retries == number_retries:
                        raise RebaseHelperError("Build failed, but no issues were found in the build log %s",
                                                build_log, logfiles=[build_log])
                self.rebase_spec_file.modify_spec_files_section(files)
                if not self.conf.non_interactive:
                    msg = 'Do you want rebase-helper to try to build the packages one more time'
                    if not ConsoleHelper.get_message(msg):
                        raise KeyboardInterrupt
                else:
                    logger.warning('Some patches were not successfully applied')
                # build just failed, otherwise we would break out of the while loop
                logger.debug('Number of retries is %s', self.conf.build_retries)
                number_retries += 1
                if self.conf.build_retries > number_retries:
                    # only remove builds if this retry is not the last one
                    if os.path.exists(os.path.join(results_dir, 'RPM')):
                        shutil.rmtree(os.path.join(results_dir, 'RPM'))
                    if os.path.exists(os.path.join(results_dir, 'SRPM')):
                        shutil.rmtree(os.path.join(results_dir, 'SRPM'))
        if self.conf.build_retries == number_retries:
            raise RebaseHelperError('Building package failed with unknown reason. '
                                    'Check all available log files.')
    if self.conf.builds_nowait and not self.conf.build_tasks:
        if builder.creates_tasks():
            self.print_task_info(builder)
    return True

def test_parse_rgb_device_specification(self, specification, expected_rgb, expected_bit_width):
    rgb, bit_width = ConsoleHelper.parse_rgb_device_specification(specification)
    assert rgb == expected_rgb
    assert bit_width == expected_bit_width

def test_color_is_light(self, rgb_tuple, bit_width, expected_result):
    assert ConsoleHelper.color_is_light(rgb_tuple, bit_width) == expected_result

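The two parametrized tests above exercise parsing of X-style 'rgb:<red>/<green>/<blue>' device specifications and a lightness check on the parsed tuple. A hypothetical parameter set illustrating the expected shape of the data (the concrete values follow the X11 RGB device string convention and are assumptions, not the project's actual test data):

import pytest

@pytest.mark.parametrize('specification, expected_rgb, expected_bit_width', [
    ('rgb:0/0/0', (0x0, 0x0, 0x0), 4),          # one hex digit per channel, 4-bit values
    ('rgb:ff/ff/ff', (0xff, 0xff, 0xff), 8),    # two hex digits per channel, 8-bit values
    ('rgb:ffff/0000/0000', (0xffff, 0x0, 0x0), 16),
])
def test_parse_rgb_device_specification(specification, expected_rgb, expected_bit_width):
    rgb, bit_width = ConsoleHelper.parse_rgb_device_specification(specification)
    assert rgb == expected_rgb
    assert bit_width == expected_bit_width
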
def build_packages(self):
    """Function calls build class for building packages"""
    try:
        builder = Builder(self.conf.buildtool)
    except NotImplementedError as ni_e:
        raise RebaseHelperError('%s. Supported build tools are %s',
                                ni_e.message, Builder.get_supported_tools())
    for version in ['old', 'new']:
        spec_object = self.spec_file if version == 'old' else self.rebase_spec_file
        build_dict = {}
        build_dict['name'] = spec_object.get_package_name()
        build_dict['version'] = spec_object.get_version()
        logger.debug(build_dict)
        patches = [x.get_path() for x in spec_object.get_patches()]
        results_dir = os.path.join(self.results_dir, version)
        spec = spec_object.get_path()
        sources = spec_object.get_sources()
        failed_before = False
        while True:
            try:
                build_dict.update(builder.build(spec, sources, patches, results_dir, **build_dict))
                OutputLogger.set_build_data(version, build_dict)
                break
            except SourcePackageBuildError:
                # always fail for original version
                if version == 'old':
                    raise RebaseHelperError('Creating old SRPM package failed.')
                logger.error('Building source package failed.')
                # TODO: implement log analyzer for SRPMs and add the checks here!!!
                raise
            except BinaryPackageBuildError:
                # always fail for original version
                rpm_dir = os.path.join(results_dir, 'RPM')
                build_log = 'build.log'
                build_log_path = os.path.join(rpm_dir, build_log)
                if version == 'old':
                    raise RebaseHelperError('Building old RPM package failed. Check log %s', build_log_path)
                logger.error('Building binary packages failed.')
                try:
                    files = BuildLogAnalyzer.parse_log(rpm_dir, build_log)
                except BuildLogAnalyzerMissingError:
                    raise RebaseHelperError('Build log %s does not exist', build_log_path)
                except BuildLogAnalyzerMakeError:
                    raise RebaseHelperError('Building package failed during build. Check log %s',
                                            build_log_path)
                except BuildLogAnalyzerPatchError:
                    raise RebaseHelperError('Building package failed during patching. Check log %s'
                                            % build_log_path)
                if files['missing']:
                    missing_files = '\n'.join(files['added'])
                    logger.info('Files not packaged in the SPEC file:\n%s', missing_files)
                elif files['deleted']:
                    deleted_files = '\n'.join(files['deleted'])
                    logger.warning('Removed files packaged in SPEC file:\n%s', deleted_files)
                else:
                    raise RebaseHelperError("Build failed, but no issues were found in the build log %s",
                                            build_log)
                self.rebase_spec_file.modify_spec_files_section(files)
                if not self.conf.non_interactive:
                    if failed_before:
                        if not ConsoleHelper.get_message('Do you want rebase-helper to try to build '
                                                         'the packages one more time'):
                            raise KeyboardInterrupt
                else:
                    logger.warning('Some patches were not successfully applied')
                    shutil.rmtree(os.path.join(results_dir, 'RPM'))
                    shutil.rmtree(os.path.join(results_dir, 'SRPM'))
                    return False
                # build just failed, otherwise we would break out of the while loop
                failed_before = True
                shutil.rmtree(os.path.join(results_dir, 'RPM'))
                shutil.rmtree(os.path.join(results_dir, 'SRPM'))
    return True

def build_packages(self):
    """Function calls build class for building packages"""
    if self.conf.buildtool == KojiBuildTool.CMD and not koji_builder:
        logger.info('Importing module koji failed. Switching to mock builder.')
        self.conf.buildtool = MockBuildTool.CMD
    try:
        builder = Builder(self.conf.buildtool)
    except NotImplementedError as ni_e:
        raise RebaseHelperError('%s. Supported build tools are %s' % (six.text_type(ni_e),
                                                                      Builder.get_supported_tools()))
    for version in ['old', 'new']:
        spec_object = self.spec_file if version == 'old' else self.rebase_spec_file
        build_dict = {}
        task_id = None
        if self.conf.build_tasks is None:
            build_dict['name'] = spec_object.get_package_name()
            build_dict['version'] = spec_object.get_version()
            patches = [x.get_path() for x in spec_object.get_patches()]
            spec = spec_object.get_path()
            sources = spec_object.get_sources()
            logger.info('Building packages for %s version %s',
                        spec_object.get_package_name(),
                        spec_object.get_full_version())
        else:
            if version == 'old':
                task_id = self.conf.build_tasks[0]
            else:
                task_id = self.conf.build_tasks[1]
        results_dir = os.path.join(self.results_dir, version)
        build_dict['builds_nowait'] = self.conf.builds_nowait
        build_dict['build_tasks'] = self.conf.build_tasks
        build_dict['builder_options'] = self.conf.builder_options
        files = {}
        number_retries = 0
        while self.conf.build_retries != number_retries:
            try:
                if self.conf.build_tasks is None:
                    build_dict.update(builder.build(spec, sources, patches, results_dir, **build_dict))
                    if not self.conf.builds_nowait:
                        if self.conf.buildtool == KojiBuildTool.CMD:
                            while not build_dict['rpm']:
                                kh = KojiHelper()
                                build_dict['rpm'], build_dict['logs'] = kh.get_koji_tasks(
                                    build_dict['koji_task_id'], results_dir)
                else:
                    if self.conf.build_tasks:
                        if self.conf.buildtool == KojiBuildTool.CMD:
                            kh = KojiHelper()
                            try:
                                build_dict['rpm'], build_dict['logs'] = kh.get_koji_tasks(task_id, results_dir)
                                results_store.set_build_data(version, build_dict)
                                if not build_dict['rpm']:
                                    return False
                            except TypeError:
                                logger.info('Koji tasks are not finished yet. Try again later')
                                return False
                        elif self.conf.buildtool == CoprBuildTool.CMD:
                            copr_helper = CoprHelper()
                            client = copr_helper.get_client()
                            build_id = int(task_id)
                            status = copr_helper.get_build_status(client, build_id)
                            if status in ['importing', 'pending', 'starting', 'running']:
                                logger.info('Copr build is not finished yet. Try again later')
                                return False
                            else:
                                build_dict['rpm'], build_dict['logs'] = copr_helper.download_build(
                                    client, build_id, results_dir)
                                if status not in ['succeeded', 'skipped']:
                                    logger.info('Copr build {} did not complete successfully'.format(build_id))
                                    return False
                # Build finished properly. Break out of the while loop.
                results_store.set_build_data(version, build_dict)
                break
            except SourcePackageBuildError:
                build_dict.update(builder.get_logs())
                results_store.set_build_data(version, build_dict)
                # always fail for original version
                if version == 'old':
                    raise RebaseHelperError('Creating old SRPM package failed.')
                logger.error('Building source package failed.')
                # TODO: implement log analyzer for SRPMs and add the checks here!!!
                raise
            except BinaryPackageBuildError:
                # always fail for original version
                rpm_dir = os.path.join(results_dir, 'RPM')
                build_dict.update(builder.get_logs())
                results_store.set_build_data(version, build_dict)
                build_log = 'build.log'
                build_log_path = os.path.join(rpm_dir, build_log)
                if version == 'old':
                    error_message = 'Building old RPM package failed. Check logs: {} '.format(
                        builder.get_logs().get('logs', 'N/A'))
                    raise RebaseHelperError(error_message)
                logger.error('Building binary packages failed.')
                msg = 'Building package failed'
                try:
                    files = BuildLogAnalyzer.parse_log(rpm_dir, build_log)
                except BuildLogAnalyzerMissingError:
                    raise RebaseHelperError('Build log %s does not exist', build_log_path)
                except BuildLogAnalyzerMakeError:
                    raise RebaseHelperError('%s during build. Check log %s', msg, build_log_path)
                except BuildLogAnalyzerPatchError:
                    raise RebaseHelperError('%s during patching. Check log %s', msg, build_log_path)
                except RuntimeError:
                    if self.conf.build_retries == number_retries:
                        raise RebaseHelperError('%s with unknown reason. Check log %s', msg, build_log_path)
                if 'missing' in files:
                    missing_files = '\n'.join(files['missing'])
                    logger.info('Files not packaged in the SPEC file:\n%s', missing_files)
                elif 'deleted' in files:
                    deleted_files = '\n'.join(files['deleted'])
                    logger.warning('Removed files packaged in SPEC file:\n%s', deleted_files)
                else:
                    if self.conf.build_retries == number_retries:
                        raise RebaseHelperError("Build failed, but no issues were found in the build log %s",
                                                build_log)
                self.rebase_spec_file.modify_spec_files_section(files)
                if not self.conf.non_interactive:
                    msg = 'Do you want rebase-helper to try to build the packages one more time'
                    if not ConsoleHelper.get_message(msg):
                        raise KeyboardInterrupt
                else:
                    logger.warning('Some patches were not successfully applied')
                # build just failed, otherwise we would break out of the while loop
                logger.debug('Number of retries is %s', self.conf.build_retries)
                if os.path.exists(os.path.join(results_dir, 'RPM')):
                    shutil.rmtree(os.path.join(results_dir, 'RPM'))
                if os.path.exists(os.path.join(results_dir, 'SRPM')):
                    shutil.rmtree(os.path.join(results_dir, 'SRPM'))
                number_retries += 1
        if self.conf.build_retries == number_retries:
            raise RebaseHelperError('Building package failed with unknown reason. '
                                    'Check all available log files.')
    return True

def _git_rebase(cls):
    """Function performs git rebase between old and new sources"""
    # in old_sources do:
    # 1) git remote add new_sources <path_to_new_sources>
    # 2) git fetch new_sources
    # 3) git rebase -i --onto new_sources/master <oldest_commit_old_sources> <latest_commit_old_sources>
    if not cls.cont:
        logger.info('Git-rebase operation to %s is ongoing...', os.path.basename(cls.new_sources))
        upstream = 'new_upstream'
        init_hash, last_hash = cls._prepare_git(upstream)
        ret_code = cls.git_helper.command_rebase(parameters='--onto', upstream_name=upstream,
                                                 first_hash=init_hash, last_hash=last_hash)
    else:
        logger.info('Git-rebase operation continues...')
        ret_code = cls.git_helper.command_rebase(parameters='--skip')
    cls._get_git_helper_data()
    logger.debug(cls.output_data)
    patch_dictionary = {}
    modified_patches = []
    deleted_patches = []
    unapplied_patches = []
    while True:
        log = cls.git_helper.command_log(parameters='--pretty=oneline')
        for patch_name in cls.git_helper.get_automerged_patches(cls.output_data):
            index = [i for i, l in enumerate(log) if l.endswith(patch_name)]
            if index:
                commit = GitHelper.get_commit_hash_log(log, number=index[0])
                base_name = os.path.join(cls.kwargs['results_dir'], patch_name)
                cls.git_helper.command_diff('{}~1'.format(commit), commit, output_file=base_name)
                modified_patches.append(base_name)
        if int(ret_code) != 0:
            if not cls.non_interactive:
                patch_name = cls.git_helper.get_unapplied_patch(cls.output_data)
                logger.info("Git has problems with rebasing patch %s", patch_name)
                cls.git_helper.command_mergetool()
            else:
                # Take the patch which failed from the .git/rebase-apply/next file
                try:
                    with open(os.path.join(cls.old_sources, '.git', 'rebase-apply', 'next')) as f:
                        number = '\n'.join(f.readlines())
                except IOError:
                    raise RuntimeError("Git rebase failed with unknown reason. Please check log file")
                # Get the patch which failed
                unapplied_patches.append(cls.patches[int(number) - 1].get_patch_name())
                ret_code = cls.git_helper.command_rebase('--skip')
                cls._get_git_helper_data()
                continue
            modified_files = cls.git_helper.command_diff_status()
            cls.git_helper.command_add_files(parameters=modified_files)
            base_name = os.path.join(cls.kwargs['results_dir'], patch_name)
            cls.git_helper.command_diff('HEAD', output_file=base_name)
            with open(base_name, "r") as f:
                del_patches = f.readlines()
            if not del_patches:
                deleted_patches.append(base_name)
            else:
                logger.info('Following files were modified: %s', ','.join(modified_files))
                cls.git_helper.command_commit(message=patch_name)
                cls.git_helper.command_diff('HEAD~1', output_file=base_name)
                modified_patches.append(base_name)
            if not cls.non_interactive:
                if not ConsoleHelper.get_message('Do you want to continue with another patch'):
                    raise KeyboardInterrupt
            ret_code = cls.git_helper.command_rebase('--skip')
            cls._get_git_helper_data()
        else:
            break
    deleted_patches = cls._update_deleted_patches(deleted_patches)
    if deleted_patches:
        patch_dictionary['deleted'] = deleted_patches
    if modified_patches:
        patch_dictionary['modified'] = modified_patches
    if unapplied_patches:
        patch_dictionary['unapplied'] = unapplied_patches
    # TODO: correct settings for merge tool in ~/.gitconfig
    # currently meld is not started
    return patch_dictionary

def build_packages(self):
    """Function calls build class for building packages"""
    if self.conf.buildtool == 'fedpkg' and not koji_builder:
        print('Importing module koji failed. Switching to mockbuild.')
        self.conf.buildtool = 'mock'
    try:
        builder = Builder(self.conf.buildtool)
    except NotImplementedError as ni_e:
        raise RebaseHelperError('%s. Supported build tools are %s' % (six.text_type(ni_e),
                                                                      Builder.get_supported_tools()))
    for version in ['old', 'new']:
        spec_object = self.spec_file if version == 'old' else self.rebase_spec_file
        build_dict = {}
        task_id = None
        if self.conf.build_tasks is None:
            build_dict['name'] = spec_object.get_package_name()
            build_dict['version'] = spec_object.get_version()
            patches = [x.get_path() for x in spec_object.get_patches()]
            spec = spec_object.get_path()
            sources = spec_object.get_sources()
            logger.info('Building packages for %s version %s',
                        spec_object.get_package_name(),
                        spec_object.get_version())
        else:
            if version == 'old':
                task_id = self.conf.build_tasks.split(',')[0]
            else:
                task_id = self.conf.build_tasks.split(',')[1]
        results_dir = os.path.join(self.results_dir, version)
        build_dict['builds_nowait'] = self.conf.builds_nowait
        build_dict['build_tasks'] = self.conf.build_tasks
        files = {}
        number_retries = 0
        while self.conf.build_retries != number_retries:
            try:
                if self.conf.build_tasks is None:
                    build_dict.update(builder.build(spec, sources, patches, results_dir, **build_dict))
                    if not self.conf.builds_nowait:
                        if self.conf.buildtool == 'fedpkg':
                            while True:
                                kh = KojiHelper()
                                build_dict['rpm'], build_dict['logs'] = kh.get_koji_tasks(
                                    build_dict['koji_task_id'], results_dir)
                                if build_dict['rpm']:
                                    break
                else:
                    if self.conf.build_tasks:
                        if self.conf.buildtool == 'fedpkg':
                            kh = KojiHelper()
                            try:
                                build_dict['rpm'], build_dict['logs'] = kh.get_koji_tasks(task_id, results_dir)
                                OutputLogger.set_build_data(version, build_dict)
                                if not build_dict['rpm']:
                                    return False
                            except TypeError:
                                logger.info('Koji tasks are not finished yet. Try again later')
                                return False
                        elif self.conf.buildtool == 'copr':
                            copr_helper = CoprHelper()
                            client = copr_helper.get_client()
                            build_id = int(task_id)
                            status = copr_helper.get_build_status(client, build_id)
                            if status in ['importing', 'pending', 'starting', 'running']:
                                logger.info('Copr build is not finished yet. Try again later')
                                return False
                            else:
                                build_dict['rpm'], build_dict['logs'] = copr_helper.download_build(
                                    client, build_id, results_dir)
                                if status not in ['succeeded', 'skipped']:
                                    logger.info('Copr build {} did not complete successfully'.format(build_id))
                                    return False
                # Build finished properly. Break out of the while loop.
                OutputLogger.set_build_data(version, build_dict)
                break
            except SourcePackageBuildError:
                # always fail for original version
                if version == 'old':
                    raise RebaseHelperError('Creating old SRPM package failed.')
                logger.error('Building source package failed.')
                # TODO: implement log analyzer for SRPMs and add the checks here!!!
                raise
            except BinaryPackageBuildError:
                # always fail for original version
                rpm_dir = os.path.join(results_dir, 'RPM')
                build_dict.update(builder.get_logs())
                OutputLogger.set_build_data(version, build_dict)
                build_log = 'build.log'
                build_log_path = os.path.join(rpm_dir, build_log)
                if version == 'old':
                    raise RebaseHelperError('Building old RPM package failed. Check log %s', build_log_path)
                logger.error('Building binary packages failed.')
                msg = 'Building package failed'
                try:
                    files = BuildLogAnalyzer.parse_log(rpm_dir, build_log)
                except BuildLogAnalyzerMissingError:
                    raise RebaseHelperError('Build log %s does not exist', build_log_path)
                except BuildLogAnalyzerMakeError:
                    raise RebaseHelperError('%s during build. Check log %s', msg, build_log_path)
                except BuildLogAnalyzerPatchError:
                    raise RebaseHelperError('%s during patching. Check log %s', msg, build_log_path)
                except RuntimeError:
                    if self.conf.build_retries == number_retries:
                        raise RebaseHelperError('%s with unknown reason. Check log %s', msg, build_log_path)
                if 'missing' in files:
                    missing_files = '\n'.join(files['missing'])
                    logger.info('Files not packaged in the SPEC file:\n%s', missing_files)
                elif 'deleted' in files:
                    deleted_files = '\n'.join(files['deleted'])
                    logger.warning('Removed files packaged in SPEC file:\n%s', deleted_files)
                else:
                    if self.conf.build_retries == number_retries:
                        raise RebaseHelperError("Build failed, but no issues were found in the build log %s",
                                                build_log)
                self.rebase_spec_file.modify_spec_files_section(files)
                if not self.conf.non_interactive:
                    msg = 'Do you want rebase-helper to try to build the packages one more time'
                    if not ConsoleHelper.get_message(msg):
                        raise KeyboardInterrupt
                else:
                    logger.warning('Some patches were not successfully applied')
                # build just failed, otherwise we would break out of the while loop
                logger.debug('Number of retries is %s', self.conf.build_retries)
                if os.path.exists(os.path.join(results_dir, 'RPM')):
                    shutil.rmtree(os.path.join(results_dir, 'RPM'))
                if os.path.exists(os.path.join(results_dir, 'SRPM')):
                    shutil.rmtree(os.path.join(results_dir, 'SRPM'))
                number_retries += 1
        if self.conf.build_retries == number_retries:
            raise RebaseHelperError('Building package failed with unknown reason. '
                                    'Check all available log files.')
    return True