def extract_archive(self, path=None):
    """
    Extracts the archive into the given path

    :param path: Path where to extract the archive to.
    :return:
    """
    if path is None:
        raise TypeError("Expected argument 'path' (pos 1) is missing")
    logger.verbose("Extracting '%s' into '%s'", self._filename, path)

    # lzma.LZMAError only exists in the lzma module; older pyliblzma
    # exposes lzma.error instead
    try:
        LZMAError = lzma.LZMAError
    except AttributeError:
        LZMAError = lzma.error

    try:
        archive = self._archive_type.open(self._filename)
    except (tarfile.ReadError, LZMAError) as e:
        raise IOError(six.text_type(e))

    self._archive_type.extract(archive, self._filename, path)

    try:
        archive.close()
    except AttributeError:
        # pseudo archive types don't return real file-like object
        pass
def build(cls, spec, results_dir, srpm, **kwargs):
    """
    Builds the RPMs using rpmbuild

    :param spec: SpecFile object
    :param results_dir: absolute path to DIR where results should be stored
    :param srpm: absolute path to SRPM
    :return: dict with:
             'rpm' -> list with absolute paths to RPMs
             'logs' -> list with absolute paths to build_logs
    """
    cls.logs = []
    rpm_results_dir = os.path.join(results_dir, "RPM")
    sources = spec.get_sources()
    patches = [p.get_path() for p in spec.get_patches()]
    with RpmbuildTemporaryEnvironment(sources, patches, spec.get_path(),
                                      rpm_results_dir) as tmp_env:
        env = tmp_env.env()
        tmp_dir = tmp_env.path()
        tmp_results_dir = env.get(RpmbuildTemporaryEnvironment.TEMPDIR_RESULTS)
        rpms = cls._build_rpm(srpm, tmp_dir, tmp_results_dir, rpm_results_dir,
                              builder_options=cls.get_builder_options(**kwargs))

    logger.info("Building RPMs finished successfully")

    # RPMs paths in results_dir
    rpms = [os.path.join(rpm_results_dir, os.path.basename(f)) for f in rpms]
    logger.verbose("Successfully built RPMs: '%s'", str(rpms))

    # gather logs
    cls.logs.extend([l for l in PathHelper.find_all_files(rpm_results_dir, '*.log')])
    logger.verbose("logs: '%s'", str(cls.logs))

    return dict(rpm=rpms, logs=cls.logs)
def build(cls, spec, results_dir, **kwargs):
    """
    Build SRPM with chosen SRPM Build Tool

    :param spec: SpecFile object
    :param results_dir: absolute path to DIR where results should be stored
    :return: dict with:
             'srpm' -> absolute path to SRPM
             'logs' -> list with absolute paths to logs
    """
    srpm_results_dir = os.path.join(results_dir, "SRPM")
    sources = spec.get_sources()
    patches = [p.get_path() for p in spec.get_patches()]
    with RpmbuildTemporaryEnvironment(sources, patches, spec.get_path(),
                                      srpm_results_dir) as tmp_env:
        srpm_builder_options = cls.get_srpm_builder_options(**kwargs)

        env = tmp_env.env()
        tmp_dir = tmp_env.path()
        tmp_spec = env.get(RpmbuildTemporaryEnvironment.TEMPDIR_SPEC)
        tmp_results_dir = env.get(RpmbuildTemporaryEnvironment.TEMPDIR_RESULTS)

        srpm = cls._build_srpm(tmp_spec, tmp_dir, tmp_results_dir, srpm_results_dir,
                               srpm_builder_options=srpm_builder_options)

    logger.info("Building SRPM finished successfully")

    # srpm path in results_dir
    srpm = os.path.join(srpm_results_dir, os.path.basename(srpm))
    logger.verbose("Successfully built SRPM: '%s'", str(srpm))

    # gather logs
    logs = [l for l in PathHelper.find_all_files(srpm_results_dir, '*.log')]
    logger.verbose("logs: '%s'", str(logs))

    return dict(srpm=srpm, logs=logs)
def build(cls, spec, results_dir, srpm, **kwargs):
    """
    Builds the RPMs using rpmbuild

    :param spec: SpecFile object
    :param results_dir: absolute path to DIR where results should be stored
    :param srpm: absolute path to SRPM
    :return: dict with:
             'rpm' -> list with absolute paths to RPMs
             'logs' -> list with absolute paths to build_logs
    """
    cls.logs = []
    rpm_results_dir = os.path.join(results_dir, "RPM")
    sources = spec.get_sources()
    patches = [p.get_path() for p in spec.get_patches()]
    with RpmbuildTemporaryEnvironment(sources, patches, spec.get_path(),
                                      rpm_results_dir) as tmp_env:
        env = tmp_env.env()
        tmp_dir = tmp_env.path()
        tmp_results_dir = env.get(RpmbuildTemporaryEnvironment.TEMPDIR_RESULTS)
        rpms = cls._build_rpm(srpm, tmp_dir, tmp_results_dir, rpm_results_dir,
                              builder_options=cls.get_builder_options(**kwargs))

    logger.info("Building RPMs finished successfully")

    # RPMs paths in results_dir
    rpms = [os.path.join(rpm_results_dir, os.path.basename(f)) for f in rpms]
    logger.verbose("Successfully built RPMs: '%s'", str(rpms))

    # gather logs
    cls.logs.extend(l for l in PathHelper.find_all_files(rpm_results_dir, '*.log'))
    logger.verbose("logs: '%s'", str(cls.logs))

    return dict(rpm=rpms, logs=cls.logs)
def build(cls, spec, results_dir, **kwargs):
    """
    Build SRPM with chosen SRPM Build Tool

    :param spec: SpecFile object
    :param results_dir: absolute path to DIR where results should be stored
    :return: dict with:
             'srpm' -> absolute path to SRPM
             'logs' -> list with absolute paths to logs
    """
    srpm_results_dir = os.path.join(results_dir, "SRPM")
    sources = spec.get_sources()
    patches = [p.get_path() for p in spec.get_patches()]
    with MockTemporaryEnvironment(sources, patches, spec.get_path(),
                                  srpm_results_dir) as tmp_env:
        srpm_builder_options = cls.get_srpm_builder_options(**kwargs)

        env = tmp_env.env()
        tmp_dir = tmp_env.path()
        tmp_spec = env.get(MockTemporaryEnvironment.TEMPDIR_SPEC)
        tmp_results_dir = env.get(MockTemporaryEnvironment.TEMPDIR_RESULTS)

        srpm = cls._build_srpm(tmp_spec, tmp_dir, tmp_results_dir, srpm_results_dir,
                               srpm_builder_options=srpm_builder_options)

    logger.info("Building SRPM finished successfully")

    # srpm path in results_dir
    srpm = os.path.join(srpm_results_dir, os.path.basename(srpm))
    logger.verbose("Successfully built SRPM: '%s'", str(srpm))

    # gather logs
    logs = [l for l in PathHelper.find_all_files(srpm_results_dir, '*.log')]
    logger.verbose("logs: '%s'", str(logs))

    return dict(srpm=srpm, logs=logs)
def _prepare_spec_objects(self):
    """
    Prepare spec files and initialize objects

    :return:
    """
    self.rebase_spec_file_path = get_rebase_name(self.rebased_sources_dir, self.spec_file_path)
    self.spec_file = SpecFile(self.spec_file_path,
                              self.conf.changelog_entry,
                              self.execution_dir,
                              download=not self.conf.not_download_sources)
    # Check whether test suite is enabled at build time
    if not self.spec_file.is_test_suite_enabled():
        results_store.set_info_text('WARNING', 'Test suite is not enabled at build time.')
    # create an object representing the rebased SPEC file
    self.rebase_spec_file = self.spec_file.copy(self.rebase_spec_file_path)

    if not self.conf.sources:
        self.conf.sources = versioneers_runner.run(self.conf.versioneer,
                                                   self.spec_file.get_package_name(),
                                                   self.spec_file.category,
                                                   self.conf.versioneer_blacklist)
        if self.conf.sources:
            logger.info("Determined latest upstream version '%s'", self.conf.sources)
        else:
            raise RebaseHelperError('Could not determine latest upstream version '
                                    'and no SOURCES argument specified!')

    # Prepare rebased_sources_dir
    self.rebased_repo = self._prepare_rebased_repository(self.spec_file.patches,
                                                         self.execution_dir,
                                                         self.rebased_sources_dir)

    # check if argument passed as new source is a file or just a version
    if [True for ext in Archive.get_supported_archives() if self.conf.sources.endswith(ext)]:
        logger.verbose("argument passed as a new source is a file")
        self.rebase_spec_file.set_version_using_archive(self.conf.sources)
    else:
        logger.verbose("argument passed as a new source is a version")
        version, extra_version, separator = SpecFile.split_version_string(self.conf.sources)
        self.rebase_spec_file.set_version(version)
        self.rebase_spec_file.set_extra_version_separator(separator)
        self.rebase_spec_file.set_extra_version(extra_version)

    if not self.conf.skip_version_check and parse_version(self.rebase_spec_file.get_version()) \
            <= parse_version(self.spec_file.get_version()):
        raise RebaseHelperError("Current version is equal to or newer than the requested version, nothing to do.")

    # run spec hooks
    spec_hooks_runner.run_spec_hooks(self.spec_file, self.rebase_spec_file, **self.kwargs)

    # spec file object has been sanitized, downloading can proceed
    for spec_file in [self.spec_file, self.rebase_spec_file]:
        if spec_file.download:
            spec_file.download_remote_sources()
            # parse spec again with sources downloaded to properly expand %prep section
            spec_file._update_data()  # pylint: disable=protected-access
def _delete_workspace_dir(self):
    """
    Deletes the workspace directory and logs a message

    :return:
    """
    logger.verbose("Removing the workspace directory '%s'", self.workspace_dir)
    if os.path.isdir(self.workspace_dir):
        shutil.rmtree(self.workspace_dir)
def _prepare_spec_objects(self):
    """
    Prepare spec files and initialize objects

    :return:
    """
    self.rebase_spec_file_path = get_rebase_name(self.rebased_sources_dir, self.spec_file_path)
    self.spec_file = SpecFile(self.spec_file_path, self.execution_dir)
    # Check whether test suite is enabled at build time
    if not self.spec_file.is_test_suite_enabled():
        results_store.set_info_text('WARNING', 'Test suite is not enabled at build time.')
    # create an object representing the rebased SPEC file
    self.rebase_spec_file = self.spec_file.copy(self.rebase_spec_file_path)

    if not self.conf.sources:
        self.conf.sources = plugin_manager.versioneers.run(self.conf.versioneer,
                                                           self.spec_file.get_package_name(),
                                                           self.spec_file.category,
                                                           self.conf.versioneer_blacklist)
        if self.conf.sources:
            logger.info("Determined latest upstream version '%s'", self.conf.sources)
        else:
            raise RebaseHelperError('Could not determine latest upstream version '
                                    'and no SOURCES argument specified!')

    # Prepare rebased_sources_dir
    self.rebased_repo = self._prepare_rebased_repository(self.spec_file.patches,
                                                         self.execution_dir,
                                                         self.rebased_sources_dir)

    # check if argument passed as new source is a file or just a version
    if [True for ext in Archive.get_supported_archives() if self.conf.sources.endswith(ext)]:
        logger.verbose("argument passed as a new source is a file")
        self.rebase_spec_file.set_version_using_archive(self.conf.sources)
    else:
        logger.verbose("argument passed as a new source is a version")
        version, extra_version, separator = SpecFile.split_version_string(self.conf.sources)
        self.rebase_spec_file.set_version(version)
        self.rebase_spec_file.set_extra_version_separator(separator)
        self.rebase_spec_file.set_extra_version(extra_version)

    if not self.conf.skip_version_check and parse_version(self.rebase_spec_file.get_version()) \
            <= parse_version(self.spec_file.get_version()):
        raise RebaseHelperError("Current version is equal to or newer than the requested version, nothing to do.")

    self.rebase_spec_file.update_changelog(self.conf.changelog_entry)

    # run spec hooks
    plugin_manager.spec_hooks.run(self.spec_file, self.rebase_spec_file, **self.kwargs)

    # spec file object has been sanitized, downloading can proceed
    if not self.conf.not_download_sources:
        for spec_file in [self.spec_file, self.rebase_spec_file]:
            spec_file.download_remote_sources()
            # parse spec again with sources downloaded to properly expand %prep section
            spec_file._update_data()  # pylint: disable=protected-access
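# The version guard in _prepare_spec_objects() relies on PEP 440-style
# comparison rather than plain string comparison. A minimal standalone
# demonstration, assuming parse_version is pkg_resources.parse_version
# (setuptools); the import is an assumption, not shown in these snippets.
from pkg_resources import parse_version

assert parse_version('1.10.0') > parse_version('1.9.2')  # numeric, not lexical
assert not parse_version('2.0') > parse_version('2.0')   # equal version: nothing to do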
def __init__(self, cli_conf, execution_dir, results_dir, debug_log_file):
    """
    Initialize the application

    :param cli_conf: CLI object with configuration gathered from commandline
    :return:
    """
    results_store.clear()

    self.conf = cli_conf
    self.execution_dir = execution_dir
    self.rebased_sources_dir = os.path.join(results_dir, 'rebased-sources')

    self.debug_log_file = debug_log_file

    self.kwargs.update(self.conf.config)
    # Temporary workspace for Builder, checks, ...
    self.kwargs['workspace_dir'] = self.workspace_dir = os.path.join(self.execution_dir,
                                                                     constants.WORKSPACE_DIR)
    # Directory where results should be put
    self.kwargs['results_dir'] = self.results_dir = results_dir
    # Directory containing only those files which are relevant for the new rebased version
    self.kwargs['rebased_sources_dir'] = self.rebased_sources_dir

    logger.verbose("Rebase-helper version: %s", VERSION)

    if self.conf.build_tasks is None:
        # check the workspace dir
        if not self.conf.cont:
            self._check_workspace_dir()

        self._get_spec_file()
        self._prepare_spec_objects()

        # verify all sources for the new version are present
        missing_sources = [os.path.basename(s) for s in self.rebase_spec_file.sources
                           if not os.path.isfile(os.path.basename(s))]
        if missing_sources:
            raise RebaseHelperError('The following sources are missing: {}'.format(','.join(missing_sources)))

        if self.conf.update_sources:
            sources = [os.path.basename(s) for s in self.spec_file.sources]
            rebased_sources = [os.path.basename(s) for s in self.rebase_spec_file.sources]
            uploaded = LookasideCacheHelper.update_sources('fedpkg', self.rebased_sources_dir,
                                                           self.rebase_spec_file.get_package_name(),
                                                           sources, rebased_sources,
                                                           upload=not self.conf.skip_upload)
            self._update_gitignore(uploaded, self.rebased_sources_dir)

        # TODO: Remove the value from kwargs and use only CLI attribute!
        self.kwargs['continue'] = self.conf.cont
        self._initialize_data()

    if self.conf.cont or self.conf.build_only:
        self._delete_old_builds()
def run_check(cls, results_dir, **kwargs):
    """Compares old and new RPMs using abipkgdiff"""
    # Check if ABI changes occurred
    cls.abi_changes = None
    cls.results_dir = os.path.join(results_dir, cls.name)
    os.makedirs(cls.results_dir)
    debug_old, rest_pkgs_old = cls._get_packages_for_abipkgdiff(results_store.get_build('old'))
    debug_new, rest_pkgs_new = cls._get_packages_for_abipkgdiff(results_store.get_build('new'))
    cmd = [cls.CMD]
    reports = {}
    for pkg in rest_pkgs_old:
        command = list(cmd)
        debug = cls._find_debuginfo(debug_old, pkg)
        if debug:
            command.append('--d1')
            command.append(debug)
        old_name = RpmHelper.split_nevra(os.path.basename(pkg))['name']
        find = [x for x in rest_pkgs_new
                if RpmHelper.split_nevra(os.path.basename(x))['name'] == old_name]
        if not find:
            logger.warning('New version of package %s was not found!', old_name)
            continue
        new_pkg = find[0]
        debug = cls._find_debuginfo(debug_new, new_pkg)
        if debug:
            command.append('--d2')
            command.append(debug)
        command.append(pkg)
        command.append(new_pkg)
        logger.verbose('Package name for ABI comparison %s', old_name)
        output = os.path.join(cls.results_dir, old_name + '.txt')
        try:
            ret_code = ProcessHelper.run_subprocess(command, output_file=output)
        except OSError:
            raise CheckerNotFoundError("Checker '{}' was not found or installed.".format(cls.name))

        if int(ret_code) & cls.ABIDIFF_ERROR and int(ret_code) & cls.ABIDIFF_USAGE_ERROR:
            raise RebaseHelperError('Execution of {} failed.\nCommand line is: {}'.format(cls.CMD, cmd))
        reports[old_name] = int(ret_code)

    return dict(packages=cls.parse_abi_logs(reports),
                abi_changes=cls.abi_changes,
                path=cls.get_checker_output_dir_short())
def download_file(url, destination_path, blocksize=8192):
    """Downloads a file from HTTP, HTTPS or FTP URL.

    Args:
        url (str): URL to be downloaded.
        destination_path (str): Path to where the downloaded file will be stored.
        blocksize (int): Block size in bytes.

    """
    r = DownloadHelper.request(url, stream=True)
    if r is None:
        raise DownloadError("An unexpected error occurred during the download.")

    if not 200 <= r.status_code < 300:
        raise DownloadError(r.reason)

    file_size = int(r.headers.get('content-length', -1))

    # file exists, check the size
    if os.path.exists(destination_path):
        if file_size < 0 or file_size != os.path.getsize(destination_path):
            logger.verbose("The destination file '%s' exists, but sizes don't match! Removing it.",
                           destination_path)
            os.remove(destination_path)
        else:
            logger.verbose("The destination file '%s' exists, and the size is correct! Skipping download.",
                           destination_path)
            return

    try:
        with open(destination_path, 'wb') as local_file:
            logger.info('Downloading file from URL %s', url)
            download_start = time.time()
            downloaded = 0

            # report progress
            DownloadHelper.progress(file_size, downloaded, download_start)

            # do the actual download
            for chunk in r.iter_content(chunk_size=blocksize):
                downloaded += len(chunk)
                local_file.write(chunk)
                # report progress
                DownloadHelper.progress(file_size, downloaded, download_start)

            sys.stdout.write('\n')
            sys.stdout.flush()
    except KeyboardInterrupt as e:
        os.remove(destination_path)
        raise e
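# Hypothetical call site for download_file() above; both import paths are
# assumptions based on the project layout, not shown in these snippets.
from rebasehelper.helpers.download_helper import DownloadHelper
from rebasehelper.exceptions import DownloadError

try:
    DownloadHelper.download_file('https://example.com/foo-1.0.tar.gz',
                                 '/tmp/foo-1.0.tar.gz')
except DownloadError as e:
    print('download failed:', e)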
def patch(self, old_dir, new_dir, rest_sources, patches, **kwargs):
    """
    Apply patches and generate rebased patches if needed

    :param old_dir: path to dir with old patches
    :param new_dir: path to dir with new patches
    :param rest_sources: remaining (non-primary) source files
    :param patches: old patches
    :param kwargs: --
    :return:
    """
    logger.verbose("Patching source by patch tool %s", self._patch_tool_name)
    return self._tool.run_patch(old_dir, new_dir, rest_sources, patches, **kwargs)
def parse_spec(cls, path, flags=None):
    with open(path, 'rb') as orig:
        with tempfile.NamedTemporaryFile() as tmp:
            # remove BuildArch to workaround rpm bug
            tmp.write(b''.join([l for l in orig.readlines() if not l.startswith(b'BuildArch')]))
            tmp.flush()
            with ConsoleHelper.Capturer(stderr=True) as capturer:
                result = rpm.spec(tmp.name, flags) if flags is not None else rpm.spec(tmp.name)
            for line in capturer.stderr.split('\n'):
                if line:
                    logger.verbose('rpm: %s', line)
            return result
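# Sketch of invoking parse_spec() above; the SpecFile import path and the
# spec file name are assumptions, while the flags and header access come
# from the rpm Python bindings.
import rpm
from rebasehelper.specfile import SpecFile  # assumed import path

spec = SpecFile.parse_spec('example.spec',
                           flags=rpm.RPMSPEC_ANYARCH | rpm.RPMSPEC_FORCE)
print(spec.sourceHeader[rpm.RPMTAG_NAME])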
def build(cls, spec, results_dir, srpm, **kwargs):
    """
    Builds the RPMs using mock

    :param spec: SpecFile object
    :param results_dir: absolute path to directory where results will be stored
    :param srpm: absolute path to SRPM
    :param root: mock root used for building
    :param arch: architecture to build the RPM for
    :return: dict with:
             'rpm' -> list with absolute paths to RPMs
             'logs' -> list with absolute paths to logs
    """
    cls.logs = []
    rpm_results_dir = os.path.join(results_dir, "RPM")
    sources = spec.get_sources()
    patches = [p.get_path() for p in spec.get_patches()]
    with MockTemporaryEnvironment(sources, patches, spec.get_path(),
                                  rpm_results_dir) as tmp_env:
        env = tmp_env.env()
        tmp_results_dir = env.get(MockTemporaryEnvironment.TEMPDIR_RESULTS)
        rpms = cls._build_rpm(srpm, tmp_results_dir, rpm_results_dir,
                              builder_options=cls.get_builder_options(**kwargs))
        # remove SRPM - side product of building RPM
        tmp_srpm = PathHelper.find_first_file(tmp_results_dir, "*.src.rpm")
        if tmp_srpm is not None:
            os.unlink(tmp_srpm)

    logger.info("Building RPMs finished successfully")

    rpms = [os.path.join(rpm_results_dir, os.path.basename(f)) for f in rpms]
    logger.verbose("Successfully built RPMs: '%s'", str(rpms))

    # gather logs
    cls.logs.extend([l for l in PathHelper.find_all_files(rpm_results_dir, '*.log')])
    logger.verbose("logs: '%s'", str(cls.logs))

    return dict(rpm=rpms, logs=cls.logs)
def _analyze_logs(cls, output, results_dict):
    removed_things = ['.build-id', '.dwz', 'PROVIDE', 'REQUIRES']
    for line in output:
        if [x for x in removed_things if x in line]:
            continue
        fields = line.strip().split()
        logger.verbose(fields)
        if line.startswith('removed'):
            results_dict['removed'].append(fields[1])
            continue
        if line.startswith('added'):
            results_dict['added'].append(fields[1])
            continue
        if re.match(r'(S..|..5)........', fields[0]):
            # size or checksum changed
            results_dict['changed'].append(fields[1])
    return results_dict
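# Self-contained sketch of the line format _analyze_logs() expects, using
# fabricated rpmdiff-style lines (real rpmdiff output may differ in detail).
import re

sample_output = [
    'removed     REQUIRES libfoo.so.1',   # filtered out: contains 'REQUIRES'
    'removed     /usr/share/doc/foo/NEWS',
    'added       /usr/bin/foo-new',
    'S.5........ /usr/bin/foo',           # size (S) and checksum (5) changed
]
results = {'removed': [], 'added': [], 'changed': []}
for line in sample_output:
    if any(x in line for x in ('.build-id', '.dwz', 'PROVIDE', 'REQUIRES')):
        continue
    fields = line.strip().split()
    if line.startswith('removed'):
        results['removed'].append(fields[1])
    elif line.startswith('added'):
        results['added'].append(fields[1])
    elif re.match(r'(S..|..5)........', fields[0]):
        results['changed'].append(fields[1])
print(results)
# {'removed': ['/usr/share/doc/foo/NEWS'], 'added': ['/usr/bin/foo-new'],
#  'changed': ['/usr/bin/foo']}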
def run_check(cls, results_dir, **kwargs):
    """Compares old and new RPMs using abipkgdiff"""
    # Check if ABI changes occurred
    cls.abi_changes = False
    cls.results_dir = os.path.join(results_dir, cls.name)
    os.makedirs(cls.results_dir)
    debug_old, rest_pkgs_old = cls._get_packages_for_abipkgdiff(results_store.get_build('old'))
    debug_new, rest_pkgs_new = cls._get_packages_for_abipkgdiff(results_store.get_build('new'))
    cmd = [cls.CMD]
    reports = {}
    for pkg in rest_pkgs_old:
        command = list(cmd)
        debug = cls._find_debuginfo(debug_old, pkg)
        if debug:
            command.append('--d1')
            command.append(debug)
        old_name = RpmHelper.split_nevra(os.path.basename(pkg))['name']
        find = [x for x in rest_pkgs_new
                if RpmHelper.split_nevra(os.path.basename(x))['name'] == old_name]
        if not find:
            logger.warning('New version of package %s was not found!', old_name)
            continue
        new_pkg = find[0]
        debug = cls._find_debuginfo(debug_new, new_pkg)
        if debug:
            command.append('--d2')
            command.append(debug)
        command.append(pkg)
        command.append(new_pkg)
        logger.verbose('Package name for ABI comparison %s', old_name)
        output = os.path.join(cls.results_dir, old_name + '.txt')
        try:
            ret_code = ProcessHelper.run_subprocess(command, output_file=output)
        except OSError:
            raise CheckerNotFoundError("Checker '{}' was not found or installed.".format(cls.name))

        if int(ret_code) & cls.ABIDIFF_ERROR and int(ret_code) & cls.ABIDIFF_USAGE_ERROR:
            raise RebaseHelperError('Execution of {} failed.\nCommand line is: {}'.format(cls.CMD, cmd))
        reports[old_name] = int(ret_code)

    return dict(packages=cls.parse_abi_logs(reports),
                abi_changes=cls.abi_changes,
                path=cls.get_checker_output_dir_short())
def apply_patch(cls, repo, patch_object):
    """
    Applies a patch to the old sources.

    It first tries to apply the patch with 'git am' and, if that fails,
    falls back to 'git apply'.
    """
    logger.verbose('Applying patch with git-am')

    patch_name = patch_object.get_path()
    patch_strip = patch_object.get_strip()

    try:
        repo.git.am(patch_name)
        commit = repo.head.commit
    except git.GitCommandError:
        logger.verbose('Applying patch with git-am failed.')
        try:
            repo.git.am(abort=True)
        except git.GitCommandError:
            pass
        logger.verbose('Applying patch with git-apply')
        try:
            repo.git.apply(patch_name, p=patch_strip)
        except git.GitCommandError:
            repo.git.apply(patch_name, p=patch_strip, reject=True, whitespace='fix')
        repo.git.add(all=True)
        commit = repo.index.commit(cls.decorate_patch_name(os.path.basename(patch_name)), skip_hooks=True)

    repo.git.commit(amend=True, m=cls.insert_patch_name(commit.message, os.path.basename(patch_name)))
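# Standalone sketch of the am-then-apply fallback used above, driven through
# GitPython; the repository and patch paths are hypothetical.
import git

repo = git.Repo('/path/to/old_sources')
try:
    repo.git.am('/path/to/fix-build.patch')          # keeps authorship metadata
except git.GitCommandError:
    repo.git.am(abort=True)                          # clean up the failed git-am
    repo.git.apply('/path/to/fix-build.patch', p=1)  # plain apply, strip level 1
    repo.git.add(all=True)
    repo.index.commit('Apply fix-build.patch')       # commit the applied changes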
def extract_archive(self, path=None):
    """
    Extracts the archive into the given path

    :param path: Path where to extract the archive to.
    :return:
    """
    if path is None:
        raise TypeError("Expected argument 'path' (pos 1) is missing")
    logger.verbose("Extracting '%s' into '%s'", self._filename, path)

    try:
        archive = self._archive_type.open(self._filename)
    except (EOFError, tarfile.ReadError, lzma.LZMAError) as e:
        raise IOError(str(e))

    self._archive_type.extract(archive, self._filename, path)

    try:
        archive.close()
    except AttributeError:
        # pseudo archive types don't return real file-like object
        pass
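# Hypothetical usage of extract_archive() above; the Archive import path is
# an assumption based on the rebase-helper layout.
from rebasehelper.archive import Archive

archive = Archive('foo-1.0.tar.gz')
archive.extract_archive(path='/tmp/foo-sources')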
def build(cls, spec, results_dir, srpm, **kwargs):
    """
    Builds the RPMs using mock

    :param spec: SpecFile object
    :param results_dir: absolute path to directory where results will be stored
    :param srpm: absolute path to SRPM
    :param root: mock root used for building
    :param arch: architecture to build the RPM for
    :return: dict with:
             'rpm' -> list with absolute paths to RPMs
             'logs' -> list with absolute paths to logs
    """
    cls.logs = []
    rpm_results_dir = os.path.join(results_dir, "RPM")
    sources = spec.get_sources()
    patches = [p.get_path() for p in spec.get_patches()]
    with MockTemporaryEnvironment(sources, patches, spec.get_path(),
                                  rpm_results_dir) as tmp_env:
        env = tmp_env.env()
        tmp_results_dir = env.get(MockTemporaryEnvironment.TEMPDIR_RESULTS)
        rpms = cls._build_rpm(srpm, tmp_results_dir, rpm_results_dir,
                              builder_options=cls.get_builder_options(**kwargs))
        # remove SRPM - side product of building RPM
        tmp_srpm = PathHelper.find_first_file(tmp_results_dir, "*.src.rpm")
        if tmp_srpm is not None:
            os.unlink(tmp_srpm)

    logger.info("Building RPMs finished successfully")

    rpms = [os.path.join(rpm_results_dir, os.path.basename(f)) for f in rpms]
    logger.verbose("Successfully built RPMs: '%s'", str(rpms))

    # gather logs
    cls.logs.extend(l for l in PathHelper.find_all_files(rpm_results_dir, '*.log'))
    logger.verbose("logs: '%s'", str(cls.logs))

    return dict(rpm=rpms, logs=cls.logs)
def fill_dictionary(cls, result_dir, old_version=None, new_version=None):
    """
    Parse files.xml and symbols.xml and fill the results dictionary

    :param result_dir: directory where the pkgdiff output files are stored
    :param old_version: old version of package
    :param new_version: new version of package
    :return:
    """
    XML_FILES = ['files.xml', 'symbols.xml']
    if old_version is None:
        old_version = results_store.get_old_build().get('version')
        if old_version == '':
            old_version = cls._get_rpm_info('version', results_store.get_old_build()['rpm'])
    if new_version is None:
        new_version = results_store.get_new_build().get('version')
        if new_version == '':
            new_version = cls._get_rpm_info('version', results_store.get_new_build()['rpm'])

    for tag in cls.CHECKER_TAGS:
        cls.results_dict[tag] = []
    for file_name in [os.path.join(result_dir, x) for x in XML_FILES]:
        logger.verbose('Processing %s file.', file_name)
        try:
            with open(file_name, "r") as f:
                lines = ['<pkgdiff>']
                lines.extend(f.readlines())
                lines.append('</pkgdiff>')
                pkgdiff_tree = ElementTree.fromstringlist(lines)
                for tag in cls.CHECKER_TAGS:
                    for pkgdiff in pkgdiff_tree.findall('.//' + tag):
                        files = [x.strip() for x in pkgdiff.text.strip().split('\n')]
                        files = [x.replace(old_version, '*') for x in files]
                        files = [x.replace(new_version, '*') for x in files]
                        cls.results_dict[tag].extend(files)
        except IOError:
            continue
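# Self-contained illustration of the parsing trick in fill_dictionary():
# pkgdiff writes XML fragments without a single root element, so the lines
# are wrapped in a synthetic <pkgdiff> root before parsing. Sample content
# is fabricated.
from xml.etree import ElementTree

lines = ['<pkgdiff>',
         '<added>\n/usr/bin/foo-1.2\n</added>\n',
         '<removed>\n/usr/bin/foo-1.1\n</removed>\n',
         '</pkgdiff>']
tree = ElementTree.fromstringlist(lines)
for tag in ('added', 'removed'):
    for node in tree.findall('.//' + tag):
        files = [x.strip() for x in node.text.strip().split('\n')]
        # mask the version strings, as the checker does
        files = [x.replace('1.2', '*').replace('1.1', '*') for x in files]
        print(tag, files)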
def _git_rebase(cls):
    """Function performs git rebase between old and new sources"""
    def compare_commits(a, b):
        # compare commit diffs disregarding differences in blob hashes
        attributes = ('a_mode', 'b_mode', 'a_rawpath', 'b_rawpath',
                      'new_file', 'deleted_file', 'raw_rename_from',
                      'raw_rename_to', 'diff', 'change_type', 'score')
        diff1 = a.diff(a.parents[0], create_patch=True)
        diff2 = b.diff(b.parents[0], create_patch=True)
        if len(diff1) != len(diff2):
            return False
        for d1, d2 in zip(diff1, diff2):
            for attr in attributes:
                if getattr(d1, attr) != getattr(d2, attr):
                    return False
        return True
    # in old_sources do:
    # 1) git remote add new_sources <path_to_new_sources>
    # 2) git fetch new_sources
    # 3) git rebase --onto new_sources/master <root_commit_old_sources> <last_commit_old_sources>
    if not cls.cont:
        logger.info('git-rebase operation to %s is ongoing...', os.path.basename(cls.new_sources))
        upstream = 'new_upstream'
        cls.old_repo.create_remote(upstream, url=cls.new_sources).fetch()
        root_commit = cls.old_repo.git.rev_list('HEAD', max_parents=0)
        last_commit = cls.old_repo.commit('HEAD')
        if cls.favor_on_conflict == 'upstream':
            strategy_option = 'ours'
        elif cls.favor_on_conflict == 'downstream':
            strategy_option = 'theirs'
        else:
            strategy_option = False
        try:
            cls.output_data = cls.old_repo.git.rebase(root_commit, last_commit,
                                                      strategy_option=strategy_option,
                                                      onto='{}/master'.format(upstream),
                                                      stdout_as_string=six.PY3)
        except git.GitCommandError as e:
            ret_code = e.status
            cls.output_data = e.stdout
        else:
            ret_code = 0
    else:
        logger.info('git-rebase operation continues...')
        try:
            cls.output_data = cls.old_repo.git.rebase('--continue', stdout_as_string=six.PY3)
        except git.GitCommandError as e:
            ret_code = e.status
            cls.output_data = e.stdout
        else:
            ret_code = 0
    logger.verbose(cls.output_data)
    patch_dictionary = {}
    modified_patches = []
    inapplicable_patches = []
    while ret_code != 0:
        if not cls.old_repo.index.unmerged_blobs() and not cls.old_repo.index.diff(cls.old_repo.commit()):
            # empty commit - conflict has been automatically resolved - skip
            try:
                cls.output_data = cls.old_repo.git.rebase(skip=True, stdout_as_string=six.PY3)
            except git.GitCommandError as e:
                ret_code = e.status
                cls.output_data = e.stdout
                continue
            else:
                break
        try:
            with open(os.path.join(cls.old_sources, '.git', 'rebase-apply', 'next')) as f:
                next_index = int(f.readline())
            with open(os.path.join(cls.old_sources, '.git', 'rebase-apply', 'last')) as f:
                last_index = int(f.readline())
        except (FileNotFoundError, IOError):
            raise RuntimeError('Git rebase failed with unknown reason. Please check log file')
        patch_name = cls.patches[next_index - 1].get_patch_name()
        inapplicable = False
        if cls.non_interactive:
            inapplicable = True
        else:
            logger.info('Failed to auto-merge patch %s', patch_name)
            unmerged = cls.old_repo.index.unmerged_blobs()
            GitHelper.run_mergetool(cls.old_repo)
            if cls.old_repo.index.unmerged_blobs():
                if InputHelper.get_message('There are still unmerged entries. '
                                           'Do you want to skip this patch',
                                           default_yes=False):
                    inapplicable = True
                else:
                    continue
            if not inapplicable:
                # check for unresolved conflicts
                unresolved = []
                for file in unmerged:
                    with open(os.path.join(cls.old_sources, file)) as f:
                        if [l for l in f.readlines() if '<<<<<<<' in l]:
                            unresolved.append(file)
                if unresolved:
                    if InputHelper.get_message('There are still unresolved conflicts. '
                                               'Do you want to skip this patch',
                                               default_yes=False):
                        inapplicable = True
                    else:
                        cls.old_repo.index.reset(paths=unresolved)
                        unresolved.insert(0, '--')
                        cls.old_repo.git.checkout(*unresolved, conflict='diff3')
                        continue
        if inapplicable:
            inapplicable_patches.append(patch_name)
            try:
                cls.output_data = cls.old_repo.git.rebase(skip=True, stdout_as_string=six.PY3)
            except git.GitCommandError as e:
                ret_code = e.status
                cls.output_data = e.stdout
                continue
            else:
                break
        diff = cls.old_repo.index.diff(cls.old_repo.commit())
        if diff:
            modified_patches.append(patch_name)
        if next_index < last_index:
            if not InputHelper.get_message('Do you want to continue with another patch'):
                raise KeyboardInterrupt
        try:
            if diff:
                cls.output_data = cls.old_repo.git.rebase('--continue', stdout_as_string=six.PY3)
            else:
                cls.output_data = cls.old_repo.git.rebase(skip=True, stdout_as_string=six.PY3)
        except git.GitCommandError as e:
            ret_code = e.status
            cls.output_data = e.stdout
        else:
            break
    original_commits = list(cls.old_repo.iter_commits(rev=cls.old_repo.branches.master))
    commits = list(cls.old_repo.iter_commits())
    untouched_patches = []
    deleted_patches = []
    for patch in cls.patches:
        patch_name = patch.get_patch_name()
        original_commit = [c for c in original_commits if cls.extract_patch_name(c.message) == patch_name]
        commit = [c for c in commits if cls.extract_patch_name(c.message) == patch_name]
        if original_commit and commit:
            if patch_name not in modified_patches and compare_commits(original_commit[0], commit[0]):
                untouched_patches.append(patch_name)
            else:
                base_name = os.path.join(cls.kwargs['rebased_sources_dir'], patch_name)
                if commit[0].summary == cls.decorate_patch_name(patch_name):
                    diff = cls.old_repo.git.diff(commit[0].parents[0], commit[0], stdout_as_string=False)
                else:
                    diff = cls.old_repo.git.format_patch(commit[0], '-1', stdout=True,
                                                         no_numbered=True, no_attach=True,
                                                         stdout_as_string=False)
                diff = cls.strip_patch_name(diff, patch_name)
                with open(base_name, 'wb') as f:
                    f.write(diff)
                    f.write(b'\n')
                if patch_name not in modified_patches:
                    modified_patches.append(patch_name)
        elif patch_name not in inapplicable_patches:
            deleted_patches.append(patch_name)
    if deleted_patches:
        patch_dictionary['deleted'] = deleted_patches
    if modified_patches:
        patch_dictionary['modified'] = modified_patches
    if inapplicable_patches:
        patch_dictionary['inapplicable'] = inapplicable_patches
    if untouched_patches:
        patch_dictionary['untouched'] = untouched_patches
    return patch_dictionary
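# Minimal GitPython sketch of the three numbered rebase steps in the comment
# inside _git_rebase() above; repository paths are hypothetical.
import git

repo = git.Repo('/path/to/old_sources')
remote = repo.create_remote('new_upstream', url='/path/to/new_sources')  # 1) add remote
remote.fetch()                                                           # 2) fetch it
root_commit = repo.git.rev_list('HEAD', max_parents=0)                   # root commit of old sources
repo.git.rebase(root_commit, 'HEAD', onto='new_upstream/master')         # 3) rebase --onto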
def __init__(self, cli_conf, execution_dir, results_dir, debug_log_file):
    """
    Initialize the application

    :param cli_conf: CLI object with configuration gathered from commandline
    :return:
    """
    results_store.clear()

    self.conf = cli_conf
    self.execution_dir = execution_dir
    self.rebased_sources_dir = os.path.join(results_dir, 'rebased-sources')

    self.debug_log_file = debug_log_file

    # Temporary workspace for Builder, checks, ...
    self.kwargs['workspace_dir'] = self.workspace_dir = os.path.join(self.execution_dir,
                                                                     constants.WORKSPACE_DIR)
    # Directory where results should be put
    self.kwargs['results_dir'] = self.results_dir = results_dir
    # Directory containing only those files which are relevant for the new rebased version
    self.kwargs['rebased_sources_dir'] = self.rebased_sources_dir
    self.kwargs['non_interactive'] = self.conf.non_interactive
    self.kwargs['favor_on_conflict'] = self.conf.favor_on_conflict
    self.kwargs['changelog_entry'] = self.conf.changelog_entry
    self.kwargs['spec_hook_blacklist'] = self.conf.spec_hook_blacklist
    self.kwargs['build_log_hook_blacklist'] = self.conf.build_log_hook_blacklist
    self.kwargs['force_build_log_hooks'] = self.conf.force_build_log_hooks

    logger.verbose("Rebase-helper version: %s", VERSION)

    if self.conf.build_tasks is None:
        # check the workspace dir
        if not self.conf.cont:
            self._check_workspace_dir()

        self._get_spec_file()
        self._prepare_spec_objects()

        # verify all sources for the new version are present
        missing_sources = [os.path.basename(s) for s in self.rebase_spec_file.sources
                           if not os.path.isfile(os.path.basename(s))]
        if missing_sources:
            raise RebaseHelperError('The following sources are missing: {}'.format(','.join(missing_sources)))

        if self.conf.update_sources:
            sources = [os.path.basename(s) for s in self.spec_file.sources]
            rebased_sources = [os.path.basename(s) for s in self.rebase_spec_file.sources]
            uploaded = LookasideCacheHelper.update_sources('fedpkg', self.rebased_sources_dir,
                                                           self.rebase_spec_file.get_package_name(),
                                                           sources, rebased_sources,
                                                           upload=not self.conf.skip_upload)
            self._update_gitignore(uploaded, self.rebased_sources_dir)

        # TODO: Remove the value from kwargs and use only CLI attribute!
        self.kwargs['continue'] = self.conf.cont
        self._initialize_data()

    if self.conf.cont or self.conf.build_only:
        self._delete_old_builds()