Exemplo n.º 1
0
    def run(
        cls, spec_file: SpecFile, rebase_spec_file: SpecFile, results_dir: str,
        **kwargs: Any
    ) -> Tuple[Dict[str, Union[RemovedFiles, AddedFiles]], bool]:
        """Detect missing/extra files reported in the build log and fix the SPEC file.

        Args:
            spec_file: Original SpecFile (unused here, kept for the plugin interface).
            rebase_spec_file: Rebased SpecFile that gets corrected and saved.
            results_dir: Directory containing the new build results.

        Returns:
            Tuple of the change summary dict and a flag telling whether
            anything was actually added or removed.
        """
        if not results_dir:
            return {}, False
        build_log = os.path.join(results_dir, NEW_BUILD_DIR, 'RPM', 'build.log')

        error_type, files = cls._parse_build_log(build_log,
                                                 rebase_spec_file.get_NVR())

        outcome: Dict[str, Union[AddedFiles, RemovedFiles]] = {}
        if error_type == 'deleted':
            logger.info(
                'The following files are absent in sources but are in the SPEC file, trying to remove them:'
            )
            for path in files:
                logger.info('\t%s', path)
            outcome = cls._correct_deleted_files(rebase_spec_file, files)
        elif error_type == 'missing':
            logger.info(
                'The following files are in the sources but are missing in the SPEC file, trying to add them:'
            )
            for path in files:
                logger.info('\t%s', path)
            outcome = cls._correct_missing_files(rebase_spec_file, files)
        rebase_spec_file.save()
        return outcome, bool({'added', 'removed'} & outcome.keys())
Exemplo n.º 2
0
    def _prepare_spec_objects(self):
        """Create SpecFile objects for the original and the rebased SPEC file.

        Also decides whether the SOURCES argument is an archive file or
        a plain version string and sets the new version accordingly.
        """
        self.rebase_spec_file_path = get_rebase_name(self.results_dir,
                                                     self.spec_file_path)

        self.spec_file = SpecFile(self.spec_file_path,
                                  self.execution_dir,
                                  download=not self.conf.not_download_sources)
        # Warn when the package does not run its test suite during build
        if not self.spec_file.is_test_suite_enabled():
            results_store.set_info_text(
                'WARNING', 'Test suite is not enabled at build time.')
        # The rebased SPEC file starts out as a copy of the original one
        self.rebase_spec_file = self.spec_file.copy(self.rebase_spec_file_path)

        # SOURCES may be either an archive file or a plain version string
        sources_is_archive = any(
            self.conf.sources.endswith(ext)
            for ext in Archive.get_supported_archives())
        if sources_is_archive:
            logger.debug("argument passed as a new source is a file")
            self.rebase_spec_file.set_version_using_archive(self.conf.sources)
        else:
            logger.debug("argument passed as a new source is a version")
            version, extra_version, separator = SpecFile.split_version_string(
                self.conf.sources)
            self.rebase_spec_file.set_version(version)
            self.rebase_spec_file.set_extra_version_separator(separator)
            self.rebase_spec_file.set_extra_version(extra_version)
Exemplo n.º 3
0
    def _prepare_spec_objects(self):
        """Create SpecFile objects for the original and the rebased SPEC file."""
        self.rebase_spec_file_path = get_rebase_name(self.spec_file_path)

        self.spec_file = SpecFile(self.spec_file_path,
                                  self.execution_dir,
                                  download=not self.conf.not_download_sources)
        # Warn when the package does not run its test suite during build
        if not self.spec_file.is_test_suite_enabled():
            OutputLogger.set_info_text('WARNING', 'Test suite is not enabled at build time.')
        # The rebased SPEC file starts out as a copy of the original one
        self.rebase_spec_file = self.spec_file.copy(self.rebase_spec_file_path)

        # SOURCES may be either an archive file or a plain version string
        if any(self.conf.sources.endswith(ext)
               for ext in Archive.get_supported_archives()):
            logger.debug("argument passed as a new source is a file")
            self.rebase_spec_file.set_version_using_archive(self.conf.sources)
        else:
            logger.debug("argument passed as a new source is a version")
            version, extra_version = SpecFile.split_version_string(self.conf.sources)
            self.rebase_spec_file.set_version(version)
            if extra_version:
                self.rebase_spec_file.set_extra_version(extra_version)
Exemplo n.º 4
0
 def test_extract_version_from_archive_name(self):
     """Table-driven check of version extraction from archive file names."""
     cases = [
         # Basic cases
         ('test-1.0.1.tar.gz', '', '1.0.1'),
         ('/home/user/test-1.0.1.tar.gz', '', '1.0.1'),
         ('test-1.0.1.tar.gz',
          'ftp://ftp.test.org/test-%{version}.tar.gz', '1.0.1'),
         ('/home/user/test-1.0.1.tar.gz',
          'ftp://ftp.test.org/test-%{version}.tar.gz', '1.0.1'),
         # Real world cases
         ('cups-1.7.5-source.tar.bz2',
          'http://www.cups.org/software/%{version}/cups-%{version}-source.tar.bz2',
          '1.7.5'),
         ('bind-9.9.5rc2.tar.gz',
          'ftp://ftp.isc.org/isc/bind9/%{VERSION}/bind-%{VERSION}.tar.gz',
          '9.9.5rc2'),
         ('dnsmasq-2.69rc1.tar.xz',
          'http://www.thekelleys.org.uk/dnsmasq/%{?extrapath}%{name}-%{version}%{?extraversion}.tar.xz',
          '2.69rc1'),
         ('log4cplus-1.1.3-rc3.tar.xz',
          'http://downloads.sourceforge.net/%{name}/%{name}-%{version}%{?prever:-%{prever}}.tar.xz',
          '1.1.3-rc3'),
         ('log4cplus-1.1.3_rc3.tar.xz',
          'http://downloads.sourceforge.net/%{name}/%{name}-%{version}%{?prever:_%{prever}}.tar.xz',
          '1.1.3_rc3'),
         ('libsigc++-2.10.0.tar.xz',
          'http://download.gnome.org/sources/libsigc++/%{release_version}/libsigc++-%{version}.tar.xz',
          '2.10.0'),
     ]
     for archive, source_name, expected in cases:
         assert SpecFile.extract_version_from_archive_name(
             archive, source_name) == expected
Exemplo n.º 5
0
    def _prepare_spec_objects(self):
        """Prepare SPEC file objects and related state for the rebase.

        Creates the original and rebased SpecFile objects, determines the
        upstream version (via versioneers if SOURCES was not given), prepares
        the rebased-sources repository, sets the new version on the rebased
        SPEC file, runs spec hooks and finally downloads remote sources.
        The order of these steps matters: hooks must run on a fully
        versioned SPEC file before sources are downloaded.

        :raises RebaseHelperError: if no upstream version can be determined,
            or if the requested version is not newer than the current one.
        :return: None; results are stored in instance attributes.
        """
        self.rebase_spec_file_path = get_rebase_name(self.rebased_sources_dir, self.spec_file_path)

        self.spec_file = SpecFile(self.spec_file_path,
                                  self.conf.changelog_entry,
                                  self.execution_dir,
                                  download=not self.conf.not_download_sources)
        # Check whether test suite is enabled at build time
        if not self.spec_file.is_test_suite_enabled():
            results_store.set_info_text('WARNING', 'Test suite is not enabled at build time.')
        # create an object representing the rebased SPEC file
        self.rebase_spec_file = self.spec_file.copy(self.rebase_spec_file_path)

        # No SOURCES argument: ask the configured versioneer for the latest
        # upstream version; failing that, the rebase cannot proceed.
        if not self.conf.sources:
            self.conf.sources = versioneers_runner.run(self.conf.versioneer,
                                                       self.spec_file.get_package_name(),
                                                       self.spec_file.category,
                                                       self.conf.versioneer_blacklist)
            if self.conf.sources:
                logger.info("Determined latest upstream version '%s'", self.conf.sources)
            else:
                raise RebaseHelperError('Could not determine latest upstream version '
                                        'and no SOURCES argument specified!')

        # Prepare rebased_sources_dir
        self.rebased_repo = self._prepare_rebased_repository(self.spec_file.patches,
                                                             self.execution_dir,
                                                             self.rebased_sources_dir)

        # check if argument passed as new source is a file or just a version
        if [True for ext in Archive.get_supported_archives() if self.conf.sources.endswith(ext)]:
            logger.debug("argument passed as a new source is a file")
            self.rebase_spec_file.set_version_using_archive(self.conf.sources)
        else:
            logger.debug("argument passed as a new source is a version")
            version, extra_version, separator = SpecFile.split_version_string(self.conf.sources)
            self.rebase_spec_file.set_version(version)
            self.rebase_spec_file.set_extra_version_separator(separator)
            self.rebase_spec_file.set_extra_version(extra_version)

        # Refuse downgrades/no-ops unless the user explicitly skips the check
        if not self.conf.skip_version_check and parse_version(self.rebase_spec_file.get_version()) \
                <= parse_version(self.spec_file.get_version()):
            raise RebaseHelperError("Current version is equal to or newer than the requested version, nothing to do.")

        # run spec hooks
        spec_hooks_runner.run_spec_hooks(self.spec_file, self.rebase_spec_file, **self.kwargs)

        # spec file object has been sanitized downloading can proceed
        for spec_file in [self.spec_file, self.rebase_spec_file]:
            if spec_file.download:
                spec_file.download_remote_sources()
                # parse spec again with sources downloaded to properly expand %prep section
                spec_file._update_data()  # pylint: disable=protected-access
Exemplo n.º 6
0
    def _prepare_spec_objects(self):
        """Prepare SPEC file objects and related state for the rebase.

        Creates the original and rebased SpecFile objects, determines the
        upstream version (via the versioneers plugin manager if SOURCES was
        not given), prepares the rebased-sources repository, sets the new
        version, updates the changelog, runs spec hooks and finally downloads
        remote sources. The step order matters: hooks must run on a fully
        versioned SPEC file before sources are downloaded.

        :raises RebaseHelperError: if no upstream version can be determined,
            or if the requested version is not newer than the current one.
        :return: None; results are stored in instance attributes.
        """
        self.rebase_spec_file_path = get_rebase_name(self.rebased_sources_dir, self.spec_file_path)

        self.spec_file = SpecFile(self.spec_file_path, self.execution_dir)
        # Check whether test suite is enabled at build time
        if not self.spec_file.is_test_suite_enabled():
            results_store.set_info_text('WARNING', 'Test suite is not enabled at build time.')
        # create an object representing the rebased SPEC file
        self.rebase_spec_file = self.spec_file.copy(self.rebase_spec_file_path)

        # No SOURCES argument: ask the configured versioneer plugin for the
        # latest upstream version; failing that, the rebase cannot proceed.
        if not self.conf.sources:
            self.conf.sources = plugin_manager.versioneers.run(self.conf.versioneer,
                                                               self.spec_file.get_package_name(),
                                                               self.spec_file.category,
                                                               self.conf.versioneer_blacklist)
            if self.conf.sources:
                logger.info("Determined latest upstream version '%s'", self.conf.sources)
            else:
                raise RebaseHelperError('Could not determine latest upstream version '
                                        'and no SOURCES argument specified!')

        # Prepare rebased_sources_dir
        self.rebased_repo = self._prepare_rebased_repository(self.spec_file.patches,
                                                             self.execution_dir,
                                                             self.rebased_sources_dir)

        # check if argument passed as new source is a file or just a version
        if [True for ext in Archive.get_supported_archives() if self.conf.sources.endswith(ext)]:
            logger.verbose("argument passed as a new source is a file")
            self.rebase_spec_file.set_version_using_archive(self.conf.sources)
        else:
            logger.verbose("argument passed as a new source is a version")
            version, extra_version, separator = SpecFile.split_version_string(self.conf.sources)
            self.rebase_spec_file.set_version(version)
            self.rebase_spec_file.set_extra_version_separator(separator)
            self.rebase_spec_file.set_extra_version(extra_version)

        # Refuse downgrades/no-ops unless the user explicitly skips the check
        if not self.conf.skip_version_check and parse_version(self.rebase_spec_file.get_version()) \
                <= parse_version(self.spec_file.get_version()):
            raise RebaseHelperError("Current version is equal to or newer than the requested version, nothing to do.")

        self.rebase_spec_file.update_changelog(self.conf.changelog_entry)

        # run spec hooks
        plugin_manager.spec_hooks.run(self.spec_file, self.rebase_spec_file, **self.kwargs)

        # spec file object has been sanitized downloading can proceed
        if not self.conf.not_download_sources:
            for spec_file in [self.spec_file, self.rebase_spec_file]:
                spec_file.download_remote_sources()
                # parse spec again with sources downloaded to properly expand %prep section
                spec_file._update_data()  # pylint: disable=protected-access
Exemplo n.º 7
0
def test_single_message(github_release_fedmsg, mock_upstream_remote_functionality):
    """Syncing an upstream release from a fedmsg event produces the tarball
    and bumps the dist-git SPEC file version."""
    _, dist_git = mock_upstream_remote_functionality

    api = PackitBotAPI(get_test_config())
    api.sync_upstream_release_with_fedmsg(github_release_fedmsg)

    assert (dist_git / TARBALL_NAME).is_file()
    assert SpecFile(str(dist_git / "beer.spec"), None).get_version() == "0.1.0"
Exemplo n.º 8
0
 def test_constructor(self, workdir):
     """Guard the SpecFile constructor interface against incompatible changes."""
     shutil.copy(os.path.join(TEST_FILES_DIR, SPEC_FILE), workdir)
     # the path must stay the only mandatory argument
     assert isinstance(SpecFile(SPEC_FILE), SpecFile)
     # keyword arguments must keep their current names
     assert isinstance(
         SpecFile(path=SPEC_FILE,
                  sources_location=workdir,
                  predefined_macros=None),
         SpecFile)
Exemplo n.º 9
0
 def test_extract_version_from_archive_name(self):
     """Table-driven check of (version, extra version, separator) extraction."""
     cases = [
         # Basic cases: no source URL needed
         (('test-1.0.1.tar.gz',), ('1.0.1', '', '')),
         (('/home/user/test-1.0.1.tar.gz',), ('1.0.1', '', '')),
         (('test-1.0.1.tar.gz',
           'ftp://ftp.test.org/test-%{version}.tar.gz'), ('1.0.1', '', '')),
         (('/home/user/test-1.0.1.tar.gz',
           'ftp://ftp.test.org/test-%{version}.tar.gz'), ('1.0.1', '', '')),
         # Real world cases
         (('cups-1.7.5-source.tar.bz2',
           'http://www.cups.org/software/%{version}/cups-%{version}-source.tar.bz2'),
          ('1.7.5', '', '')),
         # the 'rc1' can't be in the version number
         (('bind-9.9.5rc2.tar.gz',
           'ftp://ftp.isc.org/isc/bind9/%{VERSION}/bind-%{VERSION}.tar.gz'),
          ('9.9.5', 'rc2', '')),
         (('dnsmasq-2.69rc1.tar.xz',
           'http://www.thekelleys.org.uk/dnsmasq/%{?extrapath}%{name}-%{version}%{?extraversion}.tar.xz'),
          ('2.69', 'rc1', '')),
         (('log4cplus-1.1.3-rc3.tar.xz',
           'http://downloads.sourceforge.net/%{name}/%{name}-%{version}%{?prever:-%{prever}}.tar.xz'),
          ('1.1.3', 'rc3', '-')),
         (('log4cplus-1.1.3_rc3.tar.xz',
           'http://downloads.sourceforge.net/%{name}/%{name}-%{version}%{?prever:_%{prever}}.tar.xz'),
          ('1.1.3', 'rc3', '_')),
     ]
     for args, expected in cases:
         assert SpecFile.extract_version_from_archive_name(*args) == expected
Exemplo n.º 10
0
 def test_extract_version_from_archive_name(self):
     """Check (version, extra version, separator) extraction via a local helper."""
     def check(expected, archive, source=None):
         args = (archive,) if source is None else (archive, source)
         assert SpecFile.extract_version_from_archive_name(*args) == expected

     # Basic cases
     check(('1.0.1', '', ''), 'test-1.0.1.tar.gz')
     check(('1.0.1', '', ''), '/home/user/test-1.0.1.tar.gz')
     check(('1.0.1', '', ''), 'test-1.0.1.tar.gz',
           'ftp://ftp.test.org/test-%{version}.tar.gz')
     check(('1.0.1', '', ''), '/home/user/test-1.0.1.tar.gz',
           'ftp://ftp.test.org/test-%{version}.tar.gz')
     # Real world cases
     check(('1.7.5', '', ''), 'cups-1.7.5-source.tar.bz2',
           'http://www.cups.org/software/%{version}/cups-%{version}-source.tar.bz2')
     # the 'rc1' can't be in the version number
     check(('9.9.5', 'rc2', ''), 'bind-9.9.5rc2.tar.gz',
           'ftp://ftp.isc.org/isc/bind9/%{VERSION}/bind-%{VERSION}.tar.gz')
     check(('2.69', 'rc1', ''), 'dnsmasq-2.69rc1.tar.xz',
           'http://www.thekelleys.org.uk/dnsmasq/%{?extrapath}%{name}-%{version}%{?extraversion}.tar.xz')
     check(('1.1.3', 'rc3', '-'), 'log4cplus-1.1.3-rc3.tar.xz',
           'http://downloads.sourceforge.net/%{name}/%{name}-%{version}%{?prever:-%{prever}}.tar.xz')
     check(('1.1.3', 'rc3', '_'), 'log4cplus-1.1.3_rc3.tar.xz',
           'http://downloads.sourceforge.net/%{name}/%{name}-%{version}%{?prever:_%{prever}}.tar.xz')
Exemplo n.º 11
0
 def specfile(self) -> SpecFile:
     """Build a SpecFile, adapting to the installed rebase-helper API.

     Newer rebase-helper versions accept a ``changelog_entry`` argument;
     pass it only when the constructor signature advertises it.
     """
     # changing API is fun, where else could we use inspect?
     kwargs = {
         "path": self.specfile_path,
         "sources_location": self.specfile_dir,
     }
     if "changelog_entry" in inspect.signature(SpecFile).parameters:
         kwargs["changelog_entry"] = None
     return SpecFile(**kwargs)
Exemplo n.º 12
0
def test_single_message(github_release_fedmsg,
                        mock_remote_functionality_upstream):
    """Syncing an upstream release from a fedmsg event produces the tarball
    and bumps the dist-git SPEC file version."""
    upstream, dist_git = mock_remote_functionality_upstream

    api = PackitBotAPI(get_test_config())

    # short-circuit cloning: reuse the local upstream repository instead
    flexmock(Repo).should_receive("clone_from").and_return(git.Repo(str(upstream)))

    api.sync_upstream_release_with_fedmsg(github_release_fedmsg)
    assert (dist_git / TARBALL_NAME).is_file()
    assert SpecFile(str(dist_git / "beer.spec"), None).get_version() == "0.1.0"
Exemplo n.º 13
0
    def _create_possible_replacements(
            cls, spec_file: SpecFile, rebase_spec_file: SpecFile,
            use_macro: bool) -> List[Tuple[Pattern[str], str]]:
        """Creates possible subversion replacements.

        Args:
            spec_file: Old SpecFile.
            rebase_spec_file: New SpecFile.
            use_macro: Whether %{version} macro should be used as a replacement.

        Returns:
            List of tuples containing regex pattern and replacement
            that can be passed to re.sub. The first tuple always
            represents the whole version.

        Example:
            Subversions 1.2, 1.2.3, 1.2.3.4 would be created from version 1.2.3.4.

        """
        old = spec_file.get_version()
        new = rebase_spec_file.get_version()
        # Subversions must be delimited on both sides (space, slash, dash,
        # dot or end of string) so that e.g. '1.2' does not match inside '11.22'
        version_re = r'([\ /\-\s]){}([/.\-\s]|$)'
        # Allow any character after whole version to replace strings such as
        # 1.0.1bc1
        full_version_re = r'([\ /\-\s]){}(.*)'
        # \g<1>/\g<2> re-insert the delimiter characters captured around the version
        replacement = r'\g<1>{}\g<2>'
        split_version = old.split('.')

        # first entry: whole old version -> new version (or the %{version} macro)
        res = [(re.compile(full_version_re.format(re.escape(old))),
                replacement.format('%{version}' if use_macro else new))]
        # iterate backwards to go from longer to shorter subversions
        for i in reversed(range(2, len(split_version))):
            pattern = re.compile(
                version_re.format(re.escape('.'.join(split_version[:i]))))
            new_subversion = replacement.format('.'.join(new.split('.')[:i]))
            res.append((pattern, new_subversion))
        # add hardcoded extra version replacement
        try:
            # parse_release()[2] holds the extra version component (may be empty)
            old_extra = spec_file.parse_release()[2]
            if old_extra:
                new_extra = rebase_spec_file.parse_release()[2] or ''
                # allow extraversion to immediately follow version
                extraversion = re.compile(r'([\ /\-\s\d\}])' +
                                          re.escape(old_extra) +
                                          r'([/.\-\s]|$)')
                res.append((extraversion, replacement.format(new_extra)))
        except RebaseHelperError:
            # silently skip unparsable release
            pass
        return res
Exemplo n.º 14
0
    def test_get_paths_with_rpm_macros(self):
        """Well-known filesystem paths must be translated to their RPM macros."""
        raw_paths = ['/usr/bin/binary1',
                     '/usr/sbin/binary2',
                     '/usr/include/header.h',
                     '/usr/lib/library1.so',
                     '/usr/lib64/library2.so',
                     '/usr/libexec/script.sh',
                     '/usr/lib/systemd/system/daemond.service',
                     '/usr/share/man/man1/test.1.gz',
                     '/usr/share/info/file.info',
                     '/usr/share/doc/RFC.pdf',
                     '/usr/share/config.site',
                     '/var/lib/libvirt',
                     '/var/tmp/abrt',
                     '/var/lock']

        expected_paths = {'%{_bindir}/binary1',
                          '%{_sbindir}/binary2',
                          '%{_includedir}/header.h',
                          '%{_libdir}/library1.so',
                          '%{_libdir}/library2.so',
                          '%{_libexecdir}/script.sh',
                          '%{_unitdir}/daemond.service',
                          '%{_mandir}/man1/test.1.gz',
                          '%{_infodir}/file.info',
                          '%{_docdir}/RFC.pdf',
                          '%{_datarootdir}/config.site',
                          '%{_sharedstatedir}/libvirt',
                          '%{_tmppath}/abrt',
                          '%{_localstatedir}/lock'}

        paths = SpecFile.get_paths_with_rpm_macros(raw_paths)
        # set equality is equivalent to the size + intersection checks
        assert set(paths) == expected_paths
Exemplo n.º 15
0
    def _correct_deleted_files(cls, rebase_spec_file: SpecFile,
                               files: List[str]) -> Dict[str, RemovedFiles]:
        """Removes files newly missing in buildroot from %files sections
        of the SPEC file. If a file cannot be removed, the user is informed
        and it is mentioned in the final report.

        Args:
            rebase_spec_file: SPEC file to remove the files from.
            files: List of files to remove.

        Returns:
            Dict summarizing the changes done to the SPEC file.

        """
        removed: Dict[str, RemovedFromSections] = collections.defaultdict(
            lambda: collections.defaultdict(list))
        for section_name, section_content in rebase_spec_file.spec_content.sections:
            if not section_name.startswith('%files'):
                continue
            subpackage = rebase_spec_file.get_subpackage_name(section_name)
            cls._correct_one_section(subpackage, section_name, section_content,
                                     files, removed)
            if not files:
                # every file was removed - nothing left to report
                return cast(Dict[str, RemovedFiles], removed)

        # some files could not be removed from any %files section
        report: Dict[str, RemovedFiles] = cast(Dict[str, RemovedFiles], removed)
        logger.info('Could not remove the following files:')
        for path in files:
            logger.info('\t%s', path)

        report['unable_to_remove'] = files
        return report
Exemplo n.º 16
0
def test_basic_local_update(upstream_n_distgit, mock_upstream_remote_functionality):
    """ basic propose-update test: mock remote API, use local upstream and dist-git """
    upstream, dist_git = upstream_n_distgit

    chdir(upstream)
    config = get_test_config()

    package_config = get_local_package_config(str(upstream))
    package_config.upstream_project_url = str(upstream)
    package_config.downstream_project_url = str(dist_git)

    PackitAPI(config, package_config).sync_release("master", "0.1.0")

    assert (dist_git / TARBALL_NAME).is_file()
    assert SpecFile(str(dist_git / "beer.spec"), None).get_version() == "0.1.0"
Exemplo n.º 17
0
    def test_get_paths_with_rpm_macros(self):
        """Well-known filesystem paths must be translated to their RPM macros."""
        raw_paths = ['/usr/bin/binary1',
                     '/usr/sbin/binary2',
                     '/usr/include/header.h',
                     '/usr/lib/library1.so',
                     '/usr/lib64/library2.so',
                     '/usr/libexec/script.sh',
                     '/usr/lib/systemd/system/daemond.service',
                     '/usr/share/man/man1/test.1.gz',
                     '/usr/share/info/file.info',
                     '/usr/share/doc/RFC.pdf',
                     '/usr/share/config.site',
                     '/var/lib/libvirt',
                     '/var/tmp/abrt',
                     '/var/lock']

        # set literal instead of set([...]) (flake8-comprehensions C403)
        expected_paths = {'%{_bindir}/binary1',
                          '%{_sbindir}/binary2',
                          '%{_includedir}/header.h',
                          '%{_libdir}/library1.so',
                          '%{_libdir}/library2.so',
                          '%{_libexecdir}/script.sh',
                          '%{_unitdir}/daemond.service',
                          '%{_mandir}/man1/test.1.gz',
                          '%{_infodir}/file.info',
                          '%{_docdir}/RFC.pdf',
                          '%{_datarootdir}/config.site',
                          '%{_sharedstatedir}/libvirt',
                          '%{_tmppath}/abrt',
                          '%{_localstatedir}/lock'}

        paths = SpecFile.get_paths_with_rpm_macros(raw_paths)
        # direct set equality subsumes the original size + intersection checks
        # and gives a readable diff on failure
        assert set(paths) == expected_paths
Exemplo n.º 18
0
def test_basic_local_update_patch_content(sourcegit_n_distgit,
                                          mock_remote_functionality_sourcegit,
                                          api_instance_source_git):
    """ propose-update for sourcegit test: mock remote API, use local upstream and dist-git """

    sourcegit, distgit = sourcegit_n_distgit

    # modify a tracked upstream file so the release carries a source change
    source_file = sourcegit / "big-source-file.txt"
    source_file.write_text("new changes")
    git_add_and_commit(directory=sourcegit, message="source change")

    api_instance_source_git.sync_release("master",
                                         "0.1.0",
                                         upstream_ref="0.1.0")

    spec = SpecFile(str(distgit / "beer.spec"), None)

    # commits past upstream_ref should have been turned into patch files
    # referenced from the SPEC file
    spec_package_section = "\n".join(spec.spec_content.sections["%package"])
    assert "Patch0001: 0001" in spec_package_section
    assert "Patch0002: 0002" in spec_package_section
    # inspect the dist-git commit produced by sync_release
    git_diff = subprocess.check_output(["git", "diff", "HEAD~", "HEAD"],
                                       cwd=distgit).decode()

    # version bump and patch marker must appear in the SPEC file diff
    assert "-Version:        0.0.0\n+Version:        0.1.0" in git_diff
    assert "+# PATCHES FROM SOURCE GIT:" in git_diff
    # changelog: old entry replaced by the new release entry
    assert (
        "-* Mon Feb 24 2019 Tomas Tomecek <*****@*****.**> - 0.0.0-1\n"
        "-- No brewing, yet.\n"
        "+* Mon Feb 25 2019 Tomas Tomecek <*****@*****.**> - 0.1.0-1\n"
        "+- Initial brewing" in git_diff)

    # direct diff in the synced file
    assert ("diff --git a/.packit.yaml b/.packit.yaml\n"
            "new file mode 100644" in git_diff)
    assert "--- /dev/null\n" "+++ b/.packit.yaml" in git_diff

    # diff of the synced file should not be in the patch
    assert ("+diff --git a/.packit.yaml b/.packit.yaml\n"
            "+new file mode 100644\n" not in git_diff)

    # diff of the source file (not synced) has to be in the patch
    assert ("patch\n"
            "@@ -0,0 +1,9 @@\n"
            "+diff --git a/big-source-file.txt b/big-source-file.txt\n"
            in git_diff)

    # the '+' prefixed lines mean the hunk lives inside an added patch file
    assert ("+--- a/big-source-file.txt\n"
            "++++ b/big-source-file.txt\n"
            "+@@ -1,2 +1 @@\n"
            "+-This is a testing file\n"
            "+-containing some text.\n"
            "++new changes\n" in git_diff)

    # diff of the source files (not synced) should not be directly in the git diff
    assert ("--- a/big-source-file.txt\n"
            "+++ b/big-source-file.txt\n"
            "@@ -1,2 +1 @@\n"
            "-This is a testing file\n"
            "-containing some text.\n"
            "+new changes\n" not in git_diff)
Exemplo n.º 19
0
 def specfile(self) -> SpecFile:
     """Lazily build and cache a SpecFile, adapting to the installed
     rebase-helper API.

     Newer rebase-helper versions accept a ``changelog_entry`` argument;
     pass it only when the constructor signature advertises it.
     """
     if self._specfile is not None:
         return self._specfile
     # changing API is fun, where else could we use inspect?
     kwargs = {
         "path": self.absolute_specfile_path,
         "sources_location": str(self.absolute_specfile_dir),
     }
     if "changelog_entry" in inspect.signature(SpecFile).parameters:
         kwargs["changelog_entry"] = ""
     self._specfile = SpecFile(**kwargs)
     return self._specfile
Exemplo n.º 20
0
def test_basic_local_update_from_downstream(
        downstream_n_distgit, mock_downstream_remote_functionality):
    """Sync content from dist-git into the upstream repo and verify that
    the spec file appears there with the downstream version."""
    flexmock(LocalProject, _parse_namespace_from_git_url=lambda: None)
    upstream, downstream = downstream_n_distgit

    with cwd(upstream):
        config = get_test_config()
        package_config = get_local_package_config(str(upstream))
        package_config.upstream_project_url = str(upstream)
        package_config.downstream_project_url = str(downstream)
        local_project = LocalProject(path_or_url=str(upstream))

        api = PackitAPI(config, package_config, local_project)
        api.sync_from_downstream("master", "master", True)

        beer_spec = upstream / "beer.spec"
        assert beer_spec.is_file()
        assert SpecFile(str(beer_spec), None).get_version() == "0.0.0"
Exemplo n.º 21
0
    def run(cls, spec_file: SpecFile, rebase_spec_file: SpecFile,
            **kwargs: Any):
        """Replace occurrences of the old version in the rebased SPEC file.

        Tag values are rewritten first (skipping ignored tags and local
        Source/Patch entries); then every remaining line of each section
        except %changelog is swept, leaving trailing comments untouched.
        """
        replace_with_macro = bool(kwargs.get('replace_old_version_with_macro'))

        # Element [0] replaces the whole version; the remaining elements
        # replace individual subversions.
        subversion_patterns = cls._create_possible_replacements(
            spec_file, rebase_spec_file, replace_with_macro)
        # Lines already handled as tags, keyed by section index -- the
        # section sweep below skips these to avoid double processing.
        examined_lines: Dict[int, Set[int]] = collections.defaultdict(set)
        for tag in rebase_spec_file.tags.filter():
            # Record the line even for tags skipped below, so the section
            # sweep will not touch them either.
            examined_lines[tag.section_index].add(tag.line)
            value = rebase_spec_file.get_raw_tag_value(tag.name,
                                                       tag.section_index)
            if not value or tag.name in cls.IGNORED_TAGS:
                continue
            scheme = urllib.parse.urlparse(value).scheme
            if (tag.name.startswith('Patch')
                    or tag.name.startswith('Source')) and not scheme:
                # skip local sources
                continue

            # replace the whole version first
            updated_value = subversion_patterns[0][0].sub(
                subversion_patterns[0][1], value)
            # replace subversions only for remote sources/patches
            if tag.name.startswith('Patch') or tag.name.startswith('Source'):
                for sub_pattern, repl in subversion_patterns[1:]:
                    updated_value = sub_pattern.sub(repl, updated_value)
            rebase_spec_file.set_raw_tag_value(tag.name, updated_value,
                                               tag.section_index)

        for sec_index, (sec_name, section) in enumerate(
                rebase_spec_file.spec_content.sections):
            if sec_name.startswith('%changelog'):
                continue
            for index, line in enumerate(section):
                tag_ignored = any(
                    MacroHelper.expand(line, line).startswith(tag)
                    for tag in cls.IGNORED_TAGS)
                if index in examined_lines[sec_index] or tag_ignored:
                    continue
                # NOTE(review): the comment span is looked up on the
                # original spec_file, not rebase_spec_file -- presumably
                # equivalent here; confirm if sections can diverge.
                start, end = spec_file.spec_content.get_comment_span(
                    line, sec_name)
                # Substitute only in the code part of the line; the
                # trailing comment (line[start:end]) is kept unchanged.
                updated_line = subversion_patterns[0][0].sub(
                    subversion_patterns[0][1], line[:start])
                section[index] = updated_line + line[start:end]

        rebase_spec_file.save()
Exemplo n.º 22
0
def test_basic_local_update_without_patching(
    sourcegit_n_distgit,
    mock_patching,
    mock_remote_functionality_sourcegit,
    api_instance_source_git,
):
    """propose-update for source-git: remote API is mocked, local upstream
    and dist-git repositories are used, patch generation is disabled."""
    _, distgit = sourcegit_n_distgit

    api_instance_source_git.sync_release(
        "master", "0.1.0", upstream_ref="0.1.0")

    tarball = distgit / TARBALL_NAME
    assert tarball.is_file()
    built_spec = SpecFile(str(distgit / "beer.spec"), None)
    assert built_spec.get_version() == "0.1.0"
Exemplo n.º 23
0
 def specfile(self):
     """Lazily create and memoize the SpecFile for this package."""
     if self._specfile is not None:
         return self._specfile
     self._specfile = SpecFile(
         path=self.specfile_path,
         sources_location=self.local_project.working_dir,
         changelog_entry=None,
     )
     return self._specfile
Exemplo n.º 24
0
 def test_split_version_string(self):
     """Version strings decompose into (version, extra version, separator)."""
     # no argument at all -> nothing to split
     assert SpecFile.split_version_string() == (None, None, None)
     cases = [
         ('1.0.1', ('1.0.1', '', '')),
         ('1.0.1b1', ('1.0.1', 'b1', '')),
         ('1.0.1rc1', ('1.0.1', 'rc1', '')),
         ('1.1.3-rc6', ('1.1.3', 'rc6', '-')),
         ('1.1.3_rc6', ('1.1.3', 'rc6', '_')),
         ('.1.1.1', ('1.1.1', '', '')),
     ]
     for version_string, expected in cases:
         assert SpecFile.split_version_string(version_string) == expected
Exemplo n.º 25
0
 def test_split_version_string(self):
     """Check version string decomposition into (version, extra, separator)."""
     # calling without arguments yields an all-None result
     assert SpecFile.split_version_string() == (None, None, None)
     expected_results = {
         "1.0.1": ("1.0.1", "", ""),
         "1.0.1b1": ("1.0.1", "b1", ""),
         "1.0.1rc1": ("1.0.1", "rc1", ""),
         "1.1.3-rc6": ("1.1.3", "rc6", "-"),
         "1.1.3_rc6": ("1.1.3", "rc6", "_"),
         ".1.1.1": ("1.1.1", "", ""),
     }
     for version_string, expected in expected_results.items():
         assert SpecFile.split_version_string(version_string) == expected
Exemplo n.º 26
0
 def test_split_version_string(self):
     """Version, extra version and separator are parsed from one string."""
     # the no-argument call returns a triple of None
     assert SpecFile.split_version_string() == (None, None, None)
     inputs = ['1.0.1', '1.0.1b1', '1.0.1rc1',
               '1.1.3-rc6', '1.1.3_rc6', '.1.1.1']
     outputs = [
         ('1.0.1', '', ''),
         ('1.0.1', 'b1', ''),
         ('1.0.1', 'rc1', ''),
         ('1.1.3', 'rc6', '-'),
         ('1.1.3', 'rc6', '_'),
         ('1.1.1', '', ''),
     ]
     for version_string, expected in zip(inputs, outputs):
         assert SpecFile.split_version_string(version_string) == expected
Exemplo n.º 27
0
def test_basic_local_update_patching(sourcegit_n_distgit,
                                     mock_remote_functionality_sourcegit,
                                     api_instance_source_git):
    """propose-update for source-git: remote API is mocked; verifies that
    patches generated from source-git commits end up in the spec file."""
    _, distgit = sourcegit_n_distgit
    api_instance_source_git.sync_release(
        "master", "0.1.0", upstream_ref="0.1.0")

    assert (distgit / TARBALL_NAME).is_file()
    built_spec = SpecFile(str(distgit / "beer.spec"), None)
    assert built_spec.get_version() == "0.1.0"

    package_section = "\n".join(built_spec.spec_content.sections["%package"])
    for marker in ("# PATCHES FROM SOURCE GIT",
                   "# sourcegit content",
                   "Patch0001: 0001"):
        assert marker in package_section

    # all Source tags must come before the generated patch list
    parts = package_section.split("# PATCHES FROM SOURCE GIT")
    assert "Source" in parts[0]
    assert "Source" not in parts[1]
Exemplo n.º 28
0
 def test_extract_version_from_archive_name(self):
     """extract_version_from_archive_name, with and without a source URL."""
     # Basic tests
     assert SpecFile.extract_version_from_archive_name("test-1.0.1.tar.gz") == ("1.0.1", "", "")
     assert SpecFile.extract_version_from_archive_name("/home/user/test-1.0.1.tar.gz") == ("1.0.1", "", "")
     url_template = "ftp://ftp.test.org/test-%{version}.tar.gz"
     for archive in ("test-1.0.1.tar.gz", "/home/user/test-1.0.1.tar.gz"):
         assert SpecFile.extract_version_from_archive_name(
             archive, url_template
         ) == ("1.0.1", "", "")
     # Real world tests
     real_world_cases = [
         ("cups-1.7.5-source.tar.bz2",
          "http://www.cups.org/software/%{version}/cups-%{version}-source.tar.bz2",
          ("1.7.5", "", "")),
         # the 'rc1' can't be in the version number
         ("bind-9.9.5rc2.tar.gz",
          "ftp://ftp.isc.org/isc/bind9/%{VERSION}/bind-%{VERSION}.tar.gz",
          ("9.9.5", "rc2", "")),
         ("dnsmasq-2.69rc1.tar.xz",
          "http://www.thekelleys.org.uk/dnsmasq/%{?extrapath}%{name}-%{version}%{?extraversion}.tar.xz",
          ("2.69", "rc1", "")),
         ("log4cplus-1.1.3-rc3.tar.xz",
          "http://downloads.sourceforge.net/%{name}/%{name}-%{version}%{?prever:-%{prever}}.tar.xz",
          ("1.1.3", "rc3", "-")),
         ("log4cplus-1.1.3_rc3.tar.xz",
          "http://downloads.sourceforge.net/%{name}/%{name}-%{version}%{?prever:_%{prever}}.tar.xz",
          ("1.1.3", "rc3", "_")),
     ]
     for archive, source_url, expected in real_world_cases:
         assert SpecFile.extract_version_from_archive_name(archive, source_url) == expected
Exemplo n.º 29
0
 def test_split_version_string(self):
     """Splitting a new version string against the previous version."""
     cases = [
         ('1.0.1', '1.0.1', ('1.0.1', None)),
         ('1.0.1b1', '1.0.1', ('1.0.1', 'b1')),
         ('1.0.1rc1', '1.0.1', ('1.0.1', 'rc1')),
         ('1.1.3-rc6', '1.1.3', ('1.1.3', 'rc6')),
         ('1.1.3_rc6', '1.1.3', ('1.1.3', 'rc6')),
         ('1.1.3~rc6', '1.1.3', ('1.1.3', 'rc6')),
         # trailing 'd' is part of the version itself, not an extra version
         ('1.1.1d', '1.1.1c', ('1.1.1d', None)),
     ]
     for version_string, previous_version, expected in cases:
         assert SpecFile.split_version_string(
             version_string, previous_version) == expected
Exemplo n.º 30
0
 def test_extract_version_from_archive_name(self):
     """Version (and possible extra version) extraction from tarball names."""
     # Basic tests
     url_template = 'ftp://ftp.test.org/test-%{version}.tar.gz'
     assert SpecFile.extract_version_from_archive_name('test-1.0.1.tar.gz') == ('1.0.1', '')
     assert SpecFile.extract_version_from_archive_name('/home/user/test-1.0.1.tar.gz') == ('1.0.1', '')
     assert SpecFile.extract_version_from_archive_name('test-1.0.1.tar.gz',
                                                       url_template) == ('1.0.1', '')
     assert SpecFile.extract_version_from_archive_name('/home/user/test-1.0.1.tar.gz',
                                                       url_template) == ('1.0.1', '')
     # Real world tests
     real_world_cases = [
         ('cups-1.7.5-source.tar.bz2',
          'http://www.cups.org/software/%{version}/cups-%{version}-source.tar.bz2',
          ('1.7.5', '')),
         # the 'rc1' can't be in the version number
         ('bind-9.9.5rc2.tar.gz',
          'ftp://ftp.isc.org/isc/bind9/%{VERSION}/bind-%{VERSION}.tar.gz',
          ('9.9.5', 'rc2')),
         ('dnsmasq-2.69rc1.tar.xz',
          'http://www.thekelleys.org.uk/dnsmasq/%{?extrapath}%{name}-%{version}%{?extraversion}.tar.xz',
          ('2.69', 'rc1')),
     ]
     for archive, source_url, expected in real_world_cases:
         assert SpecFile.extract_version_from_archive_name(archive, source_url) == expected
Exemplo n.º 31
0
 def parse_sources(spec_path):
     """Derive the upstream repository URL from the spec file's sources.

     Every remote source hosted on a supported upstream host must point at
     the same repository; that shared URL is returned ("" when no usable
     source was found).  Disagreeing sources trigger an AssertionError.
     """
     spec = SpecFile(spec_path)
     upstream_url = ""
     for source in spec.sources:
         print(f"Parsing source: {source}")
         parsed = urlparse(source)
         if not parsed.netloc:
             print(f"netloc not found - {source}")
             continue
         if parsed.netloc not in SUPPORTED_UPSTREAM_HOSTS:
             print(f"unsupported upstream host found - {parsed.netloc}")
             continue
         # keep only the first two path components: /<namespace>/<repo>
         repo_path = "/".join(parsed.path.split("/")[:3])
         candidate = f"{parsed.scheme}://{parsed.netloc}{repo_path}"
         if upstream_url:
             assert upstream_url == candidate
         else:
             upstream_url = candidate
     return upstream_url
Exemplo n.º 32
0
def mocked_spec_object(spec_attributes):
    """Create a SpecFile instance without parsing an actual spec file.

    The object is allocated with ``__new__`` (bypassing ``__init__`` and
    any file I/O) and attributes are injected from *spec_attributes*:

    * ``'macros'``: dict mapping macro name -> properties.  Each macro is
      registered with rpm using its ``'value'``, then the dumped macro
      list is patched with the remaining properties.
    * ``'spec_content'``: a plain str is wrapped in a SpecContent.

    ``save()`` is stubbed out so tests cannot accidentally write to disk.
    """
    spec = SpecFile.__new__(SpecFile)
    spec.save = lambda: None
    for attribute, value in spec_attributes.items():
        if attribute == 'macros':
            # Register macros with rpm first, then patch the dumped macro
            # representations with every property except 'value'.
            for macro, properties in value.items():
                rpm.addMacro(macro, properties.get('value', ''))
            macros = MacroHelper.dump()
            for macro, properties in value.items():
                for m in macros:
                    if m['name'] == macro:
                        for prop, v in properties.items():
                            if prop != 'value':
                                m[prop] = v
            value = macros
        if attribute == 'spec_content' and isinstance(value, str):
            value = SpecContent(value)
        setattr(spec, attribute, value)
    if hasattr(spec, 'spec_content') and not hasattr(spec, 'tags'):
        # NOTE(review): both Tags arguments are the same spec_content --
        # presumably (raw, parsed) content; confirm against the Tags API.
        spec.tags = Tags(spec.spec_content, spec.spec_content)
    return spec
Exemplo n.º 33
0
class TestSpecFile(BaseTest):
    """SpecFile tests.

    Exercises parsing, tag/section access and rewriting of the bundled
    'test.spec' fixture.  TEST_FILES lists the fixtures these tests need
    (presumably provisioned into WORKING_DIR by BaseTest -- see setup()).
    """

    # expected metadata of the fixture spec file
    NAME = "test"
    VERSION = "1.0.2"
    OLD_ARCHIVE = NAME + "-" + VERSION + ".tar.xz"
    SPEC_FILE = "test.spec"
    # sources/patches referenced by the fixture spec; SOURCE_2 is empty
    # because Source2 is a remote URL (see test__get_raw_source_string)
    SOURCE_0 = "test-source.sh"
    SOURCE_1 = "source-tests.sh"
    SOURCE_2 = ""
    SOURCE_4 = "file.txt.bz2"
    SOURCE_5 = "documentation.tar.xz"
    SOURCE_6 = "misc.zip"
    PATCH_1 = "test-testing.patch"
    PATCH_2 = "test-testing2.patch"
    PATCH_3 = "test-testing3.patch"
    PATCH_4 = "test-testing4.patch"
    BUILD_MISSING_LOG = "build_missing.log"
    BUILD_OBSOLETES_LOG = "build_obsoletes.log"

    TEST_FILES = [SPEC_FILE, PATCH_1, PATCH_2, PATCH_3, PATCH_4, BUILD_MISSING_LOG, BUILD_OBSOLETES_LOG]

    def setup(self):
        # parse the fixture spec anew for every test
        super(TestSpecFile, self).setup()
        self.SPEC_FILE_OBJECT = SpecFile(self.SPEC_FILE, self.WORKING_DIR, download=False)

    def test_get_release(self):
        # the numeric prefix of the release string must match get_release_number()
        match = re.search(r"([0-9.]*[0-9]+)\w*", self.SPEC_FILE_OBJECT.get_release())
        assert match is not None
        assert match.group(1) == self.SPEC_FILE_OBJECT.get_release_number()

    def test_get_release_number(self):
        assert self.SPEC_FILE_OBJECT.get_release_number() == "33"

    def test_set_release_number(self):
        # both float and int inputs are accepted; the getter returns a string
        self.SPEC_FILE_OBJECT.set_release_number(0.1)
        assert self.SPEC_FILE_OBJECT.get_release_number() == "0.1"
        self.SPEC_FILE_OBJECT.set_release_number(22)
        assert self.SPEC_FILE_OBJECT.get_release_number() == "22"

    def test_get_version(self):
        assert self.SPEC_FILE_OBJECT.get_version() == self.VERSION

    def test_set_version(self):
        NEW_VERSION = "1.2.3.4.5"
        self.SPEC_FILE_OBJECT.set_version(NEW_VERSION)
        self.SPEC_FILE_OBJECT.save()
        assert self.SPEC_FILE_OBJECT.get_version() == NEW_VERSION

    def test_set_version_using_archive(self):
        # the version is derived from the archive file name
        NEW_VERSION = "1.2.3.4.5"
        ARCHIVE_NAME = "test-{0}.tar.xz".format(NEW_VERSION)
        self.SPEC_FILE_OBJECT.set_version_using_archive(ARCHIVE_NAME)
        self.SPEC_FILE_OBJECT.save()
        assert self.SPEC_FILE_OBJECT.get_version() == NEW_VERSION

    def test_get_package_name(self):
        assert self.SPEC_FILE_OBJECT.get_package_name() == self.NAME

    def test__write_spec_file_to_disc(self):
        # replace the parsed content wholesale and check it is written verbatim
        new_content = ["testing line 1\n", "testing line 2\n"]
        self.SPEC_FILE_OBJECT.spec_content = new_content
        self.SPEC_FILE_OBJECT._write_spec_file_to_disc()
        with open(self.SPEC_FILE) as spec:
            assert new_content == spec.readlines()

    def test__get_raw_source_string(self):
        # Source0 and Source2 are URLs, Source1 is local, Source3 is
        # commented out in the fixture spec and therefore not found
        assert self.SPEC_FILE_OBJECT._get_raw_source_string(0) == "ftp://ftp.test.org/%{name}-%{version}.tar.xz"
        assert self.SPEC_FILE_OBJECT._get_raw_source_string(1) == "source-tests.sh"
        assert self.SPEC_FILE_OBJECT._get_raw_source_string(2) == "ftp://test.com/test-source.sh"
        assert self.SPEC_FILE_OBJECT._get_raw_source_string(3) is None

    def test_old_tarball(self):
        assert self.SPEC_FILE_OBJECT.get_archive() == self.OLD_ARCHIVE

    def test_get_sources(self):
        sources = [self.SOURCE_0, self.SOURCE_1, self.SOURCE_4, self.SOURCE_5, self.SOURCE_6, self.OLD_ARCHIVE]
        sources = [os.path.join(self.WORKING_DIR, f) for f in sources]
        assert len(set(sources).intersection(set(self.SPEC_FILE_OBJECT.get_sources()))) == 6
        # The Source0 has to be always in the beginning
        assert self.SPEC_FILE_OBJECT.get_archive() == "test-1.0.2.tar.xz"

    def test_get_patches(self):
        # expected mapping: patch number -> [absolute path, index]
        expected_patches = {
            0: [os.path.join(self.WORKING_DIR, self.PATCH_1), 0],
            1: [os.path.join(self.WORKING_DIR, self.PATCH_2), 1],
            2: [os.path.join(self.WORKING_DIR, self.PATCH_3), 2],
            3: [os.path.join(self.WORKING_DIR, self.PATCH_4), 3],
        }
        patches = {}
        for index, p in enumerate(self.SPEC_FILE_OBJECT.get_patches()):
            patches[index] = [p.get_path(), p.get_index()]
        assert patches == expected_patches

    def test_get_requires(self):
        # the BuildRequires from the fixture spec must all be reported
        expected = set(["openssl-devel", "pkgconfig", "texinfo", "gettext", "autoconf"])
        req = self.SPEC_FILE_OBJECT.get_requires()
        assert len(expected.intersection(req)) == len(expected)

    def test_get_paths_with_rpm_macros(self):
        # plain filesystem paths must be translated to their rpm macro forms
        raw_paths = [
            "/usr/bin/binary1",
            "/usr/sbin/binary2",
            "/usr/include/header.h",
            "/usr/lib/library1.so",
            "/usr/lib64/library2.so",
            "/usr/libexec/script.sh",
            "/usr/lib/systemd/system/daemond.service",
            "/usr/share/man/man1/test.1.gz",
            "/usr/share/info/file.info",
            "/usr/share/doc/RFC.pdf",
            "/usr/share/config.site",
            "/var/lib/libvirt",
            "/var/tmp/abrt",
            "/var/lock",
        ]

        expected_paths = set(
            [
                "%{_bindir}/binary1",
                "%{_sbindir}/binary2",
                "%{_includedir}/header.h",
                "%{_libdir}/library1.so",
                "%{_libdir}/library2.so",
                "%{_libexecdir}/script.sh",
                "%{_unitdir}/daemond.service",
                "%{_mandir}/man1/test.1.gz",
                "%{_infodir}/file.info",
                "%{_docdir}/RFC.pdf",
                "%{_datarootdir}/config.site",
                "%{_sharedstatedir}/libvirt",
                "%{_tmppath}/abrt",
                "%{_localstatedir}/lock",
            ]
        )
        paths = SpecFile.get_paths_with_rpm_macros(raw_paths)
        assert len(set(paths)) == len(expected_paths)
        assert len(expected_paths.intersection(set(paths))) == len(expected_paths)

    def test_split_version_string(self):
        # returns (version, extra version, separator)
        assert SpecFile.split_version_string() == (None, None, None)
        assert SpecFile.split_version_string("1.0.1") == ("1.0.1", "", "")
        assert SpecFile.split_version_string("1.0.1b1") == ("1.0.1", "b1", "")
        assert SpecFile.split_version_string("1.0.1rc1") == ("1.0.1", "rc1", "")
        assert SpecFile.split_version_string("1.1.3-rc6") == ("1.1.3", "rc6", "-")
        assert SpecFile.split_version_string("1.1.3_rc6") == ("1.1.3", "rc6", "_")
        assert SpecFile.split_version_string(".1.1.1") == ("1.1.1", "", "")

    def test_extract_version_from_archive_name(self):
        # Basic tests
        assert SpecFile.extract_version_from_archive_name("test-1.0.1.tar.gz") == ("1.0.1", "", "")
        assert SpecFile.extract_version_from_archive_name("/home/user/test-1.0.1.tar.gz") == ("1.0.1", "", "")
        assert SpecFile.extract_version_from_archive_name(
            "test-1.0.1.tar.gz", "ftp://ftp.test.org/test-%{version}.tar.gz"
        ) == ("1.0.1", "", "")
        assert SpecFile.extract_version_from_archive_name(
            "/home/user/test-1.0.1.tar.gz", "ftp://ftp.test.org/test-%{version}.tar.gz"
        ) == ("1.0.1", "", "")
        # Real world tests
        name = "http://www.cups.org/software/%{version}/cups-%{version}-source.tar.bz2"
        assert SpecFile.extract_version_from_archive_name("cups-1.7.5-source.tar.bz2", name) == ("1.7.5", "", "")
        # the 'rc1' can't be in the version number
        name = "ftp://ftp.isc.org/isc/bind9/%{VERSION}/bind-%{VERSION}.tar.gz"
        assert SpecFile.extract_version_from_archive_name("bind-9.9.5rc2.tar.gz", name) == ("9.9.5", "rc2", "")
        name = "http://www.thekelleys.org.uk/dnsmasq/%{?extrapath}%{name}-%{version}%{?extraversion}.tar.xz"
        assert SpecFile.extract_version_from_archive_name("dnsmasq-2.69rc1.tar.xz", name) == ("2.69", "rc1", "")
        name = "http://downloads.sourceforge.net/%{name}/%{name}-%{version}%{?prever:-%{prever}}.tar.xz"
        assert SpecFile.extract_version_from_archive_name("log4cplus-1.1.3-rc3.tar.xz", name) == ("1.1.3", "rc3", "-")
        name = "http://downloads.sourceforge.net/%{name}/%{name}-%{version}%{?prever:_%{prever}}.tar.xz"
        assert SpecFile.extract_version_from_archive_name("log4cplus-1.1.3_rc3.tar.xz", name) == ("1.1.3", "rc3", "_")

    def test__split_sections(self):
        # expected mapping: section index -> [section name, section lines]
        expected_sections = {
            0: [
                "%header",
                [
                    "Summary: A testing spec file\n",
                    "Name: test\n",
                    "Version: 1.0.2\n",
                    "Release: 33%{?dist}\n",
                    "License: GPL2+\n",
                    "Group: System Environment\n",
                    "URL: http://testing.org\n",
                    "\n",
                    "# Note: non-current tarballs get moved to the history/ subdirectory,\n",
                    "# so look there if you fail to retrieve the version you want\n",
                    "Source: ftp://ftp.test.org/%{name}-%{version}.tar.xz\n",
                    "Source1: source-tests.sh\n",
                    "Source2: ftp://test.com/test-source.sh\n",
                    "#Source3: source-tests.sh\n",
                    "Source4: file.txt.bz2\n",
                    "Source5: documentation.tar.xz\n",
                    "Source6: misc.zip\n",
                    "Patch1: test-testing.patch\n",
                    "Patch2: test-testing2.patch\n",
                    "Patch3: test-testing3.patch\n",
                    "Patch4: test-testing4.patch\n",
                    "\n",
                    "BuildRequires: openssl-devel, pkgconfig, texinfo, gettext, autoconf\n",
                    "\n",
                ],
            ],
            1: ["%description", ["Testing spec file\n", "\n"]],
            2: ["%package devel", ["Summary: A testing devel package\n", "\n"]],
            3: ["%description devel", ["Testing devel spec file\n", "\n"]],
            4: [
                "%prep",
                [
                    "%setup -q -a 5\n",
                    "%patch1\n",
                    "%patch2 -p1\n",
                    "%patch3 -p1 -b .testing3\n",
                    "%patch4 -p0 -b .testing4\n",
                    "mkdir misc\n",
                    "tar -xf %{SOURCE6} -C misc\n",
                    "\n",
                ],
            ],
            5: ["%build", ["autoreconf -vi\n", "\n", "%configure\n", "make TEST\n", "\n"]],
            6: ["%install", ["make DESTDIR=$RPM_BUILD_ROOT install\n", "\n"]],
            7: [
                "%check",
                [
                    '#to run make check use "--with check"\n',
                    "%if %{?_with_check:1}%{!?_with_check:0}\n",
                    "make check\n",
                    "%endif\n",
                    "\n",
                ],
            ],
            8: ["%files", ["%{_bindir}/file.txt\n", "\n"]],
            9: ["%files devel", ["%{_bindir}/test_example\n", "%{_libdir}/my_test.so\n", "\n"]],
            10: [
                "%changelog",
                [
                    "* Wed Nov 12 2014 Tomas Hozza <*****@*****.**> 1.0.0-33\n",
                    "- Bump the release for testing purposes\n",
                    "\n",
                    "* Tue Sep 24 2013 Petr Hracek <*****@*****.**> 1.0.0-1\n",
                    "- Initial version\n",
                    "\n",
                ],
            ],
        }
        sections = self.SPEC_FILE_OBJECT._split_sections()
        for key, value in six.iteritems(expected_sections):
            assert sections[key][0] == value[0]
            assert sections[key][1] == value[1]

    def test_get_spec_section(self):
        expected_section = ["%{_bindir}/file.txt\n", "\n"]
        section = self.SPEC_FILE_OBJECT.get_spec_section("%files")
        assert section == expected_section

    def test_spec_missing_file(self):
        # a missing file gets added inside rebase-helper marker comments
        files = {"missing": ["/usr/bin/test2"]}
        self.SPEC_FILE_OBJECT.modify_spec_files_section(files)
        section = self.SPEC_FILE_OBJECT.get_spec_section("%files")
        expected = [
            "#BEGIN THIS MODIFIED BY REBASE-HELPER\n",
            "%{_bindir}/test2\n",
            "#END THIS MODIFIED BY REBASE-HELPER\n",
            "%{_bindir}/file.txt\n",
            "\n",
        ]
        assert expected == section

    def test_spec_remove_file(self):
        # a deleted file disappears from its %files section
        files = {"deleted": ["/usr/lib/test.so"]}
        self.SPEC_FILE_OBJECT.modify_spec_files_section(files)
        section = self.SPEC_FILE_OBJECT.get_spec_section("%files devel")
        assert "%{_libdir}/test.so" not in section

    def test_spec_missing_and_remove_file(self):
        # combined case: one file added, one commented out (not removed)
        files = {"missing": ["/usr/bin/test2"], "deleted": ["/usr/lib/my_test.so"]}
        self.SPEC_FILE_OBJECT.modify_spec_files_section(files)
        section = self.SPEC_FILE_OBJECT.get_spec_section("%files")
        expected = [
            "#BEGIN THIS MODIFIED BY REBASE-HELPER\n",
            "%{_bindir}/test2\n",
            "#END THIS MODIFIED BY REBASE-HELPER\n",
            "%{_bindir}/file.txt\n",
            "\n",
        ]
        assert expected == section
        section_devel = self.SPEC_FILE_OBJECT.get_spec_section("%files devel")
        expected_devel = [
            "%{_bindir}/test_example\n",
            "#BEGIN THIS MODIFIED BY REBASE-HELPER\n",
            "#%{_libdir}/my_test.so\n\n",
            "#END THIS MODIFIED BY REBASE-HELPER\n",
            "\n",
        ]
        assert expected_devel == section_devel

    def test_spec_missing_from_logfile(self):
        # the missing file is parsed out of a real build.log fixture
        shutil.move("build_missing.log", "build.log")
        files = BuildLogAnalyzer.parse_log(self.WORKING_DIR, "build.log")
        self.SPEC_FILE_OBJECT.modify_spec_files_section(files)
        section = self.SPEC_FILE_OBJECT.get_spec_section("%files")
        expected = [
            "#BEGIN THIS MODIFIED BY REBASE-HELPER\n",
            "%{_bindir}/test2\n",
            "#END THIS MODIFIED BY REBASE-HELPER\n",
            "%{_bindir}/file.txt\n",
            "\n",
        ]
        assert expected == section

    def test_spec_obsolete_from_logfile(self):
        shutil.move("build_obsoletes.log", "build.log")
        files = BuildLogAnalyzer.parse_log(self.WORKING_DIR, "build.log")
        self.SPEC_FILE_OBJECT.modify_spec_files_section(files)
        section = self.SPEC_FILE_OBJECT.get_spec_section("%files")
        assert "%{_libdir}/libtest.so" not in section

    def test_is_test_suite_enabled(self):
        found = self.SPEC_FILE_OBJECT.is_test_suite_enabled()
        assert found is True

    def test_set_extra_version_some_extra_version(self):
        # setting an extra version rewrites the spec header and Source0
        self.SPEC_FILE_OBJECT.set_extra_version("b1")
        with open(self.SPEC_FILE_OBJECT.get_path()) as f:
            # 1st line
            assert f.readline() == "%global REBASE_EXTRA_VER b1\n"
            # 2nd line
            assert f.readline() == "%global REBASE_VER %{version}%{REBASE_EXTRA_VER}\n"
            while True:
                line = f.readline()
                if line == "#Source: ftp://ftp.test.org/%{name}-%{version}.tar.xz\n":
                    break
                assert line is not None
            # there is new Source0 after old commented out entry
            assert f.readline() == "Source: ftp://ftp.test.org/%{name}-%{REBASE_VER}.tar.xz\n"
        # the release number was changed
        assert self.SPEC_FILE_OBJECT.get_release_number() == "0.1"
        # the release string now contains the extra version
        match = re.search(r"([0-9.]*[0-9]+)\.b1\w*", self.SPEC_FILE_OBJECT.get_release())
        assert match is not None
        assert match.group(1) == self.SPEC_FILE_OBJECT.get_release_number()

    def test_set_extra_version_no_extra_version(self):
        # an empty extra version must not introduce the REBASE_* globals
        self.SPEC_FILE_OBJECT.set_extra_version("")
        with open(self.SPEC_FILE_OBJECT.get_path()) as f:
            # 1st line
            assert f.readline() != "%global REBASE_EXTRA_VER b1\n"
            # 2nd line
            assert f.readline() != "%global REBASE_VER %{version}%{REBASE_EXTRA_VER}\n"
        # the release number was changed
        assert self.SPEC_FILE_OBJECT.get_release_number() == "1"

    def test_redefine_release_with_macro(self):
        # the original Release line is commented out and re-added with the macro
        macro = "%{REBASE_VER}"
        self.SPEC_FILE_OBJECT.redefine_release_with_macro(macro)
        with open(self.SPEC_FILE_OBJECT.get_path()) as f:
            while f.readline() != "#Release: 33%{?dist}\n":
                pass
            assert f.readline() == "Release: 33" + "." + macro + "%{?dist}\n"

    def test_revert_redefine_release_with_macro(self):
        # redefine followed by revert must restore the original Release line
        macro = "%{REBASE_VER}"
        self.SPEC_FILE_OBJECT.redefine_release_with_macro(macro)
        self.SPEC_FILE_OBJECT.revert_redefine_release_with_macro(macro)
        with open(self.SPEC_FILE_OBJECT.get_path()) as f:
            for line in f.readlines():
                if line.startswith("Release:"):
                    assert line == "Release: 33%{?dist}\n"
                    return
        # the line has to be found, fail if not!
        assert False

    def test_get_extra_version_not_set(self):
        assert self.SPEC_FILE_OBJECT.get_extra_version() == ""

    def test_get_extra_version_set(self):
        self.SPEC_FILE_OBJECT.set_extra_version("rc1")
        assert self.SPEC_FILE_OBJECT.get_extra_version() == "rc1"

    def test_update_changelog(self):
        # new entries are prepended to the %changelog section
        changelog = []
        changelog.append("* Mon Jan 01 1970 Rebase-helper <*****@*****.**> - 1.2.3")
        changelog.append("- New upstream version 1.2.3")
        self.SPEC_FILE_OBJECT.insert_changelog(changelog)
        result = self.SPEC_FILE_OBJECT.get_spec_section("%changelog")
        assert changelog[0] == result[0]
        assert changelog[1] == result[1]

    def test_patch_macro(self):
        # rebased patch 4 gets '-p1' appended to its %patch line
        self.SPEC_FILE_OBJECT._correct_rebased_patches(["4"])
        self.SPEC_FILE_OBJECT._write_spec_file_to_disc()
        expected_patch = ["%patch4 -b .testing4 -p1\n"]
        with open(self.SPEC_FILE) as spec:
            lines = spec.readlines()
        lines = [x for x in lines if x.startswith("%patch4")]
        assert expected_patch == lines

    def test_update_setup_dirname(self):
        # matching dirname: %setup stays untouched
        prep = self.SPEC_FILE_OBJECT.get_spec_section("%prep")
        self.SPEC_FILE_OBJECT.update_setup_dirname("test-1.0.2")
        assert self.SPEC_FILE_OBJECT.get_spec_section("%prep") == prep

        # extra version without separator -> %{REBASE_VER} based dirname
        self.SPEC_FILE_OBJECT.update_setup_dirname("test-1.0.2rc1")
        prep = self.SPEC_FILE_OBJECT.get_spec_section("%prep")
        assert "%setup -q -a 5 -n %{name}-%{REBASE_VER}" in prep

        # extra version with '-' separator -> explicit version + extra macro
        self.SPEC_FILE_OBJECT.update_setup_dirname("test-1.0.2-rc1")
        prep = self.SPEC_FILE_OBJECT.get_spec_section("%prep")
        assert "%setup -q -a 5 -n %{name}-%{version}-%{REBASE_EXTRA_VER}" in prep

    def test_find_archive_target_in_prep(self):
        # Source5 is unpacked via '%setup -a 5' into the main directory,
        # Source6 via the explicit 'tar -xf ... -C misc'
        target = self.SPEC_FILE_OBJECT.find_archive_target_in_prep("documentation.tar.xz")
        assert target == "test-1.0.2"
        target = self.SPEC_FILE_OBJECT.find_archive_target_in_prep("misc.zip")
        assert target == "test-1.0.2/misc"
Exemplo n.º 34
0
 def spec_object(self, workdir):
     """Build a SpecFile for the test spec located in *workdir*."""
     return SpecFile(
         self.SPEC_FILE,
         'Update to %{version}',
         workdir,
         download=False,
     )
Exemplo n.º 35
0
 def setup(self):
     """Prepare the working directory and parse the test spec file."""
     super(TestSpecFile, self).setup()
     self.SPEC_FILE_OBJECT = SpecFile(
         self.SPEC_FILE, self.WORKING_DIR, download=False)
Exemplo n.º 36
0
class TestSpecFile(BaseTest):
    """ SpecFile tests """
    # Fixture metadata: the bundled test.spec describes package 'test' 1.0.2.
    NAME = 'test'
    VERSION = '1.0.2'
    OLD_ARCHIVE = NAME + '-' + VERSION + '.tar.xz'
    SPEC_FILE = 'test.spec'
    SOURCE_0 = 'test-source.sh'
    SOURCE_1 = 'source-tests.sh'
    SOURCE_2 = ''
    SOURCE_4 = 'file.txt.bz2'
    SOURCE_5 = 'documentation.tar.xz'
    PATCH_1 = 'test-testing.patch'
    PATCH_2 = 'test-testing2.patch'
    PATCH_3 = 'test-testing3.patch'
    PATCH_4 = 'test-testing4.patch'
    BUILD_MISSING_LOG = 'build_missing.log'
    BUILD_OBSOLETES_LOG = 'build_obsoletes.log'

    # Files copied into the working directory before each test runs.
    TEST_FILES = [
        SPEC_FILE,
        PATCH_1,
        PATCH_2,
        PATCH_3,
        PATCH_4,
        BUILD_MISSING_LOG,
        BUILD_OBSOLETES_LOG
    ]

    def setup(self):
        # Build the SpecFile fixture without downloading any sources.
        super(TestSpecFile, self).setup()
        self.SPEC_FILE_OBJECT = SpecFile(self.SPEC_FILE, self.WORKING_DIR, download=False)

    def test_get_release(self):
        """The numeric part of the release string equals get_release_number()."""
        match = re.search(r'([0-9.]*[0-9]+)\w*', self.SPEC_FILE_OBJECT.get_release())
        assert match is not None
        assert match.group(1) == self.SPEC_FILE_OBJECT.get_release_number()

    def test_get_release_number(self):
        # The fixture spec declares Release: 33%{?dist}.
        assert self.SPEC_FILE_OBJECT.get_release_number() == '33'

    def test_set_release_number(self):
        """Release number can be set to both float and int values."""
        self.SPEC_FILE_OBJECT.set_release_number(0.1)
        assert self.SPEC_FILE_OBJECT.get_release_number() == '0.1'
        self.SPEC_FILE_OBJECT.set_release_number(22)
        assert self.SPEC_FILE_OBJECT.get_release_number() == '22'

    def test_get_version(self):
        assert self.SPEC_FILE_OBJECT.get_version() == self.VERSION

    def test_set_version(self):
        NEW_VERSION = '1.2.3.4.5'
        self.SPEC_FILE_OBJECT.set_version(NEW_VERSION)
        self.SPEC_FILE_OBJECT.save()
        assert self.SPEC_FILE_OBJECT.get_version() == NEW_VERSION

    def test_set_version_using_archive(self):
        """Version can be derived from an archive file name."""
        NEW_VERSION = '1.2.3.4.5'
        ARCHIVE_NAME = 'test-{0}.tar.xz'.format(NEW_VERSION)
        self.SPEC_FILE_OBJECT.set_version_using_archive(ARCHIVE_NAME)
        self.SPEC_FILE_OBJECT.save()
        assert self.SPEC_FILE_OBJECT.get_version() == NEW_VERSION

    def test_get_package_name(self):
        assert self.SPEC_FILE_OBJECT.get_package_name() == self.NAME

    def test__write_spec_file_to_disc(self):
        """Whatever is in spec_content is written verbatim to disk."""
        new_content = [
            'testing line 1\n',
            'testing line 2\n'
        ]
        self.SPEC_FILE_OBJECT.spec_content = new_content
        self.SPEC_FILE_OBJECT._write_spec_file_to_disc()
        with open(self.SPEC_FILE) as spec:
            assert new_content == spec.readlines()

    def test__get_raw_source_string(self):
        """Raw (unexpanded) SourceN values; missing indexes yield None."""
        assert self.SPEC_FILE_OBJECT._get_raw_source_string(0) == 'ftp://ftp.test.org/%{name}-%{version}.tar.xz'
        assert self.SPEC_FILE_OBJECT._get_raw_source_string(1) == 'source-tests.sh'
        assert self.SPEC_FILE_OBJECT._get_raw_source_string(2) == 'ftp://test.com/test-source.sh'
        # Source3 is commented out in the fixture spec.
        assert self.SPEC_FILE_OBJECT._get_raw_source_string(3) is None

    def test_old_tarball(self):
        assert self.SPEC_FILE_OBJECT.get_archive() == self.OLD_ARCHIVE

    def test_get_sources(self):
        """get_sources/get_archives return all sources resp. only archives."""
        sources = [self.SOURCE_0, self.SOURCE_1, self.SOURCE_4, self.SOURCE_5, self.OLD_ARCHIVE]
        sources = [os.path.join(self.WORKING_DIR, f) for f in sources]
        archives = [self.OLD_ARCHIVE, self.SOURCE_4, self.SOURCE_5]
        assert len(set(sources).intersection(set(self.SPEC_FILE_OBJECT.get_sources()))) == 5
        # The Source0 has to be always in the beginning
        assert self.SPEC_FILE_OBJECT.get_archive() == 'test-1.0.2.tar.xz'
        assert self.SPEC_FILE_OBJECT.get_archives() == archives

    def test_get_patches(self):
        """Patches are returned in declaration order with their indexes."""
        expected_patches = {0: [os.path.join(self.WORKING_DIR, self.PATCH_1), 0],
                            1: [os.path.join(self.WORKING_DIR, self.PATCH_2), 1],
                            2: [os.path.join(self.WORKING_DIR, self.PATCH_3), 2],
                            3: [os.path.join(self.WORKING_DIR, self.PATCH_4), 3]}
        patches = {}
        for index, p in enumerate(self.SPEC_FILE_OBJECT.get_patches()):
            patches[index] = [p.get_path(), p.get_index()]
        assert patches == expected_patches

    def test_get_requires(self):
        expected = set(['openssl-devel', 'pkgconfig', 'texinfo', 'gettext', 'autoconf'])
        req = self.SPEC_FILE_OBJECT.get_requires()
        assert len(expected.intersection(req)) == len(expected)

    def test_get_paths_with_rpm_macros(self):
        """Well-known filesystem paths are translated to RPM path macros."""
        raw_paths = ['/usr/bin/binary1',
                     '/usr/sbin/binary2',
                     '/usr/include/header.h',
                     '/usr/lib/library1.so',
                     '/usr/lib64/library2.so',
                     '/usr/libexec/script.sh',
                     '/usr/lib/systemd/system/daemond.service',
                     '/usr/share/man/man1/test.1.gz',
                     '/usr/share/info/file.info',
                     '/usr/share/doc/RFC.pdf',
                     '/usr/share/config.site',
                     '/var/lib/libvirt',
                     '/var/tmp/abrt',
                     '/var/lock']

        expected_paths = set(['%{_bindir}/binary1',
                              '%{_sbindir}/binary2',
                              '%{_includedir}/header.h',
                              '%{_libdir}/library1.so',
                              '%{_libdir}/library2.so',
                              '%{_libexecdir}/script.sh',
                              '%{_unitdir}/daemond.service',
                              '%{_mandir}/man1/test.1.gz',
                              '%{_infodir}/file.info',
                              '%{_docdir}/RFC.pdf',
                              '%{_datarootdir}/config.site',
                              '%{_sharedstatedir}/libvirt',
                              '%{_tmppath}/abrt',
                              '%{_localstatedir}/lock'])
        paths = SpecFile.get_paths_with_rpm_macros(raw_paths)
        assert len(set(paths)) == len(expected_paths)
        assert len(expected_paths.intersection(set(paths))) == len(expected_paths)

    def test_split_version_string(self):
        """Version strings split into (version, extra_version) tuples."""
        assert SpecFile.split_version_string() == (None, None)
        assert SpecFile.split_version_string('1.0.1') == ('1.0.1', '')
        assert SpecFile.split_version_string('1.0.1b1') == ('1.0.1', 'b1')
        assert SpecFile.split_version_string('1.0.1rc1') == ('1.0.1', 'rc1')

    def test_extract_version_from_archive_name(self):
        # Basic tests
        assert SpecFile.extract_version_from_archive_name('test-1.0.1.tar.gz') == ('1.0.1', '')
        assert SpecFile.extract_version_from_archive_name('/home/user/test-1.0.1.tar.gz') == ('1.0.1', '')
        assert SpecFile.extract_version_from_archive_name('test-1.0.1.tar.gz',
                                                          'ftp://ftp.test.org/test-%{version}.tar.gz') == ('1.0.1', '')
        assert SpecFile.extract_version_from_archive_name('/home/user/test-1.0.1.tar.gz',
                                                          'ftp://ftp.test.org/test-%{version}.tar.gz') == ('1.0.1', '')
        # Real world tests
        assert SpecFile.extract_version_from_archive_name('cups-1.7.5-source.tar.bz2',
                                                          'http://www.cups.org/software/%{version}/cups-%{version}-source.tar.bz2') == ('1.7.5', '')
        # the 'rc1' can't be in the version number
        assert SpecFile.extract_version_from_archive_name('bind-9.9.5rc2.tar.gz',
                                                          'ftp://ftp.isc.org/isc/bind9/%{VERSION}/bind-%{VERSION}.tar.gz') == ('9.9.5', 'rc2')
        assert SpecFile.extract_version_from_archive_name('dnsmasq-2.69rc1.tar.xz',
                                                          'http://www.thekelleys.org.uk/dnsmasq/%{?extrapath}%{name}-%{version}%{?extraversion}.tar.xz') == ('2.69', 'rc1')

    def test__split_sections(self):
        """The fixture spec splits into the expected ordered sections."""
        expected_sections = {
            0: ['%header', ['Summary: A testing spec file\n',
                            'Name: test\n',
                            'Version: 1.0.2\n',
                            'Release: 33%{?dist}\n',
                            'License: GPL2+\n',
                            'Group: System Environment\n',
                            'URL: http://testing.org\n',
                            '\n',
                            '# Note: non-current tarballs get moved to the history/ subdirectory,\n',
                            '# so look there if you fail to retrieve the version you want\n',
                            'Source: ftp://ftp.test.org/%{name}-%{version}.tar.xz\n',
                            'Source1: source-tests.sh\n',
                            'Source2: ftp://test.com/test-source.sh\n',
                            '#Source3: source-tests.sh\n',
                            'Source4: file.txt.bz2\n',
                            'Source5: documentation.tar.xz\n',
                            'Patch1: test-testing.patch\n',
                            'Patch2: test-testing2.patch\n',
                            'Patch3: test-testing3.patch\n',
                            'Patch4: test-testing4.patch\n',
                            '\n',
                            'BuildRequires: openssl-devel, pkgconfig, texinfo, gettext, autoconf\n',
                            '\n']],
            1: ['%description', ['Testing spec file\n',
                                 '\n']],
            2: ['%package devel', ['Summary: A testing devel package\n',
                                   '\n']],
            3: ['%description devel', ['Testing devel spec file\n',
                                       '\n']],
            4: ['%prep', ['%setup -q\n',
                          '%patch1\n',
                          '%patch2 -p1\n',
                          '%patch3 -p1 -b .testing3\n',
                          '%patch4 -p0 -b .testing4\n',
                          '\n']],
            5: ['%build', ['autoreconf -vi\n',
                           '\n',
                           '%configure\n',
                           'make TEST\n',
                           '\n']],
            6: ['%install', ['make DESTDIR=$RPM_BUILD_ROOT install\n',
                             '\n']],
            7: ['%check', ['#to run make check use "--with check"\n',
                           '%if %{?_with_check:1}%{!?_with_check:0}\n',
                           'make check\n',
                           '%endif\n',
                           '\n']],
            8: ['%files', ['%{_bindir}/file.txt\n',
                           '\n']],
            9: ['%files devel', ['%{_bindir}/test_example\n',
                                 '%{_libdir}/my_test.so\n',
                                 '\n']],
            10: ['%changelog', ['* Wed Nov 12 2014 Tomas Hozza <*****@*****.**> 1.0.0-33\n',
                                '- Bump the release for testing purposes\n',
                                '\n',
                                '* Tue Sep 24 2013 Petr Hracek <*****@*****.**> 1.0.0-1\n',
                                '- Initial version\n',
                                '\n']]
        }
        sections = self.SPEC_FILE_OBJECT._split_sections()
        for key, value in six.iteritems(expected_sections):
            assert sections[key][0] == value[0]
            assert sections[key][1] == value[1]

    def test_get_spec_section(self):
        expected_section = ['%{_bindir}/file.txt\n',
                            '\n']
        section = self.SPEC_FILE_OBJECT.get_spec_section('%files')
        assert section == expected_section

    def test_spec_missing_file(self):
        """A file missing from %files gets added with the proper macro path."""
        files = {'missing': ['/usr/bin/test2']}
        self.SPEC_FILE_OBJECT.modify_spec_files_section(files)
        section = self.SPEC_FILE_OBJECT.get_spec_section('%files')
        assert '%{_bindir}/test2' in section

    def test_spec_remove_file(self):
        """A deleted file is dropped from the relevant %files section."""
        files = {'deleted': ['/usr/lib/test.so']}
        self.SPEC_FILE_OBJECT.modify_spec_files_section(files)
        section = self.SPEC_FILE_OBJECT.get_spec_section('%files devel')
        assert '%{_libdir}/test.so' not in section

    def test_spec_missing_and_remove_file(self):
        """Additions and removals can be applied in a single call."""
        files = {'missing': ['/usr/bin/test2'],
                 'deleted': ['/usr/lib/test.so']}
        self.SPEC_FILE_OBJECT.modify_spec_files_section(files)
        section = self.SPEC_FILE_OBJECT.get_spec_section('%files')
        assert '%{_bindir}/test2' in section
        section = self.SPEC_FILE_OBJECT.get_spec_section('%files devel')
        assert '%{_libdir}/test.so' not in section

    def test_spec_missing_from_logfile(self):
        """Missing files detected in a build log are added to %files."""
        shutil.move('build_missing.log', 'build.log')
        files = BuildLogAnalyzer.parse_log(self.WORKING_DIR, 'build.log')
        self.SPEC_FILE_OBJECT.modify_spec_files_section(files)
        section = self.SPEC_FILE_OBJECT.get_spec_section('%files')
        assert '%{_bindir}/test2' in section

    def test_spec_obsolete_from_logfile(self):
        """Obsolete files detected in a build log are removed from %files."""
        shutil.move('build_obsoletes.log', 'build.log')
        files = BuildLogAnalyzer.parse_log(self.WORKING_DIR, 'build.log')
        self.SPEC_FILE_OBJECT.modify_spec_files_section(files)
        section = self.SPEC_FILE_OBJECT.get_spec_section('%files')
        assert '%{_libdir}/libtest.so' not in section

    def test_is_test_suite_enabled(self):
        found = self.SPEC_FILE_OBJECT.is_test_suite_enabled()
        assert found is True

    def test_set_extra_version_some_extra_version(self):
        """Setting an extra version rewrites macros, Source0 and the release."""
        self.SPEC_FILE_OBJECT.set_extra_version('b1')
        with open(self.SPEC_FILE_OBJECT.get_path()) as f:
            # 1st line
            assert f.readline() == '%global REBASE_EXTRA_VER b1\n'
            # 2nd line
            assert f.readline() == '%global REBASE_VER %{version}%{REBASE_EXTRA_VER}\n'
            while True:
                line = f.readline()
                if line == '#Source: ftp://ftp.test.org/%{name}-%{version}.tar.xz\n':
                    break
                assert line is not None
            # there is new Source0 after old commented out entry
            assert f.readline() == 'Source: ftp://ftp.test.org/%{name}-%{REBASE_VER}.tar.xz\n'
        # the release number was changed
        assert self.SPEC_FILE_OBJECT.get_release_number() == '0.1'
        # the release string now contains the extra version
        match = re.search(r'([0-9.]*[0-9]+)b1\w*', self.SPEC_FILE_OBJECT.get_release())
        assert match is not None
        assert match.group(1) == self.SPEC_FILE_OBJECT.get_release_number()

    def test_set_extra_version_no_extra_version(self):
        """An empty extra version adds no macros and resets release to 1."""
        self.SPEC_FILE_OBJECT.set_extra_version('')
        with open(self.SPEC_FILE_OBJECT.get_path()) as f:
            # 1st line
            assert f.readline() != '%global REBASE_EXTRA_VER b1\n'
            # 2nd line
            assert f.readline() != '%global REBASE_VER %{version}%{REBASE_EXTRA_VER}\n'
        # the release number was changed
        assert self.SPEC_FILE_OBJECT.get_release_number() == '1'

    def test_redefine_release_with_macro(self):
        """The Release tag is commented out and redefined with the macro."""
        macro = '%{REBASE_VER}'
        self.SPEC_FILE_OBJECT.redefine_release_with_macro(macro)
        with open(self.SPEC_FILE_OBJECT.get_path()) as f:
            while f.readline() != '#Release: 33%{?dist}\n':
                pass
            assert f.readline() == 'Release: 33' + macro + '%{?dist}\n'

    def test_revert_redefine_release_with_macro(self):
        """Reverting the redefinition restores the original Release tag."""
        macro = '%{REBASE_VER}'
        self.SPEC_FILE_OBJECT.redefine_release_with_macro(macro)
        self.SPEC_FILE_OBJECT.revert_redefine_release_with_macro(macro)
        with open(self.SPEC_FILE_OBJECT.get_path()) as f:
            for line in f.readlines():
                if line.startswith('Release:'):
                    assert line == 'Release: 33%{?dist}\n'
                    return
        # the line has to be found, fail if not!
        assert False

    def test_get_extra_version_not_set(self):
        assert self.SPEC_FILE_OBJECT.get_extra_version() == ''

    def test_get_extra_version_set(self):
        self.SPEC_FILE_OBJECT.set_extra_version('rc1')
        assert self.SPEC_FILE_OBJECT.get_extra_version() == 'rc1'

    def test_update_changelog(self):
        """Inserted changelog entries appear at the top of %changelog."""
        changelog = []
        changelog.append('* Mon Jan 01 1970 Rebase-helper <*****@*****.**> - 1.2.3')
        changelog.append('- New upstream version 1.2.3')
        self.SPEC_FILE_OBJECT.insert_changelog(changelog)
        result = self.SPEC_FILE_OBJECT.get_spec_section('%changelog')
        assert changelog[0] == result[0]
        assert changelog[1] == result[1]

    def test_patch_macro(self):
        """Rebasing patch 4 rewrites its %patch line in the spec on disk."""
        self.SPEC_FILE_OBJECT._correct_rebased_patches(['4'])
        self.SPEC_FILE_OBJECT._write_spec_file_to_disc()
        expected_patch = ['%patch4 -b .testing4 -p1\n']
        with open(self.SPEC_FILE) as spec:
            lines = spec.readlines()
        lines = [x for x in lines if x.startswith('%patch4')]
        assert expected_patch == lines

    def test_update_setup_dirname(self):
        """update_setup_dirname only rewrites %setup when the dirname changes."""
        prep = self.SPEC_FILE_OBJECT.get_spec_section('%prep')
        self.SPEC_FILE_OBJECT.update_setup_dirname('test-1.0.2')
        assert self.SPEC_FILE_OBJECT.get_spec_section('%prep') == prep

        # Extra version fused to the version: REBASE_VER macro is used.
        self.SPEC_FILE_OBJECT.update_setup_dirname('test-1.0.2rc1')
        prep = self.SPEC_FILE_OBJECT.get_spec_section('%prep')
        assert '%setup -q -n %{name}-%{REBASE_VER}' in prep

        # Extra version separated by a dash: REBASE_EXTRA_VER macro is used.
        self.SPEC_FILE_OBJECT.update_setup_dirname('test-1.0.2-rc1')
        prep = self.SPEC_FILE_OBJECT.get_spec_section('%prep')
        assert '%setup -q -n %{name}-%{version}-%{REBASE_EXTRA_VER}' in prep
Exemplo n.º 37
0
class Application:
    def __init__(self,
                 cli_conf: Config,
                 start_dir: str,
                 execution_dir: str,
                 results_dir: str,
                 create_logs: bool = True) -> None:
        """Initializes the application.

        Args:
            cli_conf: Application configuration.
            start_dir: Directory where rebase-helper was started.
            execution_dir: Working directory.
            results_dir: Location of rebase results.
            create_logs: Whether to create default logging file handlers.

        """
        # Drop any state left over from a previous run in this process.
        results_store.clear()

        # Initialize instance attributes
        self.old_sources = ''
        self.new_sources = ''
        self.old_rest_sources: List[str] = []
        self.new_rest_sources: List[str] = []
        self.rebased_patches: Dict[str, List[str]] = {}
        self.rebased_repo: Optional[git.Repo] = None

        self.handlers = LoggerHelper.create_file_handlers(
            results_dir) if create_logs else []

        self.conf = cli_conf
        self.start_dir = start_dir
        self.execution_dir = execution_dir
        self.rebased_sources_dir = os.path.join(results_dir, 'rebased-sources')

        # kwargs are passed down to builders, checkers and plugins.
        self.kwargs: Dict[str, Any] = {}
        self.kwargs.update(self.conf.config)
        # Temporary workspace for Builder, checks, ...
        workspace_location = os.path.abspath(
            cli_conf.workspace_dir
        ) if cli_conf.workspace_dir else self.execution_dir
        self.kwargs['workspace_dir'] = self.workspace_dir = os.path.join(
            workspace_location, constants.WORKSPACE_DIR)

        # Directory where results should be put
        self.kwargs['results_dir'] = self.results_dir = results_dir
        # Directory contaning only those files, which are relevant for the new rebased version
        self.kwargs['rebased_sources_dir'] = self.rebased_sources_dir

        self.spec_file_path = self._find_spec_file()
        self._prepare_spec_objects()

        if self.conf.build_tasks is None:
            # check the workspace dir
            self._check_workspace_dir()

            # verify all sources for the new version are present
            missing_sources = [
                os.path.basename(s) for s in self.rebase_spec_file.sources
                if not os.path.isfile(os.path.basename(s))
            ]
            if missing_sources:
                raise RebaseHelperError(
                    'The following sources are missing: {}'.format(
                        ','.join(missing_sources)))

            if self.conf.update_sources:
                # Push old and rebased sources to the lookaside cache.
                sources = [os.path.basename(s) for s in self.spec_file.sources]
                rebased_sources = [
                    os.path.basename(s) for s in self.rebase_spec_file.sources
                ]
                uploaded = LookasideCacheHelper.update_sources(
                    'fedpkg',
                    self.rebased_sources_dir,
                    self.rebase_spec_file.header.name,
                    sources,
                    rebased_sources,
                    upload=not self.conf.skip_upload)
                self._update_gitignore(uploaded, self.rebased_sources_dir)

            self._initialize_data()

    def __del__(self):
        # Detach the file handlers created in __init__ so log files are
        # closed when the application object is garbage-collected.
        LoggerHelper.remove_file_handlers(self.handlers)

    @staticmethod
    def setup(cli_conf):
        """Resolve working directories from the CLI configuration.

        Returns an (execution_dir, results_dir) tuple; the results
        directory tree is (re)created as a side effect.
        """
        execution_dir = os.getcwd()
        if cli_conf.results_dir:
            base_dir = os.path.abspath(cli_conf.results_dir)
        else:
            base_dir = execution_dir
        results_dir = os.path.join(base_dir, constants.RESULTS_DIR)

        # if not continuing, check the results dir
        Application._check_results_dir(results_dir)

        return execution_dir, results_dir

    def _prepare_spec_objects(self):
        """
        Prepare spec files and initialize objects

        Creates self.spec_file (current version) and self.rebase_spec_file
        (rebased copy), determines the target version, updates the rebased
        spec accordingly, runs spec hooks and downloads remote sources.

        :return:
        """
        self.spec_file = SpecFile(self.spec_file_path, self.execution_dir,
                                  self.kwargs['rpmmacros'])
        # Check whether test suite is enabled at build time
        if not self.spec_file.is_test_suite_enabled():
            results_store.set_info_text(
                'WARNING', 'Test suite is not enabled at build time.')
        # create an object representing the rebased SPEC file
        self.rebase_spec_file = self.spec_file.copy(
            get_rebase_name(self.rebased_sources_dir, self.spec_file_path))

        # No explicit SOURCES argument: ask the versioneer plugins.
        if not self.conf.sources:
            self.conf.sources = plugin_manager.versioneers.run(
                self.conf.versioneer, self.spec_file.header.name,
                self.spec_file.category, self.conf.versioneer_blacklist)
            if self.conf.sources:
                logger.info("Determined latest upstream version '%s'",
                            self.conf.sources)
            else:
                raise RebaseHelperError(
                    'Could not determine latest upstream version '
                    'and no SOURCES argument specified!')

        # Prepare rebased_sources_dir
        self.rebased_repo = self._prepare_rebased_repository()

        # check if argument passed as new source is a file or just a version
        if [
                True for ext in Archive.get_supported_archives()
                if self.conf.sources.endswith(ext)
        ]:
            logger.verbose("argument passed as a new source is a file")
            version_string = self.spec_file.extract_version_from_archive_name(
                self.conf.sources, self.spec_file.get_main_source())
        else:
            logger.verbose("argument passed as a new source is a version")
            version_string = self.conf.sources
        version, extra_version = SpecFile.split_version_string(
            version_string, self.spec_file.header.version)
        self.rebase_spec_file.set_version(version)
        self.rebase_spec_file.set_extra_version(
            extra_version, version != self.spec_file.header.version)

        # Compare old and new (version, extra version) to reject downgrades
        # and no-op rebases.
        oldver = parse_version(self.spec_file.header.version)
        newver = parse_version(self.rebase_spec_file.header.version)
        oldex = self.spec_file.parse_release()[2]
        newex = extra_version

        if not self.conf.skip_version_check and (newver < oldver or
                                                 (newver == oldver
                                                  and newex == oldex)):
            raise RebaseHelperError(
                "Current version is equal to or newer than the requested version, nothing to do."
            )

        self.rebase_spec_file.update_changelog(self.conf.changelog_entry)

        # run spec hooks
        plugin_manager.spec_hooks.run(self.spec_file, self.rebase_spec_file,
                                      **self.kwargs)

        # spec file object has been sanitized downloading can proceed
        if not self.conf.not_download_sources:
            for spec_file in [self.spec_file, self.rebase_spec_file]:
                spec_file.download_remote_sources()
                # parse spec again with sources downloaded to properly expand %prep section
                spec_file.update()
        # all local sources have been downloaded; we can check for name changes
        self._sanitize_sources()

    def _sanitize_sources(self) -> None:
        """Renames local sources whose name changed after version bump.

        For example if the specfile contains a Patch such as %{name}-%{version}.patch,
        the name changes after changing the version and the rebase would fail due to
        a missing patch whilst building SRPM.

        This method tries to correct such cases and rename the local file to match
        the new name. Modifies the rebase_spec_file object to contain the correct
        paths.
        """
        for source, index, source_type in self.rebase_spec_file.spc.sources:
            # Skip remote sources and source_type == 0 entries.
            # NOTE(review): presumably 0 marks the main source in rpm's
            # source tuples - confirm against the rpm python bindings.
            if urllib.parse.urlparse(source).scheme or source_type == 0:
                continue
            # Nothing to do if the file already exists under the new name.
            if os.path.exists(os.path.join(self.execution_dir, source)):
                continue
            # Find matching source in the old spec
            sources = [
                n for n, i, t in self.spec_file.spc.sources
                if i == index and t == source_type
            ]
            if not sources:
                logger.error(
                    'Failed to find the source corresponding to %s in old version spec',
                    source)
                continue
            source_old = sources[0]

            # rename the source
            old_source_path = os.path.join(self.rebased_sources_dir,
                                           source_old)
            new_source_path = os.path.join(self.rebased_sources_dir, source)
            logger.debug('Renaming %s to %s', old_source_path, new_source_path)
            try:
                os.rename(old_source_path, new_source_path)
            except OSError:
                # Best effort: log and continue with the remaining sources.
                logger.error(
                    'Failed to rename %s to %s while sanitizing sources',
                    old_source_path, new_source_path)

            # prepend the Source/Path with rebased-sources directory in the specfile
            to_prepend = os.path.relpath(self.rebased_sources_dir,
                                         self.execution_dir)
            tag = '{0}{1}'.format('Patch' if source_type == 2 else 'Source',
                                  index)
            value = self.rebase_spec_file.get_raw_tag_value(tag)
            self.rebase_spec_file.set_raw_tag_value(
                tag, os.path.join(to_prepend, value))
        self.rebase_spec_file.save()

    def _initialize_data(self):
        """Resolve absolute paths of the old and new sources.

        Raises:
            RebaseHelperError: If no new sources are defined.
        """
        # Main (Source0) archive of the old spec, as an absolute path.
        self.old_sources = os.path.abspath(self.spec_file.get_archive())

        # Prefer the archive referenced by the rebased spec, if any.
        rebased_archive = self.rebase_spec_file.get_archive()
        if rebased_archive:
            self.conf.sources = rebased_archive

        if not self.conf.sources:
            raise RebaseHelperError('You have to define new sources.')
        self.new_sources = os.path.abspath(self.conf.sources)

        # Everything except the main source, as absolute paths.
        self.old_rest_sources = [
            os.path.abspath(src) for src in self.spec_file.get_sources()[1:]
        ]
        self.new_rest_sources = [
            os.path.abspath(src) for src in self.rebase_spec_file.get_sources()[1:]
        ]

    def _find_spec_file(self) -> str:
        """Finds a spec file in the execution_dir directory.

        Returns:
            Path to the spec file.

        Raises:
            RebaseHelperError: If no spec file could be found.

        """
        found = PathHelper.find_first_file(self.execution_dir, '*.spec', 0)
        if found:
            return found
        raise RebaseHelperError("Could not find any SPEC file "
                                "in the current directory '{}'".format(
                                    self.execution_dir))

    def _delete_old_builds(self):
        """
        Deletes the old and new result dir from previous build

        :return:
        """
        # Remove the new-build directory first, then the old-build one.
        self._delete_new_results_dir()
        self._delete_old_results_dir()

    def _delete_old_results_dir(self):
        """Remove the old-build result directory if it exists."""
        old_build_dir = os.path.join(self.results_dir, constants.OLD_BUILD_DIR)
        if os.path.isdir(old_build_dir):
            shutil.rmtree(old_build_dir)

    def _delete_new_results_dir(self):
        """Remove the new-build result directory if it exists."""
        new_build_dir = os.path.join(self.results_dir, constants.NEW_BUILD_DIR)
        if os.path.isdir(new_build_dir):
            shutil.rmtree(new_build_dir)

    def _delete_workspace_dir(self):
        """Remove the workspace directory, logging the action first."""
        workspace = self.workspace_dir
        logger.verbose("Removing the workspace directory '%s'",
                       workspace)
        if os.path.isdir(workspace):
            shutil.rmtree(workspace)

    def _check_workspace_dir(self):
        """Create a fresh workspace directory, removing any leftover one."""
        workspace = self.workspace_dir
        if os.path.exists(workspace):
            logger.warning("Workspace directory '%s' exists, removing it",
                           os.path.basename(workspace))
            self._delete_workspace_dir()
        os.makedirs(workspace)

    @staticmethod
    def _check_results_dir(results_dir):
        """Create a fresh results directory tree, removing any leftover one."""
        # TODO: We may not want to delete the directory in the future
        if os.path.exists(results_dir):
            logger.warning("Results directory '%s' exists, removing it",
                           os.path.basename(results_dir))
            shutil.rmtree(results_dir)
        os.makedirs(results_dir)
        # Pre-create the standard sub-directory layout.
        for subdir in (constants.OLD_BUILD_DIR,
                       constants.NEW_BUILD_DIR,
                       constants.CHECKERS_DIR,
                       constants.REBASED_SOURCES_DIR):
            os.makedirs(os.path.join(results_dir, subdir))

    @staticmethod
    def extract_archive(archive_path, destination):
        """Extract an archive into the destination directory.

        Translates low-level extraction failures into RebaseHelperError.

        :param archive_path: path to the archive to be extracted
        :param destination: path to a destination, where the archive should be extracted to
        :return:
        """
        archive = Archive(archive_path)
        try:
            archive.extract_archive(destination)
        except IOError as exc:
            raise RebaseHelperError("Archive '{}' can not be extracted".format(
                archive_path)) from exc
        except (EOFError, SystemError) as exc:
            raise RebaseHelperError(
                "Archive '{}' is damaged".format(archive_path)) from exc

    @staticmethod
    def extract_sources(archive_path, destination):
        """Extract the given archive and return the directory holding the sources."""
        try:
            Application.extract_archive(archive_path, destination)
        except NotImplementedError:
            # Unknown archive type - fall back to copying the file verbatim.
            os.makedirs(destination)
            shutil.copy(archive_path, destination)

        entries = os.listdir(destination)
        if not entries:
            raise RebaseHelperError('Extraction of sources failed!')

        if len(entries) == 1:
            # A single entry that is a directory is assumed to be the
            # archive's top-level source directory.
            candidate = os.path.join(destination, entries[0])
            if os.path.isdir(candidate):
                return candidate

        # Archive without a top-level directory.
        return destination

    def prepare_sources(self):
        """
        Extract old and new source archives into the workspace.

        Determines the top-level directories of both archives, descends into
        the dirname used by %setup when it is a nested path, updates the
        rebased SPEC file when the new top-level directory differs, and
        finally extracts the remaining source archives to the locations
        referenced in %prep.

        :return: list [old_dir, new_dir] with paths to the extracted sources
        """

        old_sources_dir = os.path.join(self.workspace_dir,
                                       constants.OLD_SOURCES_DIR)
        new_sources_dir = os.path.join(self.workspace_dir,
                                       constants.NEW_SOURCES_DIR)

        old_dir = Application.extract_sources(self.old_sources,
                                              old_sources_dir)
        new_dir = Application.extract_sources(self.new_sources,
                                              new_sources_dir)

        # top-level directories of each archive, relative to the
        # extraction roots ('.' when there is no top-level directory)
        old_tld = os.path.relpath(old_dir, old_sources_dir)
        new_tld = os.path.relpath(new_dir, new_sources_dir)

        dirname = self.spec_file.get_setup_dirname()

        if dirname and os.sep in dirname:
            # %setup uses a nested path - descend into it when its first
            # component matches the archive's top-level directory
            dirs = os.path.split(dirname)
            if old_tld == dirs[0]:
                old_dir = os.path.join(old_dir, *dirs[1:])
            if new_tld == dirs[0]:
                new_dir = os.path.join(new_dir, *dirs[1:])

        new_dirname = os.path.relpath(new_dir, new_sources_dir)

        # '.' means the new archive has no top-level directory - nothing
        # to update in the SPEC file
        if new_dirname != '.':
            self.rebase_spec_file.update_setup_dirname(new_dirname)

        # extract rest of source archives to correct paths
        rest_sources = [self.old_rest_sources, self.new_rest_sources]
        spec_files = [self.spec_file, self.rebase_spec_file]
        sources_dirs = [old_sources_dir, new_sources_dir]
        for sources, spec_file, sources_dir in zip(rest_sources, spec_files,
                                                   sources_dirs):
            for rest in sources:
                # only files with a supported archive extension are handled
                archive = [
                    x for x in Archive.get_supported_archives()
                    if rest.endswith(x)
                ]
                if archive:
                    dest_dir = spec_file.find_archive_target_in_prep(rest)
                    if dest_dir:
                        Application.extract_sources(
                            rest, os.path.join(sources_dir, dest_dir))

        return [old_dir, new_dir]

    def patch_sources(self, sources):
        """Apply the package's patches on top of the new sources.

        Rebased patches are written back to the SPEC file and recorded
        in the results store.
        """
        old_dir, new_dir = sources
        try:
            self.rebased_patches = Patcher.patch(
                old_dir, new_dir, self.old_rest_sources,
                self.spec_file.get_applied_patches(), **self.kwargs)
        except RuntimeError as err:
            raise RebaseHelperError('Patching failed') from err
        self.rebase_spec_file.write_updated_patches(
            self.rebased_patches, self.conf.disable_inapplicable_patches)
        results_store.set_patches_results(self.rebased_patches)

    def generate_patch(self):
        """
        Generates patch to the results_dir containing all needed changes for
        the rebased package version
        """
        # Delete removed patches from rebased_sources_dir from git
        removed_patches = self.rebase_spec_file.removed_patches
        if removed_patches:
            self.rebased_repo.index.remove(removed_patches, working_tree=True)

        self.rebase_spec_file.update_paths_to_sources_and_patches()

        # Generate patch
        self.rebased_repo.git.add(all=True)
        self.rebase_spec_file.update()
        # NOTE(review): changelog_entry is passed both as the string to
        # expand and as its own fallback - confirm this is intentional
        self.rebased_repo.index.commit(
            MacroHelper.expand(self.conf.changelog_entry,
                               self.conf.changelog_entry))
        # format-patch of the last commit; stdout_as_string=False yields bytes
        patch = self.rebased_repo.git.format_patch('-1',
                                                   stdout=True,
                                                   stdout_as_string=False)
        with open(os.path.join(self.results_dir, constants.CHANGES_PATCH),
                  'wb') as f:
            f.write(patch)
            f.write(b'\n')

        results_store.set_changes_patch(
            'changes_patch',
            os.path.join(self.results_dir, constants.CHANGES_PATCH))

    @classmethod
    def _update_gitignore(cls, sources, rebased_sources_dir):
        """Adds new entries into .gitignore file.

        Entries already matched by an existing pattern are skipped.

        Args:
            sources (list): List of new source files.
            rebased_sources_dir (str): Target directory.

        """
        gitignore = os.path.join(rebased_sources_dir, '.gitignore')
        if not os.path.isfile(gitignore):
            return

        with open(gitignore) as f:
            patterns = f.readlines()

        def is_ignored(source):
            # normalize both sides: drop leading separators and newlines
            name = source.lstrip(os.path.sep).rstrip('\n')
            return any(
                fnmatch.fnmatch(name,
                                pattern.lstrip(os.path.sep).rstrip('\n'))
                for pattern in patterns)

        new_entries = [src for src in sources if not is_ignored(src)]
        with open(gitignore, 'a') as f:
            for entry in new_entries:
                f.write(os.path.sep + entry + '\n')

    def _prepare_rebased_repository(self):
        """
        Initialize git repository in the rebased directory
        :return: git.Repo instance of rebased_sources
        """
        for source, _, source_type in self.spec_file.spc.sources:
            # copy only existing local sources
            # NOTE(review): source_type == 1 presumably denotes a regular
            # Source (as opposed to a Patch) - confirm against rpm bindings
            if not urllib.parse.urlparse(source).scheme and source_type == 1:
                source_path = os.path.join(self.execution_dir, source)
                if os.path.isfile(source_path):
                    shutil.copy(source_path, self.rebased_sources_dir)

        # copy every patch, applied or not, so the repository is complete
        for patch in self.spec_file.get_applied_patches(
        ) + self.spec_file.get_not_used_patches():
            shutil.copy(patch.path, self.rebased_sources_dir)

        # carry over dist-git metadata files when present
        sources = os.path.join(self.execution_dir, 'sources')
        if os.path.isfile(sources):
            shutil.copy(sources, self.rebased_sources_dir)

        gitignore = os.path.join(self.execution_dir, '.gitignore')
        if os.path.isfile(gitignore):
            shutil.copy(gitignore, self.rebased_sources_dir)

        repo = git.Repo.init(self.rebased_sources_dir)
        repo.git.config('user.name', GitHelper.get_user(), local=True)
        repo.git.config('user.email', GitHelper.get_email(), local=True)
        repo.git.add(all=True)
        # skip_hooks keeps user-installed git hooks from interfering
        repo.index.commit('Initial commit', skip_hooks=True)
        return repo

    @staticmethod
    def _sanitize_build_dict(build_dict):
        blacklist = [
            'builds_nowait',
            'build_tasks',
            'builder_options',
            'srpm_builder_options',
            'app_kwargs',
        ]
        return {k: v for k, v in build_dict.items() if k not in blacklist}

    def build_source_packages(self):
        """
        Build old and new SRPM packages with the configured SRPM build tool.

        For the old version, an existing Koji build is downloaded instead
        when get_old_build_from_koji is set. Build results, or failure
        details, are recorded in the results store.

        :raises RebaseHelperError: if the SRPM build tool is unsupported
            or the build fails
        """
        try:
            builder = plugin_manager.srpm_build_tools.get_plugin(
                self.conf.srpm_buildtool)
        except NotImplementedError as e:
            raise RebaseHelperError(
                '{}. Supported SRPM build tools are {}'.format(
                    str(e), ', '.join(plugin_manager.srpm_build_tools.
                                      get_supported_plugins()))) from e

        for version in ['old', 'new']:
            koji_build_id = None
            results_dir = os.path.join(self.results_dir,
                                       '{}-build'.format(version), 'SRPM')
            spec = self.spec_file if version == 'old' else self.rebase_spec_file
            package_name = spec.header.name
            package_version = spec.header.version
            logger.info('Building source package for %s version %s',
                        package_name, package_version)

            if version == 'old' and self.conf.get_old_build_from_koji:
                # reuse an existing Koji build instead of building locally
                koji_build_id, ver = KojiHelper.get_old_build_info(
                    package_name, package_version)
                if ver:
                    package_version = ver

            build_dict = dict(
                name=package_name,
                version=package_version,
                srpm_buildtool=self.conf.srpm_buildtool,
                srpm_builder_options=self.conf.srpm_builder_options,
                app_kwargs=self.kwargs)
            try:
                os.makedirs(results_dir)
                if koji_build_id:
                    session = KojiHelper.create_session()
                    build_dict['srpm'], build_dict[
                        'logs'] = KojiHelper.download_build(session,
                                                            koji_build_id,
                                                            results_dir,
                                                            arches=['src'])

                else:
                    build_dict.update(
                        builder.build(spec, results_dir, **build_dict))
                # strip internal options before storing the results
                build_dict = self._sanitize_build_dict(build_dict)
                results_store.set_build_data(version, build_dict)
            except RebaseHelperError:  # pylint: disable=try-except-raise
                raise
            except SourcePackageBuildError as e:
                # record the failure details so they appear in the report
                build_dict['logs'] = e.logs
                build_dict['source_package_build_error'] = str(e)
                build_dict = self._sanitize_build_dict(build_dict)
                results_store.set_build_data(version, build_dict)
                if e.logfile:
                    msg = 'Building {} SRPM packages failed; see {} for more information'.format(
                        version, e.logfile)
                else:
                    msg = 'Building {} SRPM packages failed; see logs in {} for more information'.format(
                        version, results_dir)
                raise RebaseHelperError(msg, logfiles=e.logs) from e
            except Exception as e:
                raise RebaseHelperError(
                    'Building package failed with unknown reason. '
                    'Check all available log files.') from e

    def build_binary_packages(self):
        """Function calls build class for building packages

        Builds old and new binary RPMs with the configured build tool,
        downloads an existing Koji build for the old version when
        get_old_build_from_koji is set, or attaches to previously submitted
        build tasks when build_tasks is given. Results, or failure details,
        are recorded in the results store.

        :raises RebaseHelperError: if the build tool is unsupported or the
            build fails
        """
        try:
            builder = plugin_manager.build_tools.get_plugin(
                self.conf.buildtool)
        except NotImplementedError as e:
            raise RebaseHelperError('{}. Supported build tools are {}'.format(
                str(e), ', '.join(plugin_manager.build_tools.
                                  get_supported_plugins()))) from e

        for version in ['old', 'new']:
            results_dir = os.path.join(self.results_dir,
                                       '{}-build'.format(version), 'RPM')
            spec = None
            task_id = None
            koji_build_id = None
            build_dict: Dict[str, Any] = {}

            if self.conf.build_tasks is None:
                spec = self.spec_file if version == 'old' else self.rebase_spec_file
                package_name = spec.header.name
                package_version = spec.header.version

                if version == 'old' and self.conf.get_old_build_from_koji:
                    # reuse an existing Koji build instead of building locally
                    koji_build_id, ver = KojiHelper.get_old_build_info(
                        package_name, package_version)
                    if ver:
                        package_version = ver

                build_dict = dict(
                    name=package_name,
                    version=package_version,
                    builds_nowait=self.conf.builds_nowait,
                    build_tasks=self.conf.build_tasks,
                    builder_options=self.conf.builder_options,
                    srpm=results_store.get_build(version).get('srpm'),
                    srpm_logs=results_store.get_build(version).get('logs'),
                    app_kwargs=self.kwargs)

                # prepare for building
                builder.prepare(spec, self.conf)

                logger.info('Building binary packages for %s version %s',
                            package_name, package_version)
            else:
                # attach to previously submitted build tasks instead of
                # starting new builds
                task_id = self.conf.build_tasks[
                    0] if version == 'old' else self.conf.build_tasks[1]

            try:
                os.makedirs(results_dir)
                if self.conf.build_tasks is None:
                    if koji_build_id:
                        session = KojiHelper.create_session()
                        build_dict['rpm'], build_dict[
                            'logs'] = KojiHelper.download_build(
                                session,
                                koji_build_id,
                                results_dir,
                                arches=['noarch', 'x86_64'])
                    else:
                        build_dict.update(
                            builder.build(spec, results_dir, **build_dict))
                if builder.CREATES_TASKS and task_id and not koji_build_id:
                    # remote builds: either wait for the task to finish or
                    # just collect what a detached task produced so far
                    if not self.conf.builds_nowait:
                        build_dict['rpm'], build_dict[
                            'logs'] = builder.wait_for_task(
                                build_dict, task_id, results_dir)
                    elif self.conf.build_tasks:
                        build_dict['rpm'], build_dict[
                            'logs'] = builder.get_detached_task(
                                task_id, results_dir)
                # strip internal options before storing the results
                build_dict = self._sanitize_build_dict(build_dict)
                results_store.set_build_data(version, build_dict)
            except RebaseHelperError:  # pylint: disable=try-except-raise
                # Proper RebaseHelperError instance was created already. Re-raise it.
                raise
            except BinaryPackageBuildError as e:
                # record the failure details so they appear in the report
                build_dict['logs'] = e.logs
                build_dict['binary_package_build_error'] = str(e)
                build_dict = self._sanitize_build_dict(build_dict)
                results_store.set_build_data(version, build_dict)

                if e.logfile is None:
                    msg = 'Building {} RPM packages failed; see logs in {} for more information'.format(
                        version, results_dir)
                else:
                    msg = 'Building {} RPM packages failed; see {} for more information'.format(
                        version, e.logfile)

                raise RebaseHelperError(msg, logfiles=e.logs) from e
            except Exception as e:
                raise RebaseHelperError(
                    'Building package failed with unknown reason. '
                    'Check all available log files.') from e

        if self.conf.builds_nowait and not self.conf.build_tasks:
            if builder.CREATES_TASKS:
                self.print_task_info(builder)

    def run_package_checkers(self, results_dir, **kwargs):
        """
        Runs checkers on packages and stores results in a given directory.

        :param results_dir: Path to directory in which to store the results.
        :type results_dir: str
        :param category: checker type(SOURCE/SRPM/RPM)
        :type category: str
        :return: None
        """
        checkers_dir = os.path.join(results_dir, constants.CHECKERS_DIR)
        outputs = {}

        for name in self.conf.pkgcomparetool:
            try:
                output = plugin_manager.checkers.run(checkers_dir, name,
                                                     **kwargs)
            except CheckerNotFoundError:
                logger.error("Rebase-helper did not find checker '%s'.",
                             name)
                continue
            if output:
                outputs[name] = output

        for checker, output in outputs.items():
            results_store.set_checker_output(checker, output)

    def get_new_build_logs(self):
        """Collect stored build data for both versions under 'build_ref'."""
        build_ref = {version: results_store.get_build(version)
                     for version in ('old', 'new')}
        result: Dict[str, Dict[str, Dict[str, Any]]] = {'build_ref': build_ref}
        return result

    def print_summary(self, exception=None):
        """
        Save rebase-helper result and print the summary using output tools.
        :param exception: Error message from rebase-helper
        :return:
        """
        logs = None
        if exception:
            # store the failure, including any log files attached to it
            if exception.logfiles:
                logs = exception.logfiles
            results_store.set_result_message('fail', exception.msg)
        else:
            message = "Rebase from {} to {} completed without an error".format(
                self.spec_file.get_NVR(), self.rebase_spec_file.get_NVR())
            results_store.set_result_message('success', message)

        if self.rebase_spec_file:
            self.rebase_spec_file.update_paths_to_sources_and_patches()
            self.generate_patch()

        plugin_manager.output_tools.run(self.conf.outputtool, logs, self)

    def print_task_info(self, builder):
        """Log build-task information for both old and new builds."""
        build_ref = self.get_new_build_logs()['build_ref']
        for version in ('old', 'new'):
            logger.info(builder.get_task_info(build_ref[version]))

    def apply_changes(self):
        """
        Apply the generated changes patch to the git repository in the
        execution directory using git-am, initializing the repository
        if necessary.
        """
        try:
            repo = git.Repo(self.execution_dir)
        except git.InvalidGitRepositoryError:
            repo = git.Repo.init(self.execution_dir)
        patch = results_store.get_changes_patch()
        if not patch:
            logger.warning('Cannot apply %s. No patch file was created',
                           constants.CHANGES_PATCH)
            # bug fix: without this return, patch['changes_patch'] below
            # raised TypeError whenever no patch had been created
            return
        try:
            repo.git.am(patch['changes_patch'])
        except git.GitCommandError as e:
            logger.warning(
                '%s was not applied properly. Please review changes manually.'
                '\nThe error message is: %s', constants.CHANGES_PATCH, str(e))

    def prepare_next_run(self, results_dir):
        """
        Decide whether the build should be attempted again after a failure.

        Runs build log hooks (which may modify the SPEC file automatically),
        then either asks the user or, in non-interactive mode, retries only
        when the hooks actually changed something. On retry, the per-version
        build output directories are cleared.

        :param results_dir: path to the results directory
        :return: True when the build should be retried, False otherwise
        """
        # Running build log hooks only makes sense after a failed build
        # of new RPM packages. The folder results_dir/new-build/RPM
        # doesn't exist unless the build of new RPM packages has been run.
        changes_made = False
        if os.path.exists(
                os.path.join(results_dir, constants.NEW_BUILD_DIR, 'RPM')):
            changes_made = plugin_manager.build_log_hooks.run(
                self.spec_file, self.rebase_spec_file, **self.kwargs)
        # Save current rebase spec file content
        self.rebase_spec_file.save()
        if not self.conf.non_interactive and \
                InputHelper.get_message('Do you want to try it one more time'):
            logger.info('Now it is time to make changes to  %s if necessary.',
                        self.rebase_spec_file.path)
        elif self.conf.non_interactive and changes_made:
            logger.info(
                'Build log hooks made some changes to the SPEC file, starting the build process again.'
            )
        else:
            return False
        if not self.conf.non_interactive and not \
                InputHelper.get_message('Do you want to continue with the rebuild now'):
            return False
        # Update rebase spec file content after potential manual modifications
        self.rebase_spec_file.reload()
        # clear current version output directories
        if os.path.exists(os.path.join(results_dir, constants.OLD_BUILD_DIR)):
            shutil.rmtree(os.path.join(results_dir, constants.OLD_BUILD_DIR))
        if os.path.exists(os.path.join(results_dir, constants.NEW_BUILD_DIR)):
            shutil.rmtree(os.path.join(results_dir, constants.NEW_BUILD_DIR))
        return True

    def run(self):
        """
        Execute the whole rebase: validate option combinations, prepare and
        patch sources, build packages (offering retries after failed
        builds), run checkers and print the summary.

        :return: 0 on success; returns early (None) when builds_nowait is
            used without build_tasks
        :raises RebaseHelperError: on invalid option combinations or
            unrecoverable failures
        """
        # Certain options can be used only with specific build tools
        tools_creating_tasks = []
        for tool_name, tool in plugin_manager.build_tools.plugins.items():
            if tool and tool.CREATES_TASKS:
                tools_creating_tasks.append(tool_name)
        if self.conf.buildtool not in tools_creating_tasks:
            options_used = []
            if self.conf.build_tasks is not None:
                options_used.append('--build-tasks')
            if self.conf.builds_nowait is True:
                options_used.append('--builds-nowait')
            if options_used:
                raise RebaseHelperError(
                    "{} can be used only with the following build tools: {}".
                    format(" and ".join(options_used),
                           ", ".join(tools_creating_tasks)))
        elif self.conf.builds_nowait and self.conf.get_old_build_from_koji:
            raise RebaseHelperError("{} can't be used with: {}".format(
                '--builds-nowait', '--get-old-build-from-koji'))

        tools_accepting_options = []
        for tool_name, tool in plugin_manager.build_tools.plugins.items():
            if tool and tool.ACCEPTS_OPTIONS:
                tools_accepting_options.append(tool_name)
        if self.conf.buildtool not in tools_accepting_options:
            options_used = []
            if self.conf.builder_options is not None:
                options_used.append('--builder-options')
            if options_used:
                raise RebaseHelperError(
                    "{} can be used only with the following build tools: {}".
                    format(" and ".join(options_used),
                           ", ".join(tools_accepting_options)))

        # when attaching to existing build tasks, sources were already
        # prepared and patched by the original invocation
        if self.conf.build_tasks is None:
            old_sources, new_sources = self.prepare_sources()
            self.run_package_checkers(self.results_dir,
                                      category=CheckerCategory.SOURCE,
                                      old_dir=old_sources,
                                      new_dir=new_sources)
            try:
                self.patch_sources([old_sources, new_sources])
            except RebaseHelperError as e:
                # Print summary and return error
                self.print_summary(e)
                raise

        # Build packages
        while True:
            try:
                if self.conf.build_tasks is None:
                    self.build_source_packages()
                self.run_package_checkers(self.results_dir,
                                          category=CheckerCategory.SRPM)
                self.build_binary_packages()
                if self.conf.builds_nowait and not self.conf.build_tasks:
                    # builds were only submitted; nothing more to do now
                    return
                self.run_package_checkers(self.results_dir,
                                          category=CheckerCategory.RPM)
            # Print summary and return error
            except RebaseHelperError as e:
                logger.error(e.msg)
                # after a failed build, offer (or auto-trigger) another try
                if self.conf.build_tasks is None and self.prepare_next_run(
                        self.results_dir):
                    continue
                self.print_summary(e)
                raise
            else:
                break

        if not self.conf.keep_workspace:
            self._delete_workspace_dir()

        self.print_summary()
        if self.conf.apply_changes:
            self.apply_changes()
        return 0
# Exemplo n.º 38
# 0
class Application(object):
    """Application driver for rebase-helper (older variant)."""

    # NOTE(review): these are *class* attributes used as instance defaults.
    # The mutable ones (kwargs, rest_sources, rebased_patches) are shared
    # across all instances until rebound - e.g. __init__ assigns into
    # self.kwargs in place, which mutates the class-level dict. Confirm
    # whether more than one Application instance can ever exist.
    result_file = ""
    temp_dir = ""
    kwargs = {}
    old_sources = ""
    rest_sources = []
    new_sources = ""
    spec_file = None
    spec_file_path = None
    rebase_spec_file = None
    rebase_spec_file_path = None
    debug_log_file = None
    rebased_patches = {}

    def __init__(self, cli_conf=None):
        """
        Initialize the application

        Sets up logging, resolves the workspace and results directories,
        loads the SPEC files and gathers all data needed for the rebase.

        :param cli_conf: CLI object with configuration gathered from commandline
        :return:
        """
        self.conf = cli_conf

        if self.conf.verbose:
            LoggerHelper.add_stream_handler(logger, logging.DEBUG)
        else:
            LoggerHelper.add_stream_handler(logger, logging.INFO)

        # The directory in which rebase-helper was executed
        self.execution_dir = os.getcwd()
        # Temporary workspace for Builder, checks, ...
        # NOTE(review): assigning into self.kwargs mutates the class-level
        # dict shared by all instances - confirm this is intentional
        self.kwargs['workspace_dir'] = self.workspace_dir = os.path.join(self.execution_dir,
                                                                         settings.REBASE_HELPER_WORKSPACE_DIR)
        # Directory where results should be put
        self.kwargs['results_dir'] = self.results_dir = os.path.join(self.execution_dir,
                                                                     settings.REBASE_HELPER_RESULTS_DIR)

        self.kwargs['non_interactive'] = self.conf.non_interactive
        # if not continuing, check the results dir
        if not self.conf.cont and not self.conf.build_only and not self.conf.comparepkgs:
            self._check_results_dir()
        # This is used if user executes rebase-helper with --continue
        # parameter even when directory does not exist
        if not os.path.exists(self.results_dir):
            os.makedirs(self.results_dir)

        self._add_debug_log_file()
        self._get_spec_file()
        self._prepare_spec_objects()

        # check the workspace dir
        if not self.conf.cont:
            self._check_workspace_dir()

        # TODO: Remove the value from kwargs and use only CLI attribute!
        self.kwargs['continue'] = self.conf.cont
        self._initialize_data()

        if self.conf.cont or self.conf.build_only:
            self._delete_old_builds()

    def _add_debug_log_file(self):
        """
        Add the application wide debug log file
        :return:
        """
        log_path = os.path.join(self.results_dir, settings.REBASE_HELPER_DEBUG_LOG)
        formatter = logging.Formatter("%(asctime)s %(levelname)s\t%(filename)s"
                                      ":%(lineno)s %(funcName)s: %(message)s")
        try:
            LoggerHelper.add_file_handler(logger, log_path, formatter,
                                          logging.DEBUG)
        except (IOError, OSError):
            # a missing or unwritable results dir must not abort the run
            logger.warning("Can not create debug log '%s'", log_path)
        else:
            self.debug_log_file = log_path

    def _prepare_spec_objects(self):
        """
        Prepare spec files and initialize objects

        Creates the SpecFile object for the original SPEC file and a copy
        representing the rebased SPEC file, then sets the new version on
        the copy - either from an archive filename or from a version
        string passed on the command line.

        :return:
        """
        self.rebase_spec_file_path = get_rebase_name(self.spec_file_path)

        self.spec_file = SpecFile(self.spec_file_path,
                                  self.execution_dir,
                                  download=not self.conf.not_download_sources)
        # Check whether test suite is enabled at build time
        if not self.spec_file.is_test_suite_enabled():
            OutputLogger.set_info_text('WARNING', 'Test suite is not enabled at build time.')
        #  create an object representing the rebased SPEC file
        self.rebase_spec_file = self.spec_file.copy(self.rebase_spec_file_path)

        #  check if argument passed as new source is a file or just a version
        # (idiom fix: replaced a truthiness test on a list comprehension
        # with the equivalent any() expression)
        if any(self.conf.sources.endswith(ext)
               for ext in Archive.get_supported_archives()):
            logger.debug("argument passed as a new source is a file")
            self.rebase_spec_file.set_version_using_archive(self.conf.sources)
        else:
            logger.debug("argument passed as a new source is a version")
            version, extra_version = SpecFile.split_version_string(self.conf.sources)
            self.rebase_spec_file.set_version(version)
            if extra_version:
                self.rebase_spec_file.set_extra_version(extra_version)

    def _initialize_data(self):
        """
        Function fill dictionary with default data

        Resolves absolute paths to the old and new source archives and to
        the rest of the sources, and validates the configured compare tool.

        :raises RebaseHelperError: if new sources are not defined or the
            selected package compare tool is not supported
        """
        # Get all tarballs before self.kwargs initialization
        self.old_sources = self.spec_file.get_archive()
        new_sources = self.rebase_spec_file.get_archive()

        self.old_sources = os.path.abspath(self.old_sources)
        if new_sources:
            self.conf.sources = new_sources

        if not self.conf.sources:
            raise RebaseHelperError('You have to define new sources.')
        self.new_sources = os.path.abspath(self.conf.sources)
        # Contains all source except the Source0
        self.rest_sources = self.spec_file.get_sources()[1:]
        self.rest_sources = [os.path.abspath(x) for x in self.rest_sources]

        # We want to inform user immediatelly if compare tool doesn't exists
        if self.conf.pkgcomparetool and self.conf.pkgcomparetool not in Checker.get_supported_tools():
            # bug fix: the message was previously passed with an unformatted
            # '%s' placeholder plus a separate argument, which
            # RebaseHelperError never interpolated
            raise RebaseHelperError(
                'You have to specify one of these check tools {}'.format(
                    Checker.get_supported_tools()))

    def _get_rebase_helper_log(self):
        """Return the path of the rebase-helper results log file."""
        return os.path.join(self.results_dir,
                            settings.REBASE_HELPER_RESULTS_LOG)

    def get_rpm_packages(self, dirname):
        """
        Function returns RPM packages stored in dirname/old and dirname/new directories

        :param dirname: directory where are stored old and new RPMS
        :return: True if RPM packages were found for both versions,
            False otherwise
        """
        found = True
        for version in ['old', 'new']:
            if version == 'old':
                spec_version = self.spec_file.get_version()
            else:
                spec_version = self.rebase_spec_file.get_version()
            rpm_dir = os.path.join(os.path.realpath(dirname), version, 'RPM')
            data = {
                'name': self.spec_file.get_package_name(),
                'version': spec_version,
                'rpm': PathHelper.find_all_files(rpm_dir, '*.rpm'),
            }
            if not data['rpm']:
                # bug fix: the message was eagerly %-formatted and was also
                # missing a path separator between dirname and version; log
                # the actual searched path with lazy formatting instead
                logger.error('Your path %s does not contain any RPM packages',
                             rpm_dir)
                found = False
            OutputLogger.set_build_data(version, data)
        return found

    def _get_spec_file(self):
        """
        Function gets the spec file from the execution_dir directory

        :raises RebaseHelperError: if no SPEC file is found
        """
        self.spec_file_path = PathHelper.find_first_file(self.execution_dir, '*.spec', 0)
        if not self.spec_file_path:
            # bug fix: the message was passed with an unformatted '%s'
            # placeholder plus a separate argument, which RebaseHelperError
            # never interpolated
            raise RebaseHelperError(
                "Could not find any SPEC file in the current directory '{}'".format(
                    self.execution_dir))

    def _delete_old_builds(self):
        """
        Deletes the old and new result dir from previous build
        :return:
        """
        self._delete_new_results_dir()
        self._delete_old_results_dir()

    def _delete_old_results_dir(self):
        """
        Deletes old result dir
        :return:
        """
        old_dir = os.path.join(self.results_dir, 'old')
        if os.path.isdir(old_dir):
            shutil.rmtree(old_dir)

    def _delete_new_results_dir(self):
        """
        Deletes new result dir
        :return:
        """
        new_dir = os.path.join(self.results_dir, 'new')
        if os.path.isdir(new_dir):
            shutil.rmtree(new_dir)

    def _delete_workspace_dir(self):
        """
        Delete the workspace directory, logging a debug message.
        :return:
        """
        logger.debug("Removing the workspace directory '%s'",
                     self.workspace_dir)
        shutil.rmtree(self.workspace_dir)

    def _check_workspace_dir(self):
        """
        Check if workspace dir exists, and removes it if yes.
        :return:
        """
        workspace = self.workspace_dir
        if os.path.exists(workspace):
            logger.warning("Workspace directory '%s' exists, removing it",
                           os.path.basename(workspace))
            self._delete_workspace_dir()
        os.makedirs(workspace)

    def _check_results_dir(self):
        """
        Check if  results dir exists, and removes it if yes.
        :return:
        """
        # TODO: We may not want to delete the directory in the future
        results = self.results_dir
        if os.path.exists(results):
            logger.warning("Results directory '%s' exists, removing it",
                           os.path.basename(results))
            shutil.rmtree(results)
        os.makedirs(results)

    @staticmethod
    def extract_archive(archive_path, destination):
        """
        Extracts given archive into the destination and handle all exceptions.

        :param archive_path: path to the archive to be extracted
        :param destination: path to a destination, where the archive should be extracted to
        :raises RebaseHelperError: if the archive type is unsupported or the
            archive cannot be extracted
        :return:
        """
        try:
            archive = Archive(archive_path)
        except NotImplementedError as ni_e:
            # str(ni_e) instead of ni_e.message: BaseException has no 'message'
            # attribute on Python 3; also format eagerly, since
            # RebaseHelperError does not interpolate extra args
            raise RebaseHelperError('{}. Supported archives are {}'.format(
                str(ni_e), Archive.get_supported_archives()))

        try:
            archive.extract_archive(destination)
        except IOError:
            raise RebaseHelperError("Archive '{}' can not be extracted".format(archive_path))
        except (EOFError, SystemError):
            raise RebaseHelperError("Archive '{}' is damaged".format(archive_path))

    @staticmethod
    def extract_sources(archive_path, destination):
        """Extract the given archive and return the full path to the
        top-level directory of the extracted sources."""
        Application.extract_archive(archive_path, destination)

        extracted = os.listdir(destination)
        if not extracted:
            raise RebaseHelperError('Extraction of sources failed!')

        return os.path.join(destination, extracted[0])

    @staticmethod
    def check_build_requires(spec):
        """
        Verify that all build dependencies of the package are installed; if
        some are missing, ask the user and install them using PolicyKit.

        :param spec: SpecFile object
        :raises RebaseHelperError: if installing the dependencies fails
        :return:
        """
        required = spec.get_requires()
        if RpmHelper.all_packages_installed(required):
            return
        question = '\nSome build dependencies are missing. Do you want to install them now'
        if ConsoleHelper.get_message(question):
            if RpmHelper.install_build_dependencies(spec.get_path()) != 0:
                raise RebaseHelperError('Failed to install build dependencies')

    def prepare_sources(self):
        """
        Extract the old and new source archives into the execution directory.

        :return: list with paths to the old and new extracted source trees
        """
        old_dir = Application.extract_sources(self.old_sources,
                                              os.path.join(self.execution_dir, settings.OLD_SOURCES_DIR))
        new_dir = Application.extract_sources(self.new_sources,
                                              os.path.join(self.execution_dir, settings.NEW_SOURCES_DIR))

        # This copies other sources to extracted sources marked as 0
        for rest in self.rest_sources:
            # whether 'rest' is a supported archive does not depend on the
            # target directory, so determine it once per source, not per dir
            archive = [x for x in Archive.get_supported_archives() if rest.endswith(x)]
            if not archive:
                continue
            for source_dir in [old_dir, new_dir]:
                # NOTE(review): source_dir is already an absolute path rooted
                # in execution_dir, so os.path.join simply returns source_dir;
                # confirm the intended destination of these extra sources
                Application.extract_sources(rest, os.path.join(self.execution_dir, source_dir))

        return [old_dir, new_dir]

    def patch_sources(self, sources):
        """
        Rebase downstream patches on top of the new sources.

        :param sources: pair [old_sources_dir, new_sources_dir]
        :raises RebaseHelperError: if patching fails
        """
        # Patch sources
        git_helper = GitHelper(sources[0])
        git_helper.check_git_config()
        patch = Patcher(self.conf.patchtool)
        self.rebase_spec_file.update_changelog(self.rebase_spec_file.get_new_log(git_helper))
        try:
            self.rebased_patches = patch.patch(sources[0],
                                               sources[1],
                                               self.rest_sources,
                                               git_helper,
                                               self.spec_file.get_applied_patches(),
                                               self.spec_file.get_prep_section(),
                                               **self.kwargs)
        except RuntimeError as run_e:
            # chain the original error so the root cause is not lost
            raise RebaseHelperError('Patching failed') from run_e
        self.rebase_spec_file.write_updated_patches(self.rebased_patches)
        if self.conf.non_interactive:
            OutputLogger.set_patch_output('Unapplied patches:', self.rebased_patches['unapplied'])
        OutputLogger.set_patch_output('Patches:', self.rebased_patches)

    def build_packages(self):
        """
        Build old and new SRPM and RPM packages using the configured build tool.

        When the binary build of the new version fails, the build log is
        analyzed and, where possible, the %files section of the rebased SPEC
        file is corrected and the build is retried.

        :return: True if packages were built, False when (in non-interactive
            mode) some patches were not successfully applied
        :raises RebaseHelperError: on unrecoverable build failures
        """
        try:
            builder = Builder(self.conf.buildtool)
        except NotImplementedError as ni_e:
            # str(ni_e): exceptions have no 'message' attribute on Python 3;
            # format eagerly because RebaseHelperError does not interpolate args
            raise RebaseHelperError('{}. Supported build tools are {}'.format(
                str(ni_e), Builder.get_supported_tools()))

        for version in ['old', 'new']:
            spec_object = self.spec_file if version == 'old' else self.rebase_spec_file
            build_dict = {}
            build_dict['name'] = spec_object.get_package_name()
            build_dict['version'] = spec_object.get_version()
            logger.debug(build_dict)
            patches = [x.get_path() for x in spec_object.get_patches()]
            results_dir = os.path.join(self.results_dir, version)
            spec = spec_object.get_path()
            sources = spec_object.get_sources()

            failed_before = False
            while True:
                try:
                    build_dict.update(builder.build(spec, sources, patches, results_dir, **build_dict))
                    OutputLogger.set_build_data(version, build_dict)
                    break

                except SourcePackageBuildError:
                    #  always fail for original version
                    if version == 'old':
                        raise RebaseHelperError('Creating old SRPM package failed.')
                    logger.error('Building source package failed.')
                    #  TODO: implement log analyzer for SRPMs and add the checks here!!!
                    raise

                except BinaryPackageBuildError:
                    #  always fail for original version
                    rpm_dir = os.path.join(results_dir, 'RPM')
                    build_log = 'build.log'
                    build_log_path = os.path.join(rpm_dir, build_log)
                    if version == 'old':
                        raise RebaseHelperError('Building old RPM package failed. Check log {}'.format(build_log_path))
                    logger.error('Building binary packages failed.')
                    try:
                        files = BuildLogAnalyzer.parse_log(rpm_dir, build_log)
                    except BuildLogAnalyzerMissingError:
                        raise RebaseHelperError('Build log {} does not exist'.format(build_log_path))
                    except BuildLogAnalyzerMakeError:
                        raise RebaseHelperError('Building package failed during build. Check log {}'.format(build_log_path))
                    except BuildLogAnalyzerPatchError:
                        raise RebaseHelperError('Building package failed during patching. Check log {}'.format(build_log_path))

                    if files['missing']:
                        # bugfix: this branch previously joined files['added'],
                        # which is not the key tested by the condition above
                        missing_files = '\n'.join(files['missing'])
                        logger.info('Files not packaged in the SPEC file:\n%s', missing_files)
                    elif files['deleted']:
                        deleted_files = '\n'.join(files['deleted'])
                        logger.warning('Removed files packaged in SPEC file:\n%s', deleted_files)
                    else:
                        raise RebaseHelperError("Build failed, but no issues were found in the build log {}".format(build_log))
                    self.rebase_spec_file.modify_spec_files_section(files)

                if not self.conf.non_interactive:
                    if failed_before:
                        if not ConsoleHelper.get_message('Do you want rebase-helper to try build the packages one more time'):
                            raise KeyboardInterrupt
                else:
                    logger.warning('Some patches were not successfully applied')
                    shutil.rmtree(os.path.join(results_dir, 'RPM'))
                    shutil.rmtree(os.path.join(results_dir, 'SRPM'))
                    return False
                #  build just failed, otherwise we would break out of the while loop
                failed_before = True

                shutil.rmtree(os.path.join(results_dir, 'RPM'))
                shutil.rmtree(os.path.join(results_dir, 'SRPM'))
        return True

    def _execute_checkers(self, checker):
        """
        Run a single package checker and return its textual output.

        :param checker: checker name based from command line
        :return: text produced by the checker
        """
        pkgchecker = Checker(checker)
        logger.info('Comparing packages using %s...', checker)
        return pkgchecker.run_check(self.results_dir)

    def pkgdiff_packages(self):
        """
        Compare the old and new packages using the selected checker, or all
        supported checkers when none was selected.

        :return:
        """
        if self.conf.pkgcomparetool:
            tools = [self.conf.pkgcomparetool]
        else:
            tools = Checker.get_supported_tools()
        pkgdiff_results = {tool: self._execute_checkers(tool) for tool in tools}
        OutputLogger.set_checker_output('\nResults from checker(s):', pkgdiff_results)

    def print_summary(self):
        """Print rebase results using the configured output tool."""
        output_tool.check_output_argument(self.conf.outputtool)
        printer = output_tool.OutputTool(self.conf.outputtool)
        printer.print_information(path=self._get_rebase_helper_log())

    def run(self):
        """Execute the whole rebase: extract sources, patch, build, compare
        packages and print the summary.

        :return: 1 when the compare directory contains no RPM packages,
            otherwise None
        """
        sources = self.prepare_sources()

        # patching is skipped for build-only runs and for pre-built comparisons
        if not self.conf.build_only and not self.conf.comparepkgs:
            self.patch_sources(sources)

        if not self.conf.patch_only:
            if not self.conf.comparepkgs:
                # check build dependencies for rpmbuild
                if self.conf.buildtool == 'rpmbuild':
                    Application.check_build_requires(self.spec_file)
                # Build packages
                build = self.build_packages()
                # Perform checks
            else:
                # user supplied pre-built packages to compare instead of building
                build = self.get_rpm_packages(self.conf.comparepkgs)
                # We don't care dirname doesn't contain any RPM packages
                # Therefore return 1
                if not build:
                    return 1
            if build:
                self.pkgdiff_packages()

        # print summary information
        self.print_summary()

        if not self.conf.keep_workspace:
            self._delete_workspace_dir()

        if self.debug_log_file:
            logger.info("Detailed debug log is located in '%s'", self.debug_log_file)
# Exemplo n.º 39
# 0
class Application:
    # NOTE(review): these are *class-level* defaults. The mutable ones ({} and
    # []) are shared by all instances until shadowed by an instance attribute;
    # __init__ mutates self.kwargs in place, which therefore modifies the
    # class-level dict -- confirm this sharing is intentional.
    result_file: str = ''
    temp_dir: str = ''
    kwargs: Dict[str, Any] = {}
    old_sources: str = ''  # path to the current (old) source archive
    new_sources: str = ''  # path to the target (new) source archive
    old_rest_sources: List[str] = []  # old sources other than Source0
    new_rest_sources: List[str] = []  # new sources other than Source0
    spec_file: Optional[SpecFile] = None  # SPEC of the current version
    spec_file_path: Optional[str] = None
    rebase_spec_file: Optional[SpecFile] = None  # SPEC of the rebased version
    rebase_spec_file_path: Optional[str] = None
    debug_log_file: Optional[str] = None
    report_log_file: Optional[str] = None
    rebased_patches: Dict[str, List[str]] = {}  # result of Patcher.patch()
    rebased_repo: Optional[git.Repo] = None  # git repo of rebased-sources

    def __init__(self, cli_conf, execution_dir, results_dir, debug_log_file):
        """
        Initialize the application

        :param cli_conf: CLI object with configuration gathered from commandline
        :param execution_dir: directory the rebase is executed from
        :param results_dir: directory where results will be stored
        :param debug_log_file: path to the debug log
        :return:
        """
        results_store.clear()

        self.conf = cli_conf
        self.execution_dir = execution_dir
        self.rebased_sources_dir = os.path.join(results_dir, 'rebased-sources')

        self.debug_log_file = debug_log_file

        # copy the class-level default into an instance attribute before
        # mutating it; updating the shared class dict in place would leak
        # configuration between Application instances
        self.kwargs = dict(self.kwargs)
        self.kwargs.update(self.conf.config)
        # Temporary workspace for Builder, checks, ...
        self.kwargs['workspace_dir'] = self.workspace_dir = os.path.join(self.execution_dir, constants.WORKSPACE_DIR)
        # Directory where results should be put
        self.kwargs['results_dir'] = self.results_dir = results_dir
        # Directory containing only those files, which are relevant for the new rebased version
        self.kwargs['rebased_sources_dir'] = self.rebased_sources_dir

        logger.verbose("Rebase-helper version: %s", VERSION)

        if self.conf.build_tasks is None:
            # check the workspace dir
            if not self.conf.cont:
                self._check_workspace_dir()

            self._get_spec_file()
            self._prepare_spec_objects()

            # verify all sources for the new version are present
            missing_sources = [os.path.basename(s) for s in self.rebase_spec_file.sources
                               if not os.path.isfile(os.path.basename(s))]
            if missing_sources:
                raise RebaseHelperError('The following sources are missing: {}'.format(','.join(missing_sources)))

            if self.conf.update_sources:
                sources = [os.path.basename(s) for s in self.spec_file.sources]
                rebased_sources = [os.path.basename(s) for s in self.rebase_spec_file.sources]
                uploaded = LookasideCacheHelper.update_sources('fedpkg', self.rebased_sources_dir,
                                                               self.rebase_spec_file.get_package_name(),
                                                               sources, rebased_sources,
                                                               upload=not self.conf.skip_upload)
                self._update_gitignore(uploaded, self.rebased_sources_dir)

            # TODO: Remove the value from kwargs and use only CLI attribute!
            self.kwargs['continue'] = self.conf.cont
            self._initialize_data()

        if self.conf.cont or self.conf.build_only:
            self._delete_old_builds()

    @staticmethod
    def setup(cli_conf):
        """Prepare the execution and results directories and set up logging.

        :param cli_conf: CLI object with configuration gathered from commandline
        :return: tuple (execution_dir, results_dir, debug_log_file)
        """
        execution_dir = os.getcwd()
        if cli_conf.results_dir:
            base_dir = os.path.abspath(cli_conf.results_dir)
        else:
            base_dir = execution_dir
        results_dir = os.path.join(base_dir, constants.RESULTS_DIR)

        # if not continuing, check the results dir
        if not (cli_conf.cont or cli_conf.build_only or cli_conf.comparepkgs):
            Application._check_results_dir(results_dir)

        # This is used if user executes rebase-helper with --continue
        # parameter even when directory does not exist
        if not os.path.exists(results_dir):
            os.makedirs(results_dir)
            os.makedirs(os.path.join(results_dir, constants.LOGS_DIR))

        debug_log_file = Application.setup_logging(results_dir)

        return execution_dir, results_dir, debug_log_file

    @staticmethod
    def setup_logging(results_dir):
        """Adds file handlers of various verbosity to loggers.

        Args:
            results_dir: Path to directory which results are stored in.

        Returns:
            string: Path to debug log.

        """
        logs_dir = os.path.join(results_dir, constants.LOGS_DIR)
        debug_log = os.path.join(logs_dir, constants.DEBUG_LOG)
        # one file handler per verbosity level: (filename, formatter, level)
        handler_specs = (
            (constants.DEBUG_LOG, debug_log_formatter, logging.DEBUG),
            (constants.VERBOSE_LOG, log_formatter, CustomLogger.VERBOSE),
            (constants.INFO_LOG, log_formatter, logging.INFO),
        )
        for log_name, formatter, level in handler_specs:
            LoggerHelper.add_file_handler(logger, os.path.join(logs_dir, log_name), formatter, level)

        return debug_log

    def _prepare_spec_objects(self):
        """
        Prepare spec files and initialize objects

        Creates SpecFile objects for the current and the rebased version,
        determines the new upstream version when none was supplied, prepares
        the rebased-sources git repository, stores the new version in the
        rebased SPEC file, runs spec hooks and downloads remote sources.

        :raises RebaseHelperError: if no new version can be determined or the
            requested version is not newer than the current one
        :return:
        """
        self.rebase_spec_file_path = get_rebase_name(self.rebased_sources_dir, self.spec_file_path)

        self.spec_file = SpecFile(self.spec_file_path, self.execution_dir)
        # Check whether test suite is enabled at build time
        if not self.spec_file.is_test_suite_enabled():
            results_store.set_info_text('WARNING', 'Test suite is not enabled at build time.')
        # create an object representing the rebased SPEC file
        self.rebase_spec_file = self.spec_file.copy(self.rebase_spec_file_path)

        if not self.conf.sources:
            # no SOURCES argument given -- ask the versioneer plugins for the
            # latest upstream version
            self.conf.sources = plugin_manager.versioneers.run(self.conf.versioneer,
                                                               self.spec_file.get_package_name(),
                                                               self.spec_file.category,
                                                               self.conf.versioneer_blacklist)
            if self.conf.sources:
                logger.info("Determined latest upstream version '%s'", self.conf.sources)
            else:
                raise RebaseHelperError('Could not determine latest upstream version '
                                        'and no SOURCES argument specified!')

        # Prepare rebased_sources_dir
        self.rebased_repo = self._prepare_rebased_repository(self.spec_file.patches,
                                                             self.execution_dir,
                                                             self.rebased_sources_dir)

        # check if argument passed as new source is a file or just a version
        if [True for ext in Archive.get_supported_archives() if self.conf.sources.endswith(ext)]:
            logger.verbose("argument passed as a new source is a file")
            self.rebase_spec_file.set_version_using_archive(self.conf.sources)
        else:
            logger.verbose("argument passed as a new source is a version")
            version, extra_version, separator = SpecFile.split_version_string(self.conf.sources)
            self.rebase_spec_file.set_version(version)
            self.rebase_spec_file.set_extra_version_separator(separator)
            self.rebase_spec_file.set_extra_version(extra_version)

        # refuse to "rebase" to the same or an older version unless overridden
        if not self.conf.skip_version_check and parse_version(self.rebase_spec_file.get_version()) \
                <= parse_version(self.spec_file.get_version()):
            raise RebaseHelperError("Current version is equal to or newer than the requested version, nothing to do.")

        self.rebase_spec_file.update_changelog(self.conf.changelog_entry)

        # run spec hooks
        plugin_manager.spec_hooks.run(self.spec_file, self.rebase_spec_file, **self.kwargs)

        # spec file object has been sanitized downloading can proceed
        if not self.conf.not_download_sources:
            for spec_file in [self.spec_file, self.rebase_spec_file]:
                spec_file.download_remote_sources()
                # parse spec again with sources downloaded to properly expand %prep section
                spec_file._update_data()  # pylint: disable=protected-access

    def _initialize_data(self):
        """Function fill dictionary with default data"""
        # Get all tarballs before self.kwargs initialization
        self.old_sources = os.path.abspath(self.spec_file.get_archive())

        new_sources = self.rebase_spec_file.get_archive()
        if new_sources:
            self.conf.sources = new_sources
        if not self.conf.sources:
            raise RebaseHelperError('You have to define new sources.')
        self.new_sources = os.path.abspath(self.conf.sources)

        # Contains all source except the Source0
        self.old_rest_sources = [os.path.abspath(src) for src in self.spec_file.get_sources()[1:]]
        self.new_rest_sources = [os.path.abspath(src) for src in self.rebase_spec_file.get_sources()[1:]]

    def get_rpm_packages(self, dirname):
        """
        Function returns RPM packages stored in dirname/old and dirname/new directories

        :param dirname: directory where are stored old and new RPMS
        :return: True when both versions contain RPM packages, False otherwise
        """
        found = True
        for version in ['old', 'new']:
            if version == 'old':
                spec_version = self.spec_file.get_version()
            else:
                spec_version = self.rebase_spec_file.get_version()
            rpm_dir = os.path.join(os.path.realpath(dirname), version, 'RPM')
            data = {
                'name': self.spec_file.get_package_name(),
                'version': spec_version,
                'rpm': PathHelper.find_all_files(rpm_dir, '*.rpm'),
            }
            if not data['rpm']:
                logger.error('Your path %s%s/RPM does not contain any RPM packages', dirname, version)
                found = False
            results_store.set_build_data(version, data)
        return found

    def _get_spec_file(self):
        """Function gets the spec file from the execution_dir directory"""
        self.spec_file_path = PathHelper.find_first_file(self.execution_dir, '*.spec', 0)
        if self.spec_file_path:
            return
        raise RebaseHelperError("Could not find any SPEC file "
                                "in the current directory '{}'".format(self.execution_dir))

    def _delete_old_builds(self):
        """
        Deletes the old and new result dir from previous build

        :return:
        """
        for cleanup in (self._delete_new_results_dir, self._delete_old_results_dir):
            cleanup()

    def _delete_old_results_dir(self):
        """
        Deletes old result dir

        :return:
        """
        target = os.path.join(self.results_dir, 'old')
        if os.path.isdir(target):
            shutil.rmtree(target)

    def _delete_new_results_dir(self):
        """
        Deletes new result dir

        :return:
        """
        target = os.path.join(self.results_dir, 'new')
        if os.path.isdir(target):
            shutil.rmtree(target)

    def _delete_workspace_dir(self):
        """
        Removes the workspace directory, if present, logging the action.

        :return:
        """
        workspace = self.workspace_dir
        logger.verbose("Removing the workspace directory '%s'", workspace)
        if os.path.isdir(workspace):
            shutil.rmtree(workspace)

    def _check_workspace_dir(self):
        """
        Ensures a fresh workspace directory, removing a stale one first.

        :return:
        """
        workspace = self.workspace_dir
        if os.path.exists(workspace):
            logger.warning("Workspace directory '%s' exists, removing it",
                           os.path.basename(workspace))
            self._delete_workspace_dir()
        os.makedirs(workspace)

    @staticmethod
    def _check_results_dir(results_dir):
        """
        Replaces a pre-existing results directory with a fresh skeleton.

        :param results_dir: path to the results directory
        :return:
        """
        # TODO: We may not want to delete the directory in the future
        if os.path.exists(results_dir):
            logger.warning("Results directory '%s' exists, removing it", os.path.basename(results_dir))
            shutil.rmtree(results_dir)
        os.makedirs(results_dir)
        for subdir in (constants.LOGS_DIR,
                       constants.OLD_BUILD_DIR,
                       constants.NEW_BUILD_DIR,
                       constants.CHECKERS_DIR,
                       constants.REBASED_SOURCES_DIR):
            os.makedirs(os.path.join(results_dir, subdir))

    @staticmethod
    def extract_archive(archive_path, destination):
        """
        Extracts given archive into the destination and handle all exceptions.

        :param archive_path: path to the archive to be extracted
        :param destination: path to a destination, where the archive should be extracted to
        :return:
        """
        try:
            archive = Archive(archive_path)
        except NotImplementedError as e:
            msg = '{}. Supported archives are {}'.format(str(e), Archive.get_supported_archives())
            raise RebaseHelperError(msg)

        try:
            archive.extract_archive(destination)
        except IOError:
            raise RebaseHelperError("Archive '{}' can not be extracted".format(archive_path))
        except (EOFError, SystemError):
            raise RebaseHelperError("Archive '{}' is damaged".format(archive_path))

    @staticmethod
    def extract_sources(archive_path, destination):
        """Extract the archive and return the directory holding the sources.

        If the archive contains a single top-level directory, its full path is
        returned; otherwise the destination itself is the sources root.
        """
        Application.extract_archive(archive_path, destination)

        entries = os.listdir(destination)
        if not entries:
            raise RebaseHelperError('Extraction of sources failed!')

        # if there is only one directory, we can assume it's top-level directory
        if len(entries) == 1:
            candidate = os.path.join(destination, entries[0])
            if os.path.isdir(candidate):
                return candidate

        # archive without top-level directory
        return destination

    def prepare_sources(self):
        """
        Extract old and new source archives into the workspace and align the
        rebased SPEC file's %setup dirname with the new archive layout.

        :return: list with paths to the old and new extracted source trees
        """

        old_sources_dir = os.path.join(self.execution_dir, constants.WORKSPACE_DIR, constants.OLD_SOURCES_DIR)
        new_sources_dir = os.path.join(self.execution_dir, constants.WORKSPACE_DIR, constants.NEW_SOURCES_DIR)

        old_dir = Application.extract_sources(self.old_sources, old_sources_dir)
        new_dir = Application.extract_sources(self.new_sources, new_sources_dir)

        # top-level directory of each extracted archive relative to its
        # extraction root ('.' when the archive had no top-level directory)
        old_tld = os.path.relpath(old_dir, old_sources_dir)
        new_tld = os.path.relpath(new_dir, new_sources_dir)

        dirname = self.spec_file.get_setup_dirname()

        if dirname and os.sep in dirname:
            # %setup points below the archive's top-level directory; descend
            # into the matching subdirectory of the extracted tree
            # (os.path.split yields a 2-tuple, so dirs[1:] is one component)
            dirs = os.path.split(dirname)
            if old_tld == dirs[0]:
                old_dir = os.path.join(old_dir, *dirs[1:])
            if new_tld == dirs[0]:
                new_dir = os.path.join(new_dir, *dirs[1:])

        new_dirname = os.path.relpath(new_dir, new_sources_dir)

        # update %setup -n in the rebased SPEC only when the new archive's
        # layout actually requires a dirname
        if new_dirname != '.':
            self.rebase_spec_file.update_setup_dirname(new_dirname)

        # extract rest of source archives to correct paths
        rest_sources = [self.old_rest_sources, self.new_rest_sources]
        spec_files = [self.spec_file, self.rebase_spec_file]
        sources_dirs = [
            os.path.join(constants.WORKSPACE_DIR, constants.OLD_SOURCES_DIR),
            os.path.join(constants.WORKSPACE_DIR, constants.NEW_SOURCES_DIR),
        ]
        for sources, spec_file, sources_dir in zip(rest_sources, spec_files, sources_dirs):
            for rest in sources:
                archive = [x for x in Archive.get_supported_archives() if rest.endswith(x)]
                if archive:
                    # only extract secondary archives whose target location in
                    # the %prep section can be determined
                    dest_dir = spec_file.find_archive_target_in_prep(rest)
                    if dest_dir:
                        Application.extract_sources(rest, os.path.join(self.execution_dir, sources_dir, dest_dir))

        return [old_dir, new_dir]

    def patch_sources(self, sources):
        """
        Rebase downstream patches on top of the new sources.

        :param sources: pair [old_sources_dir, new_sources_dir]
        :raises RebaseHelperError: if patching fails
        """
        try:
            # Patch sources
            self.rebased_patches = Patcher.patch(sources[0],
                                                 sources[1],
                                                 self.old_rest_sources,
                                                 self.spec_file.get_applied_patches(),
                                                 **self.kwargs)
        except RuntimeError as e:
            # chain the original error so the root cause is preserved
            raise RebaseHelperError('Patching failed') from e
        self.rebase_spec_file.write_updated_patches(self.rebased_patches,
                                                    self.conf.disable_inapplicable_patches)
        results_store.set_patches_results(self.rebased_patches)

    def generate_patch(self):
        """
        Generates patch to the results_dir containing all needed changes for
        the rebased package version
        """
        # Delete removed patches from rebased_sources_dir from git
        removed_patches = self.rebase_spec_file.removed_patches
        if removed_patches:
            self.rebased_repo.index.remove(removed_patches, working_tree=True)

        self.rebase_spec_file.update_paths_to_patches()

        # Generate patch
        self.rebased_repo.git.add(all=True)
        self.rebase_spec_file._update_data()  # pylint: disable=protected-access
        self.rebased_repo.index.commit(MacroHelper.expand(self.conf.changelog_entry, self.conf.changelog_entry))
        patch = self.rebased_repo.git.format_patch('-1', stdout=True, stdout_as_string=False)
        patch_path = os.path.join(self.results_dir, 'changes.patch')
        with open(patch_path, 'wb') as f:
            f.write(patch)
            f.write(b'\n')

        results_store.set_changes_patch('changes_patch', patch_path)

    @classmethod
    def _update_gitignore(cls, sources, rebased_sources_dir):
        """Adds new entries into .gitignore file.

        Args:
            sources (list): List of new source files.
            rebased_sources_dir (str): Target directory.

        """
        gitignore = os.path.join(rebased_sources_dir, '.gitignore')

        if not os.path.isfile(gitignore):
            return

        with open(gitignore) as f:
            patterns = [line.lstrip(os.path.sep).rstrip('\n') for line in f]

        def is_ignored(source):
            name = source.lstrip(os.path.sep).rstrip('\n')
            return any(fnmatch.fnmatch(name, pattern) for pattern in patterns)

        with open(gitignore, 'a') as f:
            for src in sources:
                if not is_ignored(src):
                    f.write(os.path.sep + src + '\n')

    @classmethod
    def _prepare_rebased_repository(cls, patches, execution_dir, rebased_sources_dir):
        """
        Initialize git repository in the rebased directory
        :return: git.Repo instance of rebased_sources
        """
        # copy all downstream patches (applied or not) next to the rebased sources
        for patch in patches['applied'] + patches['not_applied']:
            shutil.copy(patch.path, rebased_sources_dir)

        # carry over dist-git metadata files when they exist
        for metadata in ('sources', '.gitignore'):
            metadata_path = os.path.join(execution_dir, metadata)
            if os.path.isfile(metadata_path):
                shutil.copy(metadata_path, rebased_sources_dir)

        repo = git.Repo.init(rebased_sources_dir)
        repo.git.config('user.name', GitHelper.get_user(), local=True)
        repo.git.config('user.email', GitHelper.get_email(), local=True)
        repo.git.add(all=True)
        repo.index.commit('Initial commit', skip_hooks=True)
        return repo

    @staticmethod
    def _sanitize_build_dict(build_dict):
        """Strip internal build-control keys from a build data dictionary."""
        internal_keys = frozenset((
            'builds_nowait',
            'build_tasks',
            'builder_options',
            'srpm_builder_options',
        ))
        return {key: value for key, value in build_dict.items() if key not in internal_keys}

    def build_source_packages(self):
        """Build old and new SRPM packages.

        Selects the SRPM build tool configured via ``srpm_buildtool`` and
        builds a source package for both versions. When
        ``get_old_build_from_koji`` is set, the old SRPM is downloaded from
        Koji instead of being built locally. Build data (or error details)
        are stored in the results store per version.

        :raises RebaseHelperError: if the build tool is unsupported or the
            build fails for any reason
        """
        try:
            builder = plugin_manager.srpm_build_tools.get_plugin(self.conf.srpm_buildtool)
        except NotImplementedError as e:
            raise RebaseHelperError('{}. Supported SRPM build tools are {}'.format(
                str(e), plugin_manager.srpm_build_tools.get_supported_tools()))

        for version in ['old', 'new']:
            koji_build_id = None
            results_dir = '{}-build'.format(os.path.join(self.results_dir, version))
            spec = self.spec_file if version == 'old' else self.rebase_spec_file
            package_name = spec.get_package_name()
            package_version = spec.get_version()
            package_full_version = spec.get_full_version()
            logger.info('Building source package for %s version %s', package_name, package_full_version)

            # for the old version, optionally reuse an existing Koji build
            if version == 'old' and self.conf.get_old_build_from_koji:
                koji_build_id, package_version, package_full_version = KojiHelper.get_old_build_info(package_name,
                                                                                                     package_version)

            build_dict = dict(
                name=package_name,
                version=package_version,
                srpm_buildtool=self.conf.srpm_buildtool,
                srpm_builder_options=self.conf.srpm_builder_options)
            try:
                if koji_build_id:
                    # download the SRPM of the existing Koji build instead of building
                    session = KojiHelper.create_session()
                    build_dict['srpm'], build_dict['logs'] = KojiHelper.download_build(session,
                                                                                       koji_build_id,
                                                                                       os.path.join(
                                                                                           results_dir,
                                                                                           'SRPM'
                                                                                       ),
                                                                                       arches=['src'])

                else:
                    build_dict.update(builder.build(spec, results_dir, **build_dict))
                # strip configuration-only keys before persisting results
                build_dict = self._sanitize_build_dict(build_dict)
                results_store.set_build_data(version, build_dict)
            except RebaseHelperError:  # pylint: disable=try-except-raise
                raise
            except SourcePackageBuildError as e:
                # persist logs and the error so the final report can show them
                build_dict.update(builder.get_logs())
                build_dict['source_package_build_error'] = str(e)
                build_dict = self._sanitize_build_dict(build_dict)
                results_store.set_build_data(version, build_dict)
                if e.logfile:
                    msg = 'Building {} SRPM packages failed; see {} for more information'.format(version, e.logfile)
                else:
                    msg = 'Building {} SRPM packages failed; see logs in {} for more information'.format(
                        version, os.path.join(results_dir, 'SRPM'))
                logger.error(msg)
                raise RebaseHelperError(msg, logfiles=builder.get_logs().get('logs'))
            except Exception:
                raise RebaseHelperError('Building package failed with unknown reason. '
                                        'Check all available log files.')

    def build_binary_packages(self):
        """Build old and new binary RPM packages.

        For each version either builds the packages with the configured build
        tool, downloads them from an existing Koji build
        (``get_old_build_from_koji``), or attaches to previously submitted
        build tasks (``build_tasks``). Build data or error details are stored
        in the results store.

        :raises RebaseHelperError: if the build tool is unsupported or the
            build fails for any reason
        """
        try:
            builder = plugin_manager.build_tools.get_plugin(self.conf.buildtool)
        except NotImplementedError as e:
            raise RebaseHelperError('{}. Supported build tools are {}'.format(
                str(e), plugin_manager.build_tools.get_supported_tools()))

        for version in ['old', 'new']:
            results_dir = '{}-build'.format(os.path.join(self.results_dir, version))
            spec = None
            task_id = None
            koji_build_id = None
            build_dict = {}

            if self.conf.build_tasks is None:
                spec = self.spec_file if version == 'old' else self.rebase_spec_file
                package_name = spec.get_package_name()
                package_version = spec.get_version()
                package_full_version = spec.get_full_version()

                # for the old version, optionally reuse an existing Koji build
                if version == 'old' and self.conf.get_old_build_from_koji:
                    koji_build_id, package_version, package_full_version = KojiHelper.get_old_build_info(
                                                                               package_name,
                                                                               package_version)

                build_dict = dict(
                    name=package_name,
                    version=package_version,
                    builds_nowait=self.conf.builds_nowait,
                    build_tasks=self.conf.build_tasks,
                    builder_options=self.conf.builder_options,
                    srpm=results_store.get_build(version).get('srpm'),
                    srpm_logs=results_store.get_build(version).get('logs'))

                # prepare for building
                builder.prepare(spec, self.conf)

                logger.info('Building binary packages for %s version %s', package_name, package_full_version)
            else:
                # attach to a previously submitted build task instead of building
                task_id = self.conf.build_tasks[0] if version == 'old' else self.conf.build_tasks[1]

            try:
                if self.conf.build_tasks is None:
                    if koji_build_id:
                        session = KojiHelper.create_session()
                        build_dict['rpm'], build_dict['logs'] = KojiHelper.download_build(session,
                                                                                          koji_build_id,
                                                                                          os.path.join(
                                                                                              results_dir,
                                                                                              'RPM',
                                                                                          ),
                                                                                          arches=['noarch', 'x86_64'])
                    else:
                        build_dict.update(builder.build(spec, results_dir, **build_dict))
                if builder.CREATES_TASKS and task_id and not koji_build_id:
                    if not self.conf.builds_nowait:
                        build_dict['rpm'], build_dict['logs'] = builder.wait_for_task(build_dict,
                                                                                      task_id,
                                                                                      results_dir)
                    elif self.conf.build_tasks:
                        build_dict['rpm'], build_dict['logs'] = builder.get_detached_task(task_id, results_dir)
                # strip configuration-only keys before persisting results
                build_dict = self._sanitize_build_dict(build_dict)
                results_store.set_build_data(version, build_dict)
            except RebaseHelperError:  # pylint: disable=try-except-raise
                # Proper RebaseHelperError instance was created already. Re-raise it.
                raise
            except BinaryPackageBuildError as e:
                # persist logs and the error so the final report can show them
                build_dict.update(builder.get_logs())
                build_dict['binary_package_build_error'] = str(e)
                build_dict = self._sanitize_build_dict(build_dict)
                results_store.set_build_data(version, build_dict)

                if e.logfile is None:
                    msg = 'Building {} RPM packages failed; see logs in {} for more information'.format(
                        version, os.path.join(results_dir, 'RPM')
                    )
                else:
                    msg = 'Building {} RPM packages failed; see {} for more information'.format(version, e.logfile)

                logger.error(msg)
                raise RebaseHelperError(msg, logfiles=builder.get_logs().get('logs'))
            except Exception:
                raise RebaseHelperError('Building package failed with unknown reason. '
                                        'Check all available log files.')

        if self.conf.builds_nowait and not self.conf.build_tasks:
            if builder.CREATES_TASKS:
                self.print_task_info(builder)

    def run_package_checkers(self, results_dir, **kwargs):
        """
        Run the configured checkers and store their outputs.

        :param results_dir: Path to directory in which to store the results.
        :type results_dir: str
        :param category: checker type(SOURCE/SRPM/RPM)
        :type category: str
        :return: None
        """
        checkers_dir = os.path.join(results_dir, 'checkers')
        outputs = {}

        for name in self.conf.pkgcomparetool:
            try:
                output = plugin_manager.checkers.run(checkers_dir, name, **kwargs)
            except CheckerNotFoundError:
                logger.error("Rebase-helper did not find checker '%s'.", name)
                continue
            if output:
                outputs[name] = output

        for name, output in outputs.items():
            results_store.set_checker_output(name, output)

    def get_all_log_files(self):
        """
        Collect the log files created by rebase-helper.

        Returns the debug log first and the report summary log second,
        keeping only those that are actually available.

        :return: list of available log file paths
        """
        candidates = (self.debug_log_file, self.report_log_file)
        return [log for log in candidates if PathHelper.file_available(log)]

    def get_new_build_logs(self):
        """Return build data for both versions under the 'build_ref' key."""
        build_ref = {version: results_store.get_build(version)
                     for version in ('old', 'new')}
        return {'build_ref': build_ref}

    def get_checker_outputs(self):
        """Map each checker name to an available output log.

        Checkers with no data map to None; checkers whose data contains no
        available file are omitted.
        """
        checkers = {}
        for name, logs in results_store.get_checkers().items():
            if not logs:
                checkers[name] = None
                continue
            for log in logs:
                if PathHelper.file_available(log):
                    checkers[name] = log
        return checkers

    def get_rebased_patches(self):
        """
        Build human-readable messages describing what happened to patches.

        :return: list of strings, one per patch category; a single default
                 message when no patch was modified
        """
        messages = []
        patch_info = results_store.get_patches()
        if patch_info:
            for action, patch_list in patch_info.items():
                if action:
                    messages.append('Following patches have been {}:\n{}'.format(action, patch_list))
        if not messages:
            messages.append('Patches were not touched. All were applied properly')
        return messages

    def print_summary(self, exception=None):
        """
        Save rebase-helper result and print the summary using output tools.

        :param exception: RebaseHelperError that caused the failure, or None
            on success; its ``msg`` and ``logfiles`` attributes are read.
        :return: None
        """
        logs = None
        # Store rebase helper result exception
        if exception:
            if exception.logfiles:
                logs = exception.logfiles

            results_store.set_result_message('fail', exception.msg)
        else:
            result = "Rebase from {}-{} to {}-{} completed without an error".format(
                self.spec_file.get_package_name(), self.spec_file.get_version(),
                self.rebase_spec_file.get_package_name(), self.rebase_spec_file.get_version())
            results_store.set_result_message('success', result)

        # Point Patch tags at the rebased patch files and create changes.patch
        if self.rebase_spec_file:
            self.rebase_spec_file.update_paths_to_patches()
            self.generate_patch()

        plugin_manager.output_tools.run(self.conf.outputtool, logs, self)

    def print_task_info(self, builder):
        """Log build task information for both old and new builds."""
        build_ref = self.get_new_build_logs()['build_ref']
        for version in ('old', 'new'):
            logger.info(builder.get_task_info(build_ref[version]))

    def get_rebasehelper_data(self):
        """Gather all rebase-helper outputs into a single dictionary."""
        return {
            'build_logs': self.get_new_build_logs(),
            'patches': self.get_rebased_patches(),
            'checkers': self.get_checker_outputs(),
            'logs': self.get_all_log_files(),
        }

    def apply_changes(self):
        """Apply changes.patch to the git repository in the execution directory.

        Initializes a repository if one does not exist. If no patch was
        created, or the patch cannot be applied, only a warning is logged.

        :return: None
        """
        try:
            repo = git.Repo(self.execution_dir)
        except git.InvalidGitRepositoryError:
            repo = git.Repo.init(self.execution_dir)
        patch = results_store.get_changes_patch()
        if not patch:
            logger.warning('Cannot apply changes.patch. No patch file was created')
            # bug fix: previously execution fell through and crashed with a
            # TypeError on patch['changes_patch'] when no patch existed
            return
        try:
            repo.git.am(patch['changes_patch'])
        except git.GitCommandError as e:
            logger.warning('changes.patch was not applied properly. Please review changes manually.'
                           '\nThe error message is: %s', str(e))

    def prepare_next_run(self, results_dir):
        """Decide whether the build should be retried and prepare for it.

        Runs build log hooks (only after a failed build of new RPM packages),
        optionally asks the user whether to retry, re-reads the possibly
        modified rebased SPEC file and clears per-version build outputs.

        :param results_dir: Path to the directory with rebase results.
        :return: True if the build should be restarted, False otherwise.
        """
        # Running build log hooks only makes sense after a failed build
        # of new RPM packages. The folder results_dir/new-build/RPM
        # doesn't exist unless the build of new RPM packages has been run.
        changes_made = False
        if os.path.exists(os.path.join(results_dir, 'new-build', 'RPM')):
            changes_made = plugin_manager.build_log_hooks.run(self.spec_file, self.rebase_spec_file, **self.kwargs)
        # Save current rebase spec file content
        self.rebase_spec_file.save()
        # Interactive mode: user decides; non-interactive: retry only if hooks changed the SPEC
        if not self.conf.non_interactive and \
                InputHelper.get_message('Do you want to try it one more time'):
            logger.info('Now it is time to make changes to  %s if necessary.', self.rebase_spec_file.path)
        elif self.conf.non_interactive and changes_made:
            logger.info('Build log hooks made some changes to the SPEC file, starting the build process again.')
        else:
            return False
        if not self.conf.non_interactive and not \
                InputHelper.get_message('Do you want to continue with the rebuild now'):
            return False
        # Update rebase spec file content after potential manual modifications
        self.rebase_spec_file._read_spec_content()  # pylint: disable=protected-access
        self.rebase_spec_file._update_data()  # pylint: disable=protected-access
        # clear current version output directories
        if os.path.exists(os.path.join(results_dir, 'old-build')):
            shutil.rmtree(os.path.join(results_dir, 'old-build'))
        if os.path.exists(os.path.join(results_dir, 'new-build')):
            shutil.rmtree(os.path.join(results_dir, 'new-build'))
        return True

    def run(self):
        """Run the whole rebase process.

        Validates option/build-tool combinations, prepares and patches the
        sources, builds SRPM and RPM packages (retrying while build log hooks
        keep fixing the SPEC), runs checkers and prints the summary.

        :return: 0 on success; returns early (None) when builds_nowait is
            used without build_tasks.
        :raises RebaseHelperError: on option misuse or any fatal failure.
        """
        # Certain options can be used only with specific build tools
        tools_creating_tasks = []
        for tool_name, tool in plugin_manager.build_tools.plugins.items():
            if tool and tool.CREATES_TASKS:
                tools_creating_tasks.append(tool_name)
        if self.conf.buildtool not in tools_creating_tasks:
            options_used = []
            if self.conf.build_tasks is not None:
                options_used.append('--build-tasks')
            if self.conf.builds_nowait is True:
                options_used.append('--builds-nowait')
            if options_used:
                raise RebaseHelperError("{} can be used only with the following build tools: {}".format(
                                        " and ".join(options_used),
                                        ", ".join(tools_creating_tasks)))
        elif self.conf.builds_nowait and self.conf.get_old_build_from_koji:
            raise RebaseHelperError("{} can't be used with: {}".format('--builds-nowait', '--get-old-build-from-koji'))

        tools_accepting_options = []
        for tool_name, tool in plugin_manager.build_tools.plugins.items():
            if tool and tool.ACCEPTS_OPTIONS:
                tools_accepting_options.append(tool_name)
        if self.conf.buildtool not in tools_accepting_options:
            options_used = []
            if self.conf.builder_options is not None:
                options_used.append('--builder-options')
            if options_used:
                raise RebaseHelperError("{} can be used only with the following build tools: {}".format(
                                        " and ".join(options_used),
                                        ", ".join(tools_accepting_options)))

        # Prepare and patch sources unless only attaching to existing build tasks
        if self.conf.build_tasks is None:
            old_sources, new_sources = self.prepare_sources()
            self.run_package_checkers(self.results_dir,
                                      category=CheckerCategory.SOURCE,
                                      old_dir=old_sources,
                                      new_dir=new_sources)
            if not self.conf.build_only and not self.conf.comparepkgs:
                try:
                    self.patch_sources([old_sources, new_sources])
                except RebaseHelperError as e:
                    # Print summary and return error
                    self.print_summary(e)
                    raise

        if not self.conf.patch_only:
            if not self.conf.comparepkgs:
                # Build packages; retry while prepare_next_run says so
                while True:
                    try:
                        if self.conf.build_tasks is None:
                            self.build_source_packages()
                        self.run_package_checkers(self.results_dir, category=CheckerCategory.SRPM)
                        self.build_binary_packages()
                        if self.conf.builds_nowait and not self.conf.build_tasks:
                            return
                        self.run_package_checkers(self.results_dir, category=CheckerCategory.RPM)
                    # Print summary and return error
                    except RebaseHelperError as e:
                        if self.prepare_next_run(self.results_dir):
                            continue
                        self.print_summary(e)
                        raise
                    else:
                        break
            else:
                # Compare pre-built packages supplied by the user
                if self.get_rpm_packages(self.conf.comparepkgs):
                    self.run_package_checkers(self.results_dir, category=CheckerCategory.SRPM)
                    self.run_package_checkers(self.results_dir, category=CheckerCategory.RPM)

        if not self.conf.keep_workspace:
            self._delete_workspace_dir()

        if self.debug_log_file:
            self.print_summary()
        if self.conf.apply_changes:
            self.apply_changes()
        return 0
# Exemplo n.º 40
# 0
class Application(object):
    # NOTE(review): these are *class-level* default attributes. The mutable
    # ones (kwargs, old_rest_sources, new_rest_sources, rebased_patches) are
    # shared between instances unless rebound on self; __init__ below mutates
    # self.kwargs in place, so two Application instances would share it —
    # TODO confirm that only one instance is ever created per process.
    result_file = ""
    temp_dir = ""
    kwargs = {}
    # Source0 archives for old and new version (absolute paths after init)
    old_sources = ""
    new_sources = ""
    # all sources other than Source0
    old_rest_sources = []
    new_rest_sources = []
    # SpecFile objects and paths for the current and the rebased SPEC file
    spec_file = None
    spec_file_path = None
    rebase_spec_file = None
    rebase_spec_file_path = None
    # log file paths, set only if the handlers are created successfully
    debug_log_file = None
    report_log_file = None
    # result of Patcher.patch()
    rebased_patches = {}
    # presumably a flag for upstream-monitoring mode — TODO confirm, unused here
    upstream_monitoring = False

    def __init__(self, cli_conf=None):
        """
        Initialize the application.

        Sets up logging, the execution/workspace/results directories and,
        unless only attaching to build tasks, the SPEC file objects.

        :param cli_conf: CLI object with configuration gathered from commandline
        :return: None
        """
        OutputLogger.clear()

        self.conf = cli_conf

        if self.conf.verbose:
            LoggerHelper.add_stream_handler(logger, logging.DEBUG)
        else:
            LoggerHelper.add_stream_handler(logger, logging.INFO)

        # The directory in which rebase-helper was executed
        if self.conf.results_dir is None:
            self.execution_dir = os.getcwd()
        else:
            self.execution_dir = self.conf.results_dir

        # Temporary workspace for Builder, checks, ...
        self.kwargs['workspace_dir'] = self.workspace_dir = os.path.join(self.execution_dir,
                                                                         settings.REBASE_HELPER_WORKSPACE_DIR)
        # Directory where results should be put
        self.kwargs['results_dir'] = self.results_dir = os.path.join(self.execution_dir,
                                                                     settings.REBASE_HELPER_RESULTS_DIR)

        self.kwargs['non_interactive'] = self.conf.non_interactive
        # if not continuing, check the results dir
        if not self.conf.cont and not self.conf.build_only and not self.conf.comparepkgs:
            self._check_results_dir()
        # This is used if user executes rebase-helper with --continue
        # parameter even when directory does not exist
        if not os.path.exists(self.results_dir):
            os.makedirs(self.results_dir)
            os.makedirs(os.path.join(self.results_dir, settings.REBASE_HELPER_LOGS))

        self._add_debug_log_file()
        self._add_report_log_file()
        # bug fix: use lazy %-style logging arguments instead of eager
        # string interpolation with the % operator
        logger.debug("Rebase-helper version: %s", version.VERSION)
        if self.conf.build_tasks is None:
            self._get_spec_file()
            self._prepare_spec_objects()

            # check the workspace dir
            if not self.conf.cont:
                self._check_workspace_dir()

            # TODO: Remove the value from kwargs and use only CLI attribute!
            self.kwargs['continue'] = self.conf.cont
            self._initialize_data()

        if self.conf.cont or self.conf.build_only:
            self._delete_old_builds()

    def _add_debug_log_file(self):
        """
        Attach the application-wide debug log file handler.

        On failure to create the file only a warning is logged and
        self.debug_log_file stays unset.

        :return: None
        """
        log_path = os.path.join(self.results_dir, settings.REBASE_HELPER_DEBUG_LOG)
        log_format = logging.Formatter("%(asctime)s %(levelname)s\t%(filename)s"
                                       ":%(lineno)s %(funcName)s: %(message)s")
        try:
            LoggerHelper.add_file_handler(logger, log_path, log_format, logging.DEBUG)
        except (IOError, OSError):
            logger.warning("Can not create debug log '%s'", log_path)
        else:
            self.debug_log_file = log_path

    def _add_report_log_file(self):
        """
        Attach the report log file handler to the report logger.

        On failure to create the file only a warning is logged and
        self.report_log_file stays unset.

        :return: None
        """
        log_path = os.path.join(self.results_dir, settings.REBASE_HELPER_REPORT_LOG)
        try:
            LoggerHelper.add_file_handler(logger_report, log_path, None, logging.INFO)
        except (IOError, OSError):
            logger.warning("Can not create report log '%s'", log_path)
        else:
            self.report_log_file = log_path

    def _prepare_spec_objects(self):
        """
        Prepare spec files and initialize objects.

        Creates SpecFile objects for the original and the rebased SPEC file
        and sets the new version on the rebased one, either from an archive
        file name or from a version string passed as the new source.

        :return: None
        """
        self.rebase_spec_file_path = get_rebase_name(self.spec_file_path)

        self.spec_file = SpecFile(self.spec_file_path,
                                  self.execution_dir,
                                  download=not self.conf.not_download_sources)
        # Check whether test suite is enabled at build time
        if not self.spec_file.is_test_suite_enabled():
            OutputLogger.set_info_text('WARNING', 'Test suite is not enabled at build time.')
        #  create an object representing the rebased SPEC file
        self.rebase_spec_file = self.spec_file.copy(self.rebase_spec_file_path)

        #  check if argument passed as new source is a file or just a version
        if [True for ext in Archive.get_supported_archives() if self.conf.sources.endswith(ext)]:
            logger.debug("argument passed as a new source is a file")
            self.rebase_spec_file.set_version_using_archive(self.conf.sources)
        else:
            logger.debug("argument passed as a new source is a version")
            version, extra_version = SpecFile.split_version_string(self.conf.sources)
            self.rebase_spec_file.set_version(version)
            self.rebase_spec_file.set_extra_version(extra_version)

    def _initialize_data(self):
        """Fill instance attributes with source archives and validate the compare tool."""
        # Main tarball (Source0) of the old version, as an absolute path
        self.old_sources = os.path.abspath(self.spec_file.get_archive())
        new_sources = self.rebase_spec_file.get_archive()
        if new_sources:
            self.conf.sources = new_sources

        if not self.conf.sources:
            raise RebaseHelperError('You have to define new sources.')
        self.new_sources = os.path.abspath(self.conf.sources)

        # All sources except Source0, as absolute paths
        self.old_rest_sources = [os.path.abspath(src)
                                 for src in self.spec_file.get_sources()[1:]]
        self.new_rest_sources = [os.path.abspath(src)
                                 for src in self.rebase_spec_file.get_sources()[1:]]

        # We want to inform user immediately if compare tool doesn't exist
        supported_tools = Checker(os.path.dirname(__file__)).get_supported_tools()
        if self.conf.pkgcomparetool and self.conf.pkgcomparetool not in supported_tools:
            raise RebaseHelperError('You have to specify one of these check tools %s' % supported_tools)

    def _get_rebase_helper_log(self):
        """Return the path to the rebase-helper results log file."""
        log_name = settings.REBASE_HELPER_RESULTS_LOG
        return os.path.join(self.results_dir, log_name)

    def get_rpm_packages(self, dirname):
        """
        Collect RPM packages stored in dirname/old and dirname/new directories.

        :param dirname: directory where old and new RPMs are stored
        :return: True if both versions provide at least one RPM, False otherwise
        """
        found = True
        for version in ['old', 'new']:
            data = {}
            data['name'] = self.spec_file.get_package_name()
            if version == 'old':
                data['version'] = self.spec_file.get_version()
            else:
                data['version'] = self.rebase_spec_file.get_version()
            rpm_dir = os.path.join(os.path.realpath(dirname), version, 'RPM')
            data['rpm'] = PathHelper.find_all_files(rpm_dir, '*.rpm')
            if not data['rpm']:
                # report the actual searched path; the original message glued
                # dirname and version together without a path separator
                logger.error('Your path %s does not contain any RPM packages', rpm_dir)
                found = False
            OutputLogger.set_build_data(version, data)
        return found

    def _get_spec_file(self):
        """Locate the SPEC file in the execution directory.

        :raises RebaseHelperError: if no SPEC file is found
        """
        self.spec_file_path = PathHelper.find_first_file(self.execution_dir, '*.spec', 0)
        if not self.spec_file_path:
            # bug fix: the message must be %-formatted here; previously the
            # format string and its argument were passed as two separate
            # positional arguments, so the path was never interpolated
            raise RebaseHelperError("Could not find any SPEC file in the current directory '%s'" %
                                    self.execution_dir)

    def _delete_old_builds(self):
        """
        Deletes the old and new result dir from previous build

        :return: 
        """
        self._delete_new_results_dir()
        self._delete_old_results_dir()

    def _delete_old_results_dir(self):
        """
        Deletes old result dir

        :return: 
        """
        if os.path.isdir(os.path.join(self.results_dir, 'old')):
            shutil.rmtree(os.path.join(self.results_dir, 'old'))

    def _delete_new_results_dir(self):
        """
        Deletes new result dir

        :return: 
        """
        if os.path.isdir(os.path.join(self.results_dir, 'new')):
            shutil.rmtree(os.path.join(self.results_dir, 'new'))

    def _delete_workspace_dir(self):
        """
        Delete the workspace directory, logging the action.

        :return: None
        """
        workspace = self.workspace_dir
        logger.debug("Removing the workspace directory '%s'", workspace)
        if os.path.isdir(workspace):
            shutil.rmtree(workspace)

    def _check_workspace_dir(self):
        """
        Check if workspace dir exists, and removes it if yes.

        :return: 
        """
        if os.path.exists(self.workspace_dir):
            logger.warning("Workspace directory '%s' exists, removing it", os.path.basename(self.workspace_dir))
            self._delete_workspace_dir()
        os.makedirs(self.workspace_dir)

    def _check_results_dir(self):
        """
        Recreate the results directory with its expected subdirectories,
        removing a pre-existing one first.

        :return: None
        """
        # TODO: We may not want to delete the directory in the future
        if os.path.exists(self.results_dir):
            logger.warning("Results directory '%s' exists, removing it",
                           os.path.basename(self.results_dir))
            shutil.rmtree(self.results_dir)
        os.makedirs(self.results_dir)
        for subdir in (settings.REBASE_HELPER_LOGS, 'old', 'new'):
            os.makedirs(os.path.join(self.results_dir, subdir))

    @staticmethod
    def extract_archive(archive_path, destination):
        """
        Extracts given archive into the destination and handle all exceptions.

        :param archive_path: path to the archive to be extracted
        :param destination: path to a destination, where the archive should be extracted to
        :return: None
        :raises RebaseHelperError: when the archive type is unsupported,
            the archive cannot be extracted, or it is damaged
        """
        try:
            archive = Archive(archive_path)
        except NotImplementedError as ni_e:
            # bug fix: the % operator previously bound to the first argument
            # only, so formatting a two-placeholder string with a single value
            # raised TypeError instead of the intended error message
            raise RebaseHelperError('%s. Supported archives are %s' %
                                    (six.text_type(ni_e), Archive.get_supported_archives()))

        try:
            archive.extract_archive(destination)
        except IOError:
            raise RebaseHelperError("Archive '%s' can not be extracted" % archive_path)
        except (EOFError, SystemError):
            raise RebaseHelperError("Archive '%s' is damaged" % archive_path)

    @staticmethod
    def extract_sources(archive_path, destination):
        """Extract the given archive and return the directory containing the sources."""
        Application.extract_archive(archive_path, destination)

        entries = os.listdir(destination)
        if not entries:
            raise RebaseHelperError('Extraction of sources failed!')

        # single top-level directory means the sources live inside it
        candidate = os.path.join(destination, entries[0])
        return candidate if os.path.isdir(candidate) else destination

    @staticmethod
    def check_build_requires(spec):
        """
        Check whether all build dependencies are installed; if some are
        missing, ask the user and install them using PolicyKit.

        :param spec: SpecFile object
        :return: None
        :raises RebaseHelperError: if installing the dependencies fails
        """
        required = spec.get_requires()
        if RpmHelper.all_packages_installed(required):
            return
        if ConsoleHelper.get_message('\nSome build dependencies are missing. Do you want to install them now'):
            if RpmHelper.install_build_dependencies(spec.get_path()) != 0:
                raise RebaseHelperError('Failed to install build dependencies')

    def prepare_sources(self):
        """
        Extract the old and new source archives.

        Extracts both main tarballs, updates the %setup dirname in the
        rebased SPEC file if the top-level directory changed, and extracts
        the remaining source archives to their target paths from %prep.

        :return: list [old_sources_dir, new_sources_dir]
        """
        old_dir = Application.extract_sources(self.old_sources,
                                              os.path.join(self.execution_dir, settings.OLD_SOURCES_DIR))
        new_dir = Application.extract_sources(self.new_sources,
                                              os.path.join(self.execution_dir, settings.NEW_SOURCES_DIR))

        # determine top-level directory in new_sources archive
        toplevel_dir = os.path.relpath(new_dir,
                                       os.path.join(self.execution_dir, settings.NEW_SOURCES_DIR))

        if toplevel_dir != '.':
            self.rebase_spec_file.update_setup_dirname(toplevel_dir)

        # extract rest of source archives to correct paths
        rest_sources = [self.old_rest_sources, self.new_rest_sources]
        spec_files = [self.spec_file, self.rebase_spec_file]
        sources_dirs = [settings.OLD_SOURCES_DIR, settings.NEW_SOURCES_DIR]
        for sources, spec_file, sources_dir in zip(rest_sources, spec_files, sources_dirs):
            for rest in sources:
                # only extract files recognized as supported archives
                archive = [x for x in Archive.get_supported_archives() if rest.endswith(x)]
                if archive:
                    dest_dir = spec_file.find_archive_target_in_prep(rest)
                    if dest_dir:
                        Application.extract_sources(rest, os.path.join(self.execution_dir, sources_dir, dest_dir))

        return [old_dir, new_dir]

    def patch_sources(self, sources):
        """
        Rebase the downstream patches onto the new sources.

        :param sources: list [old_sources_dir, new_sources_dir]
        :return: None
        :raises RebaseHelperError: if patching fails
        """
        # Patch sources
        git_helper = GitHelper(sources[0])
        if not self.conf.non_interactive:
            git_helper.check_git_config()
        patch = Patcher(GitHelper.GIT)
        self.rebase_spec_file.update_changelog(self.rebase_spec_file.get_new_log(git_helper))
        try:
            self.rebased_patches = patch.patch(sources[0],
                                               sources[1],
                                               self.old_rest_sources,
                                               git_helper,
                                               self.spec_file.get_applied_patches(),
                                               self.spec_file.get_prep_section(),
                                               **self.kwargs)
        except RuntimeError:
            raise RebaseHelperError('Patching failed')
        # write the rebased Patch tags back into the SPEC file
        self.rebase_spec_file.write_updated_patches(self.rebased_patches)
        if self.conf.non_interactive:
            if 'unapplied' in self.rebased_patches:
                OutputLogger.set_patch_output('Unapplied patches:', self.rebased_patches['unapplied'])
        OutputLogger.set_patch_output('Patches:', self.rebased_patches)

    def build_packages(self):
        """
        Build the old and new RPM packages with the selected build tool.

        Depending on configuration, either builds locally/remotely, or only
        downloads the results of previously submitted remote build tasks.

        :return: True if both versions are built/downloaded successfully,
                 False if remote builds are not finished yet
        :raises RebaseHelperError: when building the old version fails, or
                 the new version cannot be built within conf.build_retries
        """
        if self.conf.buildtool == 'fedpkg' and not koji_builder:
            # FIX: used bare print(); route through the module logger instead
            logger.warning('Importing module koji failed. Switching to mockbuild.')
            self.conf.buildtool = 'mock'
        try:
            builder = Builder(self.conf.buildtool)
        except NotImplementedError as ni_e:
            # FIX: original applied '%' to a single argument (TypeError at
            # raise time) and passed get_supported_tools() as a stray second
            # exception argument; format the full message eagerly instead
            raise RebaseHelperError('%s. Supported build tools are %s' %
                                    (six.text_type(ni_e), Builder.get_supported_tools()))

        for version in ['old', 'new']:
            spec_object = self.spec_file if version == 'old' else self.rebase_spec_file
            build_dict = {}
            task_id = None
            if self.conf.build_tasks is None:
                build_dict['name'] = spec_object.get_package_name()
                build_dict['version'] = spec_object.get_version()
                patches = [x.get_path() for x in spec_object.get_patches()]
                spec = spec_object.get_path()
                sources = spec_object.get_sources()
                logger.info('Building packages for %s version %s',
                            spec_object.get_package_name(),
                            spec_object.get_version())
            else:
                # --build-tasks carries '<old_task>,<new_task>'
                if version == 'old':
                    task_id = self.conf.build_tasks.split(',')[0]
                else:
                    task_id = self.conf.build_tasks.split(',')[1]
            results_dir = os.path.join(self.results_dir, version)
            build_dict['builds_nowait'] = self.conf.builds_nowait
            build_dict['build_tasks'] = self.conf.build_tasks

            files = {}
            number_retries = 0
            while self.conf.build_retries != number_retries:
                try:
                    if self.conf.build_tasks is None:
                        build_dict.update(builder.build(spec, sources, patches, results_dir, **build_dict))
                    if not self.conf.builds_nowait:
                        if self.conf.buildtool == 'fedpkg':
                            # NOTE(review): busy-wait without a sleep between
                            # polls — consider adding a delay; behavior kept
                            while True:
                                kh = KojiHelper()
                                build_dict['rpm'], build_dict['logs'] = kh.get_koji_tasks(build_dict['koji_task_id'], results_dir)
                                if build_dict['rpm']:
                                    break
                    else:
                        if self.conf.build_tasks:
                            if self.conf.buildtool == 'fedpkg':
                                kh = KojiHelper()
                                try:
                                    build_dict['rpm'], build_dict['logs'] = kh.get_koji_tasks(task_id, results_dir)
                                    OutputLogger.set_build_data(version, build_dict)
                                    if not build_dict['rpm']:
                                        return False
                                except TypeError:
                                    logger.info('Koji tasks are not finished yet. Try again later')
                                    return False
                            elif self.conf.buildtool == 'copr':
                                copr_helper = CoprHelper()
                                client = copr_helper.get_client()
                                build_id = int(task_id)
                                status = copr_helper.get_build_status(client, build_id)
                                if status in ['importing', 'pending', 'starting', 'running']:
                                    logger.info('Copr build is not finished yet. Try again later')
                                    return False
                                else:
                                    build_dict['rpm'], build_dict['logs'] = copr_helper.download_build(client, build_id, results_dir)
                                    if status not in ['succeeded', 'skipped']:
                                        # lazy %-args instead of eager .format()
                                        logger.info('Copr build %s did not complete successfully', build_id)
                                        return False
                    # Build finished properly, leave the retry loop
                    OutputLogger.set_build_data(version, build_dict)
                    break

                except SourcePackageBuildError:
                    #  always fail for original version
                    if version == 'old':
                        raise RebaseHelperError('Creating old SRPM package failed.')
                    logger.error('Building source package failed.')
                    #  TODO: implement log analyzer for SRPMs and add the checks here!!!
                    raise

                except BinaryPackageBuildError:
                    #  always fail for original version
                    rpm_dir = os.path.join(results_dir, 'RPM')
                    build_dict.update(builder.get_logs())
                    OutputLogger.set_build_data(version, build_dict)
                    build_log = 'build.log'
                    build_log_path = os.path.join(rpm_dir, build_log)
                    # FIX: exceptions do not perform lazy %-formatting like
                    # logging does, so all messages are formatted eagerly now
                    if version == 'old':
                        raise RebaseHelperError('Building old RPM package failed. Check log %s' % build_log_path)
                    logger.error('Building binary packages failed.')
                    msg = 'Building package failed'
                    try:
                        files = BuildLogAnalyzer.parse_log(rpm_dir, build_log)
                    except BuildLogAnalyzerMissingError:
                        raise RebaseHelperError('Build log %s does not exist' % build_log_path)
                    except BuildLogAnalyzerMakeError:
                        raise RebaseHelperError('%s during build. Check log %s' % (msg, build_log_path))
                    except BuildLogAnalyzerPatchError:
                        raise RebaseHelperError('%s during patching. Check log %s' % (msg, build_log_path))
                    except RuntimeError:
                        if self.conf.build_retries == number_retries:
                            raise RebaseHelperError('%s with unknown reason. Check log %s' % (msg, build_log_path))

                    if 'missing' in files:
                        missing_files = '\n'.join(files['missing'])
                        logger.info('Files not packaged in the SPEC file:\n%s', missing_files)
                    elif 'deleted' in files:
                        deleted_files = '\n'.join(files['deleted'])
                        logger.warning('Removed files packaged in SPEC file:\n%s', deleted_files)
                    else:
                        if self.conf.build_retries == number_retries:
                            raise RebaseHelperError("Build failed, but no issues were found in the build log %s" % build_log)
                    self.rebase_spec_file.modify_spec_files_section(files)

                # FIX: normalized the accidental double indentation here
                if not self.conf.non_interactive:
                    msg = 'Do you want rebase-helper to try build the packages one more time'
                    if not ConsoleHelper.get_message(msg):
                        raise KeyboardInterrupt
                else:
                    logger.warning('Some patches were not successfully applied')
                #  build just failed, otherwise we would break out of the while loop
                logger.debug('Number of retries is %s', self.conf.build_retries)
                # drop partial build results before retrying
                for subdir in ('RPM', 'SRPM'):
                    path = os.path.join(results_dir, subdir)
                    if os.path.exists(path):
                        shutil.rmtree(path)
                number_retries += 1
            if self.conf.build_retries == number_retries:
                # FIX: 'unknow' typo in the error message
                raise RebaseHelperError('Building package failed with unknown reason. Check all available log files.')

        return True

    def _execute_checkers(self, checker, dir_name):
        """
        Run a single checker tool against the given directory.

        :param checker: checker name based from command line
        :param dir_name: directory with packages to compare
        :return: checker output text
        """
        logger.info('Comparing packages using %s...', checker)
        return Checker(checker).run_check(dir_name)

    def pkgdiff_packages(self, dir_name):
        """
        Compare old and new packages with the configured checker tool,
        or with every supported tool when none was selected.

        :param dir_name: specify a result dir
        :return:
        """
        checker = Checker(os.path.dirname(__file__))
        pkgdiff_results = {}
        if self.conf.pkgcomparetool:
            pkgdiff_results[self.conf.pkgcomparetool] = checker.run_check(dir_name, checker_name=self.conf.pkgcomparetool)
        else:
            for tool in checker.get_supported_tools():
                try:
                    pkgdiff_results[tool] = checker.run_check(dir_name, checker_name=tool)
                except CheckerNotFoundError:
                    logger.info("Rebase-helper did not find checker '%s'." % tool)
        if pkgdiff_results:
            for diff_name, result in six.iteritems(pkgdiff_results):
                OutputLogger.set_checker_output(diff_name, result)

    def get_all_log_files(self):
        """
        Return all log files created by rebase-helper: the debug log file
        and the report summary log file, in that order.

        :return: list of available log file paths
        """
        candidates = (self.debug_log_file, self.report_log_file)
        return [log for log in candidates if FileHelper.file_available(log)]

    def get_new_build_logs(self):
        """Return recorded build data for both versions under 'build_ref'."""
        return {'build_ref': {version: OutputLogger.get_build(version)
                              for version in ['old', 'new']}}

    def get_checker_outputs(self):
        """
        Return a mapping of checker name to an available output log file,
        or to None when the checker produced no data.
        """
        checkers = {}
        for check, data in six.iteritems(OutputLogger.get_checkers() or {}):
            if not data:
                checkers[check] = None
                continue
            for log in six.iterkeys(data):
                if FileHelper.file_available(log):
                    checkers[check] = log
        return checkers

    def get_rebased_patches(self):
        """
        Return human-readable messages describing what happened to patches,
        e.g. '': [list_of_deleted_patches].

        :return: list of message strings
        """
        messages = []
        for key, val in six.iteritems(OutputLogger.get_patches() or {}):
            if key:
                messages.append('Following patches have been %s:\n%s' % (key, val))
        if not messages:
            messages.append('Patches were not touched. All were applied properly')
        return messages

    def print_summary(self):
        """Write the rebase report using the configured output tool and log its path."""
        output_tool.check_output_argument(self.conf.outputtool)
        report_file = os.path.join(self.results_dir,
                                   self.conf.outputtool + settings.REBASE_HELPER_OUTPUT_SUFFIX)
        output_tool.OutputTool(self.conf.outputtool).print_information(path=report_file)
        logger.info('Report file from rebase-helper is available here: %s', report_file)

    def print_koji_logs(self):
        """Log the Koji scratch-build task URL for both versions."""
        logs = self.get_new_build_logs()['build_ref']
        for version in ['old', 'new']:
            data = logs[version]
            logger.info("Scratch build for '%s' version is: "
                        "http://koji.fedoraproject.org/koji/taskinfo?taskID=%s"
                        % (data['version'], data['koji_task_id']))

    def print_copr_logs(self):
        """Log the Copr build URL for both versions."""
        copr_helper = CoprHelper()
        client = copr_helper.get_client()
        logs = self.get_new_build_logs()['build_ref']
        for version in ['old', 'new']:
            data = logs[version]
            url = copr_helper.get_build_url(client, data['copr_build_id'])
            logger.info("Copr build for '%s' version is: %s" % (data['version'], url))

    def set_upstream_monitoring(self):
        """Mark this run as triggered by upstream release monitoring."""
        self.upstream_monitoring = True

    def get_rebasehelper_data(self):
        """Collect build logs, patch info, checker outputs and log files into one dict."""
        return {
            'build_logs': self.get_new_build_logs(),
            'patches': self.get_rebased_patches(),
            'checkers': self.get_checker_outputs(),
            'logs': self.get_all_log_files(),
        }

    def run_download_compare(self, tasks_dict, dir_name):
        """
        Download finished Koji builds for both versions, compare the packages
        if the build is CLOSED, and return the collected rebase-helper data.

        :param tasks_dict: mapping with per-version (task, version, package)
                           tuples and an overall 'status' entry
        :param dir_name: directory where downloaded packages are stored
        :return: dict with build logs, patches, checkers and log files
        """
        self.set_upstream_monitoring()
        koji_helper = KojiHelper()
        for version in ['old', 'new']:
            compare_dirname = os.path.join(dir_name, version)
            if not os.path.exists(compare_dirname):
                os.mkdir(compare_dirname, 0o777)
            task, upstream_version, package = tasks_dict[version]
            rh_dict = {}
            rh_dict['rpm'], rh_dict['logs'] = koji_helper.get_koji_tasks([task], compare_dirname)
            rh_dict['version'] = upstream_version
            rh_dict['name'] = package
            OutputLogger.set_build_data(version, rh_dict)
        if tasks_dict['status'] == 'CLOSED':
            self.pkgdiff_packages(dir_name)
        self.print_summary()
        rh_stuff = self.get_rebasehelper_data()
        logger.info(rh_stuff)
        return rh_stuff

    def run(self):
        """
        Entry point: orchestrate the whole rebase.

        :return: exit code — 0 on success, 1 on failure
        """
        # --fedpkg-build-tasks is kept only as a deprecated alias
        if self.conf.fedpkg_build_tasks:
            logger.warning("Option --fedpkg-build-tasks is deprecated, use --build-tasks instead.")
            if not self.conf.build_tasks:
                self.conf.build_tasks = self.conf.fedpkg_build_tasks

        # --build-tasks is only valid with --builds-nowait and a remote
        # build tool; otherwise suppress both options and continue
        if self.conf.build_tasks and not self.conf.builds_nowait:
            if self.conf.buildtool in ['fedpkg', 'copr']:
                logger.error("--builds-nowait has to be specified with --build-tasks.")
                return 1
            else:
                logger.warning("Options are allowed only for fedpkg or copr build tools. Suppress them.")
                self.conf.build_tasks = self.conf.builds_nowait = False

        # sources are prepared/patched only when not resuming remote tasks
        sources = None
        if self.conf.build_tasks is None:
            sources = self.prepare_sources()
            if not self.conf.build_only and not self.conf.comparepkgs:
                self.patch_sources(sources)

        build = False
        if not self.conf.patch_only:
            if not self.conf.comparepkgs:
                # check build dependencies for rpmbuild
                if self.conf.buildtool == 'rpmbuild':
                    Application.check_build_requires(self.spec_file)
                # Build packages
                try:
                    build = self.build_packages()
                    # remote builds submitted with --builds-nowait: just print
                    # where to find them and finish successfully
                    if self.conf.builds_nowait and not self.conf.build_tasks:
                        if self.conf.buildtool == 'fedpkg':
                            self.print_koji_logs()
                        elif self.conf.buildtool == 'copr':
                            self.print_copr_logs()
                        return 0
                except RuntimeError:
                    logger.error('Unknown error caused by build log analysis')
                    return 1
                # Perform checks
            else:
                # compare pre-built packages supplied via --comparepkgs
                build = self.get_rpm_packages(self.conf.comparepkgs)
                # We don't care dirname doesn't contain any RPM packages
                # Therefore return 1
            if build:
                self.pkgdiff_packages(self.results_dir)
            else:
                if not self.upstream_monitoring:
                    # NOTE(review): message looks truncated ("See for more
                    # details") — confirm intended wording upstream
                    logger.info('Rebase package to %s FAILED. See for more details', self.conf.sources)
                return 1
            self.print_summary()

        if not self.conf.keep_workspace:
            self._delete_workspace_dir()

        if self.debug_log_file:
            logger.info("Detailed debug log is located in '%s'", self.debug_log_file)
        if not self.upstream_monitoring and not self.conf.patch_only:
            logger.info('Rebase package to %s was SUCCESSFUL.\n', self.conf.sources)
        return 0
Exemplo n.º 41
0
def spec_object(workdir):  # pylint: disable=redefined-outer-name
    """Copy the test SPEC file into *workdir* and return a SpecFile built from it."""
    source_path = os.path.join(TEST_FILES_DIR, SPEC_FILE)
    shutil.copy(source_path, workdir)
    return SpecFile(SPEC_FILE, workdir)
Exemplo n.º 42
0
 def test_split_version_string(self):
     """split_version_string separates the numeric version from an extra-version suffix."""
     assert SpecFile.split_version_string() == (None, None)
     assert SpecFile.split_version_string('1.0.1') == ('1.0.1', '')
     assert SpecFile.split_version_string('1.0.1b1') == ('1.0.1', 'b1')
     assert SpecFile.split_version_string('1.0.1rc1') == ('1.0.1', 'rc1')
Exemplo n.º 43
0
class Application(object):
    # NOTE(review): these are mutable *class* attributes shared by every
    # instance; kwargs in particular is mutated in __init__ via item
    # assignment, so state can leak between Application instances —
    # consider moving them into __init__.
    result_file = ""
    temp_dir = ""
    kwargs = {}
    old_sources = ""
    new_sources = ""
    old_rest_sources = []
    new_rest_sources = []
    spec_file = None
    spec_file_path = None
    rebase_spec_file = None
    rebase_spec_file_path = None
    debug_log_file = None
    report_log_file = None
    rebased_patches = {}
    rebased_repo = None

    def __init__(self, cli_conf, execution_dir, results_dir, debug_log_file):
        """
        Initialize the application

        :param cli_conf: CLI object with configuration gathered from commandline
        :param execution_dir: directory in which rebase-helper was started
        :param results_dir: directory where all results are written
        :param debug_log_file: path of the debug log file (already attached)
        :return:
        """
        results_store.clear()

        self.conf = cli_conf
        self.execution_dir = execution_dir
        self.rebased_sources_dir = os.path.join(results_dir, 'rebased-sources')

        self.debug_log_file = debug_log_file

        # Temporary workspace for Builder, checks, ...
        self.kwargs['workspace_dir'] = self.workspace_dir = os.path.join(self.execution_dir, constants.WORKSPACE_DIR)
        # Directory where results should be put
        self.kwargs['results_dir'] = self.results_dir = results_dir

        # Directory containing only those files, which are relevant for the new rebased version
        self.kwargs['rebased_sources_dir'] = self.rebased_sources_dir

        self.kwargs['non_interactive'] = self.conf.non_interactive

        self.kwargs['changelog_entry'] = self.conf.changelog_entry

        self.kwargs['spec_hook_blacklist'] = self.conf.spec_hook_blacklist

        logger.debug("Rebase-helper version: %s", VERSION)

        # when resuming remote build tasks there is nothing to prepare locally
        if self.conf.build_tasks is None:
            # check the workspace dir
            if not self.conf.cont:
                self._check_workspace_dir()

            self._get_spec_file()
            self._prepare_spec_objects()

            if self.conf.update_sources:
                # upload new sources to the lookaside cache and update .gitignore
                sources = [os.path.basename(s) for s in self.spec_file.sources]
                rebased_sources = [os.path.basename(s) for s in self.rebase_spec_file.sources]
                uploaded = LookasideCacheHelper.update_sources('fedpkg', self.rebased_sources_dir,
                                                               self.rebase_spec_file.get_package_name(),
                                                               sources, rebased_sources)
                self._update_gitignore(uploaded, self.rebased_sources_dir)

            # TODO: Remove the value from kwargs and use only CLI attribute!
            self.kwargs['continue'] = self.conf.cont
            self._initialize_data()

        if self.conf.cont or self.conf.build_only:
            self._delete_old_builds()

    @staticmethod
    def setup(cli_conf):
        """
        Prepare the execution and results directories and the debug log.

        :param cli_conf: CLI configuration object
        :return: (execution_dir, results_dir, debug_log_file) tuple
        """
        execution_dir = os.getcwd()
        base_dir = os.path.abspath(cli_conf.results_dir) if cli_conf.results_dir else execution_dir
        results_dir = os.path.join(base_dir, constants.RESULTS_DIR)

        # if not continuing, check the results dir
        if not cli_conf.cont and not cli_conf.build_only and not cli_conf.comparepkgs:
            Application._check_results_dir(results_dir)

        # This is used if user executes rebase-helper with --continue
        # parameter even when directory does not exist
        if not os.path.exists(results_dir):
            os.makedirs(results_dir)
            os.makedirs(os.path.join(results_dir, constants.LOGS_DIR))

        return execution_dir, results_dir, Application._add_debug_log_file(results_dir)

    @staticmethod
    def _add_debug_log_file(results_dir):
        """
        Attach a file handler writing the application wide debug log.

        :param results_dir: directory in which the log file is created
        :return: log file path, or None if the file could not be created
        """
        debug_log_file = os.path.join(results_dir, constants.DEBUG_LOG)
        formatter = logging.Formatter("%(asctime)s %(levelname)s\t%(filename)s"
                                      ":%(lineno)s %(funcName)s: %(message)s")
        try:
            LoggerHelper.add_file_handler(logger, debug_log_file, formatter, logging.DEBUG)
        except (IOError, OSError):
            logger.warning("Can not create debug log '%s'", debug_log_file)
            return None
        return debug_log_file

    def _prepare_spec_objects(self):
        """
        Prepare spec files and initialize objects

        Creates the SpecFile objects for both the current and the rebased
        version, determines the target version, prepares the rebased-sources
        repository, runs spec hooks and downloads remote sources.

        :raises RebaseHelperError: if the upstream version cannot be
            determined or is not newer than the current one
        :return:
        """
        self.rebase_spec_file_path = get_rebase_name(self.rebased_sources_dir, self.spec_file_path)

        self.spec_file = SpecFile(self.spec_file_path,
                                  self.conf.changelog_entry,
                                  self.execution_dir,
                                  download=not self.conf.not_download_sources)
        # Check whether test suite is enabled at build time
        if not self.spec_file.is_test_suite_enabled():
            results_store.set_info_text('WARNING', 'Test suite is not enabled at build time.')
        # create an object representing the rebased SPEC file
        self.rebase_spec_file = self.spec_file.copy(self.rebase_spec_file_path)

        # no SOURCES argument given: ask the versioneers for the latest
        # upstream version instead
        if not self.conf.sources:
            self.conf.sources = versioneers_runner.run(self.conf.versioneer,
                                                       self.spec_file.get_package_name(),
                                                       self.spec_file.category,
                                                       self.conf.versioneer_blacklist)
            if self.conf.sources:
                logger.info("Determined latest upstream version '%s'", self.conf.sources)
            else:
                raise RebaseHelperError('Could not determine latest upstream version '
                                        'and no SOURCES argument specified!')

        # Prepare rebased_sources_dir
        self.rebased_repo = self._prepare_rebased_repository(self.spec_file.patches,
                                                             self.execution_dir,
                                                             self.rebased_sources_dir)

        # check if argument passed as new source is a file or just a version
        if [True for ext in Archive.get_supported_archives() if self.conf.sources.endswith(ext)]:
            logger.debug("argument passed as a new source is a file")
            self.rebase_spec_file.set_version_using_archive(self.conf.sources)
        else:
            logger.debug("argument passed as a new source is a version")
            version, extra_version, separator = SpecFile.split_version_string(self.conf.sources)
            self.rebase_spec_file.set_version(version)
            self.rebase_spec_file.set_extra_version_separator(separator)
            self.rebase_spec_file.set_extra_version(extra_version)

        if not self.conf.skip_version_check and parse_version(self.rebase_spec_file.get_version()) \
                <= parse_version(self.spec_file.get_version()):
            raise RebaseHelperError("Current version is equal to or newer than the requested version, nothing to do.")

        # run spec hooks
        spec_hooks_runner.run_spec_hooks(self.spec_file, self.rebase_spec_file, **self.kwargs)

        # spec file object has been sanitized downloading can proceed
        for spec_file in [self.spec_file, self.rebase_spec_file]:
            if spec_file.download:
                spec_file.download_remote_sources()
                # parse spec again with sources downloaded to properly expand %prep section
                # (deliberate protected access — no public re-parse API exists)
                spec_file._update_data()  # pylint: disable=protected-access

    def _initialize_data(self):
        """
        Gather source archives for both versions and fill default data.

        Sets the old/new main source tarballs (absolute paths) and the lists
        of remaining (non-Source0) sources for both SPEC files.

        :raises RebaseHelperError: if the new sources cannot be determined
        """
        # Get all tarballs before self.kwargs initialization
        self.old_sources = os.path.abspath(self.spec_file.get_archive())

        new_sources = self.rebase_spec_file.get_archive()
        if new_sources:
            self.conf.sources = new_sources
        # guard clause instead of the redundant else-after-raise
        if not self.conf.sources:
            raise RebaseHelperError('You have to define new sources.')
        self.new_sources = os.path.abspath(self.conf.sources)

        # Contains all sources except the Source0
        self.old_rest_sources = [os.path.abspath(x) for x in self.spec_file.get_sources()[1:]]
        self.new_rest_sources = [os.path.abspath(x) for x in self.rebase_spec_file.get_sources()[1:]]

    def get_rpm_packages(self, dirname):
        """
        Record the RPM packages stored in dirname/old and dirname/new directories.

        :param dirname: directory where the old and new RPMs are stored
        :return: True if both versions contain RPM packages, False otherwise
        """
        found = True
        for version in ['old', 'new']:
            if version == 'old':
                spec_version = self.spec_file.get_version()
            else:
                spec_version = self.rebase_spec_file.get_version()
            rpm_dir = os.path.join(os.path.realpath(dirname), version, 'RPM')
            data = {
                'name': self.spec_file.get_package_name(),
                'version': spec_version,
                'rpm': PathHelper.find_all_files(rpm_dir, '*.rpm'),
            }
            if not data['rpm']:
                # FIX: the original message concatenated dirname and version
                # with no separator; log the actual searched path instead
                logger.error('Your path %s does not contain any RPM packages', rpm_dir)
                found = False
            results_store.set_build_data(version, data)
        # simplified 'if not found: return False / return True'
        return found

    def _get_spec_file(self):
        """Locate the SPEC file in the execution directory, failing if none exists."""
        self.spec_file_path = PathHelper.find_first_file(self.execution_dir, '*.spec', 0)
        if not self.spec_file_path:
            raise RebaseHelperError("Could not find any SPEC file in the current directory '%s'" % self.execution_dir)

    def _delete_old_builds(self):
        """
        Deletes the 'old' and 'new' result directories left over from a
        previous build.

        :return:
        """
        self._delete_new_results_dir()
        self._delete_old_results_dir()

    def _delete_old_results_dir(self):
        """Remove the 'old' build results directory if it exists."""
        old_results = os.path.join(self.results_dir, 'old')
        if os.path.isdir(old_results):
            shutil.rmtree(old_results)

    def _delete_new_results_dir(self):
        """Remove the 'new' build results directory if it exists."""
        new_results = os.path.join(self.results_dir, 'new')
        if os.path.isdir(new_results):
            shutil.rmtree(new_results)

    def _delete_workspace_dir(self):
        """
        Deletes workspace directory and logs message

        :return:
        """
        logger.debug("Removing the workspace directory '%s'", self.workspace_dir)
        if os.path.isdir(self.workspace_dir):
            shutil.rmtree(self.workspace_dir)

    def _check_workspace_dir(self):
        """Ensure a fresh workspace directory, removing any existing one first."""
        if os.path.exists(self.workspace_dir):
            logger.warning("Workspace directory '%s' exists, removing it",
                           os.path.basename(self.workspace_dir))
            self._delete_workspace_dir()
        os.makedirs(self.workspace_dir)

    @staticmethod
    def _check_results_dir(results_dir):
        """
        Recreate the results directory and its expected sub-directories,
        removing any previous contents.

        :return:
        """
        # TODO: We may not want to delete the directory in the future
        if os.path.exists(results_dir):
            logger.warning("Results directory '%s' exists, removing it", os.path.basename(results_dir))
            shutil.rmtree(results_dir)
        os.makedirs(results_dir)
        for subdir in (constants.LOGS_DIR,
                       constants.OLD_BUILD_DIR,
                       constants.NEW_BUILD_DIR,
                       constants.CHECKERS_DIR,
                       constants.REBASED_SOURCES_DIR):
            os.makedirs(os.path.join(results_dir, subdir))

    @staticmethod
    def extract_archive(archive_path, destination):
        """
        Extracts given archive into the destination and handle all exceptions.

        :param archive_path: path to the archive to be extracted
        :param destination: path to a destination, where the archive should be extracted to
        :raises RebaseHelperError: on unsupported, unreadable or damaged archives
        :return:
        """
        try:
            extractor = Archive(archive_path)
        except NotImplementedError as ni_e:
            message = '%s. Supported archives are %s' % (six.text_type(ni_e),
                                                         Archive.get_supported_archives())
            raise RebaseHelperError(message)

        try:
            extractor.extract_archive(destination)
        except IOError:
            raise RebaseHelperError("Archive '%s' can not be extracted" % archive_path)
        except (EOFError, SystemError):
            raise RebaseHelperError("Archive '%s' is damaged" % archive_path)

    @staticmethod
    def extract_sources(archive_path, destination):
        """Extract an archive and return the full path to its top-level sources directory."""
        Application.extract_archive(archive_path, destination)

        entries = os.listdir(destination)
        if not entries:
            raise RebaseHelperError('Extraction of sources failed!')

        # if there is only one directory, we can assume it's top-level directory
        if len(entries) == 1:
            candidate = os.path.join(destination, entries[0])
            if os.path.isdir(candidate):
                return candidate

        # archive without top-level directory
        return destination

    def prepare_sources(self):
        """
        Extract old and new source archives into the workspace and return
        their top-level source directories.

        Also extracts any remaining (non-Source0) archives into the
        destinations referenced by the respective %prep sections.

        :return: list [old_dir, new_dir]
        """

        old_sources_dir = os.path.join(self.execution_dir, constants.WORKSPACE_DIR, constants.OLD_SOURCES_DIR)
        new_sources_dir = os.path.join(self.execution_dir, constants.WORKSPACE_DIR, constants.NEW_SOURCES_DIR)

        old_dir = Application.extract_sources(self.old_sources, old_sources_dir)
        new_dir = Application.extract_sources(self.new_sources, new_sources_dir)

        old_tld = os.path.relpath(old_dir, old_sources_dir)
        new_tld = os.path.relpath(new_dir, new_sources_dir)

        dirname = self.spec_file.get_setup_dirname()

        # if %setup cds into a subdirectory, descend into it as well
        # NOTE(review): os.path.split only separates the last component, so
        # this handles a single nesting level — confirm deeper paths are not
        # expected here
        if dirname and os.sep in dirname:
            dirs = os.path.split(dirname)
            if old_tld == dirs[0]:
                old_dir = os.path.join(old_dir, *dirs[1:])
            if new_tld == dirs[0]:
                new_dir = os.path.join(new_dir, *dirs[1:])

        new_dirname = os.path.relpath(new_dir, new_sources_dir)

        # update %setup -n in the rebased SPEC if the top-level dir changed
        if new_dirname != '.':
            self.rebase_spec_file.update_setup_dirname(new_dirname)

        # extract rest of source archives to correct paths
        rest_sources = [self.old_rest_sources, self.new_rest_sources]
        spec_files = [self.spec_file, self.rebase_spec_file]
        sources_dirs = [
            os.path.join(constants.WORKSPACE_DIR, constants.OLD_SOURCES_DIR),
            os.path.join(constants.WORKSPACE_DIR, constants.NEW_SOURCES_DIR),
        ]
        for sources, spec_file, sources_dir in zip(rest_sources, spec_files, sources_dirs):
            for rest in sources:
                archive = [x for x in Archive.get_supported_archives() if rest.endswith(x)]
                if archive:
                    dest_dir = spec_file.find_archive_target_in_prep(rest)
                    if dest_dir:
                        Application.extract_sources(rest, os.path.join(self.execution_dir, sources_dir, dest_dir))

        return [old_dir, new_dir]

    def patch_sources(self, sources):
        """Rebase the downstream patches onto the new sources.

        :param sources: list [old_sources_dir, new_sources_dir]
        :raises RebaseHelperError: when rebasing the patches fails
        """
        patcher = Patcher('git')
        self.rebase_spec_file.update_changelog(self.rebase_spec_file.get_new_log())
        old_dir, new_dir = sources
        try:
            self.rebased_patches = patcher.patch(old_dir,
                                                 new_dir,
                                                 self.old_rest_sources,
                                                 self.spec_file.get_applied_patches(),
                                                 **self.kwargs)
        except RuntimeError:
            raise RebaseHelperError('Patching failed')
        self.rebase_spec_file.write_updated_patches(self.rebased_patches,
                                                    self.conf.disable_inapplicable_patches)
        results_store.set_patches_results(self.rebased_patches)

    def generate_patch(self):
        """
        Generates patch to the results_dir containing all needed changes for
        the rebased package version
        """
        # Delete removed patches from rebased_sources_dir from git
        removed_patches = self.rebase_spec_file.removed_patches
        if removed_patches:
            self.rebased_repo.index.remove(removed_patches, working_tree=True)

        self.rebase_spec_file.update_paths_to_patches()

        # Generate patch
        self.rebased_repo.git.add(all=True)
        # refresh parsed SPEC data after the path updates above
        self.rebase_spec_file._update_data()  # pylint: disable=protected-access
        # expand macros in the configured changelog entry; the raw value is
        # also passed as the second argument - presumably a fallback when
        # expansion fails, confirm against MacroHelper.expand
        self.rebased_repo.index.commit(MacroHelper.expand(self.conf.changelog_entry, self.conf.changelog_entry))
        # format-patch of the single commit just created; binary output is
        # requested so it can be written to the file unchanged
        patch = self.rebased_repo.git.format_patch('-1', stdout=True, stdout_as_string=False)
        with open(os.path.join(self.results_dir, 'changes.patch'), 'wb') as f:
            f.write(patch)
            f.write(b'\n')

        results_store.set_changes_patch('changes_patch', os.path.join(self.results_dir, 'changes.patch'))

    @classmethod
    def _update_gitignore(cls, sources, rebased_sources_dir):
        """Adds entries for new source files into the .gitignore file.

        Files already matched by an existing pattern are not added again.

        Args:
            sources (list): List of new source files.
            rebased_sources_dir (str): Target directory.

        """
        gitignore = os.path.join(rebased_sources_dir, '.gitignore')
        with open(gitignore) as f:
            patterns = [line.lstrip(os.path.sep).rstrip('\n') for line in f.readlines()]

        def ignored(source):
            name = source.lstrip(os.path.sep).rstrip('\n')
            return any(fnmatch.fnmatch(name, pattern) for pattern in patterns)

        with open(gitignore, 'a') as f:
            for source in sources:
                if not ignored(source):
                    f.write(os.path.sep + source + '\n')

    @classmethod
    def _prepare_rebased_repository(cls, patches, execution_dir, rebased_sources_dir):
        """
        Initialize git repository in the rebased directory

        :param patches: dict with 'applied' and 'not_applied' lists of patch
            objects (each having a ``path`` attribute)
        :param execution_dir: directory to copy 'sources' and '.gitignore' from
        :param rebased_sources_dir: directory in which the repository is created
        :return: git.Repo instance of rebased_sources
        """
        # copy all downstream patches, applied or not, next to the SPEC file
        for patch in patches['applied'] + patches['not_applied']:
            shutil.copy(patch.path, rebased_sources_dir)

        # copy the 'sources' file from the package directory, when present
        sources = os.path.join(execution_dir, 'sources')
        if os.path.isfile(sources):
            shutil.copy(sources, rebased_sources_dir)

        gitignore = os.path.join(execution_dir, '.gitignore')
        if os.path.isfile(gitignore):
            shutil.copy(gitignore, rebased_sources_dir)

        repo = git.Repo.init(rebased_sources_dir)
        # set a local identity so the commit works without global git config
        repo.git.config('user.name', GitHelper.get_user(), local=True)
        repo.git.config('user.email', GitHelper.get_email(), local=True)
        repo.git.add(all=True)
        # skip_hooks: repository hooks must not interfere with the initial commit
        repo.index.commit('Initial commit', skip_hooks=True)
        return repo

    @staticmethod
    def _sanitize_build_dict(build_dict):
        blacklist = [
            'builds_nowait',
            'build_tasks',
            'builder_options',
            'srpm_builder_options',
        ]
        return {k: v for k, v in six.iteritems(build_dict) if k not in blacklist}

    def build_source_packages(self):
        """Build old and new SRPM packages with the configured SRPM build tool.

        Build data (or error details) for each version is stored in the
        results store under the 'old'/'new' keys.

        :raises RebaseHelperError: when the build tool is unsupported or a
            build fails
        """
        try:
            builder = SRPMBuilder(self.conf.srpm_buildtool)
        except NotImplementedError as e:
            raise RebaseHelperError('{}. Supported SRPM build tools are {}'.format(
                six.text_type(e), SRPMBuilder.get_supported_tools()))

        for version in ['old', 'new']:
            results_dir = '{}-build'.format(os.path.join(self.results_dir, version))
            spec = self.spec_file if version == 'old' else self.rebase_spec_file
            package_name = spec.get_package_name()
            package_version = spec.get_version()
            package_full_version = spec.get_full_version()
            logger.info('Building source package for %s version %s', package_name, package_full_version)
            build_dict = dict(
                name=package_name,
                version=package_version,
                srpm_buildtool=self.conf.srpm_buildtool,
                srpm_builder_options=self.conf.srpm_builder_options)
            try:
                build_dict.update(builder.build(spec, results_dir, **build_dict))
                # drop internal configuration keys before storing the result
                build_dict = self._sanitize_build_dict(build_dict)
                results_store.set_build_data(version, build_dict)
            except RebaseHelperError:
                # already a proper error - re-raise unchanged
                raise
            except SourcePackageBuildError as e:
                # store the logs and the error so the report can show them
                build_dict.update(builder.get_logs())
                build_dict['source_package_build_error'] = six.text_type(e)
                build_dict = self._sanitize_build_dict(build_dict)
                results_store.set_build_data(version, build_dict)
                if e.logfile:
                    msg = 'Building {} SRPM packages failed; see {} for more information'.format(version, e.logfile)
                else:
                    msg = 'Building {} SRPM packages failed; see logs in {} for more information'.format(
                        version, os.path.join(results_dir, 'SRPM'))
                raise RebaseHelperError(msg, logfiles=builder.get_logs().get('logs'))
            except Exception:
                raise RebaseHelperError('Building package failed with unknown reason. '
                                        'Check all available log files.')

    def build_binary_packages(self):
        """Build old and new binary RPM packages with the configured build tool.

        Optionally downloads the old build from Koji instead of building it,
        or only waits for previously started detached build tasks. In
        interactive mode a failed build can be retried after the user edits
        the rebased SPEC file.

        :raises RebaseHelperError: when the build tool is unsupported or a
            build fails and is not retried
        """
        try:
            builder = Builder(self.conf.buildtool)
        except NotImplementedError as e:
            raise RebaseHelperError('{}. Supported build tools are {}'.format(
                six.text_type(e), Builder.get_supported_tools()))

        for version in ['old', 'new']:
            successful_builds = 0
            try_build_again = False
            # loop so that the user can fix the SPEC file and retry the build
            while successful_builds < 1:
                results_dir = '{}-build'.format(os.path.join(self.results_dir, version))
                spec = None
                task_id = None
                koji_build_id = None
                build_dict = {}

                if self.conf.build_tasks is None:
                    spec = self.spec_file if version == 'old' else self.rebase_spec_file
                    package_name = spec.get_package_name()
                    package_version = spec.get_version()
                    package_full_version = spec.get_full_version()

                    # optionally reuse the latest Koji build as the old version
                    if version == 'old' and self.conf.get_old_build_from_koji:
                        if KojiHelper.functional:
                            session = KojiHelper.create_session()
                            koji_version, koji_build_id = KojiHelper.get_latest_build(session, package_name)
                            if koji_version:
                                if koji_version != package_version:
                                    logger.warning('Version of the latest Koji build (%s) with id (%s) '
                                                   'differs from version in SPEC file (%s)!',
                                                   koji_version, koji_build_id, package_version)
                                package_version = package_full_version = koji_version
                            else:
                                logger.warning('Unable to find the latest Koji build!')
                        else:
                            logger.warning('Unable to get the latest Koji build!')

                    build_dict = dict(
                        name=package_name,
                        version=package_version,
                        builds_nowait=self.conf.builds_nowait,
                        build_tasks=self.conf.build_tasks,
                        builder_options=self.conf.builder_options,
                        srpm=results_store.get_build(version).get('srpm'),
                        srpm_logs=results_store.get_build(version).get('logs'))

                    # prepare for building
                    builder.prepare(spec, self.conf)

                    logger.info('Building binary packages for %s version %s', package_name, package_full_version)
                else:
                    # only waiting for previously started detached tasks
                    task_id = self.conf.build_tasks[0] if version == 'old' else self.conf.build_tasks[1]

                try:
                    if self.conf.build_tasks is None:
                        if koji_build_id:
                            # download the existing Koji build instead of building
                            session = KojiHelper.create_session()
                            build_dict['rpm'], build_dict['logs'] = KojiHelper.download_build(session,
                                                                                              koji_build_id,
                                                                                              results_dir)
                        else:
                            build_dict.update(builder.build(spec, results_dir, **build_dict))
                    if builder.creates_tasks() and task_id and not koji_build_id:
                        if not self.conf.builds_nowait:
                            build_dict['rpm'], build_dict['logs'] = builder.wait_for_task(build_dict,
                                                                                          task_id,
                                                                                          results_dir)
                        elif self.conf.build_tasks:
                            build_dict['rpm'], build_dict['logs'] = builder.get_detached_task(task_id, results_dir)
                    # drop internal configuration keys before storing the result
                    build_dict = self._sanitize_build_dict(build_dict)
                    results_store.set_build_data(version, build_dict)
                    successful_builds += 1
                except RebaseHelperError:
                    # Proper RebaseHelperError instance was created already. Re-raise it.
                    raise
                except BinaryPackageBuildError as e:
                    # store the logs and the error so the report can show them
                    build_dict.update(builder.get_logs())
                    build_dict['binary_package_build_error'] = six.text_type(e)
                    build_dict = self._sanitize_build_dict(build_dict)
                    results_store.set_build_data(version, build_dict)

                    if e.logfile is None:
                        msg = 'Building {} RPM packages failed; see logs in {} for more information'.format(
                            version, os.path.join(results_dir, 'RPM')
                        )
                    else:
                        msg = 'Building {} RPM packages failed; see {} for more information'.format(version, e.logfile)

                    logger.info(msg)
                    if self.rebase_spec_file:
                        # Save current rebase spec file content
                        self.rebase_spec_file.save()
                    # in interactive mode, offer a retry instead of failing
                    if not self.conf.non_interactive and \
                            ConsoleHelper.get_message('Do you want to try it one more time'):
                        try_build_again = True
                    else:
                        raise RebaseHelperError(msg, logfiles=builder.get_logs().get('logs'))
                except Exception:
                    raise RebaseHelperError('Building package failed with unknown reason. '
                                            'Check all available log files.')

                if try_build_again:
                    successful_builds = 0
                    try_build_again = False

                    logger.info('Now it is time to make changes to  %s if necessary.', self.rebase_spec_file.path)
                    if not ConsoleHelper.get_message('Do you want to continue with the rebuild now'):
                        raise KeyboardInterrupt
                    # Update rebase spec file content after potential manual modifications
                    self.rebase_spec_file._read_spec_content()  # pylint: disable=protected-access
                    self.rebase_spec_file._update_data()  # pylint: disable=protected-access
                    # clear current version output directories
                    if os.path.exists(os.path.join(results_dir, 'RPM')):
                        shutil.rmtree(os.path.join(results_dir, 'RPM'))

        if self.conf.builds_nowait and not self.conf.build_tasks:
            if builder.creates_tasks():
                self.print_task_info(builder)

    def run_package_checkers(self, results_dir, **kwargs):
        """
        Runs checkers on packages and stores results in a given directory.

        :param results_dir: Path to directory in which to store the results.
        :type results_dir: str
        :param category: checker type(SOURCE/SRPM/RPM)
        :type category: str
        :return: None
        """
        outputs = {}

        for checker_name in self.conf.pkgcomparetool:
            try:
                data = checkers_runner.run_checker(os.path.join(results_dir, 'checkers'),
                                                   checker_name,
                                                   **kwargs)
            except CheckerNotFoundError:
                logger.error("Rebase-helper did not find checker '%s'.", checker_name)
                continue
            if data:
                outputs[checker_name] = data

        for diff_name, result in outputs.items():
            results_store.set_checker_output(diff_name, result)

    def get_all_log_files(self):
        """Collect paths of all log files created by rebase-helper.

        :return: list with the debug log first and the report summary log
            second, each included only when the file is actually available
        """
        candidates = (self.debug_log_file, self.report_log_file)
        return [log for log in candidates if FileHelper.file_available(log)]

    def get_new_build_logs(self):
        """Return stored build data for both versions under 'build_ref'."""
        return {
            'build_ref': {
                version: results_store.get_build(version)
                for version in ('old', 'new')
            }
        }

    def get_checker_outputs(self):
        """Map each checker to an available log file, or None when the
        checker produced no data.

        :return: dict {checker_name: log_path_or_None}
        """
        checkers = {}
        for check, data in results_store.get_checkers().items():
            if not data:
                checkers[check] = None
                continue
            for log in data:
                # keep the last log that is actually present on disk;
                # checkers with no available log are omitted entirely
                if FileHelper.file_available(log):
                    checkers[check] = log
        return checkers

    def get_rebased_patches(self):
        """
        Build human-readable messages describing how patches were handled.

        :return: list of strings, one per patch action, or a single message
            saying no patches were touched
        """
        messages = []
        stored_patches = results_store.get_patches()
        if stored_patches:
            for action, patch_list in stored_patches.items():
                if action:
                    messages.append('Following patches have been %s:\n%s' % (action, patch_list))
        if not messages:
            messages.append('Patches were not touched. All were applied properly')
        return messages

    def print_summary(self, exception=None):
        """
        Save rebase-helper result and print the summary using output_tools_runner
        :param exception: RebaseHelperError instance when the rebase failed,
            None on success
        :return:
        """
        logs = None
        # Store rebase helper result exception
        if exception:
            if exception.logfiles:
                logs = exception.logfiles

            results_store.set_result_message('fail', exception.msg)
        else:
            result = "Rebase from {}-{} to {}-{} completed without an error".format(
                self.spec_file.get_package_name(), self.spec_file.get_version(),
                self.rebase_spec_file.get_package_name(), self.rebase_spec_file.get_version())
            results_store.set_result_message('success', result)

        if self.rebase_spec_file:
            # regenerate changes.patch so it reflects the final SPEC state
            self.rebase_spec_file.update_paths_to_patches()
            self.generate_patch()

        output_tools_runner.run_output_tools(logs, self)

    def print_task_info(self, builder):
        """Log build task information for both the old and new build."""
        build_refs = self.get_new_build_logs()['build_ref']
        for version in ('old', 'new'):
            logger.info(builder.get_task_info(build_refs[version]))

    def get_rebasehelper_data(self):
        """Gather all rebase-helper results into a single dict."""
        return {
            'build_logs': self.get_new_build_logs(),
            'patches': self.get_rebased_patches(),
            'checkers': self.get_checker_outputs(),
            'logs': self.get_all_log_files(),
        }

    def apply_changes(self):
        """Apply the generated changes.patch onto the dist-git repository.

        Initializes a git repository in the execution directory when one does
        not exist yet. A failure to apply the patch is only logged; the user
        is expected to resolve it manually.
        """
        try:
            repo = git.Repo(self.execution_dir)
        except git.InvalidGitRepositoryError:
            repo = git.Repo.init(self.execution_dir)
        patch = results_store.get_changes_patch()
        if not patch:
            logger.warning('Cannot apply changes.patch. No patch file was created')
            # bug fix: without this return, the subscript below would raise
            # TypeError on the missing patch dict
            return
        try:
            repo.git.am(patch['changes_patch'])
        except git.GitCommandError as e:
            logger.warning('changes.patch was not applied properly. Please review changes manually.'
                           '\nThe error message is: %s', six.text_type(e))

    def run(self):
        """Run the whole rebase: option validation, sources, patches, builds,
        checkers, summary and cleanup.

        :return: 0 on success
        :raises RebaseHelperError: on invalid option combinations or when a
            rebase step fails
        """
        # Certain options can be used only with specific build tools
        tools_creating_tasks = [k for k, v in six.iteritems(Builder.build_tools) if v.creates_tasks()]
        if self.conf.buildtool not in tools_creating_tasks:
            options_used = []
            if self.conf.build_tasks is not None:
                options_used.append('--build-tasks')
            if self.conf.builds_nowait is True:
                options_used.append('--builds-nowait')
            if options_used:
                raise RebaseHelperError("{} can be used only with the following build tools: {}".format(
                                        " and ".join(options_used),
                                        ", ".join(tools_creating_tasks)))
        elif self.conf.builds_nowait and self.conf.get_old_build_from_koji:
            raise RebaseHelperError("%s can't be used with: %s" %
                                    ('--builds-nowait', '--get-old-build-from-koji')
                                    )

        tools_accepting_options = [k for k, v in six.iteritems(Builder.build_tools) if v.accepts_options()]
        if self.conf.buildtool not in tools_accepting_options:
            options_used = []
            if self.conf.builder_options is not None:
                options_used.append('--builder-options')
            if options_used:
                raise RebaseHelperError("{} can be used only with the following build tools: {}".format(
                                        " and ".join(options_used),
                                        ", ".join(tools_accepting_options)))

        if self.conf.build_tasks is None:
            old_sources, new_sources = self.prepare_sources()
            self.run_package_checkers(self.results_dir, category='SOURCE', old_dir=old_sources, new_dir=new_sources)
            if not self.conf.build_only and not self.conf.comparepkgs:
                try:
                    self.patch_sources([old_sources, new_sources])
                except RebaseHelperError as e:
                    # Print summary and return error
                    self.print_summary(e)
                    raise

        if not self.conf.patch_only:
            if not self.conf.comparepkgs:
                # Build packages
                try:
                    if self.conf.build_tasks is None:
                        self.build_source_packages()
                    self.run_package_checkers(self.results_dir, category='SRPM')
                    self.build_binary_packages()
                    # with --builds-nowait the rest happens in a later run,
                    # once the detached tasks have finished
                    if self.conf.builds_nowait and not self.conf.build_tasks:
                        return
                    self.run_package_checkers(self.results_dir, category='RPM')
                # Print summary and return error
                except RebaseHelperError as e:
                    self.print_summary(e)
                    raise
            else:
                # compare pre-built packages supplied by the user
                if self.get_rpm_packages(self.conf.comparepkgs):
                    self.run_package_checkers(self.results_dir, category='SRPM')
                    self.run_package_checkers(self.results_dir, category='RPM')

        if not self.conf.keep_workspace:
            self._delete_workspace_dir()

        if self.debug_log_file:
            self.print_summary()
        if self.conf.apply_changes:
            self.apply_changes()
        return 0
Exemplo n.º 44
0
 def spec_object(self, workdir):
     """Create a SpecFile object for self.SPEC_FILE in *workdir*."""
     return SpecFile(self.SPEC_FILE, workdir, download=False)
Exemplo n.º 45
0
class Application(object):
    # NOTE(review): these are *class* attributes used as instance defaults;
    # the mutable ones (kwargs, old_rest_sources, new_rest_sources,
    # rebased_patches) are shared between all instances - __init__ writes
    # into self.kwargs in place, which mutates the class-level dict.
    # Confirm this sharing is intentional.
    result_file = ""                  # path to the result file
    temp_dir = ""                     # temporary working directory
    kwargs = {}                       # keyword arguments passed to builders/checkers/hooks
    old_sources = ""                  # path to the old (current) source archive
    new_sources = ""                  # path to the new (upstream) source archive
    old_rest_sources = []             # all old sources except Source0
    new_rest_sources = []             # all new sources except Source0
    spec_file = None                  # SpecFile object of the original SPEC
    spec_file_path = None             # path to the original SPEC file
    rebase_spec_file = None           # SpecFile object of the rebased SPEC
    rebase_spec_file_path = None      # path to the rebased SPEC file
    debug_log_file = None             # path to the debug log file
    report_log_file = None            # path to the report summary log file
    rebased_patches = {}              # patches as returned by Patcher.patch()
    upstream_monitoring = False       # whether running in upstream monitoring mode
    rebased_repo = None               # git.Repo of the rebased sources

    def __init__(self, cli_conf, execution_dir, results_dir, debug_log_file):
        """
        Initialize the application

        :param cli_conf: CLI object with configuration gathered from commandline
        :param execution_dir: directory in which rebase-helper was started
        :param results_dir: directory for results and the workspace
        :param debug_log_file: path to the debug log file
        :return:
        """
        results_store.clear()

        self.conf = cli_conf
        self.execution_dir = execution_dir
        self.rebased_sources_dir = os.path.join(results_dir, 'rebased-sources')

        self.debug_log_file = debug_log_file

        # Temporary workspace for Builder, checks, ...
        # NOTE(review): self.kwargs is a class-level dict, so these writes
        # mutate state shared by all instances - confirm intended
        self.kwargs['workspace_dir'] = self.workspace_dir = os.path.join(
            self.execution_dir, settings.REBASE_HELPER_WORKSPACE_DIR)
        # Directory where results should be put
        self.kwargs['results_dir'] = self.results_dir = results_dir

        # Directory containing only those files, which are relevant for the new rebased version
        self.kwargs['rebased_sources_dir'] = self.rebased_sources_dir

        self.kwargs['non_interactive'] = self.conf.non_interactive

        logger.debug("Rebase-helper version: %s" % version.VERSION)

        if self.conf.build_tasks is None:
            # check the workspace dir
            if not self.conf.cont:
                self._check_workspace_dir()

            self._get_spec_file()
            self._prepare_spec_objects()

            # TODO: Remove the value from kwargs and use only CLI attribute!
            self.kwargs['continue'] = self.conf.cont
            self._initialize_data()

        if self.conf.cont or self.conf.build_only:
            self._delete_old_builds()

    @staticmethod
    def setup(cli_conf):
        """Prepare the execution and results directories and the debug log.

        :param cli_conf: CLI configuration object
        :return: tuple (execution_dir, results_dir, debug_log_file)
        """
        execution_dir = os.getcwd()
        base_dir = cli_conf.results_dir or execution_dir
        results_dir = os.path.join(base_dir, settings.REBASE_HELPER_RESULTS_DIR)

        # if not continuing, check the results dir
        if not (cli_conf.cont or cli_conf.build_only or cli_conf.comparepkgs):
            Application._check_results_dir(results_dir)

        # This is used if user executes rebase-helper with --continue
        # parameter even when directory does not exist
        if not os.path.exists(results_dir):
            os.makedirs(results_dir)
            os.makedirs(os.path.join(results_dir, settings.REBASE_HELPER_LOGS))

        debug_log_file = Application._add_debug_log_file(results_dir)

        return execution_dir, results_dir, debug_log_file

    @staticmethod
    def _add_debug_log_file(results_dir):
        """
        Attach a DEBUG-level file handler to the main logger.

        :param results_dir: directory in which the debug log is created
        :return: path to the debug log file, or None when it cannot be created
        """
        debug_log_file = os.path.join(results_dir, settings.REBASE_HELPER_DEBUG_LOG)
        formatter = logging.Formatter("%(asctime)s %(levelname)s\t%(filename)s"
                                      ":%(lineno)s %(funcName)s: %(message)s")
        try:
            LoggerHelper.add_file_handler(logger, debug_log_file, formatter, logging.DEBUG)
        except (IOError, OSError):
            logger.warning("Can not create debug log '%s'", debug_log_file)
            return None
        return debug_log_file

    @staticmethod
    def _add_report_log_file(results_dir):
        """
        Attach an INFO-level file handler to the report logger.

        :param results_dir: directory in which the report log is created
        :return: path to the report log file, or None when it cannot be created
        """
        report_log_file = os.path.join(results_dir, settings.REBASE_HELPER_REPORT_LOG)
        try:
            LoggerHelper.add_file_handler(logger_report, report_log_file, None, logging.INFO)
        except (IOError, OSError):
            logger.warning("Can not create report log '%s'", report_log_file)
            return None
        return report_log_file

    def _prepare_spec_objects(self):
        """
        Prepare spec files and initialize objects

        Creates SpecFile objects for the original and the rebased SPEC,
        determines the new upstream version when none was given, initializes
        the rebased-sources git repository, runs spec hooks and downloads
        remote sources.

        :raises RebaseHelperError: when the new upstream version cannot be
            determined and no SOURCES argument was given
        """
        self.rebase_spec_file_path = get_rebase_name(self.rebased_sources_dir,
                                                     self.spec_file_path)

        self.spec_file = SpecFile(self.spec_file_path,
                                  self.execution_dir,
                                  download=not self.conf.not_download_sources)
        # Check whether test suite is enabled at build time
        if not self.spec_file.is_test_suite_enabled():
            results_store.set_info_text(
                'WARNING', 'Test suite is not enabled at build time.')
        # create an object representing the rebased SPEC file
        self.rebase_spec_file = self.spec_file.copy(self.rebase_spec_file_path)

        # no new version/archive given - try to determine it via versioneers
        if not self.conf.sources:
            self.conf.sources = versioneers_runner.run(
                self.conf.versioneer, self.spec_file.get_package_name(),
                self.spec_file.category)
            if self.conf.sources:
                logger.info("Determined latest upstream version '%s'",
                            self.conf.sources)
            else:
                raise RebaseHelperError(
                    'Could not determine latest upstream version '
                    'and no SOURCES argument specified!')

        # Prepare rebased_sources_dir
        self.rebased_repo = self._prepare_rebased_repository(
            self.spec_file.patches, self.rebased_sources_dir)

        # check if argument passed as new source is a file or just a version
        if [
                True for ext in Archive.get_supported_archives()
                if self.conf.sources.endswith(ext)
        ]:
            logger.debug("argument passed as a new source is a file")
            self.rebase_spec_file.set_version_using_archive(self.conf.sources)
        else:
            logger.debug("argument passed as a new source is a version")
            version, extra_version, separator = SpecFile.split_version_string(
                self.conf.sources)
            self.rebase_spec_file.set_version(version)
            self.rebase_spec_file.set_extra_version_separator(separator)
            self.rebase_spec_file.set_extra_version(extra_version)

        # run spec hooks
        spec_hooks_runner.run_spec_hooks(self.spec_file, self.rebase_spec_file,
                                         **self.kwargs)

        # spec file object has been sanitized downloading can proceed
        for spec_file in [self.spec_file, self.rebase_spec_file]:
            if spec_file.download:
                spec_file.download_remote_sources()

    def _initialize_data(self):
        """Resolve absolute paths to the old/new archives and remaining sources.

        :raises RebaseHelperError: when new sources cannot be determined
        """
        # Get all tarballs before self.kwargs initialization
        self.old_sources = self.spec_file.get_archive()
        new_sources = self.rebase_spec_file.get_archive()

        self.old_sources = os.path.abspath(self.old_sources)
        if new_sources:
            self.conf.sources = new_sources

        if not self.conf.sources:
            raise RebaseHelperError('You have to define new sources.')
        self.new_sources = os.path.abspath(self.conf.sources)

        # everything except Source0, as absolute paths
        self.old_rest_sources = [os.path.abspath(src) for src in self.spec_file.get_sources()[1:]]
        self.new_rest_sources = [os.path.abspath(src) for src in self.rebase_spec_file.get_sources()[1:]]

    def _get_rebase_helper_log(self):
        """Return the path to the main rebase-helper results log."""
        return os.path.join(self.results_dir, settings.REBASE_HELPER_RESULTS_LOG)

    def get_rpm_packages(self, dirname):
        """
        Function returns RPM packages stored in dirname/old and dirname/new directories

        :param dirname: directory where are stored old and new RPMS
        :return: True when RPM packages were found for both versions,
            False otherwise
        """
        found = True
        for version in ['old', 'new']:
            data = {}
            data['name'] = self.spec_file.get_package_name()
            if version == 'old':
                spec_version = self.spec_file.get_version()
            else:
                spec_version = self.rebase_spec_file.get_version()
            data['version'] = spec_version
            data['rpm'] = PathHelper.find_all_files(
                os.path.join(os.path.realpath(dirname), version, 'RPM'),
                '*.rpm')
            if not data['rpm']:
                # bug fix: the original message joined dirname and version
                # without a path separator
                logger.error(
                    'Your path %s/%s/RPM does not contain any RPM packages',
                    dirname, version)
                found = False
            # store build data even for a missing version so partial results
            # stay visible
            results_store.set_build_data(version, data)
        return found

    def _get_spec_file(self):
        """Locate the SPEC file in the execution_dir directory and remember its path."""
        self.spec_file_path = PathHelper.find_first_file(
            self.execution_dir, '*.spec', 0)
        if self.spec_file_path:
            return
        raise RebaseHelperError(
            "Could not find any SPEC file in the current directory '%s'",
            self.execution_dir)

    def _delete_old_builds(self):
        """Remove the 'new' and 'old' result directories left over from a previous build."""
        for cleanup in (self._delete_new_results_dir,
                        self._delete_old_results_dir):
            cleanup()

    def _delete_old_results_dir(self):
        """Remove the 'old' result directory if it exists."""
        old_dir = os.path.join(self.results_dir, 'old')
        if os.path.isdir(old_dir):
            shutil.rmtree(old_dir)

    def _delete_new_results_dir(self):
        """Remove the 'new' result directory if it exists."""
        new_dir = os.path.join(self.results_dir, 'new')
        if os.path.isdir(new_dir):
            shutil.rmtree(new_dir)

    def _delete_workspace_dir(self):
        """Remove the workspace directory, logging the removal."""
        workspace = self.workspace_dir
        logger.debug("Removing the workspace directory '%s'", workspace)
        if os.path.isdir(workspace):
            shutil.rmtree(workspace)

    def _check_workspace_dir(self):
        """Ensure a fresh workspace directory, removing any leftover one first."""
        workspace = self.workspace_dir
        if os.path.exists(workspace):
            # a previous run left the directory behind; start clean
            logger.warning("Workspace directory '%s' exists, removing it",
                           os.path.basename(workspace))
            self._delete_workspace_dir()
        os.makedirs(workspace)

    @staticmethod
    def _check_results_dir(results_dir):
        """Create a fresh results directory tree, removing any existing one first."""
        # TODO: We may not want to delete the directory in the future
        if os.path.exists(results_dir):
            logger.warning("Results directory '%s' exists, removing it",
                           os.path.basename(results_dir))
            shutil.rmtree(results_dir)
        os.makedirs(results_dir)
        # pre-create the per-stage subdirectories used later in the run
        for subdir in (settings.REBASE_HELPER_LOGS, 'old-build', 'new-build',
                       'checkers', 'rebased-sources'):
            os.makedirs(os.path.join(results_dir, subdir))

    @staticmethod
    def extract_archive(archive_path, destination):
        """
        Extracts given archive into the destination and handles all exceptions.

        :param archive_path: path to the archive to be extracted
        :param destination: path to a destination, where the archive should be extracted to
        :return: None
        :raises RebaseHelperError: if the archive type is unsupported,
            unextractable or damaged
        """
        try:
            archive = Archive(archive_path)
        except NotImplementedError as ni_e:
            # fix: the format arguments must be a tuple; the original applied
            # '%' to the first value only, which raised TypeError instead of
            # the intended error message
            raise RebaseHelperError(
                '%s. Supported archives are %s' %
                (six.text_type(ni_e), Archive.get_supported_archives()))

        try:
            archive.extract_archive(destination)
        except IOError:
            raise RebaseHelperError("Archive '%s' can not be extracted" %
                                    archive_path)
        except (EOFError, SystemError):
            raise RebaseHelperError("Archive '%s' is damaged" % archive_path)

    @staticmethod
    def extract_sources(archive_path, destination):
        """Extract the given archive and return the full path to the top-level sources directory."""
        Application.extract_archive(archive_path, destination)

        entries = os.listdir(destination)
        if not entries:
            raise RebaseHelperError('Extraction of sources failed!')
        if len(entries) == 1:
            # a single entry is assumed to be the top-level directory
            candidate = os.path.join(destination, entries[0])
            if os.path.isdir(candidate):
                return candidate

        # archive without top-level directory
        return destination

    def prepare_sources(self):
        """
        Extract the old and the new source archives and adjust the SPEC file.

        Extracts Source0 of both versions into the old/new sources directories,
        updates the %setup dirname in the rebased SPEC file if the top-level
        directory changed, and extracts the remaining source archives into the
        locations referenced by the %prep section.

        :return: list [old_dir, new_dir] with paths to the extracted sources
        """

        old_sources_dir = os.path.join(self.execution_dir,
                                       settings.OLD_SOURCES_DIR)
        new_sources_dir = os.path.join(self.execution_dir,
                                       settings.NEW_SOURCES_DIR)

        old_dir = Application.extract_sources(self.old_sources,
                                              old_sources_dir)
        new_dir = Application.extract_sources(self.new_sources,
                                              new_sources_dir)

        # top-level directories of the extracted archives, relative to the
        # extraction roots
        old_tld = os.path.relpath(old_dir, old_sources_dir)
        new_tld = os.path.relpath(new_dir, new_sources_dir)

        dirname = self.spec_file.get_setup_dirname()

        if dirname and os.sep in dirname:
            # %setup dirname contains a path separator - descend into the
            # matching subdirectory of the extracted tree.
            # NOTE(review): os.path.split() only splits off the last path
            # component, so dirs always has exactly two elements here -
            # confirm this is intended for dirnames with more than two levels
            dirs = os.path.split(dirname)
            if old_tld == dirs[0]:
                old_dir = os.path.join(old_dir, *dirs[1:])
            if new_tld == dirs[0]:
                new_dir = os.path.join(new_dir, *dirs[1:])

        new_dirname = os.path.relpath(new_dir, new_sources_dir)

        if new_dirname != '.':
            # top-level directory changed - point %setup at the new one
            self.rebase_spec_file.update_setup_dirname(new_dirname)

        # extract rest of source archives to correct paths
        rest_sources = [self.old_rest_sources, self.new_rest_sources]
        spec_files = [self.spec_file, self.rebase_spec_file]
        sources_dirs = [settings.OLD_SOURCES_DIR, settings.NEW_SOURCES_DIR]
        for sources, spec_file, sources_dir in zip(rest_sources, spec_files,
                                                   sources_dirs):
            for rest in sources:
                # only extract sources that have a supported archive extension
                archive = [
                    x for x in Archive.get_supported_archives()
                    if rest.endswith(x)
                ]
                if archive:
                    dest_dir = spec_file.find_archive_target_in_prep(rest)
                    if dest_dir:
                        Application.extract_sources(
                            rest,
                            os.path.join(self.execution_dir, sources_dir,
                                         dest_dir))

        return [old_dir, new_dir]

    def patch_sources(self, sources):
        """Apply the downstream patches to the new sources and record the results.

        :param sources: pair [old_dir, new_dir] as returned by prepare_sources()
        :raises RebaseHelperError: when patching fails
        """
        patcher = Patcher('git')
        self.rebase_spec_file.update_changelog(
            self.rebase_spec_file.get_new_log())
        old_dir, new_dir = sources[0], sources[1]
        try:
            self.rebased_patches = patcher.patch(
                old_dir, new_dir, self.old_rest_sources,
                self.spec_file.get_applied_patches(),
                self.spec_file.get_prep_section(), **self.kwargs)
        except RuntimeError:
            raise RebaseHelperError('Patching failed')
        self.rebase_spec_file.write_updated_patches(
            self.rebased_patches, self.conf.disable_inapplicable_patches)
        results_store.set_patches_results(self.rebased_patches)

    def generate_patch(self):
        """
        Generates patch to the results_dir containing all needed changes for
        the rebased package version
        """
        # Delete removed patches from rebased_sources_dir from git
        removed = self.rebase_spec_file.removed_patches
        if removed:
            self.rebased_repo.index.remove(removed, working_tree=True)

        self.rebase_spec_file.update_paths_to_patches()

        # Commit everything and format a single patch out of that commit
        self.rebased_repo.git.add(all=True)
        message = 'New upstream release {}'.format(
            self.rebase_spec_file.get_full_version())
        self.rebased_repo.index.commit(message, skip_hooks=True)
        patch = self.rebased_repo.git.format_patch('-1',
                                                   stdout=True,
                                                   stdout_as_string=False)
        patch_path = os.path.join(self.results_dir, 'changes.patch')
        with open(patch_path, 'wb') as f:
            f.write(patch)
            f.write(b'\n')

        results_store.set_changes_patch('changes_patch', patch_path)

    @classmethod
    def _prepare_rebased_repository(cls, patches, rebased_sources_dir):
        """
        Initialize git repository in the rebased directory
        :return: git.Repo instance of rebased_sources
        """
        # copy every known patch, applied or not, next to the rebased sources
        all_patches = patches['applied'] + patches['not_applied']
        for patch in all_patches:
            shutil.copy(patch.path, rebased_sources_dir)

        repo = git.Repo.init(rebased_sources_dir)
        repo.git.config('user.name', GitHelper.get_user(), local=True)
        repo.git.config('user.email', GitHelper.get_email(), local=True)
        repo.git.add(all=True)
        repo.index.commit('Initial commit', skip_hooks=True)
        return repo

    def build_packages(self):
        """Build (or download from Koji) old and new RPM packages.

        Retries a failed binary build up to ``conf.build_retries`` times,
        attempting to fix the %files section between attempts based on
        build-log analysis.

        :return: True on success, False when a build task yielded no RPMs
        :raises RebaseHelperError: when the build fails irrecoverably
        """
        try:
            builder = Builder(self.conf.buildtool)
        except NotImplementedError as ni_e:
            # fix: the format arguments must be a tuple; the original applied
            # '%' to the first value only, raising TypeError instead of the
            # intended error message
            raise RebaseHelperError(
                '%s. Supported build tools are %s' %
                (six.text_type(ni_e), Builder.get_supported_tools()))

        for version in ['old', 'new']:
            spec_object = self.spec_file if version == 'old' else self.rebase_spec_file
            build_dict = {}
            task_id = None
            koji_build_id = None

            if self.conf.build_tasks is None:
                pkg_name = spec_object.get_package_name()
                pkg_version = spec_object.get_version()
                pkg_full_version = spec_object.get_full_version()

                if version == 'old' and self.conf.get_old_build_from_koji:
                    if KojiHelper.functional:
                        koji_version, koji_build_id = KojiHelper.get_latest_build(
                            pkg_name)
                        if koji_version:
                            if koji_version != pkg_version:
                                logger.warning(
                                    'Version of the latest Koji build (%s) with id (%s) '
                                    'differs from version in SPEC file (%s)!',
                                    koji_version, koji_build_id, pkg_version)
                            # prefer the Koji build's version for the old side
                            pkg_version = pkg_full_version = koji_version
                        else:
                            logger.warning(
                                'Unable to find the latest Koji build!')
                    else:
                        logger.warning('Unable to get the latest Koji build!')

                # prepare for building
                builder.prepare(spec_object, self.conf)

                build_dict['name'] = pkg_name
                build_dict['version'] = pkg_version
                patches = [x.get_path() for x in spec_object.get_patches()]
                spec = spec_object.get_path()
                sources = spec_object.get_sources()
                logger.info('Building packages for %s version %s', pkg_name,
                            pkg_full_version)
            else:
                # detached mode: builds were submitted earlier, only the
                # task ids are given
                if version == 'old':
                    task_id = self.conf.build_tasks[0]
                else:
                    task_id = self.conf.build_tasks[1]
            results_dir = os.path.join(self.results_dir, version) + '-build'

            files = {}
            number_retries = 0
            while self.conf.build_retries != number_retries:
                try:
                    if self.conf.build_tasks is None:
                        if koji_build_id:
                            build_dict['rpm'], build_dict[
                                'logs'] = KojiHelper.download_build(
                                    koji_build_id, results_dir)
                        else:
                            build_dict.update(
                                builder.build(spec, sources, patches,
                                              results_dir, **build_dict))
                    if builder.creates_tasks() and not koji_build_id:
                        if not self.conf.builds_nowait:
                            build_dict['rpm'], build_dict[
                                'logs'] = builder.wait_for_task(
                                    build_dict, results_dir)
                            if build_dict['rpm'] is None:
                                return False
                        elif self.conf.build_tasks:
                            build_dict['rpm'], build_dict[
                                'logs'] = builder.get_detached_task(
                                    task_id, results_dir)
                            if build_dict['rpm'] is None:
                                return False
                    # Build finished properly, leave the retry loop
                    results_store.set_build_data(version, build_dict)
                    break

                except SourcePackageBuildError as e:
                    build_dict.update(builder.get_logs())
                    build_dict['source_package_build_error'] = six.text_type(e)
                    results_store.set_build_data(version, build_dict)
                    #  always fail for original version
                    if version == 'old':
                        raise RebaseHelperError(
                            'Creating old SRPM package failed.')
                    logger.error('Building source package failed.')
                    #  TODO: implement log analyzer for SRPMs and add the checks here!!!
                    raise

                except BinaryPackageBuildError as e:
                    #  always fail for original version
                    rpm_dir = os.path.join(results_dir, 'RPM')
                    build_dict.update(builder.get_logs())
                    build_dict['binary_package_build_error'] = six.text_type(e)
                    results_store.set_build_data(version, build_dict)
                    build_log = 'build.log'
                    build_log_path = os.path.join(rpm_dir, build_log)
                    if version == 'old':
                        error_message = 'Building old RPM package failed. Check logs: {} '.format(
                            builder.get_logs().get('logs', 'N/A'))
                        raise RebaseHelperError(
                            error_message,
                            logfiles=builder.get_logs().get('logs'))
                    logger.error('Building binary packages failed.')
                    msg = 'Building package failed'
                    try:
                        files = BuildLogAnalyzer.parse_log(rpm_dir, build_log)
                    except BuildLogAnalyzerMissingError:
                        raise RebaseHelperError('Build log %s does not exist',
                                                build_log_path)
                    except BuildLogAnalyzerMakeError:
                        raise RebaseHelperError(
                            '%s during build. Check log %s',
                            msg,
                            build_log_path,
                            logfiles=[build_log_path])
                    except BuildLogAnalyzerPatchError:
                        raise RebaseHelperError(
                            '%s during patching. Check log %s',
                            msg,
                            build_log_path,
                            logfiles=[build_log_path])
                    except RuntimeError:
                        if self.conf.build_retries == number_retries:
                            raise RebaseHelperError(
                                '%s with unknown reason. Check log %s',
                                msg,
                                build_log_path,
                                logfiles=[build_log_path])

                    if 'missing' in files:
                        missing_files = '\n'.join(files['missing'])
                        logger.info('Files not packaged in the SPEC file:\n%s',
                                    missing_files)
                    elif 'deleted' in files:
                        deleted_files = '\n'.join(files['deleted'])
                        logger.warning(
                            'Removed files packaged in SPEC file:\n%s',
                            deleted_files)
                    else:
                        if self.conf.build_retries == number_retries:
                            # fix: report the full path like the sibling
                            # messages above, not just the bare file name
                            raise RebaseHelperError(
                                "Build failed, but no issues were found in the build log %s",
                                build_log_path,
                                logfiles=[build_log_path])
                    self.rebase_spec_file.modify_spec_files_section(files)

                if not self.conf.non_interactive:
                    msg = 'Do you want rebase-helper to try to build the packages one more time'
                    if not ConsoleHelper.get_message(msg):
                        raise KeyboardInterrupt
                else:
                    logger.warning(
                        'Some patches were not successfully applied')
                # build just failed, otherwise we would break out of the while loop
                logger.debug('Number of retries is %s',
                             self.conf.build_retries)
                number_retries += 1
                if self.conf.build_retries > number_retries:
                    # only remove builds if this retry is not the last one
                    if os.path.exists(os.path.join(results_dir, 'RPM')):
                        shutil.rmtree(os.path.join(results_dir, 'RPM'))
                    if os.path.exists(os.path.join(results_dir, 'SRPM')):
                        shutil.rmtree(os.path.join(results_dir, 'SRPM'))
            if self.conf.build_retries == number_retries:
                raise RebaseHelperError(
                    'Building package failed with unknown reason. Check all available log files.'
                )

        if self.conf.builds_nowait and not self.conf.build_tasks:
            if builder.creates_tasks():
                self.print_task_info(builder)

        return True

    def run_package_checkers(self, results_dir):
        """
        Runs checkers on packages and stores results in a given directory.

        :param results_dir: Path to directory in which to store the results.
        :type results_dir: str
        :return: None
        """
        results = {}

        for checker_name in self.conf.pkgcomparetool:
            try:
                results[checker_name] = checkers_runner.run_checker(
                    os.path.join(results_dir, 'checkers'), checker_name)
            except CheckerNotFoundError:
                # idiom fix: pass the argument lazily instead of eagerly
                # %-formatting the message string
                logger.error("Rebase-helper did not find checker '%s'.",
                             checker_name)

        for diff_name, result in six.iteritems(results):
            results_store.set_checker_output(diff_name, result)

    def get_all_log_files(self):
        """
        Return all log files created by rebase-helper: first the debug log
        file, then the report summary log file (each only if available).
        """
        candidates = (self.debug_log_file, self.report_log_file)
        return [log for log in candidates if FileHelper.file_available(log)]

    def get_new_build_logs(self):
        """Return stored build data for both versions under the 'build_ref' key."""
        builds = {version: results_store.get_build(version)
                  for version in ['old', 'new']}
        return {'build_ref': builds}

    def get_checker_outputs(self):
        """Map each checker to an available log file, or None if it produced no data."""
        checkers = {}
        if results_store.get_checkers():
            for check, data in six.iteritems(results_store.get_checkers()):
                if not data:
                    checkers[check] = None
                    continue
                # keep the last available log for this checker
                for log in six.iterkeys(data):
                    if FileHelper.file_available(log):
                        checkers[check] = log
        return checkers

    def get_rebased_patches(self):
        """
        Function returns a list of patches either
        '': [list_of_deleted_patches]
        :return:
        """
        messages = []
        stored_patches = results_store.get_patches()
        if stored_patches:
            for key, val in six.iteritems(stored_patches):
                if key:
                    messages.append(
                        'Following patches have been %s:\n%s' % (key, val))
        if not messages:
            # no patch was modified/deleted/added
            messages.append(
                'Patches were not touched. All were applied properly')
        return messages

    def print_summary(self, exception=None):
        """
        Save rebase-helper result and print the summary using output_tools_runner
        :param exception: Error message from rebase-helper
        :return:
        """
        logs = None
        # Store rebase helper result exception
        if exception:
            logs = exception.logfiles or None
            results_store.set_result_message('fail', exception.msg)
        else:
            results_store.set_result_message(
                'success', "Rebase to %s SUCCEEDED" % self.conf.sources)

        self.rebase_spec_file.update_paths_to_patches()
        self.generate_patch()
        output_tools_runner.run_output_tools(logs, self)

    def print_task_info(self, builder):
        """Log build-task information for the old and new builds."""
        build_refs = self.get_new_build_logs()['build_ref']
        for version in ['old', 'new']:
            logger.info(builder.get_task_info(build_refs[version]))

    def set_upstream_monitoring(self):
        """Mark this run as driven by upstream release monitoring."""
        # This function is used by the-new-hotness, do not remove it!
        self.upstream_monitoring = True

    def get_rebasehelper_data(self):
        """Collect build logs, patch info, checker outputs and log files into one dict."""
        return {
            'build_logs': self.get_new_build_logs(),
            'patches': self.get_rebased_patches(),
            'checkers': self.get_checker_outputs(),
            'logs': self.get_all_log_files(),
        }

    def run_download_compare(self, tasks_dict, dir_name):
        """Download Koji task results for both versions, run checkers and summarize.

        TODO: Add doc text with explanation
        """
        self.set_upstream_monitoring()
        koji_helper = KojiHelper()
        for version in ['old', 'new']:
            build_data = {}
            compare_dirname = os.path.join(dir_name, version)
            if not os.path.exists(compare_dirname):
                os.mkdir(compare_dirname, 0o777)
            task, upstream_version, package = tasks_dict[version]
            build_data['rpm'], build_data['logs'] = koji_helper.get_koji_tasks(
                [task], compare_dirname)
            build_data['version'] = upstream_version
            build_data['name'] = package
            results_store.set_build_data(version, build_data)
        if tasks_dict['status'] == 'CLOSED':
            self.run_package_checkers(dir_name)
        self.print_summary()
        rh_stuff = self.get_rebasehelper_data()
        logger.info(rh_stuff)
        return rh_stuff

    def run(self):
        """Main driver: validate option/build-tool combinations, prepare and
        patch sources, build packages, run checkers and clean up.

        :return: 0 on completion (None when waiting on detached build tasks)
        :raises RebaseHelperError: on invalid options or failed steps
        """
        # Certain options can be used only with specific build tools
        tools_creating_tasks = [
            k for k, v in six.iteritems(Builder.build_tools)
            if v.creates_tasks()
        ]
        if self.conf.buildtool not in tools_creating_tasks:
            options_used = []
            if self.conf.build_tasks is not None:
                options_used.append('--build-tasks')
            if self.conf.builds_nowait is True:
                options_used.append('--builds-nowait')
            if options_used:
                raise RebaseHelperError(
                    "%s can be used only with the following build tools: %s",
                    " and ".join(options_used),
                    ", ".join(tools_creating_tasks))
        elif self.conf.builds_nowait and self.conf.get_old_build_from_koji:
            raise RebaseHelperError(
                "%s can't be used with: %s" %
                ('--builds-nowait', '--get-old-build-from-koji'))

        tools_accepting_options = [
            k for k, v in six.iteritems(Builder.build_tools)
            if v.accepts_options()
        ]
        if self.conf.buildtool not in tools_accepting_options:
            options_used = []
            if self.conf.builder_options is not None:
                options_used.append('--builder-options')
            if options_used:
                raise RebaseHelperError(
                    "%s can be used only with the following build tools: %s",
                    " and ".join(options_used),
                    ", ".join(tools_accepting_options))

        sources = None
        if self.conf.build_tasks is None:
            # a fresh run (not resuming detached build tasks)
            sources = self.prepare_sources()
            if not self.conf.build_only and not self.conf.comparepkgs:
                try:
                    self.patch_sources(sources)
                except RebaseHelperError as e:
                    # Print summary and return error
                    self.print_summary(e)
                    raise

        build = False
        if not self.conf.patch_only:
            if not self.conf.comparepkgs:
                # Build packages
                try:
                    build = self.build_packages()
                    # detached builds: nothing more to do until tasks finish
                    if self.conf.builds_nowait and not self.conf.build_tasks:
                        return
                # Print summary and return error
                except RebaseHelperError as e:
                    self.print_summary(e)
                    raise
            else:
                # compare pre-built packages instead of building
                build = self.get_rpm_packages(self.conf.comparepkgs)
                # We don't care dirname doesn't contain any RPM packages
                # Therefore return 1
            if build:
                try:
                    self.run_package_checkers(self.results_dir)
                # Print summary and return error
                except RebaseHelperError as e:
                    self.print_summary(e)
                    raise
            else:
                if not self.upstream_monitoring:
                    # TODO: This should be an ERROR
                    logger.info(
                        'Rebase package to %s FAILED. See for more details',
                        self.conf.sources)

        if not self.conf.keep_workspace:
            self._delete_workspace_dir()

        if self.debug_log_file:
            self.print_summary()
        return 0