def get_bugzilla_component(cls, bugzilla_id: str) -> str:
        """Gets a component of the bugzilla.

        Args:
            bugzilla_id: ID of the bugzilla.

        Returns:
            Component of the bugzilla.

        Raises:
            RebaseHelperError: If no such bugzilla exists or if the bugzilla
            was not created by Upstream Release Monitoring.

        """
        r = requests.get('{}/bug/{}'.format(cls.BUGZILLA_REST_API_URL,
                                            bugzilla_id))
        if not r.ok:
            raise RebaseHelperError('Could not obtain data from bugzilla')

        response_json = r.json()
        if 'error' in response_json:
            logger.error('Bugzilla error: %s', response_json['error'])
            raise RebaseHelperError('Could not obtain data from bugzilla')

        bug = response_json['bugs'][0]
        if bug['creator_detail'][
                'email'] != cls.UPSTREAM_RELEASE_MONITORING_USERNAME:
            raise RebaseHelperError(
                'The given bugzilla was not created by Upstream Release Monitoring'
            )
        return bug['component'][0]
Example #2
    def split_version_string(version_string: str, current_version: str) -> Tuple[str, Optional[str]]:
        """Splits version string into version and extra version.

        Args:
            version_string: Complete version string.
            current_version: Current version (the value of Version tag).

        Returns:
            Tuple of version and extra_version.

        Raises:
            RebaseHelperError: If the passed version string is not valid.

        """
        version_re = re.compile(r'^(\d+[.\d]*\d+|\d+)(\.|-|_|\+|~)?(\w+)?$')
        m = version_re.match(version_string)
        if not m:
            raise RebaseHelperError('Invalid version string: {}'.format(version_string))
        version, separator, extra = m.groups()
        m = version_re.match(current_version)
        if not m:
            raise RebaseHelperError('Invalid version string: {}'.format(current_version))
        if m.group(3):
            # if current version contains non-numeric characters, the new version should too
            version += (separator or '') + (extra or '')
            extra = None  # type: ignore  # the type is actually Optional[str], but is defined as str in typeshed
        logger.debug('Split version string %s into %s and %s', version_string, version, extra)
        return version, extra
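
A quick, standalone sketch of how the version-splitting regex above behaves on a few inputs (run outside rebase-helper; expected groups noted in the comment):

import re

version_re = re.compile(r'^(\d+[.\d]*\d+|\d+)(\.|-|_|\+|~)?(\w+)?$')

for s in ('1.2.3', '1.2.3b4', '2.0-rc1'):
    m = version_re.match(s)
    # prints: ('1.2.3', None, None), ('1.2.3', None, 'b4'), ('2.0', '-', 'rc1')
    print(s, '->', m.groups() if m else None)
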
Example #3
 def download_build(cls, client, build_id, destination):
     logger.info('Downloading packages and logs for build %d', build_id)
     try:
         result = client.get_build_details(build_id)
     except copr.client.exceptions.CoprRequestException as e:
         raise RebaseHelperError(
             'Failed to get copr build details for {}: {}'.format(
                 build_id, str(e)))
     rpms = []
     logs = []
     for _, url in six.iteritems(result.data['results_by_chroot']):
         url = url if url.endswith('/') else url + '/'
         d = pyquery.PyQuery(url)
         d.make_links_absolute()
         for a in d('a[href$=\'.rpm\'], a[href$=\'.log.gz\']'):
             fn = os.path.basename(
                 urllib.parse.urlsplit(a.attrib['href']).path)
             dest = os.path.join(destination, fn)
             if fn.endswith('.src.rpm'):
                 # skip source RPM
                 continue
             DownloadHelper.download_file(a.attrib['href'], dest)
             if fn.endswith('.rpm'):
                 rpms.append(dest)
             elif fn.endswith('.log.gz'):
                 extracted = dest.replace('.log.gz', '.log')
                 try:
                     with gzip.open(dest, 'rb') as archive:
                         with open(extracted, 'wb') as f:
                             f.write(archive.read())
                 except (IOError, EOFError):
                     raise RebaseHelperError(
                         'Failed to extract {}'.format(dest))
                 logs.append(extracted)
     return rpms, logs
Example #4
    def run_check(cls, results_dir, **kwargs):
        """
        Compares old and new RPMs using pkgdiff
        :param results_dir: directory where the results are stored
        """
        cls.results_dir = os.path.join(results_dir, cls.name)
        cls.prepare_results_dir()
        cls.pkgdiff_results_full_path_html = os.path.join(
            cls.results_dir, cls.pkgdiff_results_filename + '.html')

        cmd = [cls.CMD]
        cmd.append('-hide-unchanged')
        for version in ['old', 'new']:
            old = results_store.get_build(version)
            if old:
                file_name = cls._create_xml(version, input_structure=old)
                cmd.append(file_name)
        cmd.append('-extra-info')
        cmd.append(cls.results_dir)
        cmd.append('-report-path')
        cmd.append(cls.pkgdiff_results_full_path_html)
        try:
            ret_code = ProcessHelper.run_subprocess(
                cmd, output_file=ProcessHelper.DEV_NULL)
        except OSError as e:
            raise CheckerNotFoundError(
                "Checker '{}' was not found or installed.".format(
                    cls.name)) from e

        # From pkgdiff source code:
        # ret_code 0 means unchanged
        # ret_code 1 means Changed
        # other return codes mean error
        if int(ret_code) != 0 and int(ret_code) != 1:
            raise RebaseHelperError(
                'Execution of {} failed.\nCommand line is: {}'.format(
                    cls.CMD, cmd))
        results_dict = cls.process_xml_results(cls.results_dir)
        lines: List[str] = []

        for key, val in results_dict.items():
            if val:
                if lines:
                    lines.append('')
                lines.append('Following files were {}:'.format(key))
                lines.extend(val)

        pkgdiff_report = os.path.join(cls.results_dir,
                                      cls.pkgdiff_results_filename + '.txt')
        try:
            with open(pkgdiff_report, "w", encoding=ENCODING) as f:
                f.write('\n'.join(lines))
        except IOError as e:
            raise RebaseHelperError(
                "Unable to write result from {} to '{}'".format(
                    cls.name, pkgdiff_report)) from e

        return dict(path=cls.get_checker_output_dir_short())
Example #5
    def _prepare_spec_objects(self):
        """
        Prepare spec files and initialize objects

        :return:
        """
        self.rebase_spec_file_path = get_rebase_name(self.rebased_sources_dir, self.spec_file_path)

        self.spec_file = SpecFile(self.spec_file_path,
                                  self.conf.changelog_entry,
                                  self.execution_dir,
                                  download=not self.conf.not_download_sources)
        # Check whether test suite is enabled at build time
        if not self.spec_file.is_test_suite_enabled():
            results_store.set_info_text('WARNING', 'Test suite is not enabled at build time.')
        # create an object representing the rebased SPEC file
        self.rebase_spec_file = self.spec_file.copy(self.rebase_spec_file_path)

        if not self.conf.sources:
            self.conf.sources = versioneers_runner.run(self.conf.versioneer,
                                                       self.spec_file.get_package_name(),
                                                       self.spec_file.category,
                                                       self.conf.versioneer_blacklist)
            if self.conf.sources:
                logger.info("Determined latest upstream version '%s'", self.conf.sources)
            else:
                raise RebaseHelperError('Could not determine latest upstream version '
                                        'and no SOURCES argument specified!')

        # Prepare rebased_sources_dir
        self.rebased_repo = self._prepare_rebased_repository(self.spec_file.patches,
                                                             self.execution_dir,
                                                             self.rebased_sources_dir)

        # check if argument passed as new source is a file or just a version
        if any(self.conf.sources.endswith(ext) for ext in Archive.get_supported_archives()):
            logger.debug("argument passed as a new source is a file")
            self.rebase_spec_file.set_version_using_archive(self.conf.sources)
        else:
            logger.debug("argument passed as a new source is a version")
            version, extra_version, separator = SpecFile.split_version_string(self.conf.sources)
            self.rebase_spec_file.set_version(version)
            self.rebase_spec_file.set_extra_version_separator(separator)
            self.rebase_spec_file.set_extra_version(extra_version)

        if not self.conf.skip_version_check and parse_version(self.rebase_spec_file.get_version()) \
                <= parse_version(self.spec_file.get_version()):
            raise RebaseHelperError("Current version is equal to or newer than the requested version, nothing to do.")

        # run spec hooks
        spec_hooks_runner.run_spec_hooks(self.spec_file, self.rebase_spec_file, **self.kwargs)

        # spec file object has been sanitized, downloading can proceed
        for spec_file in [self.spec_file, self.rebase_spec_file]:
            if spec_file.download:
                spec_file.download_remote_sources()
                # parse spec again with sources downloaded to properly expand %prep section
                spec_file._update_data()  # pylint: disable=protected-access
Example #6
    def build_source_packages(self):
        try:
            builder = srpm_build_helper.get_tool(self.conf.srpm_buildtool)
        except NotImplementedError as e:
            raise RebaseHelperError('{}. Supported SRPM build tools are {}'.format(
                six.text_type(e), srpm_build_helper.get_supported_tools()))

        for version in ['old', 'new']:
            koji_build_id = None
            results_dir = '{}-build'.format(os.path.join(self.results_dir, version))
            spec = self.spec_file if version == 'old' else self.rebase_spec_file
            package_name = spec.get_package_name()
            package_version = spec.get_version()
            package_full_version = spec.get_full_version()
            logger.info('Building source package for %s version %s', package_name, package_full_version)

            if version == 'old' and self.conf.get_old_build_from_koji:
                koji_build_id, package_version, package_full_version = KojiHelper.get_old_build_info(package_name,
                                                                                                     package_version)

            build_dict = dict(
                name=package_name,
                version=package_version,
                srpm_buildtool=self.conf.srpm_buildtool,
                srpm_builder_options=self.conf.srpm_builder_options)
            try:
                if koji_build_id:
                    session = KojiHelper.create_session()
                    build_dict['srpm'], build_dict['logs'] = KojiHelper.download_build(session,
                                                                                       koji_build_id,
                                                                                       os.path.join(
                                                                                           results_dir,
                                                                                           'SRPM'
                                                                                       ),
                                                                                       arches=['src'])

                else:
                    build_dict.update(builder.build(spec, results_dir, **build_dict))
                build_dict = self._sanitize_build_dict(build_dict)
                results_store.set_build_data(version, build_dict)
            except RebaseHelperError:  # pylint: disable=try-except-raise
                raise
            except SourcePackageBuildError as e:
                build_dict.update(builder.get_logs())
                build_dict['source_package_build_error'] = six.text_type(e)
                build_dict = self._sanitize_build_dict(build_dict)
                results_store.set_build_data(version, build_dict)
                if e.logfile:
                    msg = 'Building {} SRPM packages failed; see {} for more information'.format(version, e.logfile)
                else:
                    msg = 'Building {} SRPM packages failed; see logs in {} for more information'.format(
                        version, os.path.join(results_dir, 'SRPM'))
                raise RebaseHelperError(msg, logfiles=builder.get_logs().get('logs'))
            except Exception:
                raise RebaseHelperError('Building package failed with unknown reason. '
                                        'Check all available log files.')
Example #7
    def run_check(cls, results_dir):
        """
        Compares old and new RPMs using pkgdiff
        :param results_dir: directory where the results are stored
        """
        cls.results_dir = results_dir
        cls.pkgdiff_results_full_path = os.path.join(cls.results_dir, cls.pkgdiff_results_filename)

        cmd = [cls.CMD]
        cmd.append('-hide-unchanged')
        for version in ['old', 'new']:
            old = results_store.get_build(version)
            if old:
                file_name = cls._create_xml(version, input_structure=old)
                cmd.append(file_name)
        cmd.append('-extra-info')
        cmd.append(cls.results_dir)
        cmd.append('-report-path')
        cmd.append(cls.pkgdiff_results_full_path)
        try:
            ret_code = ProcessHelper.run_subprocess(cmd, output=ProcessHelper.DEV_NULL)
        except OSError:
            raise CheckerNotFoundError("Checker '%s' was not found or installed." % cls.CMD)

        """
         From pkgdiff source code:
         ret_code 0 means unchanged
         ret_code 1 means Changed
         other return codes means error
        """
        if int(ret_code) != 0 and int(ret_code) != 1:
            raise RebaseHelperError('Execution of %s failed.\nCommand line is: %s' % (cls.CMD, cmd))
        results_dict = cls.process_xml_results(cls.results_dir)
        lines = []

        for key, val in six.iteritems(results_dict):
            if val:
                if lines:
                    lines.append('')
                lines.append('Following files were %s:' % key)
                lines.extend(val)

        pkgdiff_report = os.path.join(cls.results_dir, 'report-' + cls.pkgdiff_results_filename + '.log')
        try:
            with open(pkgdiff_report, "w") as f:
                f.write('\n'.join(lines))
        except IOError:
            raise RebaseHelperError("Unable to write result from %s to '%s'" % (cls.CMD, pkgdiff_report))

        return {pkgdiff_report: None}
Example #8
    def upload_srpm(cls, session, srpm):
        """Uploads SRPM to a Koji hub.

        Args:
            session (koji.ClientSession): Active Koji session instance.
            srpm (str): Valid path to SRPM.

        Returns:
            str: Remote path to the uploaded SRPM.

        Raises:
            RebaseHelperError: If upload failed.

        """
        def progress(uploaded, total, chunksize, t1, t2):  # pylint: disable=unused-argument
            DownloadHelper.progress(total, uploaded, upload_start)

        suffix = ''.join(
            [random.choice(string.ascii_letters) for _ in range(8)])
        path = os.path.join('cli-build', six.text_type(time.time()), suffix)
        logger.info('Uploading SRPM')
        try:
            try:
                upload_start = time.time()
                session.uploadWrapper(srpm, path, callback=progress)
            except koji.GenericError as e:
                raise RebaseHelperError('Upload failed: {}'.format(
                    six.text_type(e)))
        finally:
            sys.stdout.write('\n')
            sys.stdout.flush()
        return os.path.join(path, os.path.basename(srpm))
Example #9
    def create_session(cls, profile='koji'):
        """Creates new Koji session and immediately logs in to a Koji hub.

        Args:
            profile (str): Koji profile to use.

        Returns:
            koji.ClientSession: Newly created session instance.

        Raises:
            RebaseHelperError: If login failed.

        """
        config = koji.read_config(profile)
        session = koji.ClientSession(config['server'], opts=config)
        try:
            session.gssapi_login()
        except Exception:  # pylint: disable=broad-except
            pass
        else:
            return session
        # fall back to kerberos login (doesn't work with python3)
        exc = (koji.AuthError,
               koji.krbV.Krb5Error) if koji.krbV else koji.AuthError
        try:
            session.krb_login()
        except exc as e:
            raise RebaseHelperError('Login failed: {}'.format(
                six.text_type(e)))
        else:
            return session
    def clone_repository(cls, component: str, bugzilla_id: str) -> str:
        """Clones remote dist-git repository of a component.

        Args:
            component: Package to clone.
            bugzilla_id: ID of the bugzilla.

        Returns:
            Path to the cloned repository.

        Raises:
            RebaseHelperError: If the directory that the repository
            is supposed to be cloned into already exists.

        """
        path = os.path.abspath('{}-{}'.format(bugzilla_id, component))
        if os.path.exists(path):
            raise RebaseHelperError(
                'Could not clone the repository because the directory '
                '{} already exists'.format(path))

        url = '{}/{}.git'.format(cls.DIST_GIT_REPO_URL, component)
        logger.info("Cloning %s into %s", url, path)
        git.Repo.clone_from(url, path)
        return path
    def print_summary(cls, path, results):
        """Function is used for printing summary information"""
        if results.get_summary_info():
            for key, value in six.iteritems(results.get_summary_info()):
                logger.info("%s %s\n", key, value)

        try:
            LoggerHelper.add_file_handler(logger_report, path)
        except (OSError, IOError):
            raise RebaseHelperError(
                "Can not create results file '{}'".format(path))

        cls.results_store = results

        cls.print_success_message()
        logger_report.info("All result files are stored in %s",
                           os.path.dirname(path))

        cls.print_changes_patch()

        cls.print_checkers_text_output(results.get_checkers())

        if results.get_patches():
            cls.print_patches(results.get_patches())

        cls.print_message_and_separator("\nRPMS")
        for pkg_version in ['old', 'new']:
            pkg_results = results.get_build(pkg_version)
            if pkg_results:
                cls.print_rpms_and_logs(pkg_results, pkg_version.capitalize())
Example #12
    def revert_redefine_release_with_macro(self, macro):
        """
        Method removes the redefined Release line with the given macro and uncomments the old Release line.

        :param macro:
        :return:
        """
        search_re = re.compile(
            r'^Release:\s*[0-9.]*[0-9]+\.{0}%{{\?dist}}\s*'.format(macro))

        for index, line in enumerate(self.spec_content):
            match = search_re.search(line)
            if match:
                # We will uncomment old line, so sanity check first
                if not self.spec_content[index - 1].startswith('#Release:'):
                    raise RebaseHelperError(
                        "Redefined Release line in SPEC is not 'commented out' "
                        "old line: '{0}'".format(self.spec_content[index -
                                                                   1].strip()))
                logger.debug("Uncommenting original Release line "
                             "'%s'", self.spec_content[index - 1].strip())
                self.spec_content[index - 1] = self.spec_content[index -
                                                                 1].lstrip('#')
                logger.debug("Removing redefined Release line '%s'",
                             line.strip())
                self.spec_content.pop(index)
                self.save()
                break
Example #13
    def _update_data(self):
        """
        Function updates data from the given SPEC file

        :return: 
        """
        # Load rpm information
        try:
            self.spc = rpm.spec(self.path)
        except ValueError:
            raise RebaseHelperError("Problem with parsing SPEC file '%s'" %
                                    self.path)
        self.sources = self._get_spec_sources_list(self.spc)
        self.prep_section = self.spc.prep
        # HEADER of SPEC file
        self.hdr = self.spc.sourceHeader
        self.rpm_sections = self._split_sections()
        # determine the extra_version
        logger.debug("Updating the extra version")
        _, self.extra_version, separator = SpecFile.extract_version_from_archive_name(
            self.get_archive(), self._get_raw_source_string(0))
        self.set_extra_version_separator(separator)

        self.patches = self._get_initial_patches_list()
        self.macros = MacroHelper.dump()

        # TODO: don't call this at all in SPEC file methods
        if self.download:
            self.download_remote_sources()
Example #14
 def build(cls, client, project, srpm):
     try:
         result = client.create_new_build(projectname=project, pkgs=[srpm])
     except copr.client.exceptions.CoprRequestException:
         raise RebaseHelperError('Failed to start copr build')
     else:
         return result.builds_list[0].build_id
 def create_project(cls,
                    client,
                    project,
                    chroots,
                    description,
                    instructions,
                    permanent=False,
                    hide=True):
     try:
         client.project_proxy.get(ownername=client.config.get('username'),
                                  projectname=project)
         # Project found, reuse it
     except CoprNoResultException:
         try:
             client.project_proxy.add(
                 ownername=client.config.get('username'),
                 projectname=project,
                 chroots=chroots,
                 delete_after_days=None
                 if permanent else cls.DELETE_PROJECT_AFTER,
                 unlisted_on_hp=hide,
                 description=description,
                 instructions=instructions)
         except CoprRequestException as e:
             error = e.result.error
             try:
                 [[error]] = error.values()
             except AttributeError:
                 pass
             raise RebaseHelperError(
                 'Failed to create copr project. Reason: {}'.format(
                     error)) from e
Example #16
 def split_nevra(cls, string):
     """Splits string into name, epoch, version, release and arch components"""
     regexps = [
         ('NEVRA',
          re.compile(r'^([^:]+)-(([0-9]+):)?([^-:]+)-(.+)\.([^.]+)$')),
         ('NEVR', re.compile(r'^([^:]+)-(([0-9]+):)?([^-:]+)-(.+)()$')),
         ('NA', re.compile(r'^([^:]+)()()()()\.([^.]+)$')),
         ('N', re.compile(r'^([^:]+)()()()()()$')),
     ]
     if not cls.ARCHES:
         cls.ARCHES = cls.get_arches()
     for pattern, regexp in regexps:
         match = regexp.match(string)
         if not match:
             continue
         name = match.group(1) or None
         epoch = match.group(3) or None
         if epoch:
             epoch = int(epoch)
         version = match.group(4) or None
         release = match.group(5) or None
         arch = match.group(6) or None
         if pattern == 'NEVRA' and arch not in cls.ARCHES:
             # unknown arch, let's assume it's actually dist
             continue
         return dict(name=name,
                     epoch=epoch,
                     version=version,
                     release=release,
                     arch=arch)
     raise RebaseHelperError('Unable to split string into NEVRA.')
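
For illustration, a self-contained run of the NEVRA pattern above on a typical build string (the arch whitelist check from the method is omitted here):

import re

nevra_re = re.compile(r'^([^:]+)-(([0-9]+):)?([^-:]+)-(.+)\.([^.]+)$')
m = nevra_re.match('bash-1:5.0.7-1.fc30.x86_64')
# name, epoch, version, release, arch -> bash 1 5.0.7 1.fc30 x86_64
print(m.group(1), m.group(3), m.group(4), m.group(5), m.group(6))
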
Example #17
    def extract_version_from_archive_name(archive_path: str, main_source: str) -> str:
        """Extracts version string from source archive name.

        Args:
            archive_path: Path to the main sources archive.
            main_source: Value of Source0 tag.

        Returns:
            Extracted version string.

        Raises:
            RebaseHelperError: If the version can't be determined.

        """
        fallback_regex = r'\w*[-_]?v?([.\d]+.*)({0})'.format(
            '|'.join([re.escape(a) for a in Archive.get_supported_archives()]))
        source = os.path.basename(main_source)
        regex = re.sub(r'%({)?version(?(1)})(.*%(\w+|{.+}))?', 'PLACEHOLDER', source, flags=re.IGNORECASE)
        regex = MacroHelper.expand(regex, regex)
        regex = re.escape(regex).replace('PLACEHOLDER', r'(.+)')
        if regex == re.escape(MacroHelper.expand(source, source)):
            # no substitution was made, use the fallback regex
            regex = fallback_regex
        logger.debug('Extracting version from archive name using %s', regex)
        archive_name = os.path.basename(archive_path)
        m = re.match(regex, archive_name)
        if m:
            logger.debug('Extracted version %s', m.group(1))
            return m.group(1)
        if regex != fallback_regex:
            m = re.match(fallback_regex, archive_name)
            if m:
                logger.debug('Extracted version %s', m.group(1))
                return m.group(1)
        raise RebaseHelperError('Unable to extract version from archive name')
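
A minimal sketch of the fallback regex logic described above, with the archive suffix list replaced by a small hard-coded sample (the real code takes it from Archive.get_supported_archives()):

import re

suffixes = ['.tar.gz', '.tar.xz', '.zip']
fallback_regex = r'\w*[-_]?v?([.\d]+.*)({0})'.format('|'.join(re.escape(s) for s in suffixes))

m = re.match(fallback_regex, 'project-1.4.2.tar.gz')
print(m.group(1) if m else None)  # -> 1.4.2
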
    def get_version_from_comments(cls, bugzilla_id: str) -> Optional[str]:
        """Gets version from bugzilla comments.

        Args:
            bugzilla_id: ID of the bugzilla.

        Returns:
            Version specified by Upstream Release Monitoring in comments
            or None, if no version could be found.

        Raises:
            RebaseHelperError: If no such bugzilla exists.

        """
        r = requests.get('{}/bug/{}/comment'.format(cls.BUGZILLA_REST_API_URL,
                                                    bugzilla_id))
        if not r.ok:
            raise RebaseHelperError('Could not obtain data from bugzilla')
        version = None
        comments = r.json()['bugs'][bugzilla_id]['comments']
        pattern = re.compile(r'^Latest upstream release: (?P<version>.*)\n')
        for comment in comments:
            if comment['creator'] != cls.UPSTREAM_RELEASE_MONITORING_USERNAME:
                continue
            match = pattern.match(comment['text'])
            if match:
                version = match.group('version')

        return version
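
A self-contained sketch of the comment-matching pattern used above, run against a made-up comment of the kind Upstream Release Monitoring posts (the text is only illustrative):

import re

pattern = re.compile(r'^Latest upstream release: (?P<version>.*)\n')
comment_text = 'Latest upstream release: 2.7.1\nCurrent version/release in rawhide: 2.7.0-1.fc30\n'
m = pattern.match(comment_text)
print(m.group('version') if m else None)  # -> 2.7.1
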
Example #19
    def _create_xml(cls, name, input_structure):
        """
        Function creates an XML file for the pkgdiff command
        :param name: package name
        :param input_structure: structure provided by OutputLogger.get_build('new' or 'old')
        :return: path to the created XML file
        """
        file_name = os.path.join(cls.results_dir, name + ".xml")
        if input_structure.get('version', '') == '':
            input_structure['version'] = cls._get_rpm_info('version', input_structure['rpm'])

        if input_structure.get('name', '') == '':
            input_structure['name'] = cls._get_rpm_info('name', input_structure['rpm'])

        tags = {'version': input_structure.get('version', ""),
                'group': input_structure.get('name', ''),
                'packages': input_structure.get('rpm', [])}
        lines = []
        for key, value in tags.items():
            new_value = value if isinstance(value, str) else '\n'.join(value)
            lines.append('<{0}>\n{1}\n</{0}>\n'.format(key, new_value))

        try:
            with open(file_name, 'w') as f:
                f.writelines(lines)
        except IOError:
            raise RebaseHelperError("Unable to create XML file for pkgdiff tool '%s'" % file_name)

        return file_name
Example #20
    def download_remote_sources(self):
        """
        Method that iterates over all sources and downloads those that contain a URL instead of just a file name.

        :return: None
        """
        try:
            # try to download old sources from Fedora lookaside cache
            LookasideCacheHelper.download(self.lookaside_cache_preset, os.path.dirname(self.path), self.header.name,
                                          self.sources_location)
        except LookasideCacheError as e:
            logger.verbose("Downloading sources from lookaside cache failed. "
                           "Reason: %s.", str(e))

        # filter out only sources with URL
        remote_files = [source for source in self.sources if bool(urllib.parse.urlparse(source).scheme)]
        # download any sources that are not yet downloaded
        for remote_file in remote_files:
            local_file = os.path.join(self.sources_location, os.path.basename(remote_file))
            if not os.path.isfile(local_file):
                logger.verbose("File '%s' doesn't exist locally, downloading it.", local_file)
                try:
                    DownloadHelper.download_file(remote_file, local_file)
                except DownloadError as e:
                    raise RebaseHelperError("Failed to download file from URL {}. "
                                            "Reason: '{}'. ".format(remote_file, str(e))) from e
Example #21
    def print_summary(cls, path, results=results_store):
        """
        Function is used for printing summary information

        :return: 
        """
        if results.get_summary_info():
            for key, value in six.iteritems(results.get_summary_info()):
                logger.info("%s %s\n", key, value)

        try:
            LoggerHelper.add_file_handler(logger_report, path)
        except (OSError, IOError):
            raise RebaseHelperError("Can not create results file '%s'" % path)

        type_pkgs = ['old', 'new']
        if results.get_patches():
            cls.print_patches(results.get_patches(),
                              '\nSummary information about patches:')
        for pkg in type_pkgs:
            type_pkg = results.get_build(pkg)
            if type_pkg:
                cls.print_rpms(type_pkg, pkg.capitalize())
                cls.print_build_logs(type_pkg, pkg.capitalize())

        cls.print_pkgdiff_tool(results.get_checkers())
Example #22
 def get_build_status(cls, client, build_id):
     try:
         result = client.get_build_details(build_id)
     except copr.client.exceptions.CoprRequestException:
         raise RebaseHelperError(
             'Failed to get copr build details for id {}'.format(build_id))
     else:
         return result.status
Example #23
 def _write_spec_content(self):
     """Writes the current state of SpecContent into a file."""
     logger.verbose("Writing SPEC file '%s' to the disc", self.path)
     try:
         with open(self.path, "w") as f:
             f.write(str(self.spec_content))
     except IOError as e:
         raise RebaseHelperError("Unable to write updated data to SPEC file '{}'".format(self.path)) from e
Example #24
 def _get_spec_file(self):
     """Function gets the spec file from the execution_dir directory"""
     self.spec_file_path = PathHelper.find_first_file(
         self.execution_dir, '*.spec', 0)
     if not self.spec_file_path:
         raise RebaseHelperError(
             "Could not find any SPEC file in the current directory '%s'" %
             self.execution_dir)
 def get_client(cls):
     try:
         client = Client.create_from_config_file()
     except (CoprNoConfigException, CoprConfigException) as e:
         raise RebaseHelperError(
             'Missing or invalid copr configuration file') from e
     else:
         return client
Example #26
    def run_check(cls, result_dir):
        """Compares old and new RPMs using pkgdiff"""
        debug_old, rest_pkgs_old = cls._get_packages_for_abipkgdiff(results_store.get_build('old'))
        debug_new, rest_pkgs_new = cls._get_packages_for_abipkgdiff(results_store.get_build('new'))
        cmd = [cls.CMD]
        if debug_old is None:
            logger.warning("Package doesn't contain any debug package")
            return None
        try:
            cmd.append('--d1')
            cmd.append(debug_old[0])
        except IndexError:
            logger.error('Debuginfo package not found for old package.')
            return None
        try:
            cmd.append('--d2')
            cmd.append(debug_new[0])
        except IndexError:
            logger.error('Debuginfo package not found for new package.')
            return None
        reports = {}
        for pkg in rest_pkgs_old:
            command = list(cmd)
            # Package can be <letters><numbers>-<letters>-<and_whatever>
            regexp = r'^(\w*)(-\D+)?.*$'
            reg = re.compile(regexp)
            matched = reg.search(os.path.basename(pkg))
            if matched:
                file_name = matched.group(1)
                command.append(pkg)
                find = [x for x in rest_pkgs_new if os.path.basename(x).startswith(file_name)]
                command.append(find[0])
                package_name = os.path.basename(os.path.basename(pkg))
                logger.debug('Package name for ABI comparison %s', package_name)
                regexp_name = r'(\w-)*(\D+)*'
                reg_name = re.compile(regexp_name)
                matched = reg_name.search(os.path.basename(pkg))
                logger.debug('Found matches %s', matched.groups())
                if matched:
                    package_name = matched.group(0) + cls.log_name
                else:
                    package_name = package_name + '-' + cls.log_name
                output = os.path.join(cls.results_dir, result_dir, package_name)
                try:
                    ret_code = ProcessHelper.run_subprocess(command, output=output)
                except OSError:
                    raise CheckerNotFoundError("Checker '%s' was not found or installed." % cls.CMD)

                if int(ret_code) & settings.ABIDIFF_ERROR and int(ret_code) & settings.ABIDIFF_USAGE_ERROR:
                    raise RebaseHelperError('Execution of %s failed.\nCommand line is: %s' % (cls.CMD, cmd))
                if int(ret_code) == 0:
                    text = 'ABI of the compared binaries in package %s are equal.' % package_name
                else:
                    text = 'ABI of the compared binaries in package %s are not equal.' % package_name
                reports[output] = text
            else:
                logger.debug("Rebase-helper did not find a package name in '%s'", package_name)
        return reports
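
A toy illustration of the package-name regexp used above, outside the checker (the RPM file name below is made up):

import os
import re

reg = re.compile(r'^(\w*)(-\D+)?.*$')
matched = reg.search(os.path.basename('/tmp/libfoo-devel-1.2.3-1.fc30.x86_64.rpm'))
print(matched.group(1) if matched else None)  # -> libfoo
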
Example #27
    def run_check(cls, results_dir):
        """Compares old and new RPMs using rpmdiff"""
        results_dict = {}

        for tag in settings.CHECKER_TAGS:
            results_dict[tag] = []
        cls.results_dir = results_dir

        # Only S (size), M(mode) and 5 (checksum) are now important
        not_catched_flags = ['T', 'F', 'G', 'U', 'V', 'L', 'D', 'N']
        old_pkgs = cls._get_rpms(results_store.get_old_build().get(
            'rpm', None))
        new_pkgs = cls._get_rpms(results_store.get_new_build().get(
            'rpm', None))
        for key, value in six.iteritems(old_pkgs):
            if 'debuginfo' in key or 'debugsource' in key:
                # skip debug{info,source} packages
                continue
            cmd = [cls.CMD]
            # TODO modify to online command
            for x in not_catched_flags:
                cmd.extend(['-i', x])
            cmd.append(value)
            # We want to compare the correct old package against the corresponding new package
            try:
                cmd.append(new_pkgs[key])
            except KeyError:
                logger.warning('New version of package %s was not found!', key)
                continue
            output = StringIO()
            try:
                ProcessHelper.run_subprocess(cmd, output=output)
            except OSError:
                raise CheckerNotFoundError(
                    "Checker '%s' was not found or installed." % cls.CMD)
            results_dict = cls._analyze_logs(output, results_dict)

        results_dict = cls.update_added_removed(results_dict)
        results_dict = dict(
            (k, v) for k, v in six.iteritems(results_dict) if v)
        lines = []
        for key, val in six.iteritems(results_dict):
            if val:
                if lines:
                    lines.append('')
                lines.append('Following files were %s:' % key)
                lines.extend(val)

        rpmdiff_report = os.path.join(cls.results_dir,
                                      'report-' + cls.CMD + '.log')
        try:
            with open(rpmdiff_report, "w") as f:
                f.write('\n'.join(lines))
        except IOError:
            raise RebaseHelperError("Unable to write result from %s to '%s'" %
                                    (cls.CMD, rpmdiff_report))

        return {rpmdiff_report: None}
Example #28
 def get_client(cls):
     try:
         client = copr.CoprClient.create_from_file_config()
     except (copr.client.exceptions.CoprNoConfException,
             copr.client.exceptions.CoprConfigException):
         raise RebaseHelperError(
             'Missing or invalid copr configuration file')
     else:
         return client
Example #29
 def _write_spec_file_to_disc(self):
     """Write the current SPEC file to the disc"""
     logger.debug("Writing SPEC file '%s' to the disc", self.path)
     try:
         with open(self.path, "w") as f:
             f.writelines(self.spec_content)
     except IOError:
         raise RebaseHelperError(
             "Unable to write updated data to SPEC file '%s'" % self.path)
Example #30
    def extract_archive(archive_path, destination):
        """
        Extracts the given archive into the destination and handles all exceptions.

        :param archive_path: path to the archive to be extracted
        :param destination: path to the destination where the archive should be extracted
        :return:
        """
        archive = Archive(archive_path)

        try:
            archive.extract_archive(destination)
        except IOError as e:
            raise RebaseHelperError("Archive '{}' can not be extracted".format(
                archive_path)) from e
        except (EOFError, SystemError) as e:
            raise RebaseHelperError(
                "Archive '{}' is damaged".format(archive_path)) from e