Example #1
 def _scratch_build(cls, source, **kwargs):
     session = cls.koji_helper.session_maker()
     remote = cls.koji_helper.upload_srpm(session, source)
     task_id = session.build(remote, cls.target_tag, cls.opts, priority=cls.priority)
     if kwargs['builds_nowait']:
         return None, None, task_id
     weburl = cls.weburl + '/taskinfo?taskID=%i' % task_id
     logger.info('Koji task_id is here:\n%s', weburl)
     session.logout()
     task_dict = cls.koji_helper.watch_koji_tasks(session, [task_id])
     task_list = []
     package_failed = False
     for key in six.iterkeys(task_dict):
         if task_dict[key] == koji.TASK_STATES['FAILED']:
             package_failed = True
         task_list.append(key)
     rpms, logs = cls.koji_helper.download_scratch_build(task_list, os.path.dirname(source).replace('SRPM', 'RPM'))
     if package_failed:
         weburl = '%s/taskinfo?taskID=%i' % (cls.weburl, task_list[0])
         logger.info('RPM build failed %s', weburl)
         logs.append(weburl)
         cls.logs.append(weburl)
         raise BinaryPackageBuildError
     logs.append(weburl)
     return rpms, logs, task_id
Example #2
    def run(self, spec_file, rebase_spec_file, non_interactive, force_build_log_hooks, **kwargs):
        """Runs all non-blacklisted build log hooks.

        Args:
            spec_file (rebasehelper.specfile.SpecFile): Original SpecFile object.
            rebase_spec_file (rebasehelper.specfile.SpecFile): Rebased SpecFile object.
            non_interactive (bool): Whether rebase-helper is in non-interactive mode.
            force_build_log_hooks (bool): Whether to run the hooks even in
                non-interactive mode.
            kwargs (dict): Keyword arguments from instance of Application.

        Returns:
            bool: Whether build log hooks made some changes to the SPEC file.

        """
        changes_made = False
        if not non_interactive or force_build_log_hooks:
            blacklist = kwargs.get('build_log_hook_blacklist', [])
            for name, build_log_hook in self.plugins.items():
                if not build_log_hook or name in blacklist:
                    continue
                categories = build_log_hook.CATEGORIES
                if not categories or spec_file.category in categories:
                    logger.info('Running %s build log hook.', name)
                    result, rerun = build_log_hook.run(spec_file, rebase_spec_file, **kwargs)
                    result = build_log_hook.merge_two_results(results_store.get_build_log_hooks().get(name, {}), result)
                    results_store.set_build_log_hooks_result(name, result)
                    if rerun:
                        changes_made = True
        return changes_made
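As a hedged usage sketch of the runner above (the plugin_manager attribute, the surrounding objects and the blacklisted hook name are assumptions based on later examples, not confirmed API):

    # Hypothetical invocation of the build log hook runner; plugin_manager,
    # spec_file and rebase_spec_file are assumed to exist (compare Example #22).
    changes_made = plugin_manager.build_log_hooks.run(
        spec_file, rebase_spec_file,
        non_interactive=True, force_build_log_hooks=True,
        build_log_hook_blacklist=['replaced_path'])  # hook name is illustrative
    if changes_made:
        logger.info('Build log hooks changed the SPEC file, rebuilding.')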
Example #3
    def run(self, versioneer, package_name, category, versioneer_blacklist=None):
        """Runs the specified versioneer or all versioneers subsequently
        until one of them succeeds.

        Args:
            versioneer (str): Name of a versioneer.
            package_name (str): Name of a package.
            category (str): Package category.
            versioneer_blacklist (list): List of versioneers that will be skipped.

        Returns:
            str: Latest upstream version of a package.

        """
        if versioneer_blacklist is None:
            versioneer_blacklist = []

        if versioneer:
            logger.info("Running '%s' versioneer", versioneer)
            return self.plugins[versioneer].run(package_name)
        # run all versioneers, except those disabled in config, categorized first
        allowed_versioneers = [v for k, v in self.plugins.items() if v and k not in versioneer_blacklist]
        for versioneer in sorted(allowed_versioneers, key=lambda v: not v.CATEGORIES):
            categories = versioneer.CATEGORIES
            if not categories or category in categories:
                logger.info("Running '%s' versioneer", versioneer.name)
                result = versioneer.run(package_name)
                if result:
                    return result
        return None
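A minimal usage sketch, assuming a plugin_manager.versioneers registry like the one above; the versioneer names, package name and category are illustrative:

    # Try a specific versioneer first, then fall back to all remaining ones.
    version = plugin_manager.versioneers.run('anitya', 'foo', 'python')  # names are illustrative
    if not version:
        version = plugin_manager.versioneers.run(None, 'foo', 'python',
                                                 versioneer_blacklist=['pypi'])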
Example #4
    def upload_srpm(cls, session, srpm):
        """Uploads SRPM to a Koji hub.

        Args:
            session (koji.ClientSession): Active Koji session instance.
            srpm (str): Valid path to SRPM.

        Returns:
            str: Remote path to the uploaded SRPM.

        Raises:
            RebaseHelperError: If upload failed.

        """
        def progress(uploaded, total, chunksize, t1, t2):  # pylint: disable=unused-argument
            DownloadHelper.progress(total, uploaded, upload_start)
        suffix = ''.join(random.choice(string.ascii_letters) for _ in range(8))
        path = os.path.join('cli-build', str(time.time()), suffix)
        logger.info('Uploading SRPM')
        try:
            try:
                upload_start = time.time()
                session.uploadWrapper(srpm, path, callback=progress)
            except koji.GenericError as e:
                raise RebaseHelperError('Upload failed: {}'.format(str(e)))
        finally:
            sys.stdout.write('\n')
            sys.stdout.flush()
        return os.path.join(path, os.path.basename(srpm))
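A short sketch of how this helper might be combined with a Koji session, mirroring Example #1; the SRPM path is illustrative:

    session = cls.koji_helper.session_maker()
    remote = cls.koji_helper.upload_srpm(session, '/tmp/foo-1.0-1.src.rpm')  # path is illustrative
    task_id = session.build(remote, cls.target_tag, cls.opts, priority=cls.priority)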
Example #5
    def run(self,
            versioneer,
            package_name,
            category,
            versioneer_blacklist=None):
        """
        Runs specified versioneer or all versioneers subsequently until one of them succeeds.

        :param versioneer: Name of a versioneer
        :param package_name: Name of a package
        :param category: Package category
        :param versioneer_blacklist: List of versioneers that will be skipped
        :return: Latest upstream version of a package
        """
        if versioneer_blacklist is None:
            versioneer_blacklist = []

        if versioneer:
            logger.info("Running '%s' versioneer", versioneer)
            return self.versioneers[versioneer].run(package_name)
        # run all versioneers, except those disabled in config, categorized first
        allowed_versioneers = [
            v for k, v in six.iteritems(self.versioneers)
            if k not in versioneer_blacklist
        ]
        for versioneer in sorted(allowed_versioneers,
                                 key=lambda v: not v.get_categories()):
            categories = versioneer.get_categories()
            if not categories or category in categories:
                logger.info("Running '%s' versioneer", versioneer.get_name())
                result = versioneer.run(package_name)
                if result:
                    return result
        return None
Example #6
    def print_summary(cls, path):

        """
        Function is used for printing summary informations
        :return:
        """

        for key, value in six.iteritems(OutputLogger.get_summary_info()):
            logger.info("%s %s\n", key, value)

        try:
            LoggerHelper.add_file_handler(logger_report, path)
        except (OSError, IOError):
            raise RebaseHelperError("Can not create results file '%s'" % path)

        type_pkgs = ['old', 'new']
        if OutputLogger.get_patches():
            cls.print_patches(OutputLogger.get_patches(), '\nSummary information about patches:')
        for pkg in type_pkgs:
            type_pkg = OutputLogger.get_build(pkg)
            if type_pkg:
                cls.print_rpms(type_pkg, pkg.capitalize())
                cls.print_build_logs(type_pkg, pkg.capitalize())

        cls.print_pkgdiff_tool()
Example #7
    def run_checker(self, results_dir, checker_name, **kwargs):
        """
        Runs a particular checker and returns the results.

        :param results_dir: Path to a directory in which the checker should store the results.
        :type results_dir: str
        :param checker_name: Name of the checker to run. Ideally this should be the name of an existing checker.
        :type checker_name: str
        :return: Results from the checker, or None if no matching checker was found.
        """
        checker = None
        for check_tool in six.itervalues(self.plugin_classes):
            if check_tool.get_category() != kwargs.get('category'):
                continue
            if check_tool.match(checker_name):
                # we found the checker we are looking for
                checker = check_tool
                break

        if checker is None:
            # Appropriate checker not found
            return None

        logger.info("Running tests on packages using '%s'", checker_name)
        return checker.run_check(results_dir, **kwargs)
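A hedged usage sketch; the runner variable, checker name and category value are assumptions:

    # Hypothetical call: run a single checker against a results directory.
    report = checkers_runner.run_checker(results_dir, 'rpmdiff', category='rpm')
    if report is None:
        logger.info('No matching checker was found.')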
Example #8
    def print_summary(cls, path, results):
        """Function is used for printing summary information"""
        if results.get_summary_info():
            for key, value in six.iteritems(results.get_summary_info()):
                logger.info("%s %s\n", key, value)

        try:
            LoggerHelper.add_file_handler(logger_report, path)
        except (OSError, IOError):
            raise RebaseHelperError(
                "Can not create results file '{}'".format(path))

        cls.results_store = results

        cls.print_success_message()
        logger_report.info("All result files are stored in %s",
                           os.path.dirname(path))

        cls.print_changes_patch()

        cls.print_checkers_text_output(results.get_checkers())

        if results.get_patches():
            cls.print_patches(results.get_patches())

        cls.print_message_and_separator("\nRPMS")
        for pkg_version in ['old', 'new']:
            pkg_results = results.get_build(pkg_version)
            if pkg_results:
                cls.print_rpms_and_logs(pkg_results, pkg_version.capitalize())
Example #9
    def _build_srpm(cls, spec, workdir, results_dir, srpm_results_dir, srpm_builder_options):
        """
        Build SRPM using rpmbuild.

        :param spec: abs path to SPEC file inside the rpmbuild/SPECS in workdir.
        :param workdir: abs path to working directory with rpmbuild directory
                        structure, which will be used as HOME dir.
        :param results_dir: abs path to dir where the log should be placed.
        :param srpm_results_dir: path to directory where SRPM will be placed.
        :param srpm_builder_options: list of additional options to rpmbuild.
        :return: abs path to built SRPM.
        """
        logger.info("Building SRPM")
        spec_loc, spec_name = os.path.split(spec)
        output = os.path.join(results_dir, "build.log")

        cmd = ['rpmbuild', '-bs', spec_name]

        if srpm_builder_options is not None:
            cmd.extend(srpm_builder_options)

        ret = ProcessHelper.run_subprocess_cwd_env(cmd,
                                                   cwd=spec_loc,
                                                   env={'HOME': workdir},
                                                   output_file=output)

        build_log_path = os.path.join(srpm_results_dir, 'build.log')

        if ret == 0:
            return PathHelper.find_first_file(workdir, '*.src.rpm')
        # An error occurred, raise an exception
        logfile = build_log_path
        logs = [l for l in PathHelper.find_all_files(results_dir, '*.log')]
        cls.logs = [os.path.join(srpm_results_dir, os.path.basename(l)) for l in logs]
        raise SourcePackageBuildError("Building SRPM failed!", logfile=logfile)
Example #10
 def download_build(cls, client, build_id, destination):
     logger.info('Downloading packages and logs for build %d', build_id)
     try:
         result = client.get_build_details(build_id)
     except copr.client.exceptions.CoprRequestException as e:
         raise RebaseHelperError(
             'Failed to get copr build details for {}: {}'.format(
                 build_id, str(e)))
     rpms = []
     logs = []
     for _, url in six.iteritems(result.data['results_by_chroot']):
         url = url if url.endswith('/') else url + '/'
         d = pyquery.PyQuery(url)
         d.make_links_absolute()
         for a in d('a[href$=\'.rpm\'], a[href$=\'.log.gz\']'):
             fn = os.path.basename(
                 urllib.parse.urlsplit(a.attrib['href']).path)
             dest = os.path.join(destination, fn)
             if fn.endswith('.src.rpm'):
                 # skip source RPM
                 continue
             DownloadHelper.download_file(a.attrib['href'], dest)
             if fn.endswith('.rpm'):
                 rpms.append(dest)
             elif fn.endswith('.log.gz'):
                 extracted = dest.replace('.log.gz', '.log')
                 try:
                     with gzip.open(dest, 'rb') as archive:
                         with open(extracted, 'wb') as f:
                             f.write(archive.read())
                 except (IOError, EOFError):
                     raise RebaseHelperError(
                         'Failed to extract {}'.format(dest))
                 logs.append(extracted)
     return rpms, logs
Example #11
    def download_file(url, destination_name):
        """
        Method for downloading file using pycurl

        :param url: URL from which to download the file
        :param destination_name: path where to store downloaded file
        :return: None
        """
        if os.path.exists(destination_name):
            return
        with open(destination_name, 'wb') as f:
            curl = pycurl.Curl()
            curl.setopt(pycurl.URL, url)
            curl.setopt(pycurl.CONNECTTIMEOUT, 30)
            curl.setopt(pycurl.FOLLOWLOCATION, 1)
            curl.setopt(pycurl.MAXREDIRS, 5)
            curl.setopt(pycurl.TIMEOUT, 300)
            curl.setopt(pycurl.WRITEDATA, f)
            try:
                logger.info('Downloading sources from URL %s', url)
                curl.perform()
            except pycurl.error as error:
                curl.close()
                raise ReferenceError("Downloading '{}' failed with error '{}'.".format(url, error))

            else:
                curl.close()
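A minimal usage sketch of the pycurl-based helper above; the URL and destination path are illustrative:

    DownloadHelper.download_file('https://example.com/foo-1.0.tar.gz',
                                 '/tmp/foo-1.0.tar.gz')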
Example #12
    def run(self):
        sources = self.prepare_sources()

        if not self.conf.build_only and not self.conf.comparepkgs:
            self.patch_sources(sources)

        if not self.conf.patch_only:
            if not self.conf.comparepkgs:
                # check build dependencies for rpmbuild
                if self.conf.buildtool == 'rpmbuild':
                    Application.check_build_requires(self.spec_file)
                # Build packages
                build = self.build_packages()
                # Perform checks
            else:
                build = self.get_rpm_packages(self.conf.comparepkgs)
                # If the directory does not contain any RPM packages,
                # there is nothing to compare, so return 1
                if not build:
                    return 1
            if build:
                self.pkgdiff_packages()

        # print summary information
        self.print_summary()

        if not self.conf.keep_workspace:
            self._delete_workspace_dir()

        if self.debug_log_file:
            logger.info("Detailed debug log is located in '%s'", self.debug_log_file)
Example #13
    def download_file(url, destination_path, timeout=10, blocksize=8192):
        """
        Method for downloading file from HTTP, HTTPS and FTP URL.

        :param url: URL from which to download the file
        :param destination_path: path where to store downloaded file
        :param timeout: timeout in seconds for blocking actions like connecting, etc.
        :param blocksize: size in Bytes of blocks used for downloading the file and reporting progress
        :return: None
        """
        try:
            response = urllib.request.urlopen(url, timeout=timeout)
            file_size = int(response.info().get('Content-Length', -1))

            # file exists, check the size
            if os.path.exists(destination_path):
                if file_size < 0 or file_size != os.path.getsize(
                        destination_path):
                    logger.debug(
                        "The destination file '%s' exists, but sizes don't match! Removing it.",
                        destination_path)
                    os.remove(destination_path)
                else:
                    logger.debug(
                        "The destination file '%s' exists, and the size is correct! Skipping download.",
                        destination_path)
                    return
            try:
                with open(destination_path, 'wb') as local_file:
                    logger.info('Downloading file from URL %s', url)
                    download_start = time.time()
                    downloaded = 0

                    # report progress
                    DownloadHelper.progress(file_size, downloaded,
                                            download_start)

                    # do the actual download
                    while True:
                        buffer = response.read(blocksize)

                        # no more data to read
                        if not buffer:
                            break

                        downloaded += len(buffer)
                        local_file.write(buffer)

                        # report progress
                        DownloadHelper.progress(file_size, downloaded,
                                                download_start)

                    sys.stdout.write('\n')
                    sys.stdout.flush()
            except KeyboardInterrupt as e:
                os.remove(destination_path)
                raise e

        except urllib.error.URLError as e:
            raise DownloadError(str(e))
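The real DownloadHelper.progress implementation is not shown in this example; a minimal stand-in compatible with the calls above (its signature and output format are assumptions) could look like this:

    # Illustrative stand-in for DownloadHelper.progress: prints a percentage
    # when the total size is known, raw byte counts otherwise.
    import sys
    import time

    def progress(total, downloaded, start):
        elapsed = time.time() - start
        if total > 0:
            sys.stdout.write('\r{:3.0f}% ({} B) in {:.1f} s'.format(
                downloaded * 100.0 / total, downloaded, elapsed))
        else:
            sys.stdout.write('\r{} B in {:.1f} s'.format(downloaded, elapsed))
        sys.stdout.flush()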
Example #14
    def _build_rpm(cls, srpm, workdir, results_dir, rpm_results_dir, builder_options=None):
        """
        Build RPM using rpmbuild.

        :param srpm: abs path to SRPM
        :param workdir: abs path to working directory with rpmbuild directory
                        structure, which will be used as HOME dir.
        :param results_dir: abs path to dir where the log should be placed.
        :param rpm_results_dir: path to directory where RPMs will be placed.
        :param builder_options: list of additional options for rpmbuild.
        :return: abs paths to built RPMs.
        """
        logger.info("Building RPMs")
        output = os.path.join(results_dir, "build.log")

        cmd = [cls.CMD, '--rebuild', srpm]
        if builder_options is not None:
            cmd.extend(builder_options)
        ret = ProcessHelper.run_subprocess_cwd_env(cmd,
                                                   env={'HOME': workdir},
                                                   output_file=output)

        build_log_path = os.path.join(rpm_results_dir, 'build.log')

        if ret == 0:
            return [f for f in PathHelper.find_all_files(workdir, '*.rpm') if not f.endswith('.src.rpm')]
        # An error occurred, raise an exception
        logfile = build_log_path
        cls.logs.extend([l for l in PathHelper.find_all_files(rpm_results_dir, '*.log')])
        raise BinaryPackageBuildError("Building RPMs failed!", results_dir, logfile=logfile)
Example #15
    def _do_build_srpm(cls, spec, workdir, results_dir):
        """
        Build SRPM using rpmbuild.

        :param spec: abs path to SPEC file inside the rpmbuild/SPECS in workdir.
        :param workdir: abs path to working directory with rpmbuild directory
                        structure, which will be used as HOME dir.
        :param results_dir: abs path to dir where the log should be placed.
        :return: If build process ends successfully returns abs path
                 to built SRPM, otherwise 'None'.
        """
        logger.info("Building SRPM")
        spec_loc, spec_name = os.path.split(spec)
        output = os.path.join(results_dir, "build.log")

        cmd = ['rpmbuild', '-bs', spec_name]
        ret = ProcessHelper.run_subprocess_cwd_env(cmd,
                                                   cwd=spec_loc,
                                                   env={'HOME': workdir},
                                                   output=output)

        if ret != 0:
            return None
        else:
            return PathHelper.find_first_file(workdir, '*.src.rpm')
Example #16
    def _correct_missing_files(cls, rebase_spec_file, files):
        """Adds files found in buildroot which are missing in %files
        sections in the SPEC file. Each file is added to a %files section
        with the closest matching path.

        """
        macros = [
            m for m in rebase_spec_file.macros
            if m['name'] in MacroHelper.MACROS_WHITELIST
        ]
        macros = MacroHelper.expand_macros(macros)
        # ensure maximal greediness
        macros.sort(key=lambda k: len(k['value']), reverse=True)

        result = collections.defaultdict(lambda: collections.defaultdict(list))
        for file in files:
            section = cls._get_best_matching_files_section(
                rebase_spec_file, file)
            substituted_path = MacroHelper.substitute_path_with_macros(
                file, macros)
            try:
                index = [
                    i for i, l in enumerate(
                        rebase_spec_file.spec_content.sections[section]) if l
                ][-1] + 1
            except IndexError:
                # section is empty
                index = 0
            rebase_spec_file.spec_content.sections[section].insert(
                index, substituted_path)
            result['added'][section].append(substituted_path)
            logger.info("Added %s to '%s' section", substituted_path, section)

        return result
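The "maximal greediness" sort above ensures that longer macro values are substituted before their shorter prefixes; a stand-alone illustration (independent of the real MacroHelper API) follows:

    # Stand-alone illustration of greedy, longest-value-first macro substitution.
    macros = [{'name': '_prefix', 'value': '/usr'},
              {'name': '_bindir', 'value': '/usr/bin'}]
    macros.sort(key=lambda k: len(k['value']), reverse=True)

    def substitute_path_with_macros(path, macros):
        for m in macros:
            if path.startswith(m['value']):
                return path.replace(m['value'], '%{{{}}}'.format(m['name']), 1)
        return path

    print(substitute_path_with_macros('/usr/bin/foo', macros))  # %{_bindir}/foo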
Example #17
    def build(cls, spec, results_dir, srpm, **kwargs):
        """
        Builds the RPMs using rpmbuild

        :param spec: SpecFile object
        :param results_dir: absolute path to DIR where results should be stored
        :param srpm: absolute path to SRPM
        :return: dict with:
                 'rpm' -> list with absolute paths to RPMs
                 'logs' -> list with absolute paths to build_logs
        """
        rpm_results_dir = os.path.join(results_dir, "RPM")
        sources = spec.get_sources()
        patches = [p.get_path() for p in spec.get_patches()]
        with RpmbuildTemporaryEnvironment(sources, patches, spec.get_path(), rpm_results_dir) as tmp_env:
            env = tmp_env.env()
            tmp_dir = tmp_env.path()
            tmp_results_dir = env.get(RpmbuildTemporaryEnvironment.TEMPDIR_RESULTS)
            rpms = cls._build_rpm(srpm, tmp_dir, tmp_results_dir, rpm_results_dir,
                                  builder_options=cls.get_builder_options(**kwargs))

        logger.info("Building RPMs finished successfully")

        # RPMs paths in results_dir
        rpms = [os.path.join(rpm_results_dir, os.path.basename(f)) for f in rpms]
        logger.debug("Successfully built RPMs: '%s'", str(rpms))

        # gather logs
        cls.logs.extend([l for l in PathHelper.find_all_files(rpm_results_dir, '*.log')])
        logger.debug("logs: '%s'", str(cls.logs))

        return dict(rpm=rpms, logs=cls.logs)
Example #18
    def _build_rpm(cls,
                   srpm,
                   results_dir,
                   root=None,
                   arch=None,
                   builder_options=None):
        """Build RPM using mock."""
        logger.info("Building RPMs")
        output = os.path.join(results_dir, "mock_output.log")

        cmd = [cls.CMD, '--rebuild', srpm, '--resultdir', results_dir]
        if root is not None:
            cmd.extend(['--root', root])
        if arch is not None:
            cmd.extend(['--arch', arch])
        if builder_options is not None:
            cmd.extend(builder_options)

        ret = ProcessHelper.run_subprocess(cmd, output=output)

        if ret != 0:
            return None
        else:
            return [
                f for f in PathHelper.find_all_files(results_dir, '*.rpm')
                if not f.endswith('.src.rpm')
            ]
Example #19
    def _build_rpm(cls, srpm, workdir, results_dir, builder_options=None):
        """
        Build RPM using rpmbuild.

        :param srpm: abs path to SRPM
        :param workdir: abs path to working directory with rpmbuild directory
                        structure, which will be used as HOME dir.
        :param results_dir: abs path to dir where the log should be placed.
        :return: If build process ends successfully returns list of abs paths
                 to built RPMs, otherwise 'None'.
        """
        logger.info("Building RPMs")
        output = os.path.join(results_dir, "build.log")

        cmd = [cls.CMD, '--rebuild', srpm]
        if builder_options is not None:
            cmd.extend(builder_options)
        ret = ProcessHelper.run_subprocess_cwd_env(cmd,
                                                   env={'HOME': workdir},
                                                   output=output)

        if ret != 0:
            return None
        else:
            return [
                f for f in PathHelper.find_all_files(workdir, '*.rpm')
                if not f.endswith('.src.rpm')
            ]
Example #20
    def _do_build_srpm(cls, spec, workdir, results_dir):
        """
        Build SRPM using rpmbuild.

        :param spec: abs path to SPEC file inside the rpmbuild/SPECS in workdir.
        :param workdir: abs path to working directory with rpmbuild directory
                        structure, which will be used as HOME dir.
        :param results_dir: abs path to dir where the log should be placed.
        :return: If build process ends successfully returns abs path
                 to built SRPM, otherwise 'None'.
        """
        logger.info("Building SRPM")
        spec_loc, spec_name = os.path.split(spec)
        output = os.path.join(results_dir, "build.log")

        cmd = ['rpmbuild', '-bs', spec_name]
        ret = ProcessHelper.run_subprocess_cwd_env(cmd,
                                                   cwd=spec_loc,
                                                   env={'HOME': workdir},
                                                   output=output)

        if ret != 0:
            return None
        else:
            return PathHelper.find_first_file(workdir, '*.src.rpm')
Example #21
    def upload_srpm(cls, session, srpm):
        """Uploads SRPM to a Koji hub.

        Args:
            session (koji.ClientSession): Active Koji session instance.
            srpm (str): Valid path to SRPM.

        Returns:
            str: Remote path to the uploaded SRPM.

        Raises:
            RebaseHelperError: If upload failed.

        """
        def progress(uploaded, total, chunksize, t1, t2):  # pylint: disable=unused-argument
            DownloadHelper.progress(total, uploaded, upload_start)

        suffix = ''.join(
            [random.choice(string.ascii_letters) for _ in range(8)])
        path = os.path.join('cli-build', six.text_type(time.time()), suffix)
        logger.info('Uploading SRPM')
        try:
            try:
                upload_start = time.time()
                session.uploadWrapper(srpm, path, callback=progress)
            except koji.GenericError as e:
                raise RebaseHelperError('Upload failed: {}'.format(
                    six.text_type(e)))
        finally:
            sys.stdout.write('\n')
            sys.stdout.flush()
        return os.path.join(path, os.path.basename(srpm))
Example #22
 def prepare_next_run(self, results_dir):
     # Running build log hooks only makes sense after a failed build
     # of new RPM packages. The folder results_dir/new-build/RPM
     # doesn't exist unless the build of new RPM packages has been run.
     changes_made = False
     if os.path.exists(os.path.join(results_dir, 'new-build', 'RPM')):
         changes_made = plugin_manager.build_log_hooks.run(self.spec_file, self.rebase_spec_file, **self.kwargs)
     # Save current rebase spec file content
     self.rebase_spec_file.save()
     if not self.conf.non_interactive and \
             InputHelper.get_message('Do you want to try it one more time'):
         logger.info('Now it is time to make changes to %s if necessary.', self.rebase_spec_file.path)
     elif self.conf.non_interactive and changes_made:
         logger.info('Build log hooks made some changes to the SPEC file, starting the build process again.')
     else:
         return False
     if not self.conf.non_interactive and not \
             InputHelper.get_message('Do you want to continue with the rebuild now'):
         return False
     # Update rebase spec file content after potential manual modifications
     self.rebase_spec_file._read_spec_content()  # pylint: disable=protected-access
     self.rebase_spec_file._update_data()  # pylint: disable=protected-access
     # clear current version output directories
     if os.path.exists(os.path.join(results_dir, 'old-build')):
         shutil.rmtree(os.path.join(results_dir, 'old-build'))
     if os.path.exists(os.path.join(results_dir, 'new-build')):
         shutil.rmtree(os.path.join(results_dir, 'new-build'))
     return True
Example #23
    def print_summary(cls, path, results):
        """Function is used for printing summary information"""
        if results.get_summary_info():
            for key, value in results.get_summary_info().items():
                logger.info("%s %s\n", key, value)

        LoggerHelper.add_file_handler(logger_report, path)

        cls.results_store = results

        cls.print_success_message()
        logger_report.info("All result files are stored in %s", os.path.dirname(path))

        cls.print_changes_patch()

        cls.print_checkers_text_output(results.get_checkers())

        cls.print_build_log_hooks_result(results.get_build_log_hooks())

        if results.get_patches():
            cls.print_patches(results.get_patches())

        cls.print_message_and_separator("\nRPMS")
        for pkg_version in ['old', 'new']:
            pkg_results = results.get_build(pkg_version)
            if pkg_results:
                cls.print_rpms_and_logs(pkg_results, pkg_version.capitalize())
Example #24
    def _build_rpm(cls, srpm, workdir, results_dir, builder_options=None):
        """
        Build RPM using rpmbuild.

        :param srpm: abs path to SRPM
        :param workdir: abs path to working directory with rpmbuild directory
                        structure, which will be used as HOME dir.
        :param results_dir: abs path to dir where the log should be placed.
        :return: If build process ends successfully returns list of abs paths
                 to built RPMs, otherwise 'None'.
        """
        logger.info("Building RPMs")
        output = os.path.join(results_dir, "build.log")

        cmd = [cls.CMD, '--rebuild', srpm]
        if builder_options is not None:
            cmd.extend(builder_options)
        ret = ProcessHelper.run_subprocess_cwd_env(cmd,
                                                   env={'HOME': workdir},
                                                   output=output)

        if ret != 0:
            return None
        else:
            return [f for f in PathHelper.find_all_files(workdir, '*.rpm') if not f.endswith('.src.rpm')]
Example #25
    def build(cls, spec, results_dir, **kwargs):
        """
        Build SRPM with chosen SRPM Build Tool

        :param spec: SpecFile object
        :param results_dir: absolute path to DIR where results should be stored
        :return: absolute path to SRPM, list with absolute paths to logs
        """
        srpm_results_dir = os.path.join(results_dir, "SRPM")
        sources = spec.get_sources()
        patches = [p.get_path() for p in spec.get_patches()]
        with MockTemporaryEnvironment(sources, patches, spec.get_path(),
                                      srpm_results_dir) as tmp_env:
            srpm_builder_options = cls.get_srpm_builder_options(**kwargs)

            env = tmp_env.env()
            tmp_dir = tmp_env.path()
            tmp_spec = env.get(MockTemporaryEnvironment.TEMPDIR_SPEC)
            tmp_results_dir = env.get(
                MockTemporaryEnvironment.TEMPDIR_RESULTS)

            srpm = cls._build_srpm(tmp_spec, tmp_dir, tmp_results_dir, srpm_results_dir,
                                   srpm_builder_options=srpm_builder_options)

        logger.info("Building SRPM finished successfully")

        # srpm path in results_dir
        srpm = os.path.join(srpm_results_dir, os.path.basename(srpm))
        logger.verbose("Successfully built SRPM: '%s'", str(srpm))
        # gather logs
        logs = [l for l in PathHelper.find_all_files(srpm_results_dir, '*.log')]
        logger.verbose("logs: '%s'", str(logs))

        return dict(srpm=srpm, logs=logs)
Example #26
    def print_summary(cls, path, results=results_store):
        """
        Function is used for printing summary informations

        :return: 
        """
        if results.get_summary_info():
            for key, value in six.iteritems(results.get_summary_info()):
                logger.info("%s %s\n", key, value)

        try:
            LoggerHelper.add_file_handler(logger_report, path)
        except (OSError, IOError):
            raise RebaseHelperError("Can not create results file '%s'" % path)

        type_pkgs = ['old', 'new']
        if results.get_patches():
            cls.print_patches(results.get_patches(),
                              '\nSummary information about patches:')
        for pkg in type_pkgs:
            type_pkg = results.get_build(pkg)
            if type_pkg:
                cls.print_rpms(type_pkg, pkg.capitalize())
                cls.print_build_logs(type_pkg, pkg.capitalize())

        cls.print_pkgdiff_tool(results.get_checkers())
Example #27
    def run_patch(cls, old_dir, new_dir, rest_sources, git_helper, patches, prep, **kwargs):
        """
        The function can be used for patching one
        directory against another
        """
        cls.kwargs = kwargs
        cls.old_sources = old_dir
        cls.new_sources = new_dir
        cls.output_data = []
        cls.cont = cls.kwargs['continue']
        cls.rest_sources = rest_sources
        cls.git_helper = git_helper
        cls.patches = patches
        cls.non_interactive = kwargs.get('non_interactive')
        if not os.path.isdir(os.path.join(cls.old_sources, '.git')):
            cls.init_git(old_dir)
            cls.init_git(new_dir)
            cls.source_dir = cls.old_sources
            prep_path = cls.create_prep_script(prep)
            if not cls.patch_sources_by_prep_script:
                cls.apply_old_patches()
            if cls.exec_prep_script or cls.patch_sources_by_prep_script:
                logger.info('Executing prep script')
                cls.call_prep_script(prep_path)
            cls.cont = False

        return cls._git_rebase()
Example #28
    def _build_rpm(cls, srpm, workdir, results_dir, rpm_results_dir, builder_options=None):
        """
        Build RPM using rpmbuild.

        :param srpm: abs path to SRPM
        :param workdir: abs path to working directory with rpmbuild directory
                        structure, which will be used as HOME dir.
        :param results_dir: abs path to dir where the log should be placed.
        :param rpm_results_dir: path to directory where RPMs will be placed.
        :param builder_options: list of additional options for rpmbuild.
        :return: abs paths to built RPMs.
        """
        logger.info("Building RPMs")
        output = os.path.join(results_dir, "build.log")

        cmd = [cls.CMD, '--rebuild', srpm]
        if builder_options is not None:
            cmd.extend(builder_options)
        ret = ProcessHelper.run_subprocess_cwd_env(cmd,
                                                   env={'HOME': workdir},
                                                   output_file=output)

        build_log_path = os.path.join(rpm_results_dir, 'build.log')

        if ret == 0:
            return [f for f in PathHelper.find_all_files(workdir, '*.rpm') if not f.endswith('.src.rpm')]
        # An error occurred, raise an exception
        logfile = build_log_path
        logs = [l for l in PathHelper.find_all_files(results_dir, '*.log')]
        cls.logs.extend(os.path.join(rpm_results_dir, os.path.basename(l)) for l in logs)
        raise BinaryPackageBuildError("Building RPMs failed!", results_dir, logfile=logfile)
Example #29
    def build(cls, spec, results_dir, srpm, **kwargs):
        """
        Builds the RPMs using rpmbuild

        :param spec: SpecFile object
        :param results_dir: absolute path to DIR where results should be stored
        :param srpm: absolute path to SRPM
        :return: dict with:
                 'rpm' -> list with absolute paths to RPMs
                 'logs' -> list with absolute paths to build_logs
        """
        cls.logs = []
        rpm_results_dir = os.path.join(results_dir, "RPM")
        sources = spec.get_sources()
        patches = [p.get_path() for p in spec.get_patches()]
        with RpmbuildTemporaryEnvironment(sources, patches, spec.get_path(), rpm_results_dir) as tmp_env:
            env = tmp_env.env()
            tmp_dir = tmp_env.path()
            tmp_results_dir = env.get(RpmbuildTemporaryEnvironment.TEMPDIR_RESULTS)
            rpms = cls._build_rpm(srpm, tmp_dir, tmp_results_dir, rpm_results_dir,
                                  builder_options=cls.get_builder_options(**kwargs))

        logger.info("Building RPMs finished successfully")

        # RPMs paths in results_dir
        rpms = [os.path.join(rpm_results_dir, os.path.basename(f)) for f in rpms]
        logger.verbose("Successfully built RPMs: '%s'", str(rpms))

        # gather logs
        cls.logs.extend(l for l in PathHelper.find_all_files(rpm_results_dir, '*.log'))
        logger.verbose("logs: '%s'", str(cls.logs))

        return dict(rpm=rpms, logs=cls.logs)
Example #30
    def fill_dictionary(cls, result_dir):
        """
        Parsed files.xml and symbols.xml and fill dictionary
        :return:
        """
        XML_FILES = ['files.xml', 'symbols.xml']
        results_dict = {}

        for tag in settings.CHECKER_TAGS:
            results_dict[tag] = []
        for file_name in [os.path.join(result_dir, x) for x in XML_FILES]:
            logger.info('Processing %s file.', file_name)
            try:
                with open(file_name, "r") as f:
                    lines = f.readlines()
                    lines.insert(0, '<pkgdiff>')
                    lines.append('</pkgdiff>')
                    pkgdiff_tree = ElementTree.fromstringlist(lines)
                    for tag in settings.CHECKER_TAGS:
                        for pkgdiff in pkgdiff_tree.findall('.//' + tag):
                            results_dict[tag].extend([x.strip() for x in pkgdiff.text.strip().split('\n')])
            except IOError:
                continue

        return results_dict
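The wrapping of the report fragments in a synthetic <pkgdiff> root element can be shown in isolation; the tag name 'changed_files' is illustrative:

    # Stand-alone illustration of the <pkgdiff> wrapping trick used above.
    from xml.etree import ElementTree

    lines = ['<changed_files>\n', 'a.txt\n', 'b.txt\n', '</changed_files>\n']
    lines.insert(0, '<pkgdiff>')
    lines.append('</pkgdiff>')
    tree = ElementTree.fromstringlist(lines)
    for node in tree.findall('.//changed_files'):
        print([x.strip() for x in node.text.strip().split('\n')])  # ['a.txt', 'b.txt']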
Example #31
    def print_summary(cls, path):

        """
        Function is used for printing summary informations
        :return:
        """

        # First of all we would like to print all
        # summary information
        OutputLogger.set_info_text("Summary output is also available in log:", path)
        logger.info('\n')
        for key, value in six.iteritems(OutputLogger.get_summary_info()):
            logger.info("%s %s\n", key, value)

        try:
            LoggerHelper.add_file_handler(logger_output, path)
        except (OSError, IOError):
            raise RebaseHelperError("Cannot create results file '%s'" % path)

        type_pkgs = ['old', 'new']
        cls.print_patches(OutputLogger.get_patches(), '\nSummary information about patches:')
        for pkg in type_pkgs:
            type_pkg = OutputLogger.get_build(pkg)
            if type_pkg:
                cls.print_rpms(type_pkg, pkg.capitalize())
                cls.print_build_logs(type_pkg, pkg.capitalize())

        cls.print_pkgdiff_tool()
Example #32
    def build(cls, spec, results_dir, **kwargs):
        """
        Build SRPM with chosen SRPM Build Tool

        :param spec: SpecFile object
        :param results_dir: absolute path to DIR where results should be stored
        :return: absolute path to SRPM, list with absolute paths to logs
        """
        srpm_results_dir = os.path.join(results_dir, "SRPM")
        sources = spec.get_sources()
        patches = [p.get_path() for p in spec.get_patches()]
        with RpmbuildTemporaryEnvironment(sources, patches, spec.get_path(),
                                          srpm_results_dir) as tmp_env:
            srpm_builder_options = cls.get_srpm_builder_options(**kwargs)

            env = tmp_env.env()
            tmp_dir = tmp_env.path()
            tmp_spec = env.get(RpmbuildTemporaryEnvironment.TEMPDIR_SPEC)
            tmp_results_dir = env.get(
                RpmbuildTemporaryEnvironment.TEMPDIR_RESULTS)

            srpm = cls._build_srpm(tmp_spec, tmp_dir, tmp_results_dir, srpm_results_dir,
                                   srpm_builder_options=srpm_builder_options)

        logger.info("Building SRPM finished successfully")

        # srpm path in results_dir
        srpm = os.path.join(srpm_results_dir, os.path.basename(srpm))
        logger.verbose("Successfully built SRPM: '%s'", str(srpm))
        # gather logs
        logs = [l for l in PathHelper.find_all_files(srpm_results_dir, '*.log')]
        logger.verbose("logs: '%s'", str(logs))

        return dict(srpm=srpm, logs=logs)
Example #33
    def _build_srpm(cls, spec, sources, patches, results_dir):
        """
        Builds the SRPM using rpmbuild

        :param spec: absolute path to the SPEC file.
        :param sources: list with absolute paths to SOURCES
        :param patches: list with absolute paths to PATCHES
        :param results_dir: absolute path to DIR where results should be stored
        :return: absolute path to SRPM, list with absolute paths to logs
        """
        # build SRPM
        srpm_results_dir = os.path.join(results_dir, "SRPM")
        with RpmbuildTemporaryEnvironment(sources, patches, spec,
                                          srpm_results_dir) as tmp_env:
            env = tmp_env.env()
            tmp_dir = tmp_env.path()
            tmp_spec = env.get(RpmbuildTemporaryEnvironment.TEMPDIR_SPEC)
            tmp_results_dir = env.get(
                RpmbuildTemporaryEnvironment.TEMPDIR_RESULTS)
            srpm = cls._do_build_srpm(tmp_spec, tmp_dir, tmp_results_dir)

        if srpm is None:
            raise SourcePackageBuildError("Building SRPM failed!")
        else:
            logger.info("Building SRPM finished successfully")

        # srpm path in results_dir
        srpm = os.path.join(srpm_results_dir, os.path.basename(srpm))
        logger.debug("Successfully built SRPM: '%s'", str(srpm))
        # gather logs
        logs = [l for l in PathHelper.find_all_files(srpm_results_dir, '*.log')]
        logger.debug("logs: '%s'", str(logs))

        return srpm, logs
Example #34
 def download_build(cls, client, build_id, destination):
     logger.info('Downloading packages and logs for build %d', build_id)
     try:
         result = client.get_build_details(build_id)
     except copr.client.exceptions.CoprRequestException:
         raise RebaseHelperError(
             'Failed to get copr build details for {}'.format(build_id))
     rpms = []
     logs = []
     for _, url in six.iteritems(result.data['results_by_chroot']):
         url = url if url.endswith('/') else url + '/'
         d = pyquery.PyQuery(url, opener=lambda x: urllib.request.urlopen(x))
         d.make_links_absolute()
         for a in d('a[href$=\'.rpm\'], a[href$=\'.log.gz\']'):
             fn = os.path.basename(urllib.parse.urlsplit(a.attrib['href']).path)
             dest = os.path.join(destination, fn)
             if fn.endswith('.src.rpm'):
                 # skip source RPM
                 continue
             DownloadHelper.download_file(a.attrib['href'], dest)
             if fn.endswith('.rpm'):
                 rpms.append(dest)
             elif fn.endswith('.log.gz'):
                 extracted = dest.replace('.log.gz', '.log')
                 try:
                     with gzip.open(dest, 'rb') as archive:
                         with open(extracted, 'wb') as f:
                             f.write(archive.read())
                 except (IOError, EOFError):
                     raise RebaseHelperError(
                         'Failed to extract {}'.format(dest))
                 logs.append(extracted)
     return rpms, logs
Example #35
 def run(cls, spec_file, rebase_spec_file, **kwargs):
     # find non-existent local sources
     sources = [idx for idx, src in enumerate(rebase_spec_file.sources)
                if not urllib.parse.urlparse(src).scheme and not os.path.isfile(src)]
     for idx in sources:
         if spec_file.sources[idx] == rebase_spec_file.sources[idx]:
             # skip sources that stayed unchanged
             continue
         source = rebase_spec_file.sources[idx]
         logger.info("Found non-existent source '%s'", source)
         source_re = re.compile(r'^Source0?:' if idx == 0 else r'^Source{}:'.format(idx))
         comment_re = re.compile(r'^#')
         comments = None
         # find matching Source line in the SPEC file
         preamble = rebase_spec_file.spec_content.section('%package')
         for i in range(len(preamble)):
             if source_re.match(preamble[i]):
                 # get all comments above this line
                 for j in range(i - 1, 0, -1):
                     if not comment_re.match(preamble[j]):
                         comments = preamble[j+1:i]
                         break
                 break
         if not comments:
             # nothing to do
             continue
         # update data so that RPM macros are populated correctly
         rebase_spec_file._update_data()  # pylint: disable=protected-access
         instructions = cls._get_instructions(comments,
                                              spec_file.get_version(),
                                              rebase_spec_file.get_version())
         logfile = os.path.join(kwargs['workspace_dir'], '{}.log'.format(source))
         cls._build_source_from_instructions(instructions, source, logfile)
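The Source-line regular expression used above can be exercised on its own; the SPEC lines are illustrative:

    # Illustrative check of the Source-line matching for index 0 and higher indices.
    import re

    source_re = re.compile(r'^Source0?:')                      # matches 'Source:' and 'Source0:'
    print(bool(source_re.match('Source: foo-1.0.tar.gz')))     # True
    print(bool(source_re.match('Source0: foo-1.0.tar.gz')))    # True
    print(bool(re.compile(r'^Source{}:'.format(2)).match('Source2: extra.tar.gz')))  # True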
Example #36
    def _correct_missing_files(cls, rebase_spec_file, files):
        """Adds files found in buildroot which are missing in %files
        sections in the SPEC file. Each file is added to a %files section
        with the closest matching path.

        """
        macros = [m for m in rebase_spec_file.macros if m['name'] in MacroHelper.MACROS_WHITELIST]
        macros = MacroHelper.expand_macros(macros)
        # ensure maximal greediness
        macros.sort(key=lambda k: len(k['value']), reverse=True)

        result = collections.defaultdict(lambda: collections.defaultdict(list))
        for file in files:
            section = cls._get_best_matching_files_section(rebase_spec_file, file)
            substituted_path = MacroHelper.substitute_path_with_macros(file, macros)
            try:
                index = [i for i, l in enumerate(rebase_spec_file.spec_content.section(section)) if l][-1] + 1
            except IndexError:
                # section is empty
                index = 0
            rebase_spec_file.spec_content.section(section).insert(index, substituted_path)
            result['added'][section].append(substituted_path)
            logger.info("Added %s to '%s' section", substituted_path, section)

        return result
Example #37
 def run(cls, spec_file, rebase_spec_file, **kwargs):
     # find non-existent local sources
     sources = [idx for idx, src in enumerate(rebase_spec_file.sources)
                if not urllib.parse.urlparse(src).scheme and not os.path.isfile(src)]
     for idx in sources:
         if spec_file.sources[idx] == rebase_spec_file.sources[idx]:
             # skip sources that stayed unchanged
             continue
         source = rebase_spec_file.sources[idx]
         logger.info("Found non-existent source '%s'", source)
         source_re = re.compile(r'^Source0?:' if idx == 0 else r'^Source{}:'.format(idx))
         comment_re = re.compile(r'^#')
         comments = None
         # find matching Source line in the SPEC file
         for i in range(len(rebase_spec_file.spec_content)):
             if source_re.match(rebase_spec_file.spec_content[i]):
                 # get all comments above this line
                 for j in range(i - 1, 0, -1):
                     if not comment_re.match(rebase_spec_file.spec_content[j]):
                         comments = rebase_spec_file.spec_content[j+1:i]
                         break
                 break
         # update data so that RPM macros are populated correctly
         rebase_spec_file._update_data()
         instructions = cls._get_instructions(comments,
                                              spec_file.get_version(),
                                              rebase_spec_file.get_version())
         logfile = os.path.join(kwargs['workspace_dir'], '{}.log'.format(source))
         cls._build_source_from_instructions(instructions, source, logfile)
Example #38
    def run_patch(cls, old_dir, new_dir, rest_sources, patches, prep, **kwargs):
        """
        The function can be used for patching one
        directory against another
        """
        cls.kwargs = kwargs
        cls.old_sources = old_dir
        cls.new_sources = new_dir
        cls.output_data = None
        cls.cont = cls.kwargs['continue']
        cls.rest_sources = rest_sources
        cls.patches = patches
        cls.non_interactive = kwargs.get('non_interactive')
        if not os.path.isdir(os.path.join(cls.old_sources, '.git')):
            cls.old_repo = cls.init_git(old_dir)
            cls.new_repo = cls.init_git(new_dir)
            cls.source_dir = cls.old_sources
            prep_path = cls.create_prep_script(prep)
            if not cls.patch_sources_by_prep_script:
                cls.apply_old_patches()
            if cls.exec_prep_script or cls.patch_sources_by_prep_script:
                logger.info('Executing prep script')
                cls.call_prep_script(prep_path)
            cls.cont = False
        else:
            cls.old_repo = git.Repo(old_dir)
            cls.new_repo = git.Repo(new_dir)

        return cls._git_rebase()
Example #39
    def run(self, spec_file, rebase_spec_file, non_interactive, force_build_log_hooks, **kwargs):
        """Runs all non-blacklisted build log hooks.

        Args:
            spec_file (rebasehelper.specfile.SpecFile): Original SpecFile object.
            rebase_spec_file (rebasehelper.specfile.SpecFile): Rebased SpecFile object.
            non_interactive (bool): Whether rebase-helper is in non-interactive mode.
            force_build_log_hooks (bool): Whether to run the hooks even in
                non-interactive mode.
            kwargs (dict): Keyword arguments from instance of Application.

        Returns:
            bool: Whether build log hooks made some changes to the SPEC file.

        """
        changes_made = False
        if not non_interactive or force_build_log_hooks:
            blacklist = kwargs.get('build_log_hook_blacklist', [])
            for name, build_log_hook in six.iteritems(self.build_log_hooks):
                if not build_log_hook or name in blacklist:
                    continue
                categories = build_log_hook.CATEGORIES
                if not categories or spec_file.category in categories:
                    logger.info('Running %s build log hook.', name)
                    result, rerun = build_log_hook.run(spec_file, rebase_spec_file, **kwargs)
                    result = build_log_hook.merge_two_results(results_store.get_build_log_hooks().get(name, {}), result)
                    results_store.set_build_log_hooks_result(name, result)
                    if rerun:
                        changes_made = True
        return changes_made
Example #40
    def _get_initial_sources_list(self):
        """Function returns all sources mentioned in SPEC file"""
        # get all regular sources
        sources = []
        tar_sources = []
        sources_list = [x for x in self.spc.sources if x[2] == 1]
        remote_files_re = re.compile(r'(http:|https:|ftp:)//.*')

        for index, src in enumerate(sorted(sources_list, key=lambda source: source[1])):
            # src is a tuple of (source, index of the source, type of the source (patch or source))
            # Only archive sources that are remote and not yet present locally need to be downloaded
            abs_path = os.path.join(self.sources_location, os.path.basename(src[0]).strip())
            sources.append(abs_path)
            archive = [x for x in Archive.get_supported_archives() if src[0].endswith(x)]
            # if the source is a remote file, download it
            if archive:
                if remote_files_re.search(src[0]) and self.download and not os.path.isfile(abs_path):
                    logger.debug("Tarball is not present at {}, "
                                 "trying to download it from URL {}".format(abs_path, src[0]))
                    logger.info("Tarball is not present locally. Trying to download it from its URL")
                    try:
                        DownloadHelper.download_file(src[0], abs_path)
                    except DownloadError as e:
                        raise RebaseHelperError("Failed to download file from URL {}. "
                                                "Reason: '{}'. ".format(src[0], str(e)))
                tar_sources.append(abs_path)
        return sources, tar_sources
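A quick check of the remote-source detection regex above; the file names are illustrative:

    import re

    remote_files_re = re.compile(r'(http:|https:|ftp:)//.*')
    print(bool(remote_files_re.search('https://example.com/foo-1.0.tar.gz')))  # True
    print(bool(remote_files_re.search('foo-1.0.tar.gz')))                      # False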
Example #41
    def _build_rpm(cls, srpm, results_dir, rpm_results_dir, root=None, arch=None, builder_options=None):
        """
        Build RPM using mock.

        :param srpm: full path to the srpm.
        :param results_dir: abs path to dir where the log should be placed.
        :param rpm_results_dir: directory where rpms will be placed.
        :param root: path to where chroot should be built.
        :param arch: target architectures for the build.
        :param builder_options: builder_options for mock.
        :return: abs paths to built RPMs.
        """
        logger.info("Building RPMs")
        output = os.path.join(results_dir, "mock_output.log")

        cmd = [cls.CMD, '--old-chroot', '--rebuild', srpm, '--resultdir', results_dir]
        if root is not None:
            cmd.extend(['--root', root])
        if arch is not None:
            cmd.extend(['--arch', arch])
        if builder_options is not None:
            cmd.extend(builder_options)

        ret = ProcessHelper.run_subprocess(cmd, output_file=output)

        if ret == 0:
            return [f for f in PathHelper.find_all_files(results_dir, '*.rpm') if not f.endswith('.src.rpm')]
        else:
            logfile = Mock.get_mock_logfile_path(ret, rpm_results_dir, tmp_path=results_dir)
        logs = [l for l in PathHelper.find_all_files(results_dir, '*.log')]
        cls.logs.extend(os.path.join(rpm_results_dir, os.path.basename(l)) for l in logs)
        raise BinaryPackageBuildError("Building RPMs failed!", rpm_results_dir, logfile=logfile)
Example #42
 def _scratch_build(cls, source, **kwargs):
     session = cls.koji_helper.session_maker()
     remote = cls.koji_helper.upload_srpm(session, source)
     task_id = session.build(remote,
                             cls.target_tag,
                             cls.opts,
                             priority=cls.priority)
     if kwargs['builds_nowait']:
         return None, None, task_id
     weburl = cls.weburl + '/taskinfo?taskID=%i' % task_id
     logger.info('Koji task_id is here:\n%s', weburl)
     session.logout()
     task_dict = cls.koji_helper.watch_koji_tasks(session, [task_id])
     task_list = []
     package_failed = False
     for key in six.iterkeys(task_dict):
         if task_dict[key] == koji.TASK_STATES['FAILED']:
             package_failed = True
         task_list.append(key)
     rpms, logs = cls.koji_helper.download_scratch_build(
         session, task_list,
         os.path.dirname(source).replace('SRPM', 'RPM'))
     if package_failed:
         weburl = '%s/taskinfo?taskID=%i' % (cls.weburl, task_list[0])
         logger.info('RPM build failed %s', weburl)
         logs.append(weburl)
         cls.logs.append(weburl)
         raise BinaryPackageBuildError
     logs.append(weburl)
     return rpms, logs, task_id
Example #43
    def download_task_results(cls, session, tasklist, destination):
        """Downloads packages and logs of finished Koji tasks.

        Args:
            session (koji.ClientSession): Active Koji session instance.
            tasklist (list): List of task IDs.
            destination (str): Path where to download files to.

        Returns:
            tuple: List of downloaded RPMs and list of downloaded logs.

        Raises:
            DownloadError: If download failed.

        """
        rpms = []
        logs = []
        for task_id in tasklist:
            logger.info('Downloading packages and logs for task %s', task_id)
            task = session.getTaskInfo(task_id, request=True)
            if task['state'] in [koji.TASK_STATES['FREE'], koji.TASK_STATES['OPEN']]:
                logger.info('Task %s is still running!', task_id)
                continue
            elif task['state'] != koji.TASK_STATES['CLOSED']:
                logger.info('Task %s did not complete successfully!', task_id)
            if task['method'] == 'buildArch':
                tasks = [task]
            elif task['method'] == 'build':
                opts = dict(parent=task_id, method='buildArch', decode=True,
                            state=[koji.TASK_STATES['CLOSED'], koji.TASK_STATES['FAILED']])
                tasks = session.listTasks(opts=opts)
            else:
                logger.info('Task %s is not a build or buildArch task!', task_id)
                continue
            for task in tasks:
                base_path = koji.pathinfo.taskrelpath(task['id'])
                output = session.listTaskOutput(task['id'])
                for filename in output:
                    local_path = os.path.join(destination, filename)
                    download = False
                    fn, ext = os.path.splitext(filename)
                    if ext == '.rpm':
                        if task['state'] != koji.TASK_STATES['CLOSED']:
                            continue
                        if local_path not in rpms:
                            nevra = RpmHelper.split_nevra(fn)
                            # FIXME: multiple arches
                            download = nevra['arch'] in ['noarch', 'x86_64']
                            if download:
                                rpms.append(local_path)
                    else:
                        if local_path not in logs:
                            download = True
                            logs.append(local_path)
                    if download:
                        logger.info('Downloading file %s', filename)
                        url = '/'.join([session.opts['topurl'], 'work', base_path, filename])
                        DownloadHelper.download_file(url, local_path)
        return rpms, logs
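The helper above drives the Koji client API directly. As a point of comparison, here is a compact, hedged sketch (not the project's code) of fetching just the log files of a single buildArch task with plain koji calls; the hub and top URLs are the public Fedora endpoints and serve only as placeholders.

import os
import urllib.request

import koji

def fetch_task_logs(task_id, destination,
                    hub='https://koji.fedoraproject.org/kojihub',
                    topurl='https://kojipkgs.fedoraproject.org'):
    session = koji.ClientSession(hub)
    # task output is published under <topurl>/work/<task relative path>/
    base_path = koji.pathinfo.taskrelpath(task_id)
    os.makedirs(destination, exist_ok=True)
    downloaded = []
    for filename in session.listTaskOutput(task_id):
        if not filename.endswith('.log'):
            continue
        url = '/'.join([topurl, 'work', base_path, filename])
        local_path = os.path.join(destination, filename)
        urllib.request.urlretrieve(url, local_path)
        downloaded.append(local_path)
    return downloaded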
Example #44
    def _upload_source(cls,
                       url,
                       package,
                       filename,
                       hashtype,
                       hsh,
                       auth=requests_gssapi.HTTPSPNEGOAuth()):
        class ChunkedData(object):
            def __init__(self, check_only, chunksize=8192):
                self.check_only = check_only
                self.chunksize = chunksize
                self.start = time.time()
                self.uploaded = False
                fields = [
                    ('name', package),
                    ('{}sum'.format(hashtype), hsh),
                ]
                if check_only:
                    fields.append(('filename', filename))
                else:
                    with open(filename, 'rb') as f:
                        rf = RequestField('file', f.read(), filename)
                        rf.make_multipart()
                        fields.append(rf)
                self.data, content_type = encode_multipart_formdata(fields)
                self.headers = {'Content-Type': content_type}

            def __iter__(self):
                if self.uploaded:
                    # ensure the progressbar is shown only once (HTTPSPNEGOAuth causes second request)
                    yield self.data
                else:
                    totalsize = len(self.data)
                    for offset in range(0, totalsize, self.chunksize):
                        transferred = min(offset + self.chunksize, totalsize)
                        if not self.check_only:
                            DownloadHelper.progress(totalsize, transferred,
                                                    self.start)
                        yield self.data[offset:transferred]
                    self.uploaded = True

        def post(check_only=False):
            cd = ChunkedData(check_only)
            r = requests.post(url, data=cd, headers=cd.headers, auth=auth)
            if not 200 <= r.status_code < 300:
                raise LookasideCacheError(r.reason)
            return r.content

        state = post(check_only=True)
        if state.strip() == b'Available':
            # already uploaded
            return

        logger.info('Uploading %s to lookaside cache', filename)
        try:
            post()
        finally:
            sys.stdout.write('\n')
            sys.stdout.flush()
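The `hashtype` and `hsh` arguments of `_upload_source` describe the file being uploaded to the lookaside cache. A minimal sketch of how such a checksum could be computed with nothing but hashlib (an illustration, not the project's helper):

import hashlib

def checksum(filename, hashtype='md5', blocksize=8192):
    # e.g. checksum('foo-1.0.tar.gz', 'sha512') -> hex digest for the '{hashtype}sum' field above
    digest = hashlib.new(hashtype)
    with open(filename, 'rb') as f:
        for chunk in iter(lambda: f.read(blocksize), b''):
            digest.update(chunk)
    return digest.hexdigest()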
Example #45
 def get_detached_task(cls, task_id, results_dir):
     if not cls.koji_helper:
         cls.koji_helper = KojiHelper()
     try:
         return cls.koji_helper.get_koji_tasks(task_id, results_dir)
     except TypeError:
         logger.info('Koji tasks are not finished yet. Try again later')
         return None, None
Example #46
 def print_summary(self):
     output = output_tool.OutputTool(self.conf.outputtool)
     report_file = os.path.join(
         self.results_dir,
         self.conf.outputtool + settings.REBASE_HELPER_OUTPUT_SUFFIX)
     output.print_information(path=report_file)
     logger.info('Report file from rebase-helper is available here: %s',
                 report_file)
Example #47
    def _prepare_spec_objects(self):
        """
        Prepare spec files and initialize objects

        :return:
        """
        self.rebase_spec_file_path = get_rebase_name(self.rebased_sources_dir, self.spec_file_path)

        self.spec_file = SpecFile(self.spec_file_path,
                                  self.conf.changelog_entry,
                                  self.execution_dir,
                                  download=not self.conf.not_download_sources)
        # Check whether test suite is enabled at build time
        if not self.spec_file.is_test_suite_enabled():
            results_store.set_info_text('WARNING', 'Test suite is not enabled at build time.')
        # create an object representing the rebased SPEC file
        self.rebase_spec_file = self.spec_file.copy(self.rebase_spec_file_path)

        if not self.conf.sources:
            self.conf.sources = versioneers_runner.run(self.conf.versioneer,
                                                       self.spec_file.get_package_name(),
                                                       self.spec_file.category,
                                                       self.conf.versioneer_blacklist)
            if self.conf.sources:
                logger.info("Determined latest upstream version '%s'", self.conf.sources)
            else:
                raise RebaseHelperError('Could not determine latest upstream version '
                                        'and no SOURCES argument specified!')

        # Prepare rebased_sources_dir
        self.rebased_repo = self._prepare_rebased_repository(self.spec_file.patches,
                                                             self.execution_dir,
                                                             self.rebased_sources_dir)

        # check if argument passed as new source is a file or just a version
        if any(self.conf.sources.endswith(ext) for ext in Archive.get_supported_archives()):
            logger.debug("argument passed as a new source is a file")
            self.rebase_spec_file.set_version_using_archive(self.conf.sources)
        else:
            logger.debug("argument passed as a new source is a version")
            version, extra_version, separator = SpecFile.split_version_string(self.conf.sources)
            self.rebase_spec_file.set_version(version)
            self.rebase_spec_file.set_extra_version_separator(separator)
            self.rebase_spec_file.set_extra_version(extra_version)

        if not self.conf.skip_version_check and parse_version(self.rebase_spec_file.get_version()) \
                <= parse_version(self.spec_file.get_version()):
            raise RebaseHelperError("Current version is equal to or newer than the requested version, nothing to do.")

        # run spec hooks
        spec_hooks_runner.run_spec_hooks(self.spec_file, self.rebase_spec_file, **self.kwargs)

        # SPEC file objects have been sanitized, downloading can proceed
        for spec_file in [self.spec_file, self.rebase_spec_file]:
            if spec_file.download:
                spec_file.download_remote_sources()
                # parse spec again with sources downloaded to properly expand %prep section
                spec_file._update_data()  # pylint: disable=protected-access
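The version guard above compares parsed versions rather than raw strings. A quick illustration of why that matters, assuming `parse_version` comes from pkg_resources (or the equivalent packaging API):

from pkg_resources import parse_version

# lexicographic string comparison gets multi-digit components wrong ...
assert '1.2.10' < '1.2.9'
# ... while parsed versions compare numerically
assert parse_version('1.2.10') > parse_version('1.2.9')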
Example #48
    def _prepare_spec_objects(self):
        """
        Prepare spec files and initialize objects

        :return:
        """
        self.rebase_spec_file_path = get_rebase_name(self.rebased_sources_dir, self.spec_file_path)

        self.spec_file = SpecFile(self.spec_file_path, self.execution_dir)
        # Check whether test suite is enabled at build time
        if not self.spec_file.is_test_suite_enabled():
            results_store.set_info_text('WARNING', 'Test suite is not enabled at build time.')
        # create an object representing the rebased SPEC file
        self.rebase_spec_file = self.spec_file.copy(self.rebase_spec_file_path)

        if not self.conf.sources:
            self.conf.sources = plugin_manager.versioneers.run(self.conf.versioneer,
                                                               self.spec_file.get_package_name(),
                                                               self.spec_file.category,
                                                               self.conf.versioneer_blacklist)
            if self.conf.sources:
                logger.info("Determined latest upstream version '%s'", self.conf.sources)
            else:
                raise RebaseHelperError('Could not determine latest upstream version '
                                        'and no SOURCES argument specified!')

        # Prepare rebased_sources_dir
        self.rebased_repo = self._prepare_rebased_repository(self.spec_file.patches,
                                                             self.execution_dir,
                                                             self.rebased_sources_dir)

        # check if argument passed as new source is a file or just a version
        if any(self.conf.sources.endswith(ext) for ext in Archive.get_supported_archives()):
            logger.verbose("argument passed as a new source is a file")
            self.rebase_spec_file.set_version_using_archive(self.conf.sources)
        else:
            logger.verbose("argument passed as a new source is a version")
            version, extra_version, separator = SpecFile.split_version_string(self.conf.sources)
            self.rebase_spec_file.set_version(version)
            self.rebase_spec_file.set_extra_version_separator(separator)
            self.rebase_spec_file.set_extra_version(extra_version)

        if not self.conf.skip_version_check and parse_version(self.rebase_spec_file.get_version()) \
                <= parse_version(self.spec_file.get_version()):
            raise RebaseHelperError("Current version is equal to or newer than the requested version, nothing to do.")

        self.rebase_spec_file.update_changelog(self.conf.changelog_entry)

        # run spec hooks
        plugin_manager.spec_hooks.run(self.spec_file, self.rebase_spec_file, **self.kwargs)

        # SPEC file objects have been sanitized, downloading can proceed
        if not self.conf.not_download_sources:
            for spec_file in [self.spec_file, self.rebase_spec_file]:
                spec_file.download_remote_sources()
                # parse spec again with sources downloaded to properly expand %prep section
                spec_file._update_data()  # pylint: disable=protected-access
Example #49
    def get_koji_tasks(cls, task_id, dir_name):
        session = cls.session_maker(baseurl=cls.server)
        task_id = int(task_id)
        rpm_list = []
        log_list = []
        tasks = []
        task = session.getTaskInfo(task_id, request=True)
        if task['state'] in (koji.TASK_STATES['FREE'],
                             koji.TASK_STATES['OPEN']):
            return None, None
        elif task['state'] != koji.TASK_STATES['CLOSED']:
            logger.info('Task %i did not complete successfully', task_id)

        if task['method'] == 'build':
            logger.info('Getting rpms for children of task %i: %s', task['id'],
                        koji.taskLabel(task))
            # getting rpms from children of task
            tasks = session.listTasks(opts={
                'parent': task_id,
                'method': 'buildArch',
                'state': [koji.TASK_STATES['CLOSED'], koji.TASK_STATES['FAILED']],
                'decode': True,
            })
        elif task['method'] == 'buildArch':
            tasks = [task]
        for task in tasks:
            base_path = koji.pathinfo.taskrelpath(task['id'])
            output = session.listTaskOutput(task['id'])
            if output is None:
                return None
            for filename in output:
                download = False
                full_path_name = os.path.join(dir_name, filename)
                if filename.endswith('.src.rpm'):
                    continue
                if filename.endswith('.rpm'):
                    if task['state'] != koji.TASK_STATES['CLOSED']:
                        continue
                    arch = filename.rsplit('.', 3)[2]
                    if full_path_name not in rpm_list:
                        download = arch in ['noarch', 'x86_64']
                        if download:
                            rpm_list.append(full_path_name)
                else:
                    if full_path_name not in log_list:
                        log_list.append(full_path_name)
                        download = True
                if download:
                    DownloadHelper.download_file(
                        cls.baseurl + base_path + '/' + filename,
                        full_path_name)
        return rpm_list, log_list
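The `filename.rsplit('.', 3)[2]` expression above pulls the architecture out of a typical N-V-R.arch.rpm file name; a short illustration:

# rsplit from the right: ['example-1.0-1', 'fc30', 'x86_64', 'rpm']
parts = 'example-1.0-1.fc30.x86_64.rpm'.rsplit('.', 3)
assert parts[2] == 'x86_64'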
Example #50
    def run():
        debug_log_file = None
        try:
            cli = CLI()
            if hasattr(cli, 'version'):
                logger.info(VERSION)
                sys.exit(0)

            config = Config(getattr(cli, 'config-file', None))
            config.merge(cli)
            for handler in [main_handler, output_tool_handler]:
                handler.set_terminal_background(config.background)

            ConsoleHelper.use_colors = ConsoleHelper.should_use_colors(config)
            execution_dir, results_dir, debug_log_file = Application.setup(
                config)
            traceback_log = os.path.join(results_dir, LOGS_DIR, TRACEBACK_LOG)
            if config.verbose == 0:
                main_handler.setLevel(logging.INFO)
            elif config.verbose == 1:
                main_handler.setLevel(CustomLogger.VERBOSE)
            else:
                main_handler.setLevel(logging.DEBUG)
            app = Application(config, execution_dir, results_dir,
                              debug_log_file)
            app.run()
        except KeyboardInterrupt:
            logger.info('Interrupted by user')
        except RebaseHelperError as e:
            if e.msg:
                logger.error('%s', e.msg)
            else:
                logger.error('%s', six.text_type(e))
            sys.exit(1)
        except SystemExit as e:
            sys.exit(e.code)
        except BaseException:
            if debug_log_file:
                logger.error(
                    'rebase-helper failed due to an unexpected error. Please report this problem'
                    '\nusing the following link: %s'
                    '\nand include the content of'
                    '\n\'%s\' and'
                    '\n\'%s\''
                    '\nin the report.'
                    '\nThank you!', NEW_ISSUE_LINK, debug_log_file,
                    traceback_log)
                LoggerHelper.add_file_handler(logger_traceback, traceback_log)
                logger_traceback.trace('', exc_info=1)
            else:
                logger.error(
                    'rebase-helper failed due to an unexpected error. Please report this problem'
                    '\nusing the following link: %s'
                    '\nThank you!', NEW_ISSUE_LINK)
            sys.exit(1)

        sys.exit(0)
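`CustomLogger.VERBOSE` above is a log level that sits between `DEBUG` and `INFO`. Below is a minimal sketch of how such a level is commonly registered with the standard logging module; the numeric value 15 and the method name are assumptions, not rebase-helper's actual implementation.

import logging

VERBOSE = 15  # assumed value between DEBUG (10) and INFO (20)
logging.addLevelName(VERBOSE, 'VERBOSE')

def verbose(self, msg, *args, **kwargs):
    # standard recipe for a custom level: check the level, then delegate to _log()
    if self.isEnabledFor(VERBOSE):
        self._log(VERBOSE, msg, args, **kwargs)

logging.Logger.verbose = verbose

logging.basicConfig(level=VERBOSE)
logging.getLogger(__name__).verbose('shown only when the handler level is VERBOSE or lower')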
Example #51
    def build_source_packages(self):
        try:
            builder = plugin_manager.srpm_build_tools.get_plugin(self.conf.srpm_buildtool)
        except NotImplementedError as e:
            raise RebaseHelperError('{}. Supported SRPM build tools are {}'.format(
                str(e), plugin_manager.srpm_build_tools.get_supported_tools()))

        for version in ['old', 'new']:
            koji_build_id = None
            results_dir = '{}-build'.format(os.path.join(self.results_dir, version))
            spec = self.spec_file if version == 'old' else self.rebase_spec_file
            package_name = spec.get_package_name()
            package_version = spec.get_version()
            package_full_version = spec.get_full_version()
            logger.info('Building source package for %s version %s', package_name, package_full_version)

            if version == 'old' and self.conf.get_old_build_from_koji:
                koji_build_id, package_version, package_full_version = KojiHelper.get_old_build_info(package_name,
                                                                                                     package_version)

            build_dict = dict(
                name=package_name,
                version=package_version,
                srpm_buildtool=self.conf.srpm_buildtool,
                srpm_builder_options=self.conf.srpm_builder_options)
            try:
                if koji_build_id:
                    session = KojiHelper.create_session()
                    build_dict['srpm'], build_dict['logs'] = KojiHelper.download_build(
                        session, koji_build_id, os.path.join(results_dir, 'SRPM'), arches=['src'])
                else:
                    build_dict.update(builder.build(spec, results_dir, **build_dict))
                build_dict = self._sanitize_build_dict(build_dict)
                results_store.set_build_data(version, build_dict)
            except RebaseHelperError:  # pylint: disable=try-except-raise
                raise
            except SourcePackageBuildError as e:
                build_dict.update(builder.get_logs())
                build_dict['source_package_build_error'] = str(e)
                build_dict = self._sanitize_build_dict(build_dict)
                results_store.set_build_data(version, build_dict)
                if e.logfile:
                    msg = 'Building {} SRPM packages failed; see {} for more information'.format(version, e.logfile)
                else:
                    msg = 'Building {} SRPM packages failed; see logs in {} for more information'.format(
                        version, os.path.join(results_dir, 'SRPM'))
                logger.error(msg)
                raise RebaseHelperError(msg, logfiles=builder.get_logs().get('logs'))
            except Exception:
                raise RebaseHelperError('Building package failed with unknown reason. '
                                        'Check all available log files.')
Example #52
 def apply_old_patches(cls):
     """Function applies a patch to a old/new sources"""
     for patch in cls.patches:
         logger.info("Applying patch '%s' to '%s'", os.path.basename(patch.get_path()), os.path.basename(cls.source_dir))
         ret_code = GitPatchTool.apply_patch(cls.git_helper, patch)
         # unexpected
         if int(ret_code) != 0:
             if cls.source_dir == cls.old_sources:
                 raise RuntimeError('Failed to patch old sources')
Example #53
 def print_copr_logs(self):
     logs = self.get_new_build_logs()['build_ref']
     copr_helper = CoprHelper()
     client = copr_helper.get_client()
     message = "Copr build for '%s' version is: %s"
     for version in ['old', 'new']:
         data = logs[version]
         build_url = copr_helper.get_build_url(client, data['copr_build_id'])
         logger.info(message, data['version'], build_url)
Example #54
    def build_source_packages(self):
        try:
            builder = srpm_build_helper.get_tool(self.conf.srpm_buildtool)
        except NotImplementedError as e:
            raise RebaseHelperError('{}. Supported SRPM build tools are {}'.format(
                six.text_type(e), srpm_build_helper.get_supported_tools()))

        for version in ['old', 'new']:
            koji_build_id = None
            results_dir = '{}-build'.format(os.path.join(self.results_dir, version))
            spec = self.spec_file if version == 'old' else self.rebase_spec_file
            package_name = spec.get_package_name()
            package_version = spec.get_version()
            package_full_version = spec.get_full_version()
            logger.info('Building source package for %s version %s', package_name, package_full_version)

            if version == 'old' and self.conf.get_old_build_from_koji:
                koji_build_id, package_version, package_full_version = KojiHelper.get_old_build_info(package_name,
                                                                                                     package_version)

            build_dict = dict(
                name=package_name,
                version=package_version,
                srpm_buildtool=self.conf.srpm_buildtool,
                srpm_builder_options=self.conf.srpm_builder_options)
            try:
                if koji_build_id:
                    session = KojiHelper.create_session()
                    build_dict['srpm'], build_dict['logs'] = KojiHelper.download_build(
                        session, koji_build_id, os.path.join(results_dir, 'SRPM'), arches=['src'])
                else:
                    build_dict.update(builder.build(spec, results_dir, **build_dict))
                build_dict = self._sanitize_build_dict(build_dict)
                results_store.set_build_data(version, build_dict)
            except RebaseHelperError:  # pylint: disable=try-except-raise
                raise
            except SourcePackageBuildError as e:
                build_dict.update(builder.get_logs())
                build_dict['source_package_build_error'] = six.text_type(e)
                build_dict = self._sanitize_build_dict(build_dict)
                results_store.set_build_data(version, build_dict)
                if e.logfile:
                    msg = 'Building {} SRPM packages failed; see {} for more information'.format(version, e.logfile)
                else:
                    msg = 'Building {} SRPM packages failed; see logs in {} for more information'.format(
                        version, os.path.join(results_dir, 'SRPM'))
                raise RebaseHelperError(msg, logfiles=builder.get_logs().get('logs'))
            except Exception:
                raise RebaseHelperError('Building package failed with unknown reason. '
                                        'Check all available log files.')
Example #55
 def print_copr_logs(self):
     logs = self.get_new_build_logs()['build_ref']
     copr_helper = CoprHelper()
     client = copr_helper.get_client()
     message = "Copr build for '%s' version is: %s"
     for version in ['old', 'new']:
         data = logs[version]
         build_url = copr_helper.get_build_url(client,
                                               data['copr_build_id'])
         logger.info(message, data['version'], build_url)
Example #56
 def _execute_checkers(self, checker):
     """
     Function executes a checker based on command line arguments
     :param checker: checker name based from command line
     :return: Nothing
     """
     pkgchecker = Checker(checker)
     logger.info('Comparing packages using %s...', checker)
     text = pkgchecker.run_check(self.results_dir)
     return text
Example #57
 def apply_old_patches(cls):
     """Function applies a patch to a old/new sources"""
     for patch in cls.patches:
         logger.info("Applying patch '%s' to '%s'",
                     os.path.basename(patch.get_path()),
                     os.path.basename(cls.source_dir))
         try:
             cls.apply_patch(cls.old_repo, patch)
         except git.GitCommandError:
             raise RuntimeError('Failed to patch old sources')
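`cls.apply_patch` is not included in this listing. A hedged sketch of what applying a patch to a checkout with GitPython could look like, using `git apply` (the project's real implementation may differ):

import git

def apply_patch(repo_path, patch_path):
    repo = git.Repo(repo_path)
    try:
        # equivalent to running `git apply <patch>` inside the checkout
        repo.git.apply(patch_path)
    except git.GitCommandError:
        raise RuntimeError('Failed to apply {}'.format(patch_path))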
Example #58
    def download_file(url, destination_path, blocksize=8192):
        """Downloads a file from HTTP, HTTPS or FTP URL.

        Args:
            url (str): URL to be downloaded.
            destination_path (str): Path to where the downloaded file will be stored.
            blocksize (int): Block size in bytes.

        """
        r = DownloadHelper.request(url, stream=True)
        if r is None:
            raise DownloadError(
                "An unexpected error occurred during the download.")

        if not 200 <= r.status_code < 300:
            raise DownloadError(r.reason)

        file_size = int(r.headers.get('content-length', -1))

        # file exists, check the size
        if os.path.exists(destination_path):
            if file_size < 0 or file_size != os.path.getsize(destination_path):
                logger.verbose(
                    "The destination file '%s' exists, but sizes don't match! Removing it.",
                    destination_path)
                os.remove(destination_path)
            else:
                logger.verbose(
                    "The destination file '%s' exists, and the size is correct! Skipping download.",
                    destination_path)
                return
        try:
            with open(destination_path, 'wb') as local_file:
                logger.info('Downloading file from URL %s', url)
                download_start = time.time()
                downloaded = 0

                # report progress
                DownloadHelper.progress(file_size, downloaded, download_start)

                # do the actual download
                for chunk in r.iter_content(chunk_size=blocksize):
                    downloaded += len(chunk)
                    local_file.write(chunk)

                    # report progress
                    DownloadHelper.progress(file_size, downloaded,
                                            download_start)

                sys.stdout.write('\n')
                sys.stdout.flush()
        except KeyboardInterrupt as e:
            os.remove(destination_path)
            raise e
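`DownloadHelper.progress` is referenced here and in the lookaside upload snippet but is not part of this listing. A minimal sketch of what such a progress reporter could look like (an assumption, not the project's code); it takes the same (total, transferred, start) arguments as the calls above:

import sys
import time

def progress(total, transferred, start):
    elapsed = max(time.time() - start, 0.001)
    rate = transferred / elapsed / 1024
    if total > 0:
        sys.stdout.write('\r{:6.1f}% ({:d}/{:d} B, {:.0f} KiB/s)'.format(
            100.0 * transferred / total, transferred, total, rate))
    else:
        # content length unknown
        sys.stdout.write('\r{:d} B ({:.0f} KiB/s)'.format(transferred, rate))
    sys.stdout.flush()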
Example #59
 def apply_old_patches(cls):
     """Function applies a patch to a old/new sources"""
     for patch in cls.patches:
         logger.info("Applying patch '%s' to '%s'",
                     os.path.basename(patch.get_path()),
                     os.path.basename(cls.source_dir))
         ret_code = GitPatchTool.apply_patch(cls.git_helper, patch)
         # unexpected
         if int(ret_code) != 0:
             if cls.source_dir == cls.old_sources:
                 raise RuntimeError('Failed to patch old sources')
Example #60
    def run_output_tool(self, tool, logs=None, app=None):
        """
        Runs specified output tool.

        :param tool: Tool to run
        :param log: Log that probably contains the important message concerning the rebase fail
        :param app: Application class instance
        """
        output_tool = self.output_tools[tool]
        logger.info("Running '%s' output tool.", tool)
        output_tool.run(logs, app=app)
        output_tool.print_cli_summary(app)