def apply_patch(git_helper, patch_object):
        """
        Function applies patches to old sources
        It tries apply patch with am command and if it fails
        then with command --apply
        """
        logger.debug('Applying patch with am')

        patch_name = patch_object.get_path()
        patch_option = patch_object.get_option()
        ret_code = git_helper.command_am(input_file=patch_name)
        if int(ret_code) != 0:
            git_helper.command_am(parameters='--abort', input_file=patch_name)
            logger.debug('Applying patch with git am failed.')
            ret_code = git_helper.command_apply(input_file=patch_name,
                                                option=patch_option)
            if int(ret_code) != 0:
                ret_code = git_helper.command_apply(input_file=patch_name,
                                                    option=patch_option,
                                                    ignore_space=True)
            ret_code = GitPatchTool.commit_patch(git_helper, patch_name)
        else:
            # replace last commit message with patch name to preserve mapping between commits and patches
            ret_code = git_helper.command_commit(message='Patch: {0}'.format(
                os.path.basename(patch_name)),
                                                 amend=True)
        return ret_code
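
A minimal standalone sketch of the same fallback order (try 'git am', abort on failure, then fall back to 'git apply'), using plain subprocess calls; repo_dir and patch_path are hypothetical.

import subprocess

def try_apply(repo_dir, patch_path):
    # Try 'git am' first; on failure abort the am session and fall back to 'git apply'.
    if subprocess.call(['git', 'am', patch_path], cwd=repo_dir) != 0:
        subprocess.call(['git', 'am', '--abort'], cwd=repo_dir)
        if subprocess.call(['git', 'apply', patch_path], cwd=repo_dir) != 0:
            # last resort: retry while ignoring whitespace changes
            subprocess.call(['git', 'apply', '--ignore-space-change', patch_path], cwd=repo_dir)
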
Example #2
    def revert_redefine_release_with_macro(self, macro):
        """
        Method removes the redefined Release: line containing the given macro and uncomments the old Release line.

        :param macro:
        :return:
        """
        search_re = re.compile(
            r'^Release:\s*[0-9.]*[0-9]+\.{0}%{{\?dist}}\s*'.format(macro))

        for index, line in enumerate(self.spec_content):
            match = search_re.search(line)
            if match:
                # We will uncomment old line, so sanity check first
                if not self.spec_content[index - 1].startswith('#Release:'):
                    raise RebaseHelperError(
                        "Redefined Release line in SPEC is not preceded by a "
                        "commented-out old Release line: '{0}'".format(
                            self.spec_content[index - 1].strip()))
                logger.debug("Uncommenting original Release line "
                             "'%s'", self.spec_content[index - 1].strip())
                self.spec_content[index - 1] = self.spec_content[index -
                                                                 1].lstrip('#')
                logger.debug("Removing redefined Release line '%s'",
                             line.strip())
                self.spec_content.pop(index)
                self.save()
                break
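
The regular expression above only matches a Release line that ends with the given macro before %{?dist}; a quick check against hypothetical spec content:

import re

spec_content = [
    '#Release: 3%{?dist}\n',        # commented-out original line
    'Release: 3.rebase%{?dist}\n',  # redefined line containing the macro
]
search_re = re.compile(r'^Release:\s*[0-9.]*[0-9]+\.{0}%{{\?dist}}\s*'.format('rebase'))
print([bool(search_re.search(line)) for line in spec_content])  # [False, True]
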
Example #3
    def set_version(self, version):
        """
        Method to update the version in the SPEC file

        :param version: string with new version
        :return: None
        """
        version_re = re.compile(r'^Version:\s*(.+)')
        for index, line in enumerate(self.spec_content):
            match = version_re.search(line)
            if match:
                logger.debug("Updating version in SPEC from '%s' with '%s'",
                             self.get_version(), version)

                # search for used macros in spec file scope
                for m in MacroHelper.filter(self.macros, level=-1, used=True):
                    if m['name'] in match.group(1):
                        # redefine the macro, don't touch Version tag
                        self._set_macro(m['name'], version)
                        return

                self.spec_content[index] = line.replace(
                    match.group(1), version)
                break
        #  save changes to the disc
        self.save()
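
The replacement touches only the captured version string, never the 'Version:' tag itself; a small sketch with a hypothetical line:

import re

version_re = re.compile(r'^Version:\s*(.+)')
line = 'Version: 1.2.3\n'
match = version_re.search(line)
if match:
    # only the captured group is replaced, the tag and whitespace are kept
    print(line.replace(match.group(1), '1.3.0'), end='')  # Version: 1.3.0
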
Example #4
    def _update_data(self):
        """
        Function updates data from the given SPEC file

        :return: 
        """
        # Load rpm information
        try:
            self.spc = rpm.spec(self.path)
        except ValueError:
            raise RebaseHelperError("Problem with parsing SPEC file '%s'" %
                                    self.path)
        self.sources = self._get_spec_sources_list(self.spc)
        self.prep_section = self.spc.prep
        # HEADER of SPEC file
        self.hdr = self.spc.sourceHeader
        self.rpm_sections = self._split_sections()
        # determine the extra_version
        logger.debug("Updating the extra version")
        _, self.extra_version, separator = SpecFile.extract_version_from_archive_name(
            self.get_archive(), self._get_raw_source_string(0))
        self.set_extra_version_separator(separator)

        self.patches = self._get_initial_patches_list()
        self.macros = MacroHelper.dump()

        # TODO: don't call this at all in SPEC file methods
        if self.download:
            self.download_remote_sources()
Example #5
    def _update_data(self):
        """
        Function updates data from the given SPEC file

        :return: 
        """
        # Load rpm information
        try:
            self.spc = rpm.spec(self.path)
        except ValueError:
            raise RebaseHelperError("Problem with parsing SPEC file '%s'" % self.path)
        self.prep_section = self.spc.prep
        # HEADER of SPEC file
        self.hdr = self.spc.sourceHeader

        # All source file mentioned in SPEC file Source[0-9]*
        self.rpm_sections = self._split_sections()
        # determine the extra_version
        logger.debug("Updating the extra version")
        self.sources, self.tar_sources = self._get_initial_sources_list()

        _, self.extra_version, separator = SpecFile.extract_version_from_archive_name(
            self.get_archive(),
            self._get_raw_source_string(0))
        self.set_extra_version_separator(separator)

        self.patches = self._get_initial_patches_list()
        self.macros = MacroHelper.dump()
    def build(cls, spec, results_dir, srpm, **kwargs):
        """
        Builds the RPMs using rpmbuild

        :param spec: SpecFile object
        :param results_dir: absolute path to DIR where results should be stored
        :param srpm: absolute path to SRPM
        :return: dict with:
                 'rpm' -> list with absolute paths to RPMs
                 'logs' -> list with absolute paths to build_logs
        """
        rpm_results_dir = os.path.join(results_dir, "RPM")
        sources = spec.get_sources()
        patches = [p.get_path() for p in spec.get_patches()]
        with RpmbuildTemporaryEnvironment(sources, patches, spec.get_path(), rpm_results_dir) as tmp_env:
            env = tmp_env.env()
            tmp_dir = tmp_env.path()
            tmp_results_dir = env.get(RpmbuildTemporaryEnvironment.TEMPDIR_RESULTS)
            rpms = cls._build_rpm(srpm, tmp_dir, tmp_results_dir, rpm_results_dir,
                                  builder_options=cls.get_builder_options(**kwargs))

        logger.info("Building RPMs finished successfully")

        # RPMs paths in results_dir
        rpms = [os.path.join(rpm_results_dir, os.path.basename(f)) for f in rpms]
        logger.debug("Successfully built RPMs: '%s'", str(rpms))

        # gather logs
        cls.logs.extend([l for l in PathHelper.find_all_files(rpm_results_dir, '*.log')])
        logger.debug("logs: '%s'", str(cls.logs))

        return dict(rpm=rpms, logs=cls.logs)
Example #7
    def download_remote_sources(self):
        """
        Method that iterates over all sources and downloads those that are specified by a URL rather than a local file name.

        :return: None
        """
        try:
            # try to download old sources from Fedora lookaside cache
            LookasideCacheHelper.download('fedpkg', os.path.dirname(self.path),
                                          self.get_package_name())
        except LookasideCacheError as e:
            logger.debug("Downloading sources from lookaside cache failed. "
                         "Reason: '{}'.".format(str(e)))

        # filter out only sources with URL
        remote_files = [
            source for source in self.sources
            if bool(urllib.parse.urlparse(source).scheme)
        ]
        # download any sources that are not yet downloaded
        for remote_file in remote_files:
            local_file = os.path.join(self.sources_location,
                                      os.path.basename(remote_file))
            if not os.path.isfile(local_file):
                logger.debug(
                    "File '%s' doesn't exist locally, downloading it.",
                    local_file)
                try:
                    DownloadHelper.download_file(remote_file, local_file)
                except DownloadError as e:
                    raise RebaseHelperError(
                        "Failed to download file from URL {}. "
                        "Reason: '{}'. ".format(remote_file, str(e)))
Example #8
    def download_remote_sources(self):
        """
        Method that iterates over all sources and downloads those that are specified by a URL rather than a local file name.

        :return: None
        """
        try:
            # try to download old sources from Fedora lookaside cache
            LookasideCacheHelper.download('fedpkg', os.path.dirname(self.path), self.get_package_name())
        except LookasideCacheError as e:
            logger.debug("Downloading sources from lookaside cache failed. "
                         "Reason: '{}'.".format(str(e)))

        # filter out only sources with URL
        remote_files = [source for source in self.sources if bool(urllib.parse.urlparse(source).scheme)]
        # download any sources that are not yet downloaded
        for remote_file in remote_files:
            local_file = os.path.join(self.sources_location, os.path.basename(remote_file))
            if not os.path.isfile(local_file):
                logger.debug("File '%s' doesn't exist locally, downloading it.", local_file)
                try:
                    DownloadHelper.download_file(remote_file, local_file)
                except DownloadError as e:
                    raise RebaseHelperError("Failed to download file from URL {}. "
                                            "Reason: '{}'. ".format(remote_file, str(e)))
Example #9
    def _prepare_spec_objects(self):
        """
        Prepare spec files and initialize objects

        :return: 
        """
        self.rebase_spec_file_path = get_rebase_name(self.results_dir,
                                                     self.spec_file_path)

        self.spec_file = SpecFile(self.spec_file_path,
                                  self.execution_dir,
                                  download=not self.conf.not_download_sources)
        # Check whether test suite is enabled at build time
        if not self.spec_file.is_test_suite_enabled():
            results_store.set_info_text(
                'WARNING', 'Test suite is not enabled at build time.')
        #  create an object representing the rebased SPEC file
        self.rebase_spec_file = self.spec_file.copy(self.rebase_spec_file_path)

        #  check if argument passed as new source is a file or just a version
        if [
                True for ext in Archive.get_supported_archives()
                if self.conf.sources.endswith(ext)
        ]:
            logger.debug("argument passed as a new source is a file")
            self.rebase_spec_file.set_version_using_archive(self.conf.sources)
        else:
            logger.debug("argument passed as a new source is a version")
            version, extra_version, separator = SpecFile.split_version_string(
                self.conf.sources)
            self.rebase_spec_file.set_version(version)
            self.rebase_spec_file.set_extra_version_separator(separator)
            self.rebase_spec_file.set_extra_version(extra_version)
    def _build_srpm(cls, spec, sources, patches, results_dir):
        """
        Builds the SRPM using rpmbuild

        :param spec: absolute path to the SPEC file.
        :param sources: list with absolute paths to SOURCES
        :param patches: list with absolute paths to PATCHES
        :param results_dir: absolute path to DIR where results should be stored
        :return: absolute path to SRPM, list with absolute paths to logs
        """
        # build SRPM
        srpm_results_dir = os.path.join(results_dir, "SRPM")
        with RpmbuildTemporaryEnvironment(sources, patches, spec,
                                          srpm_results_dir) as tmp_env:
            env = tmp_env.env()
            tmp_dir = tmp_env.path()
            tmp_spec = env.get(RpmbuildTemporaryEnvironment.TEMPDIR_SPEC)
            tmp_results_dir = env.get(
                RpmbuildTemporaryEnvironment.TEMPDIR_RESULTS)
            srpm = cls._do_build_srpm(tmp_spec, tmp_dir, tmp_results_dir)

        if srpm is None:
            raise SourcePackageBuildError("Building SRPM failed!")
        else:
            logger.info("Building SRPM finished successfully")

        # srpm path in results_dir
        srpm = os.path.join(srpm_results_dir, os.path.basename(srpm))
        logger.debug("Successfully built SRPM: '%s'", str(srpm))
        # gather logs
        logs = [l for l in PathHelper.find_all_files(srpm_results_dir, '*.log')]
        logger.debug("logs: '%s'", str(logs))

        return srpm, logs
Example #11
    def _prepare_spec_objects(self):
        """
        Prepare spec files and initialize objects
        :return:
        """
        self.rebase_spec_file_path = get_rebase_name(self.spec_file_path)

        self.spec_file = SpecFile(self.spec_file_path,
                                  self.execution_dir,
                                  download=not self.conf.not_download_sources)
        # Check whether test suite is enabled at build time
        if not self.spec_file.is_test_suite_enabled():
            OutputLogger.set_info_text('WARNING', 'Test suite is not enabled at build time.')
        #  create an object representing the rebased SPEC file
        self.rebase_spec_file = self.spec_file.copy(self.rebase_spec_file_path)

        #  check if argument passed as new source is a file or just a version
        if [True for ext in Archive.get_supported_archives() if self.conf.sources.endswith(ext)]:
            logger.debug("argument passed as a new source is a file")
            self.rebase_spec_file.set_version_using_archive(self.conf.sources)
        else:
            logger.debug("argument passed as a new source is a version")
            version, extra_version = SpecFile.split_version_string(self.conf.sources)
            self.rebase_spec_file.set_version(version)
            if extra_version:
                self.rebase_spec_file.set_extra_version(extra_version)
Example #12
    def _get_initial_sources_list(self):
        """Function returns all sources mentioned in SPEC file"""
        # get all regular sources
        sources = []
        tar_sources = []
        sources_list = [x for x in self.spc.sources if x[2] == 1]
        remote_files_re = re.compile(r'(http:|https:|ftp:)//.*')

        for index, src in enumerate(sorted(sources_list, key=lambda source: source[1])):
            # src is a tuple of (source name, source index, source type (Patch or Source))
            # only archive sources need to be downloaded and tracked as tarballs
            abs_path = os.path.join(self.sources_location, os.path.basename(src[0]).strip())
            sources.append(abs_path)
            archive = [x for x in Archive.get_supported_archives() if src[0].endswith(x)]
            # if the source is a remote file, download it
            if archive:
                if remote_files_re.search(src[0]) and self.download and not os.path.isfile(abs_path):
                    logger.debug("Tarball is not in absolute path {} "
                                 "trying to download one from URL {}".format(abs_path, src[0]))
                    logger.info("Tarball is not in absolute path. Trying to download it from URL")
                    try:
                        DownloadHelper.download_file(src[0], abs_path)
                    except DownloadError as e:
                        raise RebaseHelperError("Failed to download file from URL {}. "
                                                "Reason: '{}'. ".format(src[0], str(e)))
                tar_sources.append(abs_path)
        return sources, tar_sources
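
Only sources whose names end with a supported archive suffix are treated as tarballs; a sketch with an assumed suffix list (the real values come from Archive.get_supported_archives()):

SUPPORTED_ARCHIVES = ['.tar.gz', '.tgz', '.tar.bz2', '.tar.xz', '.zip']  # assumed values

def is_archive(source):
    return any(source.endswith(ext) for ext in SUPPORTED_ARCHIVES)

print(is_archive('foo-1.2.3.tar.xz'), is_archive('fix-build.patch'))  # True False
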
Example #13
 def _delete_workspace_dir(self):
     """
     Deletes the workspace directory and logs a message
     :return:
     """
     logger.debug("Removing the workspace directory '%s'", self.workspace_dir)
     shutil.rmtree(self.workspace_dir)
Example #14
    def set_version(self, version):

        """
        Method to update the version in the SPEC file

        :param version: string with new version
        :return: None
        """
        version_re = re.compile(r'Version:\s*(.+)')
        for index, line in enumerate(self.spec_content):
            match = version_re.search(line)
            if match:
                logger.debug("Updating version in SPEC from '%s' with '%s'", self.get_version(), version)

                # search for used macros in spec file scope
                for m in MacroHelper.get_macros(level=-1, used=True):
                    if m['name'] in match.group(1):
                        # redefine the macro, don't touch Version tag
                        self._set_macro(m['name'], version)
                        return

                self.spec_content[index] = line.replace(match.group(1), version)
                break
        #  save changes to the disc
        self.save()
Example #15
    def apply_patch(repo, patch_object):
        """
        Function applies patches to old sources
        It tries apply patch with am command and if it fails
        then with command --apply
        """
        logger.debug('Applying patch with am')

        patch_name = patch_object.get_path()
        patch_strip = patch_object.get_strip()
        try:
            repo.git.am(patch_name)
        except git.GitCommandError:
            logger.debug('Applying patch with git-am failed.')
            try:
                repo.git.apply(patch_name, p=patch_strip)
            except git.GitCommandError:
                repo.git.apply(patch_name,
                               p=patch_strip,
                               reject=True,
                               whitespace='fix')
            repo.git.add(all=True)
            repo.index.commit('Patch: {0}'.format(
                os.path.basename(patch_name)),
                              skip_hooks=True)
        else:
            # replace last commit message with patch name to preserve mapping between commits and patches
            repo.head.reset('HEAD~1', index=False)
            repo.index.commit('Patch: {0}'.format(
                os.path.basename(patch_name)),
                              skip_hooks=True)
Example #16
    def download_file(url, destination_path, timeout=10, blocksize=8192):
        """
        Method for downloading a file from an HTTP, HTTPS or FTP URL.

        :param url: URL from which to download the file
        :param destination_path: path where to store downloaded file
        :param timeout: timeout in seconds for blocking actions like connecting, etc.
        :param blocksize: size in Bytes of blocks used for downloading the file and reporting progress
        :return: None
        """
        try:
            response = urllib.request.urlopen(url, timeout=timeout)
            file_size = int(response.info().get('Content-Length', -1))

            # file exists, check the size
            if os.path.exists(destination_path):
                if file_size < 0 or file_size != os.path.getsize(
                        destination_path):
                    logger.debug(
                        "The destination file '%s' exists, but sizes don't match! Removing it.",
                        destination_path)
                    os.remove(destination_path)
                else:
                    logger.debug(
                        "The destination file '%s' exists, and the size is correct! Skipping download.",
                        destination_path)
                    return
            try:
                with open(destination_path, 'wb') as local_file:
                    logger.info('Downloading file from URL %s', url)
                    download_start = time.time()
                    downloaded = 0

                    # report progress
                    DownloadHelper.progress(file_size, downloaded,
                                            download_start)

                    # do the actual download
                    while True:
                        buffer = response.read(blocksize)

                        # no more data to read
                        if not buffer:
                            break

                        downloaded += len(buffer)
                        local_file.write(buffer)

                        # report progress
                        DownloadHelper.progress(file_size, downloaded,
                                                download_start)

                    sys.stdout.write('\n')
                    sys.stdout.flush()
            except KeyboardInterrupt as e:
                os.remove(destination_path)
                raise e

        except urllib.error.URLError as e:
            raise DownloadError(str(e))
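
The "file exists, check the size" branch can be isolated into a small helper; this sketch assumes the server reports Content-Length:

import os
import urllib.request

def need_download(url, destination_path, timeout=10):
    # Download only when the local file is missing or its size differs from Content-Length.
    if not os.path.exists(destination_path):
        return True
    with urllib.request.urlopen(url, timeout=timeout) as response:
        remote_size = int(response.info().get('Content-Length', -1))
    return remote_size < 0 or remote_size != os.path.getsize(destination_path)
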
Example #17
    def extract_archive(self, path=None):
        """
        Extracts the archive into the given path

        :param path: Path where to extract the archive to.
        :return:
        """
        if path is None:
            raise TypeError("Expected argument 'path' (pos 1) is missing")

        logger.debug("Extracting '%s' into '%s'", self._filename, path)

        try:
            LZMAError = lzma.LZMAError
        except AttributeError:
            LZMAError = lzma.error

        try:
            archive = self._archive_type.open(self._filename)
        except (tarfile.ReadError, LZMAError) as e:
            raise IOError(six.text_type(e))

        self._archive_type.extract(archive, self._filename, path)
        try:
            archive.close()
        except AttributeError:
            # pseudo archive types don't return real file-like object
            pass
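
For plain tar-based archives the same behaviour can be sketched with the standard library alone; tarfile's 'r:*' mode autodetects gz, bz2 and xz compression:

import tarfile

def extract_tarball(filename, path):
    # tarfile raises ReadError for unreadable or non-tar input
    try:
        with tarfile.open(filename, 'r:*') as archive:
            archive.extractall(path)
    except tarfile.ReadError as e:
        raise IOError(str(e))
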
 def _create_directory_sctructure(self):
     # create directory structure
     for dir_name in ['SOURCES', 'SPECS', 'RESULTS']:
         self._env[self.TEMPDIR + '_' + dir_name] = os.path.join(
             self._env[self.TEMPDIR], dir_name)
         logger.debug("Creating '%s'",
                      self._env[self.TEMPDIR + '_' + dir_name])
         os.makedirs(self._env[self.TEMPDIR + '_' + dir_name])
Example #19
 def _create_directory_structure(self):
     # create directory structure
     for dir_name in ['SOURCES', 'SPECS', 'RESULTS']:
         self._env[self.TEMPDIR + '_' + dir_name] = os.path.join(
             self._env[self.TEMPDIR], dir_name)
         logger.debug("Creating '%s'",
                      self._env[self.TEMPDIR + '_' + dir_name])
         os.makedirs(self._env[self.TEMPDIR + '_' + dir_name])
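
The environment dictionary simply maps 'TEMPDIR_<NAME>' keys to freshly created subdirectories; a throwaway sketch using tempfile:

import os
import tempfile

tmp_dir = tempfile.mkdtemp()
env = {'TEMPDIR': tmp_dir}
for dir_name in ['SOURCES', 'SPECS', 'RESULTS']:
    env['TEMPDIR_' + dir_name] = os.path.join(tmp_dir, dir_name)
    os.makedirs(env['TEMPDIR_' + dir_name])
print(sorted(env.keys()))
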
Example #20
 def _write_spec_file_to_disc(self):
     """Write the current SPEC file to the disc"""
     logger.debug("Writing SPEC file '%s' to the disc", self.path)
     try:
         with open(self.path, "w") as f:
             f.writelines(self.spec_content)
     except IOError:
         raise RebaseHelperError("Unable to write updated data to SPEC file '%s'", self.path)
 def commit_patch(git_helper, patch_name):
     """Function commits patched files to git"""
     logger.debug('Commit patch')
     ret_code = git_helper.command_add_files(parameters=['--all'])
     if int(ret_code) != 0:
         raise GitRebaseError('We are not able to add changed files to local git repository.')
     ret_code = git_helper.command_commit(message='Patch: {0}'.format(os.path.basename(patch_name)))
     return ret_code
Example #22
    def _prepare_spec_objects(self):
        """
        Prepare spec files and initialize objects

        :return:
        """
        self.rebase_spec_file_path = get_rebase_name(self.rebased_sources_dir, self.spec_file_path)

        self.spec_file = SpecFile(self.spec_file_path,
                                  self.conf.changelog_entry,
                                  self.execution_dir,
                                  download=not self.conf.not_download_sources)
        # Check whether test suite is enabled at build time
        if not self.spec_file.is_test_suite_enabled():
            results_store.set_info_text('WARNING', 'Test suite is not enabled at build time.')
        # create an object representing the rebased SPEC file
        self.rebase_spec_file = self.spec_file.copy(self.rebase_spec_file_path)

        if not self.conf.sources:
            self.conf.sources = versioneers_runner.run(self.conf.versioneer,
                                                       self.spec_file.get_package_name(),
                                                       self.spec_file.category,
                                                       self.conf.versioneer_blacklist)
            if self.conf.sources:
                logger.info("Determined latest upstream version '%s'", self.conf.sources)
            else:
                raise RebaseHelperError('Could not determine latest upstream version '
                                        'and no SOURCES argument specified!')

        # Prepare rebased_sources_dir
        self.rebased_repo = self._prepare_rebased_repository(self.spec_file.patches,
                                                             self.execution_dir,
                                                             self.rebased_sources_dir)

        # check if argument passed as new source is a file or just a version
        if [True for ext in Archive.get_supported_archives() if self.conf.sources.endswith(ext)]:
            logger.debug("argument passed as a new source is a file")
            self.rebase_spec_file.set_version_using_archive(self.conf.sources)
        else:
            logger.debug("argument passed as a new source is a version")
            version, extra_version, separator = SpecFile.split_version_string(self.conf.sources)
            self.rebase_spec_file.set_version(version)
            self.rebase_spec_file.set_extra_version_separator(separator)
            self.rebase_spec_file.set_extra_version(extra_version)

        if not self.conf.skip_version_check and parse_version(self.rebase_spec_file.get_version()) \
                <= parse_version(self.spec_file.get_version()):
            raise RebaseHelperError("Current version is equal to or newer than the requested version, nothing to do.")

        # run spec hooks
        spec_hooks_runner.run_spec_hooks(self.spec_file, self.rebase_spec_file, **self.kwargs)

        # spec file object has been sanitized, downloading can proceed
        for spec_file in [self.spec_file, self.rebase_spec_file]:
            if spec_file.download:
                spec_file.download_remote_sources()
                # parse spec again with sources downloaded to properly expand %prep section
                spec_file._update_data()  # pylint: disable=protected-access
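
The skip_version_check comparison uses parse_version so that versions are compared numerically rather than lexicographically:

from pkg_resources import parse_version

print(parse_version('1.10.0') > parse_version('1.9.3'))  # True
print('1.10.0' > '1.9.3')                                # False, plain string comparison misorders it
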
Example #23
 def _write_spec_file_to_disc(self):
     """Write the current SPEC file to the disc"""
     logger.debug("Writing SPEC file '%s' to the disc", self.path)
     try:
         with open(self.path, "w") as f:
             f.writelines(self.spec_content)
     except IOError:
         raise RebaseHelperError(
             "Unable to write updated data to SPEC file '%s'", self.path)
Example #24
    def _delete_workspace_dir(self):
        """
        Deletes the workspace directory and logs a message

        :return:
        """
        logger.debug("Removing the workspace directory '%s'", self.workspace_dir)
        if os.path.isdir(self.workspace_dir):
            shutil.rmtree(self.workspace_dir)
Example #25
    def __init__(self, cli_conf=None):
        """
        Initialize the application

        :param cli_conf: CLI object with configuration gathered from commandline
        :return: 
        """
        OutputLogger.clear()

        self.conf = cli_conf

        if self.conf.verbose:
            LoggerHelper.add_stream_handler(logger, logging.DEBUG)
        else:
            LoggerHelper.add_stream_handler(logger, logging.INFO)

        # The directory in which rebase-helper was executed
        if self.conf.results_dir is None:
            self.execution_dir = os.getcwd()
        else:
            self.execution_dir = self.conf.results_dir

        # Temporary workspace for Builder, checks, ...
        self.kwargs['workspace_dir'] = self.workspace_dir = os.path.join(self.execution_dir,
                                                                         settings.REBASE_HELPER_WORKSPACE_DIR)
        # Directory where results should be put
        self.kwargs['results_dir'] = self.results_dir = os.path.join(self.execution_dir,
                                                                     settings.REBASE_HELPER_RESULTS_DIR)

        self.kwargs['non_interactive'] = self.conf.non_interactive
        # if not continuing, check the results dir
        if not self.conf.cont and not self.conf.build_only and not self.conf.comparepkgs:
            self._check_results_dir()
        # This is used if user executes rebase-helper with --continue
        # parameter even when directory does not exist
        if not os.path.exists(self.results_dir):
            os.makedirs(self.results_dir)
            os.makedirs(os.path.join(self.results_dir, settings.REBASE_HELPER_LOGS))

        self._add_debug_log_file()
        self._add_report_log_file()
        logger.debug("Rebase-helper version: %s" % version.VERSION)
        if self.conf.build_tasks is None:
            self._get_spec_file()
            self._prepare_spec_objects()

            # check the workspace dir
            if not self.conf.cont:
                self._check_workspace_dir()

            # TODO: Remove the value from kwargs and use only CLI attribute!
            self.kwargs['continue'] = self.conf.cont
            self._initialize_data()

        if self.conf.cont or self.conf.build_only:
            self._delete_old_builds()
Example #26
    def download_file(url, destination_path, timeout=10, blocksize=8192):
        """
        Method for downloading a file from an HTTP, HTTPS or FTP URL.

        :param url: URL from which to download the file
        :param destination_path: path where to store downloaded file
        :param timeout: timeout in seconds for blocking actions like connecting, etc.
        :param blocksize: size in Bytes of blocks used for downloading the file and reporting progress
        :return: None
        """
        try:
            response = urllib.request.urlopen(url, timeout=timeout)
            file_size = int(response.info().get('Content-Length', -1))

            # file exists, check the size
            if os.path.exists(destination_path):
                if file_size < 0 or file_size != os.path.getsize(destination_path):
                    logger.debug("The destination file '%s' exists, but sizes don't match! Removing it.",
                                 destination_path)
                    os.remove(destination_path)
                else:
                    logger.debug("The destination file '%s' exists, and the size is correct! Skipping download.",
                                 destination_path)
                    return
            try:
                with open(destination_path, 'wb') as local_file:
                    logger.info('Downloading file from URL %s', url)
                    download_start = time.time()
                    downloaded = 0

                    # report progress
                    DownloadHelper.progress(file_size, downloaded, download_start)

                    # do the actual download
                    while True:
                        buffer = response.read(blocksize)

                        # no more data to read
                        if not buffer:
                            break

                        downloaded += len(buffer)
                        local_file.write(buffer)

                        # report progress
                        DownloadHelper.progress(file_size, downloaded, download_start)

                    sys.stdout.write('\n')
                    sys.stdout.flush()
            except KeyboardInterrupt as e:
                os.remove(destination_path)
                raise e

        except urllib.error.URLError as e:
            raise DownloadError(str(e))
Example #27
    def __exit__(self, type, value, traceback):
        # run callback before removing the environment
        try:
            self._exit_callback(**self.env())
        except TypeError:
            pass
        else:
            logger.debug("Exit callback executed successfully")

        shutil.rmtree(self.path(), onerror=lambda func, path, excinfo: shutil.rmtree(path))
        logger.debug("Destroyed environment in '%s'", self.path())
 def _create_directory_sctructure(self):
     # create rpmbuild directory structure
     for dir_name in ['RESULTS', 'rpmbuild']:
         self._env[self.TEMPDIR + '_' + dir_name.upper()] = os.path.join(self._env[self.TEMPDIR], dir_name)
         logger.debug("Creating '%s'", self._env[self.TEMPDIR + '_' + dir_name.upper()])
         os.makedirs(self._env[self.TEMPDIR + '_' + dir_name.upper()])
     for dir_name in ['BUILD', 'BUILDROOT', 'RPMS', 'SOURCES', 'SPECS', 'SRPMS']:
         self._env[self.TEMPDIR + '_' + dir_name] = os.path.join(self._env[self.TEMPDIR_RPMBUILD],
                                                                 dir_name)
         logger.debug("Creating '%s'", self._env[self.TEMPDIR + '_' + dir_name])
         os.makedirs(self._env[self.TEMPDIR + '_' + dir_name])
 def _prepare_git(cls, upstream_name):
     cls.git_helper.command_remote_add(upstream_name, cls.new_sources)
     cls.git_helper.command_fetch(upstream_name)
     cls.output_data = cls.git_helper.command_log(parameters='--pretty=oneline')
     logger.debug('Outputdata from git log %s', cls.output_data)
     number = 0
     if cls.prep_section:
         number = 1
     last_hash = GitHelper.get_commit_hash_log(cls.output_data, number=number)
     init_hash = GitHelper.get_commit_hash_log(cls.output_data, len(cls.output_data)-1)
     return init_hash, last_hash
Example #30
 def commit_patch(git_helper, patch_name):
     """Function commits patched files to git"""
     logger.debug('Commit patch')
     ret_code = git_helper.command_add_files(parameters=['--all'])
     if int(ret_code) != 0:
         raise GitRebaseError(
             'We are not able to add changed files to local git repository.'
         )
     ret_code = git_helper.command_commit(
         message='Patch: {0}'.format(os.path.basename(patch_name)))
     return ret_code
Example #31
    def __exit__(self, type, value, traceback):
        # run callback before removing the environment
        try:
            self._exit_callback(**self.env())
        except TypeError:
            pass
        else:
            logger.debug("Exit callback executed successfully")

        shutil.rmtree(self.path(),
                      onerror=lambda func, path, excinfo: shutil.rmtree(path))
        logger.debug("Destroyed environment in '%s'", self.path())
Example #32
    def __init__(self, cli_conf, execution_dir, results_dir, debug_log_file):
        """
        Initialize the application

        :param cli_conf: CLI object with configuration gathered from commandline
        :return:
        """
        results_store.clear()

        self.conf = cli_conf
        self.execution_dir = execution_dir
        self.rebased_sources_dir = os.path.join(results_dir, 'rebased-sources')

        self.debug_log_file = debug_log_file

        # Temporary workspace for Builder, checks, ...
        self.kwargs['workspace_dir'] = self.workspace_dir = os.path.join(self.execution_dir, constants.WORKSPACE_DIR)
        # Directory where results should be put
        self.kwargs['results_dir'] = self.results_dir = results_dir

        # Directory containing only the files that are relevant for the new rebased version
        self.kwargs['rebased_sources_dir'] = self.rebased_sources_dir

        self.kwargs['non_interactive'] = self.conf.non_interactive

        self.kwargs['changelog_entry'] = self.conf.changelog_entry

        self.kwargs['spec_hook_blacklist'] = self.conf.spec_hook_blacklist

        logger.debug("Rebase-helper version: %s", VERSION)

        if self.conf.build_tasks is None:
            # check the workspace dir
            if not self.conf.cont:
                self._check_workspace_dir()

            self._get_spec_file()
            self._prepare_spec_objects()

            if self.conf.update_sources:
                sources = [os.path.basename(s) for s in self.spec_file.sources]
                rebased_sources = [os.path.basename(s) for s in self.rebase_spec_file.sources]
                uploaded = LookasideCacheHelper.update_sources('fedpkg', self.rebased_sources_dir,
                                                               self.rebase_spec_file.get_package_name(),
                                                               sources, rebased_sources)
                self._update_gitignore(uploaded, self.rebased_sources_dir)

            # TODO: Remove the value from kwargs and use only CLI attribute!
            self.kwargs['continue'] = self.conf.cont
            self._initialize_data()

        if self.conf.cont or self.conf.build_only:
            self._delete_old_builds()
Example #33
    def run_check(cls, results_dir, **kwargs):
        """Compares old and new RPMs using abipkgdiff"""
        # Check if ABI changes occurred
        cls.abi_changes = None
        cls.results_dir = os.path.join(results_dir, cls.NAME)
        os.makedirs(cls.results_dir)
        debug_old, rest_pkgs_old = cls._get_packages_for_abipkgdiff(
            results_store.get_build('old'))
        debug_new, rest_pkgs_new = cls._get_packages_for_abipkgdiff(
            results_store.get_build('new'))
        cmd = [cls.NAME]
        reports = {}
        for pkg in rest_pkgs_old:
            command = list(cmd)
            debug = cls._find_debuginfo(debug_old, pkg)
            if debug:
                command.append('--d1')
                command.append(debug)
            old_name = RpmHelper.split_nevra(os.path.basename(pkg))['name']
            find = [
                x for x in rest_pkgs_new if RpmHelper.split_nevra(
                    os.path.basename(x))['name'] == old_name
            ]
            if not find:
                logger.warning('New version of package %s was not found!',
                               old_name)
                continue
            new_pkg = find[0]
            debug = cls._find_debuginfo(debug_new, new_pkg)
            if debug:
                command.append('--d2')
                command.append(debug)
            command.append(pkg)
            command.append(new_pkg)
            logger.debug('Package name for ABI comparison %s', old_name)
            output = os.path.join(cls.results_dir, old_name + '.txt')
            try:
                ret_code = ProcessHelper.run_subprocess(command,
                                                        output_file=output)
            except OSError:
                raise CheckerNotFoundError(
                    "Checker '{}' was not found or installed.".format(
                        cls.NAME))

            if int(ret_code) & cls.ABIDIFF_ERROR and int(
                    ret_code) & cls.ABIDIFF_USAGE_ERROR:
                raise RebaseHelperError(
                    'Execution of {} failed.\nCommand line is: {}'.format(
                        cls.NAME, cmd))
            reports[old_name] = int(ret_code)
        return dict(packages=cls.parse_abi_logs(reports),
                    abi_changes=cls.abi_changes,
                    path=cls.get_checker_output_dir_short())
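
The abipkgdiff return code is a bit mask; the sketch below interprets it with the bit values documented by libabigail (treat the exact constants as assumptions):

ABIDIFF_ERROR = 1        # assumed value
ABIDIFF_USAGE_ERROR = 2  # assumed value
ABIDIFF_ABI_CHANGE = 4   # assumed value

def interpret(ret_code):
    if ret_code & ABIDIFF_ERROR and ret_code & ABIDIFF_USAGE_ERROR:
        return 'abipkgdiff usage error'
    if ret_code & ABIDIFF_ABI_CHANGE:
        return 'ABI changed'
    return 'no ABI change'

print(interpret(0), interpret(4), interpret(3))
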
Example #34
    def _parse_mock_log(cls, log_name):
        """
        :param log_name: mock logfile
        :return: files which failed
        """
        with open(log_name, 'r') as f:
            lines = f.read()
        if not lines:
            logger.debug('Problem with opening log %s', log_name)
            raise BuildLogAnalyzerMissingError

        return None
Example #35
 def _git_rebase(cls):
     """Function performs git rebase between old and new sources"""
     # in old_sources do:
     # 1) git remote add new_sources <path_to_new_sources>
     # 2) git fetch new_sources
     # 3) git rebase -i --onto new_sources/master <oldest_commit_old_sources> <latest_commit_old_sources>
     if not cls.cont:
         logger.info('Git-rebase operation to %s is ongoing...', os.path.basename(cls.new_sources))
         upstream = 'new_upstream'
         init_hash, last_hash = cls._prepare_git(upstream)
         ret_code = cls.git_helper.command_rebase(parameters='--onto', upstream_name=upstream,
                                                  first_hash=init_hash, last_hash=last_hash)
     else:
         logger.info('Git-rebase operation continues...')
         ret_code = cls.git_helper.command_rebase(parameters='--skip')
     cls._get_git_helper_data()
     logger.debug(cls.output_data)
     modified_patches = []
     deleted_patches = []
     unapplied_patches = []
     while True:
         if int(ret_code) != 0:
             patch_name = cls.git_helper.get_unapplied_patch(cls.output_data)
             logger.info("Git has problems with rebasing patch %s", patch_name)
             if not cls.non_interactive:
                 cls.git_helper.command_mergetool()
             else:
                 unapplied_patches.append(patch_name)
             modified_files = cls.git_helper.command_diff_status()
             cls.git_helper.command_add_files(parameters=modified_files)
             base_name = os.path.join(cls.kwargs['results_dir'], patch_name)
             cls.git_helper.command_diff('HEAD', output_file=base_name)
             with open(base_name, "r") as f:
                 del_patches = f.readlines()
             if not del_patches:
                 deleted_patches.append(base_name)
             else:
                 logger.info('Following files were modified: %s', ','.join(modified_files).decode(defenc))
                 cls.git_helper.command_commit(message=patch_name)
                 cls.git_helper.command_diff('HEAD~1', output_file=base_name)
                 modified_patches.append(base_name)
             if not cls.non_interactive:
                 if not ConsoleHelper.get_message('Do you want to continue with another patch'):
                     raise KeyboardInterrupt
             ret_code = cls.git_helper.command_rebase('--skip')
             cls._get_git_helper_data()
         else:
             break
     deleted_patches = cls._update_deleted_patches(deleted_patches)
     # TODO: correct settings for the merge tool in ~/.gitconfig;
     # currently meld is not started
     return {'modified': modified_patches, 'deleted': deleted_patches, 'unapplied': unapplied_patches}
Example #36
    def patch(self, old_dir, new_dir, rest_sources, patches, prep, **kwargs):
        """
        Apply patches and generate rebased patches if needed

        :param old_dir: path to dir with old patches
        :param new_dir: path to dir with new patches
        :param rest_sources: rest of the source files
        :param patches: old patches
        :param prep: %prep section
        :param kwargs: --
        :return:
        """
        logger.debug("Patching source by patch tool %s", self._patch_tool_name)
        return self._tool.run_patch(old_dir, new_dir, rest_sources, patches, prep, **kwargs)
    def patch(self, old_dir, new_dir, rest_sources, git_helper, patches, prep, **kwargs):
        """
        Apply patches and generate rebased patches if needed

        :param old_dir: path to dir with old patches
        :param new_dir: path to dir with new patches
        :param rest_sources: rest of the source files
        :param git_helper: GitHelper instance to use
        :param patches: old patches
        :param prep: %prep section
        :param kwargs: --
        :return:
        """
        logger.debug("Patching source by patch tool %s", self._patch_tool_name)
        return self._tool.run_patch(old_dir, new_dir, rest_sources, git_helper, patches, prep, **kwargs)
Example #38
 def _prepare_git(cls, upstream_name):
     cls.git_helper.command_remote_add(upstream_name, cls.new_sources)
     cls.git_helper.command_fetch(upstream_name)
     cls.output_data = cls.git_helper.command_log(
         parameters='--pretty=oneline')
     logger.debug('Outputdata from git log %s', cls.output_data)
     number = 0
     if cls.prep_section:
         number = 1
     last_hash = GitHelper.get_commit_hash_log(cls.output_data,
                                               number=number)
     init_hash = GitHelper.get_commit_hash_log(cls.output_data,
                                               len(cls.output_data) - 1)
     return init_hash, last_hash
Example #39
    def build(cls, spec, sources, patches, results_dir, **kwargs):
        """
        Builds the SRPM and RPMs using rpmbuild

        :param spec: absolute path to the SPEC file.
        :param sources: list with absolute paths to SOURCES
        :param patches: list with absolute paths to PATCHES
        :param results_dir: absolute path to DIR where results should be stored
        :return: dict with:
                 'srpm' -> absolute path to SRPM
                 'rpm' -> list with absolute paths to RPMs
                 'logs' -> list with absolute paths to build_logs
        """
        # build SRPM
        srpm, cls.logs = cls._build_srpm(spec, sources, patches, results_dir)

        # build RPMs
        rpm_results_dir = os.path.join(results_dir, "RPM")
        with RpmbuildTemporaryEnvironment(sources, patches, spec,
                                          rpm_results_dir) as tmp_env:
            env = tmp_env.env()
            tmp_dir = tmp_env.path()
            tmp_results_dir = env.get(
                RpmbuildTemporaryEnvironment.TEMPDIR_RESULTS)
            rpms = cls._build_rpm(
                srpm,
                tmp_dir,
                tmp_results_dir,
                builder_options=cls.get_builder_options(**kwargs))

        if rpms is None:
            cls.logs.extend([
                l for l in PathHelper.find_all_files(rpm_results_dir, '*.log')
            ])
            raise BinaryPackageBuildError("Building RPMs failed!")
        else:
            logger.info("Building RPMs finished successfully")

        # RPMs paths in results_dir
        rpms = [
            os.path.join(rpm_results_dir, os.path.basename(f)) for f in rpms
        ]
        logger.debug("Successfully built RPMs: '%s'", str(rpms))

        # gather logs
        cls.logs.extend(
            [l for l in PathHelper.find_all_files(rpm_results_dir, '*.log')])
        logger.debug("logs: '%s'", str(cls.logs))

        return {'srpm': srpm, 'rpm': rpms, 'logs': cls.logs}
    def run_check(cls, result_dir):
        """Compares old and new RPMs using abipkgdiff"""
        debug_old, rest_pkgs_old = cls._get_packages_for_abipkgdiff(
            results_store.get_build('old'))
        debug_new, rest_pkgs_new = cls._get_packages_for_abipkgdiff(
            results_store.get_build('new'))
        cmd = [cls.CMD]
        reports = {}
        for pkg in rest_pkgs_old:
            command = list(cmd)
            debug = cls._find_debuginfo(debug_old, pkg)
            if debug:
                command.append('--d1')
                command.append(debug)
            old_name = RpmHelper.split_nevra(os.path.basename(pkg))['name']
            find = [
                x for x in rest_pkgs_new if RpmHelper.split_nevra(
                    os.path.basename(x))['name'] == old_name
            ]
            if not find:
                logger.warning('New version of package %s was not found!',
                               old_name)
                continue
            new_pkg = find[0]
            debug = cls._find_debuginfo(debug_new, new_pkg)
            if debug:
                command.append('--d2')
                command.append(debug)
            command.append(pkg)
            command.append(new_pkg)
            logger.debug('Package name for ABI comparison %s', old_name)
            output = os.path.join(cls.results_dir, result_dir,
                                  old_name + '-' + cls.log_name)
            try:
                ret_code = ProcessHelper.run_subprocess(command, output=output)
            except OSError:
                raise CheckerNotFoundError(
                    "Checker '%s' was not found or installed." % cls.CMD)

            if int(ret_code) & settings.ABIDIFF_ERROR and int(
                    ret_code) & settings.ABIDIFF_USAGE_ERROR:
                raise RebaseHelperError(
                    'Execution of %s failed.\nCommand line is: %s' %
                    (cls.CMD, cmd))
            if int(ret_code) == 0:
                text = 'ABI of the compared binaries in package %s are equal.' % old_name
            else:
                text = 'ABI of the compared binaries in package %s are not equal.' % old_name
            reports[output] = text
        return reports
Example #41
    def run():
        debug_log_file = None
        try:
            # be verbose until debug_log_file is created
            handler = LoggerHelper.add_stream_handler(logger, logging.DEBUG)
            if "--builder-options" in sys.argv[1:]:
                raise RebaseHelperError(
                    "Wrong format of --builder-options. It must be in the following form:"
                    ' --builder-options="--desired-builder-option".'
                )
            cli = CLI()
            execution_dir, results_dir, debug_log_file, report_log_file = Application.setup(cli)
            if not cli.verbose:
                handler.setLevel(logging.INFO)
            app = Application(cli, execution_dir, results_dir, debug_log_file, report_log_file)
            app.run()
        except KeyboardInterrupt:
            logger.info("\nInterrupted by user")
        except RebaseHelperError as e:
            if e.args:
                logger.error("\n%s", e.args[0] % e.args[1:])
            else:
                logger.error("\n%s", six.text_type(e))
            sys.exit(1)
        except SystemExit as e:
            sys.exit(e.code)
        except BaseException:
            if debug_log_file:
                logger.error(
                    "\nrebase-helper failed due to an unexpected error. Please report this problem"
                    "\nusing the following link: %s"
                    "\nand include the content of"
                    "\n'%s'"
                    "\nfile in the report."
                    "\nThank you!",
                    NEW_ISSUE_LINK,
                    debug_log_file,
                )
            else:
                logger.error(
                    "\nrebase-helper failed due to an unexpected error. Please report this problem"
                    "\nusing the following link: %s"
                    "\nand include the traceback following this message in the report."
                    "\nThank you!",
                    NEW_ISSUE_LINK,
                )
            logger.debug("\n", exc_info=1)
            sys.exit(1)

        sys.exit(0)
Example #42
    def extract(self, path=None):
        """
        Extracts the archive into the given path

        :param path: Path where to extract the archive to.
        :return:
        """
        if path is None:
            raise TypeError("Expected argument 'path' (pos 1) is missing")

        logger.debug("Extracting '%s' into '%s'", self._filename, path)

        archive = self._archive_type.open(self._filename)
        archive.extractall(path)
        archive.close()
    def _get_package_version(self):

        """ Get package and version from fedmsg  """
        try:
            rebase_helper_msg = ast.literal_eval(self.msg['msg']['log'].encode('utf-8'))
        except ValueError:
            logger.debug('Wrong value in request from upstream monitoring service')
            return
        except SyntaxError:
            logger.debug('Wrong request from upstream monitoring service')
            return
        self.package = rebase_helper_msg.get('package')
        logger.info('Package %s', self.package)
        self.version = rebase_helper_msg.get('version')
        self.arguments.append(self.version)
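
ast.literal_eval safely parses the repr-style payload without executing it; a sketch with a hypothetical message fragment:

import ast

raw = "{'package': 'foo', 'version': '1.2.3'}"  # hypothetical fedmsg log payload
msg = ast.literal_eval(raw)
print(msg.get('package'), msg.get('version'))  # foo 1.2.3
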
Example #44
    def set_release_number(self, release):
        """
        Method to set release number

        :param release: 
        :return: 
        """
        for index, line in enumerate(self.spec_content):
            if line.startswith('Release:'):
                new_release_line = re.sub(r'(Release:\s*)[0-9.]+(.*%{\?dist}\s*)', r'\g<1>{0}\2'.format(release),
                                          line)
                logger.debug("Changing release line to '%s'", new_release_line.strip())
                self.spec_content[index] = new_release_line
                self.save()
                break
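
The substitution keeps the 'Release:' tag and the '%{?dist}' suffix and swaps only the number; a quick check on a hypothetical line:

import re

line = 'Release: 3%{?dist}\n'
new_line = re.sub(r'(Release:\s*)[0-9.]+(.*%{\?dist}\s*)', r'\g<1>{0}\2'.format('1'), line)
print(new_line, end='')  # Release: 1%{?dist}
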
Example #45
    def run():
        debug_log_file = None
        try:
            # be verbose until debug_log_file is created
            handler = LoggerHelper.add_stream_handler(logger, logging.DEBUG)
            if "--builder-options" in sys.argv[1:]:
                raise RebaseHelperError(
                    'Wrong format of --builder-options. It must be in the following form:'
                    ' --builder-options="--desired-builder-option".')
            cli = CLI()
            if cli.version:
                logger.info(VERSION)
                sys.exit(0)
            ConsoleHelper.use_colors = ConsoleHelper.should_use_colors(cli)
            execution_dir, results_dir, debug_log_file = Application.setup(cli)
            if not cli.verbose:
                handler.setLevel(logging.INFO)
            app = Application(cli, execution_dir, results_dir, debug_log_file)
            app.run()
        except KeyboardInterrupt:
            logger.info('\nInterrupted by user')
        except RebaseHelperError as e:
            if e.msg:
                logger.error('\n%s', e.msg)
            else:
                logger.error('\n%s', six.text_type(e))
            sys.exit(1)
        except SystemExit as e:
            sys.exit(e.code)
        except BaseException:
            if debug_log_file:
                logger.error(
                    '\nrebase-helper failed due to an unexpected error. Please report this problem'
                    '\nusing the following link: %s'
                    '\nand include the content of'
                    '\n\'%s\''
                    '\nfile in the report.'
                    '\nThank you!', NEW_ISSUE_LINK, debug_log_file)
            else:
                logger.error(
                    '\nrebase-helper failed due to an unexpected error. Please report this problem'
                    '\nusing the following link: %s'
                    '\nand include the traceback following this message in the report.'
                    '\nThank you!', NEW_ISSUE_LINK)
            logger.debug('\n', exc_info=1)
            sys.exit(1)

        sys.exit(0)
Example #46
    def set_version(self, version):

        """
        Method to update the version in the SPEC file

        :param version: string with new version
        :return: None
        """
        for index, line in enumerate(self.spec_content):
            if not line.startswith('Version'):
                continue
            logger.debug("Updating version in SPEC from '%s' with '%s'", self.get_version(), version)
            self.spec_content[index] = line.replace(self.get_version(), version)
            break
        #  save changes to the disc
        self.save()
Example #47
    def _create_directory_structure(self):
        # create rpmbuild directory structure
        for dir_name in ['RESULTS', 'rpmbuild']:
            self._env[self.TEMPDIR + '_' + dir_name.upper()] = os.path.join(
                self._env[self.TEMPDIR], dir_name)
            logger.debug("Creating '%s'",
                         self._env[self.TEMPDIR + '_' + dir_name.upper()])
            os.makedirs(self._env[self.TEMPDIR + '_' + dir_name.upper()])
        for dir_name in ['BUILD', 'BUILDROOT', 'RPMS', 'SOURCES', 'SPECS', 'SRPMS']:
            self._env[self.TEMPDIR + '_' + dir_name] = os.path.join(
                self._env[self.TEMPDIR_RPMBUILD], dir_name)
            logger.debug("Creating '%s'",
                         self._env[self.TEMPDIR + '_' + dir_name])
            os.makedirs(self._env[self.TEMPDIR + '_' + dir_name])
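
A standalone sketch of the layout this method creates, using a plain dict and tempfile instead of the TemporaryEnvironment machinery; the key names mirror the code above but are purely illustrative:

import os
import tempfile

env = {'TEMPDIR': tempfile.mkdtemp()}
# top-level directories
for dir_name in ['RESULTS', 'rpmbuild']:
    env['TEMPDIR_' + dir_name.upper()] = os.path.join(env['TEMPDIR'], dir_name)
    os.makedirs(env['TEMPDIR_' + dir_name.upper()])
# standard rpmbuild subdirectories
for dir_name in ['BUILD', 'BUILDROOT', 'RPMS', 'SOURCES', 'SPECS', 'SRPMS']:
    env['TEMPDIR_' + dir_name] = os.path.join(env['TEMPDIR_RPMBUILD'], dir_name)
    os.makedirs(env['TEMPDIR_' + dir_name])
# resulting tree: <tmpdir>/RESULTS and <tmpdir>/rpmbuild/{BUILD,BUILDROOT,RPMS,SOURCES,SPECS,SRPMS}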
Exemple #48
0
    def _parse_mock_log(cls, log_name):
        """
        :param log_name: mock logfile
        :return: files which failed
        """
        files = {'missing': [], 'deleted': []}

        with open(log_name, 'r') as f:
            lines = f.read()
        if not lines:
            logger.debug('Problem with opening log %s', log_name)
            raise BuildLogAnalyzerMissingError

        return None
Exemple #49
0
    def fill_dictionary(cls, result_dir, old_version=None, new_version=None):
        """
        Parses files.xml and symbols.xml and fills the results dictionary
        :param result_dir: directory where the pkgdiff XML files are stored
        :param old_version: old version of the package
        :param new_version: new version of the package
        :return:
        """
        XML_FILES = ['files.xml', 'symbols.xml']
        if old_version is None:
            old_version = results_store.get_old_build().get('version')
            if not old_version:
                old_version = cls._get_rpm_info(
                    'version',
                    results_store.get_old_build()['rpm'])
        if new_version is None:
            new_version = results_store.get_new_build().get('version')
            if not new_version:
                new_version = cls._get_rpm_info(
                    'version',
                    results_store.get_new_build()['rpm'])

        for tag in cls.CHECKER_TAGS:
            cls.results_dict[tag] = []
        for file_name in [os.path.join(result_dir, x) for x in XML_FILES]:
            logger.debug('Processing %s file.', file_name)
            try:
                with open(file_name, "r") as f:
                    lines = ['<pkgdiff>']
                    lines.extend(f.readlines())
                    lines.append('</pkgdiff>')
                    pkgdiff_tree = ElementTree.fromstringlist(lines)
                    for tag in cls.CHECKER_TAGS:
                        for pkgdiff in pkgdiff_tree.findall('.//' + tag):
                            files = [
                                x.strip()
                                for x in pkgdiff.text.strip().split('\n')
                            ]
                            files = [
                                x.replace(old_version, '*') for x in files
                            ]
                            files = [
                                x.replace(new_version, '*') for x in files
                            ]
                            cls.results_dict[tag].extend(files)
            except IOError:
                continue
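
The try block above works because the per-tag report fragments are wrapped in a synthetic <pkgdiff> root before parsing, so ElementTree sees a single well-formed document. A minimal illustration of that trick with a made-up report fragment:

from xml.etree import ElementTree

# made-up fragment standing in for the body of files.xml / symbols.xml
report_lines = [
    '<added>\n',
    '  /usr/share/doc/example-1.1/README\n',
    '</added>\n',
]
lines = ['<pkgdiff>']
lines.extend(report_lines)
lines.append('</pkgdiff>')
pkgdiff_tree = ElementTree.fromstringlist(lines)
for node in pkgdiff_tree.findall('.//added'):
    print([x.strip() for x in node.text.strip().split('\n')])
# -> ['/usr/share/doc/example-1.1/README']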
Exemple #50
0
    def set_release_number(self, release):
        """
        Method to set release number

        :param release:
        :return:
        """
        for index, line in enumerate(self.spec_content):
            if line.startswith('Release:'):
                new_release_line = re.sub(
                    r'(Release:\s*)[0-9.]+(.*%{\?dist}\s*)',
                    r'\g<1>{0}\2'.format(release), line)
                logger.debug("Changing release line to '%s'",
                             new_release_line.strip())
                self.spec_content[index] = new_release_line
                self.save()
                break
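
A quick standalone check of the substitution used above; the Release line and the new release number are made up for illustration:

import re

line = 'Release: 1%{?dist}\n'
release = '33'
new_release_line = re.sub(r'(Release:\s*)[0-9.]+(.*%{\?dist}\s*)',
                          r'\g<1>{0}\2'.format(release), line)
print(new_release_line)  # -> 'Release: 33%{?dist}\n'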
Exemple #51
0
    def redefine_release_with_macro(self, macro):
        """
        Method redefines the Release: line to include passed macro and comments out the old line

        :param macro: 
        :return: 
        """
        for index, line in enumerate(self.spec_content):
            if line.startswith('Release:'):
                new_release_line = re.sub(r'(Release:\s*[0-9.]*[0-9]+).*(%{\?dist}\s*)', r'\g<1>.{0}\2'.format(macro),
                                          line)
                logger.debug("Commenting out original Release line '%s'", line.strip())
                self.spec_content[index] = '#{0}'.format(line)
                logger.debug("Inserting new Release line '%s'", new_release_line.strip())
                self.spec_content.insert(index + 1, new_release_line)
                self.save()
                break
Exemple #52
0
    def build(cls, spec, sources, patches, results_dir, root=None, arch=None, **kwargs):
        """
        Builds the SRPM and RPM using mock

        :param spec: absolute path to a SPEC file
        :param sources: list with absolute paths to SOURCES
        :param patches: list with absolute paths to PATCHES
        :param results_dir: absolute path to directory where results will be stored
        :param root: mock root used for building
        :param arch: architecture to build the RPM for
        :return: dict with:
                 'srpm' -> absolute path to SRPM
                 'rpm' -> list with absolute paths to RPMs
                 'logs' -> list with absolute paths to logs
        """
        # build SRPM
        srpm, cls.logs = cls._build_srpm(spec, sources, patches, results_dir)

        # build RPMs
        rpm_results_dir = os.path.join(results_dir, "RPM")
        with MockTemporaryEnvironment(sources, patches, spec, rpm_results_dir) as tmp_env:
            env = tmp_env.env()
            tmp_results_dir = env.get(MockTemporaryEnvironment.TEMPDIR_RESULTS)
            rpms = cls._build_rpm(srpm, tmp_results_dir)
            # remove SRPM - side product of building RPM
            tmp_srpm = PathHelper.find_first_file(tmp_results_dir, "*.src.rpm")
            if tmp_srpm is not None:
                os.unlink(tmp_srpm)

        if rpms is None:
            # We need to be informed which directory to analyze and which spec file failed
            cls.logs.extend([l for l in PathHelper.find_all_files(rpm_results_dir, '*.log')])
            raise BinaryPackageBuildError("Building RPMs failed!", rpm_results_dir, spec)
        else:
            logger.info("Building RPMs finished successfully")

        rpms = [os.path.join(rpm_results_dir, os.path.basename(f)) for f in rpms]
        logger.debug("Successfully built RPMs: '%s'", str(rpms))

        # gather logs
        cls.logs.extend([l for l in PathHelper.find_all_files(rpm_results_dir, '*.log')])
        logger.debug("logs: '%s'", str(cls.logs))

        return {'srpm': srpm,
                'rpm': rpms,
                'logs': cls.logs}
Exemple #53
0
    def split_version_string(version_string=''):
        """
        Method splits version string into version and possibly extra string as 'rc1' or 'b1', ...

        :param version_string: version string such as '1.1.1' or '1.2.3b1', ...
        :return: tuple of strings with (extracted version, extra version) or (None, None) if extraction failed
        """
        version_split_regex_str = r'([.0-9]+)(\w*)'
        version_split_regex = re.compile(version_split_regex_str)
        logger.debug("Splitting string '%s'", version_string)
        match = version_split_regex.search(version_string)
        if match:
            version = match.group(1)
            extra_version = match.group(2)
            logger.debug("Divided version '%s' and extra string '%s'", version, extra_version)
            return version, extra_version
        else:
            return None, None
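
A short illustration of what the split regex extracts, using sample version strings:

import re

version_split_regex = re.compile(r'([.0-9]+)(\w*)')
for version_string in ['1.1.1', '1.2.3b1', '0.9rc2']:
    match = version_split_regex.search(version_string)
    print(match.groups())
# -> ('1.1.1', ''), ('1.2.3', 'b1'), ('0.9', 'rc2')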
Exemple #54
0
    def _build_env_exit_callback(self, results_dir, **kwargs):
        """
        The function that is called just before the destruction of the TemporaryEnvironment.
        It copies packages and logs into the results directory.

        :param results_dir: absolute path to results directory
        :return: 
        """
        os.makedirs(results_dir)
        log_message = "Copying '%s' '%s' to '%s'"
        # copy logs
        for log in PathHelper.find_all_files(kwargs[self.TEMPDIR_RESULTS], '*.log'):
            logger.debug(log_message, 'log', log, results_dir)
            shutil.copy(log, results_dir)
        # copy packages
        for package in PathHelper.find_all_files(kwargs[self.TEMPDIR], '*.rpm'):
            logger.debug(log_message, 'package', package, results_dir)
            shutil.copy(package, results_dir)
Exemple #55
0
    def _update_data(self):
        """
        Function updates data from given SPEC file
        :return:
        """
        # Load rpm information
        self.spc = rpm.spec(self.path)
        # HEADER of SPEC file
        self.hdr = self.spc.sourceHeader
        # split the SPEC file into sections
        self.rpm_sections = self._split_sections()
        # determine the extra version from Source0
        logger.debug("Updating the extra version")
        # all source files mentioned in the SPEC file (Source[0-9]*)
        self.sources = self._get_initial_sources_list()
        self.extra_version = SpecFile.extract_version_from_archive_name(self.get_archive(),
                                                                        self._get_raw_source_string(0))[1]
        self.patches = self._get_initial_patches_list()
Exemple #56
0
    def _analyze_logs(cls, output, results_dict):
        removed_things = ['.build-id', '.dwz', 'PROVIDE', 'REQUIRES']
        for line in output:
            if [x for x in removed_things if x in line]:
                continue

            fields = line.strip().split()
            logger.debug(fields)
            if line.startswith('removed'):
                results_dict['removed'].append(fields[1])
                continue
            if line.startswith('added'):
                results_dict['added'].append(fields[1])
                continue

            if re.match(r'(S..|..5)........', fields[0]):
                # size or checksum changed
                results_dict['changed'].append(fields[1])
        return results_dict
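
A standalone check of the prefix tests and the first-column regexp above, fed with made-up lines in the 'flags path' format the method expects:

import re

sample = [
    'removed     /usr/bin/old-tool',
    'added       /usr/bin/new-tool',
    'S.5........ /usr/lib64/libexample.so.1',
    '.......T... /usr/share/doc/example/README',
]
results_dict = {'removed': [], 'added': [], 'changed': []}
for line in sample:
    fields = line.strip().split()
    if line.startswith('removed'):
        results_dict['removed'].append(fields[1])
        continue
    if line.startswith('added'):
        results_dict['added'].append(fields[1])
        continue
    if re.match(r'(S..|..5)........', fields[0]):
        # size or checksum changed
        results_dict['changed'].append(fields[1])
print(results_dict)
# -> only libexample.so.1 lands in 'changed'; the timestamp-only change is ignored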
Exemple #57
0
    def extract_archive(self, path=None):
        """
        Extracts the archive into the given path

        :param path: Path where to extract the archive to.
        :return: 
        """
        if path is None:
            raise TypeError("Expected argument 'path' (pos 1) is missing")

        logger.debug("Extracting '%s' into '%s'", self._filename, path)

        archive = self._archive_type.open(self._filename)
        self._archive_type.extract(archive, self._filename, path)
        try:
            archive.close()
        except AttributeError:
            # pseudo archive types don't return real file-like object
            pass
Exemple #59
0
    def build(cls, spec, results_dir, srpm, **kwargs):
        """
        Builds the RPMs using mock

        :param spec: SpecFile object
        :param results_dir: absolute path to directory where results will be stored
        :param srpm: absolute path to SRPM
        :param root: mock root used for building
        :param arch: architecture to build the RPM for
        :return: dict with:
                 'rpm' -> list with absolute paths to RPMs
                 'logs' -> list with absolute paths to logs
        """
        rpm_results_dir = os.path.join(results_dir, "RPM")
        sources = spec.get_sources()
        patches = [p.get_path() for p in spec.get_patches()]
        with MockTemporaryEnvironment(sources, patches, spec.get_path(),
                                      rpm_results_dir) as tmp_env:
            env = tmp_env.env()
            tmp_results_dir = env.get(MockTemporaryEnvironment.TEMPDIR_RESULTS)
            rpms = cls._build_rpm(
                srpm,
                tmp_results_dir,
                rpm_results_dir,
                builder_options=cls.get_builder_options(**kwargs))
            # remove SRPM - side product of building RPM
            tmp_srpm = PathHelper.find_first_file(tmp_results_dir, "*.src.rpm")
            if tmp_srpm is not None:
                os.unlink(tmp_srpm)

        logger.info("Building RPMs finished successfully")

        rpms = [
            os.path.join(rpm_results_dir, os.path.basename(f)) for f in rpms
        ]
        logger.debug("Successfully built RPMs: '%s'", str(rpms))

        # gather logs
        cls.logs.extend(
            [l for l in PathHelper.find_all_files(rpm_results_dir, '*.log')])
        logger.debug("logs: '%s'", str(cls.logs))

        return dict(rpm=rpms, logs=cls.logs)
Exemple #60
0
    def _analyze_logs(cls, output, results_dict):
        removed_things = ['.build-id', '.dwz', 'PROVIDE', 'REQUIRES']
        for line in output:
            if [x for x in removed_things if x in line]:
                continue

            fields = line.strip().split()
            logger.debug(fields)
            if 'removed' in line:
                results_dict['removed'].append(fields[1])
                continue
            if 'added' in line:
                results_dict['added'].append(fields[1])
                continue
            # flags column like 'S.5........' means size and checksum changed
            regexp = r'(S)+\.(5)+\.\.\.\.\.\.\.\.'
            match = re.search(regexp, fields[0])
            if match:
                results_dict['changed'].append(fields[1])
        return results_dict