Example #1
    def extract_version_from_archive_name(archive_path: str, main_source: str) -> str:
        """Extracts version string from source archive name.

        Args:
            archive_path: Path to the main sources archive.
            main_source: Value of Source0 tag.

        Returns:
            Extracted version string.

        Raises:
            RebaseHelperError: If the version cannot be determined.

        """
        fallback_regex = r'\w*[-_]?v?([.\d]+.*)({0})'.format(
            '|'.join([re.escape(a) for a in Archive.get_supported_archives()]))
        source = os.path.basename(main_source)
        regex = re.sub(r'%({)?version(?(1)})(.*%(\w+|{.+}))?', 'PLACEHOLDER', source, flags=re.IGNORECASE)
        regex = MacroHelper.expand(regex, regex)
        regex = re.escape(regex).replace('PLACEHOLDER', r'(.+)')
        if regex == re.escape(MacroHelper.expand(source, source)):
            # no substitution was made, use the fallback regex
            regex = fallback_regex
        logger.debug('Extracting version from archive name using %s', regex)
        archive_name = os.path.basename(archive_path)
        m = re.match(regex, archive_name)
        if m:
            logger.debug('Extracted version %s', m.group(1))
            return m.group(1)
        if regex != fallback_regex:
            m = re.match(fallback_regex, archive_name)
            if m:
                logger.debug('Extracted version %s', m.group(1))
                return m.group(1)
        raise RebaseHelperError('Unable to extract version from archive name')
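
To see the fallback path in isolation, here is a minimal, self-contained sketch; the suffix list is a hypothetical stand-in for Archive.get_supported_archives().

    import os
    import re

    SUPPORTED_ARCHIVES = ['.tar.gz', '.tar.xz', '.zip']  # assumed subset, for illustration only

    fallback_regex = r'\w*[-_]?v?([.\d]+.*)({0})'.format(
        '|'.join(re.escape(a) for a in SUPPORTED_ARCHIVES))

    m = re.match(fallback_regex, os.path.basename('/tmp/foo-1.2.3.tar.gz'))
    print(m.group(1) if m else None)  # prints '1.2.3'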
Example #2
 def _get_instructions(cls, comments, old_version, new_version):
     """Extract instructions from comments, update version if necessary"""
     instructions = []
     for comment in comments:
         comment = MacroHelper.expand(comment, comment)
         comment = re.sub(r'^#\s*', '', comment)
         comment = comment.replace(old_version, new_version)
         instructions.append(comment)
     return instructions
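
The transformation is mostly plain string handling; a sketch of the comment stripping and version bump, with the MacroHelper.expand step omitted (strip_instruction is just an illustrative name):

    import re

    def strip_instruction(comment, old_version, new_version):
        comment = re.sub(r'^#\s*', '', comment)           # drop the leading '#'
        return comment.replace(old_version, new_version)  # bump the version

    print(strip_instruction('# download foo-1.0.tar.gz from upstream', '1.0', '2.0'))
    # prints 'download foo-2.0.tar.gz from upstream'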
Example #3
    def _parse_list_tags(cls, section: str, section_content: List[str],
                         parsed: List[str], section_index: int,
                         next_source_index: int,
                         next_patch_index: int) -> Tuple[List[Tag], int, int]:
        """Parses all tags in a %sourcelist or %patchlist section.

        Only parses tags that are valid (that is, present in parsed); nothing more
        can be detected consistently.

        Follows how rpm works: new Source/Patch tags are indexed starting from
        the last parsed Source/Patch tag.

        """
        tag = 'Source' if section == '%sourcelist' else 'Patch'
        result = []
        for i, line in enumerate(section_content):
            expanded = MacroHelper.expand(line)
            is_comment = SpecContent.get_comment_span(line,
                                                      section)[0] != len(line)
            if not expanded or not line or is_comment or not [
                    p for p in parsed if p == expanded.rstrip()
            ]:
                continue
            tag_name, tag_index, next_source_index, next_patch_index = cls._sanitize_tag(
                tag, next_source_index, next_patch_index)
            result.append(
                Tag(section_index, section, i, tag_name, (0, len(line)), True,
                    tag_index))

        return result, next_source_index, next_patch_index
Example #4
    def get_new_log(self, changelog_entry):
        """Constructs a new changelog entry.

        Args:
            changelog_entry (str): Message to use in the entry.

        Returns:
            list: List of lines of the new entry.

        """
        new_record = []
        today = date.today()
        evr = '{epoch}:{ver}-{rel}'.format(epoch=self.header.epochnum,
                                           ver=self.header.version,
                                           rel=self.get_release())
        evr = evr[2:] if evr.startswith('0:') else evr
        new_record.append('* {day} {name} <{email}> - {evr}'.format(day=today.strftime('%a %b %d %Y'),
                                                                    name=GitHelper.get_user(),
                                                                    email=GitHelper.get_email(),
                                                                    evr=evr))
        self.update()
        # FIXME: ugly workaround for mysterious rpm bug causing macros to disappear
        self.update()
        new_record.append(MacroHelper.expand(changelog_entry, changelog_entry))
        new_record.append('')
        return new_record
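
The changelog header line itself only needs the standard library; the name, email and EVR below are hypothetical placeholders for what GitHelper and the RPM header would normally supply.

    from datetime import date

    name, email = 'Jane Doe', 'jane@example.com'    # placeholders for GitHelper.get_user()/get_email()
    evr = '0:2.1.0-1'                               # placeholder for epoch:version-release
    evr = evr[2:] if evr.startswith('0:') else evr  # a zero epoch is conventionally omitted

    print('* {day} {name} <{email}> - {evr}'.format(
        day=date.today().strftime('%a %b %d %Y'), name=name, email=email, evr=evr))
    # e.g. '* Tue Jan 01 2030 Jane Doe <jane@example.com> - 2.1.0-1'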
Example #5
    def get_setup_dirname(self):
        """
        Get dirname from %setup or %autosetup macro arguments

        :return: dirname
        """
        parser = self._get_setup_parser()

        prep = self.spec_content.section('%prep')
        if not prep:
            return None

        for line in prep:
            if line.startswith('%setup') or line.startswith('%autosetup'):
                args = shlex.split(line)
                args = [MacroHelper.expand(a, '') for a in args[1:]]

                # parse macro arguments
                try:
                    ns, _ = parser.parse_known_args(args)
                except ParseError:
                    continue

                # check if this macro instance is extracting Source0
                if not ns.T or ns.a == 0 or ns.b == 0:
                    return ns.n

        return None
Example #6
    def _get_best_matching_files_section(cls, rebase_spec_file, file):
        """Finds a %files section with a file that has the closest match with
        the specified file. If the best match cannot be determined, the main
        %files section is returned.

        Args:
            rebase_spec_file (specfile.SpecFile): Rebased SpecFile object.
            file (str): Path to the file to be classified.

        Returns:
            str: Name of the section containing the closest matching file.

        """
        best_match = ''
        best_match_section = ''
        for sec_name, sec_content in rebase_spec_file.spec_content.sections:
            if sec_name.startswith('%files'):
                for line in sec_content:
                    new_best_match = difflib.get_close_matches(file, [best_match, MacroHelper.expand(line)])
                    if new_best_match:
                        # the new match is a closer match
                        if new_best_match[0] != best_match:
                            best_match = new_best_match[0]
                            best_match_section = sec_name

        return best_match_section or rebase_spec_file.get_main_files_section()
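
The heavy lifting is done by difflib.get_close_matches; a minimal sketch with hardcoded, already-expanded %files lines standing in for the real section content:

    import difflib

    sections = {
        '%files': ['/usr/bin/foo', '/usr/share/man/man1/foo.1*'],
        '%files devel': ['/usr/include/foo.h', '/usr/lib64/libfoo.so'],
    }

    def best_files_section(path):
        best_match, best_section = '', ''
        for sec_name, lines in sections.items():
            for line in lines:
                candidates = difflib.get_close_matches(path, [best_match, line])
                if candidates and candidates[0] != best_match:
                    best_match, best_section = candidates[0], sec_name
        return best_section

    print(best_files_section('/usr/include/foo_compat.h'))  # prints '%files devel'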
Example #7
 def get_release(self) -> str:
     """Returns release string without %dist"""
     release = self.header.release
     dist = MacroHelper.expand('%{dist}')
     if dist and release.endswith(dist):
         release = release[:-len(dist)]
     return release
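
A quick sketch of the suffix stripping with plain strings; the values stand in for what rpm would report for the Release tag and %{dist}:

    release, dist = '3.fc39', '.fc39'  # hypothetical values
    if dist and release.endswith(dist):
        release = release[:-len(dist)]
    print(release)  # prints '3'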
Example #8
    def _get_best_matching_files_section(cls, rebase_spec_file, file):
        """Finds a %files section with a file that has the closest match with
        the specified file. If the best match cannot be determined, the main
        %files section is returned.

        Args:
            rebase_spec_file (specfile.SpecFile): Rebased SpecFile object.
            file (str): Path to the file to be classified.

        Returns:
            str: Name of the section containing the closest matching file.

        """
        best_match = ''
        best_match_section = ''
        for sec_name, sec_content in six.iteritems(
                rebase_spec_file.spec_content.sections):
            if sec_name.startswith('%files'):
                for line in sec_content:
                    new_best_match = difflib.get_close_matches(
                        file,
                        [best_match, MacroHelper.expand(line)])
                    if new_best_match:
                        # the new match is a closer match
                        if new_best_match[0] != best_match:
                            best_match = new_best_match[0]
                            best_match_section = sec_name

        return best_match_section or rebase_spec_file.get_main_files_section()
Example #9
    def _get_best_matching_files_section(cls, rebase_spec_file, file):
        """Finds a %files section with a file that has the closest match with
        the specified file. If the best match cannot be determined, the main
        %files section is returned. If no main section is found, return the
        first %files section if possible, None otherwise.

        Args:
            rebase_spec_file (specfile.SpecFile): Rebased SpecFile object.
            file (str): Path to the file to be classified.

        Returns:
            str: Name of the section containing the closest matching file.
                None if no %files section can be found.

        """
        best_match = ''
        best_match_section = ''
        files = []
        for sec_name, sec_content in rebase_spec_file.spec_content.sections:
            if sec_name.startswith('%files'):
                files.append(sec_name)
                for line in sec_content:
                    new_best_match = difflib.get_close_matches(
                        file,
                        [best_match, MacroHelper.expand(line)])
                    if new_best_match:
                        # the new match is a closer match
                        if new_best_match[0] != best_match:
                            best_match = str(new_best_match[0])
                            best_match_section = sec_name

        return best_match_section or rebase_spec_file.get_main_files_section(
        ) or (files[0] if files else None)
Example #10
    def generate_patch(self):
        """
        Generates a patch in the results_dir containing all changes needed for
        the rebased package version
        """
        # Delete removed patches from rebased_sources_dir from git
        removed_patches = self.rebase_spec_file.removed_patches
        if removed_patches:
            self.rebased_repo.index.remove(removed_patches, working_tree=True)

        self.rebase_spec_file.update_paths_to_sources_and_patches()

        # Generate patch
        self.rebased_repo.git.add(all=True)
        self.rebase_spec_file.update()
        self.rebased_repo.index.commit(
            MacroHelper.expand(self.conf.changelog_entry,
                               self.conf.changelog_entry))
        patch = self.rebased_repo.git.format_patch('-1',
                                                   stdout=True,
                                                   stdout_as_string=False)
        with open(os.path.join(self.results_dir, constants.CHANGES_PATCH),
                  'wb') as f:
            f.write(patch)
            f.write(b'\n')

        results_store.set_changes_patch(
            'changes_patch',
            os.path.join(self.results_dir, constants.CHANGES_PATCH))
Example #11
 def traverse(tree):
     result = []
     for node in tree:
         if node[0] == 't':
             # split text nodes on usual separators
             result.extend([t for t in re.split(r'(\.|-|_)', node[1]) if t])
         elif node[0] == 'm':
             m = '%{{{}}}'.format(node[1])
             if MacroHelper.expand(m):
                 result.append(m)
         elif node[0] == 'c':
             if MacroHelper.expand('%{{{}:1}}'.format(node[1])):
                 result.extend(traverse(node[2]))
         elif node[0] == 's':
             # ignore shell expansions, push nonsensical value
             result.append('@')
     return result
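
The text-node splitting keeps the separators as tokens because the re.split pattern uses a capturing group; a standalone sketch:

    import re

    tokens = [t for t in re.split(r'(\.|-|_)', 'foo-1.2_rc1') if t]
    print(tokens)  # prints ['foo', '-', '1', '.', '2', '_', 'rc1']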
Example #12
 def _sync_macros(s):
     """Makes all macros present in a string up-to-date in rpm context"""
     _, macros = _expand_macros(s)
     for macro in macros:
         MacroHelper.purge_macro(macro)
         value = _get_macro_value(macro)
         if value and MacroHelper.expand(value):
             rpm.addMacro(macro, value)
Example #13
 def get_arches():
     """Gets list of all known architectures"""
     arches = ['aarch64', 'noarch', 'ppc', 'riscv64', 's390', 's390x', 'src', 'x86_64']
     macros = MacroHelper.dump()
     macros = [m for m in macros if m['name'] in ('ix86', 'arm', 'mips', 'sparc', 'alpha', 'power64')]
     for m in macros:
         arches.extend(MacroHelper.expand(m['value'], '').split())
     return arches
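
A sketch of the second half of this function, with a hypothetical macro dump in place of MacroHelper.dump() and the expansion step omitted (the values are assumed to be already expanded):

    arches = ['aarch64', 'noarch', 'x86_64']  # trimmed base list
    macros = [                                # hypothetical dump entries
        {'name': 'ix86', 'value': 'i386 i486 i586 i686'},
        {'name': 'optflags', 'value': '-O2 -g'},
    ]
    macros = [m for m in macros if m['name'] in ('ix86', 'arm', 'mips', 'sparc', 'alpha', 'power64')]
    for m in macros:
        arches.extend(m['value'].split())
    print(arches)  # ['aarch64', 'noarch', 'x86_64', 'i386', 'i486', 'i586', 'i686']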
Example #14
 def get_release_number(self) -> str:
     """
     Removed in rebasehelper 0.20.0
     """
     release = self.header.release
     dist = MacroHelper.expand("%{dist}")
     if dist:
         release = release.replace(dist, "")
     return re.sub(r"([0-9.]*[0-9]+).*", r"\1", release)
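
The final substitution keeps only the leading numeric (possibly dotted) part of the release; a quick check with hypothetical release strings:

    import re

    for release in ('3.fc39', '0.12.rc2.fc39', '1'):
        print(re.sub(r"([0-9.]*[0-9]+).*", r"\1", release))
    # prints '3', '0.12' and '1' on separate lines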
Example #15
    def _parse_package_tags(
            cls, section: str, section_content: List[str], parsed: List[str],
            section_index: int, next_source_index: int,
            next_patch_index: int) -> Tuple[List[Tag], int, int]:
        """Parses all tags in a %package section and determines if they are valid.

        A tag is considered valid if it is still present after evaluating all conditions.

        Note that this is not perfect - if the same tag appears in both %if and %else blocks,
        and has the same value in both, it's impossible to tell them apart, so only the latter
        is considered valid, disregarding the actual condition.

        Returns:
              A tuple containing: a list of all Tag objects, the new next source index and the new next patch index.

              Indexed tag names are sanitized, for example 'Source' is replaced with 'Source0'
              and 'Patch007' with 'Patch7'.

              Tag names are capitalized, section names are lowercase.

        """
        result = []
        tag_re = re.compile(r'^(?P<prefix>(?P<name>\w+)\s*:\s*)(?P<value>.+)$')
        for line_index, line in enumerate(section_content):
            expanded = MacroHelper.expand(line)
            if not line or not expanded:
                continue
            valid = bool(parsed
                         and [p for p in parsed if p == expanded.rstrip()])
            m = tag_re.match(line)
            if m:
                tag_name, tag_index, next_source_index, next_patch_index = cls._sanitize_tag(
                    m.group('name'), next_source_index, next_patch_index)
                result.append(
                    Tag(section_index, section, line_index, tag_name,
                        m.span('value'), valid, tag_index))
                continue
            m = tag_re.match(expanded)
            if m:
                start = line.find(m.group('prefix'))
                if start < 0:
                    # tag is probably defined by a macro, just ignore it
                    continue
                # conditionalized tag
                line = line[start:].rstrip(
                    '}')  # FIXME: removing trailing braces is not very robust
                m = tag_re.match(line)
                if m:
                    span = cast(Tuple[int, int],
                                tuple(x + start for x in m.span('value')))
                    tag_name, tag_index, next_source_index, next_patch_index = cls._sanitize_tag(
                        m.group('name'), next_source_index, next_patch_index)
                    result.append(
                        Tag(section_index, section, line_index, tag_name, span,
                            valid, tag_index))

        return result, next_source_index, next_patch_index
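
The tag regex can be exercised on its own; the span of the 'value' group is what allows the raw line to be edited in place later. A minimal sketch with a hypothetical tag line:

    import re

    tag_re = re.compile(r'^(?P<prefix>(?P<name>\w+)\s*:\s*)(?P<value>.+)$')

    line = 'Source0: https://example.com/%{name}-%{version}.tar.gz'
    m = tag_re.match(line)
    start, end = m.span('value')
    print(m.group('name'))   # prints 'Source0'
    print(line[start:end])   # prints the raw, unexpanded value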
Example #16
    def find_archive_target_in_prep(self, archive):
        """
        Tries to find a command that is used to extract the specified archive
        and attempts to determine the target path from it.
        'tar' and 'unzip' commands are supported so far.

        :param archive: Path to archive
        :return: Target path relative to builddir or None if not determined
        """
        cd_parser = SilentArgumentParser()
        cd_parser.add_argument('dir', default=os.environ.get('HOME', ''))
        tar_parser = argparse.ArgumentParser()
        tar_parser.add_argument('-C', default='.', dest='target')
        unzip_parser = argparse.ArgumentParser()
        unzip_parser.add_argument('-d', default='.', dest='target')
        archive = os.path.basename(archive)
        builddir = MacroHelper.expand('%{_builddir}', '')
        basedir = builddir
        for line in self.get_prep_section():
            tokens = shlex.split(line, comments=True)
            if not tokens:
                continue
            # split tokens by pipe
            for tokens in [list(group) for k, group in itertools.groupby(tokens, lambda t: t == '|') if not k]:
                cmd, args = os.path.basename(tokens[0]), tokens[1:]
                if cmd == 'cd':
                    # keep track of current directory
                    try:
                        ns, _ = cd_parser.parse_known_args(args)
                    except ParseError:
                        pass
                    else:
                        basedir = ns.dir if os.path.isabs(ns.dir) else os.path.join(basedir, ns.dir)
                if archive in line:
                    if cmd == 'tar':
                        parser = tar_parser
                    elif cmd == 'unzip':
                        parser = unzip_parser
                    else:
                        continue
                    try:
                        ns, _ = parser.parse_known_args(args)
                    except ParseError:
                        continue
                    basedir = os.path.relpath(basedir, builddir)
                    return os.path.normpath(os.path.join(basedir, ns.target))
        return None
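
The tokenization and pipe splitting can be shown separately; the %prep line and directory name below are hypothetical:

    import argparse
    import itertools
    import shlex

    line = 'gzip -dc foo-1.2.3.tar.gz | tar -xvvf - -C build  # extract sources'
    tokens = shlex.split(line, comments=True)  # drops the trailing comment
    commands = [list(group) for k, group in itertools.groupby(tokens, lambda t: t == '|') if not k]
    print(commands)
    # [['gzip', '-dc', 'foo-1.2.3.tar.gz'], ['tar', '-xvvf', '-', '-C', 'build']]

    tar_parser = argparse.ArgumentParser()
    tar_parser.add_argument('-C', default='.', dest='target')
    ns, _ = tar_parser.parse_known_args(commands[1][1:])
    print(ns.target)  # prints 'build'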
Example #17
    def run(cls, spec_file: SpecFile, rebase_spec_file: SpecFile,
            **kwargs: Any):
        replace_with_macro = bool(kwargs.get('replace_old_version_with_macro'))

        subversion_patterns = cls._create_possible_replacements(
            spec_file, rebase_spec_file, replace_with_macro)
        examined_lines: Dict[int, Set[int]] = collections.defaultdict(set)
        for tag in rebase_spec_file.tags.filter():
            examined_lines[tag.section_index].add(tag.line)
            value = rebase_spec_file.get_raw_tag_value(tag.name,
                                                       tag.section_index)
            if not value or tag.name in cls.IGNORED_TAGS:
                continue
            scheme = urllib.parse.urlparse(value).scheme
            if (tag.name.startswith('Patch')
                    or tag.name.startswith('Source')) and not scheme:
                # skip local sources
                continue

            # replace the whole version first
            updated_value = subversion_patterns[0][0].sub(
                subversion_patterns[0][1], value)
            # replace subversions only for remote sources/patches
            if tag.name.startswith('Patch') or tag.name.startswith('Source'):
                for sub_pattern, repl in subversion_patterns[1:]:
                    updated_value = sub_pattern.sub(repl, updated_value)
            rebase_spec_file.set_raw_tag_value(tag.name, updated_value,
                                               tag.section_index)

        for sec_index, (sec_name, section) in enumerate(
                rebase_spec_file.spec_content.sections):
            if sec_name.startswith('%changelog'):
                continue
            for index, line in enumerate(section):
                tag_ignored = any(
                    MacroHelper.expand(line, line).startswith(tag)
                    for tag in cls.IGNORED_TAGS)
                if index in examined_lines[sec_index] or tag_ignored:
                    continue
                start, end = spec_file.spec_content.get_comment_span(
                    line, sec_name)
                updated_line = subversion_patterns[0][0].sub(
                    subversion_patterns[0][1], line[:start])
                section[index] = updated_line + line[start:end]

        rebase_spec_file.save()
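
The local-versus-remote decision relies on whether the tag value has a URL scheme; a quick sketch with hypothetical tag values:

    import urllib.parse

    for value in ('https://example.com/foo-1.0.tar.gz', 'foo-local.patch'):
        scheme = urllib.parse.urlparse(value).scheme
        print(value, '->', 'remote' if scheme else 'local')
    # https://example.com/foo-1.0.tar.gz -> remote
    # foo-local.patch -> local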
Example #18
    def _get_setup_parser(self):
        """
        Construct ArgumentParser for parsing %(auto)setup macro arguments

        :return: constructed ArgumentParser
        """
        parser = SilentArgumentParser()
        parser.add_argument('-n', default=MacroHelper.expand('%{name}-%{version}', '%{name}-%{version}'))
        parser.add_argument('-a', type=int, default=-1)
        parser.add_argument('-b', type=int, default=-1)
        parser.add_argument('-T', action='store_true')
        parser.add_argument('-q', action='store_true')
        parser.add_argument('-c', action='store_true')
        parser.add_argument('-D', action='store_true')
        parser.add_argument('-v', action='store_true')
        parser.add_argument('-N', action='store_true')
        parser.add_argument('-p', type=int, default=-1)
        parser.add_argument('-S', default='')
        return parser
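
A stripped-down variant of this parser can be exercised directly; SilentArgumentParser is replaced here with a plain argparse.ArgumentParser and the default for -n is a hypothetical literal instead of the expanded %{name}-%{version}:

    import argparse
    import shlex

    parser = argparse.ArgumentParser()
    parser.add_argument('-n', default='foo-1.2')  # stands in for the expanded %{name}-%{version}
    parser.add_argument('-a', type=int, default=-1)
    parser.add_argument('-b', type=int, default=-1)
    parser.add_argument('-T', action='store_true')
    parser.add_argument('-q', action='store_true')

    args = shlex.split('%autosetup -n foo-2.0 -p1 -q')[1:]
    ns, unknown = parser.parse_known_args(args)
    print(ns.n, ns.T, unknown)  # prints: foo-2.0 False ['-p1']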
Example #19
    def generate_patch(self):
        """
        Generates a patch in the results_dir containing all changes needed for
        the rebased package version
        """
        # Delete removed patches from rebased_sources_dir from git
        removed_patches = self.rebase_spec_file.removed_patches
        if removed_patches:
            self.rebased_repo.index.remove(removed_patches, working_tree=True)

        self.rebase_spec_file.update_paths_to_patches()

        # Generate patch
        self.rebased_repo.git.add(all=True)
        self.rebase_spec_file._update_data()  # pylint: disable=protected-access
        self.rebased_repo.index.commit(MacroHelper.expand(self.conf.changelog_entry, self.conf.changelog_entry))
        patch = self.rebased_repo.git.format_patch('-1', stdout=True, stdout_as_string=False)
        with open(os.path.join(self.results_dir, 'changes.patch'), 'wb') as f:
            f.write(patch)
            f.write(b'\n')

        results_store.set_changes_patch('changes_patch', os.path.join(self.results_dir, 'changes.patch'))
Example #20
    def _correct_one_section(cls, subpackage: str, sec_name: str,
                             sec_content: List[str], files: List[str],
                             result: Dict[str, RemovedFromSections]) -> None:
        """Removes deleted files from one %files section.

        Args:
            subpackage: Name of the subpackage which the section relates to.
            sec_name: Name of the %files section.
            sec_content: Content of the %files section.
            files: Files that still need to be removed
            result: Dict summarizing the changes done to the SPEC file.

        """
        i = 0
        while i < len(sec_content):
            original_line = sec_content[i].split()
            # Expand the whole line to check for occurrences of special
            # keywords, such as %global and %if blocks. Macro definitions
            # expand to empty string.
            expanded = MacroHelper.expand(sec_content[i])
            if not original_line or not expanded or any(
                    k in expanded for k in cls.PROHIBITED_KEYWORDS):
                i += 1
                continue
            split_line = original_line[:]
            # Keep track of files which could possibly be renamed but not
            # detected by the hook. %doc and %license files are the 2 examples
            # of this. If %doc README is renamed to README.md, the hook will
            # simply remove it but README.md won't be added (it is installed
            # by the directive). We want to warn the user about this.
            possible_rename = [False for _ in split_line]
            directives, prepended_directive = cls._get_line_directives(
                split_line)
            # Determine absolute paths
            if prepended_directive:
                for j, path in enumerate(split_line):
                    if not os.path.isabs(path):
                        prepend_macro = cls.FILES_DIRECTIVES[
                            prepended_directive] or ''
                        split_line[j] = os.path.join(prepend_macro, subpackage,
                                                     os.path.basename(path))
                        possible_rename[j] = True
            split_line = [MacroHelper.expand(p) for p in split_line]

            j = 0
            while j < len(split_line) and files:
                file = split_line[j]
                warn_about_rename = possible_rename[j]
                for deleted_file in reversed(files):
                    if not fnmatch.fnmatch(deleted_file, file):
                        continue

                    original_file = original_line[len(directives) + j]

                    del possible_rename[j]
                    del split_line[j]
                    del original_line[len(directives) + j]
                    files.remove(deleted_file)
                    result['removed'][sec_name].append(original_file)
                    logger.info("Removed %s from '%s' section", original_file,
                                sec_name)
                    if warn_about_rename:
                        logger.warning(
                            "The installation of %s was handled by %s directive and the file has now been "
                            "removed. The file may have been renamed and rebase-helper cannot automatically "
                            "detect it. A common example of this is renaming README to README.md. It might "
                            "be necessary to re-add such renamed file to the rebased SPEC file manually.",
                            original_file, prepended_directive)
                    break
                else:
                    j += 1

            if not split_line:
                del sec_content[i]
            else:
                sec_content[i] = ' '.join(original_line)
                i += 1
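
The matching itself is a plain fnmatch over the expanded paths; a minimal sketch with hypothetical %files entries and a deleted file:

    import fnmatch

    files_line = ['/usr/share/man/man1/foo.1*', '/usr/bin/foo']  # expanded entries on one %files line
    deleted = ['/usr/share/man/man1/foo.1.gz']                   # files missing from the new build

    kept = [entry for entry in files_line
            if not any(fnmatch.fnmatch(f, entry) for f in deleted)]
    print(kept)  # prints ['/usr/bin/foo']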
Example #21
 def _process_value(curval, newval):
     """
     Replaces non-redefinable-macro parts of curval with matching parts from newval
     and redefines values of macros accordingly
     """
     value, _ = _expand_macros(curval)
     _sync_macros(curval + newval)
     tokens = _tokenize(value)
     values = [None] * len(tokens)
     sm = SequenceMatcher(a=newval)
     i = 0
     # split newval to match tokens
     for index, token in enumerate(tokens):
         if token[0] == '%':
             # for macros, try both literal and expanded value
             for v in [token, MacroHelper.expand(token, token)]:
                 sm.set_seq2(v)
                 m = sm.find_longest_match(i, len(newval), 0, len(v))
                 valid = m.size == len(v)  # only full match is valid
                 if valid:
                     break
         else:
             sm.set_seq2(token)
             m = sm.find_longest_match(i, len(newval), 0, len(token))
             valid = m.size > 0
         if not valid:
             continue
         if token == sm.b:
             tokens[index] = token[m.b:m.b+m.size]
         if index > 0:
             values[index] = newval[m.a:m.a+m.size]
             if not values[index - 1]:
                 values[index - 1] = newval[i:m.a]
             else:
                 values[index - 1] += newval[i:m.a]
         else:
             values[index] = newval[i:m.a+m.size]
         i = m.a + m.size
     if newval[i:] and values:
         if not values[-1]:
             values[-1] = newval[i:]
         else:
             values[-1] += newval[i:]
     # try to fill empty macros
     for index, token in enumerate(tokens):
         if token[0] == '%':
             continue
         if token == values[index]:
             continue
         for i in range(index - 1, 0, -1):
             if tokens[i][0] == '%' and not values[i]:
                 values[i] = values[index]
                 values[index] = None
                 break
     # try to make values of identical macros equal
     for index, token in enumerate(tokens):
         if token[0] != '%':
             continue
         for i in range(index - 1, 0, -1):
             if tokens[i] == token:
                 idx = values[index].find(values[i])
                 if idx >= 0:
                     prefix = values[index][:idx]
                     for j in range(index - 1, i + 1, -1):
                         # first non-macro token
                         if tokens[j][0] != '%':
                             if prefix.endswith(values[j]):
                                 # move token from the end of prefix to the beginning
                                 prefix = values[j] + prefix[:prefix.find(values[j])]
                             else:
                                 # no match with prefix, cannot continue
                                 break
                         else:
                             # remove prefix from the original value and append it to the value of this macro
                             values[index] = values[index][idx:]
                             values[j] += prefix
                             break
                 break
     # redefine macros and update tokens
     for index, token in enumerate(tokens):
         if token == values[index]:
             continue
         if not values[index]:
             values[index] = '%{nil}' if token[0] == '%' else ''
         macros = _find_macros(token)
         if macros:
             _redefine_macro(macros[0][0], values[index])
         else:
             tokens[index] = values[index]
     result = ''.join(tokens)
     _sync_macros(curval + result)
     # only change value if necessary
     if MacroHelper.expand(curval) == MacroHelper.expand(result):
         return curval
     return result
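
The alignment of tokens to substrings of the new value is driven by difflib.SequenceMatcher.find_longest_match; a small standalone sketch of a single step (values are hypothetical):

    from difflib import SequenceMatcher

    newval = 'bar-2.0.tar.gz'
    sm = SequenceMatcher(a=newval)

    sm.set_seq2('foo')                                   # e.g. the expanded value of %{name}
    m = sm.find_longest_match(0, len(newval), 0, 3)
    print(m.size == 3)                                   # False - no full match, token is not anchored

    sm.set_seq2('tar.gz')
    m = sm.find_longest_match(0, len(newval), 0, 6)
    print(newval[m.a:m.a + m.size])                      # 'tar.gz' - full match, token anchored here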
Example #22
    def _correct_deleted_files(cls, rebase_spec_file, files):
        """Removes files newly missing in buildroot from %files sections
        of the SPEC file. If a file cannot be removed, the user is informed
        and it is mentioned in the final report.

        """
        result: Dict[str, RemovedFiles] = collections.defaultdict(
            lambda: collections.defaultdict(list))
        for sec_name, sec_content in rebase_spec_file.spec_content.sections:
            if sec_name.startswith('%files'):
                subpackage = rebase_spec_file.get_subpackage_name(sec_name)
                i = 0
                while i < len(sec_content):
                    original_line = sec_content[i].split()
                    # Expand the whole line to check for occurrences of
                    # special keywords, such as %global and %if blocks.
                    # Macro definitions expand to empty string.
                    expanded = MacroHelper.expand(sec_content[i])
                    if not original_line or not expanded or any(
                            k in expanded for k in cls.PROHIBITED_KEYWORDS):
                        i += 1
                        continue
                    split_line = original_line[:]
                    directives: List[str] = []
                    prepend_macro = None
                    for element in reversed(split_line):
                        if element in cls.FILES_DIRECTIVES:
                            if cls.FILES_DIRECTIVES[element]:
                                prepend_macro = cls.FILES_DIRECTIVES[element]
                            directives.insert(0, element)
                            split_line.remove(element)

                    if prepend_macro:
                        for j, path in enumerate(split_line):
                            if not os.path.isabs(path):
                                split_line[j] = os.path.join(
                                    prepend_macro, subpackage,
                                    os.path.basename(path))
                    split_line = [MacroHelper.expand(p) for p in split_line]

                    j = 0
                    while j < len(split_line) and files:
                        file = split_line[j]
                        for deleted_file in reversed(files):
                            if not fnmatch.fnmatch(deleted_file, file):
                                continue

                            original_file = original_line[len(directives) + j]

                            del split_line[j]
                            del original_line[len(directives) + j]
                            files.remove(deleted_file)
                            result['removed'][sec_name].append(original_file)
                            logger.info("Removed %s from '%s' section",
                                        original_file, sec_name)
                            break
                        else:
                            j += 1

                    if not split_line:
                        del sec_content[i]
                    else:
                        sec_content[i] = ' '.join(original_line)
                        i += 1

                    if not files:
                        return result

        logger.info('Could not remove the following files:')
        for file in files:
            logger.info('\t%s', file)

        result['unable_to_remove'] = files
        return result
Example #23
    def _correct_deleted_files(cls, rebase_spec_file, files):
        """Removes files newly missing in buildroot from %files sections
        of the SPEC file. If a file cannot be removed, the user is informed
        and it is mentioned in the final report.

        """
        result = collections.defaultdict(lambda: collections.defaultdict(list))
        for sec_name, sec_content in six.iteritems(
                rebase_spec_file.spec_content.sections):
            if sec_name.startswith('%files'):
                subpackage = rebase_spec_file.get_subpackage_name(sec_name)
                i = 0
                while i < len(sec_content):
                    original_line = sec_content[i].split()
                    if not original_line:
                        i += 1
                        continue
                    split_line = original_line[:]
                    directives = []
                    prepend_macro = None
                    for element in reversed(split_line):
                        if element in cls.FILES_DIRECTIVES:
                            if cls.FILES_DIRECTIVES[element]:
                                prepend_macro = cls.FILES_DIRECTIVES[element]
                            directives.insert(0, element)
                            split_line.remove(element)

                    if prepend_macro:
                        split_line = [
                            os.path.join(prepend_macro, subpackage,
                                         os.path.basename(p))
                            for p in split_line
                        ]
                    split_line = [MacroHelper.expand(p) for p in split_line]

                    j = 0
                    while j < len(split_line) and files:
                        file = split_line[j]
                        for deleted_file in reversed(files):
                            if not fnmatch.fnmatch(deleted_file, file):
                                continue

                            original_file = original_line[len(directives) + j]

                            del split_line[j]
                            del original_line[len(directives) + j]
                            files.remove(deleted_file)
                            result['removed'][sec_name].append(original_file)
                            logger.info("Removed %s from '%s' section",
                                        original_file, sec_name)
                            break
                        else:
                            j += 1

                    if not split_line:
                        del rebase_spec_file.spec_content.sections[sec_name][i]
                    else:
                        rebase_spec_file.spec_content.sections[sec_name][
                            i] = ' '.join(original_line)
                        i += 1

                    if not files:
                        return result

        logger.info('Could not remove the following files:')
        for file in files:
            logger.info('\t%s', file)

        result['unable_to_remove'] = files
        return result
Example #24
    def update_setup_dirname(self, dirname):
        """
        Update %setup or %autosetup dirname argument if needed

        :param dirname: new dirname to be used
        """
        parser = self._get_setup_parser()

        prep = self.spec_content.section('%prep')
        if not prep:
            return

        for index, line in enumerate(prep):
            if line.startswith('%setup') or line.startswith('%autosetup'):
                args = shlex.split(line)
                macro = args[0]
                args = [MacroHelper.expand(a, '') for a in args[1:]]

                # parse macro arguments
                try:
                    ns, unknown = parser.parse_known_args(args)
                except ParseError:
                    continue

                # check if this macro instance is extracting Source0
                if ns.T and ns.a != 0 and ns.b != 0:
                    continue

                # check if modification is really necessary
                if dirname != ns.n:
                    new_dirname = dirname

                    # get %{name} and %{version} macros
                    macros = [m for m in MacroHelper.filter(self.macros, level=-3) if m['name'] in ('name', 'version')]
                    # add all macros from spec file scope
                    macros.extend(MacroHelper.filter(self.macros, level=0))
                    # omit short macros
                    macros = [m for m in macros if len(m['value']) > 1]
                    # ensure maximal greediness
                    macros.sort(key=lambda k: len(k['value']), reverse=True)

                    # substitute tokens with macros
                    for m in macros:
                        if m['value'] and m['value'] in dirname:
                            new_dirname = new_dirname.replace(m['value'], '%{{{}}}'.format(m['name']))

                    args = [macro]
                    args.extend(['-n', new_dirname])
                    if ns.a != -1:
                        args.extend(['-a', str(ns.a)])
                    if ns.b != -1:
                        args.extend(['-b', str(ns.b)])
                    if ns.T:
                        args.append('-T')
                    if ns.q:
                        args.append('-q')
                    if ns.c:
                        args.append('-c')
                    if ns.D:
                        args.append('-D')
                    if ns.v:
                        args.append('-v')
                    if ns.N:
                        args.append('-N')
                    if ns.p != -1:
                        args.extend(['-p', str(ns.p)])
                    if ns.S != '':
                        args.extend(['-S', ns.S])
                    args.extend(unknown)

                    prep[index] = ' '.join(args)
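
The token-to-macro substitution at the heart of the update can be sketched with plain dictionaries standing in for the filtered macro dump (names and values are hypothetical):

    dirname = 'foo-2.0-src'
    macros = [{'name': 'name', 'value': 'foo'}, {'name': 'version', 'value': '2.0'}]

    # ensure maximal greediness: longer values are substituted first
    macros.sort(key=lambda m: len(m['value']), reverse=True)

    new_dirname = dirname
    for m in macros:
        if m['value'] and m['value'] in dirname:
            new_dirname = new_dirname.replace(m['value'], '%{{{}}}'.format(m['name']))
    print(new_dirname)  # prints '%{name}-%{version}-src'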
Example #25
    def _correct_deleted_files(cls, rebase_spec_file, files):
        """Removes files newly missing in buildroot from %files sections
        of the SPEC file. If a file cannot be removed, the user is informed
        and it is mentioned in the final report.

        """
        result = collections.defaultdict(lambda: collections.defaultdict(list))
        for sec_name, sec_content in rebase_spec_file.spec_content.sections:
            if sec_name.startswith('%files'):
                subpackage = rebase_spec_file.get_subpackage_name(sec_name)
                i = 0
                while i < len(sec_content):
                    original_line = sec_content[i].split()
                    if not original_line:
                        i += 1
                        continue
                    split_line = original_line[:]
                    directives = []
                    prepend_macro = None
                    for element in reversed(split_line):
                        if element in cls.FILES_DIRECTIVES:
                            if cls.FILES_DIRECTIVES[element]:
                                prepend_macro = cls.FILES_DIRECTIVES[element]
                            directives.insert(0, element)
                            split_line.remove(element)

                    if prepend_macro:
                        for j, path in enumerate(split_line):
                            if not os.path.isabs(path):
                                split_line[j] = os.path.join(prepend_macro, subpackage, os.path.basename(path))
                    split_line = [MacroHelper.expand(p) for p in split_line]

                    j = 0
                    while j < len(split_line) and files:
                        file = split_line[j]
                        for deleted_file in reversed(files):
                            if not fnmatch.fnmatch(deleted_file, file):
                                continue

                            original_file = original_line[len(directives) + j]

                            del split_line[j]
                            del original_line[len(directives) + j]
                            files.remove(deleted_file)
                            result['removed'][sec_name].append(original_file)
                            logger.info("Removed %s from '%s' section", original_file, sec_name)
                            break
                        else:
                            j += 1

                    if not split_line:
                        del sec_content[i]
                    else:
                        sec_content[i] = ' '.join(original_line)
                        i += 1

                    if not files:
                        return result

        logger.info('Could not remove the following files:')
        for file in files:
            logger.info('\t%s', file)

        result['unable_to_remove'] = files
        return result