Example #1
def bb_to_patch_series(bbfile):
    """Get all local patches as a series"""
    series = PatchSeries()
    for path in bbfile.localfiles:
        if path.endswith('.patch'):
            series.append(Patch(path))
    return series
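A minimal, hypothetical usage sketch: bb_to_patch_series() only needs an object exposing a 'localfiles' list, so the stand-in class below is made up for illustration.

class DummyBBFile:
    # Hypothetical stand-in for a BitBake recipe object; only 'localfiles'
    # is required by bb_to_patch_series() above.
    localfiles = ['0001-fix-build.patch', 'defconfig', '0002-no-docs.patch']

series = bb_to_patch_series(DummyBBFile())
for patch in series:
    print(patch.path)  # only the two *.patch entries end up in the series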
Example #2
    def patchseries(self, unapplied=False, ignored=False):
        """Return non-ignored patches of the RPM as a gbp patchseries"""
        series = PatchSeries()
        if "patch" in self._tags:
            tags = self._patches()
            applied = []
            for macro in self._special_directives["patch"]:
                if macro["id"] in tags:
                    applied.append((macro["id"], macro["args"]))
            ignored = set() if ignored else set(self.ignorepatches)

            # Put all patches that are applied first in the series
            for num, args in applied:
                if num not in ignored:
                    opts = self._patch_macro_opts(args)
                    strip = int(opts.strip) if opts.strip else 0
                    filename = os.path.basename(tags[num]["linevalue"])
                    series.append(Patch(os.path.join(self.specdir, filename), strip=strip))
            # Finally, append all unapplied patches to the series, if requested
            if unapplied:
                applied_nums = set([num for num, _args in applied])
                unapplied = set(tags.keys()).difference(applied_nums)
                for num in sorted(unapplied):
                    if num not in ignored:
                        filename = os.path.basename(tags[num]["linevalue"])
                        series.append(Patch(os.path.join(self.specdir, filename), strip=0))
        return series
Example #3
    def patchseries(self, unapplied=False, ignored=False):
        """Return non-ignored patches of the RPM as a gbp patchseries"""
        series = PatchSeries()

        ignored = set() if ignored else set(self.ignorepatches)
        tags = dict([(k, v) for k, v in self._patches().items()
                     if k not in ignored])

        if self._special_directives['autosetup']:
            # Return all patches if %autosetup is used
            for num in sorted(tags):
                filename = os.path.basename(tags[num]['linevalue'])
                series.append(Patch(os.path.join(self.specdir, filename)))
        else:
            applied = []
            for macro in self._special_directives['patch']:
                if macro['id'] in tags:
                    applied.append((macro['id'], macro['args']))

            # Put all patches that are applied first in the series
            for num, args in applied:
                opts = self._patch_macro_opts(args)
                strip = int(opts.strip) if opts.strip else 0
                filename = os.path.basename(tags[num]['linevalue'])
                series.append(
                    Patch(os.path.join(self.specdir, filename), strip=strip))
            # Finally, append all unapplied patches to the series, if requested
            if unapplied:
                applied_nums = set([num for num, _args in applied])
                unapplied = set(tags.keys()).difference(applied_nums)
                for num in sorted(unapplied):
                    filename = os.path.basename(tags[num]['linevalue'])
                    series.append(
                        Patch(os.path.join(self.specdir, filename), strip=0))
        return series
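A hedged usage sketch for this method: 'spec' stands for a parsed spec-file object that provides patchseries() as shown above, and printing the 'strip' attribute assumes the Patch objects keep the strip value passed to their constructor.

# 'spec' is a hypothetical parsed spec-file object exposing patchseries()
series = spec.patchseries(unapplied=True)  # applied patches first, then unapplied
for patch in series:
    print("%s (strip=%s)" % (patch.path, patch.strip))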
Example #4
def safe_patches(queue, tmpdir_base):
    """
    Save the current patches in a temporary directory
    below 'tmpdir_base'. Also, uncompress compressed patches here.

    @param queue: an existing patch queue
    @param tmpdir_base: base under which to create tmpdir
    @return: saved queue (with patches in tmpdir)
    @rtype: PatchSeries
    """

    tmpdir = tempfile.mkdtemp(dir=tmpdir_base, prefix='patchimport_')
    safequeue = PatchSeries()

    if len(queue) > 0:
        gbp.log.debug("Safeing patches '%s' in '%s'" %
                      (os.path.dirname(queue[0].path), tmpdir))
    for patch in queue:
        base, _archive_fmt, comp = parse_archive_filename(patch.path)
        uncompressors = {'gzip': gzip.open, 'bzip2': bz2.BZ2File}
        if comp in uncompressors:
            gbp.log.debug("Uncompressing '%s'" % os.path.basename(patch.path))
            src = uncompressors[comp](patch.path, 'r')
            dst_name = os.path.join(tmpdir, os.path.basename(base))
            if _archive_fmt:
                tar_name = dst_name
                dst_name += '.tar'
        elif comp:
            raise GbpError("Unsupported patch compression '%s', giving up" %
                           comp)
        else:
            src = open(patch.path, 'r')
            dst_name = os.path.join(tmpdir, os.path.basename(patch.path))
            if _archive_fmt:
                # Uncompressed archive: set 'tar_name' so the extraction step
                # below has a valid target name, mirroring the compressed case
                tar_name = os.path.join(tmpdir, os.path.basename(base))

        dst = open(dst_name, 'w')
        dst.writelines(src)
        src.close()
        dst.close()
        if _archive_fmt:
            t = tarfile.open(dst_name, 'r:')
            t.extractall(path=tmpdir)
            t.close()
            dst_name = tar_name

        safequeue.append(patch)
        safequeue[-1].path = dst_name

    return safequeue
Example #5
    def read_series_file(self, file_):
        try:
            from gbp.patch_series import PatchSeries
            return PatchSeries.read_series_file(file_)
        except ImportError:
            log.warning('Please run "sudo apt-get install '
                        'git-buildpackage" to write the patches to '
                        './debian/changelog')
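A minimal sketch of how this wrapper might be called; 'helper' is a hypothetical object carrying the read_series_file() method above, and the fallback branch relies on the method returning None when gbp is not importable.

# 'helper' is hypothetical; it only needs the read_series_file() wrapper above
series = helper.read_series_file('debian/patches/series')
if series is None:
    print("git-buildpackage is not installed, no patch series loaded")
else:
    for patch in series:
        print(patch.path)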
Example #6
    def patchseries(self, unapplied=False, ignored=False):
        """Return non-ignored patches of the RPM as a gbp patchseries"""
        series = PatchSeries()

        ignored = set() if ignored else set(self.ignorepatches)
        tags = dict([(k, v) for k, v in self._patches().items() if k not in ignored])

        if self._special_directives['autosetup']:
            # Return all patches if %autosetup is used
            for num in sorted(tags):
                filename = os.path.basename(tags[num]['linevalue'])
                series.append(Patch(os.path.join(self.specdir, filename)))
        else:
            applied = []
            for macro in self._special_directives['patch']:
                if macro['id'] in tags:
                    applied.append((macro['id'], macro['args']))

            # Put all patches that are applied first in the series
            for num, args in applied:
                opts = self._patch_macro_opts(args)
                strip = int(opts.strip) if opts.strip else 0
                filename = os.path.basename(tags[num]['linevalue'])
                series.append(Patch(os.path.join(self.specdir, filename),
                                    strip=strip))
            # Finally, append all unapplied patches to the series, if requested
            if unapplied:
                applied_nums = set([num for num, _args in applied])
                unapplied = set(tags.keys()).difference(applied_nums)
                for num in sorted(unapplied):
                    filename = os.path.basename(tags[num]['linevalue'])
                    series.append(Patch(os.path.join(self.specdir, filename),
                                        strip=0))
        return series
Example #7
def safe_patches(queue, tmpdir_base):
    """
    Save the current patches in a temporary directory
    below 'tmpdir_base'. Also, uncompress compressed patches here.

    @param queue: an existing patch queue
    @param tmpdir_base: base under which to create tmpdir
    @return: saved queue (with patches in tmpdir)
    @rtype: PatchSeries
    """

    tmpdir = tempfile.mkdtemp(dir=tmpdir_base, prefix='patchimport_')
    safequeue = PatchSeries()

    if len(queue) > 0:
        gbp.log.debug("Safeing patches '%s' in '%s'" %
                        (os.path.dirname(queue[0].path), tmpdir))
    for patch in queue:
        base, _archive_fmt, comp = parse_archive_filename(patch.path)
        uncompressors = {'gzip': gzip.open, 'bzip2': bz2.BZ2File}
        if comp in uncompressors:
            gbp.log.debug("Uncompressing '%s'" % os.path.basename(patch.path))
            src = uncompressors[comp](patch.path, 'r')
            dst_name = os.path.join(tmpdir, os.path.basename(base))
        elif comp:
            raise GbpError("Unsupported patch compression '%s', giving up"
                           % comp)
        else:
            src = open(patch.path, 'r')
            dst_name = os.path.join(tmpdir, os.path.basename(patch.path))

        dst = open(dst_name, 'w')
        dst.writelines(src)
        src.close()
        dst.close()

        safequeue.append(patch)
        safequeue[-1].path = dst_name

    return safequeue
Example #8
def safe_patches(queue):
    """
    Save the current patches in a temporary directory

    @param queue: an existing patch queue
    @return: saved queue (with patches in tmpdir)
    @rtype: PatchSeries
    """

    tmpdir = tempfile.mkdtemp(prefix='patchimport_')
    safequeue = PatchSeries()

    if len(queue) > 0:
        gbp.log.debug("Saving patches '%s' in '%s'" %
                      (os.path.dirname(queue[0].path), tmpdir))
    for patch in queue:
        base, _archive_fmt, comp = Archive.parse_filename(patch.path)
        uncompressors = {'gzip': gzip.open, 'bzip2': bz2.BZ2File}
        if comp in uncompressors:
            gbp.log.debug("Uncompressing '%s'" % os.path.basename(patch.path))
            src = uncompressors[comp](patch.path, 'r')
            dst_name = os.path.join(tmpdir, os.path.basename(base))
        elif comp:
            raise GbpError("Unsupported patch compression '%s', giving up" %
                           comp)
        else:
            src = open(patch.path, 'rb')
            dst_name = os.path.join(tmpdir, os.path.basename(patch.path))

        dst = open(dst_name, 'wb')
        dst.write(src.read())
        src.close()
        dst.close()

        safequeue.append(patch)
        safequeue[-1].path = dst_name

    return safequeue
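A hedged usage sketch for this variant, which returns only the copied queue; the patch paths below are made up, and cleaning up the 'patchimport_*' temporary directory is left to the caller since the tmpdir itself is not returned.

# Build a small queue from on-disk patch files (paths are illustrative only)
queue = PatchSeries()
for path in ('debian/patches/0001-fix.patch',
             'debian/patches/0002-docs.patch.gz'):
    queue.append(Patch(path))

safe_queue = safe_patches(queue)
for patch in safe_queue:
    print(patch.path)  # paths now point into the temporary 'patchimport_*' dir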
Example #9
def import_quilt_patches(repo, branch, series, tries, force):
    """
    Apply the quilt patches from the series file 'series' onto the
    patch-queue branch for 'branch'.

    @param repo: git repository to work on
    @param branch: branch to base the patch queue on
    @param series: series file to read patches from
    @param tries: try that many times to apply the patches, going back one
                  commit in the branch's history after each failure.
    @param force: import the patch series even if the branch already exists
    """
    tmpdir = None

    if is_pq_branch(branch):
        if force:
            branch = pq_branch_base(branch)
            pq_branch = pq_branch_name(branch)
            repo.checkout(branch)
        else:
            gbp.log.err("Already on a patch-queue branch '%s' - doing nothing." % branch)
            raise GbpError
    else:
        pq_branch = pq_branch_name(branch)

    if repo.has_branch(pq_branch):
        if force:
            drop_pq(repo, branch)
        else:
            raise GbpError("Patch queue branch '%s'. already exists. Try 'rebase' instead."
                           % pq_branch)

    maintainer = get_maintainer_from_control(repo)
    commits = repo.get_commits(num=tries, first_parent=True)
    # If we go back in history we have to save our pq so we always try to apply
    # the latest one
    if len(commits) > 1:
        tmpdir, series = safe_patches(series)

    queue = PatchSeries.read_series_file(series)

    i = len(commits)
    for commit in commits:
        if len(commits) > 1:
            gbp.log.info("%d %s left" % (i, 'tries' if i > 1 else 'try'))
        try:
            gbp.log.info("Trying to apply patches at '%s'" % commit)
            repo.create_branch(pq_branch, commit)
        except GitRepositoryError:
            raise GbpError("Cannot create patch-queue branch '%s'." % pq_branch)

        repo.set_branch(pq_branch)
        for patch in queue:
            gbp.log.debug("Applying %s" % patch.path)
            try:
                apply_and_commit_patch(repo, patch, maintainer, patch.topic)
            except (GbpError, GitRepositoryError):
                gbp.log.err("Failed to apply '%s'" % patch.path)
                repo.set_branch(branch)
                repo.delete_branch(pq_branch)
                break
        else:
            # All patches applied successfully
            break
        i -= 1
    else:
        raise GbpError("Couldn't apply patches")

    if tmpdir:
        gbp.log.debug("Remove temporary patch safe '%s'" % tmpdir)
        shutil.rmtree(tmpdir)
Example #10
def import_quilt_patches(repo, branch, series, tries, force, pq_from,
                         upstream_tag):
    """
    Apply the quilt patches from the series file 'series' onto the
    patch-queue branch for 'branch'.

    @param repo: git repository to work on
    @param branch: branch to base patch queue on
    @param series: series file to read patches from
    @param tries: try that many times to apply the patches, going back one
                  commit in the branch's history after each failure.
    @param force: import the patch series even if the branch already exists
    @param pq_from: what to use as the starting point for the pq branch.
                    DEBIAN indicates the current branch, TAG indicates that
                    the corresponding upstream tag should be used.
    @param upstream_tag: upstream tag template to use
    """
    tmpdir = None
    series = os.path.join(repo.path, series)

    if is_pq_branch(branch):
        if force:
            branch = pq_branch_base(branch)
            pq_branch = pq_branch_name(branch)
            repo.checkout(branch)
        else:
            raise GbpError("Already on a patch-queue branch '%s' - doing nothing." % branch)
    else:
        pq_branch = pq_branch_name(branch)

    if repo.has_branch(pq_branch):
        if force:
            drop_pq(repo, branch)
        else:
            raise GbpError("Patch queue branch '%s'. already exists. Try 'rebase' or 'switch' instead."
                           % pq_branch)

    maintainer = get_maintainer_from_control(repo)
    if pq_on_upstream_tag(pq_from):
        commits = [find_upstream_commit(repo, branch, upstream_tag)]
    else:  # pq_from == 'DEBIAN'
        commits = repo.get_commits(num=tries, first_parent=True)
    # If we go back in history we have to save our pq so we always try to apply
    # the latest one
    # If we are using the upstream_tag, we always need a copy of the patches
    if len(commits) > 1 or pq_on_upstream_tag(pq_from):
        if os.path.exists(series):
            tmpdir, series = safe_patches(series, repo)

    queue = PatchSeries.read_series_file(series)

    i = len(commits)
    for commit in commits:
        if len(commits) > 1:
            gbp.log.info("%d %s left" % (i, 'tries' if i > 1 else 'try'))
        try:
            gbp.log.info("Trying to apply patches at '%s'" % commit)
            repo.create_branch(pq_branch, commit)
        except GitRepositoryError:
            raise GbpError("Cannot create patch-queue branch '%s'." % pq_branch)

        repo.set_branch(pq_branch)
        for patch in queue:
            gbp.log.debug("Applying %s" % patch.path)
            try:
                name = os.path.basename(patch.path)
                apply_and_commit_patch(repo, patch, maintainer, patch.topic, name)
            except (GbpError, GitRepositoryError) as e:
                gbp.log.err("Failed to apply '%s': %s" % (patch.path, e))
                repo.force_head('HEAD', hard=True)
                repo.set_branch(branch)
                repo.delete_branch(pq_branch)
                break
        else:
            # All patches applied successfully
            break
        i -= 1
    else:
        raise GbpError("Couldn't apply patches")

    if tmpdir:
        gbp.log.debug("Remove temporary patch safe '%s'" % tmpdir)
        shutil.rmtree(tmpdir)

    return len(queue)
Example #11
def import_quilt_patches(repo, branch, series, tries, force, pq_from,
                         upstream_tag):
    """
    Apply the quilt patches from the series file 'series' onto the
    patch-queue branch for 'branch'.

    @param repo: git repository to work on
    @param branch: branch to base patch queue on
    @param series: series file to read patches from
    @param tries: try that many times to apply the patches, going back one
                  commit in the branch's history after each failure.
    @param force: import the patch series even if the branch already exists
    @param pq_from: what to use as the starting point for the pq branch.
                    DEBIAN indicates the current branch, TAG indicates that
                    the corresponding upstream tag should be used.
    @param upstream_tag: upstream tag template to use
    """
    tmpdir = None
    series = os.path.join(repo.path, series)

    if is_pq_branch(branch):
        if force:
            branch = pq_branch_base(branch)
            pq_branch = pq_branch_name(branch)
            repo.checkout(branch)
        else:
            raise GbpError(
                "Already on a patch-queue branch '%s' - doing nothing." %
                branch)
    else:
        pq_branch = pq_branch_name(branch)

    if repo.has_branch(pq_branch):
        if force:
            drop_pq(repo, branch)
        else:
            raise GbpError(
                "Patch queue branch '%s'. already exists. Try 'rebase' or 'switch' instead."
                % pq_branch)

    maintainer = get_maintainer_from_control(repo)
    if pq_on_upstream_tag(pq_from):
        commits = [find_upstream_commit(repo, branch, upstream_tag)]
    else:  # pq_from == 'DEBIAN'
        commits = repo.get_commits(num=tries, first_parent=True)
    # If we go back in history we have to save our pq so we always try to apply
    # the latest one
    # If we are using the upstream_tag, we always need a copy of the patches
    if len(commits) > 1 or pq_on_upstream_tag(pq_from):
        if os.path.exists(series):
            tmpdir, series = safe_patches(series, repo)

    queue = PatchSeries.read_series_file(series)

    i = len(commits)
    for commit in commits:
        if len(commits) > 1:
            gbp.log.info("%d %s left" % (i, 'tries' if i > 1 else 'try'))
        try:
            gbp.log.info("Trying to apply patches at '%s'" % commit)
            repo.create_branch(pq_branch, commit)
        except GitRepositoryError:
            raise GbpError("Cannot create patch-queue branch '%s'." %
                           pq_branch)

        repo.set_branch(pq_branch)
        for patch in queue:
            gbp.log.debug("Applying %s" % patch.path)
            try:
                name = os.path.basename(patch.path)
                apply_and_commit_patch(repo, patch, maintainer, patch.topic,
                                       name)
            except (GbpError, GitRepositoryError) as e:
                gbp.log.err("Failed to apply '%s': %s" % (patch.path, e))
                repo.force_head('HEAD', hard=True)
                repo.set_branch(branch)
                repo.delete_branch(pq_branch)
                break
        else:
            # All patches applied successfully
            break
        i -= 1
    else:
        raise GbpError("Couldn't apply patches")

    if tmpdir:
        gbp.log.debug("Remove temporary patch safe '%s'" % tmpdir)
        shutil.rmtree(tmpdir)

    return len(queue)
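A hedged driver sketch for this last variant: GitRepository comes from gbp.git, but the series path, the 'DEBIAN' pq_from mode and the upstream tag template below are assumptions for illustration, not values taken from a particular gbp release.

from gbp.git import GitRepository

# Assumed setup: a Debian packaging checkout with debian/patches/series present
repo = GitRepository('.')
branch = repo.get_branch()

num = import_quilt_patches(repo, branch,
                           series='debian/patches/series',
                           tries=1, force=False,
                           pq_from='DEBIAN',  # start the pq branch from the current branch
                           upstream_tag='upstream/%(version)s')
print("Imported %d patches onto the patch-queue branch" % num)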