def git_archive_submodules(repo, treeish, output, prefix, comp_type,
                           comp_level, comp_opts):
    """
    Create a compressed tarball including submodules

    Since git-archive always writes an end-of-tarfile trailer we concatenate
    the generated archives using tar and compress the result.

    Exception handling is left to the caller.
    """
    prefix = sanitize_prefix(prefix)
    tarfile = output.rsplit('.', 1)[0]  # output path minus the compression suffix
    tempdir = tempfile.mkdtemp()
    submodule_tarfile = os.path.join(tempdir, "submodule.tar")
    try:
        # generate main tarfile
        repo.archive(format='tar',
                     prefix=prefix,
                     output=tarfile,
                     treeish=treeish)

        # generate each submodule's tarfile and append it to the main archive
        for (subdir, commit) in repo.get_submodules(treeish):
            # strip a leading "./" from the submodule path
            tarpath = subdir[2:] if subdir.startswith("./") else subdir

            gbp.log.debug("Processing submodule %s (%s)" %
                          (subdir, commit[0:8]))
            repo.archive(format='tar',
                         prefix='%s%s/' % (prefix, tarpath),
                         output=submodule_tarfile,
                         treeish=commit,
                         cwd=subdir)
            CatenateTarArchive(tarfile)(submodule_tarfile)

        # compress the output
        ret = os.system("%s -%s %s %s" %
                        (comp_type, comp_level, comp_opts, tarfile))
        if ret:
            raise GbpError("Error creating %s: %d" % (output, ret))
    finally:
        shutil.rmtree(tempdir)
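
A minimal, self-contained sketch of the concatenation step, using the stdlib
tarfile module in place of gbp's CatenateTarArchive (file names are
hypothetical; both archives must still be uncompressed at this point):

import tarfile

def append_archive(main_tar, extra_tar):
    """Append every member of extra_tar to the end of main_tar."""
    with tarfile.open(main_tar, 'a') as dst, tarfile.open(extra_tar, 'r') as src:
        for member in src.getmembers():
            # Regular files carry data; other entry types are header-only
            fileobj = src.extractfile(member) if member.isfile() else None
            dst.addfile(member, fileobj)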
Example #2
    def _set_tag(self, tag, num, value, insertafter):
        """Set a tag value"""
        key = tag.lower()
        tagname = '%s%s' % (tag, num) if num is not None else tag
        value = value.strip()
        if not value:
            raise GbpError("Cannot set empty value to '%s:' tag" % tag)

        # Check type of tag, we don't support values for 'multivalue' tags
        try:
            header = self._specinfo.packages[0].header
            tagvalue = header[getattr(librpm, 'RPMTAG_%s' % tagname.upper())]
        except AttributeError:
            tagvalue = None
        # Multi-value (list) tags don't take a single value; store None
        tagvalue = None if type(tagvalue) is list else value

        # Try to guess the correct indentation from the previous or next tag
        indent_re = re.compile(r'^([a-z]+([0-9]+)?\s*:\s*)', flags=re.I)
        match = indent_re.match(str(insertafter))
        if not match:
            match = indent_re.match(str(insertafter.next))
        indent = 12 if not match else len(match.group(1))
        text = '%-*s%s\n' % (indent, '%s:' % tagname, value)
        if key in self._tags:
            self._tags[key]['value'] = tagvalue
            for line in reversed(self._tags[key]['lines']):
                if line['num'] == num:
                    gbp.log.debug("Updating '%s:' tag in spec" % tagname)
                    line['line'].set_data(text)
                    line['linevalue'] = value
                    return line['line']

        gbp.log.debug("Adding '%s:' tag after '%s...' line in spec" %
                      (tagname, str(insertafter)[0:20]))
        line = self._content.insert_after(insertafter, text)
        linerec = {'line': line, 'num': num, 'linevalue': value}
        if key in self._tags:
            self._tags[key]['lines'].append(linerec)
        else:
            self._tags[key] = {'value': tagvalue, 'lines': [linerec]}
        return line
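
A standalone illustration of the indentation-guessing regex used above (the
spec line is made up):

import re

indent_re = re.compile(r'^([a-z]+([0-9]+)?\s*:\s*)', flags=re.I)
match = indent_re.match('Version:    1.0')
print(len(match.group(1)))  # -> 12: tag name, colon and padding before the value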
Example #3
def export_source(repo, tree, source, options, dest_dir, tarball_dir):
    """
    Export a version of the source tree when building in a separate directory

    @param repo: the git repository to export from
    @type repo: L{gbp.git.GitRepository}
    @param tree: the treeish to export
    @param source: the source package
    @param options: options to apply
    @param dest_dir: where to export the source to
    @param tarball_dir: where to fetch the tarball from in overlay mode
    """
    # Extract orig tarball if git-overlay option is selected:
    if options.overlay:
        if source.is_native():
            raise GbpError("Cannot overlay Debian native package")
        overlay_extract_origs(source, tarball_dir, dest_dir, options)

    gbp.log.info("Exporting '%s' to '%s'" % (options.export, dest_dir))
    if not dump_tree(repo, dest_dir, tree, options.with_submodules):
        raise GbpError
Example #4
def dump_meta(cfg_data, options, repo, treeish, dump_dir):
    """Parse and dump meta information from a treeish"""
    tmpdir = tempfile.mkdtemp(prefix='gbp-bb_')
    try:
        bb_path = guess_bb_path(options, repo, treeish, bbappend=True)
        # Dump whole meta directory
        dump_tree(repo, tmpdir, '%s:%s' % (treeish, os.path.dirname(bb_path)),
                  False)
        # Parse recipe
        full_path = os.path.join(tmpdir, os.path.basename(bb_path))
        bbfile = BBFile(full_path, cfg_data)
        bb_get_local_files(bbfile, dump_dir)
    except GitRepositoryError as err:
        raise GbpError("Git error: %s" % err)
    finally:
        shutil.rmtree(tmpdir)

    # Re-parse recipe from final location
    full_path = os.path.abspath(os.path.join(dump_dir,
                                             os.path.basename(bb_path)))
    return BBFile(full_path, cfg_data)
Example #5
def write_tree(repo, options):
    """
    Write a tree of the index or working copy if necessary

    @param repo: the git repository we're acting on
    @type repo: L{GitRepository}
    @param options: the parsed options
    @return: the sha1 of the tree
    @rtype: C{str}
    """
    if options.export_dir:
        if options.export == index_name:
            tree = repo.write_tree()
        elif options.export == wc_name:
            tree = write_wc(repo)
        else:
            tree = options.export
        if not repo.has_treeish(tree):
            raise GbpError("%s is not a valid treeish" % tree)
    else:
        tree = None
    return tree
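
For reference, repo.write_tree() wraps plain 'git write-tree'; a standalone
equivalent (assuming the current directory is a git checkout) would be:

import subprocess

tree_sha1 = subprocess.check_output(['git', 'write-tree']).decode().strip()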
Example #6
def export_patches(repo, options):
    """Export patches from the pq branch into a packaging branch"""
    current = repo.get_branch()
    if is_pq_branch(current, options):
        base = pq_branch_base(current, options)
        gbp.log.info("On branch '%s', switching to '%s'" % (current, base))
        repo.set_branch(base)
        spec = parse_spec(options, repo)
        pq_branch = current
    else:
        spec = parse_spec(options, repo)
        pq_branch = pq_branch_name(current, options, spec.version)
    upstream_commit = find_upstream_commit(repo, spec, options.upstream_tag)

    export_treeish = options.export_rev if options.export_rev else pq_branch
    if not repo.has_treeish(export_treeish):
        raise GbpError('Invalid treeish object %s' % export_treeish)

    update_patch_series(repo, spec, upstream_commit, export_treeish, options)

    GitCommand('status')(['--', spec.specdir])
Example #7
def download_source(pkg, dirs, unauth):
    opts = ['--download-only']
    if unauth:
        opts.append('--allow-unauthenticated')

    if re.match(r'[a-z]{1,5}://', pkg):
        cmd = 'dget'
        opts += ['-q', pkg]
    else:
        cmd = 'apt-get'
        opts += ['-qq', 'source', pkg]

    dirs['download'] = os.path.abspath(tempfile.mkdtemp())
    gbp.log.info("Downloading '%s' using '%s'..." % (pkg, cmd))

    gbpc.RunAtCommand(cmd, opts, shell=False)(dir=dirs['download'])
    try:
        dsc = glob.glob(os.path.join(dirs['download'], '*.dsc'))[0]
    except IndexError:
        raise GbpError("Did not find a dsc file at %s/" % dirs['download'])
    return dsc
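
A hedged usage sketch (package name and URL are made up; requires apt-get
resp. dget on the system):

dirs = {}
dsc = download_source('hello', dirs, unauth=False)   # via 'apt-get source'
dsc = download_source('http://example.org/pkg.dsc', dirs, unauth=False)  # via 'dget'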
Example #8
def export_patches(repo, branch, options):
    """Export patches from the pq branch into a patch series"""
    if is_pq_branch(branch):
        base = pq_branch_base(branch)
        gbp.log.info("On '%s', switching to '%s'" % (branch, base))
        branch = base
        repo.set_branch(branch)

    pq_branch = pq_branch_name(branch)
    try:
        shutil.rmtree(PATCH_DIR)
    except OSError as e:
        if e.errno != errno.ENOENT:
            raise GbpError("Failed to remove patch dir: %s" % e.strerror)
        else:
            gbp.log.debug("%s does not exist." % PATCH_DIR)

    patches = generate_patches(repo, branch, pq_branch, PATCH_DIR, options)

    if patches:
        with open(SERIES_FILE, 'w') as seriesfd:
            for patch in patches:
                seriesfd.write(os.path.relpath(patch, PATCH_DIR) + '\n')
        if options.commit:
            added, removed = commit_patches(repo, branch, patches, options)
            if added:
                what = 'patches' if len(added) > 1 else 'patch'
                gbp.log.info("Added %s %s to patch series" %
                             (what, ', '.join(added)))
            if removed:
                what = 'patches' if len(removed) > 1 else 'patch'
                gbp.log.info("Removed %s %s from patch series" %
                             (what, ', '.join(removed)))
        else:
            GitCommand('status')(['--', PATCH_DIR])
    else:
        gbp.log.info("No patches on '%s' - nothing to do." % pq_branch)

    if options.drop:
        drop_pq(repo, branch)
Example #9
def prepare_upstream_tarballs(repo, source, options, tarball_dir, output_dir):
    """
    Make sure we have the needed upstream tarballs. The default order is:
    - look in tarball_dir and if found symlink to it
    - create tarball using pristine-tar
    - create tarball using git-archive

    Afterwards
    - create pristine-tar commits if pristine-tar-commit is in use
    - verify tarball checksums if pristine-tar is in use
    """
    if hasattr(options, 'no_create_orig') and options.no_create_orig:
        return

    if not source.is_native() and not source.upstream_version:
        raise GbpError("Non-native package '%s' "
                       "has invalid version '%s'" % (source.name, source.version))

    options.comp_type = guess_comp_type(options.comp_type,
                                        source,
                                        repo,
                                        options.tarball_dir)
    orig_files = source.upstream_tarball_names(options.comp_type, options.components)

    # look in tarball_dir first, if found force a symlink to it
    if options.tarball_dir:
        gbp.log.debug("Looking for orig tarballs '%s' at '%s'" % (", ".join(orig_files), tarball_dir))
        missing = du.DebianPkgPolicy.symlink_origs(orig_files, tarball_dir, output_dir, force=True)
        if missing:
            msg = "Tarballs '%s' not found at '%s'" % (", ".join(missing), tarball_dir)
        else:
            msg = "All Orig tarballs '%s' found at '%s'" % (", ".join(orig_files), tarball_dir)
        gbp.log.info(msg)

    # Create tarball if missing or forced
    if not du.DebianPkgPolicy.has_origs(orig_files, output_dir) or options.force_create:
        if not pristine_tar_build_origs(repo, source, output_dir, options):
            git_archive_build_origs(repo, source, output_dir, options)
    maybe_pristine_tar_commit(repo, source, options, output_dir, orig_files)
    pristine_tar_verify_origs(repo, source, options, output_dir, orig_files)
Example #10
def safe_patches(queue, tmpdir_base):
    """
    Save the current patches in a temporary directory
    below 'tmpdir_base'. Also, uncompress compressed patches here.

    @param queue: an existing patch queue
    @param tmpdir_base: base under which to create tmpdir
    @return: the saved queue (with patches in tmpdir)
    @rtype: L{PatchSeries}
    """

    tmpdir = tempfile.mkdtemp(dir=tmpdir_base, prefix='patchimport_')
    safequeue = PatchSeries()

    if len(queue) > 0:
        gbp.log.debug("Saving patches '%s' in '%s'" %
                      (os.path.dirname(queue[0].path), tmpdir))

    uncompressors = {'gzip': gzip.open, 'bzip2': bz2.BZ2File}
    for patch in queue:
        base, _archive_fmt, comp = parse_archive_filename(patch.path)
        if comp in uncompressors:
            gbp.log.debug("Uncompressing '%s'" % os.path.basename(patch.path))
            src = uncompressors[comp](patch.path, 'rb')
            dst_name = os.path.join(tmpdir, os.path.basename(base))
        elif comp:
            raise GbpError("Unsupported patch compression '%s', giving up" %
                           comp)
        else:
            src = open(patch.path, 'rb')
            dst_name = os.path.join(tmpdir, os.path.basename(patch.path))

        # Copy in binary mode so compressed and plain patches are handled alike
        with src, open(dst_name, 'wb') as dst:
            dst.writelines(src)

        safequeue.append(patch)
        safequeue[-1].path = dst_name

    return safequeue
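
The decompress-and-copy step in isolation (stdlib only; the patch name is
made up):

import gzip
import shutil

with gzip.open('fix-build.patch.gz', 'rb') as src, \
        open('fix-build.patch', 'wb') as dst:
    shutil.copyfileobj(src, dst)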
Example #11
def check_branch(repo, options):
    """
    Check if we're on the right branch and bail out otherwise

    @return: the current branch or C{None} if in detached head mode
    """
    branch = None
    try:
        branch = repo.get_branch()
    except GitRepositoryError:
        # Not being on any branch is o.k. with --git-ignore-branch
        if not options.ignore_branch:
            raise

    ignore = options.ignore_new or options.ignore_branch
    if branch != options.debian_branch and not ignore:
        gbp.log.err("You are not on branch '%s' but on '%s'" %
                    (options.debian_branch, branch))
        raise GbpError(
            "Use --git-ignore-branch to ignore or --git-debian-branch to set the branch name."
        )
    return branch
Example #12
def git_archive_single(treeish,
                       output,
                       prefix,
                       comp_type,
                       comp_level,
                       comp_opts,
                       format='tar'):
    """
    Create an archive without submodules

    Exception handling is left to the caller.
    """
    prefix = sanitize_prefix(prefix)
    pipe = pipes.Template()
    pipe.prepend(
        "git archive --format=%s --prefix=%s %s" % (format, prefix, treeish),
        '.-')
    if comp_type:
        pipe.append('%s -c -%s %s' % (comp_type, comp_level, comp_opts), '--')
    ret = pipe.copy('', output)
    if ret:
        raise GbpError("Error creating %s: %d" % (output, ret))
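
The pipes module is deprecated; for reference, the same pipeline expressed
with subprocess (tarball and prefix names are illustrative):

import subprocess
from gbp.errors import GbpError

git = subprocess.Popen(['git', 'archive', '--format=tar',
                        '--prefix=foo-1.0/', 'HEAD'], stdout=subprocess.PIPE)
with open('../foo_1.0.orig.tar.gz', 'wb') as out:
    gzip_proc = subprocess.Popen(['gzip', '-c', '-9', '-n'],
                                 stdin=git.stdout, stdout=out)
    git.stdout.close()  # let git see EPIPE if gzip exits early
if gzip_proc.wait() or git.wait():
    raise GbpError("Error creating ../foo_1.0.orig.tar.gz")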
Example #13
def generate_patches(repo, start, end, outdir, options):
    """
    Generate patch files from git
    """
    gbp.log.info("Generating patches from git (%s..%s)" % (start, end))
    patches = []
    for treeish in [start, end]:
        if not repo.has_treeish(treeish):
            raise GbpError('%s not a valid tree-ish' % treeish)

    # Generate patches
    rev_list = reversed(repo.get_commits(start, end))
    for commit in rev_list:
        info = repo.get_commit_info(commit)
        # Parse 'gbp-pq-topic:'
        topic = parse_old_style_topic(info)
        cmds = {'topic': topic} if topic else {}
        # Parse 'Gbp: ' style commands
        (cmds_gbp, info['body']) = parse_gbp_commands(info, 'gbp', ('ignore',),
                                                      ('topic',), ('topic',))
        cmds.update(cmds_gbp)
        # Parse 'Gbp-Pq: ' style commands
        (cmds_gbp_pq,
         info['body']) = parse_gbp_commands(info, 'gbp-pq', ('ignore',),
                                            ('topic',), ('topic',))
        cmds.update(cmds_gbp_pq)
        if 'ignore' not in cmds:
            if 'topic' in cmds:
                topic = cmds['topic']
            format_patch(outdir,
                         repo,
                         info,
                         patches,
                         options.patch_numbers,
                         topic=topic)
        else:
            gbp.log.info('Ignoring commit %s' % info['id'])

    return patches
Example #14
def fetch_snapshots(pkg, downloaddir):
    "Fetch snapshots using debsnap from snapshots.debian.org"
    gbp.log.info("Downloading snapshots of '%s' to '%s'..." %
                 (pkg, downloaddir))
    debsnap = gbpc.Command("debsnap", ['--force', '--destdir=%s' %
                                       downloaddir, pkg])
    try:
        debsnap()
    except gbpc.CommandExecFailed:
        if debsnap.retcode == 2:
            gbp.log.warn("Some packages failed to download. Continuing.")
            pass
        else:
            raise

    dscs = glob.glob(os.path.join(downloaddir, '*.dsc'))
    if not dscs:
        raise GbpError('No package downloaded')

    return [os.path.abspath(dsc) for dsc in dscs]
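
A hedged usage sketch (needs the debsnap tool from devscripts installed):

dscs = fetch_snapshots('hello', '/tmp/hello-snapshots')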
Example #15
def recursive_copy(src, dst):
    """Recursive copy, overwriting files and preserving symlinks"""
    # Remove existing destinations, if needed
    if os.path.isfile(dst) or os.path.islink(dst):
        os.unlink(dst)
    elif (os.path.isfile(src) or os.path.islink(src)) and os.path.isdir(dst):
        # Remove dst dir if src is a file
        shutil.rmtree(dst)

    try:
        if os.path.islink(src):
            os.symlink(os.readlink(src), dst)
        elif os.path.isdir(src):
            if not os.path.exists(dst):
                os.makedirs(dst)
            for fname in os.listdir(src):
                recursive_copy(os.path.join(src, fname),
                               os.path.join(dst, fname))
        else:
            shutil.copy2(src, dst)
    except (IOError, OSError) as err:
        raise GbpError("Error while copying '%s' to '%s': %s" % (src, dst, err))
Example #16
def git_archive(repo,
                source,
                output_dir,
                treeish,
                comp_type,
                comp_level,
                with_submodules,
                component=None):
    "create a compressed orig tarball in output_dir using git_archive"
    try:
        comp_opts = compressor_opts[comp_type][0]
    except KeyError:
        raise GbpError("Unsupported compression type '%s'" % comp_type)

    output = os.path.join(output_dir,
                          du.orig_file(source, comp_type, component=component))
    prefix = "%s-%s" % (source.name, source.upstream_version)

    try:
        if repo.has_submodules() and with_submodules:
            repo.update_submodules()
            git_archive_submodules(repo, treeish, output, prefix, comp_type,
                                   comp_level, comp_opts)

        else:
            git_archive_single(treeish, output, prefix, comp_type, comp_level,
                               comp_opts)
    except (GitRepositoryError, CommandExecFailed):
        gbp.log.err("Error generating the archive(s)")
        return False
    except OSError as e:
        gbp.log.err("Error creating %s: %s" % (output, str(e)))
        return False
    except GbpError:
        raise
    except Exception as e:
        gbp.log.err("Error creating %s: %s" % (output, str(e)))
        return False
    return True
Example #17
def git_archive_build_orig(repo, cp, output_dir, options):
    """
    Build orig tarball using git-archive

    @param cp: the changelog of the package we're acting on
    @type cp: L{ChangeLog}
    @param output_dir: where to put the tarball
    @type output_dir: C{Str}
    @param options: the parsed options
    @type options: C{dict} of options
    @return: the tree we built the tarball from
    @rtype: C{str}
    """
    upstream_tree = get_upstream_tree(repo, cp, options)
    gbp.log.info("%s does not exist, creating from '%s'" %
                 (du.orig_file(cp, options.comp_type), upstream_tree))
    gbp.log.debug("Building upstream tarball with compression '%s -%s'" %
                  (options.comp_type, options.comp_level))
    if not git_archive(repo, cp, output_dir, upstream_tree, options.comp_type,
                       options.comp_level, options.with_submodules):
        raise GbpError("Cannot create upstream tarball at '%s'" % output_dir)
    return upstream_tree
Example #18
def prepare_pristine_tar(archive, pkg, version):
    """
    Prepare the upstream source for pristine tar import.

    This checks if the upstream source is actually a tarball
    and creates a symlink from I{archive}
    to I{<pkg>_<version>.orig.tar.<ext>} so pristine-tar will
    see the correct basename. Same goes for an optional signature.

    @param archive: the upstream source's name
    @type archive: C{str}
    @param pkg: the source package's name
    @type pkg: C{str}
    @param version: the upstream version number
    @type version: C{str}
    @return: the archive to use and whether a symlink was created
    @rtype: C{tuple}
    """
    linked = False
    if os.path.isdir(archive):
        return None, False

    ext = os.path.splitext(archive)[1]
    if ext in ['.tgz', '.tbz2', '.tlz', '.txz']:
        ext = ".%s" % ext[2:]

    link = "../%s_%s.orig.tar%s" % (pkg, version, ext)
    if os.path.basename(archive) != os.path.basename(link):
        try:
            linked = maybe_link(archive, link)
            archive_sig = '{}.asc'.format(archive)
            if os.path.exists(archive_sig):
                maybe_link(archive_sig, '{}.asc'.format(link))
        except OSError as err:
            raise GbpError("Cannot symlink '%s' to '%s': %s" %
                           (archive, link, err))
        return (link, linked)
    else:
        return (archive, linked)
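
A worked example of the extension mapping above (standalone; package name and
version are made up):

import os

archive = 'foo-1.0.tgz'
ext = os.path.splitext(archive)[1]
if ext in ['.tgz', '.tbz2', '.tlz', '.txz']:
    ext = ".%s" % ext[2:]
print("../%s_%s.orig.tar%s" % ('foo', '1.0', ext))  # -> ../foo_1.0.orig.tar.gz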
Example #19
def guess_comp_type(comp_type, source, repo, tarball_dir):
    """Guess compression type to use for the to be built upstream tarball

    We prefer pristine-tar over everything else since this is what's carried around with
    the repo and might be more reliable than what a user has in tarball_dir.
    """
    if comp_type != 'auto':
        comp_type = Compressor.Aliases.get(comp_type, comp_type)
        if comp_type not in Compressor.Opts:
            gbp.log.warn("Unknown compression type - guessing.")
            comp_type = 'auto'

    if comp_type == 'auto':
        if repo and repo.has_pristine_tar_branch():
            regex = r'pristine-tar .* %s_%s\.orig\.tar\.' % (source.name, source.upstream_version)
            commits = repo.grep_log(regex, repo.pristine_tar_branch, merges=False)
            if commits:
                commit = commits[-1]
                gbp.log.debug("Found pristine-tar commit at '%s'" % commit)
            else:
                commit = repo.pristine_tar_branch
            tarball = repo.get_commit_info(commit)['subject']
            (base_name, archive_fmt, comp_type) = Archive.parse_filename(tarball)
            gbp.log.debug("Determined compression type '%s'" % comp_type)
            if not comp_type:
                comp_type = 'gzip'
                gbp.log.warn("Unknown compression type of %s, assuming %s" % (tarball, comp_type))
        else:
            if not tarball_dir:
                tarball_dir = '..'
            detected = None
            for comp in Compressor.Opts.keys():
                if du.DebianPkgPolicy.has_orig(source.upstream_tarball_name(comp), tarball_dir):
                    if detected is not None:
                        raise GbpError("Multiple orig tarballs found.")
                    detected = comp
            comp_type = 'gzip' if detected is None else detected
    return comp_type
Example #20
def bb_get_files(pkg_data, tgt_dir, whole_dir=False, download=True):
    """Get (local) packaging files"""
    uris = (pkg_data.getVar('SRC_URI', True) or "").split()
    try:
        fetch = bb.fetch2.Fetch(uris, pkg_data)
        if download:
            gbp.log.info("Fetching sources...")
            fetch.download()
    except bb.fetch2.BBFetchException as err:
        raise GbpError("Failed to fetch packaging files: %s" % err)

    # Copy local files to target directory
    bb_dir = os.path.dirname(pkg_data.getVar('FILE', True))
    remote = []
    local = [path for path in pkg_data.getVar('BBINCLUDED', True).split() if
             path.startswith(bb_dir) and os.path.exists(path)]
    for url in fetch.urls:
        path = fetch.localpath(url)
        if path.startswith(bb_dir):
            if not whole_dir:
                gbp.log.debug("Found local meta file '%s'" % path)
                local.append(path)
        else:
            gbp.log.debug("Found remote file '%s'" % path)
            remote.append(fetch.ud[url])

    if whole_dir:
        # Simply copy whole meta dir, if requested
        recursive_copy(bb_dir, tgt_dir)
    else:
        for path in local:
            relpath = os.path.relpath(path, bb_dir)
            subdir = os.path.join(tgt_dir, os.path.dirname(relpath))
            if not os.path.exists(subdir):
                os.makedirs(subdir)
            shutil.copy2(path, os.path.join(tgt_dir, relpath))

    return remote
Example #21
def prepare_pristine_tar(archive, pkg, version):
    """
    Prepare the upstream source for pristine tar import.

    This checks if the upstream source is actually a tarball
    and creates a symlink from I{archive}
    to I{<pkg>_<version>.orig.tar.<ext>} so pristine-tar will
    see the correct basename.

    @param archive: the upstream source's name
    @type archive: C{str}
    @param pkg: the source package's name
    @type pkg: C{str}
    @param version: the upstream version number
    @type version: C{str}
    @return: the archive to use and whether a symlink was created
    @rtype: C{tuple}
    """
    linked = False
    if os.path.isdir(archive):
        return (None, False)

    ext = os.path.splitext(archive)[1]
    if ext in ['.tgz', '.tbz2', '.tlz', '.txz']:
        ext = ".%s" % ext[2:]

    link = "../%s_%s.orig.tar%s" % (pkg, version, ext)

    if os.path.basename(archive) != os.path.basename(link):
        try:
            if not is_link_target(archive, link):
                os.symlink(os.path.abspath(archive), link)
                linked = True
        except OSError as err:
            raise GbpError("Cannot symlink '%s' to '%s': %s" %
                           (archive, link, err[1]))
        return (link, linked)
    else:
        return (archive, linked)
Example #22
    def rrr(self, refname, action, reftype):
        """
        Remember ref for rollback

        @param refname: ref to roll back
        @param action: the rollback action (delete, reset, ...)
        @param reftype: the reference type (tag, branch, ...)
        """
        sha = None

        if action == 'reset':
            try:
                sha = self.rev_parse(refname)
            except GitRepositoryError as err:
                gbp.log.warn("Failed to rev-parse %s: %s" % (refname, err))
        elif action == 'delete':
            pass
        elif action == 'abortmerge':
            pass
        else:
            raise GbpError("Unknown action %s for %s %s" %
                           (action, reftype, refname))
        self.rollbacks.append((refname, reftype, action, sha))
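
A hedged usage sketch (assumes 'repo' is an instance of the rollback-aware
repository class this method belongs to):

repo.rrr('refs/heads/debian', 'reset', 'branch')  # record the sha1 to reset to
repo.rrr('debian/1.0-1', 'delete', 'tag')         # deletion needs no saved sha1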
Example #23
def unpack_tarballs(repo, name, sources, version, options):
    tmpdir = tempfile.mkdtemp(dir='../')
    if not sources[0].is_dir():  # Unpack main tarball
        sources[0].unpack(tmpdir, options.filters)
        gbp.log.debug("Unpacked '%s' to '%s'" % (sources[0].path, sources[0].unpacked))

    try:
        postunpack_hook(repo, tmpdir, sources, options)
    except gbpc.CommandExecFailed:
        raise GbpError()  # The hook already printed an error message

    if orig_needs_repack(sources[0], options):
        gbp.log.debug("Filter pristine-tar: repacking '%s' from '%s'" % (sources[0].path,
                                                                         sources[0].unpacked))
        # FIXME: we should repack the other tarballs here too (See #860457)
        # for that we better move around sources instead of source[0]
        (source, tmpdir) = repack_upstream(sources[0], name, version, tmpdir, options.filters)
        sources[0] = source

    if not sources[0].is_dir():  # Unpack component tarballs
        for s in sources[1:]:
            s.unpack(sources[0].unpacked, options.filters)
    return (sources, tmpdir)
Example #24
    def _set_special_macro(self, name, identifier, args, insertafter):
        """Update a special macro line in spec file content"""
        key = name.lower()
        fullname = '%%%s%s' % (name, identifier)
        if key != 'patch':
            raise GbpError("Setting '%s' macro not supported" % name)

        updated = 0
        text = "%%%s%d %s\n" % (name, identifier, args)
        for line in self._special_directives[key]:
            if line['id'] == identifier:
                gbp.log.debug("Updating '%s' macro in spec" % fullname)
                line['args'] = args
                line['line'].set_data(text)
                ret = line['line']
                updated += 1
        if not updated:
            gbp.log.debug("Adding '%s' macro after '%s...' line in spec" %
                          (fullname, str(insertafter)[0:20]))
            ret = self._content.insert_after(insertafter, text)
            linerec = {'line': ret, 'id': identifier, 'args': args}
            self._special_directives[key].append(linerec)
        return ret
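
What the generated line looks like, e.g. for a patch macro (values are
illustrative):

>>> "%%%s%d %s\n" % ('patch', 1, '-p1')
'%patch1 -p1\n'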
Example #25
def git_archive_build_origs(repo, source, output_dir, options):
    """
    Build orig tarball(s) using git-archive

    @param source: the source package we're acting on
    @type source: L{DebianSource}
    @param output_dir: where to put the tarball
    @type output_dir: C{Str}
    @param options: the parsed options
    @type options: C{dict} of options
    """
    comp = Compressor(options.comp_type, options.comp_level)
    upstream_tree = git_archive_get_upstream_tree(repo, source, options)
    gbp.log.info("Creating %s from '%s'" %
                 (source.upstream_tarball_name(comp.type), upstream_tree))
    gbp.log.debug("Building upstream tarball with compression %s" % comp)
    tree = repo.tree_drop_dirs(
        upstream_tree,
        options.components) if options.components else upstream_tree
    repo.create_upstream_tarball_via_git_archive(source, output_dir, tree,
                                                 comp, options.with_submodules)
    for component in options.components:
        subtree = repo.tree_get_dir(upstream_tree, component)
        if not subtree:
            raise GbpError(
                "No tree for '%s' found in '%s' to create additional tarball from"
                % (component, upstream_tree))
        gbp.log.info("Creating additional tarball '%s' from '%s'" %
                     (source.upstream_tarball_name(
                         options.comp_type, component=component), subtree))
        repo.create_upstream_tarball_via_git_archive(source,
                                                     output_dir,
                                                     subtree,
                                                     comp,
                                                     options.with_submodules,
                                                     component=component)
Example #26
def guess_bb_path_from_repo(repo,
                            treeish=None,
                            topdir='',
                            recursive=True,
                            bbappend=False):
    """Guess a bitbake recipe path from a git repository"""
    topdir = topdir.rstrip('/') + '/' if topdir else ''
    # Search from working copy
    if not treeish:
        abspath = guess_bb_path_from_fs(os.path.join(repo.path, topdir),
                                        recursive, bbappend)
        return os.path.relpath(abspath, repo.path)

    # Search from treeish
    try:
        file_list = [
            nam for (mod, typ, sha,
                     nam) in repo.list_tree(treeish, recursive, topdir)
            if typ == 'blob'
        ]
    except GitRepositoryError as err:
        raise GbpError("Failed to search bb recipe from treeish %s, "
                       "Git error: %s" % (treeish, err))
    return guess_bb_file(file_list, bbappend)
Example #27
    def build_tarball_name(name,
                           version,
                           compression,
                           dir=None,
                           component=None):
        """
        Given a source package's I{name}, I{version} and I{compression}
        return the name of the corresponding upstream tarball.

        >>> DebianPkgPolicy.build_tarball_name('foo', '1.0', 'bzip2')
        'foo_1.0.orig.tar.bz2'
        >>> DebianPkgPolicy.build_tarball_name('bar', '0.0~git1234', 'xz')
        'bar_0.0~git1234.orig.tar.xz'
        >>> DebianPkgPolicy.build_tarball_name('bar', '0.0~git1234', 'xz', component="foo")
        'bar_0.0~git1234.orig-foo.tar.xz'

        @param name: the source package's name
        @type name: C{str}
        @param version: the upstream version
        @type version: C{str}
        @param compression: the desired compression
        @type compression: C{str}
        @param dir: a directory to prepend
        @type dir: C{str}
        @return: the tarball's name corresponding to the input parameters
        @rtype: C{str}
        """
        try:
            ext = Compressor.Exts[compression]
        except KeyError:
            raise GbpError("Unknown compression type '%s'" % compression)
        sub = '-{0}'.format(component) if component else ''
        tarball = "%s_%s.orig%s.tar.%s" % (name, version, sub, ext)
        if dir:
            tarball = os.path.join(dir, tarball)
        return tarball
Example #28
def mangle_changelog(changelog, cp, snapshot=''):
    """
    Mangle changelog to either add or remove snapshot markers

    @param snapshot: SHA1 if snapshot header should be added/maintained,
        empty if it should be removed
    @type  snapshot: C{str}
    """
    try:
        tmpfile = '%s.%s' % (changelog, snapshot)
        cw = open(tmpfile, 'w')
        cr = open(changelog, 'r')

        print("%(Source)s (%(MangledVersion)s) "
              "%(Distribution)s; urgency=%(urgency)s\n" % cp,
              file=cw)

        cr.readline()  # skip version and empty line
        cr.readline()
        line = cr.readline()
        if snapshot_re.match(line):
            cr.readline()  # consume the empty line after the snapshot header
            line = ''

        if snapshot:
            print("  ** SNAPSHOT build @%s **\n" % snapshot, file=cw)

        if line:
            print(line.rstrip(), file=cw)
        shutil.copyfileobj(cr, cw)
        cw.close()
        cr.close()
        os.unlink(changelog)
        os.rename(tmpfile, changelog)
    except OSError as e:
        raise GbpError("Error mangling changelog %s" % e)
Example #29
def generate_patches(repo, start, end, outdir, options):
    """
    Generate patch files from git
    """
    gbp.log.info("Generating patches from git (%s..%s)" % (start, end))
    patches = []
    commands = {}
    for treeish in [start, end]:
        if not repo.has_treeish(treeish):
            raise GbpError('Invalid treeish object %s' % treeish)

    start_sha1 = repo.rev_parse("%s^0" % start)
    try:
        end_commit = end
        end_commit_sha1 = repo.rev_parse("%s^0" % end_commit)
    except GitRepositoryError:
        # In case of plain tree-ish objects, assume current branch head is the
        # last commit
        end_commit = "HEAD"
        end_commit_sha1 = repo.rev_parse("%s^0" % end_commit)

    if not is_ancestor(repo, start_sha1, end_commit_sha1):
        raise GbpError("Start commit '%s' not an ancestor of end commit "
                       "'%s'" % (start, end_commit))
    # Check for merge commits, squash if merges found
    merges = repo.get_commits(start, end_commit, options=['--merges'])
    if merges:
        # Shorten SHA1s
        start_sha1 = repo.rev_parse(start, short=options.abbrev)
        merge_sha1 = repo.rev_parse(merges[0], short=options.abbrev)
        patch_fn = format_diff(outdir,
                               None,
                               repo,
                               start_sha1,
                               merge_sha1,
                               abbrev=options.abbrev)
        if patch_fn:
            gbp.log.info("Merge commits found! Diff between %s..%s written "
                         "into one monolithic diff" % (start_sha1, merge_sha1))
            patches.append(patch_fn)
            start = merge_sha1

    # Generate patches
    for commit in reversed(repo.get_commits(start, end_commit)):
        info = repo.get_commit_info(commit)
        (cmds, info['body']) = parse_gbp_commands(info, 'gbp-rpm', ('ignore',),
                                                  ('if', 'ifarch'))
        if 'ignore' not in cmds:
            patch_fn = format_patch(outdir,
                                    repo,
                                    info,
                                    patches,
                                    numbered=options.patch_numbers,
                                    abbrev=options.abbrev)
            if patch_fn:
                commands[os.path.basename(patch_fn)] = cmds
        else:
            gbp.log.info('Ignoring commit %s' % info['id'])

    # Generate diff to the tree-ish object
    if end_commit != end:
        gbp.log.info("Generating diff file %s..%s" % (end_commit, end))
        patch_fn = format_diff(outdir,
                               None,
                               repo,
                               end_commit,
                               end,
                               options.patch_export_ignore_path,
                               abbrev=options.abbrev)
        if patch_fn:
            patches.append(patch_fn)

    return [os.path.relpath(p) for p in patches], commands
Example #30
def import_spec_patches(repo, options):
    """
    Apply the patch series from the spec/packaging dir onto the
    patch-queue branch for the current packaging branch

    @param repo: git repository to work on
    @param options: command options
    """
    current = repo.get_branch()
    # Get spec and related information
    if is_pq_branch(current):
        base = pq_branch_base(current)
        if options.force:
            spec = parse_spec(options, repo, base)
            spec_treeish = base
        else:
            raise GbpError("Already on a patch-queue branch '%s' - doing "
                           "nothing." % current)
    else:
        spec = parse_spec(options, repo)
        spec_treeish = None
        base = current
    upstream_commit = find_upstream_commit(repo, spec, options.upstream_tag)
    packager = get_packager(spec)
    pq_branch = pq_branch_name(base)

    # Create pq-branch
    if repo.has_branch(pq_branch) and not options.force:
        raise GbpError("Patch-queue branch '%s' already exists. "
                       "Try 'switch' instead." % pq_branch)
    try:
        if repo.get_branch() == pq_branch:
            repo.force_head(upstream_commit, hard=True)
        else:
            repo.create_branch(pq_branch, upstream_commit, force=True)
    except GitRepositoryError as err:
        raise GbpError("Cannot create patch-queue branch '%s': %s" %
                       (pq_branch, err))

    # Put patches in a safe place
    if spec_treeish:
        packaging_tmp = tempfile.mkdtemp(prefix='dump_')
        packaging_tree = '%s:%s' % (spec_treeish, options.packaging_dir)
        dump_tree(repo,
                  packaging_tmp,
                  packaging_tree,
                  with_submodules=False,
                  recursive=False)
        spec.specdir = packaging_tmp
    in_queue = spec.patchseries()
    queue = safe_patches(in_queue)
    # Do import
    try:
        gbp.log.info("Switching to branch '%s'" % pq_branch)
        repo.set_branch(pq_branch)

        if not queue:
            return
        gbp.log.info("Trying to apply patches from branch '%s' onto '%s'" %
                     (base, upstream_commit))
        for patch in queue:
            gbp.log.debug("Applying %s" % patch.path)
            apply_and_commit_patch(repo, patch, packager)
    except (GbpError, GitRepositoryError) as err:
        repo.set_branch(base)
        repo.delete_branch(pq_branch)
        raise GbpError('Import failed: %s' % err)

    gbp.log.info("%d patches listed in '%s' imported on '%s'" %
                 (len(queue), spec.specfile, pq_branch))