def _guess_orig_file(self):
    """
    Try to guess the name of the primary upstream/source archive.

    Scans the spec's sources in source-number order and picks the first
    archive whose base name starts with the package name; failing that,
    the first archive of any recognized format.

    @return: dict describing the archive ('num', 'filename', 'uri',
        'filename_base', 'archive_fmt', 'compression', 'prefix'),
        or None if no archive-formatted source was found
    """
    orig = None
    sources = self.sources()
    # dict.items() instead of the Python-2-only iteritems(); sorted()
    # materializes the pairs either way, so behavior is unchanged.
    for num, filename in sorted(sources.items()):
        src = {'num': num,
               'filename': os.path.basename(filename),
               'uri': filename}
        src['filename_base'], src['archive_fmt'], src['compression'] = \
            parse_archive_filename(os.path.basename(filename))
        if (src['filename_base'].startswith(self.name) and
                src['archive_fmt']):
            # Take the first archive that starts with pkg name
            orig = src
            break
        # otherwise we take the first archive
        elif not orig and src['archive_fmt']:
            orig = src
        # else don't accept
    if orig:
        orig['prefix'] = self._guess_orig_prefix(orig)
    return orig
def _guess_orig_file(self):
    """
    Try to guess the name of the primary upstream/source archive.

    Prefers the first archive (in source-number order) whose base name
    starts with the package name, otherwise falls back to the first
    source that parses as an archive at all.

    @return: dict with all the relevant information ('num', 'filename',
        'uri', 'filename_base', 'archive_fmt', 'compression', 'prefix'),
        or None when nothing qualifies
    """
    orig = None
    sources = self.sources()
    # Use items() rather than the Python-2-only iteritems(); sorted()
    # consumes either the same way.
    for num, filename in sorted(sources.items()):
        src = {'num': num,
               'filename': os.path.basename(filename),
               'uri': filename}
        src['filename_base'], src['archive_fmt'], src['compression'] = \
            parse_archive_filename(os.path.basename(filename))
        if (src['filename_base'].startswith(self.name) and
                src['archive_fmt']):
            # Take the first archive that starts with pkg name
            orig = src
            break
        # otherwise we take the first archive
        elif not orig and src['archive_fmt']:
            orig = src
        # else don't accept
    if orig:
        orig['prefix'] = self._guess_orig_prefix(orig)
    return orig
def import_upstream_archive(repo, pkg_data, fetch_data, dirs, options):
    """Import upstream sources from archive"""
    # Unpack the orig source archive into the scratch directory
    path = fetch_data.localpath
    sources = RpmUpstreamSource(path).unpack(dirs['origsrc'], options.filters)

    tag_str_fields = dict(pkg_version(pkg_data),
                          vendor=options.vendor.lower())
    tag = repo.version_to_tag(options.upstream_tag, tag_str_fields)

    if not repo.has_tag(tag):
        gbp.log.info("Tag %s not found, importing upstream sources" % tag)
        branch = options.upstream_branch
        msg = "Upstream version %s" % tag_str_fields['upstreamversion']
        # Optionally graft the import onto a tag of the upstream VCS
        parents = ([repo.rev_parse("%s^{}" % options.vcs_tag)]
                   if options.vcs_tag else None)
        commit = repo.commit_dir(
            sources.unpacked, "Imported %s" % msg, branch,
            other_parents=parents,
            create_missing_branch=options.create_missing_branches)
        repo.create_tag(name=tag, msg=msg, commit=commit,
                        sign=options.sign_tags, keyid=options.keyid)

        if options.pristine_tar:
            archive_fmt = parse_archive_filename(path)[1]
            if archive_fmt == 'tar':
                repo.pristine_tar.commit(path, 'refs/heads/%s' % branch)
            else:
                gbp.log.warn('Ignoring pristine-tar, %s archives '
                             'not supported' % archive_fmt)
    # Resolve the (possibly pre-existing) tag to a commit sha
    return repo.rev_parse('%s^0' % tag)
def prepare_pristine_tar(source, pkg_name, pkg_version, pristine_commit_name,
                         filters=None, prefix=None, tmpdir=None):
    """
    Prepare the upstream sources for pristine-tar import

    @param source: original upstream sources
    @type source: C{UpstreamSource}
    @param pkg_name: package name
    @type pkg_name: C{str}
    @param pkg_version: upstream version of the package
    @type pkg_version: C{str}
    @param pristine_commit_name: archive filename to commit to pristine-tar
    @type pristine_commit_name: C{str} or C{None}
    @param filters: filter to exclude files
    @type filters: C{list} of C{str} or C{None}
    @param prefix: prefix (i.e. leading directory of files) to use in
                   pristine-tar, set to C{None} to not mangle orig archive
    @type prefix: C{str} or C{None}
    @param tmpdir: temporary working dir (cleanup left to caller)
    @type tmpdir: C{str}
    @return: prepared source archive
    @rtype: C{UpstreamSource}
    """
    repack = False
    if source.is_dir():
        # A directory always has to be packed into an archive
        if prefix is None:
            prefix = '%s-%s' % (pkg_name, pkg_version)
            gbp.log.info("Using guessed prefix '%s/' for pristine-tar" %
                         prefix)
        repack = True
    else:
        if prefix is not None and prefix == source.prefix:
            # Archive already carries the wanted prefix -> no mangling
            prefix = None
        comp = parse_archive_filename(pristine_commit_name)[2]
        if filters or prefix is not None or source.compression != comp:
            # Content or compression must change -> unpack (if needed)
            # and repack below
            if not source.unpacked:
                unpack_dir = tempfile.mkdtemp(prefix='pristine_unpack_',
                                              dir=tmpdir)
                source.unpack(unpack_dir)
            repack = True

    pristine_path = os.path.join(tmpdir, pristine_commit_name)
    if not repack:
        # Just create symlink for mangling the pristine tarball name
        os.symlink(source.path, pristine_path)
        return source.__class__(pristine_path)

    gbp.log.debug("Packing '%s' from '%s' for pristine-tar" %
                  (pristine_path, source.unpacked))
    return source.pack(pristine_path, filters, prefix)
def get_component_tarballs(name, version, tarball, components):
    """
    Figure out the paths to the component tarballs based on the main
    tarball.

    @param name: source package name
    @param version: upstream version
    @param tarball: path to the main tarball
    @param components: iterable of additional component names
    @return: list of (component, tarball-path) tuples
    @raises GbpError: if an expected component tarball does not exist
    """
    # Compression type and directory derive from the main tarball only,
    # so compute them once instead of on every loop iteration.
    comp_type = parse_archive_filename(tarball)[2]
    tarball_dir = os.path.dirname(tarball)
    tarballs = []
    for component in components:
        cname = DebianPkgPolicy.build_tarball_name(name,
                                                   version,
                                                   comp_type,
                                                   tarball_dir,
                                                   component)
        tarballs.append((component, cname))
        if not os.path.exists(cname):
            raise GbpError("Can not find component tarball %s" % cname)
    return tarballs
def safe_patches(queue, tmpdir_base):
    """
    Safe the current patches in a temporary directory below 'tmpdir_base'.
    Also, uncompress compressed patches here.

    @param queue: an existing patch queue
    @param tmpdir_base: base under which to create tmpdir
    @return: tmpdir and a safed queue (with patches in tmpdir)
    @rtype: tuple
    """
    tmpdir = tempfile.mkdtemp(dir=tmpdir_base, prefix='patchimport_')
    safequeue = PatchSeries()

    # Supported decompressors; loop-invariant, so built only once.
    uncompressors = {'gzip': gzip.open, 'bzip2': bz2.BZ2File}

    if len(queue) > 0:
        gbp.log.debug("Safeing patches '%s' in '%s'" %
                      (os.path.dirname(queue[0].path), tmpdir))
    for patch in queue:
        base, _archive_fmt, comp = parse_archive_filename(patch.path)
        # Where an unpacked archive patch ends up; also assigned here so
        # the _archive_fmt branch below cannot hit an unbound name when
        # the archive is not compressed (was a latent NameError).
        tar_name = os.path.join(tmpdir, os.path.basename(base))
        if comp in uncompressors:
            gbp.log.debug("Uncompressing '%s'" %
                          os.path.basename(patch.path))
            src = uncompressors[comp](patch.path, 'r')
            dst_name = tar_name
            if _archive_fmt:
                dst_name += '.tar'
        elif comp:
            raise GbpError("Unsupported patch compression '%s', giving up"
                           % comp)
        else:
            src = open(patch.path, 'r')
            dst_name = os.path.join(tmpdir, os.path.basename(patch.path))

        # Context managers close both handles even if the copy fails
        with src:
            with open(dst_name, 'w') as dst:
                dst.writelines(src)

        if _archive_fmt:
            # Unpack tar-formatted patches; the queue entry then points
            # at the extracted content (base name without extensions)
            t = tarfile.open(dst_name, 'r:')
            t.extractall(path=tmpdir)
            t.close()
            dst_name = tar_name

        safequeue.append(patch)
        safequeue[-1].path = dst_name
    return safequeue
def pristine_tarball_name(source, pkg_name, pkg_version, pristine_name):
    """
    Resolve the archive filename to commit to pristine-tar.

    @param source: upstream source archive (its basename is the default)
    @param pkg_name: package name, substituted into the name format
    @param pkg_version: upstream version, substituted into the name format
    @param pristine_name: filename format string, or 'auto' to derive the
        name from the source archive itself
    @return: the tarball filename to use
    """
    old_filename = os.path.basename(source.path)
    base_name, _fmt, _comp = parse_archive_filename(old_filename)
    if pristine_name != 'auto':
        # str.replace() method instead of the removed Python-2-only
        # string.replace() module function; semantics are identical.
        ext = old_filename.replace(base_name, '', 1)
        return pristine_name % {'name': pkg_name,
                                'version': pkg_version,
                                'upstreamversion': pkg_version,
                                'filename_base': base_name,
                                'filename_ext': ext}
    # Need to repack and mangle filename if the archive is not
    # pristine-tar-compatible -> we decide to create gz compressed tarball
    elif not source.is_tarball():
        return "%s.tar.gz" % base_name
    return old_filename
def guess_comp_type(repo, comp_type, cp, tarball_dir):
    """
    Guess compression type

    Resolves 'auto' (or an unknown type) by looking for an existing orig
    tarball in tarball_dir, or, when a pristine-tar branch exists, by
    parsing the tarball name recorded in its latest matching commit.

    @return: the compression type (e.g. 'gzip'), defaulting to 'gzip'
    """
    srcpkg = cp['Source']
    upstream_version = cp['Upstream-Version']
    if comp_type != 'auto':
        comp_type = compressor_aliases.get(comp_type, comp_type)
        if comp_type not in compressor_opts:
            gbp.log.warn("Unknown compression type - guessing.")
            comp_type = 'auto'
    if comp_type == 'auto':
        if not repo.has_pristine_tar_branch():
            if not tarball_dir:
                tarball_dir = '..'
            detected = None
            # Probe the tarball dir for each known compression; more than
            # one hit is ambiguous and therefore an error.
            for comp in compressor_opts.keys():
                if du.DebianPkgPolicy.has_orig(du.orig_file(cp, comp),
                                               tarball_dir):
                    if detected is not None:
                        raise GbpError("Multiple orig tarballs found.")
                    detected = comp
            if detected is not None:
                comp_type = detected
            else:
                comp_type = 'gzip'
        else:
            # Raw string so the regex escapes are not (deprecated) string
            # escapes; the pattern itself is unchanged.
            regex = r'pristine-tar .* %s_%s\.orig.tar\.' % (srcpkg,
                                                            upstream_version)
            commits = repo.grep_log(regex, repo.pristine_tar_branch)
            if commits:
                commit = commits[-1]
                gbp.log.debug("Found pristine-tar commit at '%s'" % commit)
            else:
                commit = repo.pristine_tar_branch
            tarball = repo.get_commit_info(commit)['subject']
            (base_name, archive_fmt,
             comp_type) = parse_archive_filename(tarball)
            gbp.log.debug("Determined compression type '%s'" % comp_type)
            if not comp_type:
                comp_type = 'gzip'
                gbp.log.warn("Unknown compression type of %s, assuming %s"
                             % (tarball, comp_type))
    return comp_type
def guess_upstream_source(pkg_data, remotes):
    """Guess the primary upstream source archive."""
    name = pkg_data.getVar('PN', True)
    orig = None
    # Note: no early exit — a later, better-matching source may still
    # replace an earlier fallback pick.
    for fetch_data in remotes:
        if fetch_data.type == 'git':
            orig = fetch_data
            continue
        fname = os.path.basename(fetch_data.localpath)
        fn_base, archive_fmt, _ = parse_archive_filename(fname)
        if not archive_fmt:
            # Not an archive -> never a candidate
            continue
        if fn_base.startswith(name):
            # An archive named after the package wins
            orig = fetch_data
        elif not orig:
            # Otherwise the first archive seen serves as fallback
            orig = fetch_data
    return orig
def safe_patches(queue, tmpdir_base):
    """
    Safe the current patches in a temporary directory below 'tmpdir_base'.
    Also, uncompress compressed patches here.

    @param queue: an existing patch queue
    @param tmpdir_base: base under which to create tmpdir
    @return: tmpdir and a safed queue (with patches in tmpdir)
    @rtype: tuple
    """
    tmpdir = tempfile.mkdtemp(dir=tmpdir_base, prefix='patchimport_')
    safequeue = PatchSeries()

    # Supported decompressors; loop-invariant, so built only once.
    uncompressors = {'gzip': gzip.open, 'bzip2': bz2.BZ2File}

    if len(queue) > 0:
        gbp.log.debug("Safeing patches '%s' in '%s'" %
                      (os.path.dirname(queue[0].path), tmpdir))
    for patch in queue:
        base, _archive_fmt, comp = parse_archive_filename(patch.path)
        if comp in uncompressors:
            gbp.log.debug("Uncompressing '%s'" %
                          os.path.basename(patch.path))
            src = uncompressors[comp](patch.path, 'r')
            dst_name = os.path.join(tmpdir, os.path.basename(base))
        elif comp:
            raise GbpError("Unsupported patch compression '%s', giving up"
                           % comp)
        else:
            src = open(patch.path, 'r')
            dst_name = os.path.join(tmpdir, os.path.basename(patch.path))

        # Context managers close both handles even if the copy fails
        with src:
            with open(dst_name, 'w') as dst:
                dst.writelines(src)

        safequeue.append(patch)
        safequeue[-1].path = dst_name
    return safequeue
def is_valid_orig_archive(cls, filename):
    """
    Is this a valid orig source archive

    @param filename: upstream source archive filename
    @type filename: C{str}
    @return: true if valid upstream source archive filename
    @rtype: C{bool}

    >>> RpmPkgPolicy.is_valid_orig_archive("foo/bar_baz.tar.gz")
    True
    >>> RpmPkgPolicy.is_valid_orig_archive("foo.bar.tar")
    True
    >>> RpmPkgPolicy.is_valid_orig_archive("foo.bar")
    False
    >>> RpmPkgPolicy.is_valid_orig_archive("foo.gz")
    False
    """
    # Valid exactly when the name parses to a recognized archive format
    _base, arch_fmt, _compression = parse_archive_filename(filename)
    return bool(arch_fmt)
def safe_patches(queue):
    """
    Safe the current patches in a temporary directory

    @param queue: an existing patch queue
    @return: safed queue (with patches in tmpdir)
    @rtype: tuple
    """
    tmpdir = tempfile.mkdtemp(prefix='patchimport_')
    safequeue = PatchSeries()

    # Supported decompressors; loop-invariant, so built only once.
    uncompressors = {'gzip': gzip.open, 'bzip2': bz2.BZ2File}

    if len(queue) > 0:
        gbp.log.debug("Safeing patches '%s' in '%s'" %
                      (os.path.dirname(queue[0].path), tmpdir))
    for patch in queue:
        base, _archive_fmt, comp = parse_archive_filename(patch.path)
        if comp in uncompressors:
            gbp.log.debug("Uncompressing '%s'" %
                          os.path.basename(patch.path))
            src = uncompressors[comp](patch.path, 'r')
            dst_name = os.path.join(tmpdir, os.path.basename(base))
        elif comp:
            raise GbpError("Unsupported patch compression '%s', giving up"
                           % comp)
        else:
            src = open(patch.path, 'r')
            dst_name = os.path.join(tmpdir, os.path.basename(patch.path))

        # Context managers close both handles even if the copy fails
        with src:
            with open(dst_name, 'w') as dst:
                dst.writelines(src)

        safequeue.append(patch)
        safequeue[-1].path = dst_name
    return safequeue
def _guess_orig_file(self):
    """
    Try to guess the name of the primary upstream/source archive.

    Returns a dict with all the relevant information, or None when no
    source parses as an archive.
    """
    sources = self.sources()
    orig = None
    for num, filename in sorted(six.iteritems(sources)):
        basename = os.path.basename(filename)
        base, fmt, comp = parse_archive_filename(basename)
        src = {"num": num,
               "filename": basename,
               "uri": filename,
               "filename_base": base,
               "archive_fmt": fmt,
               "compression": comp}
        if fmt and base.startswith(self.name):
            # First archive named after the package wins outright
            orig = src
            break
        if fmt and not orig:
            # Otherwise remember the first archive as a fallback
            orig = src
        # Non-archives are never accepted
    if orig:
        orig["prefix"] = self._guess_orig_prefix(orig)
    return orig
def main(argv):
    """Main function of the git-import-srpm script

    Imports an srpm (or an unpacked srpm tree / bare spec file) into a
    git repository: upstream sources onto the upstream branch (or the
    packaging branch for native packages) and packaging files onto the
    packaging branch, tagging both.

    @param argv: command line arguments
    @return: 0 on success, 1 on error
    """
    dirs = dict(top=os.path.abspath(os.curdir))
    ret = 0
    skipped = False

    options, args = parse_args(argv)

    if len(args) != 1:
        gbp.log.err("Need to give exactly one package to import. Try --help.")
        return 1
    try:
        dirs['tmp_base'] = tempfile.mkdtemp(dir=options.tmp_dir,
                                            prefix='import-srpm')
    except GbpError as err:
        gbp.log.err(err)
        return 1
    try:
        srpm = args[0]
        if options.download:
            srpm = download_source(srpm, dirs)

        # Real srpm, we need to unpack, first
        true_srcrpm = False
        if not os.path.isdir(srpm) and not srpm.endswith(".spec"):
            src = parse_srpm(srpm)
            true_srcrpm = True
            dirs['pkgextract'] = tempfile.mkdtemp(dir=dirs['tmp_base'],
                                                  prefix='pkgextract_')
            gbp.log.info("Extracting src rpm to '%s'" % dirs['pkgextract'])
            src.unpack(dirs['pkgextract'])
            preferred_spec = src.name + '.spec'
            srpm = dirs['pkgextract']
        elif os.path.isdir(srpm):
            preferred_spec = os.path.basename(srpm.rstrip('/')) + '.spec'
        else:
            preferred_spec = None

        # Find and parse spec file
        if os.path.isdir(srpm):
            gbp.log.debug("Trying to import an unpacked srpm from '%s'" % srpm)
            dirs['src'] = os.path.abspath(srpm)
            spec = guess_spec(srpm, True, preferred_spec)
        else:
            gbp.log.debug("Trying to import an srpm from '%s' with spec "
                          "file '%s'" % (os.path.dirname(srpm), srpm))
            dirs['src'] = os.path.abspath(os.path.dirname(srpm))
            spec = SpecFile(srpm)

        # Check the repository state; a missing repo is created from
        # scratch and treated as empty.
        try:
            repo = RpmGitRepository('.')
            is_empty = repo.is_empty()

            (clean, out) = repo.is_clean()
            if not clean and not is_empty:
                gbp.log.err("Repository has uncommitted changes, commit "
                            "these first: ")
                # NOTE: Python-2-only raise syntax, kept as-is
                raise GbpError, out
        except GitRepositoryError:
            gbp.log.info("No git repository found, creating one.")
            is_empty = True
            repo = RpmGitRepository.create(spec.name)
        os.chdir(repo.path)

        if repo.bare:
            set_bare_repo_options(options)

        # Create more tempdirs
        dirs['origsrc'] = tempfile.mkdtemp(dir=dirs['tmp_base'],
                                           prefix='origsrc_')
        dirs['packaging_base'] = tempfile.mkdtemp(dir=dirs['tmp_base'],
                                                  prefix='packaging_')
        dirs['packaging'] = os.path.join(dirs['packaging_base'],
                                         options.packaging_dir)
        try:
            os.mkdir(dirs['packaging'])
        except OSError as err:
            if err.errno != errno.EEXIST:
                raise

        if true_srcrpm:
            # For true src.rpm we just take everything
            files = os.listdir(dirs['src'])
        else:
            # Need to copy files to the packaging directory given by caller
            files = [os.path.basename(patch.path)
                     for patch in spec.patchseries(unapplied=True,
                                                   ignored=True)]
            for filename in spec.sources().values():
                files.append(os.path.basename(filename))
            files.append(os.path.join(spec.specdir, spec.specfile))
        # Don't copy orig source archive, though
        if spec.orig_src and spec.orig_src['filename'] in files:
            files.remove(spec.orig_src['filename'])

        for fname in files:
            fpath = os.path.join(dirs['src'], fname)
            if os.path.exists(fpath):
                shutil.copy2(fpath, dirs['packaging'])
            else:
                gbp.log.err("File '%s' listed in spec not found" % fname)
                raise GbpError

        # Unpack orig source archive
        if spec.orig_src:
            orig_tarball = os.path.join(dirs['src'], spec.orig_src['filename'])
            sources = RpmUpstreamSource(orig_tarball)
            sources.unpack(dirs['origsrc'], options.filters)
        else:
            sources = None

        # Native packages carry sources on the packaging branch/tag
        src_tag_format = options.packaging_tag if options.native \
            else options.upstream_tag
        tag_str_fields = dict(spec.version, vendor=options.vendor.lower())
        src_tag = repo.version_to_tag(src_tag_format, tag_str_fields)
        ver_str = compose_version_str(spec.version)

        if repo.find_version(options.packaging_tag, tag_str_fields):
            gbp.log.warn("Version %s already imported." % ver_str)
            if options.allow_same_version:
                gbp.log.info("Moving tag of version '%s' since import forced"
                             % ver_str)
                move_tag_stamp(repo, options.packaging_tag, tag_str_fields)
            else:
                raise SkipImport

        if is_empty:
            options.create_missing_branches = True

        # Determine author and committer info, currently same info is used
        # for both sources and packaging files
        author = None
        if spec.packager:
            match = re.match(r'(?P<name>.*[^ ])\s*<(?P<email>\S*)>',
                             spec.packager.strip())
            if match:
                author = GitModifier(match.group('name'),
                                     match.group('email'))
        if not author:
            author = GitModifier()
            gbp.log.debug("Couldn't determine packager info")
        committer = committer_from_author(author, options)

        # Import sources
        if sources:
            src_commit = repo.find_version(src_tag_format, tag_str_fields)
            if not src_commit:
                gbp.log.info("Tag %s not found, importing sources" % src_tag)
                branch = [options.upstream_branch,
                          options.packaging_branch][options.native]
                if not repo.has_branch(branch):
                    if options.create_missing_branches:
                        gbp.log.info("Will create missing branch '%s'" %
                                     branch)
                    else:
                        gbp.log.err(
                            no_upstream_branch_msg % branch + "\n"
                            "Also check the --create-missing-branches option.")
                        raise GbpError
                src_vendor = "Native" if options.native else "Upstream"
                msg = "%s version %s" % (src_vendor, spec.upstreamversion)
                src_commit = repo.commit_dir(
                    sources.unpacked, "Imported %s" % msg, branch,
                    author=author, committer=committer,
                    create_missing_branch=options.create_missing_branches)

                repo.create_tag(name=src_tag, msg=msg, commit=src_commit,
                                sign=options.sign_tags, keyid=options.keyid)

                if not options.native:
                    if options.pristine_tar:
                        # pristine-tar can only handle plain tar archives
                        archive_fmt = parse_archive_filename(orig_tarball)[1]
                        if archive_fmt == 'tar':
                            repo.pristine_tar.commit(
                                orig_tarball,
                                'refs/heads/%s' % options.upstream_branch)
                        else:
                            gbp.log.warn('Ignoring pristine-tar, %s archives '
                                         'not supported' % archive_fmt)
        else:
            gbp.log.info("No orig source archive imported")

        # Import packaging files. For native packages we assume that also
        # packaging files are found in the source tarball
        if not options.native or not sources:
            gbp.log.info("Importing packaging files")
            branch = options.packaging_branch
            if not repo.has_branch(branch):
                if options.create_missing_branches:
                    gbp.log.info("Will create missing branch '%s'" % branch)
                else:
                    gbp.log.err(no_packaging_branch_msg % branch + "\n"
                                "Also check the --create-missing-branches "
                                "option.")
                    raise GbpError

            tag = repo.version_to_tag(options.packaging_tag, tag_str_fields)
            msg = "%s release %s" % (options.vendor, ver_str)

            if options.orphan_packaging or not sources:
                # Packaging files live on their own, sourceless tree
                commit = repo.commit_dir(
                    dirs['packaging_base'], "Imported %s" % msg, branch,
                    author=author, committer=committer,
                    create_missing_branch=options.create_missing_branches)
            else:
                # Copy packaging files to the unpacked sources dir
                try:
                    pkgsubdir = os.path.join(sources.unpacked,
                                             options.packaging_dir)
                    os.mkdir(pkgsubdir)
                except OSError as err:
                    if err.errno != errno.EEXIST:
                        raise
                for fname in os.listdir(dirs['packaging']):
                    shutil.copy2(os.path.join(dirs['packaging'], fname),
                                 pkgsubdir)
                commit = repo.commit_dir(
                    sources.unpacked, "Imported %s" % msg, branch,
                    other_parents=[src_commit],
                    author=author, committer=committer,
                    create_missing_branch=options.create_missing_branches)
                # Import patches on top of the source tree
                # (only for non-native packages with non-orphan packaging)
                force_to_branch_head(repo, options.packaging_branch)

            # Create packaging tag
            repo.create_tag(name=tag, msg=msg, commit=commit,
                            sign=options.sign_tags, keyid=options.keyid)

        force_to_branch_head(repo, options.packaging_branch)

    except KeyboardInterrupt:
        ret = 1
        gbp.log.err("Interrupted. Aborting.")
    except gbpc.CommandExecFailed:
        ret = 1
    except GitRepositoryError as err:
        gbp.log.err("Git command failed: %s" % err)
        ret = 1
    except GbpError as err:
        if str(err):
            gbp.log.err(err)
        ret = 1
    except NoSpecError as err:
        gbp.log.err("Failed determine spec file: %s" % err)
        ret = 1
    except SkipImport:
        skipped = True
    finally:
        # Always restore cwd and drop the scratch tree
        os.chdir(dirs['top'])
        gbpc.RemoveTree(dirs['tmp_base'])()

    if not ret and not skipped:
        gbp.log.info("Version '%s' imported under '%s'" % (ver_str, spec.name))
    return ret
def main(argv):
    """Main function of the git-import-srpm script

    Imports an srpm (or an unpacked srpm tree / bare spec file) into a
    git repository: upstream sources onto the upstream branch (or the
    packaging branch for native packages) and packaging files onto the
    packaging branch, tagging both. An optional second argument names
    the repository directory to create.

    @param argv: command line arguments
    @return: 0 on success, non-zero on error
    """
    dirs = dict(top=os.path.abspath(os.curdir))
    ret = 0
    skipped = False

    options, args = parse_args(argv)
    if not options:
        return ExitCodes.parse_error

    if len(args) == 1:
        srpm = args[0]
        target = None
    elif len(args) == 2:
        srpm = args[0]
        target = args[1]
    else:
        gbp.log.err("Need to give exactly one package to import. Try --help.")
        return 1
    try:
        dirs['tmp_base'] = init_tmpdir(options.tmp_dir, 'import-srpm_')
    except GbpError as err:
        gbp.log.err(err)
        return 1
    try:
        if options.download:
            srpm = download_source(srpm)

        # Real srpm, we need to unpack, first
        true_srcrpm = False
        if not os.path.isdir(srpm) and not srpm.endswith(".spec"):
            src = parse_srpm(srpm)
            true_srcrpm = True
            dirs['pkgextract'] = tempfile.mkdtemp(prefix='pkgextract_')
            gbp.log.info("Extracting src rpm to '%s'" % dirs['pkgextract'])
            src.unpack(dirs['pkgextract'])
            preferred_spec = src.name + '.spec'
            srpm = dirs['pkgextract']
        elif os.path.isdir(srpm):
            preferred_spec = os.path.basename(srpm.rstrip('/')) + '.spec'
        else:
            preferred_spec = None

        # Find and parse spec file
        if os.path.isdir(srpm):
            gbp.log.debug("Trying to import an unpacked srpm from '%s'" % srpm)
            dirs['src'] = os.path.abspath(srpm)
            spec = guess_spec(srpm, True, preferred_spec)
        else:
            gbp.log.debug("Trying to import an srpm from '%s' with spec "
                          "file '%s'" % (os.path.dirname(srpm), srpm))
            dirs['src'] = os.path.abspath(os.path.dirname(srpm))
            spec = SpecFile(srpm)

        # Check the repository state; a missing repo is created from
        # scratch (named after the target arg or the package).
        try:
            repo = RpmGitRepository('.')
            is_empty = repo.is_empty()

            (clean, out) = repo.is_clean()
            if not clean and not is_empty:
                gbp.log.err("Repository has uncommitted changes, commit "
                            "these first: ")
                raise GbpError(out)
        except GitRepositoryError:
            gbp.log.info("No git repository found, creating one.")
            is_empty = True
            target = target or spec.name
            repo = RpmGitRepository.create(target)
        os.chdir(repo.path)
        repo_setup.set_user_name_and_email(options.repo_user,
                                           options.repo_email, repo)

        if repo.bare:
            set_bare_repo_options(options)

        # Create more tempdirs
        dirs['origsrc'] = tempfile.mkdtemp(prefix='origsrc_')
        dirs['packaging_base'] = tempfile.mkdtemp(prefix='packaging_')
        dirs['packaging'] = os.path.join(dirs['packaging_base'],
                                         options.packaging_dir)
        try:
            os.mkdir(dirs['packaging'])
        except OSError as err:
            if err.errno != errno.EEXIST:
                raise

        if true_srcrpm:
            # For true src.rpm we just take everything
            files = os.listdir(dirs['src'])
        else:
            # Need to copy files to the packaging directory given by caller
            files = [os.path.basename(patch.path)
                     for patch in spec.patchseries(unapplied=True,
                                                   ignored=True)]
            for filename in spec.sources().values():
                files.append(os.path.basename(filename))
            files.append(os.path.join(spec.specdir, spec.specfile))
        # Don't copy orig source archive, though
        if spec.orig_src and spec.orig_src['filename'] in files:
            files.remove(spec.orig_src['filename'])

        for fname in files:
            fpath = os.path.join(dirs['src'], fname)
            if os.path.exists(fpath):
                shutil.copy2(fpath, dirs['packaging'])
            else:
                gbp.log.err("File '%s' listed in spec not found" % fname)
                raise GbpError

        # Unpack orig source archive
        if spec.orig_src:
            orig_tarball = os.path.join(dirs['src'], spec.orig_src['filename'])
            sources = RpmUpstreamSource(orig_tarball)
            sources.unpack(dirs['origsrc'], options.filters)
        else:
            sources = None

        tag_str_fields = dict(spec.version, vendor=options.vendor.lower())
        if options.native:
            # Native: sources live under the packaging tag format
            src_tag_format = options.packaging_tag
            src_tag = repo.version_to_tag(src_tag_format, tag_str_fields)
            upstream_tag = src_tag
            upstream_str_fields = tag_str_fields
        else:
            # Non-native: upstream tag drops release/epoch fields
            src_tag_format = options.upstream_tag
            src_tag = repo.version_to_tag(src_tag_format, tag_str_fields)
            upstream_str_fields = filter_version(tag_str_fields,
                                                 'release', 'epoch')
            upstream_tag = repo.version_to_tag(src_tag_format,
                                               upstream_str_fields)
        ver_str = compose_version_str(spec.version)

        if repo.find_version(options.packaging_tag, tag_str_fields):
            gbp.log.warn("Version %s already imported." % ver_str)
            if options.allow_same_version:
                gbp.log.info("Moving tag of version '%s' since import forced"
                             % ver_str)
                move_tag_stamp(repo, options.packaging_tag, tag_str_fields)
            else:
                raise SkipImport

        if is_empty:
            options.create_missing_branches = True

        # Determine author and committer info, currently same info is used
        # for both sources and packaging files
        author = None
        if spec.packager:
            match = re.match(r'(?P<name>.*[^ ])\s*<(?P<email>\S*)>',
                             spec.packager.strip())
            if match:
                author = GitModifier(match.group('name'),
                                     match.group('email'))
        if not author:
            author = GitModifier()
            gbp.log.debug("Couldn't determine packager info")
        committer = committer_from_author(author, options)

        # Import sources
        if sources:
            src_commit = repo.find_version(src_tag_format,
                                           upstream_str_fields)
            if not src_commit:
                gbp.log.info("Tag %s not found, importing sources" % src_tag)
                branch = [options.upstream_branch,
                          options.packaging_branch][options.native]
                if not repo.has_branch(branch):
                    if options.create_missing_branches:
                        gbp.log.info("Will create missing branch '%s'" %
                                     branch)
                    else:
                        gbp.log.err(
                            no_upstream_branch_msg % branch + "\n"
                            "Also check the --create-missing-branches option.")
                        raise GbpError
                src_vendor = "Native" if options.native else "Upstream"
                msg = "%s version %s" % (src_vendor, spec.upstreamversion)
                src_commit = repo.commit_dir(
                    sources.unpacked, "Import %s" % msg, branch,
                    author=author, committer=committer,
                    create_missing_branch=options.create_missing_branches)

                repo.create_tag(
                    name=src_tag if options.native else upstream_tag,
                    msg=msg, commit=src_commit,
                    sign=options.sign_tags, keyid=options.keyid)

                if not options.native:
                    if options.pristine_tar:
                        # pristine-tar can only handle plain tar archives
                        archive_fmt = parse_archive_filename(orig_tarball)[1]
                        if archive_fmt == 'tar':
                            repo.pristine_tar.commit(
                                orig_tarball,
                                'refs/heads/%s' % options.upstream_branch)
                        else:
                            gbp.log.warn('Ignoring pristine-tar, %s archives '
                                         'not supported' % archive_fmt)
        else:
            gbp.log.info("No orig source archive imported")

        # Import packaging files. For native packages we assume that also
        # packaging files are found in the source tarball
        if not options.native or not sources:
            gbp.log.info("Importing packaging files")
            branch = options.packaging_branch
            if not repo.has_branch(branch):
                if options.create_missing_branches:
                    gbp.log.info("Will create missing branch '%s'" % branch)
                else:
                    gbp.log.err(no_packaging_branch_msg % branch + "\n"
                                "Also check the --create-missing-branches "
                                "option.")
                    raise GbpError

            tag = repo.version_to_tag(options.packaging_tag, tag_str_fields)
            msg = "%s release %s" % (options.vendor, ver_str)

            if options.orphan_packaging or not sources:
                # Packaging files live on their own, sourceless tree
                commit = repo.commit_dir(
                    dirs['packaging_base'], "Import %s" % msg, branch,
                    author=author, committer=committer,
                    create_missing_branch=options.create_missing_branches)
            else:
                # Copy packaging files to the unpacked sources dir
                try:
                    pkgsubdir = os.path.join(sources.unpacked,
                                             options.packaging_dir)
                    os.mkdir(pkgsubdir)
                except OSError as err:
                    if err.errno != errno.EEXIST:
                        raise
                for fname in os.listdir(dirs['packaging']):
                    shutil.copy2(os.path.join(dirs['packaging'], fname),
                                 pkgsubdir)
                commit = repo.commit_dir(
                    sources.unpacked, "Import %s" % msg, branch,
                    other_parents=[src_commit],
                    author=author, committer=committer,
                    create_missing_branch=options.create_missing_branches)
                # Import patches on top of the source tree
                # (only for non-native packages with non-orphan packaging)
                force_to_branch_head(repo, options.packaging_branch)

            # Create packaging tag
            repo.create_tag(name=tag, msg=msg, commit=commit,
                            sign=options.sign_tags, keyid=options.keyid)

        force_to_branch_head(repo, options.packaging_branch)

    except KeyboardInterrupt:
        ret = 1
        gbp.log.err("Interrupted. Aborting.")
    except gbpc.CommandExecFailed:
        ret = 1
    except GitRepositoryError as err:
        gbp.log.err("Git command failed: %s" % err)
        ret = 1
    except GbpError as err:
        if str(err):
            gbp.log.err(err)
        ret = 1
    except NoSpecError as err:
        gbp.log.err("Failed determine spec file: %s" % err)
        ret = 1
    except SkipImport:
        skipped = True
    finally:
        # Always restore cwd and drop the scratch tree
        os.chdir(dirs['top'])
        del_tmpdir()

    if not ret and not skipped:
        gbp.log.info("Version '%s' imported under '%s'" % (ver_str, repo.path))
    return ret