def _guess_orig_file(self):
    """
    Try to guess the name of the primary upstream/source archive.

    Walks the declared sources in sorted order and returns a dict
    describing the best candidate (or None when no archive is found).
    An archive whose base name starts with the package name wins
    immediately; otherwise the first archive encountered is kept.
    """
    candidate = None
    for num, filename in sorted(self.sources().items()):
        base = os.path.basename(filename)
        info = {'num': num, 'filename': base, 'uri': filename}
        info['filename_base'], info['archive_fmt'], info['compression'] = \
            Archive.parse_filename(base)
        if not info['archive_fmt']:
            # Not an archive at all - never acceptable
            continue
        if info['filename_base'].startswith(self.name):
            # Archive named after the package: stop searching
            candidate = info
            break
        if candidate is None:
            # Remember the first archive as a fallback
            candidate = info
    if candidate:
        candidate['prefix'] = self._guess_orig_prefix(candidate)
    return candidate
def _guess_orig_file(self):
    """
    Try to guess the name of the primary upstream/source archive.
    Returns a dict with all the relevant information.
    """
    orig = None
    sources = self.sources()
    # Iterate in sorted key order so the lowest-numbered source wins
    # among equally good candidates (assumes keys sort meaningfully -
    # TODO confirm the key type against self.sources()).
    for num, filename in sorted(sources.items()):
        src = {
            'num': num,
            'filename': os.path.basename(filename),
            'uri': filename
        }
        # Split into (base name, archive format, compression); a falsy
        # archive_fmt means the file is not an archive.
        src['filename_base'], src['archive_fmt'], src['compression'] = \
            Archive.parse_filename(os.path.basename(filename))
        if (src['filename_base'].startswith(self.name) and
                src['archive_fmt']):
            # Take the first archive that starts with pkg name
            orig = src
            break
        # otherwise we take the first archive
        elif not orig and src['archive_fmt']:
            orig = src
        # else don't accept
    if orig:
        orig['prefix'] = self._guess_orig_prefix(orig)
    return orig
def guess_comp_type(repo, comp_type, source, tarball_dir):
    """Guess compression type to use for the to be built upstream tarball

    :param repo: git repository, checked for a pristine-tar branch
    :param comp_type: requested compression type, or 'auto' to detect
    :param source: source package descriptor (provides name and
        upstream version)
    :param tarball_dir: directory scanned for existing orig tarballs
        when no pristine-tar branch exists (defaults to '..')
    :returns: the compression type to use (falls back to 'gzip')
    :raises GbpError: if multiple orig tarballs are found in tarball_dir
    """
    if comp_type != 'auto':
        # Normalize aliases; anything still unknown triggers detection
        comp_type = Compressor.Aliases.get(comp_type, comp_type)
        if comp_type not in Compressor.Opts:
            gbp.log.warn("Unknown compression type - guessing.")
            comp_type = 'auto'
    if comp_type == 'auto':
        if repo.has_pristine_tar_branch():
            # Fix: raw string so '\.' is a regex escape, not an invalid
            # Python string escape (SyntaxWarning on modern CPython).
            regex = r'pristine-tar .* %s_%s\.orig.tar\.' % (source.name,
                                                            source.upstream_version)
            commits = repo.grep_log(regex, repo.pristine_tar_branch)
            if commits:
                commit = commits[-1]
                gbp.log.debug("Found pristine-tar commit at '%s'" % commit)
            else:
                # No matching commit: inspect the branch head instead
                commit = repo.pristine_tar_branch
            tarball = repo.get_commit_info(commit)['subject']
            (base_name, archive_fmt, comp_type) = Archive.parse_filename(tarball)
            gbp.log.debug("Determined compression type '%s'" % comp_type)
            if not comp_type:
                comp_type = 'gzip'
                gbp.log.warn("Unknown compression type of %s, assuming %s" %
                             (tarball, comp_type))
        else:
            if not tarball_dir:
                tarball_dir = '..'
            detected = None
            # Probe the tarball dir for an orig tarball of each known
            # compression; more than one match is ambiguous.
            for comp in Compressor.Opts.keys():
                if du.DebianPkgPolicy.has_orig(source.upstream_tarball_name(comp),
                                               tarball_dir):
                    if detected is not None:
                        raise GbpError("Multiple orig tarballs found.")
                    detected = comp
            comp_type = 'gzip' if detected is None else detected
    return comp_type
def get_component_tarballs(name, version, tarball, components):
    """
    Figure out the paths to the component tarballs based on the main
    tarball.

    Each component tarball is expected to live next to the main tarball
    and use the same compression; a missing file is an error.
    """
    result = []
    _, _, comp_type = Archive.parse_filename(tarball)
    directory = os.path.dirname(tarball)
    for comp in components:
        path = DebianPkgPolicy.build_tarball_name(name,
                                                  version,
                                                  comp_type,
                                                  directory,
                                                  comp)
        result.append(DebianAdditionalTarball(path, comp))
        if not os.path.exists(path):
            raise GbpError("Can not find component tarball %s" % path)
    return result
def get_component_tarballs(name, version, tarball, components):
    """
    Figure out the paths to the component tarballs based on the main
    tarball.

    Returns a list of (component, path) pairs; raises GbpError when a
    component tarball does not exist on disk.
    """
    _, _, comp_type = Archive.parse_filename(tarball)
    tarball_dir = os.path.dirname(tarball)
    pairs = []
    for component in components:
        cname = DebianPkgPolicy.build_tarball_name(name,
                                                   version,
                                                   comp_type,
                                                   tarball_dir,
                                                   component)
        if not os.path.exists(cname):
            raise GbpError("Can not find component tarball %s" % cname)
        pairs.append((component, cname))
    return pairs
def guess_comp_type(comp_type, source, repo, tarball_dir):
    """Guess compression type to use for the to be built upstream tarball

    We prefer pristine-tar over everything else since this is what's
    carried around with the repo and might be more reliable than
    what a user has in tarball_dir.
    """
    if comp_type != 'auto':
        # Normalize aliases (e.g. "gz" -> "gzip"); anything still
        # unknown falls through to auto-detection below.
        comp_type = Compressor.Aliases.get(comp_type, comp_type)
        if comp_type not in Compressor.Opts:
            gbp.log.warn("Unknown compression type - guessing.")
            comp_type = 'auto'
    if comp_type == 'auto':
        # Prefer a local pristine-tar branch, then the remote one
        branch = None
        if repo:
            if repo.has_branch('pristine-tar'):
                branch = 'pristine-tar'
            elif repo.has_branch('origin/pristine-tar', remote=True):
                branch = 'origin/pristine-tar'
        if branch is not None:
            regex = r'pristine-tar .* %s_%s\.orig.tar\.' % (source.name,
                                                            source.upstream_version)
            commits = repo.grep_log(regex, branch, merges=False)
            if commits:
                commit = commits[-1]
                gbp.log.debug("Found pristine-tar commit at '%s'" % commit)
            else:
                # No matching commit: fall back to the branch head
                commit = branch
            # The tarball name is recorded in the commit subject
            tarball = repo.get_commit_info(commit)['subject']
            (base_name, archive_fmt, comp_type) = Archive.parse_filename(tarball)
            gbp.log.debug("Determined compression type '%s'" % comp_type)
            if not comp_type:
                comp_type = 'gzip'
                gbp.log.warn("Unknown compression type of %s, assuming %s" %
                             (tarball, comp_type))
        else:
            # No pristine-tar: probe tarball_dir for an existing orig
            # tarball of each known compression type.
            tarball_dir = tarball_dir or '..'
            detected = None
            for comp in Compressor.Opts.keys():
                if du.DebianPkgPolicy.has_orig(source.upstream_tarball_name(comp),
                                               tarball_dir):
                    if detected is not None:
                        raise GbpError("Multiple orig tarballs found.")
                    detected = comp
            comp_type = 'gzip' if detected is None else detected
    return comp_type
def is_valid_orig_archive(cls, filename):
    """
    Is this a valid orig source archive

    @param filename: upstream source archive filename
    @type filename: C{str}
    @return: true if valid upstream source archive filename
    @rtype: C{bool}

    >>> RpmPkgPolicy.is_valid_orig_archive("foo/bar_baz.tar.gz")
    True
    >>> RpmPkgPolicy.is_valid_orig_archive("foo.bar.tar")
    True
    >>> RpmPkgPolicy.is_valid_orig_archive("foo.bar")
    False
    >>> RpmPkgPolicy.is_valid_orig_archive("foo.gz")
    False
    """
    # Valid iff parse_filename() recognizes an archive format in the
    # name; compression alone (e.g. "foo.gz") is not enough.
    _base, arch_fmt, _compression = Archive.parse_filename(filename)
    return bool(arch_fmt)
def safe_patches(queue):
    """
    Save the current patches in a temporary directory

    Compressed patches (gzip/bzip2) are uncompressed on the way; any
    other compression is rejected.

    @param queue: an existing patch queue
    @return: saved queue (with patches in tmpdir)
    @rtype: tuple
    @raises GbpError: on an unsupported patch compression
    """
    # Loop-invariant: map of supported compressions to stdlib openers,
    # hoisted out of the per-patch loop.
    uncompressors = {'gzip': gzip.open, 'bzip2': bz2.BZ2File}

    tmpdir = tempfile.mkdtemp(prefix='patchimport_')
    safequeue = PatchSeries()

    if len(queue) > 0:
        gbp.log.debug("Saving patches '%s' in '%s'" %
                      (os.path.dirname(queue[0].path), tmpdir))
        for patch in queue:
            base, _archive_fmt, comp = Archive.parse_filename(patch.path)
            if comp in uncompressors:
                gbp.log.debug("Uncompressing '%s'" %
                              os.path.basename(patch.path))
                src = uncompressors[comp](patch.path, 'r')
                # Drop the compression suffix from the copy's name
                dst_name = os.path.join(tmpdir, os.path.basename(base))
            elif comp:
                raise GbpError("Unsupported patch compression '%s', giving up"
                               % comp)
            else:
                src = open(patch.path, 'rb')
                dst_name = os.path.join(tmpdir,
                                        os.path.basename(patch.path))

            # Context managers guarantee both files are closed even if
            # reading or writing fails (the originals leaked on error).
            with src, open(dst_name, 'wb') as dst:
                dst.write(src.read())

            safequeue.append(patch)
            safequeue[-1].path = dst_name

    return safequeue
def guess_comp_type(comp_type, source, repo, tarball_dir):
    """Guess compression type to use for the to be built upstream tarball

    We prefer pristine-tar over everything else since this is what's
    carried around with the repo and might be more reliable than
    what a user has in tarball_dir.
    """
    if comp_type != 'auto':
        # Normalize aliases; anything still unknown falls through to
        # auto-detection below.
        comp_type = Compressor.Aliases.get(comp_type, comp_type)
        if comp_type not in Compressor.Opts:
            gbp.log.warn("Unknown compression type - guessing.")
            comp_type = 'auto'
    if comp_type == 'auto':
        if repo and repo.has_pristine_tar_branch():
            regex = r'pristine-tar .* %s_%s\.orig.tar\.' % (source.name,
                                                            source.upstream_version)
            commits = repo.grep_log(regex, repo.pristine_tar_branch,
                                    merges=False)
            if commits:
                commit = commits[-1]
                gbp.log.debug("Found pristine-tar commit at '%s'" % commit)
            else:
                # No matching commit: inspect the branch head instead
                commit = repo.pristine_tar_branch
            # The tarball name is recorded in the commit subject
            tarball = repo.get_commit_info(commit)['subject']
            (base_name, archive_fmt, comp_type) = Archive.parse_filename(tarball)
            gbp.log.debug("Determined compression type '%s'" % comp_type)
            if not comp_type:
                comp_type = 'gzip'
                gbp.log.warn("Unknown compression type of %s, assuming %s" %
                             (tarball, comp_type))
        else:
            # No pristine-tar: probe tarball_dir for an existing orig
            # tarball of each known compression type.
            if not tarball_dir:
                tarball_dir = '..'
            detected = None
            for comp in Compressor.Opts.keys():
                if du.DebianPkgPolicy.has_orig(source.upstream_tarball_name(comp),
                                               tarball_dir):
                    if detected is not None:
                        raise GbpError("Multiple orig tarballs found.")
                    detected = comp
            comp_type = 'gzip' if detected is None else detected
    return comp_type
def main(argv):
    """Main function of the git-import-srpm script"""
    dirs = dict(top=os.path.abspath(os.curdir))
    ret = 0
    skipped = False

    options, args = parse_args(argv)
    if not options:
        return ExitCodes.parse_error

    # Exactly one srpm/spec argument, with an optional target repo dir
    if len(args) == 1:
        srpm = args[0]
        target = None
    elif len(args) == 2:
        srpm = args[0]
        target = args[1]
    else:
        gbp.log.err("Need to give exactly one package to import. Try --help.")
        return 1
    try:
        dirs['tmp_base'] = init_tmpdir(options.tmp_dir, 'import-srpm_')
    except GbpError as err:
        gbp.log.err(err)
        return 1
    try:
        if options.download:
            srpm = download_source(srpm)

        # Real srpm, we need to unpack, first
        true_srcrpm = False
        if not os.path.isdir(srpm) and not srpm.endswith(".spec"):
            src = parse_srpm(srpm)
            true_srcrpm = True
            dirs['pkgextract'] = tempfile.mkdtemp(prefix='pkgextract_')
            gbp.log.info("Extracting src rpm to '%s'" % dirs['pkgextract'])
            src.unpack(dirs['pkgextract'])
            preferred_spec = src.name + '.spec'
            srpm = dirs['pkgextract']
        elif os.path.isdir(srpm):
            preferred_spec = os.path.basename(srpm.rstrip('/')) + '.spec'
        else:
            preferred_spec = None

        # Find and parse spec file
        if os.path.isdir(srpm):
            gbp.log.debug("Trying to import an unpacked srpm from '%s'" % srpm)
            dirs['src'] = os.path.abspath(srpm)
            spec = guess_spec(srpm, True, preferred_spec)
        else:
            gbp.log.debug("Trying to import an srpm from '%s' with spec "
                          "file '%s'" % (os.path.dirname(srpm), srpm))
            dirs['src'] = os.path.abspath(os.path.dirname(srpm))
            spec = SpecFile(srpm)

        # Check the repository state
        try:
            repo = RpmGitRepository('.')
            is_empty = repo.is_empty()
            (clean, out) = repo.is_clean()
            if not clean and not is_empty:
                gbp.log.err("Repository has uncommitted changes, commit "
                            "these first: ")
                raise GbpError(out)
        except GitRepositoryError:
            # No repo in cwd: create one, named after the spec unless a
            # target directory was given on the command line.
            gbp.log.info("No git repository found, creating one.")
            is_empty = True
            target = target or spec.name
            repo = RpmGitRepository.create(target)
            os.chdir(repo.path)
            repo_setup.set_user_name_and_email(options.repo_user,
                                               options.repo_email, repo)

        if repo.bare:
            set_bare_repo_options(options)

        # Create more tempdirs
        dirs['origsrc'] = tempfile.mkdtemp(prefix='origsrc_')
        dirs['packaging_base'] = tempfile.mkdtemp(prefix='packaging_')
        dirs['packaging'] = os.path.join(dirs['packaging_base'],
                                         options.packaging_dir)
        try:
            os.mkdir(dirs['packaging'])
        except OSError as err:
            # An already existing packaging dir is fine
            if err.errno != errno.EEXIST:
                raise

        if true_srcrpm:
            # For true src.rpm we just take everything
            files = os.listdir(dirs['src'])
        else:
            # Need to copy files to the packaging directory given by caller
            files = [os.path.basename(patch.path)
                     for patch in spec.patchseries(unapplied=True,
                                                   ignored=True)]
            for filename in spec.sources().values():
                files.append(os.path.basename(filename))
            files.append(os.path.join(spec.specdir, spec.specfile))
        # Don't copy orig source archive, though
        if spec.orig_src and spec.orig_src['filename'] in files:
            files.remove(spec.orig_src['filename'])

        for fname in files:
            fpath = os.path.join(dirs['src'], fname)
            if os.path.exists(fpath):
                shutil.copy2(fpath, dirs['packaging'])
            else:
                gbp.log.err("File '%s' listed in spec not found" % fname)
                raise GbpError

        # Unpack orig source archive
        if spec.orig_src:
            orig_tarball = os.path.join(dirs['src'], spec.orig_src['filename'])
            sources = RpmUpstreamSource(orig_tarball)
            sources.unpack(dirs['origsrc'], options.filters)
        else:
            sources = None

        # Tag format/fields differ for native packages: they share the
        # packaging tag, non-native ones get a release/epoch-less
        # upstream tag.
        tag_str_fields = dict(spec.version, vendor=options.vendor.lower())
        if options.native:
            upstream_tag_format = options.packaging_tag
            upstream_str_fields = tag_str_fields
        else:
            upstream_tag_format = options.upstream_tag
            upstream_str_fields = filter_version(tag_str_fields,
                                                 'release', 'epoch')
        upstream_tag = repo.version_to_tag(upstream_tag_format,
                                           upstream_str_fields)

        full_version = compose_version_str(spec.version)

        if repo.find_version(options.packaging_tag, tag_str_fields):
            gbp.log.warn("Version %s already imported." % full_version)
            if options.allow_same_version:
                gbp.log.info("Moving tag of version '%s' since import forced"
                             % full_version)
                move_tag_stamp(repo, options.packaging_tag, tag_str_fields)
            else:
                raise SkipImport

        if is_empty:
            options.create_missing_branches = True

        # Determine author and committer info, currently same info is used
        # for both sources and packaging files
        author = None
        if spec.packager:
            match = re.match(r'(?P<name>.*[^ ])\s*<(?P<email>\S*)>',
                             spec.packager.strip())
            if match:
                author = GitModifier(match.group('name'),
                                     match.group('email'))
        if not author:
            author = GitModifier()
            gbp.log.debug("Couldn't determine packager info")
        committer = committer_from_author(author, options)

        # Import sources
        if sources:
            upstream_commit = repo.find_version(upstream_tag_format,
                                                upstream_str_fields)
            if not upstream_commit:
                gbp.log.info("Tag %s not found, importing sources" %
                             upstream_tag)
                # Native sources land on the packaging branch directly
                branch = [options.upstream_branch,
                          options.packaging_branch][options.native]
                if not repo.has_branch(branch):
                    if options.create_missing_branches:
                        gbp.log.info("Will create missing branch '%s'" %
                                     branch)
                    else:
                        gbp.log.err(no_upstream_branch_msg % branch + "\n"
                                    "Also check the --create-missing-branches option.")
                        raise GbpError
                upstream_vendor = "Native" if options.native else "Upstream"
                upstream_version = full_version if options.native else spec.upstreamversion
                msg = "%s version %s" % (upstream_vendor, upstream_version)
                if options.vcs_tag:
                    vcs_tag = repo.version_to_tag(options.vcs_tag,
                                                  upstream_str_fields)
                    parents = [repo.rev_parse("%s^{}" % vcs_tag)]
                else:
                    parents = None
                upstream_commit = repo.commit_dir(sources.unpacked,
                                                  "Import %s" % msg,
                                                  branch,
                                                  other_parents=parents,
                                                  author=author,
                                                  committer=committer,
                                                  create_missing_branch=options.create_missing_branches)
                if not (options.native and options.skip_packaging_tag):
                    repo.create_tag(name=upstream_tag,
                                    msg=msg,
                                    commit=upstream_commit,
                                    sign=options.sign_tags,
                                    keyid=options.keyid)
                if not options.native:
                    if options.pristine_tar:
                        # pristine-tar can only handle tar archives
                        archive_fmt = Archive.parse_filename(orig_tarball)[1]
                        if archive_fmt == 'tar':
                            repo.pristine_tar.commit(orig_tarball,
                                                     'refs/heads/%s' %
                                                     options.upstream_branch)
                        else:
                            gbp.log.warn('Ignoring pristine-tar, %s archives '
                                         'not supported' % archive_fmt)
        else:
            gbp.log.info("No orig source archive imported")

        # Import packaging files. For native packages we assume that also
        # packaging files are found in the source tarball
        if not options.native or not sources:
            gbp.log.info("Importing packaging files")
            branch = options.packaging_branch
            if not repo.has_branch(branch):
                if options.create_missing_branches:
                    gbp.log.info("Will create missing branch '%s'" % branch)
                else:
                    gbp.log.err(no_packaging_branch_msg % branch + "\n"
                                "Also check the --create-missing-branches "
                                "option.")
                    raise GbpError

            msg = "%s release %s" % (options.vendor, full_version)

            if options.orphan_packaging or not sources:
                # Orphan packaging (or no sources): packaging files form
                # their own history, not based on the source tree
                commit = repo.commit_dir(dirs['packaging_base'],
                                         "Import %s" % msg, branch,
                                         author=author,
                                         committer=committer,
                                         create_missing_branch=options.create_missing_branches)
            else:
                # Copy packaging files to the unpacked sources dir
                try:
                    pkgsubdir = os.path.join(sources.unpacked,
                                             options.packaging_dir)
                    os.mkdir(pkgsubdir)
                except OSError as err:
                    if err.errno != errno.EEXIST:
                        raise
                for fname in os.listdir(dirs['packaging']):
                    shutil.copy2(os.path.join(dirs['packaging'], fname),
                                 pkgsubdir)
                commit = repo.commit_dir(sources.unpacked,
                                         "Import %s" % msg, branch,
                                         other_parents=[upstream_commit],
                                         author=author,
                                         committer=committer,
                                         create_missing_branch=options.create_missing_branches)

            # Import patches on top of the source tree
            # (only for non-native packages with non-orphan packaging)
            force_to_branch_head(repo, options.packaging_branch)

            # Create packaging tag
            if not options.skip_packaging_tag:
                tag = repo.version_to_tag(options.packaging_tag,
                                          tag_str_fields)
                repo.create_tag(name=tag,
                                msg=msg,
                                commit=commit,
                                sign=options.sign_tags,
                                keyid=options.keyid)

        force_to_branch_head(repo, options.packaging_branch)

    except KeyboardInterrupt:
        ret = 1
        gbp.log.err("Interrupted. Aborting.")
    except gbpc.CommandExecFailed:
        # Error already logged by the command runner, presumably
        ret = 1
    except GitRepositoryError as err:
        gbp.log.err("Git command failed: %s" % err)
        ret = 1
    except GbpError as err:
        if str(err):
            gbp.log.err(err)
        ret = 1
    except NoSpecError as err:
        gbp.log.err("Failed determine spec file: %s" % err)
        ret = 1
    except SkipImport:
        # Version already imported and --allow-same-version not given
        skipped = True
    finally:
        os.chdir(dirs['top'])
        del_tmpdir()

    if not ret and not skipped:
        gbp.log.info("Version '%s' imported under '%s'" %
                     (full_version, repo.path))
    return ret