def convert_with_history(repo, upstream, commits, new_branch, spec_fn, options):
    """Auto-import packaging files and (auto-generated) patches"""

    # Dump and commit packaging files
    packaging_tree = '%s:%s' % (commits[0], options.packaging_dir)
    packaging_tmp = tempfile.mkdtemp(prefix='pack_', dir=options.tmp_dir)
    dump_packaging_dir = os.path.join(packaging_tmp, options.new_packaging_dir)
    dump_tree(repo, dump_packaging_dir, packaging_tree, with_submodules=False,
              recursive=False)

    msg = "Auto-import packaging files\n\n" \
          "Imported initial packaging files from commit '%s'" % (commits[0])
    new_tree = repo.create_tree(packaging_tmp)
    tip_commit = repo.commit_tree(new_tree, msg, [])

    # Generate initial patches
    spec = SpecFile(os.path.join(dump_packaging_dir, spec_fn))
    update_patch_series(repo, spec, upstream, commits[0], options)

    # Commit updated packaging files only if something was changed
    new_tree = repo.create_tree(packaging_tmp)
    if new_tree != repo.rev_parse(tip_commit + ':'):
        msg = "Auto-generate patches\n\n" \
              "Generated patches from\n'%s..%s'\n\n" \
              "updating spec file and possibly removing old patches." \
              % (upstream, commits[0])
        tip_commit = repo.commit_tree(new_tree, msg, [tip_commit])

    # Import rest of the commits
    for commit in commits[1:]:
        shutil.rmtree(dump_packaging_dir)
        packaging_tree = '%s:%s' % (commit, options.packaging_dir)
        dump_tree(repo, dump_packaging_dir, packaging_tree,
                  with_submodules=False, recursive=False)
        try:
            spec = SpecFile(os.path.join(dump_packaging_dir, spec_fn))
            update_patch_series(repo, spec, upstream, commit, options)
        except (NoSpecError, GbpError):
            gbp.log.warn("Failed to generate patches from '%s'" % commit)
        new_tree = repo.create_tree(packaging_tmp)
        if new_tree == repo.rev_parse(tip_commit + ':'):
            gbp.log.info("Skipping commit '%s' which generated no change" %
                         commit)
        else:
            info = repo.get_commit_info(commit)
            msg = "%s\n\n%sAuto-imported by gbp from '%s'" % (
                info['subject'], info['body'], commit)
            tip_commit = repo.commit_tree(new_tree, msg, [tip_commit])

    repo.create_branch(new_branch, tip_commit)
    repo.set_branch(new_branch)

def test_changelog(self):
    """Test changelog methods"""
    spec_filepath = os.path.join(SPEC_DIR, "gbp-test2.spec")
    spec = SpecFile(spec_filepath)

    # Read changelog
    eq_(spec.get_changelog(),
        "* Tue Feb 04 2014 Name <email> 1\n- My change\n\n\n")

    # Set changelog and check again
    new_text = "* Wed Feb 05 2014 Name <email> 2\n- New entry\n\n\n"
    spec.set_changelog(new_text)
    eq_(spec.get_changelog(), new_text)

def parse_spec(options, repo, treeish=None):
    """
    Find and parse spec file.

    If treeish is given, try to find the spec file from that. Otherwise,
    search for the spec file in the working copy.
    """
    try:
        if options.spec_file:
            if not treeish:
                spec = SpecFile(options.spec_file)
            else:
                spec = spec_from_repo(repo, treeish, options.spec_file)
        else:
            preferred_name = os.path.basename(repo.path) + '.spec'
            if not treeish:
                spec = guess_spec(options.packaging_dir, True, preferred_name)
            else:
                spec = guess_spec_repo(repo, treeish, options.packaging_dir,
                                       True, preferred_name)
    except NoSpecError as err:
        raise GbpError("Can't parse spec: %s" % err)
    relpath = spec.specpath if treeish else os.path.relpath(spec.specpath,
                                                            repo.path)
    options.packaging_dir = os.path.dirname(relpath)
    gbp.log.debug("Using '%s' from '%s'" % (relpath,
                                            treeish or 'working copy'))
    return spec

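For reference, a hypothetical call site for parse_spec(); this is only a sketch, and the options object below is a stand-in carrying just the attributes the function actually reads (spec_file and packaging_dir):

# Illustrative only: the options stand-in and the argument values are assumptions.
class _Opts(object):
    spec_file = None            # let parse_spec() guess the spec file
    packaging_dir = 'packaging'

repo = RpmGitRepository('.')    # repository wrapper used elsewhere in these examples
spec = parse_spec(_Opts(), repo, treeish='HEAD')
gbp.log.info("Parsed spec '%s'" % spec.specpath)
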
def test_parse_raw(self):
    """Test parsing of a valid spec file"""
    with assert_raises(NoSpecError):
        SpecFile(None, None)
    with assert_raises(NoSpecError):
        SpecFile('filename', 'filedata')

    spec_filepath = os.path.join(SPEC_DIR, 'gbp-test.spec')
    with open(spec_filepath, 'r') as spec_fd:
        spec_data = spec_fd.read()
    spec = SpecFile(filedata=spec_data)

    # Test basic properties
    eq_(spec.specfile, None)
    eq_(spec.specdir, None)
    eq_(spec.name, 'gbp-test')

def test_quirks(self):
    """Test spec that is broken/has anomalies"""
    spec_filepath = os.path.join(SPEC_DIR, 'gbp-test-quirks.spec')
    spec = SpecFile(spec_filepath)

    # Check that we guess orig source and prefix correctly
    eq_(spec.orig_src['prefix'], 'foobar/')

def test_spec_4(self):
    """Test parsing of spec without orig tarball"""
    spec_filepath = os.path.join(SPEC_DIR, 'gbp-test-native2.spec')
    spec = SpecFile(spec_filepath)

    # Test basic properties
    eq_(spec.name, 'gbp-test-native2')
    eq_(spec.orig_src, None)

def parse_spec_file(repo, options):
    """Find and parse spec file"""
    if options.spec_file:
        spec_path = os.path.join(repo.path, options.spec_file)
        spec = SpecFile(spec_path)
    else:
        spec = guess_spec(os.path.join(repo.path, options.packaging_dir),
                          True, os.path.basename(repo.path) + '.spec')
    options.packaging_dir = spec.specdir
    return spec

class ChangelogFile(object):
    """Container for changelog file, whether it be a standalone changelog
    or a spec file"""

    def __init__(self, file_path):
        parser = ChangelogParser(RpmPkgPolicy)

        if os.path.splitext(file_path)[1] == '.spec':
            gbp.log.debug("Using spec file '%s' as changelog" % file_path)
            self._file = SpecFile(file_path)
            self.changelog = parser.raw_parse_string(
                self._file.get_changelog())
        else:
            self._file = os.path.abspath(file_path)
            if not os.path.exists(file_path):
                gbp.log.info("Changelog '%s' not found, creating new "
                             "changelog file" % file_path)
                self.changelog = Changelog(RpmPkgPolicy)
            else:
                gbp.log.debug("Using changelog file '%s'" % file_path)
                self.changelog = parser.raw_parse_file(self._file)

        # Parse topmost section and try to determine the start commit
        if self.changelog.sections:
            self.changelog.sections[0] = parser.parse_section(
                self.changelog.sections[0])

    def write(self):
        """Write changelog file to disk"""
        if isinstance(self._file, SpecFile):
            self._file.set_changelog(str(self.changelog))
            self._file.write_spec_file()
        else:
            with open(self._file, 'w') as fobj:
                fobj.write(str(self.changelog))

    @property
    def path(self):
        """File path"""
        if isinstance(self._file, SpecFile):
            return self._file.specpath
        else:
            return self._file

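A minimal usage sketch of the ChangelogFile container above, assuming it is importable from the surrounding module; the spec file name is illustrative only:

# Minimal sketch, assuming ChangelogFile is importable; the path is made up.
cl = ChangelogFile('packaging/gbp-test.spec')   # a spec file doubles as the changelog
print(cl.path)                                  # for a spec file this resolves to spec.specpath
cl.write()                                      # serialize the changelog back to disk
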
def test_spec_3(self):
    """Test parsing of yet another valid spec file"""
    spec_filepath = os.path.join(SPEC_DIR, 'gbp-test-native.spec')
    spec = SpecFile(spec_filepath)

    # Test basic properties
    eq_(spec.name, 'gbp-test-native')
    orig = spec.orig_src
    eq_(orig['filename'], 'gbp-test-native-1.0.zip')
    eq_(orig['archive_fmt'], 'zip')
    eq_(orig['compression'], None)
    eq_(orig['prefix'], 'gbp-test-native-1.0/')

def __init__(self, file_path):
    parser = ChangelogParser(RpmPkgPolicy)

    if os.path.splitext(file_path)[1] == '.spec':
        gbp.log.debug("Using spec file '%s' as changelog" % file_path)
        self._file = SpecFile(file_path)
        self.changelog = parser.raw_parse_string(self._file.get_changelog())
    else:
        self._file = os.path.abspath(file_path)
        if not os.path.exists(file_path):
            gbp.log.info("Changelog '%s' not found, creating new "
                         "changelog file" % file_path)
            self.changelog = Changelog(RpmPkgPolicy)
        else:
            gbp.log.debug("Using changelog file '%s'" % file_path)
            self.changelog = parser.raw_parse_file(self._file)

    # Parse topmost section and try to determine the start commit
    if self.changelog.sections:
        self.changelog.sections[0] = parser.parse_section(
            self.changelog.sections[0])

def test_spec_2(self):
    """Test parsing of another valid spec file"""
    spec_filepath = os.path.join(SPEC_DIR, 'gbp-test2.spec')
    spec = SpecFile(spec_filepath)

    # Test basic properties
    eq_(spec.name, 'gbp-test2')
    eq_(spec.packager, 'Markus Lehtonen <*****@*****.**>')
    eq_(spec.epoch, '2')
    eq_(spec.version, {'release': '0', 'upstreamversion': '3.0',
                       'epoch': '2'})
    orig = spec.orig_src
    eq_(orig['filename'], 'gbp-test2-3.0.tar.gz')
    eq_(orig['uri'], 'ftp://ftp.host.com/gbp-test2-3.0.tar.gz')
    eq_(orig['archive_fmt'], 'tar')
    eq_(orig['compression'], 'gzip')
    eq_(orig['prefix'], '')

def test_spec_2(self):
    """Test parsing of another valid spec file"""
    spec_filepath = os.path.join(SPEC_DIR, 'gbp-test2.spec')
    spec = SpecFile(spec_filepath)

    # Test basic properties
    assert spec.name == 'gbp-test2'
    assert spec.packager == 'Markus Lehtonen <*****@*****.**>'
    assert spec.epoch == '2'
    assert spec.version == {'release': '0', 'upstreamversion': '3.0',
                            'epoch': '2'}
    orig = spec.orig_src
    assert orig['filename'] == 'gbp-test2-3.0.tar.gz'
    assert orig['uri'] == 'ftp://ftp.host.com/gbp-test2-3.0.tar.gz'
    assert orig['archive_fmt'] == 'tar'
    assert orig['compression'] == 'gzip'
    assert orig['prefix'] == ''

def test_update_spec2(self):
    """Another test for spec autoupdate functionality"""
    tmp_spec = os.path.join(self.tmpdir, 'gbp-test2.spec')
    shutil.copy2(os.path.join(SPEC_DIR, 'gbp-test2.spec'), tmp_spec)
    reference_spec = os.path.join(SPEC_DIR, 'gbp-test2-reference2.spec')

    spec = SpecFile(tmp_spec)
    spec.update_patches(['1.patch', '2.patch'],
                        {'1.patch': {'if': 'true'},
                         '2.patch': {'ifarch': '%ix86'}})
    spec.set_tag('VCS', None, 'myvcstag')
    spec.write_spec_file()
    eq_(filecmp.cmp(tmp_spec, reference_spec), True)

    # Test updating patches again, removing the VCS tag and re-writing
    # changelog
    reference_spec = os.path.join(SPEC_DIR, 'gbp-test2-reference.spec')
    spec.update_patches(['new.patch'], {'new.patch': {'if': '1'}})
    spec.set_tag('VCS', None, '')
    spec.set_changelog("* Wed Feb 05 2014 Name <email> 2\n- New entry\n\n")
    spec.write_spec_file()
    eq_(filecmp.cmp(tmp_spec, reference_spec), True)

def test_update_spec(self):
    """Test spec autoupdate functionality"""
    # Create temporary spec file
    tmp_spec = os.path.join(self.tmpdir, 'gbp-test.spec')
    shutil.copy2(os.path.join(SPEC_DIR, 'gbp-test.spec'), tmp_spec)

    reference_spec = os.path.join(SPEC_DIR, 'gbp-test-reference.spec')
    spec = SpecFile(tmp_spec)
    spec.update_patches(['new.patch'], {})
    spec.write_spec_file()
    eq_(filecmp.cmp(tmp_spec, reference_spec), True)

    # Test adding the VCS tag and adding changelog
    reference_spec = os.path.join(SPEC_DIR, 'gbp-test-reference2.spec')
    spec.set_tag('VCS', None, 'myvcstag')
    spec.set_changelog("* Wed Feb 05 2014 Name <email> 1\n- New entry\n")
    spec.write_spec_file()
    eq_(filecmp.cmp(tmp_spec, reference_spec), True)

def main(argv):
    """Main function of the git-import-srpm script"""
    dirs = dict(top=os.path.abspath(os.curdir))

    ret = 0
    skipped = False

    options, args = parse_args(argv)
    if not options:
        return ExitCodes.parse_error

    if len(args) == 1:
        srpm = args[0]
        target = None
    elif len(args) == 2:
        srpm = args[0]
        target = args[1]
    else:
        gbp.log.err("Need to give exactly one package to import. Try --help.")
        return 1
    try:
        dirs['tmp_base'] = init_tmpdir(options.tmp_dir, 'import-srpm_')
    except GbpError as err:
        gbp.log.err(err)
        return 1
    try:
        if options.download:
            srpm = download_source(srpm)

        # Real srpm, we need to unpack, first
        true_srcrpm = False
        if not os.path.isdir(srpm) and not srpm.endswith(".spec"):
            src = parse_srpm(srpm)
            true_srcrpm = True
            dirs['pkgextract'] = tempfile.mkdtemp(prefix='pkgextract_')
            gbp.log.info("Extracting src rpm to '%s'" % dirs['pkgextract'])
            src.unpack(dirs['pkgextract'])
            preferred_spec = src.name + '.spec'
            srpm = dirs['pkgextract']
        elif os.path.isdir(srpm):
            preferred_spec = os.path.basename(srpm.rstrip('/')) + '.spec'
        else:
            preferred_spec = None

        # Find and parse spec file
        if os.path.isdir(srpm):
            gbp.log.debug("Trying to import an unpacked srpm from '%s'" % srpm)
            dirs['src'] = os.path.abspath(srpm)
            spec = guess_spec(srpm, True, preferred_spec)
        else:
            gbp.log.debug("Trying to import an srpm from '%s' with spec "
                          "file '%s'" % (os.path.dirname(srpm), srpm))
            dirs['src'] = os.path.abspath(os.path.dirname(srpm))
            spec = SpecFile(srpm)

        # Check the repository state
        try:
            repo = RpmGitRepository('.')
            is_empty = repo.is_empty()

            (clean, out) = repo.is_clean()
            if not clean and not is_empty:
                gbp.log.err("Repository has uncommitted changes, commit "
                            "these first: ")
                raise GbpError(out)

        except GitRepositoryError:
            gbp.log.info("No git repository found, creating one.")
            is_empty = True
            target = target or spec.name
            repo = RpmGitRepository.create(target)
            os.chdir(repo.path)
            repo_setup.set_user_name_and_email(options.repo_user,
                                               options.repo_email, repo)

        if repo.bare:
            set_bare_repo_options(options)

        # Create more tempdirs
        dirs['origsrc'] = tempfile.mkdtemp(prefix='origsrc_')
        dirs['packaging_base'] = tempfile.mkdtemp(prefix='packaging_')
        dirs['packaging'] = os.path.join(dirs['packaging_base'],
                                         options.packaging_dir)
        try:
            os.mkdir(dirs['packaging'])
        except OSError as err:
            if err.errno != errno.EEXIST:
                raise

        if true_srcrpm:
            # For true src.rpm we just take everything
            files = os.listdir(dirs['src'])
        else:
            # Need to copy files to the packaging directory given by caller
            files = [os.path.basename(patch.path)
                     for patch in spec.patchseries(unapplied=True,
                                                   ignored=True)]
            for filename in spec.sources().values():
                files.append(os.path.basename(filename))
            files.append(os.path.join(spec.specdir, spec.specfile))
        # Don't copy orig source archive, though
        if spec.orig_src and spec.orig_src['filename'] in files:
            files.remove(spec.orig_src['filename'])

        for fname in files:
            fpath = os.path.join(dirs['src'], fname)
            if os.path.exists(fpath):
                shutil.copy2(fpath, dirs['packaging'])
            else:
                gbp.log.err("File '%s' listed in spec not found" % fname)
                raise GbpError

        # Unpack orig source archive
        if spec.orig_src:
            orig_tarball = os.path.join(dirs['src'], spec.orig_src['filename'])
            sources = RpmUpstreamSource(orig_tarball)
            sources.unpack(dirs['origsrc'], options.filters)
        else:
            sources = None

        tag_str_fields = dict(spec.version, vendor=options.vendor.lower())
        if options.native:
            src_tag_format = options.packaging_tag
            src_tag = repo.version_to_tag(src_tag_format, tag_str_fields)
            upstream_tag = src_tag
            upstream_str_fields = tag_str_fields
        else:
            src_tag_format = options.upstream_tag
            src_tag = repo.version_to_tag(src_tag_format, tag_str_fields)
            upstream_str_fields = filter_version(tag_str_fields, 'release',
                                                 'epoch')
            upstream_tag = repo.version_to_tag(src_tag_format,
                                               upstream_str_fields)
        ver_str = compose_version_str(spec.version)

        if repo.find_version(options.packaging_tag, tag_str_fields):
            gbp.log.warn("Version %s already imported." % ver_str)
            if options.allow_same_version:
                gbp.log.info("Moving tag of version '%s' since import forced" %
                             ver_str)
                move_tag_stamp(repo, options.packaging_tag, tag_str_fields)
            else:
                raise SkipImport

        if is_empty:
            options.create_missing_branches = True

        # Determine author and committer info, currently same info is used
        # for both sources and packaging files
        author = None
        if spec.packager:
            match = re.match(r'(?P<name>.*[^ ])\s*<(?P<email>\S*)>',
                             spec.packager.strip())
            if match:
                author = GitModifier(match.group('name'), match.group('email'))
        if not author:
            author = GitModifier()
            gbp.log.debug("Couldn't determine packager info")
        committer = committer_from_author(author, options)

        # Import sources
        if sources:
            src_commit = repo.find_version(src_tag_format, upstream_str_fields)
            if not src_commit:
                gbp.log.info("Tag %s not found, importing sources" % src_tag)

                branch = [options.upstream_branch,
                          options.packaging_branch][options.native]
                if not repo.has_branch(branch):
                    if options.create_missing_branches:
                        gbp.log.info("Will create missing branch '%s'" %
                                     branch)
                    else:
                        gbp.log.err(no_upstream_branch_msg % branch + "\n"
                                    "Also check the --create-missing-branches "
                                    "option.")
                        raise GbpError
                src_vendor = "Native" if options.native else "Upstream"
                msg = "%s version %s" % (src_vendor, spec.upstreamversion)
                src_commit = repo.commit_dir(
                    sources.unpacked, "Import %s" % msg, branch,
                    author=author, committer=committer,
                    create_missing_branch=options.create_missing_branches)

            repo.create_tag(name=src_tag if options.native else upstream_tag,
                            msg=msg, commit=src_commit,
                            sign=options.sign_tags, keyid=options.keyid)

            if not options.native:
                if options.pristine_tar:
                    archive_fmt = parse_archive_filename(orig_tarball)[1]
                    if archive_fmt == 'tar':
                        repo.pristine_tar.commit(
                            orig_tarball,
                            'refs/heads/%s' % options.upstream_branch)
                    else:
                        gbp.log.warn('Ignoring pristine-tar, %s archives '
                                     'not supported' % archive_fmt)
        else:
            gbp.log.info("No orig source archive imported")

        # Import packaging files. For native packages we assume that also
        # packaging files are found in the source tarball
        if not options.native or not sources:
            gbp.log.info("Importing packaging files")
            branch = options.packaging_branch
            if not repo.has_branch(branch):
                if options.create_missing_branches:
                    gbp.log.info("Will create missing branch '%s'" % branch)
                else:
                    gbp.log.err(no_packaging_branch_msg % branch + "\n"
                                "Also check the --create-missing-branches "
                                "option.")
                    raise GbpError

            tag = repo.version_to_tag(options.packaging_tag, tag_str_fields)
            msg = "%s release %s" % (options.vendor, ver_str)

            if options.orphan_packaging or not sources:
                commit = repo.commit_dir(
                    dirs['packaging_base'], "Import %s" % msg, branch,
                    author=author, committer=committer,
                    create_missing_branch=options.create_missing_branches)
            else:
                # Copy packaging files to the unpacked sources dir
                try:
                    pkgsubdir = os.path.join(sources.unpacked,
                                             options.packaging_dir)
                    os.mkdir(pkgsubdir)
                except OSError as err:
                    if err.errno != errno.EEXIST:
                        raise
                for fname in os.listdir(dirs['packaging']):
                    shutil.copy2(os.path.join(dirs['packaging'], fname),
                                 pkgsubdir)
                commit = repo.commit_dir(
                    sources.unpacked, "Import %s" % msg, branch,
                    other_parents=[src_commit],
                    author=author, committer=committer,
                    create_missing_branch=options.create_missing_branches)

            # Import patches on top of the source tree
            # (only for non-native packages with non-orphan packaging)
            force_to_branch_head(repo, options.packaging_branch)

            # Create packaging tag
            repo.create_tag(name=tag, msg=msg, commit=commit,
                            sign=options.sign_tags, keyid=options.keyid)

        force_to_branch_head(repo, options.packaging_branch)

    except KeyboardInterrupt:
        ret = 1
        gbp.log.err("Interrupted. Aborting.")
    except gbpc.CommandExecFailed:
        ret = 1
    except GitRepositoryError as err:
        gbp.log.err("Git command failed: %s" % err)
        ret = 1
    except GbpError as err:
        if str(err):
            gbp.log.err(err)
        ret = 1
    except NoSpecError as err:
        gbp.log.err("Failed determine spec file: %s" % err)
        ret = 1
    except SkipImport:
        skipped = True
    finally:
        os.chdir(dirs['top'])
        del_tmpdir()

    if not ret and not skipped:
        gbp.log.info("Version '%s' imported under '%s'" % (ver_str, repo.path))
    return ret

def _has_patches(self, specfile, patches):
    spec = SpecFile(specfile)
    eq_(sorted([p['linevalue'] for p in spec._patches().values()]),
        sorted(patches))

def main(argv):
    """Main function of the git-import-srpm script"""
    dirs = dict(top=os.path.abspath(os.curdir))

    ret = 0
    skipped = False

    options, args = parse_args(argv)

    if len(args) != 1:
        gbp.log.err("Need to give exactly one package to import. Try --help.")
        return 1
    try:
        dirs['tmp_base'] = tempfile.mkdtemp(dir=options.tmp_dir,
                                            prefix='import-srpm')
    except GbpError as err:
        gbp.log.err(err)
        return 1
    try:
        srpm = args[0]
        if options.download:
            srpm = download_source(srpm, dirs)

        # Real srpm, we need to unpack, first
        true_srcrpm = False
        if not os.path.isdir(srpm) and not srpm.endswith(".spec"):
            src = parse_srpm(srpm)
            true_srcrpm = True
            dirs['pkgextract'] = tempfile.mkdtemp(dir=dirs['tmp_base'],
                                                  prefix='pkgextract_')
            gbp.log.info("Extracting src rpm to '%s'" % dirs['pkgextract'])
            src.unpack(dirs['pkgextract'])
            preferred_spec = src.name + '.spec'
            srpm = dirs['pkgextract']
        elif os.path.isdir(srpm):
            preferred_spec = os.path.basename(srpm.rstrip('/')) + '.spec'
        else:
            preferred_spec = None

        # Find and parse spec file
        if os.path.isdir(srpm):
            gbp.log.debug("Trying to import an unpacked srpm from '%s'" % srpm)
            dirs['src'] = os.path.abspath(srpm)
            spec = guess_spec(srpm, True, preferred_spec)
        else:
            gbp.log.debug("Trying to import an srpm from '%s' with spec "
                          "file '%s'" % (os.path.dirname(srpm), srpm))
            dirs['src'] = os.path.abspath(os.path.dirname(srpm))
            spec = SpecFile(srpm)

        # Check the repository state
        try:
            repo = RpmGitRepository('.')
            is_empty = repo.is_empty()

            (clean, out) = repo.is_clean()
            if not clean and not is_empty:
                gbp.log.err("Repository has uncommitted changes, commit "
                            "these first: ")
                raise GbpError(out)

        except GitRepositoryError:
            gbp.log.info("No git repository found, creating one.")
            is_empty = True
            repo = RpmGitRepository.create(spec.name)
            os.chdir(repo.path)

        if repo.bare:
            set_bare_repo_options(options)

        # Create more tempdirs
        dirs['origsrc'] = tempfile.mkdtemp(dir=dirs['tmp_base'],
                                           prefix='origsrc_')
        dirs['packaging_base'] = tempfile.mkdtemp(dir=dirs['tmp_base'],
                                                  prefix='packaging_')
        dirs['packaging'] = os.path.join(dirs['packaging_base'],
                                         options.packaging_dir)
        try:
            os.mkdir(dirs['packaging'])
        except OSError as err:
            if err.errno != errno.EEXIST:
                raise

        if true_srcrpm:
            # For true src.rpm we just take everything
            files = os.listdir(dirs['src'])
        else:
            # Need to copy files to the packaging directory given by caller
            files = [os.path.basename(patch.path)
                     for patch in spec.patchseries(unapplied=True,
                                                   ignored=True)]
            for filename in spec.sources().values():
                files.append(os.path.basename(filename))
            files.append(os.path.join(spec.specdir, spec.specfile))
        # Don't copy orig source archive, though
        if spec.orig_src and spec.orig_src['filename'] in files:
            files.remove(spec.orig_src['filename'])

        for fname in files:
            fpath = os.path.join(dirs['src'], fname)
            if os.path.exists(fpath):
                shutil.copy2(fpath, dirs['packaging'])
            else:
                gbp.log.err("File '%s' listed in spec not found" % fname)
                raise GbpError

        # Unpack orig source archive
        if spec.orig_src:
            orig_tarball = os.path.join(dirs['src'], spec.orig_src['filename'])
            sources = RpmUpstreamSource(orig_tarball)
            sources.unpack(dirs['origsrc'], options.filters)
        else:
            sources = None

        src_tag_format = options.packaging_tag if options.native \
            else options.upstream_tag
        tag_str_fields = dict(spec.version, vendor=options.vendor.lower())
        src_tag = repo.version_to_tag(src_tag_format, tag_str_fields)
        ver_str = compose_version_str(spec.version)

        if repo.find_version(options.packaging_tag, tag_str_fields):
            gbp.log.warn("Version %s already imported." % ver_str)
            if options.allow_same_version:
                gbp.log.info("Moving tag of version '%s' since import forced" %
                             ver_str)
                move_tag_stamp(repo, options.packaging_tag, tag_str_fields)
            else:
                raise SkipImport

        if is_empty:
            options.create_missing_branches = True

        # Determine author and committer info, currently same info is used
        # for both sources and packaging files
        author = None
        if spec.packager:
            match = re.match(r'(?P<name>.*[^ ])\s*<(?P<email>\S*)>',
                             spec.packager.strip())
            if match:
                author = GitModifier(match.group('name'), match.group('email'))
        if not author:
            author = GitModifier()
            gbp.log.debug("Couldn't determine packager info")
        committer = committer_from_author(author, options)

        # Import sources
        if sources:
            src_commit = repo.find_version(src_tag_format, tag_str_fields)
            if not src_commit:
                gbp.log.info("Tag %s not found, importing sources" % src_tag)

                branch = [options.upstream_branch,
                          options.packaging_branch][options.native]
                if not repo.has_branch(branch):
                    if options.create_missing_branches:
                        gbp.log.info("Will create missing branch '%s'" %
                                     branch)
                    else:
                        gbp.log.err(no_upstream_branch_msg % branch + "\n"
                                    "Also check the --create-missing-branches "
                                    "option.")
                        raise GbpError
                src_vendor = "Native" if options.native else "Upstream"
                msg = "%s version %s" % (src_vendor, spec.upstreamversion)
                src_commit = repo.commit_dir(
                    sources.unpacked, "Imported %s" % msg, branch,
                    author=author, committer=committer,
                    create_missing_branch=options.create_missing_branches)

            repo.create_tag(name=src_tag, msg=msg, commit=src_commit,
                            sign=options.sign_tags, keyid=options.keyid)

            if not options.native:
                if options.pristine_tar:
                    archive_fmt = parse_archive_filename(orig_tarball)[1]
                    if archive_fmt == 'tar':
                        repo.pristine_tar.commit(
                            orig_tarball,
                            'refs/heads/%s' % options.upstream_branch)
                    else:
                        gbp.log.warn('Ignoring pristine-tar, %s archives '
                                     'not supported' % archive_fmt)
        else:
            gbp.log.info("No orig source archive imported")

        # Import packaging files. For native packages we assume that also
        # packaging files are found in the source tarball
        if not options.native or not sources:
            gbp.log.info("Importing packaging files")
            branch = options.packaging_branch
            if not repo.has_branch(branch):
                if options.create_missing_branches:
                    gbp.log.info("Will create missing branch '%s'" % branch)
                else:
                    gbp.log.err(no_packaging_branch_msg % branch + "\n"
                                "Also check the --create-missing-branches "
                                "option.")
                    raise GbpError

            tag = repo.version_to_tag(options.packaging_tag, tag_str_fields)
            msg = "%s release %s" % (options.vendor, ver_str)

            if options.orphan_packaging or not sources:
                commit = repo.commit_dir(
                    dirs['packaging_base'], "Imported %s" % msg, branch,
                    author=author, committer=committer,
                    create_missing_branch=options.create_missing_branches)
            else:
                # Copy packaging files to the unpacked sources dir
                try:
                    pkgsubdir = os.path.join(sources.unpacked,
                                             options.packaging_dir)
                    os.mkdir(pkgsubdir)
                except OSError as err:
                    if err.errno != errno.EEXIST:
                        raise
                for fname in os.listdir(dirs['packaging']):
                    shutil.copy2(os.path.join(dirs['packaging'], fname),
                                 pkgsubdir)
                commit = repo.commit_dir(
                    sources.unpacked, "Imported %s" % msg, branch,
                    other_parents=[src_commit],
                    author=author, committer=committer,
                    create_missing_branch=options.create_missing_branches)

            # Import patches on top of the source tree
            # (only for non-native packages with non-orphan packaging)
            force_to_branch_head(repo, options.packaging_branch)

            # Create packaging tag
            repo.create_tag(name=tag, msg=msg, commit=commit,
                            sign=options.sign_tags, keyid=options.keyid)

        force_to_branch_head(repo, options.packaging_branch)

    except KeyboardInterrupt:
        ret = 1
        gbp.log.err("Interrupted. Aborting.")
    except gbpc.CommandExecFailed:
        ret = 1
    except GitRepositoryError as err:
        gbp.log.err("Git command failed: %s" % err)
        ret = 1
    except GbpError as err:
        if str(err):
            gbp.log.err(err)
        ret = 1
    except NoSpecError as err:
        gbp.log.err("Failed determine spec file: %s" % err)
        ret = 1
    except SkipImport:
        skipped = True
    finally:
        os.chdir(dirs['top'])
        gbpc.RemoveTree(dirs['tmp_base'])()

    if not ret and not skipped:
        gbp.log.info("Version '%s' imported under '%s'" % (ver_str, spec.name))
    return ret
