def _get_build_version(self): """ Figure out the git tag and version-release we're building. """ # Determine which package version we should build: build_version = None if self.build_tag: build_version = self.build_tag[len(self.project_name + "-"):] else: build_version = get_latest_tagged_version(self.project_name) if build_version is None: if not self.test: error_out(["Unable to lookup latest package info.", "Perhaps you need to tag first?"]) warn_out("unable to lookup latest package " "tag, building untagged test project") build_version = get_spec_version_and_release(self.start_dir, find_spec_like_file(self.start_dir)) self.build_tag = "%s-%s" % (self.project_name, build_version) self.spec_version = build_version.split('-')[-2] self.spec_release = build_version.split('-')[-1] if not self.test: check_tag_exists(self.build_tag, offline=self.offline) return build_version
def _bump_version(self, release=False, zstream=False):
    """
    Bump up the package version in the spec file.

    Set release to True to bump the package release instead.

    Checks for the keep version option and if found, won't actually bump
    the version or release.

    Returns the new "version-release" string read back from the spec.
    """
    old_version = get_latest_tagged_version(self.project_name)
    if old_version is None:
        old_version = "untagged"
    if not self.keep_version:
        # Fix: raw strings so "\s" is a regex escape, not a (deprecated)
        # string escape (SyntaxWarning on modern Python).
        version_regex = re.compile(r"^(version:\s*)(.+)$", re.IGNORECASE)
        release_regex = re.compile(r"^(release:\s*)(.+)$", re.IGNORECASE)

        # Fix: context managers guarantee both handles are closed even if
        # an error occurs mid-rewrite.
        with open(self.spec_file, 'r') as in_f, \
                open(self.spec_file + ".new", 'w') as out_f:
            for line in in_f.readlines():
                version_match = version_regex.match(line)
                release_match = release_regex.match(line)

                if version_match and not zstream and not release:
                    current_version = version_match.group(2)
                    # An explicitly requested version wins over auto-bump.
                    if hasattr(self, '_use_version'):
                        updated_content = self._use_version
                    else:
                        updated_content = increase_version(current_version)
                    line = "".join([version_match.group(1), updated_content, "\n"])
                elif release_match:
                    current_release = release_match.group(2)
                    if hasattr(self, '_use_release'):
                        updated_content = self._use_release
                    elif release:
                        updated_content = increase_version(current_release)
                    elif zstream:
                        updated_content = increase_zstream(current_release)
                    else:
                        # Version bump resets the release field.
                        updated_content = reset_release(current_release)
                    line = "".join([release_match.group(1), updated_content, "\n"])

                out_f.write(line)
        shutil.move(self.spec_file + ".new", self.spec_file)

    new_version = get_spec_version_and_release(self.full_project_dir,
        self.spec_file_name)
    if new_version.strip() == "":
        msg = "Error getting bumped package version, try: \n"
        msg = msg + " 'rpm -q --specfile %s'" % self.spec_file
        error_out(msg)
    info_out("Tagging new version of %s: %s -> %s" % (
        self.project_name, old_version, new_version))
    return new_version
def main(self):
    """Tag CLI entry point: select a tagger class and run it."""
    BaseCliModule.main(self)

    # A branch can opt out of tagging entirely via tito.props.
    if self.global_config.has_option(GLOBALCONFIG_SECTION, "block_tagging"):
        debug("block_tagging defined in tito.props")
        error_out("Tagging has been disabled in this git branch.")

    build_dir = os.path.normpath(os.path.abspath(self.options.output_dir))
    package_name = get_project_name(tag=None)
    self.pkg_config = self._read_project_config(
        package_name, build_dir, None, None)

    # Per-project tagger setting overrides the branch-wide default.
    if self.pkg_config.has_option("buildconfig", "tagger"):
        class_name = self.pkg_config.get("buildconfig", "tagger")
    else:
        class_name = self.global_config.get(GLOBALCONFIG_SECTION,
            DEFAULT_TAGGER)
    tagger_class = get_class_by_name(class_name)
    debug("Using tagger class: %s" % tagger_class)

    tagger = tagger_class(global_config=self.global_config,
        keep_version=self.options.keep_version)
    tagger.run(self.options)
def main(self):
    """
    Build CLI entry point.

    Resolves which tag/version to build, verifies the tag exists (unless
    --test), then hands off to the configured builder.
    """
    BaseCliModule.main(self)

    build_dir = os.path.normpath(os.path.abspath(self.options.output_dir))
    package_name = get_project_name(tag=self.options.tag)

    build_tag = None
    build_version = None
    # Determine which package version we should build:
    if self.options.tag:
        build_tag = self.options.tag
        # Tag format is "<name>-<version-release>"; strip the name prefix.
        build_version = build_tag[len(package_name + "-"):]
    else:
        build_version = get_latest_tagged_version(package_name)
        # Fix: identity comparison with None ("is", not "==") per PEP 8.
        if build_version is None:
            error_out(["Unable to lookup latest package info.",
                "Perhaps you need to tag first?"])
        build_tag = "%s-%s" % (package_name, build_version)

    if not self.options.test:
        check_tag_exists(build_tag, offline=self.options.offline)

    self.pkg_config = self._read_project_config(package_name, build_dir,
        self.options.tag, self.options.no_cleanup)

    builder = self._create_builder(package_name, build_tag,
        build_version, self.options, self.pkg_config,
        build_dir)
    builder.run(self.options)
def patch_upstream(self):
    """ Create one patch per each release """
    # Run generate-patches.pl from the project's directory inside the
    # git checkout.
    ch_dir = self.git_root
    if self.relative_project_dir != "/":
        ch_dir = os.path.join(self.git_root,
            self.relative_project_dir)
    os.chdir(ch_dir)
    debug("Running /usr/bin/generate-patches.pl -d %s %s %s-1 %s %s" %
        (self.rpmbuild_gitcopy, self.project_name,
            self.upstream_version, self.build_version,
            self.git_commit_id))
    output = run_command("/usr/bin/generate-patches.pl -d %s %s %s-1 %s %s" %
        (self.rpmbuild_gitcopy, self.project_name,
            self.upstream_version, self.build_version,
            self.git_commit_id))
    # The script prints one generated patch filename per line.
    self.patch_files = output.split("\n")
    for p_file in self.patch_files:
        # Binary diffs cannot be applied as spec-file patches; abort.
        (status, output) = getstatusoutput(
            "grep 'Binary files .* differ' %s/%s " %
            (self.rpmbuild_gitcopy, p_file))
        if status == 0 and output != "":
            error_out("You are doomed. Diff contains binary files. You can not use this builder")
        run_command("cp %s/%s %s" %
            (self.rpmbuild_gitcopy, p_file, self.rpmbuild_sourcedir))

    (patch_number, patch_insert_index, patch_apply_index, lines) = self._patch_upstream()
    for patch in self.patch_files:
        lines.insert(patch_insert_index, "Patch%s: %s\n" % (patch_number, patch))
        lines.insert(patch_apply_index, "%%patch%s -p1\n" % (patch_number))
        patch_number += 1
        patch_insert_index += 1
        # Each iteration inserts two lines above the apply point (one
        # PatchN: header and one %patchN), so it advances by two.
        patch_apply_index += 2
    self._write_spec(lines)
def _read_global_config(self):
    """
    Read global build.py configuration from the rel-eng dir of the git
    repository we're being run from.

    Returns a ConfigParser with the branch configuration loaded and
    validated; errors out if no config file can be found or required
    options are missing.
    """
    rel_eng_dir = os.path.join(find_git_root(), "rel-eng")
    filename = os.path.join(rel_eng_dir, GLOBAL_BUILD_PROPS_FILENAME)
    if not os.path.exists(filename):
        # HACK: Try the old filename location, pre-tito rename:
        oldfilename = os.path.join(rel_eng_dir, "global.build.py.props")
        if not os.path.exists(oldfilename):
            error_out("Unable to locate branch configuration: %s"
                "\nPlease run 'tito init'" % filename)
        # Fix: actually read the legacy file we just found. Previously we
        # fell through and read the missing new-style filename, yielding
        # an empty config and a misleading "missing required config" error.
        filename = oldfilename

    config = ConfigParser.ConfigParser()
    config.read(filename)

    # Verify the config contains what we need from it:
    required_global_config = [
        (GLOBALCONFIG_SECTION, DEFAULT_BUILDER),
        (GLOBALCONFIG_SECTION, DEFAULT_TAGGER),
    ]
    for section, option in required_global_config:
        if not config.has_section(section) or not \
                config.has_option(section, option):
            error_out("%s missing required config: %s %s" % (
                filename, section, option))

    return config
def _build(self, branch):
    """ Submit a Fedora build from current directory. """
    target_param = ""
    scratch_param = ""
    build_target = self._get_build_target_for_branch(branch)
    if build_target:
        target_param = "--target %s" % build_target
    if self.scratch:
        scratch_param = "--scratch"

    build_cmd = "%s build --nowait %s %s" % (self.cli_tool, scratch_param, target_param)

    if self.dry_run:
        self.print_dry_run_warning(build_cmd)
        return

    info_out("Submitting build: %s" % build_cmd)
    (status, output) = getstatusoutput(build_cmd)
    if status > 0:
        if "already been built" in output:
            # A resubmitted tag is not fatal; keep going.
            warn_out("Build has been submitted previously, continuing...")
        else:
            # Fix: a missing comma after "Unable to submit build." caused
            # implicit string concatenation with the status line, merging
            # two intended message lines into one.
            error_out([
                "Unable to submit build.",
                " Status code: %s\n" % status,
                " Output: %s\n" % output,
            ])

    # Print the task ID and URL:
    for line in extract_task_info(output):
        print(line)
def main(self, argv):
    """
    Tag CLI entry point: load config, instantiate the configured tagger
    class, and run it.
    """
    BaseCliModule.main(self, argv)

    build_dir = os.path.normpath(os.path.abspath(self.options.output_dir))
    package_name = get_project_name(tag=None)

    self.load_config(package_name, build_dir, None)
    if self.config.has_option(BUILDCONFIG_SECTION, "block_tagging"):
        debug("block_tagging defined in tito.props")
        error_out("Tagging has been disabled in this git branch.")

    tagger_class = get_class_by_name(self.config.get(
        BUILDCONFIG_SECTION, DEFAULT_TAGGER))
    debug("Using tagger class: %s" % tagger_class)

    tagger = tagger_class(config=self.config,
        user_config=self.user_config,
        keep_version=self.options.keep_version,
        offline=self.options.offline)
    try:
        return tagger.run(self.options)
    except TitoException as e:
        # Fix: "except ... as e" replaces the Python 2.4-era
        # sys.exc_info() workaround; behavior is identical.
        # NOTE(review): e.message assumes TitoException defines a
        # 'message' attribute -- plain Python 3 exceptions do not; confirm.
        error_out(e.message)
def __init__(self, name=None, tag=None, build_dir=None,
        config=None, user_config=None,
        args=None, **kwargs):
    """
    Set up a fetch-based builder. Tag-based builds are not supported;
    falls back to ArgSourceStrategy when no fetch strategy is configured.
    """
    BuilderBase.__init__(self, name=name, build_dir=build_dir,
        config=config, user_config=user_config, args=args, **kwargs)

    if tag:
        error_out("FetchBuilder does not support building "
            "specific tags.")

    if not config.has_option("builder", "fetch_strategy"):
        print("WARNING: no fetch_strategy specified in tito.props"
            ", assuming ArgSourceStrategy.")
        if not config.has_section("builder"):
            config.add_section("builder")
        config.set('builder', 'fetch_strategy',
            'tito.builder.fetch.ArgSourceStrategy')

    version_release = get_spec_version_and_release(
        self.start_dir, '%s.spec' % self.project_name)
    self.build_tag = '%s-%s' % (self.project_name, version_release)
def main(self, argv):
    """
    Tag CLI entry point: load config, instantiate the configured tagger
    class, and run it.
    """
    BaseCliModule.main(self, argv)

    build_dir = os.path.normpath(os.path.abspath(self.options.output_dir))
    package_name = get_project_name(tag=None)

    self.load_config(package_name, build_dir, None)
    if self.config.has_option(BUILDCONFIG_SECTION, "block_tagging"):
        debug("block_tagging defined in tito.props")
        error_out("Tagging has been disabled in this git branch.")

    tagger_class = get_class_by_name(
        self.config.get(BUILDCONFIG_SECTION, DEFAULT_TAGGER))
    debug("Using tagger class: %s" % tagger_class)

    tagger = tagger_class(config=self.config,
        user_config=self.user_config,
        keep_version=self.options.keep_version,
        offline=self.options.offline)
    try:
        return tagger.run(self.options)
    except TitoException as e:
        # Fix: "except ... as e" replaces the Python 2.4-era
        # sys.exc_info() workaround; behavior is identical.
        # NOTE(review): e.message assumes TitoException defines a
        # 'message' attribute -- plain Python 3 exceptions do not; confirm.
        error_out(e.message)
def _setup_sources(self):
    """
    Fetch annexed sources, replace the symlinked copies in the git copy
    with real unlocked files, then re-lock the annex.
    """
    super(GitAnnexBuilder, self)._setup_sources()

    old_cwd = os.getcwd()
    os.chdir(os.path.join(old_cwd, self.relative_project_dir))

    # NOTE: 'which' may not be installed... (docker containers)
    (status, output) = getstatusoutput("which git-annex")
    if status != 0:
        msg = "Please run '%s install git-annex' as root." % package_manager()
        error_out('%s' % msg)

    run_command("git-annex lock")
    annexed_files = run_command("git-annex find --include='*'").splitlines()
    run_command("git-annex get")
    run_command("git-annex unlock")
    debug(" Annex files: %s" % annexed_files)

    for annex in annexed_files:
        debug("Copying unlocked file %s" % annex)
        # Drop the annex symlink and replace it with the real content.
        os.remove(os.path.join(self.rpmbuild_gitcopy, annex))
        shutil.copy(annex, self.rpmbuild_gitcopy)

    self._lock()
    os.chdir(old_cwd)
def patch_upstream(self):
    """ Create one patch per each release """
    # NOTE(review): near-duplicate of the getstatusoutput-based
    # patch_upstream elsewhere in this file; this variant uses the
    # Python 2 'commands' module.
    ch_dir = self.git_root
    if self.relative_project_dir != "/":
        ch_dir = os.path.join(self.git_root, self.relative_project_dir)
    os.chdir(ch_dir)
    debug("Running /usr/bin/generate-patches.pl -d %s %s %s-1 %s %s" \
        % (self.rpmbuild_gitcopy, self.project_name,
            self.upstream_version, self.build_version,
            self.git_commit_id))
    output = run_command("/usr/bin/generate-patches.pl -d %s %s %s-1 %s %s" \
        % (self.rpmbuild_gitcopy, self.project_name,
            self.upstream_version, self.build_version,
            self.git_commit_id))
    # One patch filename per line of script output.
    self.patch_files = output.split("\n")
    for p_file in self.patch_files:
        # Binary diffs cannot be applied as spec-file patches; abort.
        (status, output) = commands.getstatusoutput(
            "grep 'Binary files .* differ' %s/%s " %
            (self.rpmbuild_gitcopy, p_file))
        if status == 0 and output != "":
            error_out(
                "You are doomed. Diff contains binary files. You can not use this builder"
            )
        run_command(
            "cp %s/%s %s" %
            (self.rpmbuild_gitcopy, p_file, self.rpmbuild_sourcedir))

    (patch_number, patch_insert_index, patch_apply_index, lines) = self._patch_upstream()
    for patch in self.patch_files:
        lines.insert(patch_insert_index, "Patch%s: %s\n" % (patch_number, patch))
        lines.insert(patch_apply_index, "%%patch%s -p1\n" % (patch_number))
        patch_number += 1
        patch_insert_index += 1
        # Each iteration inserts two lines above the apply point (one
        # PatchN: header and one %patchN), so it advances by two.
        patch_apply_index += 2
    self._write_spec(lines)
def _setup_sources(self):
    """
    Fetch annexed sources, swap the annex symlinks in the git copy for
    real unlocked files, then re-lock the annex.
    """
    super(GitAnnexBuilder, self)._setup_sources()

    # Remember where we started; restored at the end (and kept on self,
    # matching the rest of this class).
    self.old_cwd = os.getcwd()
    os.chdir(os.path.join(self.old_cwd, self.relative_project_dir))

    # NOTE: 'which' may not be installed... (docker containers)
    (status, output) = getstatusoutput("which git-annex")
    if status != 0:
        msg = "Please run '%s' as root." % self.package_manager.install(["git-annex"])
        error_out('%s' % msg)

    run_command("git-annex lock")
    annexed_files = run_command("git-annex find --include='*'").splitlines()
    run_command("git-annex get")
    run_command("git-annex unlock")
    debug(" Annex files: %s" % annexed_files)

    for annex in annexed_files:
        debug("Copying unlocked file %s" % annex)
        target = os.path.join(self.rpmbuild_gitcopy, annex)
        os.remove(target)
        shutil.copy(annex, self.rpmbuild_gitcopy)

    self._lock()
    os.chdir(self.old_cwd)
def __init__(self, name=None, tag=None, build_dir=None,
        config=None, user_config=None,
        args=None, **kwargs):
    """
    Builder that requires a fetch_prep_command; tag-based builds are
    not supported.
    """
    BuilderBase.__init__(self, name=name, build_dir=build_dir,
        config=config, user_config=user_config, args=args, **kwargs)

    if tag:
        error_out("FetchBuilder does not support building specific tags.")

    if not config.has_option('builder', 'fetch_prep_command'):
        error_out("NativeFetchBuilder requires fetch_prep_command.")

    version_release = get_spec_version_and_release(
        self.start_dir, '%s.spec' % self.project_name)
    self.build_tag = '%s-%s' % (self.project_name, version_release)
def _bump_version(self, release=False, zstream=False):
    """
    Bump up the package version in the spec file.

    Set release to True to bump the package release instead.

    Checks for the keep version option and if found, won't actually bump
    the version or release.

    Returns the new "version-release" string read back from the spec.
    """
    old_version = get_latest_tagged_version(self.project_name)
    # Fix: identity comparison with None ("is", not "==") per PEP 8.
    if old_version is None:
        old_version = "untagged"

    # TODO: Do this here instead of calling out to an external Perl script:
    if not self.keep_version:
        bump_type = "bump-version"
        if release:
            bump_type = "bump-release"
        elif zstream:
            bump_type = "bump-zstream"

        script_path = get_script_path("bump-version.pl")
        cmd = "%s %s --specfile %s" % \
            (script_path, bump_type, self.spec_file)
        run_command(cmd)

    new_version = self._get_spec_version_and_release()
    if new_version.strip() == "":
        msg = "Error getting bumped package version, try: \n"
        msg = msg + " 'rpm -q --specfile %s'" % self.spec_file
        error_out(msg)
    print("Tagging new version of %s: %s -> %s" % (self.project_name,
        old_version, new_version))
    return new_version
def _validate_options(self): if self.options.all and self.options.all_starting_with: error_out("Cannot combine --all and --all-starting-with.") if (self.options.all or self.options.all_starting_with) and \ len(self.args) > 1: error_out("Cannot use explicit release targets with " "--all or --all-starting-with.")
def _check_build_dirs_access(self, build_dirs): """ Ensure the build directories are writable. """ msgs = [] for d in build_dirs: if not os.access(d, os.W_OK): msgs.append("%s is not writable." % d) if msgs: error_out(msgs)
def _check_required_config(self, config):
    # Verify the config contains what we need from it:
    for section, option in ((BUILDCONFIG_SECTION, DEFAULT_BUILDER),
            (BUILDCONFIG_SECTION, DEFAULT_TAGGER)):
        present = config.has_section(section) and \
            config.has_option(section, option)
        if not present:
            error_out("tito.props missing required config: %s %s" % (
                section, option))
def _check_required_config(self, config):
    # Verify the config contains what we need from it:
    required = [
        (BUILDCONFIG_SECTION, DEFAULT_BUILDER),
        (BUILDCONFIG_SECTION, DEFAULT_TAGGER),
    ]
    for section, option in required:
        if not config.has_section(section):
            error_out("tito.props missing required config: %s %s" % (section, option))
        if not config.has_option(section, option):
            error_out("tito.props missing required config: %s %s" % (section, option))
def _check_releaser_config(self): """ Verify this release target has all the config options it needs. """ for opt in self.GLOBAL_REQUIRED_CONFIG: if not self.releaser_config.has_option(self.target, opt): error_out("Release target '%s' missing required option '%s'" % (self.target, opt)) for opt in self.REQUIRED_CONFIG: if not self.releaser_config.has_option(self.target, opt): error_out("Release target '%s' missing required option '%s'" % (self.target, opt))
def _check_releaser_config(self):
    """
    Verify this release target has all the config options it needs.

    Resolves the Copr SRPM remote location from the releaser config
    first, falling back to ~/.titorc, then defers to the Koji checks.
    """
    has_target_opt = self.releaser_config.has_option(
        self.target, "remote_location")
    if has_target_opt:
        self.remote_location = self.releaser_config.get(
            self.target, "remote_location")
    elif 'COPR_REMOTE_LOCATION' in self.user_config:
        self.remote_location = self.user_config['COPR_REMOTE_LOCATION']
    else:
        error_out(["No remote location for Copr SRPMs found.",
            "Either define 'remote_location' in the releaser configuration "
            "or 'COPR_REMOTE_LOCATION' in ~/.titorc"])
    KojiReleaser._check_releaser_config(self)
def _confirm_commit_msg(self, diff_output):
    """
    Generates a commit message in a temporary file, gives the user a
    chance to edit it, and returns the filename to the caller.
    """
    fd, name = tempfile.mkstemp()
    debug("Storing commit message in temp file: %s" % name)
    write(
        fd,
        "Update %s to %s\n" % (self.project_name,
            self.builder.build_version))
    # Write out Resolves line for all bugzillas we see in commit diff:
    # TODO: move to DistGitBuilder only?
    try:
        (required_bz_flags, placeholder_bz) = self._get_bz_flags()
        extractor = BugzillaExtractor(diff_output,
            required_flags=required_bz_flags,
            placeholder_bz=placeholder_bz)
        for line in extractor.extract():
            write(fd, line + "\n")
    except MissingBugzillaCredsException:
        error_out([
            "Releaser specifies required flags but you have not configured",
            "a ~/.bugzillarc with your bugzilla credentials.",
            "Example:",
            "",
            "[bugzilla.redhat.com]",
            "user = [email protected]",
            "password = mypassword"
        ])

    print("")
    print("##### Commit message: #####")
    print("")

    # Rewind and echo the generated message for review. os.fdopen takes
    # ownership of fd, so f.close() releases the descriptor too.
    os.lseek(fd, 0, 0)
    f = os.fdopen(fd)
    for line in f.readlines():
        print(line)
    f.close()

    print("")
    print("###############################")
    print("")
    if self._ask_yes_no(
            "Would you like to edit this commit message? [y/n] ", False):
        debug("Opening editor for user to edit commit message in: %s" % name)
        editor = 'vi'
        if "EDITOR" in os.environ:
            editor = os.environ["EDITOR"]
        subprocess.call(editor.split() + [name])

    return name
def _fetch_local(self):
    """
    Build a tarball from the local git checkout given via the
    'source_dir' builder arg and place it in the rpmbuild SOURCES
    archive dir.

    Returns a short "gitNNNNNNN" revision string for the source HEAD
    (or "local" if the sha could not be read).
    """
    source_dir = os.path.expanduser(self.builder.args['source_dir'][0])

    # Fix: initialize version so a spec with no Version: line fails with
    # our error message instead of a NameError; raw string for the regex.
    version = None
    version_regex = re.compile(r"^(version:\s*)(.+)$", re.IGNORECASE)
    with open(self.spec_file, 'r') as spec:
        for line in spec.readlines():
            match = version_regex.match(line)
            if match:
                version = match.group(2)
    if not version:
        error_out("Version not found in spec")

    old_dir = os.getcwd()
    os.chdir(source_dir)

    fetchdir = os.path.join(self.builder.rpmbuild_sourcedir, 'archive')
    if not os.path.exists(fetchdir):
        os.mkdir(fetchdir)

    arch_prefix = "-".join([self.builder.project_name, version])
    # git archive --format=tar.gz --prefix=pulp-2.15.0/ master > pulp-2.15.0.tar.gz
    with open("./%s.tar.gz" % arch_prefix, "w+") as archive:
        subprocess.call([
            "git", "archive", "--format=tar.gz",
            ("--prefix=%s/" % arch_prefix), "master"
        ], stdout=archive)

    sources = glob.glob("./*.tar.gz")
    print(sources)
    for srcfile in sources:
        debug("Copying %s from local source dir" % srcfile)
        shutil.move(srcfile, os.path.join(fetchdir, os.path.basename(srcfile)))

    gitrev = "local"
    gitsha = subprocess.check_output(["git", "rev-parse", "HEAD"]).decode('utf-8')
    if gitsha:
        gitrev = "git%s" % gitsha[0:7]

    os.chdir(old_dir)
    return gitrev
def _confirm_commit_msg(self, diff_output):
    """
    Generates a commit message in a temporary file, gives the user a
    chance to edit it, and returns the filename to the caller.
    """
    fd, name = tempfile.mkstemp()
    debug("Storing commit message in temp file: %s" % name)
    write(fd, "Update %s to %s\n" % (self.project_name,
        self.builder.build_version))
    # Write out Resolves line for all bugzillas we see in commit diff:
    # TODO: move to DistGitBuilder only?
    try:
        (required_bz_flags, placeholder_bz) = self._get_bz_flags()
        extractor = BugzillaExtractor(diff_output,
            required_flags=required_bz_flags,
            placeholder_bz=placeholder_bz)
        for line in extractor.extract():
            write(fd, line + "\n")
    except MissingBugzillaCredsException:
        error_out([
            "Releaser specifies required flags but you have not configured",
            "a ~/.bugzillarc with your bugzilla credentials.",
            "Example:",
            "",
            "[bugzilla.redhat.com]",
            "user = [email protected]",
            "password = mypassword"])

    print("")
    print("##### Commit message: #####")
    print("")

    # Rewind and echo the generated message for review. os.fdopen takes
    # ownership of fd, so f.close() releases the descriptor too.
    os.lseek(fd, 0, 0)
    f = os.fdopen(fd)
    for line in f.readlines():
        print(line)
    f.close()

    print("")
    print("###############################")
    print("")
    if self._ask_yes_no("Would you like to edit this commit message? [y/n] ", False):
        debug("Opening editor for user to edit commit message in: %s" % name)
        editor = 'vi'
        if "EDITOR" in os.environ:
            editor = os.environ["EDITOR"]
        subprocess.call(editor.split() + [name])

    return name
def _validate_options(self): if self.options.srpm and self.options.rpm: error_out("Cannot combine --srpm and --rpm") if self.options.test and self.options.tag: error_out("Cannot build test version of specific tag.") if (self.options.srpm or self.options.rpm) and self.options.release: error_out("Cannot combine --srpm/--rpm with --release.") if self.options.release and (self.options.cvs_release or self.options.koji_release): error_out([ "Cannot combine --cvs-release/--koji-release with --release.", "(--release includes both)"]) if self.options.release and self.options.test: error_out("Cannot combine --release with --test.")
def _format_lines(self): output = [] for bz in self.bzs: output.append("Resolves: #%s - %s" % (bz[0], bz[1])) if len(output) == 0 and self.required_flags: # No bugzillas had required flags, use a placeholder if # we have one, otherwise we have to error out. if self.placeholder_bz: print( "No bugs with required flags were found, using placeholder: %s" % self.placeholder_bz) output.append("Related: #%s" % self.placeholder_bz) else: error_out("No bugzillas found with required flags: %s" % self.required_flags) return output
def _validate_options(self): if not any([self.options.rpm, self.options.srpm, self.options.tgz]): error_out("Need an artifact type to build. Use --rpm, --srpm, or --tgz") if self.options.srpm and self.options.rpm: error_out("Cannot combine --srpm and --rpm") if self.options.test and self.options.tag: error_out("Cannot build test version of specific tag.") if self.options.quiet and self.options.verbose: error_out("Cannot set --quiet and --verbose at the same time.")
def _bump_version(self):
    """
    Bump the version unless VERSION_AND_RELEASE specified in the
    environment. When specified, both the version and release are forced
    as specified.
    VERSION_AND_RELEASE must be VR part of NEVRA
    Eg: 2.0.1-0.1.alpha
    """
    forced = os.environ.get(VERSION_AND_RELEASE)
    if not forced:
        # Nothing forced; fall back to the normal bump logic.
        return VersionTagger._bump_version(self)
    # Split at the last dash: everything before is the version, after is
    # the release.
    parts = forced.rsplit('-', 1)
    if len(parts) != 2:
        error_out('"%s" not valid' % forced)
    self.__update_spec(*parts)
    return forced
def _get_version(self):
    """
    Get the version from the builder.
    Sources are configured at this point.
    """
    # Assuming source0 is a tar.gz we can extract a version from:
    base_name = os.path.basename(self.sources[0])
    debug("Extracting version from: %s" % base_name)

    # Example filename: tito-0.4.18.tar.gz:
    # Fix: raw string with escaped dots -- the old pattern's bare "."
    # matched any character, so non-archive names could slip through.
    simple_version_re = re.compile(r".*-(.*)\.(tar\.gz|tgz|zip|tar\.bz2|gem)")
    match = re.search(simple_version_re, base_name)
    if match:
        version = match.group(1)
    else:
        error_out("Unable to determine version from file: %s" % base_name)
    return version
def _sync_mead_scm(self):
    """
    Push the build tag from the local repo to the Mead SCM remote,
    honoring dry_run and optionally force-pushing after a rejection.
    """
    cmd = "git push %s %s" % (self.push_url, self.builder.build_tag)

    if self.dry_run:
        self.print_dry_run_warning(cmd)
        return

    with chdir(self.git_root):
        info_out("Syncing local repo with %s" % self.push_url)
        try:
            run_command(cmd)
        except RunCommandException as e:
            if "rejected" in e.output:
                if self._ask_yes_no("The remote rejected a push. Force push? [y/n] ", False):
                    run_command("git push --force %s %s" % (self.mead_scm, self.builder.build_tag))
                else:
                    error_out("Could not sync with %s" % self.mead_scm)
            # NOTE(review): the initial push targets push_url while the
            # force push targets mead_scm -- confirm these are meant to
            # differ. Also the exception is re-raised here even after a
            # successful force push; verify that is intentional.
            raise
def __init__(self, name=None, tag=None, build_dir=None, config=None,
        user_config=None, args=None, **kwargs):
    """
    Native fetch builder: refuses tag-based builds and requires a
    fetch_prep_command in tito.props.
    """
    BuilderBase.__init__(self, name=name, build_dir=build_dir, config=config,
        user_config=user_config, args=args, **kwargs)
    if tag:
        error_out("FetchBuilder does not support building specific tags.")
    if not config.has_option('builder', 'fetch_prep_command'):
        error_out("NativeFetchBuilder requires fetch_prep_command.")
    spec_name = '%s.spec' % self.project_name
    self.build_tag = '%s-%s' % (self.project_name,
        get_spec_version_and_release(self.start_dir, spec_name))
def _check_build_dirs_access(self):
    """
    Ensure the build directories are writable.
    """
    # Fold four copy-pasted checks into one loop (behavior identical:
    # error out on the first unwritable directory, in the same order).
    for d in (self.rpmbuild_basedir, self.rpmbuild_dir,
            self.rpmbuild_sourcedir, self.rpmbuild_builddir):
        if not os.access(d, os.W_OK):
            error_out("%s is not writable." % d)
def __init__(self, name=None, tag=None, build_dir=None, config=None,
        user_config=None, args=None, **kwargs):
    """Fetch builder: no tag builds; defaults to ArgSourceStrategy."""
    BuilderBase.__init__(self, name=name, build_dir=build_dir, config=config,
        user_config=user_config, args=args, **kwargs)

    if tag:
        error_out("FetchBuilder does not support building "
            "specific tags.")

    # Fall back to ArgSourceStrategy when nothing is configured.
    if not config.has_option("builder", "fetch_strategy"):
        print("WARNING: no fetch_strategy specified in tito.props"
            ", assuming ArgSourceStrategy.")
        if not config.has_section("builder"):
            config.add_section("builder")
        config.set("builder", "fetch_strategy",
            "tito.builder.fetch.ArgSourceStrategy")

    spec_name = "%s.spec" % self.project_name
    self.build_tag = "%s-%s" % (self.project_name,
        get_spec_version_and_release(self.start_dir, spec_name))
def patch_upstream(self):
    """
    Generate patches for any differences between our tag and the
    upstream tag, and apply them into an exported copy of the
    spec file.
    """
    patch_filename = "%s-to-%s-%s.patch" % (self.upstream_tag,
        self.project_name, self.build_version)
    patch_file = os.path.join(self.rpmbuild_gitcopy,
        patch_filename)
    # Diff from inside the project directory so --relative paths line up.
    patch_dir = self.git_root
    if self.relative_project_dir != "/":
        patch_dir = os.path.join(self.git_root,
            self.relative_project_dir)
    os.chdir(patch_dir)
    debug("patch dir = %s" % patch_dir)
    print("Generating patch [%s]" % patch_filename)
    debug("Patch: %s" % patch_file)
    patch_command = "git diff --relative %s..%s > %s" % \
        (self.upstream_tag, self.git_commit_id,
            patch_file)
    debug("Generating patch with: %s" % patch_command)
    output = run_command(patch_command)
    print(output)
    # Binary diffs cannot be applied as spec-file patches; abort early.
    (status, output) = getstatusoutput(
        "grep 'Binary files .* differ' %s " % patch_file)
    if status == 0 and output != "":
        error_out("You are doomed. Diff contains binary files. You can not use this builder")

    # Creating two copies of the patch here in the temp build directories
    # just out of laziness. Some builders need sources in SOURCES and
    # others need them in the git copy. Being lazy here avoids one-off
    # hacks and both copies get cleaned up anyhow.
    run_command("cp %s %s" % (patch_file, self.rpmbuild_sourcedir))

    (patch_number, patch_insert_index, patch_apply_index, lines) = self._patch_upstream()

    lines.insert(patch_insert_index, "Patch%s: %s\n" % (patch_number, patch_filename))
    # patch_apply_index may be -1 when %autosetup applies patches itself.
    if patch_apply_index > 0:
        lines.insert(patch_apply_index, "%%patch%s -p1\n" % (patch_number))

    self._write_spec(lines)
def _patch_upstream(self):
    """
    Insert patches into the spec file we'll be building
    returns (patch_number, patch_insert_index, patch_apply_index, lines)
    """
    # Fix: context manager instead of open/close so the handle is
    # released even if reading raises.
    with open(self.spec_file, 'r') as f:
        lines = f.readlines()

    # Fix: raw strings so "\d" is a regex escape, not a (deprecated)
    # string escape.
    patch_pattern = re.compile(r'^Patch(\d+):')
    source_pattern = re.compile(r'^Source(\d+)?:')

    # Find the largest PatchX: line, or failing that SourceX:
    patch_number = 0  # What number should we use for our PatchX line
    patch_insert_index = 0  # Where to insert our PatchX line in the list
    patch_apply_index = 0  # Where to insert our %patchX line in the list
    array_index = 0  # Current index in the array
    for line in lines:
        match = source_pattern.match(line)
        if match:
            patch_insert_index = array_index + 1

        match = patch_pattern.match(line)
        if match:
            patch_insert_index = array_index + 1
            patch_number = int(match.group(1)) + 1

        if line.startswith("%prep"):
            # We'll apply patch right after prep if there's no %setup line
            patch_apply_index = array_index + 2
        elif line.startswith("%setup"):
            patch_apply_index = array_index + 2  # already added a line
        elif line.startswith("%autosetup"):
            patch_apply_index = -1  # autosetup will do this for us

        array_index += 1

    debug("patch_insert_index = %s" % patch_insert_index)
    debug("patch_apply_index = %s" % patch_apply_index)
    if patch_insert_index == 0 or patch_apply_index == 0:
        error_out("Unable to insert PatchX or %patchX lines in spec file")
    return (patch_number, patch_insert_index, patch_apply_index, lines)
def _build(self, branch):
    """ Submit a Mead build from current directory. """
    target_param = ""
    build_target = self._get_build_target_for_branch(branch)
    if build_target:
        target_param = "--target=%s" % build_target

    build_cmd = [self.cli_tool, "maven-chain", "--nowait"]

    if self.brew_target:
        build_cmd.append("--target=%s" % self.brew_target)

    build_cmd.append("--ini=%s" % (os.path.join(self.package_workdir, "mead.chain")))
    # NOTE(review): target_param can be the empty string (leaving a
    # double space after the join below), and both brew_target and the
    # branch build target can emit a --target flag -- confirm which one
    # is meant to win.
    build_cmd.append(target_param)

    if self.scratch:
        build_cmd.append("--scratch")

    build_cmd = " ".join(build_cmd)

    if self.dry_run:
        self.print_dry_run_warning(build_cmd)
        return

    info_out("Submitting build: %s" % build_cmd)
    (status, output) = getstatusoutput(build_cmd)
    if status > 0:
        if "already been built" in output:
            # A resubmitted tag is not fatal; keep going.
            warn_out("Build has been submitted previously, continuing...")
        else:
            error_out([
                "Unable to submit build.",
                " Status code: %s\n" % status,
                " Output: %s\n" % output,
            ])

    # Print the task ID and URL:
    for line in extract_task_info(output):
        print(line)
def _read_config(self):
    """
    Read global build.py configuration from the .tito dir of the git
    repository we're being run from.

    NOTE: We always load the latest config file, not tito.props as it was for
    the tag being operated on.
    """
    # List of filepaths to config files we'll be loading:
    rel_eng_dir = os.path.join(find_git_root(), tito_config_dir())
    filename = os.path.join(rel_eng_dir, TITO_PROPS)
    if not os.path.exists(filename):
        error_out("Unable to locate branch configuration: %s"
            "\nPlease run 'tito init'" % filename)

    # Load the global config. Later, when we know what tag/package we're
    # building, we may also load that and potentially override some global
    # settings.
    branch_config = RawConfigParser()
    branch_config.read(filename)

    self._check_legacy_globalconfig(branch_config)
    return branch_config
def _get_version_and_release(self):
    """
    Get the version and release from the builder.
    Sources are configured at this point.
    """
    # Assuming source0 is a tar.gz we can extract a version and possibly
    # release from:
    base_name = os.path.basename(self.sources[0])
    debug("Extracting version/release from: %s" % base_name)

    # usually a source tarball won't have a release, that is an RPM concept.
    # Don't forget dist!
    release = "1%{?dist}"

    # Example filename: tito-0.4.18.tar.gz:
    # Fix: raw string with escaped dots -- the old pattern's bare "."
    # matched any character before the suffix.
    simple_version_re = re.compile(r".*-(.*)\.(tar\.gz|tgz|zip|bz2)")
    match = re.search(simple_version_re, base_name)
    if match:
        version = match.group(1)
    else:
        error_out("Unable to determine version from file: %s" % base_name)

    return (version, release)
def _update_version_file(self, new_version):
    """
    land this new_version in the designated file
    and stages that file for a git commit
    """
    version_file = self._version_file_path()
    if not version_file:
        debug("No destination version file found, skipping.")
        return

    debug("Found version file to write: %s" % version_file)
    version_file_template = self._version_file_template()
    if version_file_template is None:
        error_out(
            "Version file specified but without corresponding template.")

    t = Template(version_file_template)
    # new_version has the form "<version>-<release>".
    (new_ver, new_rel) = new_version.split('-')
    # Fix: context manager guarantees the file is closed even if the
    # template substitution or write raises.
    with open(version_file, 'w') as f:
        f.write(t.safe_substitute(version=new_ver, release=new_rel))
    run_command("git add %s" % version_file)
def _update_version_file(self, new_version):
    """
    land this new_version in the designated file
    and stages that file for a git commit
    """
    version_file = self._version_file_path()
    if not version_file:
        debug("No destination version file found, skipping.")
        return

    debug("Found version file to write: %s" % version_file)
    version_file_template = self._version_file_template()
    if version_file_template is None:
        error_out("Version file specified but without corresponding template.")

    t = Template(version_file_template)
    # new_version has the form "<version>-<release>".
    (new_ver, new_rel) = new_version.split('-')
    # Fix: context manager guarantees the file is closed even if the
    # template substitution or write raises.
    with open(version_file, 'w') as f:
        f.write(t.safe_substitute(
            version=new_ver,
            release=new_rel))
    run_command("git add %s" % version_file)
def _setup_sources(self):
    """
    Fetch and unlock git-annex managed files so that real file contents
    (not annex symlinks) end up in the rpmbuild git copy.

    Exits via error_out if git-annex is not installed.
    """
    super(GitAnnexBuilder, self)._setup_sources()

    old_cwd = os.getcwd()
    os.chdir(os.path.join(old_cwd, self.relative_project_dir))

    # Check git-annex is available before trying to use it:
    (status, output) = getstatusoutput("which git-annex")
    if status != 0:
        msg = "Please run 'yum install git-annex' as root."
        error_out('%s' % msg)

    try:
        run_command("git-annex lock")
        annexed_files = run_command(
            "git-annex find --include='*'").splitlines()
        run_command("git-annex get")
        run_command("git-annex unlock")
        debug(" Annex files: %s" % annexed_files)

        for annex in annexed_files:
            debug("Copying unlocked file %s" % annex)
            # Replace the annex symlink copied earlier with the real
            # file content:
            os.remove(os.path.join(self.rpmbuild_gitcopy, annex))
            shutil.copy(annex, self.rpmbuild_gitcopy)
    finally:
        # Always restore the working directory, even if a git-annex
        # command or the copy fails.
        os.chdir(old_cwd)
def rpm(self):
    """
    Build an RPM.

    Runs 'rpmbuild -ba' on the spec file, records every file rpmbuild
    reports as written into self.artifacts, and remembers the SRPM
    location in self.srpm_location. Exits via error_out on build
    failure or unparseable rpmbuild output.
    """
    self._create_build_dirs()
    if not self.ran_tgz:
        self.tgz()

    define_dist = ""
    if self.dist:
        define_dist = "--define 'dist %s'" % self.dist

    rpmbuild_options = self.rpmbuild_options + self._scl_to_rpmbuild_option()

    # md5 digests keep the resulting packages installable on older
    # (RHEL 5 era) systems:
    cmd = ('rpmbuild --define "_source_filedigest_algorithm md5" '
           '--define "_binary_filedigest_algorithm md5" %s %s %s --clean '
           '-ba %s' % (rpmbuild_options,
                       self._get_rpmbuild_dir_options(),
                       define_dist, self.spec_file))
    debug(cmd)

    try:
        output = run_command_print(cmd)
    except (KeyboardInterrupt, SystemExit):
        print("")
        exit(1)
    except RunCommandException:
        err = sys.exc_info()[1]
        msg = str(err)
        if (re.search('Failed build dependencies', err.output)):
            msg = "Please run 'yum-builddep %s' as root." % \
                find_spec_file(self.relative_project_dir)
        error_out('%s' % msg)
    except Exception:
        err = sys.exc_info()[1]
        error_out('%s' % str(err))

    files_written = find_wrote_in_rpmbuild_output(output)
    # Expect at least an SRPM and one binary RPM:
    if len(files_written) < 2:
        error_out("Error parsing rpmbuild output")
    self.srpm_location = files_written[0]
    self.artifacts.extend(files_written)

    # Was a bare py2 'print' statement, which is a silent no-op under
    # py3; print("") emits the same blank line on both:
    print("")
    print("Successfully built: %s" % ' '.join(files_written))
def _bump_version(self, release=False, zstream=False, force=False):
    """
    Bump up the package version in the spec file.

    Set release to True to bump the package release instead. zstream
    bumps the zstream portion of the release. force writes
    self._use_version verbatim as the version and resets the release.

    Checks for the keep version option and if found, won't actually bump
    the version or release.

    Returns the new version-release string read back from the spec.
    """
    old_version = get_latest_tagged_version(self.project_name)
    if old_version is None:  # 'is None', never '== None'
        old_version = "untagged"
    if not self.keep_version:
        # Raw strings so \s is a real regex escape (py3 deprecates
        # unknown escapes in non-raw string literals):
        version_regex = re.compile(r"^(version:\s*)(.+)$", re.IGNORECASE)
        release_regex = re.compile(r"^(release:\s*)(.+)$", re.IGNORECASE)

        # Rewrite the spec line-by-line into a temp file, then move it
        # into place; 'with' guarantees both handles are closed:
        with open(self.spec_file, 'r') as in_f, \
                open(self.spec_file + ".new", 'w') as out_f:
            for line in in_f.readlines():
                if release:
                    match = re.match(release_regex, line)
                    if match:
                        line = "".join((match.group(1),
                                        increase_version(match.group(2)),
                                        "\n"))
                elif zstream:
                    match = re.match(release_regex, line)
                    if match:
                        line = "".join((match.group(1),
                                        increase_zstream(match.group(2)),
                                        "\n"))
                elif force:
                    # Forced version: use the caller-supplied version
                    # verbatim and reset the release.
                    match = re.match(version_regex, line)
                    if match:
                        line = "".join((match.group(1),
                                        self._use_version,
                                        "\n"))
                    match = re.match(release_regex, line)
                    if match:
                        line = "".join((match.group(1),
                                        reset_release(match.group(2)),
                                        "\n"))
                else:
                    # Default: bump the version and reset the release.
                    match = re.match(version_regex, line)
                    if match:
                        line = "".join((match.group(1),
                                        increase_version(match.group(2)),
                                        "\n"))
                    match = re.match(release_regex, line)
                    if match:
                        line = "".join((match.group(1),
                                        reset_release(match.group(2)),
                                        "\n"))
                out_f.write(line)
        shutil.move(self.spec_file + ".new", self.spec_file)

    # Read the result back out of the spec to confirm rpm can parse it:
    new_version = self._get_spec_version_and_release()
    if new_version.strip() == "":
        msg = "Error getting bumped package version, try: \n"
        msg = msg + " 'rpm -q --specfile %s'" % self.spec_file
        error_out(msg)
    print("Tagging new version of %s: %s -> %s" % (self.project_name,
        old_version, new_version))
    return new_version
def _git_user_confirm_commit(self, project_checkout):
    """
    Prompt user if they wish to proceed with commit.

    Shows the staged diff in project_checkout, asks for confirmation,
    commits and pushes the main branch (building unless no_build), then
    merges, pushes, and builds each remaining configured branch.
    Exits via sys.exit(1) if the user declines.
    """
    print("")
    text = "Running 'git diff' in: %s" % project_checkout
    print("#" * len(text))
    print(text)
    print("#" * len(text))
    print("")

    main_branch = self.git_branches[0]

    os.chdir(project_checkout)

    # Newer versions of git don't seem to want --cached here? Try both:
    (unused, diff_output) = getstatusoutput("git diff --cached")
    if diff_output.strip() == "":
        debug("git diff --cached returned nothing, falling back to git diff.")
        (unused, diff_output) = getstatusoutput("git diff")

    if diff_output.strip() == "":
        print("No changes in main branch, skipping commit for: %s" %
            main_branch)
    else:
        print(diff_output)
        print("")
        print("##### Please review the above diff #####")
        if not self._ask_yes_no("Do you wish to proceed with commit? [y/n] "):
            print("Fine, you're on your own!")
            self.cleanup()
            sys.exit(1)

        print("Proceeding with commit.")
        commit_msg_file = self._confirm_commit_msg(diff_output)
        cmd = '%s commit -F %s' % (self.cli_tool,
                commit_msg_file)
        debug("git commit command: %s" % cmd)
        # Was a bare py2 'print' statement (a no-op under py3):
        print("")
        if self.dry_run:
            self.print_dry_run_warning(cmd)
        else:
            print("Proceeding with commit.")
            os.chdir(self.package_workdir)
            run_command(cmd)

        os.unlink(commit_msg_file)

    cmd = self._push_command()
    if self.dry_run:
        self.print_dry_run_warning(cmd)
    else:
        # Push
        print(cmd)
        try:
            run_command(cmd)
        except RunCommandException as e:
            error_out("`%s` failed with: %s" % (cmd, e.output))

    if not self.no_build:
        self._build(main_branch)

    for branch in self.git_branches[1:]:
        info_out("Merging branch: '%s' -> '%s'" % (main_branch, branch))
        run_command("%s switch-branch %s" % (self.cli_tool, branch))
        self._merge(main_branch)

        cmd = "git push origin %s:%s" % (branch, branch)
        if self.dry_run:
            self.print_dry_run_warning(cmd)
        else:
            print(cmd)
            try:
                run_command(cmd)
            except RunCommandException as e:
                error_out("`%s` failed with: %s" % (cmd, e.output))

        if not self.no_build:
            self._build(branch)

    # Was a bare py2 'print' statement (a no-op under py3):
    print("")