Example #1
    def load_config(self, package_name, build_dir, tag):
        self.config = ConfigLoader(package_name, build_dir, tag).load()

        if self.config.has_option(BUILDCONFIG_SECTION,
                "offline"):
            self.options.offline = True

        # TODO: Not ideal:
        if self.options.debug:
            os.environ['DEBUG'] = "true"

        # Check if config defines a custom lib dir, if so we add it
        # to the python path allowing users to specify custom builders/taggers
        # in their config:
        if self.config.has_option(BUILDCONFIG_SECTION,
                "lib_dir"):
            lib_dir = self.config.get(BUILDCONFIG_SECTION,
                    "lib_dir")
            if lib_dir[0] != '/':
                # Looks like a relative path, assume from the git root:
                lib_dir = os.path.join(find_git_root(), lib_dir)

            if os.path.exists(lib_dir):
                sys.path.append(lib_dir)
                debug("Added lib dir to PYTHONPATH: %s" % lib_dir)
            else:
                warn_out("lib_dir specified but does not exist: %s" % lib_dir)
Example #2
    def run(self, options):
        """
        Perform the actions requested of the tagger.

        NOTE: this method may do nothing if the user requested no build actions
        be performed. (i.e. only release tagging, etc)
        """
        if options.tag_release:
            warn_out("--tag-release option no longer necessary,"
                " 'tito tag' will accomplish the same thing.")
        if options.no_auto_changelog:
            self._no_auto_changelog = True
        if options.accept_auto_changelog:
            self._accept_auto_changelog = True
        if options.auto_changelog_msg:
            self._new_changelog_msg = options.auto_changelog_msg
        if options.use_version:
            self._use_version = options.use_version
        if options.use_release:
            self._use_release = options.use_release
        if options.changelog:
            self._changelog = options.changelog

        self.check_tag_precondition()

        # Only two paths through the tagger module right now:
        if options.undo:
            self._undo()
        else:
            self._tag_release()
Example #3
    def _get_build_version(self):
        """
        Figure out the git tag and version-release we're building.
        """
        # Determine which package version we should build:
        build_version = None
        if self.build_tag:
            build_version = self.build_tag[len(self.project_name + "-"):]
        else:
            build_version = get_latest_tagged_version(self.project_name)
            if build_version is None:
                if not self.test:
                    error_out(["Unable to lookup latest package info.",
                            "Perhaps you need to tag first?"])
                warn_out("unable to lookup latest package "
                    "tag, building untagged test project")
                build_version = get_spec_version_and_release(self.start_dir,
                    find_spec_like_file(self.start_dir))
            self.build_tag = "%s-%s" % (self.project_name, build_version)

        self.spec_version = build_version.split('-')[-2]
        self.spec_release = build_version.split('-')[-1]
        if not self.test:
            check_tag_exists(self.build_tag, offline=self.offline)
        return build_version
Example #4
    def run(self, options):
        """
        Perform the actions requested of the tagger.

        NOTE: this method may do nothing if the user requested no build actions
        be performed. (i.e. only release tagging, etc)
        """
        if options.tag_release:
            warn_out("--tag-release option no longer necessary,"
                " 'tito tag' will accomplish the same thing.")
        if options.no_auto_changelog:
            self._no_auto_changelog = True
        if options.accept_auto_changelog:
            self._accept_auto_changelog = True
        if options.auto_changelog_msg:
            self._new_changelog_msg = options.auto_changelog_msg
        if options.use_version:
            self._use_version = options.use_version
        if options.changelog:
            self._changelog = options.changelog

        self.check_tag_precondition()

        # Only two paths through the tagger module right now:
        if options.undo:
            self._undo()
        else:
            self._tag_release()
Example #5
    def _build(self, branch):
        """ Submit a Fedora build from current directory. """
        target_param = ""
        scratch_param = ""
        build_target = self._get_build_target_for_branch(branch)
        if build_target:
            target_param = "--target %s" % build_target
        if self.scratch:
            scratch_param = "--scratch"

        build_cmd = "%s build --nowait %s %s" % (self.cli_tool, scratch_param, target_param)

        if self.dry_run:
            self.print_dry_run_warning(build_cmd)
            return

        info_out("Submitting build: %s" % build_cmd)
        (status, output) = getstatusoutput(build_cmd)
        if status > 0:
            if "already been built" in output:
                warn_out("Build has been submitted previously, continuing...")
            else:
                error_out([
                    "Unable to submit build."
                    "  Status code: %s\n" % status,
                    "  Output: %s\n" % output,
                ])

        # Print the task ID and URL:
        for line in extract_task_info(output):
            print(line)
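The loop at the end of this example depends on extract_task_info, which is not shown on this page. Assuming it simply filters the submission output down to the lines that identify the created Koji task, a hypothetical sketch would be:

    def extract_task_info(output):
        """
        Return the lines of the build-submission output that mention
        the created task (task ID and info URL).
        Hypothetical sketch; the real tito helper may differ.
        """
        task_lines = []
        for line in output.splitlines():
            if "Created task" in line or "Task info" in line:
                task_lines.append(line)
        return task_lines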
Example #6
    def _clear_package_metadata(self):
        """
        Remove all .tito/packages/ files that have a relative path
        matching the package we're tagging a new version of. Normally
        this just removes the previous package file but if we were
        renaming oldpackage to newpackage, this would git rm
        .tito/packages/oldpackage and add
        .tito/packages/spacewalk-newpackage.
        """
        metadata_dir = os.path.join(self.rel_eng_dir, "packages")
        for filename in os.listdir(metadata_dir):
            metadata_file = os.path.join(metadata_dir, filename)  # full path

            if os.path.isdir(metadata_file) or filename.startswith("."):
                continue

            temp_file = open(metadata_file, 'r')
            (version, relative_dir) = temp_file.readline().split(" ")
            relative_dir = relative_dir.strip()  # sometimes has a newline

            if relative_dir == self.relative_project_dir:
                debug("Found metadata for our prefix: %s" %
                        metadata_file)
                debug("   version: %s" % version)
                debug("   dir: %s" % relative_dir)
                if filename == self.project_name:
                    debug("Updating %s with new version." %
                            metadata_file)
                else:
                    warn_out("%s also references %s" % (filename, self.relative_project_dir))
                    print("Assuming package has been renamed and removing it.")
                    run_command("git rm %s" % metadata_file)
Example #7
    def _get_build_version(self):
        """
        Figure out the git tag and version-release we're building.
        """
        # Determine which package version we should build:
        build_version = None
        if self.build_tag:
            build_version = self.build_tag[len(self.project_name + "-"):]
        else:
            build_version = get_latest_tagged_version(self.project_name)
            if build_version is None:
                if not self.test:
                    error_out(["Unable to lookup latest package info.",
                            "Perhaps you need to tag first?"])
                warn_out("unable to lookup latest package "
                    "tag, building untagged test project")
                build_version = get_spec_version_and_release(self.start_dir,
                    find_spec_like_file(self.start_dir))
            self.build_tag = "%s-%s" % (self.project_name, build_version)

        self.spec_version = build_version.split('-')[-2]
        self.spec_release = build_version.split('-')[-1]
        if not self.test:
            check_tag_exists(self.build_tag, offline=self.offline)
        return build_version
Example #8
    def load_config(self, package_name, build_dir, tag):
        self.config = ConfigLoader(package_name, build_dir, tag).load()

        if self.config.has_option(BUILDCONFIG_SECTION,
                "offline"):
            self.options.offline = True

        # TODO: Not ideal:
        if self.options.debug:
            os.environ['DEBUG'] = "true"

        # Check if config defines a custom lib dir, if so we add it
        # to the python path allowing users to specify custom builders/taggers
        # in their config:
        if self.config.has_option(BUILDCONFIG_SECTION,
                "lib_dir"):
            lib_dir = self.config.get(BUILDCONFIG_SECTION,
                    "lib_dir")
            if lib_dir[0] != '/':
                # Looks like a relative path, assume from the git root:
                lib_dir = os.path.join(find_git_root(), lib_dir)

            if os.path.exists(lib_dir):
                sys.path.append(lib_dir)
                debug("Added lib dir to PYTHONPATH: %s" % lib_dir)
            else:
                warn_out("lib_dir specified but does not exist: %s" % lib_dir)
Example #9
    def _clear_package_metadata(self):
        """
        Remove all .tito/packages/ files that have a relative path
        matching the package we're tagging a new version of. Normally
        this just removes the previous package file but if we were
        renaming oldpackage to newpackage, this would git rm
        .tito/packages/oldpackage and add
        .tito/packages/spacewalk-newpackage.
        """
        metadata_dir = os.path.join(self.rel_eng_dir, "packages")
        for filename in os.listdir(metadata_dir):
            metadata_file = os.path.join(metadata_dir, filename)  # full path

            if os.path.isdir(metadata_file) or filename.startswith("."):
                continue

            temp_file = open(metadata_file, 'r')
            (version, relative_dir) = temp_file.readline().split(" ")
            relative_dir = relative_dir.strip()  # sometimes has a newline

            if relative_dir == self.relative_project_dir:
                debug("Found metadata for our prefix: %s" %
                        metadata_file)
                debug("   version: %s" % version)
                debug("   dir: %s" % relative_dir)
                if filename == self.project_name:
                    debug("Updating %s with new version." %
                            metadata_file)
                else:
                    warn_out("%s also references %s" % (filename, self.relative_project_dir))
                    print("Assuming package has been renamed and removing it.")
                    run_command("git rm %s" % metadata_file)
Example #10
    def _build(self, branch):
        """ Submit a Fedora build from current directory. """
        target_param = ""
        scratch_param = ""
        build_target = self._get_build_target_for_branch(branch)
        if build_target:
            target_param = "--target %s" % build_target
        if self.scratch:
            scratch_param = "--scratch"

        build_cmd = "%s build --nowait %s %s" % (self.cli_tool, scratch_param, target_param)

        if self.dry_run:
            self.print_dry_run_warning(build_cmd)
            return

        info_out("Submitting build: %s" % build_cmd)
        (status, output) = getstatusoutput(build_cmd)
        if status > 0:
            if "already been built" in output:
                warn_out("Build has been submitted previously, continuing...")
            else:
                error_out([
                    "Unable to submit build."
                    "  Status code: %s\n" % status,
                    "  Output: %s\n" % output,
                ])

        # Print the task ID and URL:
        for line in extract_task_info(output):
            print(line)
Example #11
    def cleanup(self):
        if not self.no_cleanup:
            debug("Cleaning up [%s]" % self.working_dir)
            run_command("rm -rf %s" % self.working_dir)

            if self.builder:
                self.builder.cleanup()
        else:
            warn_out("leaving %s (--no-cleanup)" % self.working_dir)
Example #12
 def cleanup(self):
     """
     Remove all temporary files and directories.
     """
     if not self.no_cleanup:
         debug("Cleaning up %s" % self.rpmbuild_dir)
         shutil.rmtree(self.rpmbuild_dir)
     else:
         warn_out("Leaving rpmbuild files in: %s" % self.rpmbuild_dir)
Example #13
    def cleanup(self):
        if not self.no_cleanup:
            debug("Cleaning up [%s]" % self.working_dir)
            run_command("rm -rf %s" % self.working_dir)

            if self.builder:
                self.builder.cleanup()
        else:
            warn_out("leaving %s (--no-cleanup)" % self.working_dir)
Example #14
 def cleanup(self):
     """
     Remove all temporary files and directories.
     """
     if not self.no_cleanup:
         debug("Cleaning up %s" % self.rpmbuild_dir)
         shutil.rmtree(self.rpmbuild_dir)
     else:
         warn_out("Leaving rpmbuild files in: %s" % self.rpmbuild_dir)
Example #15
 def cleanup(self):
     """
     Remove all temporary files and directories.
     """
     if not self.no_cleanup:
         for d in [self.rpmbuild_dir, self.deploy_dir, self.maven_clone_dir]:
             debug("Cleaning up %s" % d)
             shutil.rmtree(d)
     else:
         warn_out("Leaving rpmbuild files in: %s" % self.rpmbuild_dir)
Example #16
 def cleanup(self):
     """
     Remove all temporary files and directories.
     """
     if not self.no_cleanup:
         for d in [self.rpmbuild_dir, self.deploy_dir, self.maven_clone_dir]:
             debug("Cleaning up %s" % d)
             shutil.rmtree(d)
     else:
         warn_out("Leaving rpmbuild files in: %s" % self.rpmbuild_dir)
Example #17
 def _get_git_user_info(self):
     """ Return the user.name and user.email git config values. """
     try:
         name = run_command('git config --get user.name')
     except:
         warn_out('user.name in ~/.gitconfig not set.\n')
         name = 'Unknown name'
     try:
         email = run_command('git config --get user.email')
     except:
         warn_out('user.email in ~/.gitconfig not set.\n')
         email = None
     return (name, email)
Example #18
 def _get_git_user_info(self):
     """ Return the user.name and user.email git config values. """
     try:
         name = run_command('git config --get user.name')
     except:
         warn_out('user.name in ~/.gitconfig not set.\n')
         name = 'Unknown name'
     try:
         email = run_command('git config --get user.email')
     except:
         warn_out('user.email in ~/.gitconfig not set.\n')
         email = None
     return (name, email)
Example #19
    def __init__(self,
                 name=None,
                 tag=None,
                 build_dir=None,
                 config=None,
                 user_config=None,
                 target=None,
                 releaser_config=None,
                 no_cleanup=False,
                 test=False,
                 auto_accept=False,
                 prefix="temp_dir=",
                 **kwargs):

        if 'builder_args' in kwargs:
            kwargs['builder_args']['local'] = False

        DistGitReleaser.__init__(self, name, tag, build_dir, config,
                                 user_config, target, releaser_config,
                                 no_cleanup, test, auto_accept, **kwargs)

        self.mead_scm = self.releaser_config.get(self.target, "mead_scm")

        if self.releaser_config.has_option(self.target, "mead_push_url"):
            self.push_url = self.releaser_config.get(self.target,
                                                     "mead_push_url")
        else:
            self.push_url = self.mead_scm

        # rhpkg maven-build takes an optional override --target:
        self.brew_target = None
        if self.releaser_config.has_option(self.target, "target"):
            self.brew_target = self.releaser_config.get(self.target, "target")

        # If the push URL contains MEAD_SCM_USERNAME, we require the user to set this
        # in ~/.titorc before they can run this releaser. This allows us to
        # use push URLs that require username auth, but still check a generic
        # URL into source control:
        if MEAD_SCM_USERNAME in self.push_url:
            debug("Push URL contains %s, checking for value in ~/.titorc" %
                  MEAD_SCM_USERNAME)
            if MEAD_SCM_USERNAME in user_config:
                user = user_config[MEAD_SCM_USERNAME]
            else:
                user = getpass.getuser()
                warn_out(
                    "You should specify MEAD_SCM_USERNAME in '~/.titorc'.  Using %s for now"
                    % user)

            self.push_url = self.push_url.replace(MEAD_SCM_USERNAME, user)
Example #20
 def _legacy_builder_hack(self, releaser_config):
     """
     Support the old style koji builds when config is still in global
     tito.props, as opposed to the new releasers.conf.
     """
     # Handle koji:
     if self.config.has_section("koji") and not \
             releaser_config.has_section("koji"):
         warn_out("legacy 'koji' section in tito.props, please "
                 "consider creating a target in releasers.conf.")
         print("Simulating 'koji' release target for now.")
         releaser_config.add_section('koji')
         releaser_config.set('koji', 'releaser', 'tito.release.KojiReleaser')
         releaser_config.set('koji', 'autobuild_tags',
                 self.config.get('koji', 'autobuild_tags'))
Example #21
 def _legacy_builder_hack(self, releaser_config):
     """
     Support the old style koji builds when config is still in global
     tito.props, as opposed to the new releasers.conf.
     """
     # Handle koji:
     if self.config.has_section("koji") and not \
             releaser_config.has_section("koji"):
         warn_out("legacy 'koji' section in tito.props, please "
                 "consider creating a target in releasers.conf.")
         print("Simulating 'koji' release target for now.")
         releaser_config.add_section('koji')
         releaser_config.set('koji', 'releaser', 'tito.release.KojiReleaser')
         releaser_config.set('koji', 'autobuild_tags',
                 self.config.get('koji', 'autobuild_tags'))
Example #22
    def __init__(self, name=None, tag=None, build_dir=None,
            config=None, user_config=None,
            target=None, releaser_config=None, no_cleanup=False,
            test=False, auto_accept=False,
            prefix="temp_dir=", **kwargs):
        Releaser.__init__(self, name, tag, build_dir, config,
                user_config, target, releaser_config, no_cleanup, test,
                auto_accept, **kwargs)

        self.build_dir = build_dir
        self.prefix = prefix

        if self.releaser_config.has_option(self.target, "scl"):
            warn_out("please rename 'scl' to 'builder.scl' in releasers.conf")
            self.builder.scl = self.releaser_config.get(self.target, "scl")
Example #23
 def rsync_to_remote(self, rsync_args, temp_dir, rsync_location):
     print("rsync %s --delete %s/ %s" % (rsync_args, temp_dir, rsync_location))
     os.chdir(temp_dir)
     # TODO: configurable rsync options?
     cmd = "rsync %s --delete %s/ %s" % (rsync_args, temp_dir, rsync_location)
     if self.dry_run:
         self.print_dry_run_warning(cmd)
     else:
         output = run_command(cmd)
         debug(output)
     if not self.no_cleanup:
         debug("Cleaning up [%s]" % temp_dir)
         os.chdir("/")
         shutil.rmtree(temp_dir)
     else:
         warn_out("leaving %s (--no-cleanup)" % temp_dir)
Example #24
 def _merge(self, main_branch):
     try:
         run_command("git merge %s" % main_branch)
     except:
         print("")
         warn_out("Conflicts occurred during merge.")
         print("")
         print("You are being dropped to a shell in the working directory.")
         print("")
         print("Please resolve this by doing the following:")
         print("")
         print("  1. List the conflicting files: git ls-files --unmerged")
         print("  2. Edit each file to resolve the conflict, then: git add FILENAME")
         print("  3. Commit the result when you are done: git commit")
         print("  4. Return to the tito release: exit")
         print("")
         # TODO: maybe prompt y/n here
         os.system(os.environ['SHELL'])
Example #25
 def rsync_to_remote(self, rsync_args, temp_dir, rsync_location):
     print("rsync %s --delete %s/ %s" %
           (rsync_args, temp_dir, rsync_location))
     os.chdir(temp_dir)
     # TODO: configurable rsync options?
     cmd = "rsync %s --delete %s/ %s" % (rsync_args, temp_dir,
                                         rsync_location)
     if self.dry_run:
         self.print_dry_run_warning(cmd)
     else:
         output = run_command(cmd)
         debug(output)
     if not self.no_cleanup:
         debug("Cleaning up [%s]" % temp_dir)
         os.chdir("/")
         shutil.rmtree(temp_dir)
     else:
         warn_out("leaving %s (--no-cleanup)" % temp_dir)
Example #26
 def _merge(self, main_branch):
     try:
         run_command("git merge %s" % main_branch)
     except:
         print("")
         warn_out("Conflicts occurred during merge.")
         print("")
         print("You are being dropped to a shell in the working directory.")
         print("")
         print("Please resolve this by doing the following:")
         print("")
         print("  1. List the conflicting files: git ls-files --unmerged")
         print("  2. Edit each file to resolve the conflict, then: git add FILENAME")
         print("  3. Commit the result when you are done: git commit")
         print("  4. Return to the tito release: exit")
         print("")
         # TODO: maybe prompt y/n here
         os.system(os.environ['SHELL'])
Example #27
    def _build(self, branch):
        """ Submit a Mead build from current directory. """
        target_param = ""
        build_target = self._get_build_target_for_branch(branch)
        if build_target:
            target_param = "--target=%s" % build_target

        build_cmd = [self.cli_tool, "maven-chain", "--nowait"]

        if self.brew_target:
            build_cmd.append("--target=%s" % self.brew_target)

        build_cmd.append("--ini=%s" %
                         (os.path.join(self.package_workdir, "mead.chain")))
        build_cmd.append(target_param)

        if self.scratch:
            build_cmd.append("--scratch")

        build_cmd = " ".join(build_cmd)

        if self.dry_run:
            self.print_dry_run_warning(build_cmd)
            return

        info_out("Submitting build: %s" % build_cmd)
        (status, output) = getstatusoutput(build_cmd)
        if status > 0:
            if "already been built" in output:
                warn_out("Build has been submitted previously, continuing...")
            else:
                error_out([
                    "Unable to submit build.",
                    "  Status code: %s\n" % status,
                    "  Output: %s\n" % output,
                ])

        # Print the task ID and URL:
        for line in extract_task_info(output):
            print(line)
Example #28
    def __init__(self, name=None, tag=None, build_dir=None,
        config=None, user_config=None,
        target=None, releaser_config=None, no_cleanup=False,
        test=False, auto_accept=False,
        prefix="temp_dir=", **kwargs):

        if 'builder_args' in kwargs:
            kwargs['builder_args']['local'] = False

        DistGitReleaser.__init__(self, name, tag, build_dir, config,
                user_config, target, releaser_config, no_cleanup, test,
                auto_accept, **kwargs)

        self.mead_scm = self.releaser_config.get(self.target, "mead_scm")

        if self.releaser_config.has_option(self.target, "mead_push_url"):
            self.push_url = self.releaser_config.get(self.target, "mead_push_url")
        else:
            self.push_url = self.mead_scm

        # rhpkg maven-build takes an optional override --target:
        self.brew_target = None
        if self.releaser_config.has_option(self.target, "target"):
            self.brew_target = self.releaser_config.get(self.target, "target")

        # If the push URL contains MEAD_SCM_USERNAME, we require the user to set this
        # in ~/.titorc before they can run this releaser. This allows us to
        # use push URLs that require username auth, but still check a generic
        # URL into source control:
        if MEAD_SCM_USERNAME in self.push_url:
            debug("Push URL contains %s, checking for value in ~/.titorc" %
                MEAD_SCM_USERNAME)
            if MEAD_SCM_USERNAME in user_config:
                user = user_config[MEAD_SCM_USERNAME]
            else:
                user = getpass.getuser()
                warn_out("You should specify MEAD_SCM_USERNAME in '~/.titorc'.  Using %s for now" % user)

            self.push_url = self.push_url.replace(MEAD_SCM_USERNAME, user)
Example #29
    def _build(self, branch):
        """ Submit a Mead build from current directory. """
        target_param = ""
        build_target = self._get_build_target_for_branch(branch)
        if build_target:
            target_param = "--target=%s" % build_target

        build_cmd = [self.cli_tool, "maven-chain", "--nowait"]

        if self.brew_target:
            build_cmd.append("--target=%s" % self.brew_target)

        build_cmd.append("--ini=%s" % (os.path.join(self.package_workdir, "mead.chain")))
        build_cmd.append(target_param)

        if self.scratch:
            build_cmd.append("--scratch")

        build_cmd = " ".join(build_cmd)

        if self.dry_run:
            self.print_dry_run_warning(build_cmd)
            return

        info_out("Submitting build: %s" % build_cmd)
        (status, output) = getstatusoutput(build_cmd)
        if status > 0:
            if "already been built" in output:
                warn_out("Build has been submitted previously, continuing...")
            else:
                error_out([
                    "Unable to submit build.",
                    "  Status code: %s\n" % status,
                    "  Output: %s\n" % output,
                ])

        # Print the task ID and URL:
        for line in extract_task_info(output):
            print(line)
Example #30
    def __init__(self,
                 name=None,
                 tag=None,
                 build_dir=None,
                 config=None,
                 user_config=None,
                 target=None,
                 releaser_config=None,
                 no_cleanup=False,
                 test=False,
                 auto_accept=False,
                 prefix="temp_dir=",
                 **kwargs):
        Releaser.__init__(self, name, tag, build_dir, config, user_config,
                          target, releaser_config, no_cleanup, test,
                          auto_accept, **kwargs)

        self.build_dir = build_dir
        self.prefix = prefix

        if self.releaser_config.has_option(self.target, "scl"):
            warn_out("please rename 'scl' to 'builder.scl' in releasers.conf")
            self.builder.scl = self.releaser_config.get(self.target, "scl")
Example #31
 def _check_legacy_globalconfig(self, config):
     # globalconfig renamed to buildconfig for better overriding in per-package
     # tito.props. If we see globalconfig, automatically rename it after
     # loading and warn the user.
     if config.has_section('globalconfig'):
         if not config.has_section('buildconfig'):
             config.add_section('buildconfig')
         warn_out("Please rename [globalconfig] to [buildconfig] in "
                  "tito.props")
         for k, v in config.items('globalconfig'):
             if k == 'default_builder':
                 warn_out("please rename 'default_builder' to "
                          "'builder' in tito.props")
                 config.set('buildconfig', 'builder', v)
             elif k == 'default_tagger':
                 warn_out("please rename 'default_tagger' to "
                          "'tagger' in tito.props")
                 config.set('buildconfig', 'tagger', v)
             else:
                 config.set('buildconfig', k, v)
         config.remove_section('globalconfig')
Example #32
 def _check_legacy_globalconfig(self, config):
     # globalconfig renamed to buildconfig for better overriding in per-package
     # tito.props. If we see globalconfig, automatically rename it after
     # loading and warn the user.
     if config.has_section('globalconfig'):
         if not config.has_section('buildconfig'):
             config.add_section('buildconfig')
         warn_out("Please rename [globalconfig] to [buildconfig] in "
             "tito.props")
         for k, v in config.items('globalconfig'):
             if k == 'default_builder':
                 warn_out("please rename 'default_builder' to "
                     "'builder' in tito.props")
                 config.set('buildconfig', 'builder', v)
             elif k == 'default_tagger':
                 warn_out("please rename 'default_tagger' to "
                     "'tagger' in tito.props")
                 config.set('buildconfig', 'tagger', v)
             else:
                 config.set('buildconfig', k, v)
         config.remove_section('globalconfig')
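The effect of _check_legacy_globalconfig can be reproduced with a small standalone snippet: a config holding a legacy [globalconfig] section is rewritten into [buildconfig], with default_builder and default_tagger renamed along the way. This is an illustration using the standard library, not code from tito:

    from configparser import ConfigParser

    config = ConfigParser()
    config.add_section('globalconfig')
    config.set('globalconfig', 'default_builder', 'tito.builder.Builder')
    config.set('globalconfig', 'offline', 'true')

    # The same renaming the legacy hack performs:
    if config.has_section('globalconfig'):
        if not config.has_section('buildconfig'):
            config.add_section('buildconfig')
        for key, value in config.items('globalconfig'):
            new_key = {'default_builder': 'builder',
                       'default_tagger': 'tagger'}.get(key, key)
            config.set('buildconfig', new_key, value)
        config.remove_section('globalconfig')

    print(dict(config.items('buildconfig')))
    # {'builder': 'tito.builder.Builder', 'offline': 'true'}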
Example #33
    def _make_changelog(self):
        """
        Create a new changelog entry in the spec, with line items from git
        """
        if self._no_auto_changelog:
            debug("Skipping changelog generation.")
            return

        in_f = open(self.spec_file, 'r')
        out_f = open(self.spec_file + ".new", 'w')

        found_changelog = False
        for line in in_f.readlines():
            out_f.write(line)

            if not found_changelog and line.startswith("%changelog"):
                found_changelog = True

                old_version = get_latest_tagged_version(self.project_name)

                fd, name = tempfile.mkstemp()
                write(fd, "# Create your changelog entry below:\n")
                if self.git_email is None or (('HIDE_EMAIL' in self.user_config) and
                        (self.user_config['HIDE_EMAIL'] not in ['0', ''])):
                    header = "* %s %s\n" % (self.today, self.git_user)
                else:
                    header = "* %s %s <%s>\n" % (self.today, self.git_user,
                       self.git_email)

                write(fd, header)

                # don't die if this is a new package with no history
                if self._changelog is not None:
                    for entry in self._changelog:
                        if not entry.startswith('-'):
                            entry = '- ' + entry
                        write(fd, entry)
                        write(fd, "\n")
                else:
                    if old_version is not None:
                        last_tag = self._get_new_tag(old_version)
                        output = self._generate_default_changelog(last_tag)
                    else:
                        output = self._new_changelog_msg

                    for cmd_out in output.split("\n"):
                        write(fd, "- ")
                        write(fd, "\n  ".join(textwrap.wrap(cmd_out, 77)))
                        write(fd, "\n")

                write(fd, "\n")

                if not self._accept_auto_changelog:
                    # Give the user a chance to edit the generated changelog:
                    editor = 'vi'
                    if "EDITOR" in os.environ:
                        editor = os.environ["EDITOR"]
                    subprocess.call(editor.split() + [name])

                os.lseek(fd, 0, 0)
                f = os.fdopen(fd)

                for line in f.readlines():
                    if not line.startswith("#"):
                        out_f.write(line)

                output = f.read()

                f.close()
                os.unlink(name)

        if not found_changelog:
            warn_out("no %changelog section find in spec file. Changelog entry was not appended.")

        in_f.close()
        out_f.close()

        shutil.move(self.spec_file + ".new", self.spec_file)
Example #34
    def tgz(self):
        destination_file = os.path.join(self.rpmbuild_basedir, self.tgz_filename)
        formatted_properties = ["-D%s" % x for x in self.maven_properties]

        run_command("git clone --no-hardlinks %s %s" % (find_git_root(), self.maven_clone_dir))
        with chdir(self.maven_clone_dir):
            run_command("git checkout %s" % self.git_commit_id)

            try:
                info_out("Running Maven build...")
                # We always want to deploy to a tito controlled location during local builds
                local_properties = formatted_properties + [
                    "-DaltDeploymentRepository=local-output::default::file://%s" % self.deploy_dir]
                run_command("mvn %s %s deploy" % (
                    " ".join(self.maven_args),
                    " ".join(local_properties)))
            except RunCommandException as e:
                error_out("Maven build failed! %s" % e.output)

        self._create_build_dirs()

        full_path = self._find_tarball()
        if full_path:
            fh = gzip.open(full_path, 'rb')
            fixed_tar = os.path.join(os.path.splitext(full_path)[0])
            fixed_tar_fh = open(fixed_tar, 'wb')
            timestamp = get_commit_timestamp(self.git_commit_id)
            try:
                tarfixer = TarFixer(fh, fixed_tar_fh, timestamp, self.git_commit_id, maven_built=True)
                tarfixer.fix()
            finally:
                fixed_tar_fh.close()

            # It's a pity we can't use Python's gzip, but it doesn't offer an equivalent of -n
            run_command("gzip -n -c < %s > %s" % (fixed_tar, destination_file))
        else:
            warn_out([
                "No Maven generated tarball found.",
                "Please set up the assembly plugin in your pom.xml to generate a .tar.gz"])
            full_path = os.path.join(self.rpmbuild_sourcedir, self.tgz_filename)
            create_tgz(self.git_root, self.tgz_dir, self.git_commit_id, self.relative_project_dir, full_path)
            print("Creating %s from git tag: %s..." % (self.tgz_filename, self.build_tag))
            shutil.copy(full_path, destination_file)

        debug("Copying git source to: %s" % self.rpmbuild_gitcopy)
        shutil.copy(destination_file, self.rpmbuild_gitcopy)

        # Extract the source so we can get at the spec file, etc.
        with chdir(self.rpmbuild_gitcopy):
            run_command("tar --strip-components=1 -xvf %s" % os.path.join(self.rpmbuild_gitcopy, self.tgz_filename))

        if self.local_build:
            artifacts = {}
            all_artifacts = []
            all_artifacts_with_path = []

            for directory, unused, filenames in os.walk(self.deploy_dir):
                for f in filenames:
                    artifacts.setdefault(os.path.splitext(f)[1], []).append(f)
                dir_artifacts_with_path = [os.path.join(directory, f) for f in filenames]

                # Place the Maven artifacts in the SOURCES directory for rpmbuild to use
                for artifact in dir_artifacts_with_path:
                    shutil.copy(artifact, self.rpmbuild_sourcedir)

                dir_artifacts_with_path = map(lambda x: os.path.relpath(x, self.deploy_dir), dir_artifacts_with_path)
                all_artifacts_with_path.extend(dir_artifacts_with_path)
                all_artifacts.extend([os.path.basename(f) for f in filenames])

            cheetah_input = {
                'name': self.project_name,
                'version': self.spec_version,
                'release': self.spec_release,
                'epoch': None,  # TODO: May need to support this at some point
                'artifacts': artifacts,
                'all_artifacts': all_artifacts,
                'all_artifacts_with_path': all_artifacts_with_path,
            }
            debug("Cheetah input: %s" % cheetah_input)
            render_cheetah(find_cheetah_template_file(self.start_dir), self.rpmbuild_gitcopy, cheetah_input)
            self.spec_file_name = find_spec_file(self.rpmbuild_gitcopy)
        else:
            self.spec_file_name = find_cheetah_template_file(self.rpmbuild_gitcopy)

        # NOTE: The spec file we actually use is the one exported by git
        # archive into the temp build directory. This is done so we can
        # modify the version/release on the fly when building test rpms
        # that use a git SHA1 for their version.
        self.spec_file = os.path.join(self.rpmbuild_gitcopy, self.spec_file_name)

        info_out("Wrote: %s" % destination_file)
        self.sources.append(destination_file)
        self.artifacts.append(destination_file)
        self.ran_tgz = True
Example #35
    def __init__(self, name=None, tag=None, build_dir=None,
            config=None, user_config=None,
            args=None, **kwargs):

        """
        name - Package name that is being built.

        version - Version and release being built.

        tag - The git tag being built.

        build_dir - Temporary build directory where we can safely work.

        config - Merged configuration. (global plus package specific)

        user_config - User configuration from ~/.titorc.

        args - Optional arguments specific to each builder. Can be passed
        in explicitly by user on the CLI, or via a release target config
        entry. Only for things which vary on invocations of the builder,
        avoid using these if possible.  *Given in the format of a dictionary
        of lists.*
        """
        ConfigObject.__init__(self, config=config)
        BuilderBase.__init__(self, name=name, build_dir=build_dir, config=config,
                user_config=user_config, args=args, **kwargs)
        self.build_tag = tag

        self.build_version = self._get_build_version()

        if kwargs and 'options' in kwargs:
            warn_out("'options' no longer a supported builder constructor argument.")

        if self.config.has_section("requirements"):
            if self.config.has_option("requirements", "tito"):
                if loose_version(self.config.get("requirements", "tito")) > \
                        loose_version(require('tito')[0].version):
                    error_out([
                        "tito version %s or later is needed to build this project." %
                        self.config.get("requirements", "tito"),
                        "Your version: %s" % require('tito')[0].version
                    ])

        self.display_version = self._get_display_version()

        with chdir(find_git_root()):
            self.git_commit_id = get_build_commit(tag=self.build_tag,
                test=self.test)

        self.relative_project_dir = get_relative_project_dir(
            project_name=self.project_name, commit=self.git_commit_id)
        if self.relative_project_dir is None and self.test:
            warn_out(".tito/packages/%s doesn't exist "
                "in git, using current directory" % self.project_name)
            self.relative_project_dir = get_relative_project_dir_cwd(
                self.git_root)

        tgz_base = self._get_tgz_name_and_ver()
        self.tgz_filename = tgz_base + ".tar.gz"
        self.tgz_dir = tgz_base
        self.artifacts = []

        # A copy of the git code from commit we're building:
        self.rpmbuild_gitcopy = os.path.join(self.rpmbuild_sourcedir,
                self.tgz_dir)

        # Used to make sure we only modify the spec file for a test build
        # once. The srpm method may be called multiple times during koji
        # releases to create the proper disttags, but we only want to modify
        # the spec file once.
        self.ran_setup_test_specfile = False

        # NOTE: These are defined later when/if we actually dump a copy of the
        # project source at the tag we're building. Only then can we search for
        # a spec file.
        self.spec_file_name = None
        self.spec_file = None

        # Set to path to srpm once we build one.
        self.srpm_location = None
Example #36
    def _koji_release(self):
        """
        Lookup autobuild Koji tags from global config, create srpms with
        appropriate disttags, and submit builds to Koji.
        """
        koji_tags = self.autobuild_tags()
        print("Building release in %s..." % self.NAME)
        debug("%s tags: %s" % (self.NAME, koji_tags))

        koji_opts = self.DEFAULT_KOJI_OPTS
        if 'KOJI_OPTIONS' in self.builder.user_config:
            koji_opts = self.builder.user_config['KOJI_OPTIONS']

        if self.scratch or ('SCRATCH' in os.environ and os.environ['SCRATCH'] == '1'):
            koji_opts = ' '.join([koji_opts, '--scratch'])

        if self.profile:
            koji_opts = ' '.join(['--profile', self.profile, koji_opts])

        if self.conf_file:
            koji_opts = ' '.join(['--config', self.conf_file, koji_opts])

        # TODO: need to re-do this metaphor to use release targets instead:
        for koji_tag in koji_tags:
            if self.only_tags and koji_tag not in self.only_tags:
                continue
            scl = None
            if self.builder.config.has_option(koji_tag, "scl"):
                scl = self.builder.config.get(koji_tag, "scl")
            # Lookup the disttag configured for this Koji tag:
            if self.builder.config.has_option(koji_tag, "disttag"):
                disttag = self.builder.config.get(koji_tag, "disttag")
            else:
                disttag = ''
            if self.builder.config.has_option(koji_tag, "whitelist"):
                # whitelist implies only those packages can be built to the
                # tag, regardless of whether blacklist is also defined.
                if not self.__is_whitelisted(koji_tag, scl):
                    warn_out([
                        "%s not specified in whitelist for %s" % (self.project_name, koji_tag),
                        "   Package *NOT* submitted to %s." % self.NAME,
                    ])
                    continue
            elif self.__is_blacklisted(koji_tag, scl):
                warn_out([
                    "%s specified in blacklist for %s" % (self.project_name, koji_tag),
                    "   Package *NOT* submitted to %s." % self.NAME,
                ])
                continue

            # Getting tricky here, normally Builder's are only used to
            # create one rpm and then exit. Here we're going to try
            # to run multiple srpm builds:
            builder = self.builder
            if not self.skip_srpm:
                if scl:
                    builder = copy.copy(self.builder)
                    builder.scl = scl
                builder.srpm(dist=disttag)

            self._submit_build(self.executable, koji_opts, koji_tag, builder.srpm_location)
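This releaser also calls __is_whitelisted and __is_blacklisted, which are not included in these examples. Assuming the whitelist and blacklist options are whitespace-separated package names in the per-tag config section, minimal stand-ins might look like the following (the scl argument is ignored here, although the real helpers may use it to adjust the package name):

    def __is_whitelisted(self, koji_tag, scl):
        """ True if this package is named in the tag's whitelist (sketch). """
        return self.builder.config.has_option(koji_tag, "whitelist") and \
            self.project_name in \
            self.builder.config.get(koji_tag, "whitelist").strip().split()

    def __is_blacklisted(self, koji_tag, scl):
        """ True if this package is named in the tag's blacklist (sketch). """
        return self.builder.config.has_option(koji_tag, "blacklist") and \
            self.project_name in \
            self.builder.config.get(koji_tag, "blacklist").strip().split()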
Example #37
    def tgz(self):
        destination_file = os.path.join(self.rpmbuild_basedir, self.tgz_filename)
        formatted_properties = ["-D%s" % x for x in self.maven_properties]

        run_command("git clone --no-hardlinks %s %s" % (find_git_root(), self.maven_clone_dir))
        with chdir(self.maven_clone_dir):
            run_command("git checkout %s" % self.git_commit_id)

            try:
                info_out("Running Maven build...")
                # We always want to deploy to a tito controlled location during local builds
                local_properties = formatted_properties + [
                    "-DaltDeploymentRepository=local-output::default::file://%s" % self.deploy_dir]
                run_command("mvn %s %s deploy" % (
                    " ".join(self.maven_args),
                    " ".join(local_properties)))
            except RunCommandException as e:
                error_out("Maven build failed! %s" % e.output)

        self._create_build_dirs()

        full_path = self._find_tarball()
        if full_path:
            fh = gzip.open(full_path, 'rb')
            fixed_tar = os.path.join(os.path.splitext(full_path)[0])
            fixed_tar_fh = open(fixed_tar, 'wb')
            timestamp = get_commit_timestamp(self.git_commit_id)
            try:
                tarfixer = TarFixer(fh, fixed_tar_fh, timestamp, self.git_commit_id, maven_built=True)
                tarfixer.fix()
            finally:
                fixed_tar_fh.close()

            # It's a pity we can't use Python's gzip, but it doesn't offer an equivalent of -n
            run_command("gzip -n -c < %s > %s" % (fixed_tar, destination_file))
        else:
            warn_out([
                "No Maven generated tarball found.",
                "Please set up the assembly plugin in your pom.xml to generate a .tar.gz"])
            full_path = os.path.join(self.rpmbuild_sourcedir, self.tgz_filename)
            create_tgz(self.git_root, self.tgz_dir, self.git_commit_id, self.relative_project_dir, full_path)
            print("Creating %s from git tag: %s..." % (self.tgz_filename, self.build_tag))
            shutil.copy(full_path, destination_file)

        debug("Copying git source to: %s" % self.rpmbuild_gitcopy)
        shutil.copy(destination_file, self.rpmbuild_gitcopy)

        # Extract the source so we can get at the spec file, etc.
        with chdir(self.rpmbuild_gitcopy):
            run_command("tar --strip-components=1 -xvf %s" % os.path.join(self.rpmbuild_gitcopy, self.tgz_filename))

        if self.local_build:
            artifacts = {}
            all_artifacts = []
            all_artifacts_with_path = []

            for directory, unused, filenames in os.walk(self.deploy_dir):
                for f in filenames:
                    artifacts.setdefault(os.path.splitext(f)[1], []).append(f)
                dir_artifacts_with_path = [os.path.join(directory, f) for f in filenames]

                # Place the Maven artifacts in the SOURCES directory for rpmbuild to use
                for artifact in dir_artifacts_with_path:
                    shutil.copy(artifact, self.rpmbuild_sourcedir)

                dir_artifacts_with_path = map(lambda x: os.path.relpath(x, self.deploy_dir), dir_artifacts_with_path)
                all_artifacts_with_path.extend(dir_artifacts_with_path)
                all_artifacts.extend([os.path.basename(f) for f in filenames])

            cheetah_input = {
                'name': self.project_name,
                'version': self.spec_version,
                'release': self.spec_release,
                'epoch': None,  # TODO: May need to support this at some point
                'artifacts': artifacts,
                'all_artifacts': all_artifacts,
                'all_artifacts_with_path': all_artifacts_with_path,
            }
            debug("Cheetah input: %s" % cheetah_input)
            render_cheetah(find_cheetah_template_file(self.start_dir), self.rpmbuild_gitcopy, cheetah_input)
            self.spec_file_name = find_spec_file(self.rpmbuild_gitcopy)
        else:
            self.spec_file_name = find_cheetah_template_file(self.rpmbuild_gitcopy)

        # NOTE: The spec file we actually use is the one exported by git
        # archive into the temp build directory. This is done so we can
        # modify the version/release on the fly when building test rpms
        # that use a git SHA1 for their version.
        self.spec_file = os.path.join(self.rpmbuild_gitcopy, self.spec_file_name)

        info_out("Wrote: %s" % destination_file)
        self.sources.append(destination_file)
        self.artifacts.append(destination_file)
        self.ran_tgz = True
Example #38
 def print_dry_run_warning(self, command_that_would_be_run_otherwise):
     print("")
     warn_out("Skipping command due to --dry-run: %s" %
              command_that_would_be_run_otherwise)
     print("")
Example #39
 def print_dry_run_warning(self, command_that_would_be_run_otherwise):
     print("")
     warn_out("Skipping command due to --dry-run: %s" %
             command_that_would_be_run_otherwise)
     print("")
Example #40
    def __init__(self,
                 name=None,
                 tag=None,
                 build_dir=None,
                 config=None,
                 user_config=None,
                 args=None,
                 **kwargs):
        """
        name - Package name that is being built.

        version - Version and release being built.

        tag - The git tag being built.

        build_dir - Temporary build directory where we can safely work.

        config - Merged configuration. (global plus package specific)

        user_config - User configuration from ~/.titorc.

        args - Optional arguments specific to each builder. Can be passed
        in explicitly by user on the CLI, or via a release target config
        entry. Only for things which vary on invocations of the builder,
        avoid using these if possible.  *Given in the format of a dictionary
        of lists.*
        """
        ConfigObject.__init__(self, config=config)
        BuilderBase.__init__(self,
                             name=name,
                             build_dir=build_dir,
                             config=config,
                             user_config=user_config,
                             args=args,
                             **kwargs)
        self.build_tag = tag

        self.build_version = self._get_build_version()
        self.git_commit_id = get_build_commit(tag=self.build_tag, test=True)

        if kwargs and 'options' in kwargs:
            warn_out(
                "'options' no longer a supported builder constructor argument."
            )

        if self.config.has_option("requirements", "tito"):
            if loose_version(self.config.get("requirements", "tito")) > \
                    loose_version(require('tito')[0].version):
                error_out([
                    "tito version %s or later is needed to build this project."
                    % self.config.get("requirements", "tito"),
                    "Your version: %s" % require('tito')[0].version
                ])

        self.display_version = self._get_display_version()

        self.relative_project_dir = get_relative_project_dir_cwd(self.git_root)

        tgz_base = self._get_tgz_name_and_ver()
        self.tgz_filename = tgz_base + ".tar.gz"
        self.tgz_dir = tgz_base
        self.artifacts = []

        self.rpmbuild_gitcopy = os.path.join(self.rpmbuild_sourcedir,
                                             self.tgz_dir)

        # Used to make sure we only modify the spec file for a test build
        # once. The srpm method may be called multiple times during koji
        # releases to create the proper disttags, but we only want to modify
        # the spec file once.
        self.ran_setup_test_specfile = False

        # NOTE: These are defined later when/if we actually dump a copy of the
        # project source at the tag we're building. Only then can we search for
        # a spec file.
        self.spec_file_name = None
        self.spec_file = None

        # Set to path to srpm once we build one.
        self.srpm_location = None
Example #41
    def _make_changelog(self):
        """
        Create a new changelog entry in the spec, with line items from git
        """
        if self._no_auto_changelog:
            debug("Skipping changelog generation.")
            return

        in_f = open(self.spec_file, 'r')
        out_f = open(self.spec_file + ".new", 'w')

        found_changelog = False
        for line in in_f.readlines():
            out_f.write(line)

            if not found_changelog and line.startswith("%changelog"):
                found_changelog = True

                old_version = get_latest_tagged_version(self.project_name)

                fd, name = tempfile.mkstemp()
                write(fd, "# Create your changelog entry below:\n")
                if self.git_email is None or (('HIDE_EMAIL' in self.user_config) and
                        (self.user_config['HIDE_EMAIL'] not in ['0', ''])):
                    header = "* %s %s\n" % (self.today, self.git_user)
                else:
                    header = "* %s %s <%s>\n" % (self.today, self.git_user,
                       self.git_email)

                write(fd, header)

                # don't die if this is a new package with no history
                if self._changelog is not None:
                    for entry in self._changelog:
                        if not entry.startswith('-'):
                            entry = '- ' + entry
                        write(fd, entry)
                        write(fd, "\n")
                else:
                    if old_version is not None:
                        last_tag = "%s-%s" % (self.project_name, old_version)
                        output = self._generate_default_changelog(last_tag)
                    else:
                        output = self._new_changelog_msg

                    for cmd_out in output.split("\n"):
                        write(fd, "- ")
                        write(fd, "\n  ".join(textwrap.wrap(cmd_out, 77)))
                        write(fd, "\n")

                write(fd, "\n")

                if not self._accept_auto_changelog:
                    # Give the user a chance to edit the generated changelog:
                    editor = 'vi'
                    if "EDITOR" in os.environ:
                        editor = os.environ["EDITOR"]
                    subprocess.call(editor.split() + [name])

                os.lseek(fd, 0, 0)
                f = os.fdopen(fd)

                for line in f.readlines():
                    if not line.startswith("#"):
                        out_f.write(line)

                output = f.read()

                f.close()
                os.unlink(name)

        if not found_changelog:
            warn_out("no %changelog section find in spec file. Changelog entry was not appended.")

        in_f.close()
        out_f.close()

        shutil.move(self.spec_file + ".new", self.spec_file)
Example #42
    def _koji_release(self):
        """
        Lookup autobuild Koji tags from global config, create srpms with
        appropriate disttags, and submit builds to Koji.
        """
        koji_tags = self.autobuild_tags()
        print("Building release in %s..." % self.NAME)
        debug("%s tags: %s" % (self.NAME, koji_tags))

        koji_opts = self.DEFAULT_KOJI_OPTS
        if 'KOJI_OPTIONS' in self.builder.user_config:
            koji_opts = self.builder.user_config['KOJI_OPTIONS']

        if self.scratch or ('SCRATCH' in os.environ
                            and os.environ['SCRATCH'] == '1'):
            koji_opts = ' '.join([koji_opts, '--scratch'])

        if self.profile:
            koji_opts = ' '.join(['--profile', self.profile, koji_opts])

        if self.conf_file:
            koji_opts = ' '.join(['--config', self.conf_file, koji_opts])

        # TODO: need to re-do this metaphor to use release targets instead:
        for koji_tag in koji_tags:
            if self.only_tags and koji_tag not in self.only_tags:
                continue
            scl = None
            if self.builder.config.has_option(koji_tag, "scl"):
                scl = self.builder.config.get(koji_tag, "scl")
            # Lookup the disttag configured for this Koji tag:
            if self.builder.config.has_option(koji_tag, "disttag"):
                disttag = self.builder.config.get(koji_tag, "disttag")
            else:
                disttag = ''
            if self.builder.config.has_option(koji_tag, "whitelist"):
                # whitelist implies only those packages can be built to the
                # tag, regardless of whether blacklist is also defined.
                if not self.__is_whitelisted(koji_tag, scl):
                    warn_out([
                        "%s not specified in whitelist for %s" %
                        (self.project_name, koji_tag),
                        "   Package *NOT* submitted to %s." % self.NAME,
                    ])
                    continue
            elif self.__is_blacklisted(koji_tag, scl):
                warn_out([
                    "%s specified in blacklist for %s" %
                    (self.project_name, koji_tag),
                    "   Package *NOT* submitted to %s." % self.NAME,
                ])
                continue

            # Getting tricky here, normally Builder's are only used to
            # create one rpm and then exit. Here we're going to try
            # to run multiple srpm builds:
            builder = self.builder
            if not self.skip_srpm:
                if scl:
                    builder = copy.copy(self.builder)
                    builder.scl = scl
                builder.srpm(dist=disttag)

            self._submit_build(self.executable, koji_opts, koji_tag,
                               builder.srpm_location)