Example #1
    def patch_upstream(self):
        """ Create one patch per release """
        ch_dir = self.git_root
        if self.relative_project_dir != "/":
            ch_dir = os.path.join(self.git_root,
                    self.relative_project_dir)
        os.chdir(ch_dir)
        debug("Running /usr/bin/generate-patches.pl -d %s %s %s-1 %s %s"
               % (self.rpmbuild_gitcopy, self.project_name, self.upstream_version, self.build_version, self.git_commit_id))
        output = run_command("/usr/bin/generate-patches.pl -d %s %s %s-1 %s %s"
               % (self.rpmbuild_gitcopy, self.project_name, self.upstream_version, self.build_version, self.git_commit_id))
        self.patch_files = output.split("\n")
        for p_file in self.patch_files:
            (status, output) = getstatusoutput(
                "grep 'Binary files .* differ' %s/%s " % (self.rpmbuild_gitcopy, p_file))
            if status == 0 and output != "":
                error_out("You are doomed. Diff contains binary files. You can not use this builder")

            run_command("cp %s/%s %s" % (self.rpmbuild_gitcopy, p_file, self.rpmbuild_sourcedir))

        (patch_number, patch_insert_index, patch_apply_index, lines) = self._patch_upstream()

        for patch in self.patch_files:
            lines.insert(patch_insert_index, "Patch%s: %s\n" % (patch_number, patch))
            lines.insert(patch_apply_index, "%%patch%s -p1\n" % (patch_number))
            patch_number += 1
            patch_insert_index += 1
            patch_apply_index += 2
        self._write_spec(lines)
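Note: self._patch_upstream() above is a helper that is not part of this example; it scans the spec file and returns the next free patch number, the index at which to insert the new PatchN: definitions, the index at which to insert the %patch applications, and the spec's lines. A hypothetical sketch of such a helper follows (the spec-scanning rules and the self.spec_file attribute are assumptions, not tito's actual implementation):

    import re

    def _patch_upstream(self):
        # Hypothetical sketch: return (next_patch_number, insert_index, apply_index, spec_lines).
        with open(self.spec_file, 'r') as spec:
            lines = spec.readlines()

        patch_number = 0
        patch_insert_index = 0
        patch_apply_index = 0
        for i, line in enumerate(lines):
            if re.match(r"^Source\d*:", line, re.IGNORECASE):
                patch_insert_index = i + 1      # define new patches after the Source lines
            existing = re.match(r"^Patch(\d+):", line, re.IGNORECASE)
            if existing:
                patch_number = int(existing.group(1)) + 1
                patch_insert_index = i + 1      # ...or after any existing Patch lines
            if line.startswith("%setup"):
                patch_apply_index = i + 1       # apply new patches right after %setup
        return (patch_number, patch_insert_index, patch_apply_index, lines)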
Example #2
    def tag_new_version(project_path, new_version_release):
        """
        Find the line with the version number and change
        it to contain the new version.
        """
        file_name = "Cargo.toml"
        config_file = os.path.join(project_path, file_name)

        if not os.path.exists(config_file):
            debug('Cargo.toml file not found, this is probably not a Rust project')
            return

        debug("Found Cargo.toml file, attempting to update the version.")
        # We probably don't want version-release in config file as
        # release is an RPM concept
        new_version = new_version_release.split('-')[0]
        file_buffer = []

        # Read file line by line and replace version when found
        with open(config_file, 'r') as cfgfile:
            file_buffer = CargoBump.process_cargo_toml(cfgfile, new_version)

        # Write the buffer back into the file
        with open(config_file, 'w') as cfgfile:
            cfgfile.writelines(map(lambda x: x + "\n", file_buffer))

        # Add Cargo.toml into git index
        run_command("git add %s" % file_name)
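CargoBump.process_cargo_toml does the actual line rewriting but is not shown in this example. A minimal sketch of what it could look like, assuming the version sits on a version = "..." line in the [package] section (tito's real implementation may differ); it returns lines without trailing newlines, which is why the caller appends "\n" when writing the buffer back:

    import re

    def process_cargo_toml(file_handle, new_version):
        # Hypothetical sketch: return the file's lines with the [package] version replaced.
        version_re = re.compile(r'^(\s*version\s*=\s*)"[^"]*"')
        in_package = False
        lines = []
        for line in file_handle:
            line = line.rstrip("\n")
            if line.strip().startswith("["):
                in_package = (line.strip() == "[package]")
            if in_package:
                line = version_re.sub(r'\g<1>"%s"' % new_version, line)
            lines.append(line)
        return lines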
Example #3
File: obs.py Project: amitsaha/tito
    def _confirm_commit_msg(self, diff_output):
        """
        Generates a commit message in a temporary file, gives the user a
        chance to edit it, and returns the filename to the caller.
        """

        fd, name = tempfile.mkstemp()
        debug("Storing commit message in temp file: %s" % name)
        os.write(fd, "Update %s to %s\n" % (self.obs_package_name,
            self.builder.build_version))
        # Write out Resolves line for all bugzillas we see in commit diff:
        for line in extract_bzs(diff_output):
            os.write(fd, line + "\n")

        print("")
        print("##### Commit message: #####")
        print("")

        os.lseek(fd, 0, 0)
        commit_file = os.fdopen(fd)
        for line in commit_file.readlines():
            print(line)
        commit_file.close()

        print("")
        print("###############################")
        print("")
        if self._ask_yes_no("Would you like to edit this commit message? [y/n] ", False):
            debug("Opening editor for user to edit commit message in: %s" % name)
            editor = 'vi'
            if "EDITOR" in os.environ:
                editor = os.environ["EDITOR"]
            subprocess.call(editor.split() + [name])

        return name
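extract_bzs is one of tito's shared helpers: it pulls Bugzilla references out of the diff and formats them as Resolves: lines for the commit message. A rough, hypothetical approximation of that behavior (the real helper is more involved):

    import re

    def extract_bzs(diff_output):
        # Hypothetical sketch: collect "- 123456: summary" changelog additions from
        # the diff and emit "Resolves: #123456 - summary" lines.
        bz_re = re.compile(r"^\+- (\d+): (.+)$")
        resolves = []
        for line in diff_output.splitlines():
            match = bz_re.match(line)
            if match:
                resolves.append("Resolves: #%s - %s" % (match.group(1), match.group(2)))
        return resolves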
Example #4
    def load_config(self, package_name, build_dir, tag):
        self.config = ConfigLoader(package_name, build_dir, tag).load()

        if self.config.has_option(BUILDCONFIG_SECTION,
                "offline"):
            self.options.offline = True

        # TODO: Not ideal:
        if self.options.debug:
            os.environ['DEBUG'] = "true"

        # Check if config defines a custom lib dir, if so we add it
        # to the python path allowing users to specify custom builders/taggers
        # in their config:
        if self.config.has_option(BUILDCONFIG_SECTION,
                "lib_dir"):
            lib_dir = self.config.get(BUILDCONFIG_SECTION,
                    "lib_dir")
            if lib_dir[0] != '/':
                # Looks like a relative path, assume from the git root:
                lib_dir = os.path.join(find_git_root(), lib_dir)

            if os.path.exists(lib_dir):
                sys.path.append(lib_dir)
                debug("Added lib dir to PYTHONPATH: %s" % lib_dir)
            else:
                warn_out("lib_dir specified but does not exist: %s" % lib_dir)
Example #5
    def _update_setup_py(self, new_version):
        """
        If this project has a setup.py, attempt to update its version.
        """
        setup_file = os.path.join(self.full_project_dir, "setup.py")
        if not os.path.exists(setup_file):
            return

        debug("Found setup.py, attempting to update version.")

        # We probably don't want version-release in setup.py as release is
        # an rpm concept. Hopefully this assumption holds.
        py_new_version = new_version.split('-')[0]

        f = open(setup_file, 'r')
        buf = StringIO.StringIO()
        for line in f.readlines():
            buf.write(replace_version(line, py_new_version))
        f.close()

        # Write out the new setup.py file contents:
        f = open(setup_file, 'w')
        f.write(buf.getvalue())
        f.close()
        buf.close()

        run_command("git add %s" % setup_file)
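replace_version is another shared tito helper; it performs the per-line substitution. A hedged sketch, assuming setup.py assigns the version as a quoted version= keyword argument; lines that do not set a version pass through untouched:

    import re

    def replace_version(line, new_version):
        # Hypothetical sketch: swap the quoted value of a version= assignment.
        version_re = re.compile(r"(\bversion\s*=\s*['\"])[^'\"]*(['\"])", re.IGNORECASE)
        return version_re.sub(r"\g<1>%s\g<2>" % new_version, line)

With this sketch, a line such as version='1.2.3', becomes version='1.4.0', when new_version is 1.4.0.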
Example #6
    def _setup_sources(self):
        super(GitAnnexBuilder, self)._setup_sources()

        self.old_cwd = os.getcwd()
        os.chdir(os.path.join(self.old_cwd, self.relative_project_dir))

        # NOTE: 'which' may not be installed... (docker containers)
        (status, output) = getstatusoutput("which git-annex")
        if status != 0:
            msg = "Please run '%s' as root." % self.package_manager.install(["git-annex"])
            error_out('%s' % msg)

        run_command("git-annex lock")
        annexed_files = run_command("git-annex find --include='*'").splitlines()
        run_command("git-annex get")
        run_command("git-annex unlock")
        debug("  Annex files: %s" % annexed_files)

        for annex in annexed_files:
            debug("Copying unlocked file %s" % annex)
            os.remove(os.path.join(self.rpmbuild_gitcopy, annex))
            shutil.copy(annex, self.rpmbuild_gitcopy)

        self._lock()
        os.chdir(self.old_cwd)
Example #7
    def _setup_sources(self):
        super(GitAnnexBuilder, self)._setup_sources()

        old_cwd = os.getcwd()
        os.chdir(os.path.join(old_cwd, self.relative_project_dir))

        # NOTE: 'which' may not be installed... (docker containers)
        (status, output) = getstatusoutput("which git-annex")
        if status != 0:
            msg = "Please run '%s install git-annex' as root." % package_manager()
            error_out('%s' % msg)

        run_command("git-annex lock")
        annexed_files = run_command("git-annex find --include='*'").splitlines()
        run_command("git-annex get")
        run_command("git-annex unlock")
        debug("  Annex files: %s" % annexed_files)

        for annex in annexed_files:
            debug("Copying unlocked file %s" % annex)
            os.remove(os.path.join(self.rpmbuild_gitcopy, annex))
            shutil.copy(annex, self.rpmbuild_gitcopy)

        self._lock()
        os.chdir(old_cwd)
Example #8
    def _git_sync_files(self, project_checkout):
        """
        Copy files from our git into each git build branch and add them.

        A list of safe files is used to protect critical files both from
        being overwritten by a git file of the same name, as well as being
        deleted after.
        """

        # Build the list of all files we will copy:
        debug("Searching for files to copy to build system git:")
        files_to_copy = self._list_files_to_copy()

        os.chdir(project_checkout)

        new, copied, old =  \
                self._sync_files(files_to_copy, project_checkout)

        os.chdir(project_checkout)

        # Git add everything:
        for add_file in (new + copied):
            run_command("git add %s" % add_file)

        # Cleanup obsolete files:
        for cleanup_file in old:
            # Can't delete via full path, must not chdir:
            run_command("git rm %s" % cleanup_file)
Example #9
    def main(self, argv):
        BaseCliModule.main(self, argv)

        build_dir = os.path.normpath(os.path.abspath(self.options.output_dir))
        package_name = get_project_name(tag=None)

        self.load_config(package_name, build_dir, None)
        if self.config.has_option(BUILDCONFIG_SECTION, "block_tagging"):
            debug("block_tagging defined in tito.props")
            error_out("Tagging has been disabled in this git branch.")

        tagger_class = get_class_by_name(
            self.config.get(BUILDCONFIG_SECTION, DEFAULT_TAGGER))
        debug("Using tagger class: %s" % tagger_class)

        tagger = tagger_class(config=self.config,
                              user_config=self.user_config,
                              keep_version=self.options.keep_version,
                              offline=self.options.offline)

        try:
            return tagger.run(self.options)
        except TitoException:
            e = sys.exc_info()[1]
            error_out(e.message)
Example #10
    def patch_upstream(self):
        """ Create one patch per release """
        ch_dir = self.git_root
        if self.relative_project_dir != "/":
            ch_dir = os.path.join(self.git_root, self.relative_project_dir)
        os.chdir(ch_dir)
        debug("Running /usr/bin/generate-patches.pl -d %s %s %s-1 %s %s" \
               % (self.rpmbuild_gitcopy, self.project_name, self.upstream_version, self.build_version, self.git_commit_id))
        output = run_command("/usr/bin/generate-patches.pl -d %s %s %s-1 %s %s" \
               % (self.rpmbuild_gitcopy, self.project_name, self.upstream_version, self.build_version, self.git_commit_id))
        self.patch_files = output.split("\n")
        for p_file in self.patch_files:
            (status, output) = commands.getstatusoutput(
                "grep 'Binary files .* differ' %s/%s " %
                (self.rpmbuild_gitcopy, p_file))
            if status == 0 and output != "":
                error_out(
                    "You are doomed. Diff contains binary files. You can not use this builder"
                )

            run_command(
                "cp %s/%s %s" %
                (self.rpmbuild_gitcopy, p_file, self.rpmbuild_sourcedir))

        (patch_number, patch_insert_index, patch_apply_index,
         lines) = self._patch_upstream()

        for patch in self.patch_files:
            lines.insert(patch_insert_index,
                         "Patch%s: %s\n" % (patch_number, patch))
            lines.insert(patch_apply_index, "%%patch%s -p1\n" % (patch_number))
            patch_number += 1
            patch_insert_index += 1
            patch_apply_index += 2
        self._write_spec(lines)
Example #11
    def _fetch_local(self):
        source_dir = os.path.expanduser(self.builder.args['source_dir'][0])

        old_dir = os.getcwd()
        os.chdir(source_dir)

        gemspecs = glob.glob("./*.gemspec")
        if gemspecs and not gemspecs[0] == "./smart_proxy.gemspec":
            subprocess.call(["gem", "build", gemspecs[0]])
            sources = glob.glob("./*.gem")
        else:
            subprocess.call(
                ["/bin/bash", "-l", "-c", "rake pkg:generate_source"])
            sources = glob.glob("./pkg/*")

        fetchdir = os.path.join(self.builder.rpmbuild_sourcedir, 'archive')
        if not os.path.exists(fetchdir):
            os.mkdir(fetchdir)

        for srcfile in sources:
            debug("Copying %s from local source dir" % srcfile)
            shutil.move(srcfile,
                        os.path.join(fetchdir, os.path.basename(srcfile)))

        gitrev = "local"
        gitsha = subprocess.check_output(["git", "rev-parse", "HEAD"])
        if gitsha:
            gitrev = "git%s" % gitsha[0:7]

        os.chdir(old_dir)

        return gitrev
Example #12
    def _git_sync_files(self, project_checkout):
        """
        Copy files from our git into each git build branch and add them.

        A list of safe files is used to protect critical files both from
        being overwritten by a git file of the same name, as well as being
        deleted after.
        """

        # Build the list of all files we will copy:
        debug("Searching for files to copy to build system git:")
        files_to_copy = self._list_files_to_copy()

        os.chdir(project_checkout)

        new, copied, old =  \
                self._sync_files(files_to_copy, project_checkout)

        os.chdir(project_checkout)

        # Git add everything:
        for add_file in (new + copied):
            run_command("git add %s" % add_file)

        # Cleanup obsolete files:
        for cleanup_file in old:
            # Can't delete via full path, must not chdir:
            run_command("git rm %s" % cleanup_file)
Example #13
File: cli.py Project: jsabo/tito
    def main(self):
        BaseCliModule.main(self)

        if self.global_config.has_option(GLOBALCONFIG_SECTION,
                "block_tagging"):
            debug("block_tagging defined in tito.props")
            error_out("Tagging has been disabled in this git branch.")

        build_dir = os.path.normpath(os.path.abspath(self.options.output_dir))
        package_name = get_project_name(tag=None)

        self.pkg_config = self._read_project_config(package_name, build_dir,
                None, None)

        tagger_class = None
        if self.pkg_config.has_option("buildconfig", "tagger"):
            tagger_class = get_class_by_name(self.pkg_config.get("buildconfig",
                "tagger"))
        else:
            tagger_class = get_class_by_name(self.global_config.get(
                GLOBALCONFIG_SECTION, DEFAULT_TAGGER))
        debug("Using tagger class: %s" % tagger_class)

        tagger = tagger_class(global_config=self.global_config,
                keep_version=self.options.keep_version)
        tagger.run(self.options)
Example #14
    def _fetch_local(self):
        source_dir = os.path.expanduser(self.builder.args['source_dir'][0])

        old_dir = os.getcwd()
        os.chdir(source_dir)

        gemspecs = glob.glob("./*.gemspec")
        if gemspecs and not gemspecs[0] == "./smart_proxy.gemspec":
            subprocess.call(["gem", "build", gemspecs[0]])
            sources = glob.glob("./*.gem")
        else:
            subprocess.call(["/bin/bash", "-l", "-c", "rake pkg:generate_source"])
            sources = glob.glob("./pkg/*")

        fetchdir = os.path.join(self.builder.rpmbuild_sourcedir, 'archive')
        if not os.path.exists(fetchdir):
            os.mkdir(fetchdir)

        for srcfile in sources:
            debug("Copying %s from local source dir" % srcfile)
            shutil.move(srcfile, os.path.join(fetchdir, os.path.basename(srcfile)))

        gitrev = "local"
        gitsha = subprocess.check_output(["git", "rev-parse", "HEAD"])
        if gitsha:
            gitrev = "git%s" % gitsha[0:7]

        os.chdir(old_dir)

        return gitrev
Example #15
File: cli.py Project: jsabo/tito
    def _create_builder(self, package_name, build_tag, build_version, options,
            pkg_config, build_dir):
        """
        Create (but don't run) the builder class. Builder object may be
        used by other objects without actually having run() called.
        """

        builder_class = None
        if pkg_config.has_option("buildconfig", "builder"):
            builder_class = get_class_by_name(pkg_config.get("buildconfig",
                "builder"))
        else:
            builder_class = get_class_by_name(self.global_config.get(
                GLOBALCONFIG_SECTION, DEFAULT_BUILDER))
        debug("Using builder class: %s" % builder_class)

        # Instantiate the builder:
        builder = builder_class(
                name=package_name,
                version=build_version,
                tag=build_tag,
                build_dir=build_dir,
                pkg_config=pkg_config,
                global_config=self.global_config,
                user_config=self.user_config,
                dist=options.dist,
                test=options.test,
                offline=options.offline,
                auto_install=options.auto_install)
        return builder
Example #16
    def _update_setup_py_in_dir(self, new_version, package_dir=None):
        """
        If this subdir has a setup.py, attempt to update its version.
        (This is a very minor tweak to the original _update_setup_py method from VersionTagger.)
        """

        if package_dir is not None:
            full_package_dir = os.path.join(self.full_project_dir, package_dir)
        else:
            full_package_dir = self.full_project_dir

        setup_file = os.path.join(full_package_dir, "setup.py")
        if not os.path.exists(setup_file):
            return

        debug("Found setup.py in {}, attempting to update version.".format(package_dir))

        # We probably don't want version-release in setup.py as release is
        # an rpm concept. Hopefully this assumption holds.
        py_new_version = new_version.split('-')[0]

        f = open(setup_file, 'r')
        buf = six.StringIO()
        for line in f.readlines():
            buf.write(replace_version(line, py_new_version))
        f.close()

        # Write out the new setup.py file contents:
        f = open(setup_file, 'w')
        f.write(buf.getvalue())
        f.close()
        buf.close()

        run_command("git add %s" % setup_file)
Example #17
File: cli.py Project: xsuchy/tito
    def main(self, argv):
        (self.options, args) = self.parser.parse_args(argv)

        self._validate_options()

        if len(argv) < 1:
            print(self.parser.error("Must supply an argument. "
                "Try -h for help."))

        self.global_config = self._read_global_config()
        if self.global_config.has_option(GLOBALCONFIG_SECTION,
                "offline"):
            self.options.offline = True

        if self.options.debug:
            os.environ['DEBUG'] = "true"

        # Check if global config defines a custom lib dir:
        if self.global_config.has_option(GLOBALCONFIG_SECTION,
                "lib_dir"):
            lib_dir = self.global_config.get(GLOBALCONFIG_SECTION, 
                    "lib_dir")
            if lib_dir[0] != '/':
                # Looks like a relative path, assume from the git root:
                lib_dir = os.path.join(find_git_root(), lib_dir)

            if os.path.exists(lib_dir):
                sys.path.append(lib_dir)
                debug("Added lib dir to PYTHONPATH: %s" % lib_dir)
            else:
                print("WARNING: lib_dir specified but does not exist: %s" %
                        lib_dir)
Example #18
    def main(self, argv):
        BaseCliModule.main(self, argv)

        build_dir = os.path.normpath(os.path.abspath(self.options.output_dir))
        package_name = get_project_name(tag=None)

        self.load_config(package_name, build_dir, None)
        if self.config.has_option(BUILDCONFIG_SECTION,
                "block_tagging"):
            debug("block_tagging defined in tito.props")
            error_out("Tagging has been disabled in this git branch.")

        tagger_class = get_class_by_name(self.config.get(
            BUILDCONFIG_SECTION, DEFAULT_TAGGER))
        debug("Using tagger class: %s" % tagger_class)

        tagger = tagger_class(config=self.config,
                user_config=self.user_config,
                keep_version=self.options.keep_version,
                offline=self.options.offline)

        try:
            return tagger.run(self.options)
        except TitoException:
            e = sys.exc_info()[1]
            error_out(e.message)
Example #19
    def release(self, dry_run=False, no_build=False, scratch=False):
        self.dry_run = dry_run

        # Check if the releaser specifies a srpm disttag:
        srpm_disttag = None
        if self.releaser_config.has_option(self.target, "srpm_disttag"):
            srpm_disttag = self.releaser_config.get(self.target,
                                                    "srpm_disttag")
        self.builder.srpm(dist=srpm_disttag)

        if self.releaser_config.has_option(self.target, 'rsync_args'):
            self.rsync_args = self.releaser_config.get(self.target,
                                                       'rsync_args')

        rsync = self.releaser_config.get(self.target, 'rsync').split(" ")
        for destination in rsync:
            for artifact in self.builder.artifacts:
                if artifact.endswith('.src.rpm'):
                    cmd = "rsync %s %s %s" % (self.rsync_args, artifact,
                                              destination)
                    if self.dry_run:
                        self.print_dry_run_warning(cmd)
                    else:
                        output = run_command(cmd)
                        debug(output)
                os.remove(artifact)
Example #20
    def load_config(self, package_name, build_dir, tag):
        self.config = ConfigLoader(package_name, build_dir, tag).load()

        if self.config.has_option(BUILDCONFIG_SECTION,
                "offline"):
            self.options.offline = True

        # TODO: Not ideal:
        if self.options.debug:
            os.environ['DEBUG'] = "true"

        # Check if config defines a custom lib dir, if so we add it
        # to the python path allowing users to specify custom builders/taggers
        # in their config:
        if self.config.has_option(BUILDCONFIG_SECTION,
                "lib_dir"):
            lib_dir = self.config.get(BUILDCONFIG_SECTION,
                    "lib_dir")
            if lib_dir[0] != '/':
                # Looks like a relative path, assume from the git root:
                lib_dir = os.path.join(find_git_root(), lib_dir)

            if os.path.exists(lib_dir):
                sys.path.append(lib_dir)
                debug("Added lib dir to PYTHONPATH: %s" % lib_dir)
            else:
                warn_out("lib_dir specified but does not exist: %s" % lib_dir)
Example #21
    def _filter_bzs_with_flags(self):
        print("Checking flags on bugs: %s" % self.bzs)
        print("  required flags: %s" % self.required_flags)

        # TODO: Would be nice to load bugs in bulk here but for now we'll
        # keep it simple.
        filtered_bzs = []
        for bz_tuple in self.bzs:
            bug_id = bz_tuple[0]
            try:
                bug = self._load_bug(bug_id)
            except xmlrpclib.Fault:
                print("WARNING: Bug %s does not seem to exist." % bug_id)
                continue
            debug("Bug %s has flags: %s" % (bug_id, bug.flags))
            flags_missing = False
            for flag in self.required_flags:
                if bug.get_flag_status(flag[0:-1]) != flag[-1]:
                    print("WARNING: Bug %s missing required flag: %s" %
                          (bug_id, flag))
                    flags_missing = True
                    break
                else:
                    debug("Bug %s has required flag: %s" % (bug_id, flag))
            if not flags_missing:
                filtered_bzs.append(bz_tuple)
        return filtered_bzs
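Each required_flags entry packs the flag name and the wanted state into a single string, and the slicing above splits them apart. For example (the flag value is hypothetical):

    flag = "myos-1.0+"                      # hypothetical required_flags entry
    flag_name, wanted_state = flag[0:-1], flag[-1]
    # flag_name == "myos-1.0", wanted_state == "+"
    # the loop above then requires bug.get_flag_status("myos-1.0") == "+"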
Example #22
    def _update_setup_py_in_dir(self, new_version, package_dir=None):
        """
        If this subdir has a setup.py, attempt to update its version.
        (This is a very minor tweak to the original _update_setup_py method from VersionTagger.)
        """

        if package_dir is not None:
            full_package_dir = os.path.join(self.full_project_dir, package_dir)
        else:
            full_package_dir = self.full_project_dir

        setup_file = os.path.join(full_package_dir, "setup.py")
        if not os.path.exists(setup_file):
            return

        debug("Found setup.py in {}, attempting to update version.".format(
            package_dir))

        # We probably don't want version-release in setup.py as release is
        # an rpm concept. Hopefully this assumption holds.
        py_new_version = new_version.split('-')[0]

        f = open(setup_file, 'r')
        buf = six.StringIO()
        for line in f.readlines():
            buf.write(replace_version(line, py_new_version))
        f.close()

        # Write out the new setup.py file contents:
        f = open(setup_file, 'w')
        f.write(buf.getvalue())
        f.close()
        buf.close()

        run_command("git add %s" % setup_file)
Example #23
    def _update_setup_py(self, new_version):
        """
        If this project has a setup.py, attempt to update its version.
        """
        self._update_version_file(new_version)

        setup_file = os.path.join(self.full_project_dir, "setup.py")
        if not os.path.exists(setup_file):
            return

        debug("Found setup.py, attempting to update version.")

        # We probably don't want version-release in setup.py as release is
        # an rpm concept. Hopefully this assumption holds.
        py_new_version = new_version.split('-')[0]

        f = open(setup_file, 'r')
        buf = StringIO()
        for line in f.readlines():
            buf.write(replace_version(line, py_new_version))
        f.close()

        # Write out the new setup.py file contents:
        f = open(setup_file, 'w')
        f.write(buf.getvalue())
        f.close()
        buf.close()

        run_command("git add %s" % setup_file)
Example #24
    def __init__(self,
                 name=None,
                 build_dir=None,
                 config=None,
                 user_config=None,
                 args=None,
                 **kwargs):

        # Project directory where we started this build:
        self.start_dir = os.getcwd()

        self.project_name = name
        self.user_config = user_config
        self.args = args
        self.kwargs = kwargs
        self.config = config

        # Optional keyword arguments:
        self.dist = self._get_optional_arg(kwargs, 'dist', None)

        self.offline = self._get_optional_arg(kwargs, 'offline', False)
        self.auto_install = self._get_optional_arg(kwargs, 'auto_install',
                                                   False)
        self.scl = self._get_optional_arg(args, 'scl', None) or \
                self._get_optional_arg(kwargs, 'scl', '')

        self.rpmbuild_options = self._get_optional_arg(kwargs,
                                                       'rpmbuild_options',
                                                       None)
        if not self.rpmbuild_options:
            self.rpmbuild_options = ''

        self.test = self._get_optional_arg(kwargs, 'test', False)
        # Allow a builder arg to override the test setting passed in, used by
        # releasers in their config sections.
        if args and 'test' in args:
            self.test = True

        # Location where we do all tito work and store resulting rpms:
        self.rpmbuild_basedir = build_dir
        # Location where we do actual rpmbuilds
        self.rpmbuild_dir = mkdtemp(dir=self.rpmbuild_basedir,
                                    prefix="rpmbuild-%s" % self.project_name)
        debug("Building in temp dir: %s" % self.rpmbuild_dir)
        self.rpmbuild_sourcedir = os.path.join(self.rpmbuild_dir, "SOURCES")
        self.rpmbuild_builddir = os.path.join(self.rpmbuild_dir, "BUILD")

        self._check_required_args()

        # Set to true once we've created/setup sources: (i.e. tar.gz)
        self.ran_tgz = False

        self.no_cleanup = False

        # List of full path to all sources for this package.
        self.sources = []

        # Artifacts we built:
        self.artifacts = []
Example #25
File: main.py Project: awood/tito
 def process_packages(self, temp_dir):
     self.prune_other_versions(temp_dir)
     print("Refreshing yum repodata...")
     if self.releaser_config.has_option(self.target, 'createrepo_command'):
         self.createrepo_command = self.releaser_config.get(self.target, 'createrepo_command')
     os.chdir(temp_dir)
     output = run_command(self.createrepo_command)
     debug(output)
Example #26
    def __init__(self, name=None, build_dir=None,
            config=None, user_config=None,
            args=None, **kwargs):

        # Project directory where we started this build:
        self.start_dir = os.getcwd()

        self.project_name = name
        self.user_config = user_config
        self.args = args
        self.kwargs = kwargs
        self.config = config

        # Optional keyword arguments:
        self.dist = self._get_optional_arg(kwargs, 'dist', None)

        self.offline = self._get_optional_arg(kwargs, 'offline', False)
        self.auto_install = self._get_optional_arg(kwargs, 'auto_install',
                False)
        self.scl = self._get_optional_arg(args, 'scl', [None])[0] or \
                self._get_optional_arg(kwargs, 'scl', '')

        rpmbuildopts = self._get_optional_arg(args, 'rpmbuild_options', None)
        if rpmbuildopts:
            self.rpmbuild_options = ' '.join(rpmbuildopts)
        else:
            self.rpmbuild_options = self._get_optional_arg(kwargs, 'rpmbuild_options', '')

        self.test = self._get_optional_arg(kwargs, 'test', False)
        # Allow a builder arg to override the test setting passed in, used by
        # releasers in their config sections.
        if args and 'test' in args:
            self.test = True

        # Location where we do all tito work and store resulting rpms:
        self.rpmbuild_basedir = build_dir
        # Location where we do actual rpmbuilds
        self.rpmbuild_dir = mkdtemp(dir=self.rpmbuild_basedir,
            prefix="rpmbuild-%s" % self.project_name)
        debug("Building in temp dir: %s" % self.rpmbuild_dir)
        self.rpmbuild_sourcedir = os.path.join(self.rpmbuild_dir, "SOURCES")
        self.rpmbuild_builddir = os.path.join(self.rpmbuild_dir, "BUILD")

        self._check_required_args()

        # Set to true once we've created/setup sources: (i.e. tar.gz)
        self.ran_tgz = False

        self.no_cleanup = False

        # List of full path to all sources for this package.
        self.sources = []

        # Artifacts we built:
        self.artifacts = []

        # Use most suitable package manager for current OS
        self.package_manager = package_manager()
Example #27
    def fetch(self):
        if "source_dir" not in self.builder.args:
            raise Exception("Specify '--arg source_dir=...'")

        # Copy the live spec from our starting location. Unlike most builders,
        # we are not using a copy from a past git commit.
        self.spec_file = os.path.join(self.builder.rpmbuild_sourcedir,
                                      '%s.spec' % self.builder.project_name)
        shutil.copyfile(
            os.path.join(self.builder.start_dir,
                         '%s.spec' % self.builder.project_name),
            self.spec_file)

        gitrev = self._fetch_local()

        for s in os.listdir(self.builder.start_dir):
            if os.path.exists(os.path.join(self.builder.start_dir, s)):
                shutil.copyfile(
                    os.path.join(self.builder.start_dir, s),
                    os.path.join(self.builder.rpmbuild_sourcedir,
                                 os.path.basename(s)))
        print("  %s.spec" % self.builder.project_name)

        replacements = []
        src_files = run_command("find %s -type f" % os.path.join(
            self.builder.rpmbuild_sourcedir, 'archive')).split("\n")

        def filter_archives(path):
            base_name = os.path.basename(path)
            return ".tar" in base_name

        for i, s in enumerate(filter(filter_archives, src_files)):
            base_name = os.path.basename(s)
            debug("Downloaded file %s" % base_name)

            dest_filepath = os.path.join(self.builder.rpmbuild_sourcedir,
                                         base_name)
            shutil.move(s, dest_filepath)
            self.sources.append(dest_filepath)

            # Add a line to replace in the spec for each source:
            source_regex = re.compile("^(source%s:\s*)(.+)$" % i,
                                      re.IGNORECASE)
            new_line = "Source%s: %s\n" % (i, base_name)
            replacements.append((source_regex, new_line))

        # Replace version in spec:
        version_regex = re.compile("^(version:\s*)(.+)$", re.IGNORECASE)
        self.version = self._get_version()
        print("Building version: %s" % self.version)
        replacements.append((version_regex, "Version: %s\n" % self.version))
        self.replace_in_spec(replacements)

        rel_date = datetime.utcnow().strftime("%Y%m%d%H%M")
        self.release = rel_date + gitrev
        print("Building release: %s" % self.release)
        run_command("sed -i '/^Release:/ s/%%/.%s%%/' %s" %
                    (self.release, self.spec_file))
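The closing sed call splices the computed release into the spec's Release tag just before the %{?dist} macro; the doubled %% is only there to survive Python's %-formatting. A small illustration with hypothetical values:

    release = "202401151200git1234abc"      # hypothetical rel_date + gitrev
    cmd = "sed -i '/^Release:/ s/%%/.%s%%/' %s" % (release, "foo.spec")
    # cmd == "sed -i '/^Release:/ s/%/.202401151200git1234abc%/' foo.spec"
    # which rewrites "Release: 1%{?dist}" to "Release: 1.202401151200git1234abc%{?dist}"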
Example #28
 def tgz(self):
     print('Fetching third-party tarballs')
     run_command('make -C third-party tarballs')
     debug('Copying third-party tarballs')
     for line in open('third-party/tarballs'):
         tarball = line.strip()
         shutil.copy(os.path.join('third-party', tarball), self.rpmbuild_sourcedir)
         self.sources.append(tarball)
     return super(RestraintBuilder, self).tgz()
Example #29
 def cleanup(self):
     """
     Remove all temporary files and directories.
     """
     if not self.no_cleanup:
         debug("Cleaning up %s" % self.rpmbuild_dir)
         shutil.rmtree(self.rpmbuild_dir)
     else:
         warn_out("Leaving rpmbuild files in: %s" % self.rpmbuild_dir)
Example #30
 def process_packages(self, temp_dir):
     self.prune_other_versions(temp_dir)
     print("Refreshing yum repodata...")
     if self.releaser_config.has_option(self.target, 'createrepo_command'):
         self.createrepo_command = self.releaser_config.get(
             self.target, 'createrepo_command')
     os.chdir(temp_dir)
     output = run_command(self.createrepo_command)
     debug(output)
Example #31
    def cleanup(self):
        if not self.no_cleanup:
            debug("Cleaning up [%s]" % self.working_dir)
            run_command("rm -rf %s" % self.working_dir)

            if self.builder:
                self.builder.cleanup()
        else:
            print("WARNING: leaving %s (--no-cleanup)" % self.working_dir)
Example #32
File: main.py Project: awood/tito
    def cleanup(self):
        if not self.no_cleanup:
            debug("Cleaning up [%s]" % self.working_dir)
            run_command("rm -rf %s" % self.working_dir)

            if self.builder:
                self.builder.cleanup()
        else:
            warn_out("leaving %s (--no-cleanup)" % self.working_dir)
Example #33
 def cleanup(self):
     """
     Remove all temporary files and directories.
     """
     if not self.no_cleanup:
         debug("Cleaning up %s" % self.rpmbuild_dir)
         shutil.rmtree(self.rpmbuild_dir)
     else:
         warn_out("Leaving rpmbuild files in: %s" % self.rpmbuild_dir)
Example #34
    def fetch(self):
        if "jenkins_job" in self.builder.args:
            gitrev = self._fetch_jenkins()
        elif "source_dir" in self.builder.args:
            gitrev = self._fetch_local()
        else:
            raise Exception("Specify either '--arg jenkins_job=...' or '--arg source_dir=...'")

        # Copy the live spec from our starting location. Unlike most builders,
        # we are not using a copy from a past git commit.
        self.spec_file = os.path.join(self.builder.rpmbuild_sourcedir,
                    '%s.spec' % self.builder.project_name)
        shutil.copyfile(
            os.path.join(self.builder.start_dir, '%s.spec' %
                self.builder.project_name),
            self.spec_file)
        for s in os.listdir(self.builder.start_dir):
            if os.path.exists(os.path.join(self.builder.start_dir, s)):
                shutil.copyfile(
                    os.path.join(self.builder.start_dir, s),
                    os.path.join(self.builder.rpmbuild_sourcedir, os.path.basename(s)))
        print("  %s.spec" % self.builder.project_name)

        i = 0
        replacements = []
        src_files = run_command("find %s -type f" %
              os.path.join(self.builder.rpmbuild_sourcedir, 'archive')).split("\n")
        for s in src_files:
            base_name = os.path.basename(s)
            debug("Downloaded file %s" % base_name)
            if ".tar" not in base_name and ".gem" not in base_name:
                debug("Skipping %s as it isn't a source archive" % base_name)
                continue

            dest_filepath = os.path.join(self.builder.rpmbuild_sourcedir,
                    base_name)
            shutil.move(s, dest_filepath)
            self.sources.append(dest_filepath)

            # Add a line to replace in the spec for each source:
            source_regex = re.compile("^(source%s:\s*)(.+)$" % i, re.IGNORECASE)
            new_line = "Source%s: %s\n" % (i, base_name)
            replacements.append((source_regex, new_line))
            i += 1

        # Replace version in spec:
        version_regex = re.compile("^(version:\s*)(.+)$", re.IGNORECASE)
        self.version = self._get_version()
        print("Building version: %s" % self.version)
        replacements.append((version_regex, "Version: %s\n" % self.version))
        self.replace_in_spec(replacements)

        rel_date = datetime.utcnow().strftime("%Y%m%d%H%M")
        self.release = rel_date + gitrev
        print("Building release: %s" % self.release)
        run_command("sed -i '/^Release:/ s/%%/.%s%%/' %s" % (self.release, self.spec_file))
Example #35
 def tgz(self):
     print('Fetching third-party tarballs')
     run_command('make -C third-party tarballs')
     debug('Copying third-party tarballs')
     for line in open('third-party/tarballs'):
         tarball = line.strip()
         shutil.copy(os.path.join('third-party', tarball),
                     self.rpmbuild_sourcedir)
         self.sources.append(tarball)
     return super(RestraintBuilder, self).tgz()
Example #36
 def cleanup(self):
     """
     Remove all temporary files and directories.
     """
     if not self.no_cleanup:
         os.chdir('/')
         debug("Cleaning up [%s]" % self.rpmbuild_dir)
         getoutput("rm -rf %s" % self.rpmbuild_dir)
     else:
         print("WARNING: Leaving rpmbuild files in: %s" % self.rpmbuild_dir)
Example #37
 def cleanup(self):
     """
     Remove all temporary files and directories.
     """
     if not self.no_cleanup:
         os.chdir('/')
         debug("Cleaning up [%s]" % self.rpmbuild_dir)
         getoutput("rm -rf %s" % self.rpmbuild_dir)
     else:
         print("WARNING: Leaving rpmbuild files in: %s" % self.rpmbuild_dir)
Example #38
 def cleanup(self):
     """
     Remove all temporary files and directories.
     """
     if not self.no_cleanup:
         for d in [self.rpmbuild_dir, self.deploy_dir, self.maven_clone_dir]:
             debug("Cleaning up %s" % d)
             shutil.rmtree(d)
     else:
         warn_out("Leaving rpmbuild files in: %s" % self.rpmbuild_dir)
Example #39
 def cleanup(self):
     """
     Remove all temporary files and directories.
     """
     if not self.no_cleanup:
         for d in [self.rpmbuild_dir, self.deploy_dir, self.maven_clone_dir]:
             debug("Cleaning up %s" % d)
             shutil.rmtree(d)
     else:
         warn_out("Leaving rpmbuild files in: %s" % self.rpmbuild_dir)
Example #40
    def _koji_release(self):
        """
        Lookup autobuild Koji tags from global config, create srpms with
        appropriate disttags, and submit builds to Koji.
        """
        koji_tags = self.autobuild_tags()
        print("Building release in %s..." % self.NAME)
        debug("%s tags: %s" % (self.NAME, koji_tags))

        koji_opts = DEFAULT_KOJI_OPTS
        if 'KOJI_OPTIONS' in self.builder.user_config:
            koji_opts = self.builder.user_config['KOJI_OPTIONS']

        if self.scratch or ('SCRATCH' in os.environ
                            and os.environ['SCRATCH'] == '1'):
            koji_opts = ' '.join([koji_opts, '--scratch'])

        # TODO: need to re-do this metaphor to use release targets instead:
        for koji_tag in koji_tags:
            if self.only_tags and koji_tag not in self.only_tags:
                continue
            scl = None
            if self.builder.config.has_option(koji_tag, "scl"):
                scl = self.builder.config.get(koji_tag, "scl")
            # Lookup the disttag configured for this Koji tag:
            if self.builder.config.has_option(koji_tag, "disttag"):
                disttag = self.builder.config.get(koji_tag, "disttag")
            else:
                disttag = ''
            if self.builder.config.has_option(koji_tag, "whitelist"):
                # whitelist implies only those packages can be built to the
                # tag, regardless of whether blacklist is also defined.
                if not self.__is_whitelisted(koji_tag, scl):
                    print("WARNING: %s not specified in whitelist for %s" %
                          (self.project_name, koji_tag))
                    print("   Package *NOT* submitted to %s." % self.NAME)
                    continue
            elif self.__is_blacklisted(koji_tag, scl):
                print("WARNING: %s specified in blacklist for %s" %
                      (self.project_name, koji_tag))
                print("   Package *NOT* submitted to %s." % self.NAME)
                continue

            # Getting tricky here, normally Builder's are only used to
            # create one rpm and then exit. Here we're going to try
            # to run multiple srpm builds:
            builder = self.builder
            if not self.skip_srpm:
                if scl:
                    builder = copy.copy(self.builder)
                    builder.scl = scl
                builder.srpm(dist=disttag)

            self._submit_build("koji", koji_opts, koji_tag,
                               builder.srpm_location)
Example #41
    def _koji_release(self):
        """
        Lookup autobuild Koji tags from global config, create srpms with
        appropriate disttags, and submit builds to Koji.
        """
        koji_tags = self.autobuild_tags()
        print("Building release in %s..." % self.NAME)
        debug("%s tags: %s" % (self.NAME, koji_tags))

        koji_opts = DEFAULT_KOJI_OPTS
        if 'KOJI_OPTIONS' in self.builder.user_config:
            koji_opts = self.builder.user_config['KOJI_OPTIONS']

        if self.scratch or ('SCRATCH' in os.environ and os.environ['SCRATCH'] == '1'):
            koji_opts = ' '.join([koji_opts, '--scratch'])

        # TODO: need to re-do this metaphor to use release targets instead:
        for koji_tag in koji_tags:
            if self.only_tags and koji_tag not in self.only_tags:
                continue
            scl = None
            if self.builder.config.has_option(koji_tag, "scl"):
                scl = self.builder.config.get(koji_tag, "scl")
            # Lookup the disttag configured for this Koji tag:
            if self.builder.config.has_option(koji_tag, "disttag"):
                disttag = self.builder.config.get(koji_tag, "disttag")
            else:
                disttag = ''
            if self.builder.config.has_option(koji_tag, "whitelist"):
                # whitelist implies only those packages can be built to the
                # tag, regardless of whether blacklist is also defined.
                if not self.__is_whitelisted(koji_tag, scl):
                    print("WARNING: %s not specified in whitelist for %s" % (
                        self.project_name, koji_tag))
                    print("   Package *NOT* submitted to %s." % self.NAME)
                    continue
            elif self.__is_blacklisted(koji_tag, scl):
                print("WARNING: %s specified in blacklist for %s" % (
                    self.project_name, koji_tag))
                print("   Package *NOT* submitted to %s." % self.NAME)
                continue

            # Getting tricky here, normally Builder's are only used to
            # create one rpm and then exit. Here we're going to try
            # to run multiple srpm builds:
            builder = self.builder
            if not self.skip_srpm:
                if scl:
                    builder = copy.copy(self.builder)
                    builder.scl = scl
                builder.srpm(dist=disttag)

            self._submit_build("koji", koji_opts, koji_tag, builder.srpm_location)
Example #42
    def tgz(self):
        """ Override parent behavior, we already have a tgz. """
        # TODO: Does it make sense to allow user to create a tgz for this type
        # of project?
        self._setup_sources()
        self.ran_tgz = True

        debug("Scanning for sources.")
        cmd = "/usr/bin/spectool --list-files '%s' | awk '{print $2}' |xargs -l1 --no-run-if-empty basename " % self.spec_file
        result = run_command(cmd)
        self.sources = map(lambda x: os.path.join(self.rpmbuild_gitcopy, x), result.split("\n"))
        debug("  Sources: %s" % self.sources)
Example #43
    def tgz(self):
        """ Override parent behavior, we already have a tgz. """
        # TODO: Does it make sense to allow user to create a tgz for this type
        # of project?
        self._setup_sources()
        self.ran_tgz = True

        debug("Scanning for sources.")
        cmd = "/usr/bin/spectool --list-files '%s' | awk '{print $2}' |xargs -l1 --no-run-if-empty basename " % self.spec_file
        result = run_command(cmd)
        self.sources = map(lambda x: os.path.join(self.rpmbuild_gitcopy, x), result.split("\n"))
        debug("  Sources: %s" % self.sources)
Example #44
 def _tag_release(self):
     """
     Tag a new version of the package. (i.e. x.y.z+1)
     """
     self._make_changelog()
     new_version = self._bump_version()
     self._check_tag_does_not_exist(self._get_new_tag(new_version))
     self._update_changelog(new_version)
     self._update_setup_py(new_version)
     debug("Trying to run Cargo update")
     self._update_cargo_toml(new_version)
     self._update_pom_xml(new_version)
     self._update_package_metadata(new_version)
Example #45
    def run_git_archive(self, relative_git_dir, prefix, commit, dest_tar, subdir):
        # command to generate a git-archive
        git_archive_cmd = 'git archive --format=tar --prefix=%s/ %s:%s --output=%s' % (
            prefix, commit, relative_git_dir, dest_tar)

        with chdir(subdir) as p:
            run_command(git_archive_cmd)

            # Run git-archive separately if --debug was specified.
            # This allows us to detect failure early.
            # On git < 1.7.4-rc0, `git archive ... commit:./` fails!
            debug('git-archive fails if relative dir is not in git tree',
                  '%s > /dev/null' % git_archive_cmd)
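The chdir used in the with statement above is a context manager from tito's own utilities: it switches into subdir and restores the previous working directory on exit. A minimal equivalent sketch:

    import os
    from contextlib import contextmanager

    @contextmanager
    def chdir(path):
        # Temporarily change the working directory, restoring it afterwards.
        previous = os.getcwd()
        os.chdir(path)
        try:
            yield path
        finally:
            os.chdir(previous)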
Example #46
 def _setup_test_specfile(self):
     """ Override parent behavior. """
     if self.test:
         # If making a test rpm we need to get a little crazy with the spec
         # file we're building off. (note that this is a temp copy of the
         # spec) Swap out the actual release for one that includes the git
         # SHA1 we're building for our test package:
         debug("setup_test_specfile:commit_count = %s" % str(self.commit_count))
         munge_specfile(
             self.spec_file,
             self.git_commit_id[:7],
             self.commit_count
         )
Example #47
 def _setup_test_specfile(self):
     """ Override parent behavior. """
     if self.test:
         # If making a test rpm we need to get a little crazy with the spec
         # file we're building off. (note that this is a temp copy of the
         # spec) Swap out the actual release for one that includes the git
         # SHA1 we're building for our test package:
         debug("setup_test_specfile:commit_count = %s" % str(self.commit_count))
         munge_specfile(
             self.spec_file,
             self.git_commit_id[:7],
             self.commit_count
         )
Example #48
 def __init__(self, config=None, keep_version=False, offline=False, user_config=None):
     VersionTagger.__init__(self, config=config)
     self.gemspec_file_name = find_file_with_extension(suffix=".gemspec")
     new_version = subprocess.check_output(
         ["ruby", "-e", "gspec = eval(File.read('" + self.gemspec_file_name + "')); " + "print(gspec.version)"]
     )
     regex = re.compile("^(\d+(?:\.\d+)*)-?(.*)$")
     match = re.match(regex, new_version)
     if match:
         debug("Deduced version='%s' release='%s'" % (match.group(1), match.group(2)))
         self._use_version = match.group(1)
         """ The release value is currently parsed, but unused. """
         self._use_release = match.group(2)
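The regular expression splits the gemspec version string into a numeric version and an optional pre-release suffix. For example (input value is hypothetical):

    import re

    regex = re.compile(r"^(\d+(?:\.\d+)*)-?(.*)$")
    match = regex.match("1.2.3-beta1")      # hypothetical gemspec version
    print(match.group(1))                   # "1.2.3"  -> used as the tag version
    print(match.group(2))                   # "beta1"  -> parsed but currently unused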
Example #49
    def _confirm_commit_msg(self, diff_output):
        """
        Generates a commit message in a temporary file, gives the user a
        chance to edit it, and returns the filename to the caller.
        """

        fd, name = tempfile.mkstemp()
        debug("Storing commit message in temp file: %s" % name)
        write(
            fd, "Update %s to %s\n" %
            (self.project_name, self.builder.build_version))
        # Write out Resolves line for all bugzillas we see in commit diff:
        # TODO: move to DistGitBuilder only?
        try:
            (required_bz_flags, placeholder_bz) = self._get_bz_flags()
            extractor = BugzillaExtractor(diff_output,
                                          required_flags=required_bz_flags,
                                          placeholder_bz=placeholder_bz)
            for line in extractor.extract():
                write(fd, line + "\n")
        except MissingBugzillaCredsException:
            error_out([
                "Releaser specifies required flags but you have not configured",
                "a ~/.bugzillarc with your bugzilla credentials.", "Example:",
                "", "[bugzilla.redhat.com]", "user = [email protected]",
                "password = mypassword"
            ])

        print("")
        print("##### Commit message: #####")
        print("")

        os.lseek(fd, 0, 0)
        f = os.fdopen(fd)
        for line in f.readlines():
            print(line)
        f.close()

        print("")
        print("###############################")
        print("")
        if self._ask_yes_no(
                "Would you like to edit this commit message? [y/n] ", False):
            debug("Opening editor for user to edit commit message in: %s" %
                  name)
            editor = 'vi'
            if "EDITOR" in os.environ:
                editor = os.environ["EDITOR"]
            subprocess.call(editor.split() + [name])

        return name
Example #50
    def _clear_package_metadata(self):
        """
        Remove all .tito/packages/ files that have a relative path
        matching the package we're tagging a new version of. Normally
        this just removes the previous package file but if we were
        renaming oldpackage to newpackage, this would git rm
        .tito/packages/oldpackage and add
        .tito/packages/spacewalk-newpackage.
        """
        metadata_dir = os.path.join(self.rel_eng_dir, "packages")
        for filename in os.listdir(metadata_dir):
            metadata_file = os.path.join(metadata_dir, filename)  # full path

            if os.path.isdir(metadata_file) or filename.startswith("."):
                continue

            temp_file = open(metadata_file, 'r')
            (version, relative_dir) = temp_file.readline().split(" ")
            relative_dir = relative_dir.strip()  # sometimes has a newline

            if relative_dir == self.relative_project_dir:
                debug("Found metadata for our prefix: %s" %
                        metadata_file)
                debug("   version: %s" % version)
                debug("   dir: %s" % relative_dir)
                if filename == self.project_name:
                    debug("Updating %s with new version." %
                            metadata_file)
                else:
                    warn_out("%s also references %s" % (filename, self.relative_project_dir))
                    print("Assuming package has been renamed and removing it.")
                    run_command("git rm %s" % metadata_file)
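Each file under .tito/packages/ is named after a package and holds a single line: the most recently tagged version-release followed by the project's relative directory. A hypothetical illustration of the parsing above:

    # .tito/packages/spacewalk-backend (hypothetical contents):
    #   2.3.1-1 backend/
    # readline().split(" ") then yields version == "2.3.1-1" and
    # relative_dir == "backend/\n", hence the .strip() that drops the newline.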
Example #51
    def _fetch_local(self):

        source_dir = os.path.expanduser(self.builder.args['source_dir'][0])

        version_regex = re.compile("^(version:\s*)(.+)$", re.IGNORECASE)
        version = None

        with open(self.spec_file, 'r') as spec:
            for line in spec.readlines():
                match = version_regex.match(line)
                if match:
                    version = match.group(2)

        if not version:
            error_out("Version not found in spec")

        old_dir = os.getcwd()
        os.chdir(source_dir)

        fetchdir = os.path.join(self.builder.rpmbuild_sourcedir, 'archive')
        if not os.path.exists(fetchdir):
            os.mkdir(fetchdir)

        arch_prefix = "-".join([self.builder.project_name, version])

        # git archive --format=tar.gz --prefix=pulp-2.15.0/ master > pulp-2.15.0.tar.gz

        with open("./%s.tar.gz" % arch_prefix, "w+") as archive:
            subprocess.call([
                "git", "archive", "--format=tar.gz",
                ("--prefix=%s/" % arch_prefix), "master"
            ],
                            stdout=archive)

        sources = glob.glob("./*.tar.gz")
        print(sources)

        for srcfile in sources:
            debug("Copying %s from local source dir" % srcfile)
            shutil.move(srcfile,
                        os.path.join(fetchdir, os.path.basename(srcfile)))

        gitrev = "local"
        gitsha = subprocess.check_output(["git", "rev-parse",
                                          "HEAD"]).decode('utf-8')
        if gitsha:
            gitrev = "git%s" % gitsha[0:7]

        os.chdir(old_dir)

        return gitrev
Example #52
    def _get_bz_flags(self):
        required_bz_flags = None
        if self.releaser_config.has_option(self.target, 'required_bz_flags'):
            required_bz_flags = self.releaser_config.get(
                self.target, 'required_bz_flags').split(" ")
            debug("Found required flags: %s" % required_bz_flags)

        placeholder_bz = None
        if self.releaser_config.has_option(self.target, 'placeholder_bz'):
            placeholder_bz = self.releaser_config.get(self.target,
                                                      'placeholder_bz')
            debug("Found placeholder bugzilla: %s" % placeholder_bz)

        return (required_bz_flags, placeholder_bz)
Example #53
    def __init__(self,
                 name=None,
                 tag=None,
                 build_dir=None,
                 config=None,
                 user_config=None,
                 target=None,
                 releaser_config=None,
                 no_cleanup=False,
                 test=False,
                 auto_accept=False,
                 prefix="temp_dir=",
                 **kwargs):

        if 'builder_args' in kwargs:
            kwargs['builder_args']['local'] = False

        DistGitReleaser.__init__(self, name, tag, build_dir, config,
                                 user_config, target, releaser_config,
                                 no_cleanup, test, auto_accept, **kwargs)

        self.mead_scm = self.releaser_config.get(self.target, "mead_scm")

        if self.releaser_config.has_option(self.target, "mead_push_url"):
            self.push_url = self.releaser_config.get(self.target,
                                                     "mead_push_url")
        else:
            self.push_url = self.mead_scm

        # rhpkg maven-build takes an optional override --target:
        self.brew_target = None
        if self.releaser_config.has_option(self.target, "target"):
            self.brew_target = self.releaser_config.get(self.target, "target")

        # If the push URL contains MEAD_SCM_USERNAME, we require the user to
        # set this in ~/.titorc before they can run this releaser. This allows
        # us to use push URLs that require username auth, but still check a
        # generic URL into source control:
        if MEAD_SCM_USERNAME in self.push_url:
            debug("Push URL contains %s, checking for value in ~/.titorc" %
                  MEAD_SCM_USERNAME)
            if MEAD_SCM_USERNAME in user_config:
                user = user_config[MEAD_SCM_USERNAME]
            else:
                user = getpass.getuser()
                warn_out(
                    "You should specify MEAD_SCM_USERNAME in '~/.titorc'.  Using %s for now"
                    % user)

            self.push_url = self.push_url.replace(MEAD_SCM_USERNAME, user)
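The MEAD_SCM_USERNAME handling boils down to a string substitution in the push URL, taking the name from ~/.titorc when present and falling back to the local username otherwise. A rough sketch with a hypothetical placeholder token and URL:

import getpass

MEAD_SCM_USERNAME = "MEAD_SCM_USERNAME"
push_url = "ssh://MEAD_SCM_USERNAME@code.example.com/my-project.git"

user_config = {}  # normally the parsed ~/.titorc key/value pairs
if MEAD_SCM_USERNAME in push_url:
    user = user_config.get(MEAD_SCM_USERNAME, getpass.getuser())
    push_url = push_url.replace(MEAD_SCM_USERNAME, user)
print(push_url)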
Example #54
    def _clear_package_metadata(self):
        """
        Remove all rel-eng/packages/ files that have a relative path
        matching the package we're tagging a new version of. Normally
        this just removes the previous package file but if we were
        renaming oldpackage to newpackage, this would git rm
        rel-eng/packages/oldpackage and add
        rel-eng/packages/spacewalk-newpackage.
        """
        metadata_dir = os.path.join(self.rel_eng_dir, "packages")
        for filename in os.listdir(metadata_dir):
            metadata_file = os.path.join(metadata_dir, filename)  # full path

            if os.path.isdir(metadata_file) or filename.startswith("."):
                continue

            with open(metadata_file, 'r') as temp_file:
                (version, relative_dir) = temp_file.readline().split(" ")
            relative_dir = relative_dir.strip()  # sometimes has a newline

            if relative_dir == self.relative_project_dir:
                debug("Found metadata for our prefix: %s" %
                        metadata_file)
                debug("   version: %s" % version)
                debug("   dir: %s" % relative_dir)
                if filename == self.project_name:
                    debug("Updating %s with new version." %
                            metadata_file)
                else:
                    print("WARNING: %s also references %s" % (filename,
                            self.relative_project_dir))
                    print("Assuming package has been renamed and removing it.")
                    run_command("git rm %s" % metadata_file)
Example #55
    def srpm(self, dist=None):
        """
        Build a source RPM.
        """
        self._create_build_dirs()
        if not self.ran_tgz:
            self.tgz()

        if self.test:
            self._setup_test_specfile()

        debug("Creating srpm from spec file: %s" % self.spec_file)
        define_dist = ""
        if self.dist:
            debug("using self.dist: %s" % self.dist)
            define_dist = "--define 'dist %s'" % self.dist
        elif dist:
            debug("using dist: %s" % dist)
            define_dist = "--define 'dist %s'" % dist
        else:
            debug("*NOT* using dist at all")

        rpmbuild_options = self.rpmbuild_options + self._scl_to_rpmbuild_option()

        cmd = ('rpmbuild --define "_source_filedigest_algorithm md5"  --define'
            ' "_binary_filedigest_algorithm md5" %s %s %s --nodeps -bs %s' % (
                rpmbuild_options, self._get_rpmbuild_dir_options(),
                define_dist, self.spec_file))
        output = run_command_print(cmd)
        self.srpm_location = find_wrote_in_rpmbuild_output(output)[0]
        self.artifacts.append(self.srpm_location)
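For reference, this is roughly the command string the method hands to run_command_print; the dir options, dist tag, and spec path below are hypothetical placeholders:

rpmbuild_options = ""
dir_options = '--define "_topdir /tmp/tito/rpmbuild"'   # hypothetical
define_dist = "--define 'dist .el8'"                    # hypothetical
spec_file = "/tmp/tito/my-package.spec"                 # hypothetical

cmd = ('rpmbuild --define "_source_filedigest_algorithm md5"  --define'
       ' "_binary_filedigest_algorithm md5" %s %s %s --nodeps -bs %s' % (
           rpmbuild_options, dir_options, define_dist, spec_file))
print(cmd)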
Example #57
    def _confirm_commit_msg(self, diff_output):
        """
        Generates a commit message in a temporary file, gives the user a
        chance to edit it, and returns the filename to the caller.
        """

        fd, name = tempfile.mkstemp()
        debug("Storing commit message in temp file: %s" % name)
        write(fd, "Update %s to %s\n" % (self.project_name,
            self.builder.build_version))
        # Write out Resolves line for all bugzillas we see in commit diff:
        # TODO: move to DistGitBuilder only?
        try:
            (required_bz_flags, placeholder_bz) = self._get_bz_flags()
            extractor = BugzillaExtractor(diff_output,
                required_flags=required_bz_flags,
                placeholder_bz=placeholder_bz)
            for line in extractor.extract():
                write(fd, line + "\n")
        except MissingBugzillaCredsException:
            error_out([
                "Releaser specifies required flags but you have not configured",
                "a ~/.bugzillarc with your bugzilla credentials.",
                "Example:",
                "",
                "[bugzilla.redhat.com]",
                "user = [email protected]",
                "password = mypassword"])

        print("")
        print("##### Commit message: #####")
        print("")

        os.lseek(fd, 0, 0)
        f = os.fdopen(fd)
        for line in f.readlines():
            print(line)
        f.close()

        print("")
        print("###############################")
        print("")
        if self._ask_yes_no("Would you like to edit this commit message? [y/n] ", False):
            debug("Opening editor for user to edit commit message in: %s" % name)
            editor = 'vi'
            if "EDITOR" in os.environ:
                editor = os.environ["EDITOR"]
            subprocess.call(editor.split() + [name])

        return name
Example #58
File: main.py Project: awood/tito
    def rsync_to_remote(self, rsync_args, temp_dir, rsync_location):
        print("rsync %s --delete %s/ %s" % (rsync_args, temp_dir, rsync_location))
        os.chdir(temp_dir)
        # TODO: configurable rsync options?
        cmd = "rsync %s --delete %s/ %s" % (rsync_args, temp_dir, rsync_location)
        if self.dry_run:
            self.print_dry_run_warning(cmd)
        else:
            output = run_command(cmd)
            debug(output)
        if not self.no_cleanup:
            debug("Cleaning up [%s]" % temp_dir)
            os.chdir("/")
            shutil.rmtree(temp_dir)
        else:
            warn_out("leaving %s (--no-cleanup)" % temp_dir)
Example #59
    def _get_bz_flags(self):
        required_bz_flags = None
        if self.releaser_config.has_option(self.target,
            'required_bz_flags'):
            required_bz_flags = self.releaser_config.get(self.target,
                'required_bz_flags').split(" ")
            debug("Found required flags: %s" % required_bz_flags)

        placeholder_bz = None
        if self.releaser_config.has_option(self.target,
            'placeholder_bz'):
            placeholder_bz = self.releaser_config.get(self.target,
                'placeholder_bz')
            debug("Found placeholder bugzilla: %s" % placeholder_bz)

        return (required_bz_flags, placeholder_bz)