Example #1
    def run_action(self, action, args=None):
        if not args:
            args = {}
        if action.continuable:
            log.info(log.term.bold("## %s" % action.name))
        for carg in action.const_args:
            args[carg] = action.const_args[carg]
        action_fun = action.action_fun
        if not action_fun:
            action_fun = self._get_action_fun(action)
            if not action_fun:
                raise exception.ActionFunctionNotAvailable(
                    action=action.name, module=action.module)
            action.action_fun = action_fun

        argspec = inspect_getargspec(action_fun)
        fun_args = []
        if argspec.defaults:
            n_defaults = len(argspec.defaults)
        else:
            n_defaults = 0
        n_required = len(argspec.args) - n_defaults
        for i, arg in enumerate(argspec.args):
            if arg in args:
                fun_args.append(args[arg])
            else:
                if i < n_required:
                    raise exception.RequiredActionArgumentNotAvailable(
                        action=action.name, arg=arg)
                else:
                    fun_args.append(argspec.defaults[i - n_required])
        return action_fun(*fun_args)
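
The binding loop above maps a dict of available arguments onto the action function's positional parameters via introspection, falling back to declared defaults for optional ones and failing on missing required ones. A minimal standalone sketch of the same technique, assuming Python 3's inspect.getfullargspec (names below are illustrative, not rdopkg's):

import inspect

def bind_and_call(fun, available):
    # Map available values onto fun's parameters; use defaults when missing.
    spec = inspect.getfullargspec(fun)
    n_defaults = len(spec.defaults or ())
    n_required = len(spec.args) - n_defaults
    call_args = []
    for i, name in enumerate(spec.args):
        if name in available:
            call_args.append(available[name])
        elif i < n_required:
            raise TypeError("missing required argument: %s" % name)
        else:
            call_args.append(spec.defaults[i - n_required])
    return fun(*call_args)

def greet(name, greeting='Hello'):
    return '%s, %s!' % (greeting, name)

print(bind_and_call(greet, {'name': 'world'}))  # Hello, world!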
Example #2
    def __init__(self, base_path=None, url=None, local_repo_path=None,
                 verbose=False, user=None):
        # remote repo (base_path, url) XOR local repo (local_repo_path)
        assert bool(base_path and url) != bool(local_repo_path)

        self.user = user
        if not self.user:
            # we need a user, so pick the current user by default
            env = os.environ.copy()
            # USERNAME is an env var used by gerrit
            self.user = env.get('USERNAME') or env.get('USER')
            if verbose:
                log.info('Using user %s with repo %s' % (self.user,
                                                         url))
        self.url = tidy_ssh_user(url, self.user)
        self.verbose = verbose
        if local_repo_path:
            self.repo_path = local_repo_path
            self.base_path, self.repo_name = \
                os.path.split(os.path.abspath(local_repo_path))
        else:
            self.base_path = base_path
            self.repo_name = repo_name_from_url(self.url)
            if not self.repo_name:
                raise exception.RepoError(
                    what='Failed to parse %s repo URL: %s' % (self.repo_desc,
                                                              self.url))
            self.repo_path = os.path.join(self.base_path, self.repo_name)
Example #3
def get_updates_info(verbose=False):
    gitdir = tempfile.mkdtemp(prefix='rdopkg-list-updates')
    uinfos = []
    prev_cwd = os.getcwd()
    os.chdir(gitdir)
    try:
        cmd.git('init', log_cmd=False)
        f_project = filters.OrFilter()
        f_project.add_items('project', 'rdo-update')

        f_other = filters.Items()
        f_other.add_items('is', 'open')

        query = reviews.Query(cfg['RDO_UPDATE_GERRIT_HOST'])
        for review in query.filter(f_project, f_other):
            try:
                url = review.get('url', '???')
                if verbose:
                    log.info("Processing update review: %s" % url)
                uinfo = get_review_update_info(review, gitdir)
                uinfos.append(uinfo)
            except Exception as ex:
                if verbose:
                    log.warn("Error processing update review: %s: %s",
                             type(ex).__name__, str(ex))
    finally:
        os.chdir(prev_cwd)
        shutil.rmtree(gitdir)
    return uinfos
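
get_updates_info above does all of its git work in a throwaway directory and guarantees cleanup even when processing fails. A minimal standalone version of that pattern, with illustrative names rather than rdopkg helpers:

import os
import shutil
import subprocess
import tempfile

def in_temp_git_dir(work):
    # Create a scratch git repo, run work() inside it, and always clean up.
    gitdir = tempfile.mkdtemp(prefix='scratch-git-')
    prev_cwd = os.getcwd()
    os.chdir(gitdir)
    try:
        subprocess.check_call(['git', 'init', '-q'])
        return work()
    finally:
        os.chdir(prev_cwd)
        shutil.rmtree(gitdir)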
Example #4
def rebase_patches_branch(new_version, local_patches_branch,
                          patches_branch=None, local_patches=False,
                          patches_style=None, version_tag_style=None,
                          bump_only=False):
    if bump_only:
        return
    git.checkout(local_patches_branch)
    new_version_tag = guess.version2tag(new_version, version_tag_style)
    git('rebase', new_version_tag, direct=True)

    if patches_style != 'review':
        if local_patches or not patches_branch:
            return
        if _is_same_commit(local_patches_branch, patches_branch):
            log.info("%s is up to date, no need for push." % patches_branch)
            return
        try:
            remote, branch = git.remote_branch_split(patches_branch)
            helpers.confirm("Push %s to %s / %s (with --force)?" % (
                local_patches_branch, remote, branch))
            git('push', '--force', remote,
                '%s:%s' % (local_patches_branch, branch))
            # push the tag
            git('push', '--force', remote, new_version_tag)
        except exception.UserAbort:
            pass
Example #5
    def new_build(self, srpm_url, release, dist, watch=False):
        copr = rdo_copr_name(release, dist)
        url = self.copr_api_url('coprs/%s/%s/new_build/' % (self.owner, copr))
        data = {
            'pkgs': srpm_url,
        }

        req = requests.post(url,
                            auth=(self.user['login'], self.user['token']),
                            data=data)
        output = _get_copr_data(req, self.user, type='new_build')
        build_ids = output.get('ids')
        if not build_ids:
            raise exception.CoprError(
                error="copr didn't return id of new build.(?!)")
        build_id = build_ids[0]
        if watch:
            log.info("\nWatching build (may be safely interrupted)...")
            prevstatus = None
            try:
                while True:
                    try:
                        status = self._fetch_build_status(build_id)
                    except exception.CoprError as ex:
                        log.warn("Failed to get build status: %s" % ex)
                        break

                    if prevstatus != status:
                        now = datetime.datetime.now()
                        if status in ['pending', 'waiting', 'running']:
                            cstatus = log.term.bold(status)
                        elif status == 'succeeded':
                            cstatus = log.term.good(status)
                        elif status == 'failed':
                            cstatus = log.term.error(status)
                        elif status == 'skipped':
                            cstatus = ("{t.magenta}{st}{t.normal} (build "
                                       "already done)".format(t=log.term,
                                                              st=status))
                        else:
                            cstatus = log.term.warn(status)
                        log.info("[%s] %s" % (now.strftime('%H:%M:%S'),
                                              cstatus))
                        prevstatus = status

                    if status in [
                            'succeeded',
                            'failed',
                            'canceled',
                            'skipped']:
                        break

                    time.sleep(60)
            except KeyboardInterrupt:
                pass
            except Exception as ex:
                log.warn("Error during copr build monitoring: %s" % ex)

        return build_id
Example #6
def copy_package(pkg_path, dest_dir, overwrite=False):
    helpers.ensure_dir(dest_dir)
    pkg_name = os.path.basename(pkg_path)
    dst_path = os.path.join(dest_dir, pkg_name)
    if not overwrite and os.path.exists(dst_path):
        raise exception.NewPackageAlreadyPresent(path=dst_path)
    log.info("{t.bold}copy{t.normal} {src} {t.bold}->{t.normal} {dst}".format(
        src=pkg_path, dst=dst_path, t=log.term))
    shutil.copyfile(pkg_path, dst_path)
    return dst_path
Example #7
 def _get_new_update_id(self):
     looking = True
     while looking:
         update_id = generate_id()
         upfile_path = self._upfile_path_abs(update_id)
         if os.path.exists(upfile_path):
             log.info("Generated colliding ID %s. Weird. "
                      "Generating new ID." % update_id)
         else:
             looking = False
     return update_id, upfile_path
Example #8
        def _push_pkg(upf):
            log.info("\nPushing update {t.bold}{upf}{t.normal}".format(
                t=log.term, upf=upf))
            update = self._load_update_file(upf)
            pushed_rpms = []
            try:
                _updated_repos = set()
                _updated_repo_bases = set()
                _pushed_build_tmp_paths = []
                for build in update.builds:
                    src_path = self._build_tmp_path(upf, build)
                    if src_path in _pushed_build_tmp_paths:
                        continue
                    build_rpms = helpers.find_files(src_path, ext='.rpm')
                    dest_repo_base_path = self._dest_repo_base_path(build.repo)
                    if not os.path.isdir(dest_repo_base_path):
                        raise exception.NotADirectory(path=dest_repo_base_path)
                    dest_path = self._build_dest_path(build)
                    for rpm in build_rpms:
                        pushed_path = copy_package(rpm, dest_path,
                                                   overwrite=self.overwrite)
                        pushed_rpms.append(pushed_path)
                    _pushed_build_tmp_paths.append(src_path)
                    _updated_repo_bases.add(dest_repo_base_path)
                    _updated_repos.add(self._dest_repo_path(build.repo,
                                                            build.dist))

                with helpers.cdir(self.update_repo_path):
                    helpers.ensure_dir(self.pushed_dir)
                    upf_base = os.path.basename(upf)
                    pushed_upf = os.path.join(self.pushed_dir, upf_base)
                    pushed_files_fn = pushed_upf + self.pushed_files_ext
                    git('mv', upf, pushed_upf)
                    pushed_files_f = open(pushed_files_fn, 'w')
                    pushed_files_f.writelines(
                        map(lambda x: "%s\n" % x, pushed_rpms))
                    pushed_files_f.close()
                    git('add', pushed_files_fn)
                    try:
                        git('commit', '-m',
                            "Push %s" % rdoupdate.core.pp_update(upf))
                    except Exception:
                        git('reset', '--hard')
                        raise
                updated_repos.update(_updated_repos)
                updated_repo_bases.update(_updated_repo_bases)
            except Exception as ex:
                if pushed_rpms:
                    log.warn("Final push failed for %s, cleaning copied "
                             "packages" % upf)
                    for rpm in pushed_rpms:
                        log.info("{t.warn}remove{t.normal} {rpm}".format(
                            t=log.term, rpm=rpm))
                        os.remove(rpm)
                raise
Example #9
    def _clone(self):
        if self.verbose:
            log.info("Cloning {desc} repo: {url}\n"
                     "        {space} into: {path}".format(
                         desc=self.repo_desc,
                         space=len(self.repo_desc) * ' ',
                         url=self.url,
                         path=self.repo_path))
        with helpers.cdir(self.base_path):
            cmd.git('clone', self.url, self.repo_name, log_cmd=self.verbose)
Example #10
def edit(path):
    editor = os.environ.get('EDITOR')
    if not editor:
        editor = 'vim'
        log.info("$EDITOR not set. Falling back to %s." % editor)
    try:
        r = run(editor, path, direct=True)
    except exception.CommandNotFound:
        raise exception.CommandNotFound(
            msg='Failed to find suitable text editor ({0}).  '
                'Please set $EDITOR environment variable.'.format(editor))
    return r.success
Example #11
def query_repos(distrepos, package, verbose=False):
    if not distrepos:
        return []
    versions = []
    for repo in distrepos:
        repo_name = repo['name']
        repo_url = repo['url']
        version = repoquery(repo_url, repo_name, package, verbose=verbose)
        if version:
            versions.append((repo_name, version))
        if verbose:
            log.info("%s: %s", repo_name, version or 'N/A')
    versions.sort(cmp=lambda x, y: nvrcmp(x[0], y[0]), reverse=True)
    return versions
Example #12
 def submit_existing_update(self, upfile_path, check_availability=True):
     update = ensure_update_notes(upfile_path)
     update_rdoinfo_check(update)
     if check_availability:
         log.info("Checking availability of updated builds...")
         for b in update.builds:
             r = b.is_available(verbose=True)
             if not r:
                 raise rdoupdate.exception.BuildNotAvailable(
                     build_id=b.id, source=b.source, detail=r.err)
     update_id, tmp_upfile_path = self._get_new_update_id()
     tmp_updir_path, _ = os.path.split(tmp_upfile_path)
     helpers.ensure_dir(tmp_updir_path)
     shutil.copyfile(upfile_path, tmp_upfile_path)
     print("\nUpdate:\n%s\n" % update)
     self.submit_update_file(update_id, msg=str(update))
Example #13
def conf():
    if cfg_files:
        log.info("Following config files were read:")
        helpers.print_list(cfg_files)
    else:
        log.info("No rdopkg config files found, using default config:")
    log.info("")
    for item in cfg.items():
        log.info("%s: %s" % item)
Example #14
def upload_fpo(pkg, user):
    dst_host = user + '@fedorapeople.org'
    dst_path = '~/public_html/copr'
    dst = '%s:%s/%s' % (dst_host, dst_path, pkg)
    _cmd = ['scp', pkg, dst]
    url = fpo_url(pkg, user)
    try:
        cmd.run(*_cmd)
    except exception.CommandFailed as ex:
        err = ex.kwargs['out'].stderr
        # TODO: fragile, use paramiko instead?
        if not re.match('scp: .*No such file or directory', err):
            raise
        log.info("Creating remote dir: %s:%s" % (dst_host, dst_path))
        cmd.run('ssh', dst_host, 'mkdir -p ' + dst_path)
        cmd.run(*_cmd)
    return url
Example #15
 def new_update(self, update, check_availability=True):
     update_id, upfile_path = self._get_new_update_id()
     updir_path, _ = os.path.split(upfile_path)
     helpers.ensure_dir(updir_path)
     upfile = file(upfile_path, 'wt')
     upfile.write(update.update_file())
     upfile.close()
     helpers.edit(upfile_path)
     cmd.run('/bin/sed', '-i', '-e', '/^#/d', upfile_path, shell=False)
     parsed_update = None
     while not parsed_update:
         try:
             parsed_update = rdoupdate.actions.check_file(upfile_path)
         except Exception as ex:
             print("\n{t.important}Error parsing update file: {t.normal}"
                   "{t.warn}{ex}{t.normal}".format(t=log.term, ex=ex))
             again = raw_input("Do you want to edit? [Yn] ")
             if again and again.lower() != 'y':
                 os.remove(upfile_path)
                 raise exception.UserAbort()
             helpers.edit(upfile_path)
             continue
         if check_availability:
             log.info("Checking availability of updated builds...")
             bad_builds = [x for x in parsed_update.builds
                           if not x.is_available(verbose=True)]
             if bad_builds:
                 builds_str = "\n".join(map(str, bad_builds))
                 print("\n{t.important}Builds below doesn't seem to be "
                       "available:{t.normal}\n{builds}\n".format(
                       t=log.term, builds=builds_str))
                 print("Options:")
                 print("  e: Edit update")
                 print("  u: Update anyway")
                 print("  a: Abort")
                 resp = raw_input("What do you want to do? [euA] ").lower()
                 if resp == 'e':
                     parsed_update = None
                     helpers.edit(upfile_path)
                 elif resp == 'u':
                     pass
                 else:
                     raise exception.UserAbort()
     print("\nUpdate:\n%s\n" % parsed_update)
     self.submit_update_file(update_id, msg=str(parsed_update))
Example #16
def get_package_env(version=None, release=None, dist=None, branch=None,
                    patches_branch=None, local_patches_branch=None,
                    patches_style=None, gerrit_patches_chain=None,
                    release_bump_index=None):
    if not branch:
        branch = git.current_branch()
    if branch.endswith('-patches'):
        branch = branch[:-8]
        if git.branch_exists(branch):
            log.info(
                "This looks like -patches branch. Assuming distgit branch: "
                "%s" % branch)
            git.checkout(branch)
        else:
            raise exception.InvalidUsage(
                why="This action must be run on a distgit branch.")
    args = {
        'package': guess.package(),
        'branch': branch,
    }
    osdist = guess.osdist()
    if osdist.startswith('RH'):
        log.info("RH package detected.")
        args['fedpkg'] = ['rhpkg']
    if not patches_branch:
        patches_branch = guess.patches_branch(branch, pkg=args['package'],
                                              osdist=osdist)
    if not patches_style:
        patches_style = guess.patches_style(gerrit_patches_chain)
    args['patches_style'] = patches_style
    args['patches_branch'] = patches_branch
    if release_bump_index is None:
        args['release_bump_index'] = guess.release_bump_index()
    if not local_patches_branch:
        args['local_patches_branch'] = patches_branch.partition('/')[2]
    if not version:
        base_ref = guess.patches_base_ref()
        version, _ = guess.tag2version(base_ref)
        args['version'] = version
    args['version_tag_style'] = guess.version_tag_style(version=version)

    return args
Example #17
def get_source(new_sources=False):
    if not new_sources:
        # due to new-version changes, this function does nothing by default :)
        # TODO: factor this into get_source() and new_version_get_source()
        return
    # TODO: consider using `spectool --gf --source`
    source_urls = specfile.Spec().get_source_urls()
    # So far, only Source/Source0 is a tarball to download
    source_url = source_urls[0]
    source_fn = os.path.basename(source_url)
    if os.path.isfile(source_fn):
        log.info("%s already present" % source_fn)
        return
    try:
        helpers.download_file(source_url)
    except exception.CommandFailed:
        # only valid for new-sources, not for get-source
        raise exception.ActionRequired(
            msg="Failed to download source tarball. Please update Source0 in "
                ".spec file.", rerun=True)
Example #18
 def _fetch(self, force=False):
     need_fetch = True
     with self.repo_dir():
         if not force:
             try:
                 t_fetch = os.path.getmtime('.git/FETCH_HEAD')
                 t_now = int(time.time())
                 delta = t_now - t_fetch
                 if delta < cfg['FETCH_PERIOD']:
                     need_fetch = False
             except Exception:
                 pass
         if need_fetch:
             if self.verbose:
                 log.info("Fetching %s repo: %s" % (
                     self.repo_desc, self.repo_path))
             cmd.git('fetch', 'origin', log_cmd=self.verbose)
             cmd.git('checkout', '-f', 'master', log_cmd=self.verbose)
             cmd.git('reset', '--hard', 'origin/master',
                     log_cmd=self.verbose)
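
The fetch above is throttled by the modification time of .git/FETCH_HEAD, so repeated calls within cfg['FETCH_PERIOD'] skip the network round trip. A standalone sketch of the same check (the default period is illustrative):

import os
import time

def fetch_needed(git_dir='.git', period=3600):
    # Skip fetching if the last fetch happened less than `period` seconds ago.
    try:
        last_fetch = os.path.getmtime(os.path.join(git_dir, 'FETCH_HEAD'))
    except OSError:
        return True  # never fetched, or mtime unavailable: fetch now
    return (time.time() - last_fetch) >= period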
Example #19
 def __init__(self, base_path=None, url=None, local_repo_path=None,
              verbose=False, user=None):
     # remote repo (base_path, url) XOR local repo (local_repo_path)
     assert bool(base_path and url) != bool(local_repo_path)
     
     self.user = user
     self.url = url
     if self.url and self.url.startswith('ssh://'):
         # is there a user already?
         match = re.compile('ssh://([^@]+)@.+').match(self.url)
         if match:
             ssh_user = match.groups()[0]
             if ssh_user != self.user:
                 # assume prevalence of argument
                 self.url = self.url.replace(ssh_user + '@',
                                             self.user + '@')
         else:
             if not self.user:
                 # we need a user, so pick the current user by default
                 env = os.environ.copy()
                 # USERNAME is an env var used by gerrit
                 self.user = env.get('USERNAME') or env.get('USER')
                 if verbose:
                     log.info('Using user %s with %s' % (self.user,
                                                         self.url))
             self.url = 'ssh://' +\
                        self.user + '@' +\
                        self.url[len('ssh://'):]
     self.verbose = verbose
     if local_repo_path:
         self.repo_path = local_repo_path
         self.base_path, self.repo_name = \
             os.path.split(os.path.abspath(local_repo_path))
     else:
         self.base_path = base_path
         self.repo_name = repo_name_from_url(self.url)
         if not self.repo_name:
             raise exception.RepoError(
                 what='Failed to parse %s repo URL: %s' % (self.repo_desc,
                                                           self.url))
         self.repo_path = os.path.join(self.base_path, self.repo_name)
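
The inline ssh:// user handling above is what the variant in Example #2 factors out into tidy_ssh_user. A standalone sketch of such a helper, based on the behavior shown here rather than on the actual rdopkg implementation:

import re

def tidy_ssh_user(url, user=None):
    # Force `user` into an ssh:// URL, replacing any user already present.
    if not url or not user or not url.startswith('ssh://'):
        return url
    match = re.match(r'ssh://([^@]+)@(.+)', url)
    if match:
        return 'ssh://%s@%s' % (user, match.group(2))
    return 'ssh://%s@%s' % (user, url[len('ssh://'):])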
Example #20
    def sign_packages(self):
        repos = defaultdict(set)

        def _sign_pkg(upf):
            update = self._load_update_file(upf)
            for build in update.builds:
                build_path = self._build_tmp_path(upf, build)
                repos[build.repo].add(build_path)

        self._run_on_each(_sign_pkg, 'sign')

        for repo, build_paths in repos.items():
            key = "rdo-%s" % repo
            log.info("Signing with %s..." % log.term.bold(key))
            rpms = set()
            for path in build_paths:
                build_rpms = helpers.find_files(path, ext='.rpm')
                rpms = rpms.union(build_rpms)
            cmd = [self.sign_tool_path, key] + list(rpms)
            run(*cmd, direct=True)
        return self.update_files
Example #21
 def init(self, force_fetch=False):
     if not self.url:
         if not os.path.isdir(self.repo_path):
             raise exception.NotADirectory(path=self.repo_path)
         return
     if self.base_path and not os.path.isdir(self.base_path):
         if self.verbose:
             log.info("Creating base directory: %s" % self.base_path)
         os.makedirs(self.base_path)
     if not os.path.isdir(self.repo_path):
         self._clone()
     else:
         try:
             self.git_check_remote()
         except exception.RepoError as e:
             if self.verbose:
                 log.warn("%s repo didn't pass the checks, renewing: %s"
                          % (self.repo_desc, e))
             self._nuke()
             self._clone()
         else:
             self._fetch(force=force_fetch)
Example #22
 def submit_update_file(self, id, msg=''):
     upfile_path = self._upfile_path(id)
     with self.repo_dir():
         if not os.path.isfile(upfile_path):
             raise exception.UpdateFileNotFound(path=upfile_path)
         update = rdoupdate.actions.check_file(upfile_path)
         branch = update_summary(update)
         commit_msg = "New %s" % id
         if msg:
             commit_msg += "\n\n%s\n" % msg
         log.info("Setting up gerrit.")
         cmd.git('review', '-s', direct=True)
         cmd.git('branch', branch, 'origin/master')
         try:
             cmd.git('checkout', '-f', branch)
             cmd.git('add', upfile_path)
             cmd.git('commit', '-F', '-', input=commit_msg)
             log.info("Submitting update for review.")
             cmd.git('review', direct=True)
         finally:
             cmd.git('checkout', '-f', 'master')
             cmd.git('branch', '-D', branch)
Example #23
def reqcheck(version, spec=False):
    if version.upper() == 'XXX':
        if 'upstream' in git.remotes():
            current_branch = git.current_branch()
            branch = current_branch.replace('rpm-', '')
            if branch != 'master':
                branch = 'stable/{}'.format(branch)
            version = 'upstream/{}'.format(branch)
            check = _reqs.reqcheck_spec(ref=version)
        else:
            m = re.search(r'/([^/]+)_distro', os.getcwd())
            if not m:
                raise exception.CantGuess(what="requirements.txt location",
                                          why="failed to parse current path")
            path = '../%s/requirements.txt' % m.group(1)
            log.info("Delorean detected. Using %s" % path)
            check = _reqs.reqcheck_spec(reqs_txt=path)
    else:
        check = _reqs.reqcheck_spec(ref=version)
    format = None
    if spec:
        format = 'spec'
    _reqs.print_reqcheck(*check, format=format)
Example #24
def update_rdoinfo_check(update):
    fail_msg = None
    warn = False
    rls = defaultdict(set)
    for b in update.builds:
        rls[b.repo].add(b)
    log.info("Checking update using rdoinfo...")
    for r, builds in rls.items():
        rbuilds = guess.builds(r)
        rbuilds_ = list(rbuilds)
        if not rbuilds:
            msg = 'Unknown OpenStack release: %s' % r
            log.warn(msg)
            if not fail_msg:
                fail_msg = msg
            continue
        for b in builds:
            found = False
            for dist_, src_ in rbuilds:
                if b.dist == dist_:
                    found = True
                    rbuilds_ = filter(lambda x: x[0] != b.dist, rbuilds_)
                    break
            if not found:
                msg = 'Unexpected %s build: %s' % (r, b.dist)
                log.warn(msg)
                if not fail_msg:
                    fail_msg = msg
        for dist_, src_ in rbuilds_:
            log.info('Missing %s build: %s' % (r, dist_))
            warn = True
    if fail_msg:
        raise exception.UpdateCheckFailed(fail=fail_msg)
    if warn:
        helpers.confirm("Submit anyway?")
Example #25
def dump_build(build, update_file):
    if os.path.isfile(update_file):
        update = rdoupdate.actions.check_file(update_file)
        found = False
        for b in update.builds:
            if b.__dict__ == build.__dict__:
                found = True
                break
        if found:
            log.info("\nBuild already present in update file: %s" % update_file)
        else:
            log.info("\nAppending build to update file: %s" % update_file)
            update.builds.append(build)
    else:
        log.info("\nSaving build to new update file: %s" % update_file)
        update = rdoupdate.core.Update(builds=[build],
                                       notes=FILLME)
    helpers.ensure_new_file_dirs(update_file)
    file(update_file, 'w').write(update.update_file(hints=False))
Example #26
def update_spec(branch=None, changes=None,
                new_rpm_version=None, new_release=None,
                new_milestone=None, new_patches_base=None,
                no_bump=False, changelog_user=None,
                changelog_email=None, release_bump_index=None):

    if no_bump:
        return
    if not changes:
        changes = []
    _ensure_branch(branch)
    spec = specfile.Spec()
    if new_rpm_version:
        old_version = spec.get_tag('Version')
        if specfile.has_macros(old_version):
            log.info('Version contains macro - not touching that.')
        else:
            spec.set_tag('Version', new_rpm_version)
    if new_release is not None:
        if spec.recognized_release():
            spec.set_release(new_release, milestone=new_milestone)
        else:
            log.info('Custom Release format detected '
                     '- assuming custom milestone management.')
            spec.set_release(new_release)
    else:
        spec.bump_release(milestone=new_milestone,
                          index=release_bump_index)
    if new_patches_base:
        new_patches_base_version, _ = guess.tag2version(new_patches_base)
        if new_patches_base_version == new_rpm_version:
            new_patches_base = None
        changed = spec.set_patches_base_version(new_patches_base)
        if not changed:
            log.info("Macro detected in patches_base - not touching that.")
    user = (changelog_user or guess.user())
    email = (changelog_email or guess.email())
    spec.new_changelog_entry(user=user, email=email,
                             changes=changes)
    spec.save()
Example #27
def reqquery(reqs_file=None,
             reqs_ref=None,
             spec=False,
             filter=None,
             dump=None,
             dump_file=None,
             load=None,
             load_file=None,
             verbose=False):
    if not (reqs_ref or reqs_file or spec or load or load_file):
        reqs_ref = guess.patches_base_ref()
    if not (bool(reqs_ref) ^ bool(reqs_file) ^ bool(spec) ^ bool(load)
            ^ bool(load_file)):
        raise exception.InvalidUsage(
            why="Only one requirements source (-r/-R/-s/-l/-L) can be "
            "selected.")
    if dump and dump_file:
        raise exception.InvalidUsage(
            why="Only one dump method (-d/-D) can be selected.")
    if dump:
        dump_file = 'requirements.yml'
    if load:
        load_file = 'requirements.yml'

    # get query results as requested
    if load_file:
        log.info("Loading query results from file: %s" % load_file)
        r = yaml.load(open(load_file))
    else:
        release, dist = None, None
        if not filter:
            try:
                release, dist = guess.osreleasedist()
                log.info('Autodetected filter: %s/%s' % (release, dist))
            except exception.CantGuess as ex:
                raise exception.CantGuess(
                    msg='%s\n\nPlease select RELEASE(/DIST) filter to query.' %
                    str(ex))
        else:
            release, _, dist = filter.partition('/')
        module2pkg = True
        if reqs_file:
            log.info("Querying requirements file: %s" % reqs_file)
            reqs = _reqs.get_reqs_from_path(reqs_file)
        elif reqs_ref:
            log.info("Querying requirements file from git: "
                     "%s -- requirements.txt" % reqs_ref)
            reqs = _reqs.get_reqs_from_ref(reqs_ref)
        else:
            log.info("Querying .spec file")
            module2pkg = False
            reqs = _reqs.get_reqs_from_spec(as_objects=True)
        log.info('')
        r = _reqs.reqquery(reqs,
                           release=release,
                           dist=dist,
                           module2pkg=module2pkg,
                           verbose=verbose)

    if dump_file:
        log.info("Saving query results to file: %s" % dump_file)
        yaml.dump(r, open(dump_file, 'w'))

    _reqs.print_reqquery(r)
Example #28
def gerrit_patches_chain(project=None, verbose=True):
    if not project:
        project = project_from_repo()

    gerrit_host, gerrit_port = gerrit_from_repo()
    gerrit_query = GerritQuery(gerrit_host, gerrit_port, log_cmd=verbose)

    candidate = None
    number = None
    subject = None
    _last_patch = last_patch()
    if _last_patch:
        log.info('Assumed last patch is %s' % _last_patch)
        with open(_last_patch, 'r') as f:
            patch_content = f.read()
        subject_regex = re.compile(r"^Subject: (?:\[PATCH\]\s*)?(.+)$",
                                   re.MULTILINE | re.IGNORECASE)
        s = subject_regex.findall(patch_content)
        if s:
            subject = s[0]
        # commit id tells us which revision we need
        from_regex = re.compile(r"^From ([0-9a-f]{40})",
                                re.MULTILINE | re.IGNORECASE)
        for from_commit in from_regex.findall(patch_content):
            query = "project:%s commit:%s" % (project, from_commit)
            q = gerrit_query('--patch-sets', query)
            if q:
                candidate = q
                numbers = [ps.get('number') for ps
                           in candidate.get('patchSets')
                           if ps.get('revision') == from_commit]
                if numbers:
                    number = numbers[0]
                break
        # If this fails (we're dealing with migration residues),
        # look for changeIDs and use last revision.
        if not candidate:
            log.info("Revision number not found, the latest patchset "
                     "(if found) will be downloaded.")
            cid_regex = re.compile(r"^Change-Id: (I[0-9a-f]{40})",
                                   re.MULTILINE | re.IGNORECASE)
            for cid in cid_regex.findall(patch_content):
                query = "project:%s change:%s" % (project, cid)
                if subject:
                    query += " message:%s" % subject
                query += " limit:1"
                q = gerrit_query(query)
                if q:
                    candidate = q
                    break
    # last chance, with the commit message
    if not candidate:
        query = "project:%s status:open" % project
        if subject:
            query += " message:%s" % subject
        query += " limit:1"
        candidate = gerrit_query(query)

    if not candidate:
        return None
    patchset = candidate.get('number')
    if number:
        patchset = "%s/%s" % (patchset, number)
    return patchset
Example #29
def clone(package,
          force_fetch=False,
          use_master_distgit=False,
          gerrit_remotes=False,
          review_user=None):
    inforepo = rdoinfo.get_default_inforepo()
    inforepo.init(force_fetch=force_fetch)
    pkg = inforepo.get_package(package)
    if not pkg:
        raise exception.InvalidRDOPackage(package=package)
    if use_master_distgit:
        try:
            distgit = pkg['master-distgit']
            distgit_str = 'master-distgit'
        except KeyError:
            raise exception.InvalidUsage(
                msg="-m/--use-master-distgit used but 'master-distgit' "
                "missing in rdoinfo for package: %s" % package)
    else:
        distgit = pkg['distgit']
        distgit_str = 'distgit'
    log.info("Cloning {dg} into ./{t.bold}{pkg}{t.normal}/".format(
        t=log.term, dg=distgit_str, pkg=package))
    patches = pkg.get('patches')
    upstream = pkg.get('upstream')
    review_patches = pkg.get('review-patches')
    review_origin = pkg.get('review-origin')

    git('clone', distgit, package)
    with helpers.cdir(package):
        if gerrit_remotes:
            log.info('Adding gerrit-origin remote...')
            git('remote', 'add', 'gerrit-origin', distgit)
        if patches:
            log.info('Adding patches remote...')
            git('remote', 'add', 'patches', patches)
            if gerrit_remotes:
                log.info('Adding gerrit-patches remote...')
                git('remote', 'add', 'gerrit-patches', patches)
        else:
            log.warn("'patches' remote information not available in rdoinfo.")
        if upstream:
            log.info('Adding upstream remote...')
            git('remote', 'add', 'upstream', upstream)
        else:
            log.warn("'upstream' remote information not available in rdoinfo.")
        if patches or upstream:
            git('fetch', '--all')

        if not review_user:
            # USERNAME is an env var used by gerrit
            review_user = os.environ.get('USERNAME') or os.environ.get('USER')
        msg_user = ('Using {t.bold}{u}{t.normal} as gerrit username, '
                    'you can change it with '
                    '{t.cmd}git remote set-url {r} ...{t.normal}')
        if review_patches:
            log.info('Adding gerrit remote for patch chains reviews...')
            r = tidy_ssh_user(review_patches, review_user)
            log.info(
                msg_user.format(u=review_user, r='review-patches', t=log.term))
            git('remote', 'add', 'review-patches', r)
        else:
            log.warn("'review-patches' remote information not available"
                     " in rdoinfo.")
        if review_origin:
            log.info('Adding gerrit remote for reviews...')
            r = tidy_ssh_user(review_origin, review_user)
            log.info(
                msg_user.format(u=review_user, r='review-origin', t=log.term))
            git('remote', 'add', 'review-origin', r)
        else:
            log.warn("'review-origin' remote information not available"
                     " in rdoinfo.")
        git('remote', '-v', direct=True)
Example #30
    def push_packages(self):
        updated_repo_bases = set()
        updated_repos = set()

        def _push_pkg(upf):
            log.info("\nPushing update {t.bold}{upf}{t.normal}".format(
                t=log.term, upf=upf))
            update = self._load_update_file(upf)
            pushed_rpms = []
            try:
                _updated_repos = set()
                _updated_repo_bases = set()
                _pushed_build_tmp_paths = []
                for build in update.builds:
                    src_path = self._build_tmp_path(upf, build)
                    if src_path in _pushed_build_tmp_paths:
                        continue
                    build_rpms = helpers.find_files(src_path, ext='.rpm')
                    dest_repo_base_path = self._dest_repo_base_path(build.repo)
                    if not os.path.isdir(dest_repo_base_path):
                        raise exception.NotADirectory(path=dest_repo_base_path)
                    dest_path = self._build_dest_path(build)
                    for rpm in build_rpms:
                        pushed_path = copy_package(rpm,
                                                   dest_path,
                                                   overwrite=self.overwrite)
                        pushed_rpms.append(pushed_path)
                    _pushed_build_tmp_paths.append(src_path)
                    _updated_repo_bases.add(dest_repo_base_path)
                    _updated_repos.add(
                        self._dest_repo_path(build.repo, build.dist))

                with helpers.cdir(self.update_repo_path):
                    helpers.ensure_dir(self.pushed_dir)
                    upf_base = os.path.basename(upf)
                    pushed_upf = os.path.join(self.pushed_dir, upf_base)
                    pushed_files_fn = pushed_upf + self.pushed_files_ext
                    git('mv', upf, pushed_upf)
                    pushed_files_f = open(pushed_files_fn, 'w')
                    pushed_files_f.writelines(
                        map(lambda x: "%s\n" % x, pushed_rpms))
                    pushed_files_f.close()
                    git('add', pushed_files_fn)
                    try:
                        git('commit', '-m',
                            "Push %s" % rdoupdate.core.pp_update(upf))
                    except Exception:
                        git('reset', '--hard')
                        raise
                updated_repos.update(_updated_repos)
                updated_repo_bases.update(_updated_repo_bases)
            except Exception as ex:
                if pushed_rpms:
                    log.warn("Final push failed for %s, cleaning copied "
                             "packages" % upf)
                    for rpm in pushed_rpms:
                        log.info("{t.warn}remove{t.normal} {rpm}".format(
                            t=log.term, rpm=rpm))
                        os.remove(rpm)
                raise

        self._run_on_each(_push_pkg, 'final push')

        if updated_repos:
            log.info("\nRunning {t.cmd}createrepo{t.normal} on updated repos".
                     format(t=log.term))
            for repo in sorted(updated_repos):
                run('createrepo', repo, direct=True)

        return sorted(updated_repo_bases)
Example #31
def gerrit_patches_chain(project=None, verbose=True):
    if not project:
        project = project_from_repo()

    gerrit_host, gerrit_port = gerrit_from_repo()
    gerrit_query = GerritQuery(gerrit_host, gerrit_port, log_cmd=verbose)

    candidate = None
    number = None
    subject = None
    _last_patch = last_patch()
    if _last_patch:
        log.info('Assumed last patch is %s' % _last_patch)
        with open(_last_patch, 'r') as f:
            patch_content = f.read()
        subject_regex = re.compile(r"^Subject: (?:\[PATCH\]\s*)?(.+)$",
                                   re.MULTILINE | re.IGNORECASE)
        s = subject_regex.findall(patch_content)
        if s:
            subject = s[0]
        # commit id tells us which revision we need
        from_regex = re.compile(r"^From ([0-9a-f]{40})",
                                re.MULTILINE | re.IGNORECASE)
        for from_commit in from_regex.findall(patch_content):
            query = "project:%s commit:%s" % (project, from_commit)
            q = gerrit_query('--patch-sets', query)
            if q:
                candidate = q
                numbers = [
                    ps.get('number') for ps in candidate.get('patchSets')
                    if ps.get('revision') == from_commit
                ]
                if numbers:
                    number = numbers[0]
                break
        # If this fails (we're dealing with migration residues),
        # look for changeIDs and use last revision.
        if not candidate:
            log.info("Revision number not found, the latest patchset "
                     "(if found) will be downloaded.")
            cid_regex = re.compile(r"^Change-Id: (I[0-9a-f]{40})",
                                   re.MULTILINE | re.IGNORECASE)
            for cid in cid_regex.findall(patch_content):
                query = "project:%s change:%s" % (project, cid)
                if subject:
                    query += " message:%s" % subject
                query += " limit:1"
                q = gerrit_query(query)
                if q:
                    candidate = q
                    break
    # last chance, with the commit message
    if not candidate:
        query = "project:%s status:open" % project
        if subject:
            query += " message:%s" % subject
        query += " limit:1"
        candidate = gerrit_query(query)

    if not candidate:
        return None
    patchset = candidate.get('number')
    if number:
        patchset = "%s/%s" % (patchset, number)
    return patchset
Example #32
def reqdiff(version_tag_from, version_tag_to):
    fmt = "\n{t.bold}requirements.txt diff{t.normal} between " \
          "{t.bold}{old}{t.normal} and {t.bold}{new}{t.normal}:"
    log.info(fmt.format(t=log.term, old=version_tag_from, new=version_tag_to))
    rdiff = _reqs.reqdiff_from_refs(version_tag_from, version_tag_to)
    _reqs.print_reqdiff(*rdiff)
Example #33
 def _nuke(self):
     log.info("Removing %s repo: %s" % (self.repo_desc, self.repo_path))
     shutil.rmtree(self.repo_path, ignore_errors=True)
Example #34
def new_version_setup(patches_branch=None,
                      local_patches=False,
                      version=None,
                      new_version=None,
                      version_tag_style=None,
                      new_sources=None,
                      no_new_sources=None,
                      unattended=False,
                      bug=None,
                      bump_only=False):
    args = {}
    if new_version:
        # support both version and tag
        ver, _ = guess.tag2version(new_version)
        if ver != new_version:
            new_version = ver
            args['new_version'] = new_version
        new_version_tag = guess.version2tag(new_version, version_tag_style)
        if not bump_only and not git.object_type(new_version_tag):
            raise exception.InvalidGitRef(ref=new_version_tag)
    else:
        ub = guess.upstream_branch()
        if not git.ref_exists('refs/remotes/%s' % ub):
            msg = ("Upstream branch not found: %s\n"
                   "Can't guess latest version.\n\n"
                   "a) provide new version (git tag) yourself\n"
                   "   $ rdopkg new-version 1.2.3\n\n"
                   "b) add upstream git remote:\n"
                   "   $ git remote add -f upstream GIT_URL\n" % ub)
            raise exception.CantGuess(msg=msg)
        new_version_tag = git.get_latest_tag(ub)
        new_version, _ = guess.tag2version(new_version_tag)
        args['new_version'] = new_version
        log.info("Latest version detected from %s: %s" % (ub, new_version))
    if version == new_version:
        if unattended:
            log.info("Package is already at version %s\n" % version)
            raise exception.UserAbort(exit_code=0)
        helpers.confirm(msg="It seems the package is already at version %s\n\n"
                        "Run new-version anyway?" % version,
                        default_yes=False)
    changelog = 'Update to %s' % new_version
    if bug:
        changelog += ' (%s)' % bug
    args['changes'] = [changelog]
    args['new_patches_base'] = new_version_tag
    spec = specfile.Spec()
    rpm_version = spec.get_tag('Version')
    rpm_milestone = spec.get_milestone()
    new_rpm_version, new_milestone = specfile.version_parts(new_version)
    args['new_rpm_version'] = new_rpm_version
    if new_milestone:
        args['new_milestone'] = new_milestone
    if (rpm_version != new_rpm_version
            or bool(new_milestone) != bool(rpm_milestone)):
        if new_milestone:
            args['new_release'] = '0.1'
        else:
            args['new_release'] = '1'
    if not local_patches and not bump_only:
        if not patches_branch or \
           not git.ref_exists('refs/remotes/' + patches_branch):
            log.warn("Patches branch '%s' not found. Running in --bump-only "
                     "mode." % patches_branch)
            args['bump_only'] = True
    if new_sources or no_new_sources:
        if new_sources and no_new_sources:
            raise exception.InvalidUsage(
                msg="DOES NOT COMPUTE: both -n and -N don't make sense.")
        # new_sources == not no_new_sources
    else:
        new_sources = guess.new_sources()
    args['new_sources'] = new_sources

    return args
Example #35
def reqdiff(version_tag_from, version_tag_to):
    fmt = "\n{t.bold}requirements.txt diff{t.normal} between " \
          "{t.bold}{old}{t.normal} and {t.bold}{new}{t.normal}:"
    log.info(fmt.format(t=log.term, old=version_tag_from, new=version_tag_to))
    rdiff = _reqs.reqdiff_from_refs(version_tag_from, version_tag_to)
    _reqs.print_reqdiff(*rdiff)
Example #36
def main():
    parser = argparse.ArgumentParser(
        formatter_class=argparse.RawDescriptionHelpFormatter,
    )
    parser.add_argument(
        '--patches-branch',
        help='Specify another local "patches" branch, like "ceph-5.0-rhel-patches-bz12345"',
    )
    args = parser.parse_args()
    spec = specfile.Spec()

    name = spec.get_tag('Name', expand_macros=True)  # "ceph"
    version = spec.get_tag('Version', expand_macros=True)  # "12.2.8"
    orig_commit = spec.get_macro('commit')  # "9e20ef1b14ac70dea53123"

    branch = git.current_branch()  # "ceph-3.2-rhel-7"
    tag_style = guess.version_tag_style(version=version)  # "vX.Y.Z"
    base_tag = guess.version2tag(version, tag_style)  # "v12.2.8"
    osdist = guess.osdist()

    # "ceph-3.2-rhel-patches"
    if args.patches_branch:
        patches_branch = args.patches_branch
    else:
        remote_patches_branch = guess.patches_branch(branch, pkg=name,
                                                     osdist=osdist)
        patches_branch = remote_patches_branch.partition('/')[2]

    patches_sha = git('rev-parse', patches_branch)  # "9e20ef1b14ac70dea53123"

    archive_basename = '%s-%s' % (name, version)  # "ceph-12.2.8"
    patches_base, patches_base_commits = spec.get_patches_base()
    if patches_base_commits != 0:
        # We don't yet support the "+n_commits" syntax for patches_base.
        raise NotImplementedError('use a plain ref in patches_base')
    if patches_base is None:
        patches_base = base_tag
    filenames = diff_filenames(patches_base, patches_branch)
    if not filenames:
        # todo: make this a silent no-op eventually
        log.warning('%s identical to %s' % (patches_branch, patches_base))
        raise RuntimeError(patches_base)

    tarball = archive_files(archive_basename, patches_sha, filenames)
    log.info('wrote %s' % tarball)

    # Ensure our spec file will reference this tarball.
    spec.set_macro('commit', patches_sha)
    set_source1(spec)
    spec.save()

    # Find the changelog entries from the Git -patches branch.
    changes = check_new_commits(patches_base, orig_commit, patches_sha)

    if not changes:
        log.info('no changes. exiting')
        raise SystemExit(1)

    # Bump the release and add the %changelog entries.

    # Insert %changelog.
    rdopkg.actions.distgit.actions.update_spec(branch=branch, changes=changes)

    # add + upload this new tarball.
    if guess.new_sources():
        fedpkg = 'fedpkg'
        if osdist.startswith('RH'):
            fedpkg = 'rhpkg'
        clear_old_changes_sources()
        run(fedpkg, 'upload', tarball, direct=True)

    # Commit everything to dist-git
    rdopkg.actions.distgit.actions.commit_distgit_update(branch=branch,
                                                         local_patches_branch=patches_branch)
    # Show the final commit
    rdopkg.actions.distgit.actions.final_spec_diff(branch=branch)
Example #37
 def _nuke(self):
     log.info("Removing %s repo: %s" % (self.repo_desc, self.repo_path))
     shutil.rmtree(self.repo_path, ignore_errors=True)
Example #38
def reqquery(reqs_file=None, reqs_ref=None, spec=False, filter=None,
             dump=None, dump_file=None, load=None, load_file=None,
             verbose=False):
    if not (reqs_ref or reqs_file or spec or load or load_file):
        reqs_ref = guess.patches_base_ref()
    if not (bool(reqs_ref) ^ bool(reqs_file) ^ bool(spec)
            ^ bool(load) ^ bool(load_file)):
        raise exception.InvalidUsage(
            why="Only one requirements source (-r/-R/-s/-l/-L) can be "
                "selected.")
    if dump and dump_file:
        raise exception.InvalidUsage(
            why="Only one dump method (-d/-D) can be selected.")
    if dump:
        dump_file = 'requirements.yml'
    if load:
        load_file = 'requirements.yml'

    # get query results as requested
    if load_file:
        log.info("Loading query results from file: %s" % load_file)
        r = yaml.load(open(load_file))
    else:
        release, dist = None, None
        if not filter:
            try:
                release, dist = guess.osreleasedist()
                log.info('Autodetected filter: %s/%s'
                         % (release, dist))
            except exception.CantGuess as ex:
                raise exception.CantGuess(
                    msg='%s\n\nPlease select RELEASE(/DIST) filter to query.' %
                        str(ex))
        else:
            release, _, dist = filter.partition('/')
        module2pkg = True
        if reqs_file:
            log.info("Querying requirements file: %s" % reqs_file)
            reqs = _reqs.get_reqs_from_path(reqs_file)
        elif reqs_ref:
            log.info("Querying requirements file from git: "
                     "%s -- requirements.txt" % reqs_ref)
            reqs = _reqs.get_reqs_from_ref(reqs_ref)
        else:
            log.info("Querying .spec file")
            module2pkg = False
            reqs = _reqs.get_reqs_from_spec(as_objects=True)
        log.info('')
        r = _reqs.reqquery(reqs, release=release, dist=dist,
                           module2pkg=module2pkg, verbose=verbose)

    if dump_file:
        log.info("Saving query results to file: %s" % dump_file)
        yaml.dump(r, open(dump_file, 'w'))

    _reqs.print_reqquery(r)
Example #39
def update_patches(branch,
                   local_patches_branch,
                   bump_only=False,
                   version=None,
                   new_version=None,
                   version_tag_style=None):
    if bump_only:
        return
    target_version = new_version or version
    if not target_version:
        raise exception.RequiredActionArgumentNotAvailable(
            action='update_patches', arg='version or new_version')
    tag = guess.version2tag(target_version, version_tag_style)

    _ensure_branch(local_patches_branch)
    patches = list(git.get_commits(tag, local_patches_branch))
    n_patches = len(patches)
    _ensure_branch(branch)
    spec = specfile.Spec()
    spec.sanity_check()
    patches_base, n_excluded = spec.get_patches_base()
    ignore_regex = spec.get_patches_ignore_regex()
    if ignore_regex and patches_base is None:
        # TODO: patches_base and patches_ignore should be independent
        # patches_ignore feature tests should help with this a lot
        raise exception.OnlyPatchesIgnoreUsed()

    patch_fns = spec.get_patch_fns()
    for pfn in patch_fns:
        git('rm', '-f', '--ignore-unmatch', pfn)
    patch_fns = []

    if n_excluded > 0:
        patches = patches[:-n_excluded]
    patches.reverse()

    ranges = [patches]
    filtered_patches = patches
    if ignore_regex:
        ranges = _partition_patches(patches, ignore_regex)
        filtered_patches = flatten(ranges)
    n_filtered_out = len(patches) - len(filtered_patches)

    if ignore_regex:
        fmt = (
            '\nUsing {t.bold}patches_ignore={t.normal}{t.magenta}%s'
            '{t.normal} regexp to filter out patches.') % ignore_regex.pattern
    else:
        fmt = ('\nNo valid {t.bold}patches_ignore{t.normal} '
               'filtering regex found in the .spec file.')
    log.info(fmt.format(t=log.term))
    log.info(
        "\n{t.bold}{n} patches{t.normal} on top of {t.bold}{tag}{t.normal}"
        ", {t.bold}{ne}{t.normal} excluded by base"
        ", {t.bold}{nf}{t.normal} filtered out by regex.".format(
            t=log.term, n=n_patches, tag=tag, ne=n_excluded,
            nf=n_filtered_out))

    if patches and filtered_patches:
        for hsh, title in reversed(filtered_patches):
            log.info("%s  %s" % (log.term.green(hsh), title))

        log.info("")
        patch_fns = []
        for patch_range in ranges:
            start_commit, _title = patch_range[0]
            end_commit, _title = patch_range[-1]
            start_number = len(patch_fns) + 1

            rng = git.rev_range(start_commit + '~', end_commit)
            format_patch_cmd = [
                '-c', 'core.abbrev=7', 'format-patch', '--no-renames',
                '--no-signature', '-N', '--ignore-submodules',
                '--start-number',
                str(start_number), rng
            ]

            o = git(*format_patch_cmd)
            range_files = git._parse_output(o)
            patch_fns.extend(range_files)

        for pfn in patch_fns:
            git('add', pfn)

    spec.set_new_patches(patch_fns)
    patches_branch_ref = git('rev-parse', local_patches_branch, log_cmd=False)
    spec.set_commit_ref_macro(patches_branch_ref)
    spec.save()
Example #40
def run(cmd, *params, **kwargs):
    fatal = kwargs.get('fatal', True)
    direct = kwargs.get('direct', False)
    log_cmd = kwargs.get('log_cmd', True)
    log_fail = kwargs.get('log_fail', True)
    input = kwargs.get('input')
    print_stdout = kwargs.get('print_stdout', False)
    print_stderr = kwargs.get('print_stderr', False)
    print_output = kwargs.get('print_output', False)
    env = kwargs.get('env', None)

    cmd = [cmd] + list(params)
    cmd_str = ' '.join(cmd)

    if log_cmd:
        log.command(log.term.cmd(cmd_str))

    if print_output:
        print_stdout = True
        print_stderr = True

    if input:
        stdin = subprocess.PIPE
        input = input.encode()
    else:
        stdin = None

    if direct:
        stdout = None
        stderr = None
    else:
        stdout = subprocess.PIPE
        stderr = subprocess.PIPE

    try:
        prc = subprocess.Popen(cmd, stdin=stdin, stdout=stdout,
                               stderr=stderr, env=env)
    except OSError:
        raise exception.CommandNotFound(cmd=cmd[0])
    out, err = prc.communicate(input=input)

    if out:
        out = out.rstrip()
        if print_stdout:
            log.info(out)
    else:
        out = b''

    if err:
        err = err.rstrip()
        if print_stderr:
            log.info(err)
    else:
        err = b''

    cout = _CommandOutput(out.decode('utf-8'))
    cout.stderr = err.decode('utf-8')
    cout.return_code = prc.returncode
    cout.cmd = cmd_str
    if prc.returncode != 0:
        if log_fail:
            log_cmd_fail(cmd_str, cout)
        if fatal:
            raise exception.CommandFailed(cmd=cmd, out=cout)
    return cout
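
A hypothetical call illustrating the keyword options accepted by run() above (the command itself is arbitrary):

# Run a command, log it, capture output, and tolerate a non-zero exit.
out = run('git', 'status', '--short',
          log_cmd=True,        # echo the command line through log.command()
          fatal=False,         # don't raise CommandFailed on failure
          print_output=True)   # mirror stdout and stderr through log.info()
if out.return_code == 0:
    print(out)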